├── favicon.ico
├── readme_images
│   ├── db.png
│   ├── output.png
│   ├── web_gui.png
│   ├── run_server.png
│   └── select_file.png
├── .gitignore
├── package.json
├── README.md
├── homepage.html
└── snapback.js

/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chrismaddalena/snapback/master/favicon.ico
--------------------------------------------------------------------------------

/readme_images/db.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chrismaddalena/snapback/master/readme_images/db.png
--------------------------------------------------------------------------------

/readme_images/output.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chrismaddalena/snapback/master/readme_images/output.png
--------------------------------------------------------------------------------

/readme_images/web_gui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chrismaddalena/snapback/master/readme_images/web_gui.png
--------------------------------------------------------------------------------

/readme_images/run_server.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chrismaddalena/snapback/master/readme_images/run_server.png
--------------------------------------------------------------------------------

/readme_images/select_file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chrismaddalena/snapback/master/readme_images/select_file.png
--------------------------------------------------------------------------------

/.gitignore:
--------------------------------------------------------------------------------
notes.txt
*.nessus
*.xml
report/
node_modules/
package-lock.json
*test.js
test.txt
*.csv
*.zip
*_*-*-*_*-*-*.js
--------------------------------------------------------------------------------

/package.json:
--------------------------------------------------------------------------------
{
  "name": "Snapback",
  "version": "1.0.0",
  "description": "Super Fast HTTP Screenshots",
  "main": "snapback.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "author": "Forrest Kasler (ph3eds) @fkasler",
  "license": "ISC",
  "dependencies": {
    "archiver": "^3.0.0",
    "express": "^4.17.1",
    "jquery": "^3.4.1",
    "line-reader": "^0.4.0",
    "md5-file": "^4.0.0",
    "puppeteer": "^1.17.0",
    "socket.io": "^2.2.0",
    "sqlite3": "^4.0.9",
    "xml2js": "^0.4.19"
  }
}
--------------------------------------------------------------------------------

/README.md:
--------------------------------------------------------------------------------
Snapback
============
Snapback is an HTTP(S) screenshot tool written to take advantage of Node.js's asynchronous concurrency. It's similar to EyeWitness, gowitness, rawr, and the like, but generally faster, and it's compatible with macOS, Windows, and Linux.

Installation
============
You will need to install Node.js and npm (the Node package manager) to run the project.
I recommend running this tool on the native OS, but it works fine in a VM as well. I have tested it on a Kali VM after a basic apt-get install of node and npm without issues, and on a Windows 7 VM with a standard Node.js install.
To install dependencies:

```
git clone https://github.com/fkasler/snapback.git
cd snapback
npm install
```

That's it. Now you're ready to start collecting screenshots.

Usage
=====

```
node snapback.js
```

![A picture of the server side](./readme_images/run_server.png)

Then navigate to http://localhost:2997 and click the "Select Input File" button to start taking screenshots. Snapback currently supports .nessus files, nmap .xml files, ScopeCreep exports, and .txt files with one target URL per line.
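For example, a minimal .txt input file (these targets are made up) might look like:

```
http://10.0.0.5
https://10.0.0.5:8443
http://intranet.example.com:8080
```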
![A picture of the web GUI](./readme_images/web_gui.png)

![A picture of file selection](./readme_images/select_file.png)

This kicks off the scan and gives you screenshots and HTML data in a folder called "report".

![A picture of the tool working](./readme_images/output.png)

The "report" folder is created by the tool if it does not exist. To start a new scan/project, simply move or remove the "report" folder, restart the server side ("node snapback.js"), and refresh the web GUI.

You can also use a SOCKS5 or HTTP proxy by checking the "Use Proxy" box and specifying the proxy location. For example, to scan through a remote box over port 8080, set up the proxy with:

```
ssh username@remote.server.com -D 8080
```

Then add "socks5://localhost:8080" as your proxy setting in Snapback.

The "Delay" setting allows you to set a number of seconds to wait, after initial page load events, before taking the screenshot. This can be useful when services render slowly.

The "No Sandbox" setting is just to get around the restrictions on running the headless browser as root. It is really only useful for running the tool on Kali Linux or similar without needing to jump through a bunch of hoops.

Report checkboxes are saved automatically. Report notes are saved on "focusout", so just click outside of the input and your notes will be saved. The notes feature is meant to keep track of default/weak creds you find.

A cool use case for this tool is data mining. Because the HTML of each page is saved in your report folder, you can create a .txt input file of pages to scrape and then use a little grep/sed/vim foo to get the data you want from each page of HTML, as in the sketch below. I found myself mining a lot of emails using this method, so I went ahead and built it into the tool. Just check the "scrape emails" checkbox before scanning and the tool will run some JavaScript on each page to do an email regex search. Results are stored in a file called "emails.txt" in your report folder.
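As a rough sketch of that grep approach: Snapback saves each page's HTML as a .txt file under "report/", so (assuming a Unix-like shell with grep available) you could tally page titles across a whole scan like this:

```
# count distinct <title> tags across all captured pages
grep -hoiE '<title>[^<]*</title>' report/*.txt | sort | uniq -c | sort -rn
```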
Don't like the web UI? Prefer to live in the CLI? No problem! You can specify an input file from the command line like so:

```
node snapback.js /full/path/to/my/input_file.nessus
```

Want to use a proxy from the CLI? Just specify the proxy as an additional argument after your input file:

```
node snapback.js /full/path/to/my/input_file.nessus socks5://localhost:8080
```

Reporting
=====
To export a .zip with a copy of your SQLite database and the screenshots that might be pulled into a report (auth prompts and weak creds), use the "Report Export" button. The .zip will be stored in your Snapback project root.

The database is a single table, "services", with the following structure (a sample query follows the field list):

![A picture of the database tables](./readme_images/db.png)

**url** TEXT NOT NULL UNIQUE - the URL of the captured service

**image_path** TEXT - the file path to the captured image, relative to the Snapback project root

**image_hash** TEXT - MD5 hash of the captured image, for fingerprinting/grouping

**text_path** TEXT - path to the HTML of the captured page, relative to the Snapback project root

**text_hash** TEXT - MD5 hash of the captured HTML, for fingerprinting/grouping

**text_size** INTEGER - RESERVED BUT NOT USED YET, for fingerprinting/grouping

**captured** INTEGER - 1 == captured, 0 == not captured

**error** INTEGER - 1 == error capturing, 0 == no error capturing (yet... the URL may not have been tried)

**viewed** INTEGER - 1 == viewed, 0 == not viewed

**default_creds** TEXT - used to record default creds that the user finds

**auth_prompt** INTEGER - 1 == there is an auth prompt on the page, 0 == no auth prompt detected

**notes** TEXT - used to record device type or other notes for further testing/investigation
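The export is a standard SQLite file, so you can also query it directly; for example, with the sqlite3 command-line client (not bundled with Snapback):

```
# list every URL that showed an auth prompt, plus any creds you recorded
sqlite3 report/snapback.db "SELECT url, default_creds FROM services WHERE auth_prompt = 1;"
```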
Happy Hacking :)
--------------------------------------------------------------------------------
/homepage.html:
--------------------------------------------------------------------------------
<!-- The page markup was not recoverable from this export; only its visible text survives: -->
<!-- title: Snapback -->
<!-- form labels: Use Proxy: | Delay: | No Sandbox (dangerous): | Scrape Emails: -->
--------------------------------------------------------------------------------

/snapback.js:
--------------------------------------------------------------------------------
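// snapback.js: the whole server lives in this one file. It parses an input
// file into a URL queue, drives headless Chrome through puppeteer to capture
// a screenshot and the HTML of each service, records results in SQLite, and
// serves the review GUI over express + socket.io.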
const puppeteer = require('puppeteer');
const fs = require('fs');
const lineReader = require('line-reader');
const parseString = require('xml2js').parseString;
const md5File = require('md5-file')
var app = require('express')();
var http = require('http').Server(app);
var io = require('socket.io')(http);
var archiver = require('archiver');

//set up a report directory if it doesn't exist
try{
  fs.mkdirSync('./report')
}catch(err){
  //must exist already. We do it this way to avoid a race condition of checking the existence of the dir before trying to write to it
}

//create a write stream to store scraped emails
var email_file = fs.createWriteStream("./report/emails.txt", {flags:'a'});

// =======================================================
// Connect to/create the database
// =======================================================
var sqlite3 = require('sqlite3').verbose();
var db = new sqlite3.Database('./report/snapback.db', sqlite3.OPEN_READWRITE | sqlite3.OPEN_CREATE, (err) => {
  if (err) {
    console.error(err.message);
  }
  console.log('Connected to the snapback database.');
});

//create the table if it doesn't exist.
db.serialize(function() {
  db.run("CREATE TABLE IF NOT EXISTS services (url TEXT NOT NULL UNIQUE, image_path TEXT, image_hash TEXT, text_path TEXT, text_hash TEXT, text_size INTEGER, captured INTEGER, error INTEGER, viewed INTEGER, default_creds TEXT, auth_prompt INTEGER, notes TEXT)");
});

//XML parsing state: we buffer one host's worth of XML at a time
var xml_buffer = ''
var add_to_buffer = false

function parseReportHost(xml, browser, io){
  parseString(xml, function (err, report_host) {
    let host = report_host.ReportHost.$.name
    let report_items = report_host.ReportHost.ReportItem
    try{
      Object.keys(report_items).forEach(function(key) {
        parseReportItem(host, report_items[key], browser, io)
      })
    }catch(err){
      //host must not have any findings
    }
  });
}

function parseReportItem(host, item, browser, io){
  let plugin_id = item.$.pluginID
  let service = item.$.svc_name
  let port = item.$.port
  if((port == '443') || (port == '8443') || (service == "https?")){
    push_to_queue("https://" + host + ":" + port, browser, io);
  }else if((plugin_id == '22964') && (service == "www")){
    //plugin 22964 is Nessus "Service Detection": its output says whether the web server speaks TLS
    if(item.plugin_output.toString().match(/TLS|SSL/i)){
      push_to_queue("https://" + host + ":" + port, browser, io);
    }else{
      push_to_queue("http://" + host + ":" + port, browser, io);
    }
  }else if((service == "www") || (service == "http?")){
    push_to_queue("http://" + host + ":" + port, browser, io);
  //matching Chris Truncer's EyeWitness logic of checking for port 80 on all RDP and VNC hosts
  }//else if((service == "msrdp") || (port == "3389") || (service == "vnc")){
  //  push_to_queue("http://" + host + ":80", browser, io);
  //}
}

function parseNmapHost(xml, browser, io){
  parseString(xml, function (err, report_host) {
    let addresses = report_host.host.address
    let host = ""
    try{
      Object.keys(addresses).forEach(function(key) {
        if(addresses[key].$.addrtype == "ipv4"){
          host = addresses[key].$.addr
        }
      })
    }catch(err){
      //host must not have an IPv4 address we can use
      return
    }
    let report_items = report_host.host.ports
    try{
      Object.keys(report_items).forEach(function(key) {
        parseNmapPort(host, report_items[key], browser, io)
      })
    }catch(err){
      //host must not have any open ports
    }
  });
}

function parseNmapPort(host, item, browser, io){
  let services = item.port
  try{
    Object.keys(services).forEach(function(key) {
      if(services[key].state[0].$.state == "open"){
        if(services[key].service[0].$.name.match(/https/ig)){
          push_to_queue("https://" + host + ":" + services[key].$.portid, browser, io);
        }else if(services[key].service[0].$.name.match(/http/ig)){
          push_to_queue("http://" + host + ":" + services[key].$.portid, browser, io);
        }else if(services[key].service[0].$.name.match(/ssl/ig)){
          push_to_queue("https://" + host + ":" + services[key].$.portid, browser, io);
        }
      }
    })
  }catch(err){
    //host must not have any findings
  }
}
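// push_to_queue is the single entry point for new targets: the UNIQUE
// constraint on the url column rejects re-inserts at the DB level, and the
// in-memory allServices list keeps the same URL from being queued twice
// within one run.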
function push_to_queue(url, browser, io){
  if(!allServices.includes(url)){
    let stmt = db.prepare("INSERT INTO services VALUES (?,?,?,?,?,?,?,?,?,?,?,?)");
    stmt.run([url, '', '', '', '', '', 0, 0, 0, '', 0, ''], function(err){
      //an error here just means the row already existed (UNIQUE url); the
      //allServices check above already keeps this run deduplicated
    })
    myQueue.push(url);
    allServices.push(url);
  }
}

//screen cap state
var myQueue = []
var allServices = []

var active_threads = 0
const max_threads = 15
var scrape_emails = false
var delay_setting = 0

async function getPic(browser, page, url, io) {
  try{
    await page.setViewport({width: 1000, height: 500})
    await page.goto(url);
    let file_name = url.replace(/[\.\/:\?\&=]+/g,"_")
    await wait(delay_setting);
    await page.screenshot({path: 'report/' + file_name + '.png'});
    update_record(url,"image_path",'report/' + file_name + '.png')
    md5File('report/' + file_name + '.png', (err, hash) => {
      if (err) {
        console.log("problem getting image hash for:" + file_name)
      }
      update_record(url,"image_hash",hash)
    })
    let bodyHTML = await page.evaluate(() => document.documentElement.innerHTML);
    fs.writeFileSync('report/' + file_name + '.txt', bodyHTML);
    //try to automatically find auth prompts
    if(bodyHTML.match(/type=['"]password['"]/ig)){
      update_record(url,"auth_prompt", 1)
    }
    //(parts of this email-scrape/bookkeeping block were lost in this export and are reconstructed from context)
    if(scrape_emails){
      try{
        //run an email regex in the page and collect one address per line
        let email_accounts = await page.evaluate('output="";emails = document.documentElement.innerHTML.match(/(([^<>()\\[\\]\\\\.,;:\\s@"]+(\\.[^<>()\\[\\]\\\\.,;:\\s@"]+)*)|(".+"))@((\\[[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}\\.[0-9]{1,3}])|(([a-zA-Z\\-0-9]+\\.)+[a-zA-Z]{2,}))/ig);for(i=0;i<emails.length;i++){output+=emails[i]+"\\n"};output')
        email_file.write(email_accounts)
      }catch(err){
        //no emails found on this page
      }
    }
    update_record(url,"text_path",'report/' + file_name + '.txt')
    update_record(url,"captured", 1)
    md5File('report/' + file_name + '.txt', (err, hash) => {
      if (err) {
        console.log("problem getting file stats for:" + file_name)
      }
      let stats = fs.statSync('report/' + file_name + '.txt');
      update_record(url,"text_size",stats.size)
      //call display here to make sure we have logged the right stats first
      update_record(url,"text_hash",hash,function(){
        display_service(url, io)
      })
    })
    update_record(url,"error", 0) //in case we re-try a host
    active_threads -= 1
    await page.close()
    process_queue(browser, io)
  }catch(err){
    //console.log(err)
    console.log("queue:[" + myQueue.length.toString() + "/" + allServices.length.toString() + "]threads[" + active_threads.toString() + "] problem capturing page: " + url)
    io.emit("server_message","queue:[" + myQueue.length.toString() + "/" + allServices.length.toString() + "]threads[" + active_threads.toString() + "] problem capturing page: " + url)
    update_record(url,"error", 1)
    io.emit('show_error', url)
    await page.close()
    active_threads -= 1
    process_queue(browser, io)
  }
}

async function display_service(url, io){
  db.get("SELECT * FROM services WHERE url = ?", [url], (err, row)=>{
    io.emit('add_service', row)
  });
}

async function update_record(url,key,value,callback){
  //key is one of our own column names; the value and url are parameterized
  let stmt = db.prepare("UPDATE services SET " + key + " = ? WHERE url = ?");
  stmt.run([value, url], function(){
    if((typeof callback) != 'undefined'){
      stmt.finalize(callback)
    }
  })
}

//helper for timeouts
async function wait (timeout) {
  return new Promise((resolve) => {
    setTimeout(() => {
      resolve()
    }, timeout)
  })
}
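// process_queue is the scheduler: it pops targets off myQueue and opens up
// to max_threads pages at once. Every getPic call re-enters process_queue
// when it finishes, so the pool stays full until the queue drains and the
// browser can be closed.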
async function process_queue(browser,io){
  if((myQueue.length > 0) && (active_threads < max_threads)){
    let target = myQueue.pop()
    if(((typeof target) != 'undefined') && (target != '')){
      let page = await browser.newPage();
      active_threads += 1
      getPic(browser, page, target, io);
      if(active_threads < max_threads){
        process_queue(browser, io)
      }
    }
  }else if((active_threads == 0) && (allServices.length > 0)){
    await browser.close();
    scrape_emails = false
    io.emit("server_message", "Done!")
  }
}

//process an input file and start running through the queue
async function process_file(request,io) {

  let puppet_options = ["--ignore-certificate-errors"]

  scrape_emails = request.scrape_emails

  delay_setting = request.delay_setting

  if(request.use_proxy){
    puppet_options.push("--proxy-server=" + request.proxy_setting)
  }

  if(request.use_no_sandbox){
    puppet_options.push("--no-sandbox")
  }

  const browser = await puppeteer.launch(
    {
      headless: true,
      ignoreHTTPSErrors: true,
      args: puppet_options
    }
  );

  //(most of the dispatch below was lost in this export; the .nessus and .txt branches are
  //reconstructed from the surviving .xml branch and the parser functions above, and the
  //ScopeCreep branch could not be recovered)
  if(request.file_path.match(/nessus$/ig)){
    io.emit('server_message', "processing as Nessus file: "+ request.file_path);
    lineReader.eachLine(request.file_path, function(line) {
      //buffer each <ReportHost> element and parse it as a unit
      if (line.match(/<ReportHost/i)){
        add_to_buffer = true
      }
      if(add_to_buffer){
        xml_buffer += line
        if (line.match(/<\/ReportHost>/i)){
          parseReportHost(xml_buffer, browser, io)
          xml_buffer = ''
          add_to_buffer = false
        }
      }
    });
    await wait(2000)
    process_queue(browser,io)
  }else if(request.file_path.match(/txt$/ig)){
    io.emit('server_message', "processing as flat URL file: "+ request.file_path);
    lineReader.eachLine(request.file_path, function(line) {
      push_to_queue(line, browser, io)
    });
    await wait(2000)
    process_queue(browser,io)
  }else if(request.file_path.match(/xml$/ig)){
    io.emit('server_message', "processing as nmap XML file: "+ request.file_path);
    lineReader.eachLine(request.file_path, function(line) {
      //buffer each <host> element and parse it as a unit
      if (line.match(/<host /i)){
        add_to_buffer = true
      }
      if(add_to_buffer){
        xml_buffer += line
        if (line.match(/<\/host>/i)){
          parseNmapHost(xml_buffer, browser, io)
          xml_buffer = ''
          add_to_buffer = false
        }
      }
    });
    await wait(2000)
    process_queue(browser,io)
  }else{
    io.emit('server_message', "Snapback currently only supports .nessus, flat .txt url files, .xml nmap files, and ScopeCreep exports. Try again...");
    await browser.close();
  }
}
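// resume_scan assumes myQueue has already been refilled from the database
// (see the 'resume_scan' and 'scan_errors' socket handlers below); it only
// relaunches the browser with the requested options and works the queue.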
//continue with a newly generated queue
async function resume_scan(request, io) {
  io.emit('server_message', "processing unfinished queue from DB" );

  scrape_emails = request.scrape_emails

  delay_setting = request.delay_setting

  let puppet_options = ["--ignore-certificate-errors"]

  if(request.use_proxy){
    puppet_options.push("--proxy-server=" + request.proxy_setting)
  }

  if(request.use_no_sandbox){
    puppet_options.push("--no-sandbox")
  }

  const browser = await puppeteer.launch(
    {
      headless: true,
      ignoreHTTPSErrors: true,
      args: puppet_options
    }
  );

  process_queue(browser,io)
}


//this function auto-runs and is our Main() for the program
(async function(){

  //if we were given a file to parse on the command line
  if((typeof process.argv[2]) != 'undefined'){
    //if we were also given a proxy on the command line
    if((typeof process.argv[3]) != 'undefined'){
      process_file({"file_path": process.argv[2], "use_proxy": true, "use_no_sandbox": false, "proxy_setting": process.argv[3]}, io)
    }else{
      process_file({"file_path": process.argv[2], "use_proxy": false, "use_no_sandbox": false, "proxy_setting": ""}, io)
    }
  }

  app.get('/', function(req, res){
    res.sendFile(__dirname + '/homepage.html');
  });

  app.get('/favicon.ico', function(req, res){
    res.sendFile(__dirname + '/favicon.ico');
  });

  app.get('/report*', function(req, res){
    res.sendFile(__dirname + req.path);
  });

  app.get('/jquery.min.js', function(req, res){
    res.sendFile(__dirname + '/node_modules/jquery/dist/jquery.min.js');
  });

  app.get('/all_services', function(req, res){
    let all_services = []
    db.serialize(function() {
      var sql = "SELECT * FROM services WHERE captured = 1 OR error = 1"
      db.each(sql, function(err, row) {
        all_services.push(row)
      }, function(){
        res.write(JSON.stringify(all_services))
        res.end()
      });
    });
  });

  app.get('/auth_prompts', function(req, res){
    let all_services = []
    db.serialize(function() {
      var sql = "SELECT * FROM services WHERE auth_prompt = 1"
      db.each(sql, function(err, row) {
        all_services.push(row)
      }, function(){
        res.write(JSON.stringify(all_services))
        res.end()
      });
    });
  });

  app.get('/unviewed_services', function(req, res){
    let all_services = []
    db.serialize(function() {
      var sql = "SELECT * FROM services WHERE viewed = 0 AND error = 0"
      db.each(sql, function(err, row) {
        all_services.push(row)
      }, function(){
        res.write(JSON.stringify(all_services))
        res.end()
      });
    });
  });

  app.get('/notes_services', function(req, res){
    let all_services = []
    db.serialize(function() {
      var sql = "SELECT * FROM services WHERE notes != '' OR default_creds != ''"
      db.each(sql, function(err, row) {
        all_services.push(row)
      }, function(){
        res.write(JSON.stringify(all_services))
        res.end()
      });
    });
  });
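  // socket.io is the control channel for the web GUI: the page emits events
  // like 'process_file', 'pause_scan', and 'update_record', and the server
  // streams progress back with 'server_message', 'add_service', and
  // 'show_error'.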
  io.on('connection', function(socket){
    socket.on('process_file', function(request){
      process_file(request,io);
    });

    socket.on('update_record', function(request){
      update_record(request.url,request.key,request.value)
    });

    socket.on('pause_scan', function(){
      myQueue = []
    });

    socket.on('resume_scan', function(request){
      myQueue = []
      db.serialize(function() {
        var sql = "SELECT * FROM services WHERE captured = 0 AND error = 0"
        db.each(sql, function(err, row) {
          myQueue.push(row.url)
        }, function(){
          resume_scan(request, io)
        });
      });
    });

    socket.on('scan_errors', function(request){
      myQueue = []
      db.serialize(function() {
        var sql = "SELECT * FROM services WHERE error = 1"
        db.each(sql, function(err, row) {
          myQueue.push(row.url)
        }, function(){
          resume_scan(request, io)
        });
      });
    });

    socket.on('csv_export', function(request){

      let fileOutput = fs.createWriteStream('./' + request.csv_name);

      fileOutput.write(`"url","image_path","image_hash","text_path","text_hash","text_size","captured","error","viewed","default_creds","auth_prompt","notes"\n`)
      db.serialize(function() {
        var sql = "SELECT * FROM services"
        db.each(sql, function(err, row) {
          fileOutput.write(`"${row.url}","${row.image_path}","${row.image_hash}","${row.text_path}","${row.text_hash}","${row.text_size}","${row.captured}","${row.error}","${row.viewed}","${row.default_creds}","${row.auth_prompt}","${row.notes}"\n`)
        }, function(){
          fileOutput.close();
        });
      });
    });

    socket.on('report_export', function(request){
      let archive = archiver('zip');

      let fileOutput = fs.createWriteStream('./' + request.zip_name);

      fileOutput.on('close', function () {
        console.log(archive.pointer() + ' total bytes');
        console.log('Report Export saved to: ' + request.zip_name);
      });

      archive.pipe(fileOutput);

      archive.on('error', function(err){
        throw err;
      });

      archive.glob('./report/snapback.db', false)

      db.serialize(function() {
        //only bundle the screenshots that are likely to end up in a report
        var sql = "SELECT * FROM services WHERE default_creds != '' OR auth_prompt = 1"
        db.each(sql, function(err, row) {
          archive.glob('./' + row.image_path, false)
        }, function(){
          archive.finalize();
        });
      });
    });

  });

  http.listen(2997, "0.0.0.0", function(){
    console.log('listening on *:2997');
  });

  //catch any server exceptions instead of exiting
  http.on('error', function (e) {
    console.log("[-]" + " " + e + "\n");
  });

  //catch any node exceptions instead of exiting
  process.on('uncaughtException', function (err) {
    console.log("[-]" + " " + 'Caught exception: ' + err + "\n");
  });
})()
--------------------------------------------------------------------------------