├── server ├── certificates │ └── README.md ├── .dockerignore ├── requirements.txt ├── controllers │ ├── TopSitesController.js │ ├── FaviconsController.js │ ├── db_operations.js │ ├── LoginDataController.js │ ├── VolumesController.js │ ├── WebDataController.js │ ├── SuspectProfile.js │ ├── HistoryController.js │ └── CacheController.js ├── .gitignore ├── routers │ ├── topsites.js │ ├── cache.js │ ├── favicons.js │ ├── logindata.js │ ├── evidence.js │ ├── webdata.js │ ├── volumes.js │ ├── user.js │ ├── bookmarks.js │ ├── profile.js │ └── history.js ├── db │ └── mongoose.js ├── models │ ├── evidence.js │ └── user.js ├── middleware │ └── auth.js ├── package.json ├── server.js ├── app.js ├── Dockerfile └── utils │ └── predictor │ └── url-class.py ├── .gitignore ├── teardown.sh ├── data └── README.md ├── client ├── public │ ├── robots.txt │ ├── favicon.ico │ ├── logo192.png │ ├── logo512.png │ ├── manifest.json │ └── index.html ├── src │ ├── layout │ │ ├── CenteredWrapper │ │ │ ├── forest.jpeg │ │ │ ├── CenteredWrapper.js │ │ │ └── CenteredWrapper.css │ │ ├── ContentWrapper │ │ │ └── ContentWrapper.js │ │ ├── TopBar │ │ │ ├── TopBar.css │ │ │ └── TopBar.js │ │ └── Sidemenu │ │ │ ├── Sidemenu.css │ │ │ └── Sidemenu.js │ ├── views │ │ ├── favicons │ │ │ ├── FaviconsContainer.css │ │ │ ├── components │ │ │ │ └── FaviconModal.js │ │ │ └── FaviconsContainer.js │ │ ├── dashboard │ │ │ ├── components │ │ │ │ ├── Profile │ │ │ │ │ ├── Profile.css │ │ │ │ │ └── Profile.js │ │ │ │ ├── BrowsingActivity │ │ │ │ │ ├── react-calendar-heatmap.css │ │ │ │ │ └── BrowsingActivity.js │ │ │ │ ├── TopSites │ │ │ │ │ └── TopSites.js │ │ │ │ ├── SystemSpecs │ │ │ │ │ └── SystemSpecs.js │ │ │ │ ├── UserActivity │ │ │ │ │ └── UserActivity.js │ │ │ │ ├── RadarWidget │ │ │ │ │ └── RadarWidget.js │ │ │ │ └── LoginPie │ │ │ │ │ └── LoginPie.js │ │ │ └── DashboardContainer.js │ │ ├── history │ │ │ ├── components │ │ │ │ ├── AvgVisitChart │ │ │ │ │ └── AvgVisitChart.js │ │ │ │ └── HistoryTable │ │ │ │ │ └── HistoryTable.js │ │ │ └── HistoryContainer.js │ │ ├── webdata │ │ │ ├── components │ │ │ │ ├── GoogleMaps.js │ │ │ │ └── WebDataTable.js │ │ │ └── WebDataContainer.js │ │ ├── downloads │ │ │ ├── components │ │ │ │ └── DownloadsTable.js │ │ │ └── DownloadsContainer.js │ │ ├── volumes │ │ │ └── VolumesContainer.js │ │ ├── bookmarks │ │ │ └── BookmarksContainer.js │ │ ├── database │ │ │ └── DatabaseContainer.js │ │ ├── logindata │ │ │ └── LoginDataContainer.js │ │ └── cache │ │ │ └── CacheContainer.js │ ├── setupTests.js │ ├── App.test.js │ ├── axios-api.js │ ├── index.css │ ├── common │ │ ├── Logo │ │ │ └── Logo.js │ │ └── SaveEvidenceModal │ │ │ └── SaveEvidenceModal.js │ ├── volumeMenu │ │ ├── VolumeMenu.css │ │ ├── Volume.js │ │ └── VolumeMenu.js │ ├── login │ │ ├── Login.css │ │ └── Login.js │ ├── store │ │ ├── reducers │ │ │ ├── auth.js │ │ │ └── appData.js │ │ └── actions │ │ │ ├── auth.js │ │ │ └── appData.js │ ├── index.js │ ├── App.js │ └── serviceWorker.js ├── certificates │ └── README.md ├── Dockerfile ├── .gitignore ├── server.js ├── package.json └── README.md ├── .gitattributes ├── startup.sh ├── install.sh ├── docker-compose.yml ├── LICENSE ├── download-model.sh └── README.md /server/certificates/README.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | mongo-volume/ 2 | .idea/ 3 | data/ 
-------------------------------------------------------------------------------- /server/.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | npm-debug.log -------------------------------------------------------------------------------- /teardown.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker-compose down 4 | docker system prune -------------------------------------------------------------------------------- /data/README.md: -------------------------------------------------------------------------------- 1 | Placeholder for content of Google Chrome /Default directory. 2 | -------------------------------------------------------------------------------- /client/public/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | server/utils/predictor/finalized_model.sav filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /client/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChmaraX/forensix/HEAD/client/public/favicon.ico -------------------------------------------------------------------------------- /client/public/logo192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChmaraX/forensix/HEAD/client/public/logo192.png -------------------------------------------------------------------------------- /client/public/logo512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChmaraX/forensix/HEAD/client/public/logo512.png -------------------------------------------------------------------------------- /startup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | docker-compose up -d 4 | 5 | echo "All containers are up and running." 
-------------------------------------------------------------------------------- /client/src/layout/CenteredWrapper/forest.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ChmaraX/forensix/HEAD/client/src/layout/CenteredWrapper/forest.jpeg -------------------------------------------------------------------------------- /client/src/views/favicons/FaviconsContainer.css: -------------------------------------------------------------------------------- 1 | .favicon-tab:hover { 2 | cursor: pointer; 3 | background-color: rgba(215, 237, 255, 0.449); 4 | } 5 | -------------------------------------------------------------------------------- /client/certificates/README.md: -------------------------------------------------------------------------------- 1 | ## Certificates 2 | 3 | #### Generate server certificate and key: 4 | 5 | ```bash 6 | openssl req -nodes -new -x509 -keyout server.key -out server.cert 7 | ``` 8 | -------------------------------------------------------------------------------- /install.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [ "$1" == "-b" ] 3 | then 4 | docker-compose build --no-cache 5 | else 6 | docker-compose pull 7 | fi 8 | 9 | echo "All images have been successfully built." 10 | -------------------------------------------------------------------------------- /client/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:16.14-bullseye-slim 2 | 3 | # Create app directory 4 | WORKDIR /app 5 | 6 | # Install app dependencies 7 | COPY . . 8 | 9 | RUN npm install -qy 10 | RUN npm run build 11 | 12 | EXPOSE 3000 13 | CMD [ "sh", "-c", "node server.js"] -------------------------------------------------------------------------------- /client/src/layout/CenteredWrapper/CenteredWrapper.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import "./CenteredWrapper.css"; 3 | 4 | function CenteredWrapper(props) { 5 | return
<div className="centered-wrapper">{props.children}</div>
; 6 | } 7 | 8 | export default CenteredWrapper; 9 | -------------------------------------------------------------------------------- /server/requirements.txt: -------------------------------------------------------------------------------- 1 | # ForensiX Python Dependencies 2 | # Run: pip install -r requirements.txt 3 | 4 | # Core ML dependencies - optimized for Docker build speed and compatibility 5 | scikit-learn==1.2.2 6 | numpy==1.24.3 7 | scipy==1.10.1 8 | pandas==2.0.3 9 | joblib==1.2.0 10 | -------------------------------------------------------------------------------- /client/src/setupTests.js: -------------------------------------------------------------------------------- 1 | // jest-dom adds custom jest matchers for asserting on DOM nodes. 2 | // allows you to do things like: 3 | // expect(element).toHaveTextContent(/react/i) 4 | // learn more: https://github.com/testing-library/jest-dom 5 | import '@testing-library/jest-dom/extend-expect'; 6 | -------------------------------------------------------------------------------- /client/src/layout/CenteredWrapper/CenteredWrapper.css: -------------------------------------------------------------------------------- 1 | .centered-wrapper { 2 | position: relative; 3 | background-image: url("./forest.jpeg"); 4 | background-size: cover; 5 | min-height: 100vh; 6 | display: flex; 7 | flex-direction: column; 8 | align-items: center; 9 | justify-content: center; 10 | } 11 | -------------------------------------------------------------------------------- /client/src/App.test.js: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import { render } from '@testing-library/react'; 3 | import App from './App'; 4 | 5 | test('renders learn react link', () => { 6 | const { getByText } = render(); 7 | const linkElement = getByText(/learn react/i); 8 | expect(linkElement).toBeInTheDocument(); 9 | }); 10 | -------------------------------------------------------------------------------- /client/src/axios-api.js: -------------------------------------------------------------------------------- 1 | import axios from "axios"; 2 | 3 | // change protocol to https in case of https 4 | var baseURL = `http://${window.location.hostname}:3001`; 5 | 6 | if (process.env.REACT_APP_DEV) { 7 | baseURL = "http://localhost:3001"; 8 | } 9 | 10 | const instance = axios.create({ 11 | baseURL: baseURL 12 | }); 13 | 14 | export default instance; 15 | -------------------------------------------------------------------------------- /client/src/views/dashboard/components/Profile/Profile.css: -------------------------------------------------------------------------------- 1 | .profile { 2 | border-left: 10px solid #306bac !important; 3 | height: inherit; 4 | } 5 | 6 | .profile .content { 7 | padding: 15px 15px 15px 5px; 8 | } 9 | 10 | .profile .content .left { 11 | float: left; 12 | } 13 | 14 | .profile .content .right { 15 | float: right; 16 | margin-left: 10px; 17 | } 18 | -------------------------------------------------------------------------------- /server/controllers/TopSitesController.js: -------------------------------------------------------------------------------- 1 | const getDbTable = require("../controllers/db_operations"); 2 | 3 | const getTopSites = async () => { 4 | const top_sites = await getDbTable({ 5 | db_name: "Top Sites", 6 | table: "top_sites", 7 | orderBy: "url_rank ASC" 8 | }); 9 | 10 | return top_sites.results; 11 | }; 12 | 13 | module.exports = getTopSites; 14 | 
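The `getDbTable` helper this controller relies on is defined in `server/controllers/db_operations.js` (later in this listing); it opens the named Chrome SQLite database read-only and runs a generated query. A minimal stand-alone sketch of what the call above performs, assuming `process.env.VOLUME_PATH` points at the mounted copy of the Chrome `Default` profile:

```js
// Sketch only: the direct better-sqlite3 equivalent of getTopSites().
const Database = require("better-sqlite3");
const path = require("path");

// "Top Sites" is the Chrome SQLite file that getDbTable resolves from db_name.
const db = new Database(path.join(process.env.VOLUME_PATH, "Top Sites"), {
  readonly: true
});

// queryBuilder assembles: SELECT * FROM top_sites ORDER BY url_rank ASC
const topSites = db.prepare("SELECT * FROM top_sites ORDER BY url_rank ASC").all();
db.close();
```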
-------------------------------------------------------------------------------- /client/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | -------------------------------------------------------------------------------- /server/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # production 12 | /build 13 | 14 | # misc 15 | .DS_Store 16 | .env.local 17 | .env.development.local 18 | .env.test.local 19 | .env.production.local 20 | 21 | npm-debug.log* 22 | yarn-debug.log* 23 | yarn-error.log* 24 | -------------------------------------------------------------------------------- /client/src/index.css: -------------------------------------------------------------------------------- 1 | body { 2 | margin: 0; 3 | font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", 4 | "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", 5 | sans-serif; 6 | -webkit-font-smoothing: antialiased; 7 | -moz-osx-font-smoothing: grayscale; 8 | } 9 | 10 | code { 11 | font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New", 12 | monospace; 13 | } 14 | -------------------------------------------------------------------------------- /client/src/layout/ContentWrapper/ContentWrapper.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import Sidemenu from "../Sidemenu/Sidemenu"; 3 | import TopBar from "../TopBar/TopBar"; 4 | 5 | function ContentWrapper(props) { 6 | return ( 7 | 8 | 9 | 10 |
{props.children}
11 |
12 | ); 13 | } 14 | 15 | export default ContentWrapper; 16 | -------------------------------------------------------------------------------- /client/src/layout/TopBar/TopBar.css: -------------------------------------------------------------------------------- 1 | .top-bar { 2 | height: 50px; 3 | margin-left: 300px; 4 | padding: 5px 20px 5px 20px; 5 | background-color: #306bac; 6 | display: flex; 7 | justify-content: space-between; 8 | color: white; 9 | } 10 | 11 | .top-bar .avatar { 12 | display: flex; 13 | align-items: center; 14 | cursor: pointer; 15 | } 16 | 17 | .top-bar .avatar .border { 18 | margin-left: 20px; 19 | border: 3px solid #6f9ceb; 20 | } 21 | -------------------------------------------------------------------------------- /server/routers/topsites.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const router = new express.Router(); 3 | const chalk = require("chalk"); 4 | const getTopSites = require("../controllers/TopSitesController"); 5 | 6 | router.get("/topsites", async (req, res) => { 7 | try { 8 | process.stdout.write(`[GET] ${req.path} ... `); 9 | const data = await getTopSites(); 10 | 11 | res.send(data); 12 | console.log(chalk.green(" [ OK ]")); 13 | } catch (e) { 14 | res.status(500).send(); 15 | } 16 | }); 17 | 18 | module.exports = router; 19 | -------------------------------------------------------------------------------- /client/src/common/Logo/Logo.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const size = props => { 4 | switch (props.size) { 5 | case "small": 6 | return "20px"; 7 | case "medium": 8 | return "40px"; 9 | case "large": 10 | return "60px"; 11 | default: 12 | return "40px"; 13 | } 14 | }; 15 | 16 | function Logo(props) { 17 | return ( 18 |

19 | ForensiX 20 |

21 | ); 22 | } 23 | 24 | export default Logo; 25 | -------------------------------------------------------------------------------- /server/db/mongoose.js: -------------------------------------------------------------------------------- 1 | const mongoose = require("mongoose"); 2 | 3 | process.env.MONGO_HOST = process.env.DEV ? "localhost" : "mongodb"; 4 | 5 | mongoose.set('strictQuery', false); 6 | 7 | mongoose.connect(`mongodb://${process.env.MONGO_HOST}:27017/forensix-mongodb`, { 8 | useNewUrlParser: true, 9 | useUnifiedTopology: true 10 | }); 11 | 12 | mongoose.connection.on("open", () => { 13 | console.log("Successfully connected to mongodb"); 14 | }); 15 | 16 | mongoose.connection.on("error", (err) => { 17 | console.log("Error connecting to mongodb:", err); 18 | }); 19 | -------------------------------------------------------------------------------- /server/models/evidence.js: -------------------------------------------------------------------------------- 1 | const mongoose = require("mongoose"); 2 | 3 | const schema = new mongoose.Schema( 4 | { 5 | data: { type: Object, required: true }, 6 | description: { type: String, required: true, trim: true }, 7 | fullname: { type: String, required: true }, 8 | reporter: { 9 | type: mongoose.Schema.Types.ObjectId, 10 | required: true, 11 | ref: "User" 12 | } 13 | }, 14 | { 15 | timestamps: true 16 | } 17 | ); 18 | 19 | const Evidence = mongoose.model("Evidence", schema); 20 | 21 | module.exports = Evidence; 22 | -------------------------------------------------------------------------------- /client/src/volumeMenu/VolumeMenu.css: -------------------------------------------------------------------------------- 1 | .volumes-wrapper { 2 | height: 600px; 3 | width: 500px; 4 | padding: 30px; 5 | text-align: center; 6 | overflow: scroll; 7 | outline: 20px solid; 8 | outline-color: rgba(0, 0, 0, 0.3); 9 | } 10 | 11 | .volume-box { 12 | height: 150px; 13 | width: 100% !important; 14 | margin-bottom: 10px !important; 15 | margin-top: 20px !important; 16 | text-align: left; 17 | transition: all 0.5s ease-in-out; 18 | border-left: 20px solid #306bac !important; 19 | cursor: pointer; 20 | } 21 | 22 | .volume-box:hover { 23 | transform: scale(1.02); 24 | } 25 | -------------------------------------------------------------------------------- /client/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "React App", 3 | "name": "Create React App Sample", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | }, 10 | { 11 | "src": "logo192.png", 12 | "type": "image/png", 13 | "sizes": "192x192" 14 | }, 15 | { 16 | "src": "logo512.png", 17 | "type": "image/png", 18 | "sizes": "512x512" 19 | } 20 | ], 21 | "start_url": ".", 22 | "display": "standalone", 23 | "theme_color": "#000000", 24 | "background_color": "#ffffff" 25 | } 26 | -------------------------------------------------------------------------------- /server/routers/cache.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const router = new express.Router(); 3 | const { getCacheEntries } = require("../controllers/CacheController"); 4 | const chalk = require("chalk"); 5 | 6 | router.get("/cache", async (req, res) => { 7 | try { 8 | process.stdout.write(`[SCAN] cache ... 
`); 9 | const {parsedBlocks, totalCount} = getCacheEntries(req.query.count); 10 | 11 | res.send({parsedBlocks, totalCount}); 12 | console.log(chalk.green(" [ OK ]")); 13 | } catch (e) { 14 | console.log(e); 15 | res.status(500).send(); 16 | } 17 | }); 18 | 19 | module.exports = router; 20 | -------------------------------------------------------------------------------- /server/controllers/FaviconsController.js: -------------------------------------------------------------------------------- 1 | const getDbTable = require("./db_operations"); 2 | const _ = require("lodash"); 3 | const parse = require("url-parse"); 4 | 5 | const getFavicons = async () => { 6 | const favicons = await getDbTable({ 7 | db_name: "Favicons", 8 | table: "favicons" 9 | }); 10 | 11 | return favicons.results; 12 | }; 13 | 14 | const getFavicon = async id => { 15 | const favicon = await getDbTable({ 16 | db_name: "Favicons", 17 | table: "icon_mapping", 18 | where: `icon_id = ${id}` 19 | }); 20 | 21 | return favicon.results; 22 | }; 23 | 24 | module.exports = { getFavicons, getFavicon }; 25 | -------------------------------------------------------------------------------- /server/middleware/auth.js: -------------------------------------------------------------------------------- 1 | const jwt = require("jsonwebtoken"); 2 | const User = require("../models/user"); 3 | 4 | const auth = async (req, res, next) => { 5 | try { 6 | const token = req.header("Authorization").replace("Bearer ", ""); 7 | const decoded = jwt.verify(token, "secrettoken123"); 8 | 9 | const user = await User.findOne({ 10 | _id: decoded._id, 11 | token: token 12 | }); 13 | 14 | if (!user) { 15 | throw new Error(); 16 | } 17 | 18 | req.token = token; 19 | req.user = user; 20 | next(); 21 | } catch (e) { 22 | res.status(401).send({ error: "Please authenticate." 
}); 23 | } 24 | }; 25 | 26 | module.exports = auth; 27 | -------------------------------------------------------------------------------- /client/src/login/Login.css: -------------------------------------------------------------------------------- 1 | .black-base { 2 | height: 400px; 3 | width: 1000px; 4 | opacity: 0.9; 5 | outline: 1px solid white; 6 | outline-offset: 15px; 7 | } 8 | 9 | .form-panel { 10 | position: absolute !important; 11 | height: 500px; 12 | width: 500px; 13 | margin-left: 220px !important; 14 | padding: 70px 50px 70px 50px !important; 15 | } 16 | 17 | .left-box { 18 | max-width: 440px; 19 | height: 100%; 20 | padding: 70px; 21 | text-align: center; 22 | border-left: 1px solid #306bac; 23 | border-bottom: 1px solid #306bac; 24 | } 25 | 26 | .login-button { 27 | color: white !important; 28 | background-image: linear-gradient(to right, #306bac, #98b9f2) !important; 29 | } 30 | -------------------------------------------------------------------------------- /client/src/layout/Sidemenu/Sidemenu.css: -------------------------------------------------------------------------------- 1 | .sidemenu { 2 | position: fixed; 3 | top: 0; 4 | left: 0; 5 | width: 300px; 6 | height: 100%; 7 | background-color: #141b41; 8 | color: white; 9 | z-index: 100; 10 | display: block; 11 | padding-top: 30px; 12 | padding-bottom: 30px; 13 | } 14 | 15 | .sidemenu .logo { 16 | text-align: center; 17 | } 18 | 19 | .sidemenu .nav { 20 | color: white; 21 | margin-top: 50px; 22 | font-size: 1.1rem; 23 | } 24 | 25 | .sidemenu .nav .item { 26 | padding: 7px 7px 7px 30px; 27 | } 28 | 29 | .sidemenu .nav .item:hover { 30 | padding-left: 20px; 31 | background-color: rgba(255, 255, 255, 0.05); 32 | cursor: pointer; 33 | } 34 | 35 | .sidemenu .nav .item.selected { 36 | padding-left: 20px; 37 | border-left: 10px solid #6f9ceb; 38 | background-color: rgba(255, 255, 255, 0.05); 39 | cursor: pointer; 40 | } 41 | -------------------------------------------------------------------------------- /server/routers/favicons.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const router = new express.Router(); 3 | const chalk = require("chalk"); 4 | const { 5 | getFavicons, 6 | getFavicon 7 | } = require("../controllers/FaviconsController"); 8 | 9 | router.get("/favicons", async (req, res) => { 10 | try { 11 | process.stdout.write(`[GET] ${req.path} ... `); 12 | const data = await getFavicons(); 13 | 14 | res.send(data); 15 | console.log(chalk.green(" [ OK ]")); 16 | } catch (e) { 17 | res.status(500).send(); 18 | } 19 | }); 20 | 21 | router.get("/favicons/:id", async (req, res) => { 22 | try { 23 | process.stdout.write(`[GET] ${req.path} ... `); 24 | const data = await getFavicon(req.params.id); 25 | 26 | res.send(data); 27 | console.log(chalk.green(" [ OK ]")); 28 | } catch (e) { 29 | res.status(500).send(); 30 | } 31 | }); 32 | 33 | module.exports = router; 34 | -------------------------------------------------------------------------------- /server/routers/logindata.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const router = new express.Router(); 3 | const { 4 | getLoginCredentials, 5 | getLoginData 6 | } = require("../controllers/LoginDataController"); 7 | 8 | const chalk = require("chalk"); 9 | 10 | router.get("/logindata", async (req, res) => { 11 | try { 12 | process.stdout.write(`\n[GET] ${req.path} ... 
`); 13 | const data = await getLoginData(); 14 | 15 | res.send(data); 16 | console.log(chalk.green(" [ OK ]")); 17 | } catch (e) { 18 | res.status(500).send(); 19 | } 20 | }); 21 | 22 | router.get("/logindata/credentials", async (req, res) => { 23 | try { 24 | process.stdout.write(`[GET] ${req.path} ... `); 25 | const data = await getLoginCredentials(); 26 | 27 | res.send(data); 28 | console.log(chalk.green(" [ OK ]")); 29 | } catch (e) { 30 | res.status(500).send(); 31 | } 32 | }); 33 | 34 | module.exports = router; 35 | -------------------------------------------------------------------------------- /client/src/store/reducers/auth.js: -------------------------------------------------------------------------------- 1 | import { 2 | AUTH_FAIL, 3 | AUTH_LOGOUT, 4 | AUTH_START, 5 | AUTH_SUCCESS 6 | } from "../actions/auth"; 7 | 8 | const initialState = { 9 | error: null, 10 | loading: false, 11 | token: null, 12 | username: null 13 | }; 14 | 15 | const reducer = (state = initialState, action) => { 16 | switch (action.type) { 17 | case AUTH_START: { 18 | return { ...state, error: null, loading: true }; 19 | } 20 | case AUTH_FAIL: { 21 | const { error } = action; 22 | return { ...state, error, loading: false }; 23 | } 24 | case AUTH_SUCCESS: { 25 | const { username, token } = action; 26 | return { ...state, username, token, error: null, loading: false }; 27 | } 28 | case AUTH_LOGOUT: { 29 | return { ...state, error: null, token: null, username: null }; 30 | } 31 | default: 32 | break; 33 | } 34 | return state; 35 | }; 36 | 37 | export default reducer; 38 | -------------------------------------------------------------------------------- /server/routers/evidence.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const router = new express.Router(); 3 | const Evidence = require("../models/evidence"); 4 | const auth = require("../middleware/auth"); 5 | const chalk = require("chalk"); 6 | 7 | router.post("/evidences", auth, async (req, res) => { 8 | process.stdout.write(`[EVIDENCE] adding ... 
`); 9 | const evidence = new Evidence({ 10 | ...req.body, 11 | reporter: req.user._id, 12 | fullname: req.user.firstName + " " + req.user.lastName 13 | }); 14 | 15 | try { 16 | await evidence.save(); 17 | res.status(201).send(evidence); 18 | } catch (e) { 19 | res.status(400).send(e); 20 | } 21 | console.log(chalk.green(" [ OK ]")); 22 | }); 23 | 24 | router.get("/evidences", auth, async (req, res) => { 25 | try { 26 | const evidences = await Evidence.find().select(); 27 | res.status(201).send(evidences); 28 | } catch (e) { 29 | res.status(400).send(e); 30 | } 31 | }); 32 | 33 | module.exports = router; 34 | -------------------------------------------------------------------------------- /client/src/views/dashboard/components/BrowsingActivity/react-calendar-heatmap.css: -------------------------------------------------------------------------------- 1 | .react-calendar-heatmap text { 2 | font-size: 10px; 3 | fill: #aaa; 4 | } 5 | 6 | .react-calendar-heatmap { 7 | margin-top: 30px; 8 | } 9 | 10 | .react-calendar-heatmap .react-calendar-heatmap-small-text { 11 | font-size: 5px; 12 | } 13 | 14 | .react-calendar-heatmap rect:hover { 15 | stroke: #555; 16 | stroke-width: 1px; 17 | } 18 | 19 | /* 20 | * Default color scale 21 | */ 22 | 23 | .react-calendar-heatmap .color-empty { 24 | fill: #eeeeee; 25 | } 26 | 27 | .react-calendar-heatmap .color-filled { 28 | fill: #8cc665; 29 | } 30 | 31 | /* 32 | * Gitlab color scale 33 | */ 34 | 35 | .react-calendar-heatmap .color-gitlab-4 { 36 | fill: #b9daf1; 37 | } 38 | .react-calendar-heatmap .color-gitlab-3 { 39 | fill: #acd5f2; 40 | } 41 | .react-calendar-heatmap .color-gitlab-2 { 42 | fill: #7fa8d1; 43 | } 44 | .react-calendar-heatmap .color-gitlab-1 { 45 | fill: #49729b; 46 | } 47 | .react-calendar-heatmap .color-gitlab-0 { 48 | fill: #254e77; 49 | } 50 | -------------------------------------------------------------------------------- /client/src/volumeMenu/Volume.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Card } from "semantic-ui-react"; 3 | import { Link } from "react-router-dom"; 4 | 5 | function Volume(props) { 6 | return ( 7 | 8 | 14 | type: {props.volumeInfo.file_system}
15 | mounted on: {props.volumeInfo.mount_point}
16 | mount options: {props.volumeInfo.mount_opts}
17 | 22 | integrity: {props.integrity.status} 23 | 24 |
25 |

26 | } 27 | /> 28 | 29 | ); 30 | } 31 | 32 | export default Volume; 33 | -------------------------------------------------------------------------------- /client/src/views/history/components/AvgVisitChart/AvgVisitChart.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { 3 | BarChart, 4 | Bar, 5 | XAxis, 6 | YAxis, 7 | CartesianGrid, 8 | Tooltip, 9 | ResponsiveContainer 10 | } from "recharts"; 11 | import { Header, Segment } from "semantic-ui-react"; 12 | import _ from "lodash"; 13 | 14 | function AvgVisitChart(props) { 15 | return ( 16 | 17 |
Avg. visit duration for the 5 most common sites
18 | 19 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 |
34 | ); 35 | } 36 | 37 | export default AvgVisitChart; 38 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.7" 2 | 3 | services: 4 | server: 5 | build: 6 | context: ./server 7 | dockerfile: Dockerfile 8 | container_name: forensix-server 9 | healthcheck: 10 | test: ["CMD", "test", "-f", "/app/utils/predictor/finalized_model.sav"] 11 | interval: 30s 12 | timeout: 10s 13 | retries: 3 14 | volumes: 15 | - type: bind 16 | source: ./data 17 | target: /app/data 18 | read_only: true 19 | ports: 20 | - "3001:3001" 21 | links: 22 | - mongodb 23 | depends_on: 24 | - mongodb 25 | client: 26 | build: 27 | context: ./client 28 | dockerfile: Dockerfile 29 | container_name: forensix-client 30 | ports: 31 | - "3000:3000" 32 | links: 33 | - server 34 | depends_on: 35 | - server 36 | mongodb: 37 | image: mongo:latest 38 | container_name: "forensix-mongodb" 39 | volumes: 40 | - ./mongo-volume:/data/db 41 | environment: 42 | - MONGO_DATA_DIR=/data/db 43 | - MONGO_LOG_DIR=/dev/null 44 | ports: 45 | - "27017:27017" 46 | -------------------------------------------------------------------------------- /server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node-prototype", 3 | "version": "1.0.0", 4 | "description": "Prototype for BP01", 5 | "main": "server.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1", 8 | "dev": "DEV=true nodemon --ignore 'sha1_data.json' --ignore 'data/' server.js", 9 | "start": "nodemon --ignore 'sha1_data.json' --ignore 'data/' server.js" 10 | }, 11 | "author": "", 12 | "license": "ISC", 13 | "dependencies": { 14 | "any-base": "^1.1.0", 15 | "axios": "^1.7.7", 16 | "bcryptjs": "^2.4.3", 17 | "bluebird": "^3.7.2", 18 | "chalk": "^4.1.2", 19 | "cors": "^2.8.5", 20 | "directory-tree": "^3.5.1", 21 | "express": "^4.21.1", 22 | "folder-hash": "^4.0.4", 23 | "hexdump-nodejs": "^0.1.0", 24 | "jsonwebtoken": "^9.0.2", 25 | "libphonenumber-js": "^1.11.14", 26 | "lodash": "^4.17.21", 27 | "moment": "^2.30.1", 28 | "mongoose": "^6.12.8", 29 | "better-sqlite3": "^12.2.0", 30 | "tld-countries": "^1.0.1", 31 | "url-parse": "^1.5.10" 32 | }, 33 | "devDependencies": { 34 | "nodemon": "^3.1.7" 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Adam Chmara 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /client/src/views/favicons/components/FaviconModal.js: -------------------------------------------------------------------------------- 1 | import React, { useState, useEffect } from "react"; 2 | import { Image, Modal, Header, List } from "semantic-ui-react"; 3 | import axios from "../../../axios-api"; 4 | 5 | function FaviconModal(props) { 6 | const [metadata, setMetadata] = useState(); 7 | 8 | useEffect(() => { 9 | if (props.data.id) { 10 | axios.get(`/favicons/${props.data.id}`).then(res => { 11 | setMetadata(res.data); 12 | }); 13 | } 14 | }, [props.data.id]); 15 | 16 | return ( 17 | 18 | {props.data.url} 19 | 20 | 21 | 22 |
Favicon used on:
23 | 24 | {metadata?.map(data => { 25 | return {data.page_url}; 26 | })} 27 | 28 |
29 |
30 |
31 | ); 32 | } 33 | 34 | export default FaviconModal; 35 | -------------------------------------------------------------------------------- /client/src/index.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import ReactDOM from "react-dom"; 3 | import "./index.css"; 4 | import App from "./App"; 5 | import * as serviceWorker from "./serviceWorker"; 6 | import { Provider } from "react-redux"; 7 | import thunk from "redux-thunk"; 8 | import { combineReducers, createStore, applyMiddleware, compose } from "redux"; 9 | import authReducer from "./store/reducers/auth"; 10 | import appDataReducer from "./store/reducers/appData"; 11 | 12 | const rootReducer = combineReducers({ 13 | authReducer, 14 | appDataReducer 15 | }); 16 | 17 | const composeEnhancers = window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__ || compose; 18 | 19 | const store = createStore( 20 | rootReducer, 21 | composeEnhancers(applyMiddleware(thunk)) 22 | ); 23 | 24 | ReactDOM.render( 25 | 26 | 27 | , 28 | document.getElementById("root") 29 | ); 30 | 31 | // If you want your app to work offline and load faster, you can change 32 | // unregister() to register() below. Note this comes with some pitfalls. 33 | // Learn more about service workers: https://bit.ly/CRA-PWA 34 | serviceWorker.unregister(); 35 | -------------------------------------------------------------------------------- /server/routers/webdata.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const router = new express.Router(); 3 | const chalk = require("chalk"); 4 | const { 5 | getAutofill, 6 | findPhoneNumbers, 7 | findGeolocationData 8 | } = require("../controllers/WebDataController.js"); 9 | 10 | router.get("/webdata/autofills", async (req, res) => { 11 | try { 12 | process.stdout.write(`[GET] ${req.path} ... `); 13 | const data = await getAutofill(); 14 | 15 | res.send(data); 16 | console.log(chalk.green(" [ OK ]")); 17 | } catch (e) { 18 | res.status(500).send(); 19 | } 20 | }); 21 | 22 | router.get("/webdata/phonenums", async (req, res) => { 23 | try { 24 | process.stdout.write(`[GET] ${req.path} ... `); 25 | const data = findPhoneNumbers(await getAutofill()); 26 | 27 | res.send(data); 28 | console.log(chalk.green(" [ OK ]")); 29 | } catch (e) { 30 | res.status(500).send(); 31 | } 32 | }); 33 | 34 | router.get("/webdata/geo", async (req, res) => { 35 | try { 36 | process.stdout.write(`[GET] ${req.path} ... 
`); 37 | const data = findGeolocationData(await getAutofill()); 38 | 39 | res.send(data); 40 | console.log(chalk.green(" [ OK ]")); 41 | } catch (e) { 42 | res.status(500).send(); 43 | } 44 | }); 45 | 46 | module.exports = router; 47 | -------------------------------------------------------------------------------- /client/server.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const https = require("https"); 3 | const http = require("http"); 4 | const path = require("path"); 5 | const fs = require("fs"); 6 | 7 | const app = express(); 8 | 9 | app.use(express.static(path.join(__dirname, "build"))); 10 | 11 | app.get("/*", function(req, res) { 12 | res.sendFile(path.join(__dirname, "build", "index.html")); 13 | }); 14 | 15 | const PORT = process.env.PORT || 3000; 16 | const HOST = process.env.HOST || "0.0.0.0"; 17 | 18 | fs.readdir("./certificates", function(err, files) { 19 | if (files.length <= 1) { 20 | const httpServer = http.createServer(app); 21 | 22 | httpServer.listen(PORT, HOST, function() { 23 | console.log( 24 | "Production UI server (http) is listening on %s:%s", 25 | HOST, 26 | PORT 27 | ); 28 | }); 29 | } else { 30 | const httpsServer = https.createServer( 31 | { 32 | key: fs.readFileSync("./certificates/server.key"), 33 | cert: fs.readFileSync("./certificates/server.cert") 34 | }, 35 | app 36 | ); 37 | httpsServer.listen(PORT, HOST, function() { 38 | console.log( 39 | "Production UI server (https) is listening on %s:%s", 40 | HOST, 41 | PORT 42 | ); 43 | }); 44 | } 45 | }); 46 | -------------------------------------------------------------------------------- /client/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "forensix-client", 3 | "version": "0.1.0", 4 | "private": true, 5 | "dependencies": { 6 | "@material-ui/core": "^4.12.4", 7 | "@nivo/sunburst": "^0.61.0", 8 | "@react-google-maps/api": "^1.13.0", 9 | "@testing-library/jest-dom": "^4.2.4", 10 | "@testing-library/react": "^9.5.0", 11 | "@testing-library/user-event": "^7.2.1", 12 | "axios": "^1.7.7", 13 | "material-table": "^1.69.3", 14 | "react": "^16.14.0", 15 | "react-calendar-heatmap": "^1.9.0", 16 | "react-dom": "^16.14.0", 17 | "react-json-view": "^1.21.3", 18 | "react-redux": "^7.2.9", 19 | "react-router-dom": "^5.3.4", 20 | "react-scripts": "4.0.3", 21 | "react-tooltip": "^4.5.1", 22 | "recharts": "^2.12.7", 23 | "redux-thunk": "^2.4.2", 24 | "semantic-ui-react": "^0.88.2" 25 | }, 26 | "scripts": { 27 | "start": "NODE_OPTIONS='--openssl-legacy-provider' REACT_APP_DEV=true react-scripts start", 28 | "build": "react-scripts build", 29 | "test": "react-scripts test", 30 | "eject": "react-scripts eject" 31 | }, 32 | "eslintConfig": { 33 | "extends": "react-app" 34 | }, 35 | "browserslist": { 36 | "production": [ 37 | ">0.2%", 38 | "not dead", 39 | "not op_mini all" 40 | ], 41 | "development": [ 42 | "last 1 chrome version", 43 | "last 1 firefox version", 44 | "last 1 safari version" 45 | ] 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /server/server.js: -------------------------------------------------------------------------------- 1 | const app = require("./app"); 2 | const https = require("https"); 3 | const http = require("http"); 4 | const fs = require("fs"); 5 | const { generateChecksum } = require("./controllers/VolumesController"); 6 | 7 | const PORT = process.env.PORT || 3001; 8 | const HOST = process.env.HOST 
|| "0.0.0.0"; 9 | 10 | // first generate SHA1 hash out of data medium 11 | console.log( 12 | "Generating initial SHA1 hash over the folder: %s", 13 | process.env.VOLUME_PATH 14 | ); 15 | 16 | generateChecksum(process.env.VOLUME_PATH).then(checksum => { 17 | process.env.VOLUME_CHECKSUM = checksum; 18 | console.log("SHA1 hash of medium: %s", checksum); 19 | 20 | // if certificates are present run as https 21 | fs.readdir("./certificates", function(err, files) { 22 | if (files.length <= 1) { 23 | const httpServer = http.createServer(app); 24 | httpServer.listen(PORT, HOST, () => { 25 | console.log("Server (http) is listening on %s:%s", HOST, PORT); 26 | }); 27 | } else { 28 | const httpsServer = https.createServer( 29 | { 30 | key: fs.readFileSync("./certificates/server.key"), 31 | cert: fs.readFileSync("./certificates/server.cert") 32 | }, 33 | app 34 | ); 35 | httpsServer.listen(PORT, HOST, () => { 36 | console.log("Server (https) is listening on %s:%s", HOST, PORT); 37 | }); 38 | } 39 | }); 40 | }); 41 | 42 | module.exports = app; 43 | -------------------------------------------------------------------------------- /client/src/views/webdata/components/GoogleMaps.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Segment, Message } from "semantic-ui-react"; 3 | import { GoogleMap, LoadScript } from "@react-google-maps/api"; 4 | 5 | const center = { 6 | lat: 48.14816, 7 | lng: 17.10674 8 | }; 9 | 10 | function GoogleMaps() { 11 | const apiKey = process.env.REACT_APP_GOOGLE_MAPS_API_KEY; 12 | 13 | if (!apiKey) { 14 | return ( 15 | 16 | 17 | Google Maps API Key Missing 18 |

19 | Please set REACT_APP_GOOGLE_MAPS_API_KEY in your environment variables. 20 |
21 | Create a .env file in the client directory with: 22 |
23 | REACT_APP_GOOGLE_MAPS_API_KEY=your_api_key_here 24 |

25 |
26 |
27 | ); 28 | } 29 | 30 | return ( 31 | 32 | 36 | 45 | 46 | 47 | ); 48 | } 49 | 50 | export default GoogleMaps; 51 | -------------------------------------------------------------------------------- /server/routers/volumes.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const router = new express.Router(); 3 | const chalk = require("chalk"); 4 | const { 5 | getVolumeInfo, 6 | generateChecksum, 7 | compareChecksums, 8 | getVolumeDirTree 9 | } = require("../controllers/VolumesController"); 10 | 11 | router.get("/volumes", async (req, res) => { 12 | try { 13 | const volume = getVolumeInfo(); 14 | 15 | res.status(200).send(volume); 16 | } catch (e) { 17 | res.status(400).send(e); 18 | } 19 | }); 20 | 21 | router.get("/volumes/tree", async (req, res) => { 22 | try { 23 | const tree = getVolumeDirTree(); 24 | 25 | res.status(200).send(tree); 26 | } catch (e) { 27 | res.status(400).send(e); 28 | } 29 | }); 30 | 31 | router.get("/volumes/verify", async (req, res) => { 32 | try { 33 | process.stdout.write(`[VERIFY] veryfing integrity ... `); 34 | const sum = await generateChecksum(process.env.VOLUME_PATH); 35 | 36 | if (compareChecksums(sum, process.env.VOLUME_CHECKSUM)) { 37 | console.log(chalk.green(" [ OK ]")); 38 | res.status(200).send({ status: "verified", hash: sum }); 39 | } else { 40 | console.log(chalk.red(" [ COMPROMISED ]")); 41 | res.status(200).send({ 42 | status: "compromised", 43 | hashNew: sum, 44 | hashOld: process.env.VOLUME_CHECKSUM 45 | }); 46 | } 47 | } catch (e) { 48 | console.log(e); 49 | res.status(500).send("Error veryfiing."); 50 | } 51 | }); 52 | 53 | module.exports = router; 54 | -------------------------------------------------------------------------------- /server/routers/user.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const User = require("../models/user"); 3 | const auth = require("../middleware/auth"); 4 | const router = new express.Router(); 5 | 6 | router.get("/users", auth, async (req, res) => { 7 | try { 8 | const users = await User.find().select(); 9 | res.status(201).send({ users }); 10 | } catch (e) { 11 | res.status(400).send(e); 12 | } 13 | }); 14 | 15 | router.post("/users/register", async (req, res) => { 16 | const user = new User(req.body); 17 | 18 | try { 19 | await user.save(); 20 | res.status(201).send({ user }); 21 | } catch (e) { 22 | res.status(400).send(e); 23 | } 24 | }); 25 | 26 | router.post("/users/login", async (req, res) => { 27 | try { 28 | const { user, token } = await User.authenticate( 29 | req.body.username, 30 | req.body.password 31 | ); 32 | res.send({ user, token }); 33 | } catch (e) { 34 | res.status(400).send(); 35 | } 36 | }); 37 | 38 | router.post("/users/logout", auth, async (req, res) => { 39 | try { 40 | req.user.token = null; 41 | await req.user.save(); 42 | 43 | res.send(); 44 | } catch (e) { 45 | res.status(500).send(); 46 | } 47 | }); 48 | 49 | router.get("/users/me", auth, async (req, res) => { 50 | res.send(req.user); 51 | }); 52 | 53 | router.delete("/users/me", auth, async (req, res) => { 54 | try { 55 | await req.user.remove(); 56 | res.send(req.user); 57 | } catch (e) { 58 | res.status(500).send(); 59 | } 60 | }); 61 | 62 | module.exports = router; 63 | -------------------------------------------------------------------------------- /server/routers/bookmarks.js: -------------------------------------------------------------------------------- 1 | const 
express = require("express"); 2 | const router = new express.Router(); 3 | const chalk = require("chalk"); 4 | const fs = require("fs"); 5 | const path = require("path"); 6 | const _ = require("lodash"); 7 | 8 | const converWebkitTimestamp = webkitTimestamp => { 9 | const dateInSeconds = Math.round(webkitTimestamp / 1000000) - 11644473600; 10 | return new Date(dateInSeconds * 1000).toLocaleDateString(); 11 | }; 12 | 13 | router.get("/bookmarks", async (req, res) => { 14 | try { 15 | process.stdout.write(`[GET] ${req.path} ... `); 16 | const bookmarks = JSON.parse( 17 | fs.readFileSync(path.join(process.env.VOLUME_PATH, "Bookmarks")) 18 | ); 19 | 20 | let bookmarksBar = bookmarks.roots.bookmark_bar.children; 21 | let bookmarksOther = bookmarks.roots.other.children; 22 | let bookmarksSynced = bookmarks.roots.synced.children; 23 | 24 | let bookmarksFlat = _.flatten( 25 | [...bookmarksBar, bookmarksOther, bookmarksSynced].map(b => { 26 | if (b.children) { 27 | return b.children; 28 | } 29 | return b; 30 | }) 31 | ); 32 | 33 | bookmarksFlat = bookmarksFlat.map(b => { 34 | return { 35 | ...b, 36 | date_added: converWebkitTimestamp(b.date_added), 37 | last_visited_desktop: b.meta_info 38 | ? converWebkitTimestamp(b.meta_info.last_visited_desktop) 39 | : null 40 | }; 41 | }); 42 | 43 | res.send(bookmarksFlat); 44 | console.log(chalk.green(" [ OK ]")); 45 | } catch (e) { 46 | console.log(e); 47 | res.status(500).send(); 48 | } 49 | }); 50 | 51 | module.exports = router; 52 | -------------------------------------------------------------------------------- /client/src/views/dashboard/components/TopSites/TopSites.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { Table, Segment, Header, Placeholder } from "semantic-ui-react"; 3 | 4 | const PlaceholderRow = () => ( 5 | 6 | 7 | 8 | 9 | 10 | 11 | ); 12 | 13 | function TopSites(props) { 14 | return ( 15 | 16 |
17 | 18 | 19 | 20 | Rank 21 | Title 22 | URL 23 | 24 | 25 | 26 | {props.topSites?.map(({ url, url_rank, title, redirects }) => ( 27 | 28 | {url_rank + 1} 29 | {title} 30 | {url} 31 | 32 | )) || 33 | [...Array(10).keys()].map(() => ( 34 | 35 | {PlaceholderRow()} 36 | {PlaceholderRow()} 37 | {PlaceholderRow()} 38 | 39 | ))} 40 | 41 |
42 | 43 | ); 44 | } 45 | 46 | export default TopSites; 47 | -------------------------------------------------------------------------------- /server/models/user.js: -------------------------------------------------------------------------------- 1 | const mongoose = require("mongoose"); 2 | const bcrypt = require("bcryptjs"); 3 | const jwt = require("jsonwebtoken"); 4 | 5 | const schema = new mongoose.Schema( 6 | { 7 | username: { type: String, unique: true, required: true }, 8 | password: { type: String, required: true }, 9 | firstName: { type: String, required: true }, 10 | lastName: { type: String, required: true }, 11 | token: { type: String, required: false } 12 | }, 13 | { 14 | timestamps: true 15 | } 16 | ); 17 | 18 | schema.virtual("evidences", { 19 | ref: "Evidence", 20 | localField: "_id", 21 | foreignField: "reporter" 22 | }); 23 | 24 | schema.methods.toJSON = function() { 25 | const userObject = this.toObject(); 26 | delete userObject.password; 27 | delete userObject.token; 28 | 29 | return userObject; 30 | }; 31 | 32 | schema.statics.authenticate = async (username, password) => { 33 | const user = await User.findOne({ username }); 34 | const isMatch = bcrypt.compareSync(password, user.password); 35 | 36 | if (user && isMatch) { 37 | const token = jwt.sign({ _id: user._id.toString() }, "secrettoken123"); 38 | user.token = token; 39 | await user.save(); 40 | 41 | return { user, token }; 42 | } else { 43 | throw new Error("Unable to login"); 44 | } 45 | }; 46 | 47 | // Hashing plain text password before saving 48 | schema.pre("save", async function(next) { 49 | const user = this; 50 | 51 | if (user.isModified("password")) { 52 | user.password = await bcrypt.hash(user.password, 8); 53 | } 54 | 55 | next(); 56 | }); 57 | 58 | const User = mongoose.model("User", schema); 59 | 60 | module.exports = User; 61 | -------------------------------------------------------------------------------- /server/controllers/db_operations.js: -------------------------------------------------------------------------------- 1 | const Database = require("better-sqlite3"); 2 | const path = require("path"); 3 | 4 | const getDatabase = dbPath => { 5 | try { 6 | return new Database(dbPath, { readonly: true }); 7 | } catch (err) { 8 | console.log("Cant open database."); 9 | throw err; 10 | } 11 | }; 12 | 13 | const queryBuilder = ({ ...params }) => { 14 | let query = "SELECT * FROM " + params.table; 15 | 16 | if (params.row) { 17 | query = query.replace("*", params.row); 18 | } 19 | if (params.where) { 20 | query = query + " WHERE " + params.where; 21 | } 22 | if (params.groupBy) { 23 | query = query + " GROUP BY " + params.groupBy; 24 | } 25 | if (params.orderBy) { 26 | query = query + " ORDER BY " + params.orderBy; 27 | } 28 | if (params.limit) { 29 | query = query + " LIMIT " + params.limit; 30 | } 31 | 32 | return query; 33 | }; 34 | 35 | const getDbTable = async ({ 36 | db_name, 37 | table, 38 | limit, 39 | row, 40 | where, 41 | groupBy, 42 | orderBy 43 | }) => { 44 | const dbPath = path.join(process.env.VOLUME_PATH, db_name); 45 | let db; 46 | 47 | try { 48 | db = getDatabase(dbPath); 49 | const query = queryBuilder({ 50 | db_name, 51 | table, 52 | limit, 53 | row, 54 | where, 55 | groupBy, 56 | orderBy 57 | }); 58 | 59 | const results = db.prepare(query).all(); 60 | return { results }; 61 | } catch (err) { 62 | console.error("Database query error:", err.message); 63 | return { results: [] }; 64 | } finally { 65 | if (db) { 66 | db.close(); 67 | } 68 | } 69 | }; 70 | 71 | module.exports = getDbTable; 
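To make the clause ordering concrete, here is a trace of `queryBuilder` with the argument set that `LoginDataController.js` (later in this listing) passes for the credentials lookup. This is a sketch only — `queryBuilder` is module-internal and not exported from `db_operations.js`:

```js
// Trace of queryBuilder (not exported; shown only to illustrate the fixed clause order).
const query = queryBuilder({
  table: "logins",
  row: "username_value, count(*) as count", // replaces the "*" in "SELECT * FROM logins"
  where: 'username_value IS NOT "" ',
  groupBy: "username_value",
  orderBy: "count DESC"
});

// Clauses are appended in the order row -> WHERE -> GROUP BY -> ORDER BY -> LIMIT, producing:
// SELECT username_value, count(*) as count FROM logins WHERE username_value IS NOT ""  GROUP BY username_value ORDER BY count DESC
// (the doubled space comes from the trailing space in the where argument)
```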
72 | -------------------------------------------------------------------------------- /client/src/volumeMenu/VolumeMenu.js: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from "react"; 2 | import CenteredWrapper from "../layout/CenteredWrapper/CenteredWrapper"; 3 | import { Segment, Divider, Button } from "semantic-ui-react"; 4 | import Logo from "../common/Logo/Logo"; 5 | import "./VolumeMenu.css"; 6 | import Volume from "./Volume"; 7 | import axios from "../axios-api"; 8 | 9 | function VolumeMenu() { 10 | const [volumeInfo, setVolumeInfo] = useState(); 11 | const [loading, setLoading] = useState(false); 12 | const [integrity, setIntegrity] = useState({ status: "verified" }); 13 | 14 | const token = localStorage.getItem("token"); 15 | 16 | const config = { 17 | headers: { Authorization: `Bearer ${token}` } 18 | }; 19 | 20 | useEffect(() => { 21 | axios.get("/volumes", config).then(res => { 22 | setVolumeInfo(res.data); 23 | }); 24 | verifyIntegrity(); 25 | }, []); 26 | 27 | function verifyIntegrity() { 28 | setLoading(true); 29 | axios.get("/volumes/verify", config).then(res => { 30 | setLoading(false); 31 | setIntegrity(res.data); 32 | }); 33 | } 34 | 35 | return ( 36 | 37 | 38 | 39 | 40 | Select mounted volume 41 | 42 | 45 | {volumeInfo ? ( 46 | 47 | ) : ( 48 |

No volumes available

49 | )} 50 |
51 |
52 | ); 53 | } 54 | 55 | export default VolumeMenu; 56 | -------------------------------------------------------------------------------- /client/src/store/actions/auth.js: -------------------------------------------------------------------------------- 1 | import axios from "../../axios-api"; 2 | 3 | export const AUTH_START = "AUTH_START"; 4 | export const AUTH_SUCCESS = "AUTH_SUCCESS"; 5 | export const AUTH_FAIL = "AUTH_FAIL"; 6 | export const AUTH_LOGOUT = "AUTH_LOGOUT"; 7 | 8 | export const authStart = () => { 9 | return { type: AUTH_START }; 10 | }; 11 | 12 | export const authSuccess = authData => { 13 | return { 14 | type: AUTH_SUCCESS, 15 | token: authData.token, 16 | username: authData?.user?.username || authData.username 17 | }; 18 | }; 19 | 20 | export const authFail = error => { 21 | return { type: AUTH_FAIL, error }; 22 | }; 23 | 24 | export const logout = () => { 25 | localStorage.removeItem("token"); 26 | localStorage.removeItem("username"); 27 | return { type: AUTH_LOGOUT }; 28 | }; 29 | 30 | export const authCheckState = () => { 31 | return dispatch => { 32 | const token = localStorage.getItem("token"); 33 | if (!token) { 34 | dispatch(logout()); 35 | } else { 36 | const username = localStorage.getItem("username"); 37 | dispatch(authSuccess({ token, username })); 38 | } 39 | }; 40 | }; 41 | 42 | export const auth = (username, password) => { 43 | return dispatch => { 44 | dispatch(authStart()); 45 | const authData = { 46 | username: username, 47 | password: password 48 | }; 49 | 50 | axios 51 | .post("/users/login", authData) 52 | .then(res => { 53 | 54 | localStorage.setItem("token", res.data.token); 55 | localStorage.setItem("username", res.data.user.username); 56 | 57 | dispatch(authSuccess(res.data)); 58 | }) 59 | .catch(err => { 60 | dispatch(authFail(err)); 61 | }); 62 | }; 63 | }; 64 | -------------------------------------------------------------------------------- /download-model.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | MODEL_DIR="server/utils/predictor" 4 | MODEL_FILE="finalized_model.sav" 5 | COMPRESSED_FILE="finalized_model.sav.gz" 6 | MODEL_PATH="$MODEL_DIR/$MODEL_FILE" 7 | COMPRESSED_PATH="$MODEL_DIR/$COMPRESSED_FILE" 8 | 9 | # Create directory if it doesn't exist 10 | mkdir -p "$MODEL_DIR" 11 | 12 | # Check if model already exists 13 | if [ -f "$MODEL_PATH" ]; then 14 | echo "✅ ML model already exists at $MODEL_PATH" 15 | exit 0 16 | fi 17 | 18 | echo "📥 Downloading compressed ML model (~200-300MB)..." 19 | 20 | # Download compressed model 21 | GITHUB_RELEASE_URL="https://github.com/ChmaraX/forensix/releases/download/v1.0/finalized_model.sav.gz" 22 | 23 | # Try to download the compressed model 24 | if command -v curl >/dev/null 2>&1; then 25 | curl -L -o "$COMPRESSED_PATH" "$GITHUB_RELEASE_URL" 26 | elif command -v wget >/dev/null 2>&1; then 27 | wget -O "$COMPRESSED_PATH" "$GITHUB_RELEASE_URL" 28 | else 29 | echo "❌ Error: Neither curl nor wget is available." 30 | echo "Please install curl or wget, or download the model manually:" 31 | echo "URL: $GITHUB_RELEASE_URL" 32 | echo "Save to: $COMPRESSED_PATH" 33 | exit 1 34 | fi 35 | 36 | # Verify download and extract 37 | if [ -f "$COMPRESSED_PATH" ]; then 38 | echo "✅ Compressed model downloaded! Extracting..." 39 | gunzip "$COMPRESSED_PATH" 40 | 41 | if [ -f "$MODEL_PATH" ]; then 42 | MODEL_SIZE=$(du -h "$MODEL_PATH" | cut -f1) 43 | echo "✅ Model extracted successfully! 
Size: $MODEL_SIZE" 44 | echo "📍 Location: $MODEL_PATH" 45 | else 46 | echo "❌ Extraction failed." 47 | exit 1 48 | fi 49 | else 50 | echo "❌ Download failed. Please try again or download manually." 51 | exit 1 52 | fi 53 | -------------------------------------------------------------------------------- /server/app.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | require("./db/mongoose"); 3 | 4 | const { 5 | setVolumeInfo, 6 | setVolumePath, 7 | } = require("./controllers/VolumesController"); 8 | const historyRouter = require("./routers/history"); 9 | const cacheRouter = require("./routers/cache"); 10 | const userRouter = require("./routers/user"); 11 | const volumeRouter = require("./routers/volumes"); 12 | const loginDataRouter = require("./routers/logindata"); 13 | const profileRouter = require("./routers/profile"); 14 | const topSitesRouter = require("./routers/topsites"); 15 | const faviconsRouter = require("./routers/favicons"); 16 | const webDataRouter = require("./routers/webdata"); 17 | const bookmarksRouter = require("./routers/bookmarks"); 18 | const evidenceRouter = require("./routers/evidence"); 19 | 20 | const auth = require("./middleware/auth"); 21 | 22 | const cors = require("cors"); 23 | 24 | const app = express(); 25 | app.use(cors()); 26 | 27 | // registering middlewares 28 | app.all("/bookmarks", auth); 29 | app.all("/cache*", auth); 30 | app.all("/favicons", auth); 31 | app.all("/history*", auth); 32 | app.all("/logindata*", auth); 33 | app.all("/profile*", auth); 34 | app.all("/topsites*", auth); 35 | app.all("/volumes*", auth); 36 | app.all("/webdata*", auth); 37 | 38 | // registering routers 39 | app.use(express.json()); 40 | app.use( 41 | historyRouter, 42 | cacheRouter, 43 | userRouter, 44 | volumeRouter, 45 | loginDataRouter, 46 | profileRouter, 47 | topSitesRouter, 48 | faviconsRouter, 49 | webDataRouter, 50 | bookmarksRouter, 51 | evidenceRouter 52 | ); 53 | 54 | // setting global variable with location of data 55 | setVolumePath(); 56 | 57 | // get info about mounted volume 58 | setVolumeInfo(); 59 | 60 | module.exports = app; 61 | -------------------------------------------------------------------------------- /client/src/views/webdata/components/WebDataTable.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import MaterialTable from "material-table"; 3 | import { MuiThemeProvider, createMuiTheme } from "@material-ui/core"; 4 | import { Header } from "semantic-ui-react"; 5 | 6 | const theme = createMuiTheme({ 7 | palette: { 8 | primary: { 9 | main: "rgb(111, 156, 235)" 10 | }, 11 | secondary: { 12 | main: "rgb(111, 156, 235)" 13 | } 14 | } 15 | }); 16 | 17 | function WebDataTable(props) { 18 | return ( 19 | 20 | Autofills
} 22 | columns={[ 23 | { 24 | title: "Element Name", 25 | field: "name" 26 | }, 27 | { 28 | title: "Element Value", 29 | field: "value" 30 | }, 31 | { 32 | title: "Date Created", 33 | field: "date_created" 34 | }, 35 | { title: "Last Used", field: "date_last_used" }, 36 | { title: "Count", field: "count", type: "numeric" } 37 | ]} 38 | data={props.autofills || []} 39 | actions={[ 40 | { 41 | icon: "save", 42 | tooltip: "Mark as evidence", 43 | onClick: (event, rowData) => 44 | props.setShowModal({ 45 | show: true, 46 | data: rowData 47 | }) 48 | } 49 | ]} 50 | options={{ 51 | selection: true, 52 | exportButton: true, 53 | tableLayout: "auto", 54 | cellStyle: { overflow: "hidden" }, 55 | searchFieldAlignment: "right", 56 | searchFieldStyle: { width: "500px" }, 57 | pageSize: 5 58 | }} 59 | /> 60 | 61 | ); 62 | } 63 | 64 | export default WebDataTable; 65 | -------------------------------------------------------------------------------- /server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM nikolaik/python-nodejs:python3.11-nodejs18 2 | 3 | # Copy Python requirements and install them 4 | COPY requirements.txt ./ 5 | RUN pip install --upgrade pip && \ 6 | pip install --no-cache-dir -r requirements.txt 7 | 8 | # Create app directory 9 | WORKDIR /app 10 | 11 | # Install app dependencies 12 | COPY package*.json ./ 13 | RUN npm install -qy 14 | 15 | # Download and extract compressed ML model (much faster!) 16 | RUN echo "📥 Downloading compressed ML model..." && \ 17 | mkdir -p utils/predictor && \ 18 | for i in 1 2 3; do \ 19 | echo "Attempt $i/3..." && \ 20 | if command -v curl >/dev/null 2>&1; then \ 21 | curl --retry 3 --retry-delay 5 --connect-timeout 60 --max-time 300 \ 22 | -L -o utils/predictor/finalized_model.sav.gz \ 23 | "https://github.com/ChmaraX/forensix/releases/download/v1.0/finalized_model.sav.gz" && break; \ 24 | elif command -v wget >/dev/null 2>&1; then \ 25 | wget --tries=3 --timeout=60 --read-timeout=300 \ 26 | -O utils/predictor/finalized_model.sav.gz \ 27 | "https://github.com/ChmaraX/forensix/releases/download/v1.0/finalized_model.sav.gz" && break; \ 28 | else \ 29 | echo "❌ Neither curl nor wget available"; exit 1; \ 30 | fi; \ 31 | echo "Download failed, retrying in 5 seconds..."; \ 32 | sleep 5; \ 33 | done && \ 34 | if [ -f utils/predictor/finalized_model.sav.gz ]; then \ 35 | echo "✅ Compressed model downloaded! Extracting..." && \ 36 | gunzip utils/predictor/finalized_model.sav.gz && \ 37 | echo "✅ Model extracted successfully! Size: $(du -h utils/predictor/finalized_model.sav | cut -f1)"; \ 38 | else \ 39 | echo "❌ Model download failed after 3 attempts"; exit 1; \ 40 | fi 41 | 42 | # Copy application code 43 | COPY . . 
44 | 45 | EXPOSE 3001 46 | CMD [ "npm", "start" ] 47 | -------------------------------------------------------------------------------- /server/controllers/LoginDataController.js: -------------------------------------------------------------------------------- 1 | const getDbTable = require("../controllers/db_operations"); 2 | 3 | const validateEmail = email => { 4 | var re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/; 5 | return re.test(String(email).toLowerCase()); 6 | }; 7 | 8 | const divide = (arr, condition) => { 9 | const emails = arr.filter(el => condition(el.username_value)); 10 | const usernames = arr.filter(el => !condition(el.username_value)); 11 | return { emails, usernames }; 12 | }; 13 | 14 | /* 15 | returns most used emails and usernames along with 16 | their respective total count 17 | */ 18 | const getLoginCredentials = async () => { 19 | const credentials = await getDbTable({ 20 | db_name: "Login Data", 21 | table: "logins", 22 | where: 'username_value IS NOT "" ', 23 | row: "username_value, count(*) as count", 24 | groupBy: "username_value", 25 | orderBy: "count DESC" 26 | }); 27 | 28 | const { emails, usernames } = divide(credentials.results, validateEmail); 29 | 30 | const totalEmails = emails.map(el => el.count).reduce((a, b) => a + b, 0); 31 | const totalUsernames = usernames 32 | .map(el => el.count) 33 | .reduce((a, b) => a + b, 0); 34 | 35 | return { 36 | emails, 37 | usernames, 38 | totalEmails, 39 | totalUsernames, 40 | all: credentials.results 41 | }; 42 | }; 43 | 44 | const getLoginData = async () => { 45 | const loginData = await getDbTable({ 46 | db_name: "Login Data", 47 | table: "logins", 48 | row: 49 | "*, hex(password_value) AS password_value, datetime((date_created/1000000)-11644473600, 'unixepoch', 'localtime') AS date_created, datetime((date_last_used/1000000)-11644473600, 'unixepoch', 'localtime') AS date_last_used" 50 | }); 51 | 52 | return loginData.results; 53 | }; 54 | 55 | module.exports = { 56 | getLoginCredentials, 57 | getLoginData 58 | }; 59 | -------------------------------------------------------------------------------- /server/routers/profile.js: -------------------------------------------------------------------------------- 1 | const express = require("express"); 2 | const router = new express.Router(); 3 | const { 4 | estimateFullname, 5 | estimateNation, 6 | getAvatars, 7 | systemSpecs, 8 | getAccounts, 9 | getBirthday 10 | } = require("../controllers/SuspectProfile"); 11 | const { 12 | getAutofill, 13 | findPhoneNumbers, 14 | findGeolocationData 15 | } = require("../controllers/WebDataController"); 16 | const chalk = require("chalk"); 17 | 18 | router.get("/profile/estimate", async (req, res) => { 19 | try { 20 | process.stdout.write(`[GET] ${req.path} ... 
`); 21 | const fullname = await estimateFullname(); 22 | const nation = await estimateNation(); 23 | const avatars = getAvatars(); 24 | const birthday = getBirthday(); 25 | const { probableAddress, probableCity } = findGeolocationData( 26 | await getAutofill() 27 | ); 28 | const phone = findPhoneNumbers(await getAutofill()).probableNum; 29 | 30 | res.send({ 31 | fullname, 32 | nation, 33 | birthday, 34 | probableAddress, 35 | probableCity, 36 | avatars, 37 | phone 38 | }); 39 | console.log(chalk.green(" [ OK ]")); 40 | } catch (e) { 41 | console.log(e); 42 | res.status(500).send(); 43 | } 44 | }); 45 | 46 | router.get("/profile/accounts", async (req, res) => { 47 | try { 48 | process.stdout.write(`[GET] ${req.path} ... `); 49 | const accounts = getAccounts(); 50 | 51 | res.send(accounts); 52 | console.log(chalk.green(" [ OK ]")); 53 | } catch (e) { 54 | console.log(e); 55 | res.status(500).send(); 56 | } 57 | }); 58 | 59 | router.get("/profile/system-specs", async (req, res) => { 60 | try { 61 | process.stdout.write(`[GET] ${req.path} ... `); 62 | const specs = await systemSpecs(); 63 | 64 | res.send({ specs }); 65 | console.log(chalk.green(" [ OK ]")); 66 | } catch (e) { 67 | console.log(e); 68 | res.status(500).send(); 69 | } 70 | }); 71 | 72 | module.exports = router; 73 | -------------------------------------------------------------------------------- /client/src/common/SaveEvidenceModal/SaveEvidenceModal.js: -------------------------------------------------------------------------------- 1 | import React, { useState } from "react"; 2 | import { Header, Modal, Button, Icon, TextArea } from "semantic-ui-react"; 3 | import axios from "../../axios-api"; 4 | 5 | function SaveEvidenceModal(props) { 6 | const [description, setDescription] = useState(); 7 | 8 | const token = localStorage.getItem("token"); 9 | 10 | const config = { 11 | headers: { Authorization: `Bearer ${token}` } 12 | }; 13 | 14 | function saveEvidence() { 15 | let body = { 16 | data: props.showModal.data, 17 | description: description 18 | }; 19 | axios.post("/evidences", body, config).then(res => { 20 | props.setShowModal({ show: false, data: {} }); 21 | }); 22 | } 23 | 24 | const handleChange = (e, { name, value }) => { 25 | setDescription(value); 26 | }; 27 | 28 | return ( 29 | props.setShowModal({ show: false, data: {} })} 32 | basic 33 | size="small" 34 | > 35 |
39 | 40 |

41 | Selected records will be stored in a database shared with other 42 | investigators. 

44 | 132 | 133 | )} 134 | 135 | ); 136 | }} 137 | /> 138 | 139 | 140 | ); 141 | } 142 | 143 | export default CacheContainer; 144 | -------------------------------------------------------------------------------- /client/src/views/dashboard/components/Profile/Profile.js: -------------------------------------------------------------------------------- 1 | import React, { useState } from "react"; 2 | import { 3 | Button, 4 | Flag, 5 | Grid, 6 | Header, 7 | Icon, 8 | Image, 9 | List, 10 | Placeholder, 11 | Popup, 12 | Segment, 13 | } from "semantic-ui-react"; 14 | import "./Profile.css"; 15 | 16 | const PlaceholderLine = () => ( 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | ); 25 | 26 | const PlaceholderImage = () => ( 27 | 28 | 29 | 30 | ); 31 | 32 | const Avatar = (avatar) => ( 33 | 40 | ); 41 | 42 | const EstimatedHeader = () => ( 43 |
47 | Profile (estimated){" "} 48 | } 51 | /> 52 | 53 | } 54 | style={{ display: "inline-block" }} 55 | /> 56 | ); 57 | 58 | const AccountsHeader = () => ( 59 |
63 | Accounts{" "} 64 | } 67 | /> 68 | 69 | } 70 | style={{ display: "inline-block" }} 71 | /> 72 | ); 73 | 74 | const EstimatedContent = (profile) => ( 75 |
76 |
{profile.fullname}
77 |

78 | gender:{" "} 79 | {profile.birthday.gender === 1 80 | ? "male" 81 | : profile.birthday.gender === 2 82 | ? "female" 83 | : "unknown"}{" "} 84 | 95 |
96 | nation: {profile.nation.country.substring(0, 15)}{" "} 97 |
98 | birthday: {profile.birthday.birthyear || "unknown"}
99 |

100 |

101 | address: {profile.probableAddress || "unknown"} 102 |
103 | city: {profile.probableCity || "unknown"}
104 | phone: {profile.phone || "unknown"}
105 |

106 |
107 | ); 108 | 109 | const AccountContent = (accounts) => 110 | accounts.length < 1 ? ( 111 |

No accounts found.

112 | ) : ( 113 | 114 | {accounts.map((acc, i) => ( 115 | 116 | 117 | 118 | {acc.full_name} 119 | 120 |

121 | email: {acc.email}
122 | locale: {acc.locale}
123 |

124 |

125 | adv. protection:{" "} 126 | {acc.is_under_advanced_protection.toString()}
127 | child account: {acc.is_child_account.toString()}
128 |

129 |
130 |
131 |
132 | ))} 133 |
134 | ); 135 | 136 | function Profile(props) { 137 | const [showAccounts, setShowAccounts] = useState(false); 138 | 139 | return ( 140 | 141 | {showAccounts ? AccountsHeader() : EstimatedHeader()} 142 | 150 | {showAccounts ? ( 151 | !props.accounts ? ( 152 | PlaceholderLine() 153 | ) : ( 154 | AccountContent(props.accounts || []) 155 | ) 156 | ) : ( 157 | 158 | 159 | 160 | {!props.profile 161 | ? PlaceholderImage() 162 | : Avatar(props.profile?.avatars[0])} 163 | 164 | 165 | {!props.profile 166 | ? PlaceholderLine() 167 | : EstimatedContent(props.profile)} 168 | 169 | 170 | 171 | )} 172 | 173 | ); 174 | } 175 | 176 | export default Profile; 177 | -------------------------------------------------------------------------------- /client/src/serviceWorker.js: -------------------------------------------------------------------------------- 1 | // This optional code is used to register a service worker. 2 | // register() is not called by default. 3 | 4 | // This lets the app load faster on subsequent visits in production, and gives 5 | // it offline capabilities. However, it also means that developers (and users) 6 | // will only see deployed updates on subsequent visits to a page, after all the 7 | // existing tabs open on the page have been closed, since previously cached 8 | // resources are updated in the background. 9 | 10 | // To learn more about the benefits of this model and instructions on how to 11 | // opt-in, read https://bit.ly/CRA-PWA 12 | 13 | const isLocalhost = Boolean( 14 | window.location.hostname === 'localhost' || 15 | // [::1] is the IPv6 localhost address. 16 | window.location.hostname === '[::1]' || 17 | // 127.0.0.0/8 are considered localhost for IPv4. 18 | window.location.hostname.match( 19 | /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/ 20 | ) 21 | ); 22 | 23 | export function register(config) { 24 | if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) { 25 | // The URL constructor is available in all browsers that support SW. 26 | const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href); 27 | if (publicUrl.origin !== window.location.origin) { 28 | // Our service worker won't work if PUBLIC_URL is on a different origin 29 | // from what our page is served on. This might happen if a CDN is used to 30 | // serve assets; see https://github.com/facebook/create-react-app/issues/2374 31 | return; 32 | } 33 | 34 | window.addEventListener('load', () => { 35 | const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`; 36 | 37 | if (isLocalhost) { 38 | // This is running on localhost. Let's check if a service worker still exists or not. 39 | checkValidServiceWorker(swUrl, config); 40 | 41 | // Add some additional logging to localhost, pointing developers to the 42 | // service worker/PWA documentation. 43 | navigator.serviceWorker.ready.then(() => { 44 | console.log( 45 | 'This web app is being served cache-first by a service ' + 46 | 'worker. To learn more, visit https://bit.ly/CRA-PWA' 47 | ); 48 | }); 49 | } else { 50 | // Is not localhost. 
Just register service worker 51 | registerValidSW(swUrl, config); 52 | } 53 | }); 54 | } 55 | } 56 | 57 | function registerValidSW(swUrl, config) { 58 | navigator.serviceWorker 59 | .register(swUrl) 60 | .then(registration => { 61 | registration.onupdatefound = () => { 62 | const installingWorker = registration.installing; 63 | if (installingWorker == null) { 64 | return; 65 | } 66 | installingWorker.onstatechange = () => { 67 | if (installingWorker.state === 'installed') { 68 | if (navigator.serviceWorker.controller) { 69 | // At this point, the updated precached content has been fetched, 70 | // but the previous service worker will still serve the older 71 | // content until all client tabs are closed. 72 | console.log( 73 | 'New content is available and will be used when all ' + 74 | 'tabs for this page are closed. See https://bit.ly/CRA-PWA.' 75 | ); 76 | 77 | // Execute callback 78 | if (config && config.onUpdate) { 79 | config.onUpdate(registration); 80 | } 81 | } else { 82 | // At this point, everything has been precached. 83 | // It's the perfect time to display a 84 | // "Content is cached for offline use." message. 85 | console.log('Content is cached for offline use.'); 86 | 87 | // Execute callback 88 | if (config && config.onSuccess) { 89 | config.onSuccess(registration); 90 | } 91 | } 92 | } 93 | }; 94 | }; 95 | }) 96 | .catch(error => { 97 | console.error('Error during service worker registration:', error); 98 | }); 99 | } 100 | 101 | function checkValidServiceWorker(swUrl, config) { 102 | // Check if the service worker can be found. If it can't reload the page. 103 | fetch(swUrl, { 104 | headers: { 'Service-Worker': 'script' } 105 | }) 106 | .then(response => { 107 | // Ensure service worker exists, and that we really are getting a JS file. 108 | const contentType = response.headers.get('content-type'); 109 | if ( 110 | response.status === 404 || 111 | (contentType != null && contentType.indexOf('javascript') === -1) 112 | ) { 113 | // No service worker found. Probably a different app. Reload the page. 114 | navigator.serviceWorker.ready.then(registration => { 115 | registration.unregister().then(() => { 116 | window.location.reload(); 117 | }); 118 | }); 119 | } else { 120 | // Service worker found. Proceed as normal. 121 | registerValidSW(swUrl, config); 122 | } 123 | }) 124 | .catch(() => { 125 | console.log( 126 | 'No internet connection found. App is running in offline mode.' 
127 | ); 128 | }); 129 | } 130 | 131 | export function unregister() { 132 | if ('serviceWorker' in navigator) { 133 | navigator.serviceWorker.ready.then(registration => { 134 | registration.unregister(); 135 | }); 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /server/controllers/SuspectProfile.js: -------------------------------------------------------------------------------- 1 | const { getLoginCredentials } = require("./LoginDataController"); 2 | const { findPhoneNumbers, getAutofill } = require("./WebDataController"); 3 | const _ = require("lodash"); 4 | const getDbTable = require("./db_operations"); 5 | const parse = require("url-parse"); 6 | const { getCountry, getTLD } = require("tld-countries"); 7 | const fs = require("fs"); 8 | const path = require("path"); 9 | 10 | const getMostFrequent = arr => { 11 | return _.head( 12 | _(arr) 13 | .countBy() 14 | .entries() 15 | .maxBy(_.last) 16 | ); 17 | }; 18 | 19 | const estimateFullname = async () => { 20 | const { all } = await getLoginCredentials(); 21 | 22 | let parsed = all.map(el => { 23 | return el.username_value 24 | .split("@")[0] // Split email usernames 25 | .replace(/([A-Z])/g, " $1") // Split string to words by uppercase letter 26 | .replace(/[^A-Za-z]/g, " ") // Remove every non-letter symbols 27 | .trim(); 28 | }); 29 | 30 | // Leave only ones containing space 31 | let potentialFullnames = parsed.filter(el => el.includes(" ")); 32 | 33 | // Capitalize first letter of each word 34 | potentialFullnames = potentialFullnames.map(name => { 35 | return name.replace(/\w\S*/g, word => { 36 | return word.charAt(0).toUpperCase() + word.substr(1).toLowerCase(); 37 | }); 38 | }); 39 | 40 | return getMostFrequent(potentialFullnames); 41 | }; 42 | 43 | const estimateNation = async () => { 44 | const data = findPhoneNumbers(await getAutofill()); 45 | let country; 46 | 47 | if (data.phoneNums.length < 1) { 48 | const urls = await getDbTable({ 49 | db_name: "History", 50 | table: "urls", 51 | row: "url" 52 | }); 53 | 54 | const tlds = urls.results.map(url => { 55 | return parse(url.url) 56 | .hostname.split(".") 57 | .pop(); 58 | }); 59 | 60 | const tldCountries = tlds 61 | .map(t => getCountry(t)) 62 | .filter(a => a !== undefined); 63 | 64 | country = getMostFrequent(tldCountries); 65 | } else { 66 | country = getCountry( 67 | getMostFrequent( 68 | data.phoneNums.map(e => { 69 | return e.parsedNum.country; 70 | }) 71 | ) 72 | ); 73 | } 74 | 75 | return { country: country, tld: getTLD(country) }; 76 | }; 77 | 78 | const getAvatars = () => { 79 | const directoryPath = path.join( 80 | process.env.VOLUME_PATH, 81 | "/Accounts/Avatar Images" 82 | ); 83 | 84 | let avatars = {}; 85 | 86 | try { 87 | const files = fs.readdirSync(directoryPath); 88 | avatars = files.map(file => { 89 | const data = fs.readFileSync(path.join(directoryPath, file)); 90 | const base64Image = new Buffer(data, "binary").toString("base64"); 91 | 92 | return base64Image; 93 | }); 94 | } catch (e) { 95 | console.log(e); 96 | } 97 | 98 | return avatars; 99 | }; 100 | 101 | const getBirthday = () => { 102 | const preferencesJSON = fs.readFileSync( 103 | path.join(process.env.VOLUME_PATH, "Preferences") 104 | ); 105 | 106 | const preferences = JSON.parse(preferencesJSON); 107 | let birthday, 108 | birthyear, 109 | gender = "undefined"; 110 | 111 | try { 112 | birthday = preferences.sync.birthday; 113 | birthyear = preferences.sync.demographics.birth_year; 114 | gender = preferences.sync.demographics.gender; 115 | } catch 
(e) { 116 | console.log(e); 117 | } 118 | 119 | return { birthday, birthyear, gender }; 120 | }; 121 | 122 | const getAccounts = () => { 123 | const preferencesJSON = fs.readFileSync( 124 | path.join(process.env.VOLUME_PATH, "Preferences") 125 | ); 126 | 127 | const preferences = JSON.parse(preferencesJSON); 128 | const accounts = preferences.account_info; 129 | 130 | return accounts; 131 | }; 132 | 133 | const getDeviceProtocols = async () => { 134 | const urls = await getDbTable({ 135 | db_name: "Login Data", 136 | table: "logins", 137 | row: "origin_url" 138 | }); 139 | 140 | const deviceProtocols = ["android", "ios"]; 141 | 142 | const deviceNativeRequests = urls.results.filter(url => { 143 | const protocol = parse(url.origin_url).protocol.split(":")[0]; 144 | return deviceProtocols.includes(protocol); 145 | }); 146 | 147 | const devices = deviceNativeRequests.map( 148 | req => parse(req.origin_url).protocol.split(":")[0] 149 | ); 150 | 151 | return _.uniq(devices); 152 | }; 153 | 154 | // C:\user\$USER\AppData\Local\Google\Chrome\User 155 | // Data\Default\ 156 | // /Users/$USER/Library/ApplicationSupport/Google/ 157 | // Chrome/Default/ 158 | // /home/$USER/.config/google-chrome/Default/ 159 | 160 | const getOperatingSystems = async () => { 161 | const osDefaultPath = { 162 | mac: "/Users/", 163 | linux: "/home/", 164 | windows: "C:\\Users\\" 165 | }; 166 | 167 | const downloads = await getDbTable({ 168 | db_name: "History", 169 | table: "downloads", 170 | row: "target_path" 171 | }); 172 | 173 | let os = []; 174 | downloads.results.forEach(path => { 175 | Object.keys(osDefaultPath).forEach(key => { 176 | if (path.target_path.startsWith(osDefaultPath[key])) { 177 | if (!os.includes(key)) { 178 | os.push(key); 179 | } 180 | } 181 | }); 182 | }); 183 | 184 | return os; 185 | }; 186 | 187 | const systemSpecs = async () => { 188 | const preferencesJSON = fs.readFileSync( 189 | path.join(process.env.VOLUME_PATH, "Preferences") 190 | ); 191 | 192 | const preferences = JSON.parse(preferencesJSON); 193 | 194 | const chromeVersion = preferences.extensions.last_chrome_version; 195 | const { bottom, right } = preferences.browser.window_placement; 196 | 197 | return { 198 | chromeVersion, 199 | resolution: { bottom, right }, 200 | mobileDevices: await getDeviceProtocols(), 201 | os: await getOperatingSystems() 202 | }; 203 | }; 204 | 205 | module.exports = { 206 | estimateFullname, 207 | estimateNation, 208 | getAvatars, 209 | getBirthday, 210 | getAccounts, 211 | systemSpecs 212 | }; 213 | -------------------------------------------------------------------------------- /server/controllers/HistoryController.js: -------------------------------------------------------------------------------- 1 | const getDbTable = require("./db_operations"); 2 | const _ = require("lodash"); 3 | const parse = require("url-parse"); 4 | const axios = require("axios"); 5 | 6 | const classifyUrls = async () => { 7 | const data = await getDbTable({ 8 | db_name: "History", 9 | table: "urls", 10 | row: "url", 11 | }); 12 | 13 | // Filter out chrome-extension URLs, localhost URLs, and other non-browsing URLs 14 | const filteredUrls = data.results.filter(urlObj => { 15 | const url = Object.values(urlObj)[0]; // Get the URL value 16 | return url && 17 | !url.startsWith('chrome-extension://') && 18 | !url.startsWith('chrome://') && 19 | !url.startsWith('edge://') && 20 | !url.startsWith('about:') && 21 | !url.startsWith('file://') && 22 | !url.includes('localhost') && 23 | !url.includes('127.0.0.1') && 24 | 
(url.startsWith('http://') || url.startsWith('https://')) && 25 | !url.match(/^https?:\/\/(localhost|127\.0\.0\.1|0\.0\.0\.0)(:|\/)/); 26 | }); 27 | 28 | const urls = filteredUrls.slice(0, 100); 29 | 30 | console.log(`Filtered ${data.results.length} URLs down to ${filteredUrls.length} real browsing URLs`); 31 | 32 | if (urls.length === 0) { 33 | return { classified_urls: [] }; 34 | } 35 | 36 | return new Promise((resolve, reject) => { 37 | const spawn = require("child_process").spawn; 38 | const path = require("path"); 39 | 40 | // Use absolute path and better Python detection 41 | const scriptPath = path.join(__dirname, "../utils/predictor/url-class.py"); 42 | const pythonCommands = ["python3", "python", "/usr/bin/python3", "/usr/local/bin/python3"]; 43 | 44 | let pythonProcess; 45 | let pythonCmd = "python3"; // Default 46 | 47 | // Try to find working Python command 48 | for (const cmd of pythonCommands) { 49 | try { 50 | pythonProcess = spawn(cmd, [scriptPath, JSON.stringify(urls)], { 51 | cwd: path.join(__dirname, "..") // Set working directory to server root 52 | }); 53 | pythonCmd = cmd; 54 | break; 55 | } catch (err) { 56 | continue; 57 | } 58 | } 59 | 60 | if (!pythonProcess) { 61 | console.error("Could not find Python executable"); 62 | return reject(new Error("Python not found")); 63 | } 64 | 65 | let outputData = ""; 66 | let errorData = ""; 67 | 68 | pythonProcess.stdout.on("data", function (data) { 69 | outputData += data.toString(); 70 | }); 71 | 72 | pythonProcess.stderr.on("data", function (data) { 73 | errorData += data.toString(); 74 | }); 75 | 76 | pythonProcess.on("close", function (code) { 77 | if (code === 0) { 78 | try { 79 | let url_categorized = JSON.parse(outputData); 80 | resolve(url_categorized); 81 | } catch (e) { 82 | console.error("Failed to parse Python output:", e); 83 | console.error("Output was:", outputData); 84 | reject(e); 85 | } 86 | } else { 87 | console.error(`Python process exited with code ${code}`); 88 | console.error("Error output:", errorData); 89 | reject(new Error(`Python process failed with code ${code}: ${errorData}`)); 90 | } 91 | }); 92 | 93 | pythonProcess.on("error", function (err) { 94 | console.error("Failed to start Python process:", err); 95 | reject(err); 96 | }); 97 | }); 98 | }; 99 | 100 | const getHistoryActivity = async () => { 101 | const data = await getDbTable({ 102 | db_name: "History", 103 | row: 104 | "urls.url, datetime((visit_time/1000000)-11644473600, 'unixepoch', 'localtime') AS visit_date", 105 | table: "urls, visits", 106 | where: "urls.id = visits.url", 107 | }); 108 | 109 | let month = new Array(); 110 | month[0] = "Jan"; 111 | month[1] = "Feb"; 112 | month[2] = "Mar"; 113 | month[3] = "Apr"; 114 | month[4] = "May"; 115 | month[5] = "Jun"; 116 | month[6] = "Jul"; 117 | month[7] = "Aug"; 118 | month[8] = "Sep"; 119 | month[9] = "Oct"; 120 | month[10] = "Nov"; 121 | month[11] = "Dec"; 122 | 123 | data.results.some((e, i) => { 124 | data.results[i].visit_date = e.visit_date.split(" ")[0]; 125 | data.results[i].visit_month = month[new Date(e.visit_date).getMonth()]; 126 | }); 127 | 128 | const byMonth = _.chain(data.results) 129 | .groupBy("visit_month") 130 | .map((value, key) => ({ month: key, visits: value.length })) 131 | .value(); 132 | 133 | const byDate = _.chain(data.results) 134 | .groupBy("visit_date") 135 | .map((value, key) => ({ date: key, visits: value.length })) 136 | .value(); 137 | 138 | return { all: data.results, byMonth, byDate }; 139 | }; 140 | 141 | const TRANSITIONS = [ 142 | "link", 143 | 
"typed", 144 | "auto_bookmark", 145 | "auto_subframe", 146 | "manual_subframe", 147 | "generated", 148 | "auto_toplevel", 149 | "form_submit", 150 | "reload", 151 | "keyword", 152 | "keyword_generated", 153 | ]; 154 | 155 | const QUALIFIERS = { 156 | 0x01000000: "FORWARD_BACK", 157 | 0x02000000: "FROM_ADDRESS_BAR", 158 | 0x04000000: "HOME_PAGE", 159 | 0x10000000: "CHAIN_START", 160 | 0x20000000: "CHAIN_END", 161 | 0x40000000: "CLIENT_REDIRECT", 162 | 0x80000000: "SERVER_REDIRECT", 163 | 0xc0000000: "IS_REDIRECT_MASK", 164 | }; 165 | 166 | const QUAL_MASK = 0xffffff00; 167 | const TRANS_MASK = 0xff; 168 | 169 | const getHistory = async () => { 170 | const data = await getDbTable({ 171 | db_name: "History", 172 | row: 173 | "urls.url, urls.title, urls.visit_count, urls.typed_count, datetime((urls.last_visit_time/1000000)-11644473600, 'unixepoch', 'localtime') AS last_visit_time, datetime((visits.visit_time/1000000)-11644473600, 'unixepoch', 'localtime') AS visit_time, visits.from_visit, (visits.visit_duration/1000000) as visit_duration, visits.transition", 174 | table: "urls, visits", 175 | where: "urls.id = visits.url", 176 | }); 177 | 178 | const history = data.results.map((record) => { 179 | return { 180 | ...record, 181 | transition: 182 | TRANSITIONS[("0x" + record.transition.toString(16)) & TRANS_MASK], 183 | }; 184 | }); 185 | 186 | return history; 187 | }; 188 | 189 | const getAvgDuration = async (urls) => { 190 | let avgDurations = []; 191 | 192 | return new Promise((resolve, reject) => { 193 | urls.map((site) => { 194 | getDbTable({ 195 | db_name: "History", 196 | row: 197 | "urls.url, AVG(visits.visit_duration/1000000) as avg_visit_duration", 198 | table: "urls, visits", 199 | where: `urls.id = visits.url AND visits.visit_duration > 0 AND urls.url LIKE '%${ 200 | parse(site.url).hostname 201 | }%' `, 202 | }).then((data) => { 203 | avgDurations.push(data.results[0]); 204 | 205 | if (avgDurations.length === urls.length) { 206 | avgDurations = avgDurations.map((site) => { 207 | return { 208 | url: parse(site.url).hostname, 209 | avg_visit_duration: Math.round(site.avg_visit_duration), 210 | }; 211 | }); 212 | resolve(avgDurations); 213 | } 214 | }); 215 | }); 216 | }); 217 | }; 218 | 219 | const INTERRUPT_REASON = [ 220 | "No Interrupt", 221 | "File Error", 222 | "Access Denied", 223 | "Disk Full", 224 | "Path Too Long", 225 | "File Too Large", 226 | "Virus", 227 | "Temporary Problem", 228 | "Blocked", 229 | "Security Check Failed", 230 | "Resume Error", 231 | "Network Error", 232 | "Operation Timed Out", 233 | "Connection Lost", 234 | "Server Down", 235 | "Server Error", 236 | "Range Request Error", 237 | "Server Precondition Error", 238 | "Unable to get file", 239 | "Server Unauthorized", 240 | "Server Certificate Problem", 241 | "Server Access Forbidden", 242 | "Server Unreachable", 243 | "Content Length Mismatch", 244 | "Cross Origin Redirect", 245 | "Cancelled", 246 | "Browser Shutdown", 247 | "Browser Crashed", 248 | ]; 249 | 250 | const STATE = [ 251 | "In Progress", 252 | "Complete", 253 | "Cancelled", 254 | "Interrupted", 255 | "Interrupted", 256 | ]; 257 | 258 | const DANGER_TYPE = [ 259 | "Not Dangerous", 260 | "Dangerous", 261 | "Dangerous URL", 262 | "Dangerous Content", 263 | "Content May Be Malicious", 264 | "Uncommon Content", 265 | "Dangerous But User Validated", 266 | "Dangerous Host", 267 | "Potentially Unwanted", 268 | "Whitelisted by Policy", 269 | ]; 270 | 271 | const getDownloads = async () => { 272 | const data = await getDbTable({ 273 | db_name: "History", 
274 | row: 275 | "*, datetime((downloads.start_time/1000000)-11644473600, 'unixepoch', 'localtime') as start_time, datetime((downloads.end_time/1000000)-11644473600, 'unixepoch', 'localtime') as end_time", 276 | table: "downloads", 277 | }); 278 | 279 | const downloads = data.results.map((record) => { 280 | return { 281 | ...record, 282 | state: STATE[record.state], 283 | interrupt_reason: INTERRUPT_REASON[record.interrupt_reason], 284 | danger_type: DANGER_TYPE[record.danger_type], 285 | }; 286 | }); 287 | 288 | return downloads; 289 | }; 290 | 291 | const getDownloadsMeta = async () => { 292 | let downloads = await getDownloads(); 293 | 294 | const downloadDirs = downloads.map((record) => { 295 | return ( 296 | record.target_path.substr(0, record.target_path.lastIndexOf("/")) || 297 | record.target_path.substr(0, record.target_path.lastIndexOf("\\")) 298 | ); 299 | }); 300 | 301 | const mostFreqDownloadDir = _.head( 302 | _(downloadDirs).countBy().entries().maxBy(_.last) 303 | ); 304 | 305 | const biggestFile = _.maxBy(downloads, "received_bytes"); 306 | 307 | downloads.some((e, i) => { 308 | downloads[i].download_date = e.start_time.split(" ")[0]; 309 | }); 310 | 311 | let byDate = _.chain(downloads) 312 | .groupBy("download_date") 313 | .map((value, key) => ({ date: key, visits: value.length })) 314 | .value(); 315 | 316 | byDate = _.orderBy(byDate, ["visits"], ["desc"]); 317 | 318 | return { mostFreqDownloadDir, biggestFile, byDate }; 319 | }; 320 | 321 | const websiteSecurityCheck = async (url) => { 322 | const apiUrl = `https://sitecheck.sucuri.net/api/v3/?scan=${url}`; 323 | 324 | return axios.get(apiUrl); 325 | }; 326 | 327 | module.exports = { 328 | classifyUrls, 329 | getHistoryActivity, 330 | getHistory, 331 | getAvgDuration, 332 | getDownloads, 333 | getDownloadsMeta, 334 | websiteSecurityCheck, 335 | }; 336 | -------------------------------------------------------------------------------- /server/controllers/CacheController.js: -------------------------------------------------------------------------------- 1 | const Promise = require("bluebird"); 2 | const fs = Promise.promisifyAll(require("fs")); 3 | const path = require("path"); 4 | const anyBase = require("any-base"); 5 | const hex2bin = anyBase(anyBase.HEX, anyBase.BIN); 6 | const bin2hex = anyBase(anyBase.BIN, anyBase.HEX); 7 | const zlib = require("zlib"); 8 | 9 | const INDEX_HEADER_LENGTH = 256 * 2; 10 | const LRU_LENGTH = 112 * 2; 11 | const ZERO_PADDING_LENGTH = 208 * 2; 12 | 13 | const hex_to_ascii = (hexString) => { 14 | var output = Buffer.from(hexString, "hex"); 15 | return output.toString("ascii"); 16 | }; 17 | 18 | // 1 | 010 | 00 | 00 | 0000 0001 | 0000 0101 0011 1011 19 | // flag | file type | reserved | block count | file number | block number 20 | // 21 | // Block Offset = 8192 + (Block Number * Block Size) 22 | // 23 | // Block Offset = 8192 + (1339 * 256) = 350 976 = 0x55B00 24 | // Block is located in data_1 at offset 0x55B00 25 | 26 | const getCacheFileType = (binAddr) => { 27 | let fileType = binAddr.substr(1, 3); 28 | 29 | if (fileType === "000") { 30 | var fileNum = bin2hex(binAddr.substr(4, 28)); 31 | 32 | while (fileNum.length !== 6) { 33 | fileNum = "0" + fileNum; 34 | } 35 | } 36 | 37 | switch (fileType) { 38 | case "000": 39 | return { 40 | file_type: 0, 41 | cache_file: `f_${fileNum}`, 42 | size: 16, 43 | }; 44 | case "001": 45 | return { 46 | file_type: 1, 47 | cache_file: "data_0", 48 | size: 36, 49 | }; 50 | case "010": 51 | return { 52 | file_type: 2, 53 | cache_file: "data_1", 54 | size: 
256, 55 | }; 56 | case "011": 57 | return { 58 | file_type: 3, 59 | cache_file: "data_2", 60 | size: 1024, 61 | }; 62 | case "100": 63 | return { 64 | file_type: 4, 65 | cache_file: "data_3", 66 | size: 4096, 67 | }; 68 | } 69 | }; 70 | 71 | const changeEndianness = (string) => { 72 | const result = []; 73 | let len = string.length - 2; 74 | while (len >= 0) { 75 | result.push(string.substr(len, 2)); 76 | len -= 2; 77 | } 78 | return result.join(""); 79 | }; 80 | 81 | const getAddressesFromTable = (indexTable) => { 82 | let cacheAddrs = []; 83 | for (let i = 0; i < indexTable.length; i += 8) { 84 | let entry = indexTable.substr(i, 8); 85 | if (entry !== "00000000") { 86 | cacheAddrs.push(changeEndianness(entry)); 87 | } 88 | } 89 | return cacheAddrs; 90 | }; 91 | 92 | const parseIndexHeader = (buff) => { 93 | return { 94 | signature: parseInt(buff.substr(0, 8), 16), 95 | minorVersion: parseInt(buff.substr(8, 4), 16), 96 | majorVersion: parseInt(buff.substr(12, 4), 16), 97 | numberOfEntries: parseInt(changeEndianness(buff.substr(16, 8)), 16), 98 | storedDataSize: parseInt(buff.substr(24, 8), 16), 99 | lastCreatedFileNum: changeEndianness(buff.substr(32, 8)), 100 | dirtyFlag: parseInt(buff.substr(40, 8), 16), 101 | tableSize: parseInt(changeEndianness(buff.substr(56, 8)), 16), 102 | creationTime: converWebkitTimestamp( 103 | parseInt(changeEndianness(buff.substr(80, 16)), 16) 104 | ), 105 | }; 106 | }; 107 | 108 | const parseLRU = (buff) => { 109 | return { 110 | filledFlag: parseInt(buff.substr(16, 8), 16), 111 | arrOfSizes: buff.substr(24, 40), 112 | arrOfHeadAddr: buff.substr(64, 40), 113 | arrOfTailAddr: buff.substr(104, 40), 114 | transactionAddr: buff.substr(142, 8), 115 | }; 116 | }; 117 | 118 | const getBlocksFromAddr = (blockAddresses) => { 119 | const blocks = blockAddresses.map((addr) => { 120 | // Try multiple cache directories (prioritize working formats) 121 | const cacheDirs = [ 122 | "/Cache/", // Original Chrome cache 123 | "/Application Cache/Cache/", // Alternative Chrome cache 124 | "/GPUCache/", // GPU cache 125 | "/DawnWebGPUCache/", // Dawn WebGPU cache 126 | "/DawnGraphiteCache/", // Dawn Graphite cache 127 | ]; 128 | 129 | let filePath = null; 130 | for (const dir of cacheDirs) { 131 | const testPath = path.join(process.env.VOLUME_PATH, dir + addr.file_type["cache_file"]); 132 | if (fs.existsSync(testPath)) { 133 | filePath = testPath; 134 | break; 135 | } 136 | } 137 | 138 | if (!filePath) { 139 | console.log(`Cache file not found: ${addr.file_type["cache_file"]}`); 140 | return ""; // Return empty block 141 | } 142 | 143 | try { 144 | const file = fs.readFileSync(filePath); 145 | const buff = Buffer.from(file, "ascii"); 146 | 147 | const block = buff.toString( 148 | "hex", 149 | addr.block_offset, 150 | addr.block_offset + addr.file_type.size 151 | ); 152 | 153 | return block; 154 | } catch (error) { 155 | console.log(`Error reading cache file: ${error.message}`); 156 | return ""; // Return empty block on error 157 | } 158 | }); 159 | 160 | return blocks.filter(block => block !== ""); // Filter out empty blocks 161 | }; 162 | 163 | const gunzip = (block) => { 164 | let decompressedBlock = zlib.gunzipSync(Buffer.from(block, "hex")); 165 | return decompressedBlock.toString("hex"); 166 | }; 167 | 168 | const brunzip = (block) => { 169 | let decompressedBlock = zlib.brotliDecompressSync(Buffer.from(block, "hex")); 170 | return decompressedBlock.toString("hex"); 171 | }; 172 | 173 | const decompress = (block) => { 174 | const GZIP_SIGN = "1f8b08"; 175 | const 
BR_ENCODING = "f158"; 176 | 177 | if (block.startsWith(GZIP_SIGN)) { 178 | return gunzip(block) 179 | } 180 | if (block.startsWith(BR_ENCODING)) { 181 | return brunzip(block) 182 | } 183 | return block 184 | } 185 | 186 | const getPayloadBlock = (blockAddresses, size) => { 187 | const blocks = blockAddresses.map((addr) => { 188 | // Try multiple cache directories (prioritize working formats) 189 | const cacheDirs = [ 190 | "/Cache/", // Original Chrome cache 191 | "/Application Cache/Cache/", // Alternative Chrome cache 192 | "/GPUCache/", // GPU cache 193 | "/DawnWebGPUCache/", // Dawn WebGPU cache 194 | "/DawnGraphiteCache/", // Dawn Graphite cache 195 | ]; 196 | 197 | let filePath = null; 198 | for (const dir of cacheDirs) { 199 | const testPath = path.join(process.env.VOLUME_PATH, dir + addr.file_type["cache_file"]); 200 | if (fs.existsSync(testPath)) { 201 | filePath = testPath; 202 | break; 203 | } 204 | } 205 | 206 | if (!filePath) { 207 | console.log(`Payload cache file not found: ${addr.file_type["cache_file"]}`); 208 | return ""; // Return empty payload 209 | } 210 | 211 | try { 212 | const file = fs.readFileSync(filePath); 213 | const buff = Buffer.from(file); 214 | 215 | const block = buff.toString( 216 | "hex", 217 | addr.file_type.file_type === 0 ? 0 : addr.block_offset, 218 | addr.file_type.file_type === 0 ? parseInt(size) : addr.block_offset + parseInt(size) 219 | ); 220 | 221 | const PNG_SIGN = "89504e470d0a1a0a"; 222 | const JPEG_SIGN = "ffd8ff"; 223 | const decompressedBlock = decompress(block); 224 | 225 | if (decompressedBlock.startsWith(PNG_SIGN) || decompressedBlock.startsWith(JPEG_SIGN)) { 226 | return Buffer.from(decompressedBlock, "hex").toString('base64') 227 | } else { 228 | return Buffer(decompressedBlock, "hex").toString('utf8') 229 | } 230 | } catch (error) { 231 | console.log(`Error reading payload cache file: ${error.message}`); 232 | return ""; // Return empty payload on error 233 | } 234 | }); 235 | 236 | return blocks.filter(block => block !== ""); // Filter out empty blocks 237 | }; 238 | 239 | const parseIndexFile = (index) => { 240 | const indexFile = fs.readFileSync(index); 241 | const buff = Buffer.from(indexFile, "ascii"); 242 | 243 | const indexHeader = buff.toString("hex", 0, INDEX_HEADER_LENGTH); 244 | const indexLRU = buff.toString( 245 | "hex", 246 | INDEX_HEADER_LENGTH, 247 | INDEX_HEADER_LENGTH + LRU_LENGTH 248 | ); 249 | 250 | const parsedIndexHeader = parseIndexHeader(indexHeader); 251 | const parsedLRU = parseLRU(indexLRU); 252 | 253 | //const INDEX_TABLE_LENGTH = parsedIndexHeader.tableSize; 254 | const INDEX_TABLE_LENGTH = 255 | buff.length - INDEX_HEADER_LENGTH + ZERO_PADDING_LENGTH + LRU_LENGTH; 256 | 257 | const indexTable = buff.toString( 258 | "hex", 259 | INDEX_HEADER_LENGTH + ZERO_PADDING_LENGTH + LRU_LENGTH, 260 | INDEX_HEADER_LENGTH + ZERO_PADDING_LENGTH + LRU_LENGTH + INDEX_TABLE_LENGTH 261 | ); 262 | 263 | return { parsedIndexHeader, parsedLRU, indexTable }; 264 | }; 265 | 266 | const parseCacheAddresses = (cacheAddresses) => { 267 | const blockAddresses = cacheAddresses.map((addr) => { 268 | return { 269 | flag: addr[0], 270 | file_type: getCacheFileType(addr), 271 | reserved: addr.substr(4, 2), 272 | block_count: parseInt(addr.substr(6, 2), 2), 273 | file_number: parseInt(addr.substr(8, 8), 2), 274 | block_number: parseInt(addr.substr(16, 16), 2), 275 | block_offset: 276 | 8192 + parseInt(addr.substr(16, 16), 2) * getCacheFileType(addr).size, 277 | }; 278 | }); 279 | 280 | return blockAddresses; 281 | }; 282 | 283 | const 
converWebkitTimestamp = (webkitTimestamp) => { 284 | const dateInSeconds = Math.round(webkitTimestamp / 1000000) - 11644473600; 285 | return new Date(dateInSeconds * 1000).toLocaleDateString(); 286 | }; 287 | 288 | const getCacheEntries = (from = 0) => { 289 | // Try multiple cache locations in the data directory (prioritize working formats) 290 | const cacheLocations = [ 291 | path.join(process.env.VOLUME_PATH, "/Cache/index"), // Original Chrome cache 292 | path.join(process.env.VOLUME_PATH, "/Application Cache/Cache/index"), // Alternative Chrome cache 293 | path.join(process.env.VOLUME_PATH, "/GPUCache/index"), // GPU cache (classic format) 294 | path.join(process.env.VOLUME_PATH, "/DawnWebGPUCache/index"), // Dawn WebGPU cache (classic format) 295 | path.join(process.env.VOLUME_PATH, "/DawnGraphiteCache/index"), // Dawn Graphite cache (classic format) 296 | ]; 297 | 298 | let indexFile = null; 299 | let cacheType = "unknown"; 300 | 301 | // Find the first existing cache with proper format 302 | for (const location of cacheLocations) { 303 | if (fs.existsSync(location)) { 304 | // Check if it's a valid classic cache format 305 | try { 306 | const indexContent = fs.readFileSync(location); 307 | if (indexContent.length > 100) { 308 | const signature = indexContent.slice(0, 4).toString('hex'); 309 | if (signature === 'c103cac3' || signature === 'c3ca03c1') { 310 | indexFile = location; 311 | if (location.includes("GPUCache")) cacheType = "GPU Cache"; 312 | else if (location.includes("DawnWebGPUCache")) cacheType = "Dawn WebGPU Cache"; 313 | else if (location.includes("DawnGraphiteCache")) cacheType = "Dawn Graphite Cache"; 314 | else if (location.includes("Application Cache")) cacheType = "Application Cache"; 315 | else cacheType = "Default Cache"; 316 | 317 | console.log(`Found ${cacheType} at: ${location}`); 318 | break; 319 | } 320 | } 321 | } catch (error) { 322 | console.log(`Skipping invalid cache at ${location}: ${error.message}`); 323 | } 324 | } 325 | } 326 | 327 | // If no valid cache found, return empty data with helpful message 328 | if (!indexFile) { 329 | console.log("No valid cache index file found."); 330 | console.log("Modern browsers use a new cache format that ForensiX doesn't support yet."); 331 | console.log("ForensiX can analyze:"); 332 | console.log(" ✅ Classic Chrome/Chromium cache (data_0, data_1, etc. 
+ index)"); 333 | console.log(" ✅ GPU cache (for graphics-related content)"); 334 | console.log(" ❌ Modern HTTP cache (Cache_Data format) - not supported"); 335 | console.log(""); 336 | console.log("Available cache data in your data directory:"); 337 | 338 | // Show what cache data is available - WITH PROPER ERROR HANDLING 339 | try { 340 | const cacheDataPath = path.join(process.env.VOLUME_PATH, "/Cache_Data"); 341 | const gpuCachePath = path.join(process.env.VOLUME_PATH, "/GPUCache"); 342 | 343 | if (fs.existsSync(cacheDataPath)) { 344 | try { 345 | // Use a safer approach to count files without opening all at once 346 | const stats = fs.statSync(cacheDataPath); 347 | if (stats.isDirectory()) { 348 | console.log(` 📁 Cache_Data: directory exists (modern format - not supported)`); 349 | } 350 | } catch (err) { 351 | console.log(` 📁 Cache_Data: exists but cannot read (${err.message})`); 352 | } 353 | } 354 | 355 | if (fs.existsSync(gpuCachePath)) { 356 | try { 357 | const files = fs.readdirSync(gpuCachePath); 358 | console.log(` 📁 GPUCache: ${files.length} files (classic format - supported)`); 359 | } catch (err) { 360 | console.log(` 📁 GPUCache: exists but cannot read (${err.message})`); 361 | } 362 | } 363 | } catch (error) { 364 | console.log(`Error checking cache directories: ${error.message}`); 365 | } 366 | 367 | return { parsedBlocks: [], totalCount: 0 }; 368 | } 369 | 370 | // Continue with existing logic for valid cache files... 371 | const { indexTable } = parseIndexFile(indexFile); 372 | 373 | // index => index table => hex addrs => bin addrs 374 | const cacheAddresses = getAddressesFromTable(indexTable).map((addr) => 375 | hex2bin(addr) 376 | ); 377 | 378 | console.log(`Found ${cacheAddresses.length} cache addresses`); 379 | 380 | // bin addr => parsed block addr (filename, offset) 381 | const blockAddresses = parseCacheAddresses(cacheAddresses).slice( 382 | +from, 383 | +from + 20 384 | ); 385 | 386 | // filename + offset => block chunk 387 | const blocks = getBlocksFromAddr(blockAddresses); 388 | 389 | // block chunk => parsed block 390 | let { parsedBlocks, toBeParsedAddrs } = parseCacheBlocks(blocks); 391 | 392 | let nextToBeParsedAddrs = toBeParsedAddrs; 393 | 394 | // get next cache addresses till end of list 395 | while (nextToBeParsedAddrs.length > 0) { 396 | const nextBlockAddresses = parseCacheAddresses(nextToBeParsedAddrs); 397 | const nextBlocks = getBlocksFromAddr(nextBlockAddresses); 398 | const nextParsedBlocks = parseCacheBlocks(nextBlocks).parsedBlocks; 399 | parsedBlocks.push(...nextParsedBlocks); 400 | nextToBeParsedAddrs = parseCacheBlocks(nextBlocks).toBeParsedAddrs; 401 | } 402 | 403 | return {parsedBlocks, totalCount: cacheAddresses.length}; 404 | }; 405 | 406 | const getRankings = (rankingsAddr) => { 407 | if (rankingsAddr) { 408 | let binAddr = hex2bin(rankingsAddr); 409 | let rankingAddressesParsed = parseCacheAddresses([binAddr]); 410 | let rankingBlock = getBlocksFromAddr(rankingAddressesParsed)[0]; 411 | 412 | return { 413 | lastUsed: converWebkitTimestamp( 414 | parseInt(changeEndianness(rankingBlock.substr(0, 16)), 16) 415 | ), 416 | lastModified: converWebkitTimestamp( 417 | parseInt(changeEndianness(rankingBlock.substr(16, 16)), 16) 418 | ), 419 | }; 420 | } 421 | }; 422 | 423 | const getHTTPHeader = (headerAddr) => { 424 | if (parseInt(headerAddr, 16) !== 0) { 425 | let parsedHTTPAddr = parseCacheAddresses([hex2bin(headerAddr)]); 426 | let httpBlock = getBlocksFromAddr(parsedHTTPAddr); 427 | 428 | return httpBlock[0]; 429 | } 430 | }; 431 | 432 
| const parseCacheEntryState = (state) => { 433 | switch (state) { 434 | case 0: 435 | return "ENTRY_NORMAL"; 436 | case 1: 437 | return "ENTRY_EVICTED"; 438 | case 2: 439 | return "ENTRY_DOOMED"; 440 | } 441 | }; 442 | 443 | const parseCacheEntryFlag = (flag) => { 444 | switch (flag) { 445 | case 1: 446 | return "PARENT_ENTRY"; 447 | case 2: 448 | return "CHILD_ENTRY"; 449 | } 450 | }; 451 | 452 | const parseHttpHeader = (httpHeader) => { 453 | // e.g.: (C|content-T|type:(|)image/jpeg) 454 | let contentTypeRegExp = /(([A-Z]|[a-z])ontent-([A-Z]|[a-z])ype:)[ ]?([a-z]+\/[a-z]+)/g; 455 | // e.g.: (C|content-L|length:(|)[0-9]+) 456 | let contentLengthRegExp = /(([A-Z]|[a-z])ontent-([A-Z]|[a-z])ength:)[ ]?([0-9]+)/g; 457 | 458 | var contentTypeMatch = hex_to_ascii(httpHeader) 459 | .toString() 460 | .match(contentTypeRegExp); 461 | 462 | var contentLengthMatch = hex_to_ascii(httpHeader) 463 | .toString() 464 | .match(contentLengthRegExp); 465 | 466 | var contentType = contentTypeMatch 467 | ? contentTypeMatch[0].split(":")[1] 468 | : "unknown"; 469 | 470 | var contentLength = contentLengthMatch 471 | ? contentLengthMatch[0].split(":")[1] 472 | : "unknown"; 473 | 474 | return { contentType, contentLength }; 475 | }; 476 | 477 | const parseCacheBlocks = (blocks) => { 478 | let toBeParsedAddrs = []; 479 | 480 | const parsedBlocks = blocks.map((block) => { 481 | // if there is next cache addr to be parsed 482 | if (parseInt(block.substr(8, 8)) !== 0) { 483 | toBeParsedAddrs.push(hex2bin(changeEndianness(block.substr(8, 8)))); 484 | } 485 | 486 | var dataStreamCacheArr = block 487 | .substr(112, 32) 488 | .match(/.{1,8}/g) 489 | .map((a) => changeEndianness(a)); 490 | 491 | // parse header 492 | let httpHeader = getHTTPHeader(dataStreamCacheArr[0]); 493 | if (httpHeader) { 494 | var { contentType, contentLength } = parseHttpHeader(httpHeader); 495 | } 496 | 497 | // get and parse payload 498 | if (parseInt(dataStreamCacheArr[1], 16) !== 0) { 499 | let parsedPayloadAddr = parseCacheAddresses([ 500 | hex2bin(dataStreamCacheArr[1]), 501 | ]); 502 | var payloadBlock = getPayloadBlock(parsedPayloadAddr, contentLength); 503 | } 504 | 505 | return { 506 | hashNumber: block.substr(0, 8), 507 | reuseCount: parseInt(changeEndianness(block.substr(24, 8)), 16), 508 | rankings: getRankings(changeEndianness(block.substr(16, 8))), 509 | refetchCount: parseInt(changeEndianness(block.substr(32, 8)), 16), 510 | cacheEntryState: parseCacheEntryState( 511 | parseInt(changeEndianness(block.substr(40, 8)), 16) 512 | ), 513 | creationTime: converWebkitTimestamp( 514 | parseInt(changeEndianness(block.substr(48, 16)), 16) 515 | ), 516 | keyDataSize: parseInt(changeEndianness(block.substr(64, 8)), 16) * 2, 517 | longKeyDataAddr: changeEndianness(block.substr(72, 8)), 518 | keyData: hex_to_ascii( 519 | block.substr(192, parseInt(block.substr(64, 8), 16)) 520 | ).replace(/\0/g, ""), 521 | contentType: contentType, 522 | contentLength: contentLength, 523 | payload: payloadBlock ? payloadBlock[0] : "", 524 | }; 525 | }); 526 | 527 | return { parsedBlocks, toBeParsedAddrs }; 528 | }; 529 | 530 | module.exports = { 531 | getCacheEntries, 532 | }; 533 | --------------------------------------------------------------------------------
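
Note on the arithmetic used repeatedly above: the controllers convert Chrome's WebKit-style timestamps (microseconds since 1601-01-01 UTC) to Unix time by dividing by 1,000,000 and subtracting 11,644,473,600 seconds, both in SQL (`datetime((x/1000000)-11644473600, 'unixepoch', 'localtime')`) and in `converWebkitTimestamp`, and the comment in CacheController.js resolves block addresses with the offset formula `Block Offset = 8192 + (Block Number * Block Size)`. The sketch below restates both calculations as standalone Node.js functions; the function names and the sample timestamp are illustrative only and are not part of the repository.

// Hedged sketch, not repository code: the names below are illustrative.
// Chrome/WebKit timestamps count microseconds since 1601-01-01 UTC;
// the Unix epoch starts 11,644,473,600 seconds later.
const WEBKIT_TO_UNIX_OFFSET_SECONDS = 11644473600;

function webkitMicrosToDate(webkitMicros) {
  // microseconds -> seconds, then shift from the 1601 epoch to the 1970 epoch
  const unixSeconds = Math.round(webkitMicros / 1000000) - WEBKIT_TO_UNIX_OFFSET_SECONDS;
  return new Date(unixSeconds * 1000);
}

// Block offset formula quoted from the comment in CacheController.js:
// Block Offset = 8192 + (Block Number * Block Size)
function cacheBlockOffset(blockNumber, blockSize) {
  return 8192 + blockNumber * blockSize;
}

// Example values: an arbitrary 2023-era timestamp (illustrative) and the worked
// example from the CacheController comment (block 1339 in data_1, 256-byte blocks).
console.log(webkitMicrosToDate(13320000000000000).toISOString()); // 2023-02-04T16:00:00.000Z
console.log(cacheBlockOffset(1339, 256).toString(16));            // "55b00", i.e. 0x55B00 = 350976

The same offset constant appears in HistoryController.js, LoginDataController.js, and the download metadata queries, so a helper along these lines could in principle replace the duplicated SQL expressions, though the repository keeps the conversion inline in each query.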