├── Procfile ├── src ├── http │ ├── html │ │ ├── favicon.ico │ │ ├── favicon.png │ │ ├── sw.js │ │ ├── manifest.json │ │ ├── style.css │ │ ├── normalize.css │ │ ├── index.js │ │ └── index.html │ ├── api │ │ ├── constants │ │ │ ├── httpCode.js │ │ │ └── commonSchemas.js │ │ ├── utils │ │ │ ├── knex.js │ │ │ ├── basicAuth.js │ │ │ └── Util.js │ │ ├── routes │ │ │ ├── directory │ │ │ │ ├── delete.js │ │ │ │ ├── create.js │ │ │ │ ├── routes.js │ │ │ │ ├── update.js │ │ │ │ └── get.js │ │ │ └── file │ │ │ │ ├── delete.js │ │ │ │ ├── routes.js │ │ │ │ ├── get.js │ │ │ │ ├── update.js │ │ │ │ ├── create.js │ │ │ │ └── download.js │ │ └── services │ │ │ ├── auth.js │ │ │ └── database.js │ └── index.js ├── index.js └── DFs │ ├── lib │ ├── AsyncStreamProcessor.js │ ├── StreamChunker.js │ └── AsyncStreamProcessorWithConcurrency.js │ └── index.js ├── .dockerignore ├── docker ├── entrypoint └── Dockerfile ├── .gitignore ├── .devcontainer └── docker-compose.yaml ├── bin ├── ddrive.js ├── config.js └── migrate.js ├── .npmignore ├── .github └── workflows │ ├── lint.yml │ ├── codeql-analysis.yml │ └── docker-publish.yml ├── config └── .env_sample ├── knexfile.js ├── LICENSE ├── package.json ├── migrations └── 20230104113348_1.0.0.js └── README.md /Procfile: -------------------------------------------------------------------------------- 1 | web: npm start 2 | -------------------------------------------------------------------------------- /src/http/html/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/forscht/ddrive/HEAD/src/http/html/favicon.ico -------------------------------------------------------------------------------- /src/http/html/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/forscht/ddrive/HEAD/src/http/html/favicon.png -------------------------------------------------------------------------------- /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | config.json 3 | .dockerignore 4 | docker/Dockerfile 5 | .git 6 | .gitignore 7 | -------------------------------------------------------------------------------- /docker/entrypoint: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | cd /app && npm run migration:up 4 | node /app/bin/ddrive "$@" 5 | -------------------------------------------------------------------------------- /src/http/html/sw.js: -------------------------------------------------------------------------------- 1 | /** An empty service worker! 
*/ 2 | 3 | self.addEventListener('fetch', (event) => { 4 | 5 | }) 6 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | const DFs = require('./DFs') 2 | const HttpServer = require('./http') 3 | 4 | module.exports = { DFs, HttpServer } 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .idea/ 2 | node_modules/ 3 | .vscode/ 4 | .history/ 5 | .DS_Store 6 | test/coverage/ 7 | !**/.keep 8 | dist 9 | .env 10 | -------------------------------------------------------------------------------- /src/http/api/constants/httpCode.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | OK: 200, 3 | NO_CONTENT: 204, 4 | BAD_REQUEST: 400, 5 | PARTIAL_CONTENT: 206, 6 | NOT_FOUND: 404, 7 | UNAUTHORIZED: 401, 8 | INTERNAL_SERVER_ERROR: 500, 9 | } 10 | -------------------------------------------------------------------------------- /src/http/api/utils/knex.js: -------------------------------------------------------------------------------- 1 | const Knex = require('knex') 2 | const config = require('../../../../knexfile') 3 | 4 | const environment = process.env.NODE_ENV || 'development' 5 | 6 | // 7 | // Expose The Knex connection object 8 | // 9 | module.exports = Knex(config[environment]) 10 | -------------------------------------------------------------------------------- /.devcontainer/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: '3.4' 2 | 3 | services: 4 | postgres: 5 | container_name: ddrive-postgres 6 | image: postgres 7 | volumes: 8 | - ddrive_data:/var/lib/postgresql/data 9 | ports: 10 | - "5429:5432" 11 | environment: 12 | POSTGRES_USER: ddrive 13 | POSTGRES_DB: ddrive 14 | POSTGRES_PASSWORD: ddrive 15 | volumes: 16 | ddrive_data: 17 | -------------------------------------------------------------------------------- /bin/ddrive.js: -------------------------------------------------------------------------------- 1 | const config = require('./config')() 2 | const { DFs, HttpServer } = require('../src') 3 | 4 | const startApp = async () => { 5 | const { DFsConfig, httpConfig } = config 6 | // Create DFs Instance 7 | const dfs = new DFs(DFsConfig) 8 | // Create http Server instance 9 | const httpServer = HttpServer(dfs, httpConfig) 10 | 11 | return httpServer.listen({ host: '0.0.0.0', port: httpConfig.port }) 12 | } 13 | 14 | startApp().then() 15 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | ### macOS template 2 | # General 3 | .DS_Store 4 | .AppleDouble 5 | .LSOverride 6 | 7 | # User-specific stuff 8 | .idea/ 9 | 10 | # KDE directory preferences 11 | .directory 12 | 13 | # Linux trash folder which might appear on any partition or disk 14 | .Trash-* 15 | 16 | # .nfs files are created when an open file is removed but is still being accessed 17 | .nfs* 18 | 19 | _lab/ 20 | 21 | .github/ 22 | 23 | docker/ 24 | 25 | .dockerignore 26 | 27 | .env 28 | -------------------------------------------------------------------------------- /src/DFs/lib/AsyncStreamProcessor.js: -------------------------------------------------------------------------------- 1 | const { Transform } = require('stream') 2 | 3 
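/**
 * Applies an async chunkProcessor to every incoming chunk and holds stream
 * backpressure until the returned promise settles.
 *
 * Usage sketch (illustrative only; persistChunk is a hypothetical async sink):
 *
 *   readable.pipe(new AsyncStreamProcessor((chunk) => persistChunk(chunk)))
 */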
| class AsyncStreamProcessor extends Transform { 4 |     constructor(chunkProcessor) { 5 |         super() 6 |         this.chunkProcessor = chunkProcessor 7 |     } 8 | 9 |     _transform(chunk, encoding, callback) { 10 |         this.chunkProcessor(chunk) 11 |             .then(() => callback(null)) 12 |             .catch((err) => callback(err)) 13 |     } 14 | } 15 | 16 | module.exports = AsyncStreamProcessor 17 | -------------------------------------------------------------------------------- /src/http/html/manifest.json: -------------------------------------------------------------------------------- 1 | { 2 |   "name": "DDrive", 3 |   "short_name": "DDrive", 4 |   "theme_color": "#62c4ff", 5 |   "background_color": "#222225", 6 |   "display": "standalone", 7 |   "scope": "/", 8 |   "start_url": "/", 9 |   "description": "A lightweight cloud storage system using discord as storage.", 10 |   "orientation": "any", 11 |   "icons": [ 12 |     { 13 |       "src":"favicon.png", 14 |       "sizes": "192x192", 15 |       "type": "image/png" 16 |     } 17 |   ] 18 | } 19 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | on: [push, pull_request] 3 | jobs: 4 |   lint: 5 |     name: ESLint 6 |     runs-on: ubuntu-latest 7 |     steps: 8 |       - name: Checkout repository 9 |         uses: actions/checkout@v2 10 | 11 |       - name: Install Node v16 12 |         uses: actions/setup-node@v2 13 |         with: 14 |           node-version: 16 15 | 16 |       - name: Install dependencies 17 |         run: npm install 18 | 19 |       - name: Run ESLint 20 |         run: npm run lint 21 | -------------------------------------------------------------------------------- /docker/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:16-alpine 2 | 3 | # Set node env 4 | ENV NODE_ENV production 5 | 6 | # Set WORKDIR 7 | WORKDIR /app 8 | 9 | # Copy project files 10 | COPY --chown=node:node . /app 11 | 12 | # Update npm 13 | RUN npm update -g npm 14 | 15 | # Install packages 16 | RUN npm install 17 | 18 | # Copy entrypoint 19 | # to be able to pass process argv 20 | COPY docker/entrypoint / 21 | 22 | RUN chmod +x /entrypoint 23 | 24 | # Set user as node 25 | USER node 26 | 27 | # Start app 28 | ENTRYPOINT ["/entrypoint"] 29 | -------------------------------------------------------------------------------- /src/http/api/routes/directory/delete.js: -------------------------------------------------------------------------------- 1 | const db = require('../../services/database') 2 | 3 | module.exports.opts = { 4 |     schema: { 5 |         params: { 6 |             type: 'object', 7 |             required: ['directoryId'], 8 |             properties: { 9 |                 directoryId: { type: 'string', format: 'uuid' }, 10 |             }, 11 |         }, 12 |     }, 13 | } 14 | 15 | module.exports.handler = async (req, reply) => { 16 |     const { directoryId } = req.params 17 |     await db.deleteDirectory(directoryId) 18 |     reply.code(204) 19 | } 20 | -------------------------------------------------------------------------------- /config/.env_sample: -------------------------------------------------------------------------------- 1 | # Required 2 | DATABASE_URL=postgres://ddrive:ddrive@127.0.0.1:5429/ddrive // Postgres DB URL 3 | WEBHOOKS= // ',' separated urls 4 | 5 | # Optional 6 | PORT=3000 // HTTP Server port 7 | REQUEST_TIMEOUT=60000 // Request timeout - Increase if you have a slow internet connection 8 | CHUNK_SIZE=25165824 // 24MB - Can't be increased; Discord webhooks allow at most 25MB per file 9 | SECRET=myrandomsecret // Set a value here if you want to encrypt your files. May cause very high CPU usage
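# Note: with SECRET set, every chunk is encrypted with AES-256-CTR before upload
# and the per-chunk IV is stored with its block row (see src/DFs/index.js)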
10 | PUBLIC_ACCESS=READ_ONLY_FILE // ['READ_ONLY_FILE', 'READ_ONLY_PANEL'] Give anonymous users either file download links only, or read-only access to the whole panel 11 | AUTH=admin:admin // user:pass for HTTP basic auth 12 | UPLOAD_CONCURRENCY= // Parallel chunk uploads, default 3 13 | -------------------------------------------------------------------------------- /src/http/api/routes/directory/create.js: -------------------------------------------------------------------------------- 1 | const HTTP_CODE = require('../../constants/httpCode') 2 | const db = require('../../services/database') 3 | 4 | module.exports.opts = { 5 |     schema: { 6 |         body: { 7 |             type: 'object', 8 |             required: ['name', 'parentId'], 9 |             properties: { 10 |                 name: { type: 'string' }, 11 |                 parentId: { type: 'string' }, 12 |             }, 13 |         }, 14 |         response: { 15 |             [HTTP_CODE.OK]: { $ref: 'Directory#' }, 16 |         }, 17 |     }, 18 | } 19 | 20 | module.exports.handler = async (req, reply) => { 21 |     const directory = await db.createDirectoryOrFile(req.body) 22 |     reply.send(directory) 23 | } 24 | -------------------------------------------------------------------------------- /src/http/api/routes/file/delete.js: -------------------------------------------------------------------------------- 1 | const HTTP_CODE = require('../../constants/httpCode') 2 | const db = require('../../services/database') 3 | 4 | module.exports.opts = { 5 |     schema: { 6 |         params: { 7 |             type: 'object', 8 |             required: ['fileId'], 9 |             properties: { 10 |                 fileId: { type: 'string', format: 'uuid' }, 11 |             }, 12 |         }, 13 |         response: { 14 |             [HTTP_CODE.BAD_REQUEST]: { $ref: 'CommonError#' }, 15 |             [HTTP_CODE.UNAUTHORIZED]: { $ref: 'CommonError#' }, 16 |         }, 17 |     }, 18 | } 19 | 20 | module.exports.handler = async (req, reply) => { 21 |     const { fileId } = req.params 22 |     await db.deleteDirectory(fileId, 'file') 23 |     reply.code(HTTP_CODE.NO_CONTENT) 24 | } 25 | -------------------------------------------------------------------------------- /src/http/api/routes/file/routes.js: -------------------------------------------------------------------------------- 1 | const getFile = require('./get') 2 | const updateFile = require('./update') 3 | const createFile = require('./create') 4 | const deleteFile = require('./delete') 5 | const downloadFile = require('./download') 6 | 7 | module.exports = async function routes(fastify) { 8 |     // Add auth handler; routes whose ACCESS_TAGS include PUBLIC_ACCESS skip basic auth (see services/auth.js) 9 |     fastify.addHook('preHandler', fastify.auth([fastify.basicAuth])) 10 |     // Register routes 11 |     fastify.get('/files/:fileId', getFile.opts, getFile.handler) 12 |     fastify.get('/files/:fileId/download', downloadFile.opts, downloadFile.handler) 13 |     fastify.post('/files/:directoryId', createFile.opts, createFile.handler) 14 |     fastify.put('/files/:fileId', updateFile.opts, updateFile.handler) 15 |     fastify.delete('/files/:fileId', deleteFile.opts, deleteFile.handler) 16 | } 17 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | name: "CodeQL" 2 | on: [push, pull_request] 3 | 4 | jobs: 5 |   analyze: 6 |     name: Analyze 7 |     runs-on: ubuntu-latest 8 |     permissions: 9 |       actions: read 10 |       contents: read 11 |       security-events: write 12 | 13 |     strategy: 14 |       fail-fast: false 15 |       matrix: 16 |         language: [ 'javascript' ] 17 | 18 |     steps: 19 |       - name: Checkout repository 20 |         uses: actions/checkout@v2 21 | 22 |       # Initializes the CodeQL tools for scanning.
23 |       - name: Initialize CodeQL 24 |         uses: github/codeql-action/init@v1 25 |         with: 26 |           languages: ${{ matrix.language }} 27 | 28 |       - name: Autobuild 29 |         uses: github/codeql-action/autobuild@v1 30 | 31 |       - name: Perform CodeQL Analysis 32 |         uses: github/codeql-action/analyze@v1 33 | -------------------------------------------------------------------------------- /src/http/api/routes/directory/routes.js: -------------------------------------------------------------------------------- 1 | const getDirectory = require('./get') 2 | const createDirectory = require('./create') 3 | const deleteDirectory = require('./delete') 4 | const updateDirectory = require('./update') 5 | 6 | module.exports = async function routes(fastify) { 7 |     // Register auth handler 8 |     fastify.addHook('preHandler', fastify.auth([fastify.basicAuth])) 9 |     // Register routes 10 |     fastify.get('/directories', getDirectory.opts, getDirectory.handler) 11 |     fastify.get('/directories/:directoryId', getDirectory.opts, getDirectory.handler) 12 |     fastify.post('/directories', createDirectory.opts, createDirectory.handler) 13 |     fastify.delete('/directories/:directoryId', deleteDirectory.opts, deleteDirectory.handler) 14 |     fastify.put('/directories/:directoryId', updateDirectory.opts, updateDirectory.handler) 15 | } 16 | -------------------------------------------------------------------------------- /src/http/api/routes/file/get.js: -------------------------------------------------------------------------------- 1 | const { throwHttpError } = require('../../utils/Util') 2 | const HTTP_CODE = require('../../constants/httpCode') 3 | const db = require('../../services/database') 4 | 5 | module.exports.opts = { 6 |     config: { 7 |         ACCESS_TAGS: ['READ_ONLY_PANEL'], 8 |     }, 9 |     schema: { 10 |         params: { 11 |             type: 'object', 12 |             required: ['fileId'], 13 |             properties: { 14 |                 fileId: { type: 'string' }, 15 |             }, 16 |         }, 17 |         response: { 18 |             [HTTP_CODE.OK]: { $ref: 'File#' }, 19 |         }, 20 |     }, 21 | } 22 | 23 | module.exports.handler = async (req, reply) => { 24 |     const { fileId } = req.params 25 |     const file = await db.getFile(fileId, true) 26 |     if (!file) throwHttpError('File not found for given fileId', HTTP_CODE.NOT_FOUND) 27 |     reply.send(file) 28 | } 29 | -------------------------------------------------------------------------------- /src/http/api/services/auth.js: -------------------------------------------------------------------------------- 1 | const parseBasicAuth = require('../utils/basicAuth') 2 | 3 | module.exports = ({ auth, publicAccess }) => async (req, reply) => { 4 |     // If creds are not configured, skip auth for this route 5 |     if (!auth.user && !auth.pass) return 6 |     // Check if route is public or not 7 |     const { routeConfig: { ACCESS_TAGS } } = req 8 |     if (ACCESS_TAGS && ACCESS_TAGS.includes(publicAccess)) return 9 |     // Verify credentials 10 |     const authorization = parseBasicAuth(req) 11 |     if ((!authorization) 12 |         || authorization.user !== auth.user 13 |         || authorization.pass !== auth.pass) { 14 |         // Throw error if invalid 15 |         reply.header('WWW-Authenticate', 'Basic realm="DDrive Login"') 16 |         const error = new Error('Missing or badly formatted authorization header') 17 |         error.statusCode = 401 18 |         throw error 19 |     } 20 | } 21 | -------------------------------------------------------------------------------- /knexfile.js: -------------------------------------------------------------------------------- 1 | require('dotenv').config({ path: './config/.env' }) 2 | 3 | module.exports = { 4 |     development: { 5 |         client: 'pg', 6 |         connection: 
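// DATABASE_URL is loaded from config/.env by the dotenv call above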
process.env.DATABASE_URL, 7 | ssl: false, 8 | migrations: { 9 | tableName: 'knex_migrations', 10 | }, 11 | pool: { 12 | min: 2, 13 | max: 10, 14 | }, 15 | }, 16 | docker: { 17 | client: 'pg', 18 | connection: process.env.DATABASE_URL, 19 | migrations: { 20 | tableName: 'knex_migrations', 21 | }, 22 | pool: { 23 | min: 2, 24 | max: 10, 25 | }, 26 | }, 27 | production: { 28 | client: 'pg', 29 | connection: process.env.DATABASE_URL, 30 | ssl: true, 31 | migrations: { 32 | tableName: 'knex_migrations', 33 | }, 34 | pool: { 35 | min: 2, 36 | max: 10, 37 | }, 38 | }, 39 | } 40 | -------------------------------------------------------------------------------- /src/DFs/lib/StreamChunker.js: -------------------------------------------------------------------------------- 1 | const { Transform } = require('stream') 2 | 3 | class StreamChunker extends Transform { 4 | constructor(chunkSize) { 5 | super() 6 | this.chunkSize = chunkSize 7 | this.fill = 0 8 | this.chunks = [] 9 | } 10 | 11 | _transform(chunk, encoding, callback) { 12 | this.fill += chunk.length 13 | this.chunks.push(chunk) 14 | while (this.fill >= this.chunkSize) { 15 | this.push(Buffer.concat(this.chunks, this.chunkSize)) 16 | const lastChunk = this.chunks[this.chunks.length - 1] 17 | const residue = this.fill - this.chunkSize 18 | this.chunks = residue === 0 ? [] : [Buffer.from(lastChunk.slice(lastChunk.length - residue))] 19 | this.fill = residue 20 | } 21 | 22 | callback() 23 | } 24 | 25 | _flush(callback) { 26 | this.push(Buffer.concat(this.chunks)) 27 | callback() 28 | } 29 | } 30 | 31 | module.exports = StreamChunker 32 | -------------------------------------------------------------------------------- /.github/workflows/docker-publish.yml: -------------------------------------------------------------------------------- 1 | name: Docker Image 2 | 3 | on: 4 | release: 5 | types: [published] 6 | 7 | jobs: 8 | push_to_registry: 9 | name: Push Docker image to Docker Hub 10 | runs-on: ubuntu-latest 11 | steps: 12 | - name: Check out the repo 13 | uses: actions/checkout@v2 14 | 15 | - name: Log in to Docker Hub 16 | uses: docker/login-action@f054a8b539a109f9f41c372932f1ae047eff08c9 17 | with: 18 | username: ${{ secrets.DOCKER_USERNAME }} 19 | password: ${{ secrets.DOCKER_PASSWORD }} 20 | 21 | - name: Extract metadata (tags, labels) for Docker 22 | id: meta 23 | uses: docker/metadata-action@98669ae865ea3cffbcbaa878cf57c20bbf1c6c38 24 | with: 25 | images: forscht/ddrive 26 | 27 | - name: Build and push Docker image 28 | uses: docker/build-push-action@ad44023a93711e3deb337508980b4b5e9bcdc5dc 29 | with: 30 | context: . 
31 |           file: docker/Dockerfile 32 |           push: true 33 |           tags: ${{ steps.meta.outputs.tags }} 34 |           labels: ${{ steps.meta.outputs.labels }} 35 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 Lazy14K 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/http/api/routes/file/update.js: -------------------------------------------------------------------------------- 1 | const HTTP_CODE = require('../../constants/httpCode') 2 | const { throwHttpError } = require('../../utils/Util') 3 | const db = require('../../services/database') 4 | 5 | module.exports.opts = { 6 |     schema: { 7 |         params: { 8 |             type: 'object', 9 |             required: ['fileId'], 10 |             properties: { 11 |                 fileId: { type: 'string' }, 12 |             }, 13 |         }, 14 |         body: { 15 |             type: 'object', 16 |             properties: { 17 |                 name: { type: 'string' }, 18 |                 parentId: { type: 'string' }, 19 |             }, 20 |         }, 21 |         response: { 22 |             [HTTP_CODE.OK]: { $ref: 'File#' }, 23 |             [HTTP_CODE.NOT_FOUND]: { $ref: 'CommonError#' }, 24 |         }, 25 |     }, 26 | } 27 | 28 | module.exports.handler = async (req, reply) => { 29 |     const { fileId } = req.params 30 |     const file = await db.updateDirectoryOrFile(fileId, req.body) 31 |     if (!file) throwHttpError('File not found for given Id', HTTP_CODE.NOT_FOUND) 32 |     reply.send(await db.getFile(fileId, true)) 33 | } 34 | -------------------------------------------------------------------------------- /src/http/api/routes/directory/update.js: -------------------------------------------------------------------------------- 1 | const HTTP_CODE = require('../../constants/httpCode') 2 | const { throwHttpError } = require('../../utils/Util') 3 | const db = require('../../services/database') 4 | 5 | module.exports.opts = { 6 |     schema: { 7 |         params: { 8 |             type: 'object', 9 |             required: ['directoryId'], 10 |             properties: { 11 |                 directoryId: { type: 'string' }, 12 |             }, 13 |         }, 14 |         body: { 15 |             type: 'object', 16 |             properties: { 17 |                 name: { type: 'string' }, 18 |                 parentId: { type: 'string' }, 19 |             }, 20 |         }, 21 |         response: { 22 |             [HTTP_CODE.OK]: { $ref: 'Directory#' }, 23 |             [HTTP_CODE.NOT_FOUND]: { $ref: 'CommonError#' }, 24 |         }, 25 |     }, 26 | } 27 | 28 | module.exports.handler = async (req, reply) => { 29 |     const { directoryId } = req.params 30 |     const directory = await 
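// resolves to undefined when the id does not exist or targets the root directory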
db.updateDirectoryOrFile(directoryId, req.body) 31 |     if (!directory) throwHttpError('Directory not found for given Id', HTTP_CODE.NOT_FOUND) 32 |     reply.send(directory) 33 | } 34 | -------------------------------------------------------------------------------- /src/http/api/routes/directory/get.js: -------------------------------------------------------------------------------- 1 | const { throwHttpError } = require('../../utils/Util') 2 | const HTTP_CODE = require('../../constants/httpCode') 3 | const db = require('../../services/database') 4 | 5 | module.exports.opts = { 6 |     config: { 7 |         ACCESS_TAGS: ['READ_ONLY_PANEL'], 8 |     }, 9 |     schema: { 10 |         params: { 11 |             type: 'object', 12 |             properties: { 13 |                 directoryId: { type: 'string' }, 14 |             }, 15 |         }, 16 |         response: { 17 |             [HTTP_CODE.OK]: { $ref: 'Directory#' }, 18 |             [HTTP_CODE.NOT_FOUND]: { $ref: 'CommonError#' }, 19 |         }, 20 |     }, 21 | } 22 | 23 | module.exports.handler = async (req, reply) => { 24 |     const { directoryId } = req.params 25 |     // Get Directory from db 26 |     const directory = await db.getDirectory(directoryId, true) 27 |     if (!directory) throwHttpError('Directory not found for given Id', HTTP_CODE.NOT_FOUND) 28 |     // Transform response from db 29 |     directory.child = directory.child || [] // Handle child = null from db 30 |     const resp = { 31 |         ...directory, 32 |         child: { 33 |             directories: directory.child.filter((c) => c.type === 'directory'), 34 |             files: directory.child.filter((c) => c.type === 'file'), 35 |         }, 36 |     } 37 |     reply.send(resp) 38 | } 39 | -------------------------------------------------------------------------------- /src/http/api/utils/basicAuth.js: -------------------------------------------------------------------------------- 1 | 2 | /** 3 |  * RegExp for basic auth credentials 4 |  * 5 |  * credentials = auth-scheme 1*SP token68 6 |  * auth-scheme = "Basic" ; case insensitive 7 |  * token68 = 1*( ALPHA / DIGIT / "-" / "." 
/ "_" / "~" / "+" / "/" ) *"=" 8 | * @private 9 | */ 10 | 11 | const CREDENTIALS_REGEXP = /^ *(?:[Bb][Aa][Ss][Ii][Cc]) +([A-Za-z0-9._~+/-]+=*) *$/ 12 | 13 | /** 14 | * RegExp for basic auth user/pass 15 | * 16 | * user-pass = userid ":" password 17 | * userid = * 18 | * password = *TEXT 19 | * @private 20 | */ 21 | 22 | const USER_PASS_REGEXP = /^([^:]*):(.*)$/ 23 | 24 | function decodeBase64(str) { 25 | return Buffer.from(str, 'base64').toString() 26 | } 27 | 28 | function parse(string) { 29 | if (typeof string !== 'string') return undefined 30 | 31 | // parse header 32 | const match = CREDENTIALS_REGEXP.exec(string) 33 | if (!match) return undefined 34 | 35 | // decode user pass 36 | const userPass = USER_PASS_REGEXP.exec(decodeBase64(match[1])) 37 | if (!userPass) return undefined 38 | 39 | // return credentials object 40 | return { user: userPass[1], pass: userPass[2] } 41 | } 42 | 43 | function auth(req) { 44 | // get header 45 | const header = req?.headers?.authorization 46 | 47 | // parse header 48 | return parse(header) 49 | } 50 | 51 | module.exports = auth 52 | -------------------------------------------------------------------------------- /src/http/api/constants/commonSchemas.js: -------------------------------------------------------------------------------- 1 | module.exports = [ 2 | { 3 | $id: 'CommonError', 4 | type: 'object', 5 | properties: { 6 | message: { type: 'string' }, 7 | }, 8 | }, 9 | { 10 | $id: 'File', 11 | type: 'object', 12 | required: ['id', 'name', 'parentId', 'size'], 13 | properties: { 14 | id: { type: 'string' }, 15 | name: { type: 'string' }, 16 | parentId: { type: 'string' }, 17 | size: { type: 'number' }, 18 | createdAt: { type: 'string' }, 19 | }, 20 | }, 21 | { 22 | $id: 'Directory', 23 | required: ['name', 'parentId'], 24 | properties: { 25 | id: { type: 'string' }, 26 | name: { type: 'string' }, 27 | parentId: { type: 'string' }, 28 | child: { 29 | type: ['object', 'null'], 30 | properties: { 31 | directories: { 32 | type: 'array', 33 | items: { $ref: 'Directory#' }, 34 | }, 35 | files: { 36 | type: 'array', 37 | items: { $ref: 'File#' }, 38 | }, 39 | }, 40 | }, 41 | createdAt: { type: 'string' }, 42 | }, 43 | }, 44 | ] 45 | -------------------------------------------------------------------------------- /src/http/api/routes/file/create.js: -------------------------------------------------------------------------------- 1 | const { throwHttpError } = require('../../utils/Util') 2 | const HTTP_CODE = require('../../constants/httpCode') 3 | const db = require('../../services/database') 4 | 5 | module.exports.opts = { 6 | schema: { 7 | params: { 8 | type: 'object', 9 | required: ['directoryId'], 10 | properties: { 11 | directoryId: { type: 'string' }, 12 | }, 13 | }, 14 | response: { 15 | [HTTP_CODE.OK]: { $ref: 'File#' }, 16 | }, 17 | }, 18 | } 19 | 20 | module.exports.handler = async (req, reply) => { 21 | const { directoryId } = req.params 22 | // Check if directory exist for given directoryId 23 | const directory = await db.getDirectory(directoryId, false) 24 | if (!directory) throwHttpError('Invalid directoryId', HTTP_CODE.NOT_FOUND) 25 | 26 | // Check if file exist or not in req 27 | const data = await req.file({ limits: { files: 1 } }) 28 | const { file: fileStream, filename } = data 29 | if (!fileStream || !filename) throwHttpError('File is missing in request body', HTTP_CODE.BAD_REQUEST) 30 | 31 | // Upload file to discord in chunks 32 | const discordFileParts = await req.dfs.write(fileStream) 33 | 34 | // Create File in db 35 | const 
fileData = { name: filename, parentId: directoryId, type: 'file' } 36 |     const file = await db.createFileWithParts(fileData, discordFileParts) 37 | 38 |     // Response 39 |     reply.send(file) 40 | } 41 | -------------------------------------------------------------------------------- /src/http/api/utils/Util.js: -------------------------------------------------------------------------------- 1 | class Util { 2 |     /** 3 |      * Parse "Range" header `str` relative to the given file `size`. 4 |      * 5 |      * @param {Number} size 6 |      * @param {String} str 7 |      * @return {Object|Number} 8 |      */ 9 |     static rangeParser(size, str) { 10 |         if (typeof str !== 'string') return -1 11 | 12 |         const index = str.indexOf('=') 13 | 14 |         if (index === -1) return -1 15 | 16 |         // split the range string 17 |         const [rangeStr] = str.slice(index + 1).split(',') 18 | 19 |         const range = rangeStr.split('-') 20 |         let start = parseInt(range[0], 10) 21 |         let end = parseInt(range[1], 10) 22 | 23 |         // -nnn 24 |         if (Number.isNaN(start)) { 25 |             start = size - end 26 |             end = size - 1 27 |         // nnn- 28 |         } else if (Number.isNaN(end)) { 29 |             end = size - 1 30 |         } 31 | 32 |         // limit last-byte-pos to current length 33 |         if (end > size - 1) { 34 |             end = size - 1 35 |         } 36 | 37 |         // invalid or unsatisfiable 38 |         if (Number.isNaN(start) || Number.isNaN(end) || start > end || start < 0) { 39 |             return -1 40 |         } 41 | 42 |         // add range 43 |         return { start, end } 44 |     } 45 | 46 |     /** 47 |      * Throw an http error with the given message and status code 48 |      * @param message 49 |      * @param [statusCode=500] 50 |      */ 51 |     static throwHttpError(message, statusCode = 500) { 52 |         const error = new Error(message) 53 |         error.statusCode = statusCode 54 |         throw error 55 |     } 56 | } 57 | module.exports = Util 58 | -------------------------------------------------------------------------------- /src/DFs/lib/AsyncStreamProcessorWithConcurrency.js: -------------------------------------------------------------------------------- 1 | const { Transform } = require('stream') 2 | 3 | function cbNoop(cb) { 4 |     cb() 5 | } 6 | 7 | class AsyncStreamProcessorWithConcurrency extends Transform { 8 |     constructor(chunkProcessor, maxConcurrency = 1) { 9 |         super() 10 |         this.chunkProcessor = chunkProcessor 11 |         this.maxConcurrency = maxConcurrency 12 |         this.chunkCount = 0 13 | 14 |         this.lastCallback = undefined 15 |         this.pendingFinish = undefined 16 |         this.concurrent = 0 17 |         this._final = this.callOnFinish(cbNoop) 18 |     } 19 |     // Wraps the stream's _final so it completes only after every in-flight chunk promise settles 20 |     callOnFinish(original) { 21 |         return function cbHell(callback) { 22 |             if (this.concurrent === 0) original.call(this, callback) // nothing in flight: finish now 23 |             else this.pendingFinish = original.bind(this, callback) // defer finish until the last chunk settles 24 |         } 25 |     } 26 | 27 |     _transform(chunk, encoding, callback) { 28 |         this.concurrent += 1 29 |         if (this.concurrent < this.maxConcurrency) { // below the limit: request the next chunk immediately 30 |             callback(null) 31 |         } else this.lastCallback = callback // at the limit: hold the callback until a chunk finishes 32 |         this.chunkProcessor(chunk, this.chunkCount) 33 |             .then(() => { 34 |                 this.concurrent -= 1 35 |                 if (this.lastCallback) { 36 |                     this.lastCallback() 37 |                     this.lastCallback = null 38 |                 } 39 |                 if (this.concurrent === 0 && this.pendingFinish) { 40 |                     this.pendingFinish() 41 |                     this.pendingFinish = null 42 |                 } 43 |             }) 44 |             .catch((err) => this.emit('error', err)) 45 |         this.chunkCount += 1 46 |     } 47 | } 48 | 49 | module.exports = AsyncStreamProcessorWithConcurrency 50 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 |   "name": "@forscht/ddrive", 3 |   "version": "4.3.0", 4 | 
"description": "A lightweight cloud storage system using discord as storage device written in nodejs", 5 | "main": "src/index.js", 6 | "author": "Darshan Patel (https://github.com/forscht/ddrive)", 7 | "homepage": "https://github.com/forscht/ddrive", 8 | "license": "MIT", 9 | "bugs": { 10 | "url": "https://github.com/forscht/ddrive/issues" 11 | }, 12 | "repository": { 13 | "type": "git", 14 | "url": "https://github.com/forscht/ddrive.git" 15 | }, 16 | "scripts": { 17 | "start": "node bin/ddrive", 18 | "railway": "npm run migration:up && npm start", 19 | "lint": "eslint --ext mjs,js,ts . --ignore-pattern 'src/http/html'", 20 | "migration:latest": "knex migrate:latest", 21 | "migration:up": "knex migrate:up", 22 | "migration:down": "knex migrate:down", 23 | "migration:make": "knex migrate:make" 24 | }, 25 | "bin": {}, 26 | "engines": { 27 | "node": "16.x" 28 | }, 29 | "devDependencies": { 30 | "@forscht/eslint-config": "latest" 31 | }, 32 | "eslintConfig": { 33 | "env": { 34 | "browser": true 35 | }, 36 | "extends": "@forscht" 37 | }, 38 | "dependencies": { 39 | "@discordjs/rest": "^1.0.0", 40 | "@fastify/auth": "^4.2.0", 41 | "@fastify/multipart": "^7.3.0", 42 | "@fastify/static": "^6.6.0", 43 | "dotenv": "^16.0.3", 44 | "fastify": "^4.11.0", 45 | "knex": "^2.4.0", 46 | "lodash": "^4.17.21", 47 | "mime-types": "^2.1.35", 48 | "pg": "^8.8.0", 49 | "uuid": "^8.3.2" 50 | }, 51 | "keywords": [ 52 | "discord", 53 | "filesystem", 54 | "channel", 55 | "server", 56 | "http server", 57 | "discord file system", 58 | "discord storage" 59 | ] 60 | } 61 | -------------------------------------------------------------------------------- /src/http/index.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const Fastify = require('fastify') 3 | const FastifyStatic = require('@fastify/static') 4 | const FastifyMultipart = require('@fastify/multipart') 5 | const FastifyAuth = require('@fastify/auth') 6 | 7 | const commonSchemas = require('./api/constants/commonSchemas') 8 | const directoryRoutes = require('./api/routes/directory/routes') 9 | const fileRoutes = require('./api/routes/file/routes') 10 | const Auth = require('./api/services/auth') 11 | 12 | module.exports = (dfs, opts) => { 13 | // Create fastify instance 14 | const fastify = Fastify({ logger: { base: undefined } }) 15 | 16 | // Load common schemas 17 | commonSchemas.forEach((schema) => fastify.addSchema(schema)) 18 | 19 | // Enable Multipart upload 20 | fastify.register(FastifyMultipart, { limits: { fileSize: Infinity } }) 21 | 22 | // Load Auth and then register the routes 23 | fastify.decorate('basicAuth', Auth(opts.authOpts)) 24 | fastify.register(FastifyAuth) 25 | .after(() => { 26 | fastify.register(FastifyStatic, { root: path.join(__dirname, 'html') }) 27 | fastify.register(directoryRoutes, { prefix: '/api' }) 28 | fastify.register(fileRoutes, { prefix: '/api' }) 29 | }) 30 | 31 | // Attach dfs to every req 32 | fastify.addHook('onRequest', async (req) => { req.dfs = dfs }) 33 | 34 | // Setup Error handler 35 | fastify.setErrorHandler(function handler(error, request, reply) { 36 | if (error.statusCode > 500 || !error.statusCode) { 37 | const errorToLog = error.rawError || error 38 | errorToLog.reqId = request.id 39 | this.log.error(errorToLog) 40 | error.statusCode = 500 // eslint-disable-line no-param-reassign 41 | error.message = 'Internal server error' // eslint-disable-line no-param-reassign 42 | } 43 | reply.status(error.statusCode).send({ id: request.id, message: 
error.message }) 44 | }) 45 | 46 | // Handle Not found handler 47 | fastify.setNotFoundHandler((request, reply) => { 48 | reply.status(404).send({ message: 'Not found' }) 49 | }) 50 | 51 | return fastify 52 | } 53 | -------------------------------------------------------------------------------- /src/http/html/style.css: -------------------------------------------------------------------------------- 1 | :root { 2 | --global-font-size: 1rem; 3 | --global-line-height: 1.4em; 4 | --global-space: 5px; 5 | --page-width: 50rem; 6 | --font-stack: Menlo, Monaco, Lucida Console, Liberation Mono, 7 | DejaVu Sans Mono, Bitstream Vera Sans Mono, Courier New, monospace, 8 | serif; 9 | --mono-font-stack: Menlo, Monaco, Lucida Console, Liberation Mono, 10 | DejaVu Sans Mono, Bitstream Vera Sans Mono, Courier New, monospace, 11 | serif; 12 | --input-style: solid; 13 | --display-h1-decoration: none; 14 | } 15 | 16 | :root.dark { 17 | --background-color: #222225; 18 | --font-color: #ffffff; 19 | --invert-font-color: #222225; 20 | --secondary-color: #a3abba; 21 | --tertiary-color: #a3abba; 22 | --primary-color: #F05454; 23 | --error-color: #ff3c74; 24 | --progress-bar-background: #3f3f44; 25 | --progress-bar-fill: #F05454; 26 | --code-bg-color: #3f3f44; 27 | } 28 | 29 | :root.light { 30 | --background-color: #E8E8E8; 31 | --font-color: #222831; 32 | --invert-font-color: #E8E8E8; 33 | --secondary-color: #a3abba; 34 | --tertiary-color: #a3abba; 35 | --primary-color: #F05454; 36 | --error-color: #ff3c74; 37 | --progress-bar-background: #3f3f44; 38 | --progress-bar-fill: #F05454; 39 | --code-bg-color: #3f3f44; 40 | 41 | } 42 | 43 | table { 44 | /*max-height: 80%;*/ 45 | /*height: 500px;*/ 46 | /*overflow-y: scroll;*/ 47 | } 48 | th { 49 | text-align: left; 50 | } 51 | 52 | table td { 53 | padding-left: 1rem; 54 | } 55 | 56 | td { 57 | max-width: 25rem; 58 | overflow: clip; 59 | text-overflow: ellipsis; 60 | white-space: nowrap; 61 | } 62 | 63 | table th { 64 | padding-left: 1rem; 65 | } 66 | 67 | table tbody td:first-child { 68 | font-weight: normal; 69 | color: unset; 70 | } 71 | 72 | .selected { 73 | background-color: var(--primary-color); 74 | color: var(--invert-font-color); 75 | } 76 | 77 | button { 78 | /*cursor: pointer;*/ 79 | color: var(--font-color); 80 | } 81 | 82 | button:hover { 83 | cursor: pointer; 84 | } 85 | button:disabled { 86 | cursor: unset; 87 | color: var(--tertiary-color); 88 | } 89 | 90 | @media only screen and (min-width: 120em) { 91 | } 92 | -------------------------------------------------------------------------------- /migrations/20230104113348_1.0.0.js: -------------------------------------------------------------------------------- 1 | const DIRECTORY_TABLE = 'directory' 2 | const BLOCKS_TABLE = 'block' 3 | 4 | /** 5 | * @param { import("knex").Knex } knex 6 | * @returns { Promise } 7 | */ 8 | exports.up = async (knex) => { 9 | await knex.schema.createTable(DIRECTORY_TABLE, (table) => { 10 | table.uuid('id') 11 | .primary() 12 | .defaultTo(knex.raw('gen_random_uuid()')) 13 | 14 | table.string('name') 15 | .notNullable() 16 | .comment('Name of the file') 17 | 18 | table.uuid('parentId') 19 | .references('id') 20 | .inTable(DIRECTORY_TABLE) 21 | .onDelete('CASCADE') 22 | .index('directory_parent_id_idx') 23 | .comment('Id of the parent') 24 | table.enum('type', ['directory', 'file']) 25 | .notNullable() 26 | .comment('Type of the entry') 27 | table 28 | .timestamp('createdAt') 29 | .notNullable() 30 | .defaultTo(knex.fn.now()) 31 | .comment('We want to know when this entry was 
created') 32 | 33 |         table.unique(['name', 'parentId']) 34 |     }) 35 | 36 |     await knex.schema.createTable(BLOCKS_TABLE, (table) => { 37 |         table.uuid('id') 38 |             .primary() 39 |             .defaultTo(knex.raw('gen_random_uuid()')) 40 |         table 41 |             .uuid('fileId') 42 |             .notNullable() 43 |             .index('block_file_idx') 44 |             .references('id') 45 |             .inTable(DIRECTORY_TABLE) 46 |             .onDelete('CASCADE') 47 |             .comment('Id of the file from directory table') 48 |         table 49 |             .string('url') 50 |             .notNullable() 51 |             .comment('URL of the file') 52 |         table 53 |             .integer('size') 54 |             .unsigned() 55 |             .notNullable() 56 |             .comment('Size of the block in Bytes') 57 |         table.string('iv') 58 |             .nullable() 59 |             .comment('Iv to decrypt the block') 60 |         table 61 |             .timestamp('createdAt') 62 |             .notNullable() 63 |             .defaultTo(knex.fn.now()) 64 |             .comment('We want to know when this entry was created') 65 |     }) 66 | 67 |     await knex(DIRECTORY_TABLE).insert({ name: 'root', type: 'directory' }) 68 | } 69 | 70 | /** 71 |  * @param { import("knex").Knex } knex 72 |  * @returns { Promise } 73 |  */ 74 | exports.down = async (knex) => { 75 |     await knex.schema.dropTable(BLOCKS_TABLE) 76 |     await knex.schema.dropTable(DIRECTORY_TABLE) 77 | } 78 | -------------------------------------------------------------------------------- /src/http/api/routes/file/download.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const mime = require('mime-types') 3 | const { throwHttpError, rangeParser } = require('../../utils/Util') 4 | const HTTP_CODE = require('../../constants/httpCode') 5 | const db = require('../../services/database') 6 | 7 | module.exports.opts = { 8 |     config: { 9 |         ACCESS_TAGS: ['READ_ONLY_PANEL', 'READ_ONLY_FILE'], 10 |     }, 11 |     schema: { 12 |         params: { 13 |             type: 'object', 14 |             required: ['fileId'], 15 |             properties: { 16 |                 fileId: { type: 'string' }, 17 |             }, 18 |         }, 19 |     }, 20 | } 21 | 22 | // 23 | // Returns the subset of parts that covers bytes start..end of the file 24 | // 25 | const rangedParts = (parts, start, end) => { 26 |     const chunkSize = parts[0].size 27 |     const startPartNumber = Math.ceil(start / chunkSize) ? 
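// e.g. with 24 MiB chunks a range starting at byte 30 MiB maps to part index 1
// (part 0 is skipped), and the first/last parts are trimmed via the start/end offsets below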
Math.ceil(start / chunkSize) - 1 : 0 28 |     const endPartNumber = Math.ceil(end / chunkSize) 29 |     const partsToDownload = parts.slice(startPartNumber, endPartNumber) 30 |     partsToDownload[0].start = start % chunkSize 31 |     partsToDownload[partsToDownload.length - 1].end = end % chunkSize 32 | 33 |     return partsToDownload 34 | } 35 | 36 | module.exports.handler = async (req, reply) => { 37 |     const { fileId } = req.params 38 |     const { range } = req.headers 39 |     // 40 |     // Check in db that a file exists for the given fileId 41 |     // 42 |     const file = await db.getFile(fileId, true, true) 43 |     if (!file) throwHttpError('File not found for given fileId', HTTP_CODE.NOT_FOUND) 44 |     if (!file.parts.length) throwHttpError('Corrupt file', HTTP_CODE.INTERNAL_SERVER_ERROR) 45 | 46 |     // 47 |     // Prepare response headers 48 |     // 49 |     const resHeaders = { 50 |         'Content-Length': file.size, 51 |         'Accept-Ranges': 'bytes', 52 |         'Content-Disposition': `attachment; filename="${encodeURI(file.name)}"`, 53 |     } 54 |     const mimeType = mime.lookup(path.extname(file.name)) 55 |     if (mimeType) resHeaders['Content-Type'] = mimeType 56 | 57 |     // 58 |     // Handle Partial content request (Resume download) 59 |     // 60 |     const parsedRange = rangeParser(file.size, range) 61 |     if (range && parsedRange !== -1) { 62 |         const { start, end } = parsedRange 63 |         reply.raw.writeHead(HTTP_CODE.PARTIAL_CONTENT, { 64 |             ...resHeaders, 65 |             'Content-Length': end - start + 1, 66 |             'Content-Range': `bytes ${start}-${end}/${file.size}`, 67 |         }) 68 |         file.parts = rangedParts(file.parts, parsedRange.start, parsedRange.end) 69 | 70 |         return req.dfs.read(reply.raw, file.parts) 71 |     } 72 |     // 73 |     // Handle request without partial content 74 |     // 75 |     reply.raw.writeHead(HTTP_CODE.OK, resHeaders) 76 | 77 |     return req.dfs.read(reply.raw, file.parts) 78 | } 79 | -------------------------------------------------------------------------------- /bin/config.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const path = require('path') 3 | require('dotenv').config({ path: './config/.env' }) 4 | const _ = require('lodash') 5 | 6 | // Valid public access modes 7 | const VALID_PUBLIC_ACCESS = ['READ_ONLY_FILE', 'READ_ONLY_PANEL'] 8 | 9 | // If a webhook file exists, load webhook urls from it 10 | const loadWebhooks = () => { 11 |     const filePath = path.join(process.cwd(), 'webhook.txt') 12 |     const fileExist = fs.existsSync(filePath) 13 |     if (!fileExist) return undefined 14 |     const webhookFileBuffer = fs.readFileSync(filePath) 15 | 16 |     return webhookFileBuffer.toString().split('\n') 17 | } 18 | 19 | const HttpConfig = () => { 20 |     const { 21 |         PORT = 3000, 22 |         AUTH = '', 23 |         PUBLIC_ACCESS, 24 |         DATABASE_URL, 25 |     } = process.env 26 | 27 |     // Check that the database url exists. 28 |     if (!DATABASE_URL) throw new Error('Database URL is missing') 29 | 30 |     // Validate correct public access is supplied 31 |     if (PUBLIC_ACCESS 32 |         && !VALID_PUBLIC_ACCESS.includes(PUBLIC_ACCESS)) { 33 |         throw new Error(`Invalid PUBLIC_ACCESS ${PUBLIC_ACCESS} supplied. Possible values are ${VALID_PUBLIC_ACCESS.join(', ')}`) 34 |     }
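// Note: leaving AUTH empty disables basic auth entirely (see services/auth.js)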
35 |     // Prepare username and password from AUTH 36 |     const [user, pass] = AUTH.split(':') 37 | 38 |     return { 39 |         authOpts: { 40 |             auth: { user, pass }, 41 |             publicAccess: PUBLIC_ACCESS, 42 |         }, 43 |         port: PORT, 44 |     } 45 | } 46 | 47 | const DFsConfig = () => { 48 |     const { 49 |         CHUNK_SIZE, 50 |         UPLOAD_CONCURRENCY = '', 51 |         REQUEST_TIMEOUT = '', 52 |         SECRET = '', 53 |         WEBHOOKS = '', 54 |     } = process.env 55 | 56 |     // Get webhook URLs 57 |     let webhooks = loadWebhooks() 58 |     if (!webhooks) webhooks = WEBHOOKS.split(',') 59 |     webhooks = webhooks.filter((w) => !!w) 60 |     if (!webhooks || !webhooks.length) { 61 |         throw new Error('Webhook URLs missing. Provide them separated by "," in .env, or separated by "\n" in a webhook.txt file') 62 |     } 63 |     // If chunkSize is invalid set the default chunkSize 64 |     let chunkSize = parseInt(CHUNK_SIZE, 10) 65 |     if (!_.isFinite(chunkSize) 66 |         || chunkSize < 1 67 |         || chunkSize > 26109542) chunkSize = 25165824 // 24 MB 68 | 69 |     // Set proper request timeout 70 |     let timeout = parseInt(REQUEST_TIMEOUT, 10) 71 |     if (!_.isFinite(timeout) || timeout < 1) timeout = 60000 72 | 73 |     let maxConcurrency = parseInt(UPLOAD_CONCURRENCY, 10) 74 |     if (!_.isFinite(maxConcurrency) || maxConcurrency < 1) maxConcurrency = 3 75 | 76 |     return { 77 |         chunkSize, 78 |         webhooks, 79 |         secret: SECRET, 80 |         maxConcurrency, 81 |         restOpts: { 82 |             timeout, 83 |         }, 84 |     } 85 | } 86 | 87 | module.exports = () => ({ 88 |     httpConfig: HttpConfig(), 89 |     DFsConfig: DFsConfig(), 90 | }) 91 | -------------------------------------------------------------------------------- /bin/migrate.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const _ = require('lodash') 3 | const knex = require('../src/http/api/utils/knex') 4 | 5 | const normalizePath = (p, addLastSlash = false) => { 6 |     let r = path.posix.normalize(p.replace(/\\/g, '/')) 7 |     if (r.endsWith('/') && r !== '/') r = r.slice(0, -1) 8 |     if (!r.endsWith('/') && addLastSlash) r = `${r}/` 9 | 10 |     return r.startsWith('/') ? 
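// guarantee a leading slash: '/a/b' is returned as-is, 'a/b' becomes '/a/b'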
r : `/${r}` 11 | } 12 | 13 | const migrate = async () => { 14 | // Get file path from command line argv 15 | // node bin/migrate.js raw.json 16 | const metadataFilePath = process.argv[2] 17 | 18 | const metadata = require(path.relative(__dirname, metadataFilePath)) // eslint-disable-line global-require,import/no-dynamic-require 19 | 20 | const { files, directories } = metadata 21 | 22 | const createFile = async (file, parentId) => { 23 | try { 24 | await knex('directory').insert({ 25 | id: file.id, 26 | name: file.name, 27 | type: 'file', 28 | parentId, 29 | }) 30 | } catch (err) { 31 | if (err.code === '23505') { 32 | console.log('Duplicate file found', file.name) 33 | 34 | return 35 | } 36 | throw err 37 | } 38 | 39 | let { parts } = file 40 | parts = _.orderBy(parts, 'partNumber') 41 | parts = parts.map((p) => ({ 42 | size: p.size, 43 | url: p.url, 44 | fileId: file.id, 45 | })) 46 | 47 | await knex('block').insert(parts) 48 | } 49 | 50 | const getChildDirectories = (directoryName) => { 51 | const normalizedDirectoryName = normalizePath(directoryName, true) 52 | const children = [] 53 | directories.forEach((item) => { 54 | if (item.name !== normalizedDirectoryName && item.name.startsWith(normalizedDirectoryName) && item.name.trim() !== '') { 55 | let { name } = item 56 | if (name.indexOf(normalizedDirectoryName) === 0) name = name.substring(normalizedDirectoryName.length) 57 | if (name.indexOf('/') === 0) name = name.substring(1) 58 | if (!name.includes('/')) children.push(name) 59 | } 60 | // if (this.isChildOf(item.name, normalizedDirectoryName)) children.push(path.basename(item.name)) 61 | }) 62 | 63 | return children 64 | } 65 | 66 | const createDir = async (dirName = '/', parentId = null) => { 67 | try { 68 | const dir = directories.find((d) => d.name === dirName) 69 | if (!dir) return 70 | // Skip creating root dir 71 | if (dirName === '/') dir.id = parentId 72 | else { 73 | await knex('directory').insert({ 74 | id: dir.id, name: path.basename(dirName), parentId, type: 'directory', 75 | }) 76 | } 77 | const childFiles = files.filter((f) => f.directoryId === dir.id) 78 | await Promise.all(childFiles.map((f) => createFile(f, dir.id))) 79 | const childDirectories = getChildDirectories(dirName) 80 | .map((d) => path.normalize(`${dirName}/${d}`)) 81 | await Promise.all(childDirectories.map(async (d) => createDir(d, dir.id))) 82 | } catch (err) { 83 | console.log('Error => ', dirName) 84 | throw err 85 | } 86 | } 87 | 88 | const rootDir = await knex('directory').whereNull('parentId').first() 89 | await createDir('/', rootDir.id) 90 | 91 | console.log('------------- Migration Done -------------------') 92 | process.exit(0) 93 | } 94 | 95 | migrate().then() 96 | -------------------------------------------------------------------------------- /src/http/api/services/database.js: -------------------------------------------------------------------------------- 1 | const knex = require('../utils/knex') 2 | 3 | /** 4 | * 5 | * @param id {String} 6 | * @param size {Boolean} 7 | * @param parts {Boolean} 8 | * @returns {Promise} 9 | */ 10 | const getFile = async (id, size = false, parts = false) => { 11 | const query = knex(`directory as d`) 12 | .select('d.*') 13 | .leftJoin(`block as b`, 'd.id', 'b.fileId') 14 | .where('d.id', '=', id) 15 | .groupBy('d.id') 16 | .first() 17 | if (size) query.select(knex.raw(`sum(b.size) as size`)) 18 | if (parts) query.select(knex.raw(`jsonb_agg(to_jsonb(b) - 'fileId') as parts`)) 19 | 20 | return query 21 | } 22 | 23 | /** 24 | * Get directory with or without 
child 25 |  * @param id 26 |  * @param child 27 |  * @returns {Promise} 28 |  */ 29 | const getDirectory = async (id = null, child = false) => { 30 |     // 31 |     // Knex Base Query 32 |     // 33 |     const query = knex(`directory`) 34 |         .select('*') 35 |         .first() 36 |     // 37 |     // If id is not provided return result for root dir 38 |     // 39 |     if (id) query.where('id', '=', id) 40 |     if (!id) query.whereNull('parentId') 41 |     // 42 |     // Fetch child if asked for 43 |     // 44 |     if (child && !id) { 45 |         query.select(knex.raw( 46 |             `(select json_agg(r) FROM (select "d".*, sum(b.size) as size 47 |             from "directory" as "d" 48 |             left join "block" as "b" on "d"."id" = "b"."fileId" 49 |             where "d"."parentId" = (select "id" from "directory" where "parentId" is null) 50 |             group by "d"."id") r ) as child`, 51 |         )) 52 |     } 53 |     if (child && id) { 54 |         query.select(knex.raw( 55 |             `(select json_agg(r) FROM (select "d".*, sum(b.size) as size 56 |             from "directory" as "d" 57 |             left join "block" as "b" on "d"."id" = "b"."fileId" 58 |             where "d"."parentId" = ? 59 |             group by "d"."id") r ) as child`, [id], 60 |         )) 61 |     } 62 | 63 |     return query 64 | } 65 | 66 | /** 67 |  * Create directory in db 68 |  * @param data {Object} 69 |  * @param type {String} 70 |  * @returns {Promise<*>} 71 |  */ 72 | const createDirectoryOrFile = async (data, type = 'directory') => { 73 |     try { 74 |         const [directory] = await knex('directory') 75 |             .insert({ ...data, type }) 76 |             .returning('*') 77 | 78 |         return directory 79 |     } catch (err) { 80 |         // Handle SQL error for unique key constraint 81 |         if (err.code === '23505') { 82 |             err.message = 'Directory or file with same name already exists' 83 |             err.statusCode = 400 84 |         } 85 |         throw err 86 |     } 87 | } 88 | 89 | /** 90 |  * Delete directory for given directoryId 91 |  * @param id {String} 92 |  * @param type {String} 93 |  * @returns {Promise} 94 |  */ 95 | const deleteDirectory = async (id, type = 'directory') => { 96 |     await knex('directory') 97 |         .where({ id, type }) 98 |         .whereNotNull('parentId') // Do not let user delete root dir 99 |         .delete() 100 |         .catch(() => {}) // Ignore error if directory id is an invalid guid 101 | } 102 | 103 | /** 104 |  * Update directory record for given fileId or directoryId 105 |  * @param id {String} 106 |  * @param data {Object} 107 |  * @returns {Promise<*>} 108 |  */ 109 | const updateDirectoryOrFile = async (id, data) => { 110 |     try { 111 |         const [directory] = await knex('directory') 112 |             .update(data) 113 |             .where({ id }) 114 |             .whereNotNull('parentId') // Do not let user update root dir 115 |             .returning('*') 116 | 117 |         return directory 118 |     } catch (err) { 119 |         // Handle SQL error for unique key constraint 120 |         if (err.code === '23505') { 121 |             err.message = 'Directory or file with same name already exists' 122 |             err.statusCode = 400 123 |         } 124 |         throw err 125 |     } 126 | } 127 | 128 | const createFileWithParts = async (data, parts) => { 129 |     const file = await createDirectoryOrFile(data, 'file') 130 |     await knex('block').insert(parts.map((p) => ({ fileId: file.id, ...p }))) 131 | 132 |     return getFile(file.id, true, false) 133 | } 134 | 135 | module.exports = { 136 |     getFile, getDirectory, createDirectoryOrFile, deleteDirectory, updateDirectoryOrFile, createFileWithParts, 137 | } 138 | -------------------------------------------------------------------------------- /src/DFs/index.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable no-restricted-syntax,no-await-in-loop */ 2 | const https = require('https') 3 | const crypto = 
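// node core crypto, used for the optional AES-256-CTR chunk encryption below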
require('crypto') 4 | const { REST } = require('@discordjs/rest') 5 | const _ = require('lodash') 6 | const uuid = require('uuid').v4 7 | const AsyncStreamProcessorWithConcurrency = require('./lib/AsyncStreamProcessorWithConcurrency') 8 | const AsyncStreamProcessor = require('./lib/AsyncStreamProcessor') 9 | const StreamChunker = require('./lib/StreamChunker') 10 | 11 | const DEFAULT_CHUNK_SIZE = 25165824 // 24MB 12 | const DEFAULT_ENCRYPTION = 'aes-256-ctr' 13 | const DEFAULT_REST_OPTS = { version: 10, timeout: 60000 } 14 | const DEFAULT_MAX_UPLOAD_CONCURRENCY = 3 15 | 16 | class DiscordFileSystem { 17 |     constructor(opts) { 18 |         this.webhooks = opts.webhooks 19 |         this.maxUploadConc = opts.maxUploadConc || opts.maxConcurrency || DEFAULT_MAX_UPLOAD_CONCURRENCY // bin/config.js passes this as maxConcurrency 20 |         this.chunkSize = opts.chunkSize || DEFAULT_CHUNK_SIZE 21 |         this.encAlg = opts.encAlg || DEFAULT_ENCRYPTION 22 |         this.secret = opts.secret 23 |         this.rest = new REST({ ...DEFAULT_REST_OPTS, ...opts.restOpts }) 24 |         this.lastWbIdx = 0 25 | 26 |         // 27 |         // Validate parameters 28 |         // 29 |         if (!this.webhooks) throw new Error('webhooks parameter is missing') 30 |         if (!this.webhooks.length) throw new Error('At least 1 valid webhook URL is required') 31 | 32 |         if (!_.isFinite(this.chunkSize) 33 |             || this.chunkSize < 1 34 |             || this.chunkSize > 26109542) { 35 |             throw new Error('Invalid chunkSize - chunkSize should be a valid number between 1 and 26109542') 36 |         } 37 | 38 |         const { timeout } = { ...DEFAULT_REST_OPTS, ...opts.restOpts } // fall back to defaults so this check passes when restOpts is omitted 39 |         if (!_.isFinite(timeout) || timeout < 1) { 40 |             throw new Error('Invalid timeout - timeout should be a valid number > 0') 41 |         } 42 |     } 43 | 44 |     get webhookURL() { 45 |         const webhookURL = this.webhooks[this.lastWbIdx] 46 |         this.lastWbIdx = this.lastWbIdx + 1 >= this.webhooks.length 47 |             ? 0 48 |             : this.lastWbIdx + 1 49 | 50 |         return webhookURL.replace('https://discord.com/api', '') 51 |     } 52 | 53 |     /** 54 |      * @description Encrypt the given buffer 55 |      * @param secret 56 |      * @param data 57 |      * @returns {{encrypted: Buffer, iv: string}} 58 |      * @private 59 |      */ 60 |     _encrypt(secret, data) { 61 |         // Create hash for given secret 62 |         const key = crypto.createHash('sha256').update(secret).digest() 63 |         // Create iv 64 |         const iv = crypto.randomBytes(16) 65 |         // Create cipher and encrypt the data 66 |         const cipher = crypto.createCipheriv(this.encAlg, key, iv) 67 |         let encrypted = cipher.update(data) 68 |         encrypted = Buffer.concat([encrypted, cipher.final()]) 69 |         // Return iv and encrypted data 70 | 71 |         return { 72 |             iv: iv.toString('hex'), 73 |             encrypted, 74 |         } 75 |     } 76 | 77 |     /** 78 |      * @description Returns the decryption cipher 79 |      * @param secret 80 |      * @param iv 81 |      * @private 82 |      */ 83 |     _decrypt(secret, iv) { 84 |         // Create key hash 85 |         const key = crypto.createHash('sha256').update(secret).digest() 86 |         // Return decipher transform stream 87 | 88 |         return crypto.createDecipheriv(this.encAlg, key, Buffer.from(iv, 'hex')) 89 |     } 90 | 91 |     /** 92 |      * @description Upload single file to discord 93 |      * @param file {Object} 94 |      * @returns {Promise} 95 |      * @private 96 |      */ 97 |     _uploadFile(file) { 98 |         return this.rest.post(this.webhookURL, { files: [file], auth: false }) 99 |     } 100 | 101 |     /** 102 |      * @description Read file parts from discord and write them to the stream 103 |      * @param stream 104 |      * @param parts {Array} 105 |      * @returns {Promise} 106 |      */ 107 |     async read(stream, parts) { 108 |         for (const part of parts) { 109 |             let headers = {} 110 |             if (part.start || part.end) headers = { Range: `bytes=${part.start || 0}-${part.end || ''}` } 111 |             await new 
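// stream this part from its discord CDN url, honoring any byte sub-range for resumed downloads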
    /**
     * @description Read file parts from discord and write them to the given stream
     * @param stream
     * @param parts {Array}
     * @returns {Promise}
     */
    async read(stream, parts) {
        for (const part of parts) {
            let headers = {}
            if (part.start || part.end) headers = { Range: `bytes=${part.start || 0}-${part.end || ''}` }
            await new Promise((resolve, reject) => {
                // Reject early instead of throwing inside the response callback,
                // which would otherwise surface as an uncaught exception
                if (part.iv && !this.secret) return reject(new Error('secret not provided'))

                return https.get(part.url, { headers }, (res) => {
                    // Handle incoming data chunks from discord server, honoring backpressure
                    const handleData = async (data) => {
                        // https://nodejs.org/docs/latest-v16.x/api/stream.html#writablewritechunk-encoding-callback
                        if (!stream.write(data)) {
                            await new Promise((r) => stream.once('drain', r))
                        }
                    }
                    // Handle decryption if the file part is encrypted
                    if (part.iv) {
                        // Create decipher
                        const decipher = this._decrypt(this.secret, part.iv)
                        decipher.on('end', () => resolve())
                        decipher.on('error', (err) => reject(err))
                        res.pipe(decipher).pipe(new AsyncStreamProcessor(handleData))
                    } else {
                        res.pipe(new AsyncStreamProcessor(handleData))
                        res.on('end', () => resolve())
                    }

                    res.on('error', (err) => reject(err))
                })
            })
        }
        stream.end()
    }

    /**
     * @description Read from a readable stream and upload the file to discord in chunks
     * @param stream
     * @returns {Promise}
     */
    async write(stream) {
        const parts = []
        // This function will be executed to process each chunk of the file
        const processChunk = async (data, chunkCount) => {
            // Encrypt the data if a secret is provided
            let iv
            let encrypted
            if (this.secret) ({ iv, encrypted } = this._encrypt(this.secret, data))
            // Upload the chunk to discord
            const part = { name: uuid(), data: encrypted || data }
            const { attachments: [attachment] } = await this._uploadFile(part)
            // Store the part object at its chunk index so ordering survives concurrent uploads
            parts[chunkCount] = { url: attachment.url, size: attachment.size, iv }
        }

        return new Promise((resolve, reject) => {
            stream
                .on('aborted', () => reject(new Error('file upload aborted'))) // Reject the promise if the HTTP request is aborted mid-upload
                .pipe(new StreamChunker(this.chunkSize))
                .pipe(new AsyncStreamProcessorWithConcurrency(processChunk, this.maxUploadConc))
                .on('finish', () => resolve(parts))
                .on('error', (err) => reject(err))
        })
    }
}

module.exports = DiscordFileSystem
--------------------------------------------------------------------------------
/src/http/html/normalize.css:
--------------------------------------------------------------------------------
/*! normalize.css v8.0.1 | MIT License | github.com/necolas/normalize.css */

/* Document
   ========================================================================== */

/**
 * 1. Correct the line height in all browsers.
 * 2. Prevent adjustments of font size after orientation changes in iOS.
 */

html {
  line-height: 1.15; /* 1 */
  -webkit-text-size-adjust: 100%; /* 2 */
}

/* Sections
   ========================================================================== */

/**
 * Remove the margin in all browsers.
 */

body {
  margin: 0;
}

/**
 * Render the `main` element consistently in IE.
 */

main {
  display: block;
}

/**
 * Correct the font size and margin on `h1` elements within `section` and
 * `article` contexts in Chrome, Firefox, and Safari.
38 | */ 39 | 40 | h1 { 41 | font-size: 2em; 42 | margin: 0.67em 0; 43 | } 44 | 45 | /* Grouping content 46 | ========================================================================== */ 47 | 48 | /** 49 | * 1. Add the correct box sizing in Firefox. 50 | * 2. Show the overflow in Edge and IE. 51 | */ 52 | 53 | hr { 54 | box-sizing: content-box; /* 1 */ 55 | height: 0; /* 1 */ 56 | overflow: visible; /* 2 */ 57 | } 58 | 59 | /** 60 | * 1. Correct the inheritance and scaling of font size in all browsers. 61 | * 2. Correct the odd `em` font sizing in all browsers. 62 | */ 63 | 64 | pre { 65 | font-family: monospace, monospace; /* 1 */ 66 | font-size: 1em; /* 2 */ 67 | } 68 | 69 | /* Text-level semantics 70 | ========================================================================== */ 71 | 72 | /** 73 | * Remove the gray background on active links in IE 10. 74 | */ 75 | 76 | a { 77 | background-color: transparent; 78 | } 79 | 80 | /** 81 | * 1. Remove the bottom border in Chrome 57- 82 | * 2. Add the correct text decoration in Chrome, Edge, IE, Opera, and Safari. 83 | */ 84 | 85 | abbr[title] { 86 | border-bottom: none; /* 1 */ 87 | text-decoration: underline; /* 2 */ 88 | text-decoration: underline dotted; /* 2 */ 89 | } 90 | 91 | /** 92 | * Add the correct font weight in Chrome, Edge, and Safari. 93 | */ 94 | 95 | b, 96 | strong { 97 | font-weight: bolder; 98 | } 99 | 100 | /** 101 | * 1. Correct the inheritance and scaling of font size in all browsers. 102 | * 2. Correct the odd `em` font sizing in all browsers. 103 | */ 104 | 105 | code, 106 | kbd, 107 | samp { 108 | font-family: monospace, monospace; /* 1 */ 109 | font-size: 1em; /* 2 */ 110 | } 111 | 112 | /** 113 | * Add the correct font size in all browsers. 114 | */ 115 | 116 | small { 117 | font-size: 80%; 118 | } 119 | 120 | /** 121 | * Prevent `sub` and `sup` elements from affecting the line height in 122 | * all browsers. 123 | */ 124 | 125 | sub, 126 | sup { 127 | font-size: 75%; 128 | line-height: 0; 129 | position: relative; 130 | vertical-align: baseline; 131 | } 132 | 133 | sub { 134 | bottom: -0.25em; 135 | } 136 | 137 | sup { 138 | top: -0.5em; 139 | } 140 | 141 | /* Embedded content 142 | ========================================================================== */ 143 | 144 | /** 145 | * Remove the border on images inside links in IE 10. 146 | */ 147 | 148 | img { 149 | border-style: none; 150 | } 151 | 152 | /* Forms 153 | ========================================================================== */ 154 | 155 | /** 156 | * 1. Change the font styles in all browsers. 157 | * 2. Remove the margin in Firefox and Safari. 158 | */ 159 | 160 | button, 161 | input, 162 | optgroup, 163 | select, 164 | textarea { 165 | font-family: inherit; /* 1 */ 166 | font-size: 100%; /* 1 */ 167 | line-height: 1.15; /* 1 */ 168 | margin: 0; /* 2 */ 169 | } 170 | 171 | /** 172 | * Show the overflow in IE. 173 | * 1. Show the overflow in Edge. 174 | */ 175 | 176 | button, 177 | input { /* 1 */ 178 | overflow: visible; 179 | } 180 | 181 | /** 182 | * Remove the inheritance of text transform in Edge, Firefox, and IE. 183 | * 1. Remove the inheritance of text transform in Firefox. 184 | */ 185 | 186 | button, 187 | select { /* 1 */ 188 | text-transform: none; 189 | } 190 | 191 | /** 192 | * Correct the inability to style clickable types in iOS and Safari. 
193 | */ 194 | 195 | button, 196 | [type="button"], 197 | [type="reset"], 198 | [type="submit"] { 199 | -webkit-appearance: button; 200 | } 201 | 202 | /** 203 | * Remove the inner border and padding in Firefox. 204 | */ 205 | 206 | button::-moz-focus-inner, 207 | [type="button"]::-moz-focus-inner, 208 | [type="reset"]::-moz-focus-inner, 209 | [type="submit"]::-moz-focus-inner { 210 | border-style: none; 211 | padding: 0; 212 | } 213 | 214 | /** 215 | * Restore the focus styles unset by the previous rule. 216 | */ 217 | 218 | button:-moz-focusring, 219 | [type="button"]:-moz-focusring, 220 | [type="reset"]:-moz-focusring, 221 | [type="submit"]:-moz-focusring { 222 | outline: 1px dotted ButtonText; 223 | } 224 | 225 | /** 226 | * Correct the padding in Firefox. 227 | */ 228 | 229 | fieldset { 230 | padding: 0.35em 0.75em 0.625em; 231 | } 232 | 233 | /** 234 | * 1. Correct the text wrapping in Edge and IE. 235 | * 2. Correct the color inheritance from `fieldset` elements in IE. 236 | * 3. Remove the padding so developers are not caught out when they zero out 237 | * `fieldset` elements in all browsers. 238 | */ 239 | 240 | legend { 241 | box-sizing: border-box; /* 1 */ 242 | color: inherit; /* 2 */ 243 | display: table; /* 1 */ 244 | max-width: 100%; /* 1 */ 245 | padding: 0; /* 3 */ 246 | white-space: normal; /* 1 */ 247 | } 248 | 249 | /** 250 | * Add the correct vertical alignment in Chrome, Firefox, and Opera. 251 | */ 252 | 253 | progress { 254 | vertical-align: baseline; 255 | } 256 | 257 | /** 258 | * Remove the default vertical scrollbar in IE 10+. 259 | */ 260 | 261 | textarea { 262 | overflow: auto; 263 | } 264 | 265 | /** 266 | * 1. Add the correct box sizing in IE 10. 267 | * 2. Remove the padding in IE 10. 268 | */ 269 | 270 | [type="checkbox"], 271 | [type="radio"] { 272 | box-sizing: border-box; /* 1 */ 273 | padding: 0; /* 2 */ 274 | } 275 | 276 | /** 277 | * Correct the cursor style of increment and decrement buttons in Chrome. 278 | */ 279 | 280 | [type="number"]::-webkit-inner-spin-button, 281 | [type="number"]::-webkit-outer-spin-button { 282 | height: auto; 283 | } 284 | 285 | /** 286 | * 1. Correct the odd appearance in Chrome and Safari. 287 | * 2. Correct the outline style in Safari. 288 | */ 289 | 290 | [type="search"] { 291 | -webkit-appearance: textfield; /* 1 */ 292 | outline-offset: -2px; /* 2 */ 293 | } 294 | 295 | /** 296 | * Remove the inner padding in Chrome and Safari on macOS. 297 | */ 298 | 299 | [type="search"]::-webkit-search-decoration { 300 | -webkit-appearance: none; 301 | } 302 | 303 | /** 304 | * 1. Correct the inability to style clickable types in iOS and Safari. 305 | * 2. Change font properties to `inherit` in Safari. 306 | */ 307 | 308 | ::-webkit-file-upload-button { 309 | -webkit-appearance: button; /* 1 */ 310 | font: inherit; /* 2 */ 311 | } 312 | 313 | /* Interactive 314 | ========================================================================== */ 315 | 316 | /* 317 | * Add the correct display in Edge, IE 10+, and Firefox. 318 | */ 319 | 320 | details { 321 | display: block; 322 | } 323 | 324 | /* 325 | * Add the correct display in all browsers. 326 | */ 327 | 328 | summary { 329 | display: list-item; 330 | } 331 | 332 | /* Misc 333 | ========================================================================== */ 334 | 335 | /** 336 | * Add the correct display in IE 10+. 337 | */ 338 | 339 | template { 340 | display: none; 341 | } 342 | 343 | /** 344 | * Add the correct display in IE 10. 
345 | */ 346 | 347 | [hidden] { 348 | display: none; 349 | } 350 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 |

DDRIVE

2 | 3 |

Turn Discord into a datastore that can manage and store your files.

4 |

5 | 6 | Discord server 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 |

24 |
##### **DDrive** - A lightweight cloud storage system that uses Discord as the storage device, written in Node.js. Supports unlimited file sizes and unlimited storage, implemented with the Node.js streams API and multi-part uploads & downloads.

https://user-images.githubusercontent.com/59018146/167635903-48cdace0-c383-4e7d-a037-4a32eaa4ab69.mp4

#### Current stable branch `4.x`

### Live demo at [ddrive.forscht.dev](https://ddrive.forscht.dev/)

### Features
- Theoretically unlimited file size, thanks to splitting files into 24 MB chunks using the Node.js streams API.
- Simple yet robust HTTP front end.
- REST API with an OpenAPI 3.1 specification.
- Tested with 4000 GB of data stored on a single Discord channel (with a max file size of 16 GB).
- Supports basic auth, with optional read-only public access to the panel.
- Easily deployable on Heroku/Replit for use as private cloud storage.

## New Version 4.0

This major release is DDrive rewritten from scratch. It comes with the most requested features and several improvements.

- Now uses `postgres` to store file metadata. Why?
  - Once you have a huge amount of data stored on DDrive, startup becomes significantly slow, since DDrive has to fetch all the metadata from the Discord channel (for 3 TB of data it takes me 30+ minutes).
  - With postgres, deleting a file is much faster, because DDrive no longer has to delete messages on the Discord channel - it only needs to remove the metadata.
  - With postgres it's now possible to move or rename files/folders, which was impossible with the older version.
- Added support to `rename` files/folders.
- Added support to `move` files/folders (only via the API - not sure how to do it in the frontend, PRs welcome).
- Now uses `webhooks` instead of `bot/user tokens` to bypass the Discord rate limit.
- DDrive now uploads file chunks in parallel, with a configurable limit, which significantly increases upload speed. I was able to upload a `5 GB file in just 85 seconds`.
- Public access mode - it is now possible to give users read-only access with a single config var.
- Batch upload files - you can now upload multiple files at once from the panel. (DClone support has been removed in this version.)
- Bug fix - `download reset` on a few mobile devices.
- Added support for optional encryption of files uploaded to Discord.
- DDrive now has a proper REST API following the OpenAPI 3.1 standard.
- Added support for dark/light mode on the panel.

I spent several weeks finalizing this new version. Any support is highly appreciated - [Buy me a coffee](https://www.buymeacoffee.com/forscht)

### Requirements
- Node.js v16.x or Docker
- Postgres database and Discord webhook URLs
- Average technical knowledge

## Setup Guide
1. Clone this project.
2. Create a few webhook URLs. For better performance, and to avoid rate limits, create at least 5, with 1 webhook per text channel. ([How to create a webhook URL](https://support.discord.com/hc/en-us/articles/228383668-Intro-to-Webhooks))
3. Set up postgres using docker if you don't already have it running:
   - `cd .devcontainer`
   - `docker-compose up -d`
4. Copy `config/.env_sample` to `config/.env` and make the necessary changes.
5. Optional - If you have lots of webhook URLs, you can put them in `webhook.txt`, one URL per line.
6. Run - `npm run migration:up`
7. Run - `node bin/ddrive`
8. Navigate to `http://localhost:3000` in your browser.
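Once it's up, you can sanity-check the REST API from a short script. Below is a minimal sketch (assuming the default port `3000`, `AUTH` unset, and Node 18+ for global `fetch`); it calls the same `/api/directories/` endpoint the panel itself uses:

```javascript
// List the root directory via the REST API
const listRoot = async () => {
    const resp = await fetch('http://localhost:3000/api/directories/')
    if (resp.status !== 200) throw new Error(`Unexpected status: ${resp.status}`)
    const root = await resp.json()
    // Same shape the panel consumes: id, parentId and child entries
    console.log(`root=${root.id}`, `${root.child.directories.length} dirs`, `${root.child.files.length} files`)
}

listRoot()
```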
### How to keep it running forever
1. Install pm2 with `npm install -g pm2`
2. Run - `pm2 start bin/ddrive`
3. Run - `pm2 list` to check the status of ddrive
4. Run - `pm2 logs` to check ddrive logs

### Config variables explanation
```shell
# config/.env

# Required params
DATABASE_URL= # Postgres database URL as a valid postgres URI

WEBHOOKS={url1},{url2} # Webhook URLs separated by ","

# Optional params
PORT=3000 # HTTP port where the ddrive panel will run

REQUEST_TIMEOUT=60000 # Time in ms after which ddrive will abort requests to the discord API server. Set it high if you have a very slow connection

CHUNK_SIZE=25165824 # Chunk size in bytes. You should probably never touch this, and if you do, don't set it to more than 25MB - discord webhooks can't upload files bigger than 25MB

SECRET=someverysecuresecret # If you set this, every file on discord will be stored with strong encryption. It causes significantly higher CPU usage, so don't use it unless you're storing important stuff

AUTH=admin:admin # Username and password separated by ":". If you set this, the panel will ask for a username and password before granting access

PUBLIC_ACCESS=READ_ONLY_FILE # Use this option to give read-only access to the panel or files. Valid options:
# READ_ONLY_FILE - Users can only access file download links, not the panel
# READ_ONLY_PANEL - Users can browse the panel for files/directories but can't upload/delete/rename any file/folder

UPLOAD_CONCURRENCY=3 # ddrive will upload this many chunks in parallel to discord. If you have a fast connection, increasing it will significantly improve performance at the cost of CPU/disk usage

```

### Run using docker
```shell
docker run --rm -it -p 8080:8080 \
    -e PORT=8080 \
    -e WEBHOOKS={url1},{url2} \
    -e DATABASE_URL={database url} \
    --name ddrive forscht/ddrive
```
### One Click Deploy with Railway
[![Deploy on Railway](https://railway.app/button.svg)](https://railway.app/new/template/tL53xa)

### Setup tutorials
- Setup in under 4 minutes on a local/cloud server using `neon.tech` postgres - [Youtube](https://youtu.be/Zvr1BHjrYC0)
## API Usage
`npm install @forscht/ddrive`
```javascript
const { DFs, HttpServer } = require('@forscht/ddrive')

const DFsConfig = {
    chunkSize: 25165824,
    webhooks: ['webhookURL1', 'webhookURL2'], // Array of webhook URLs
    secret: 'somerandomsecret',
    maxUploadConc: 3, // UPLOAD_CONCURRENCY
    restOpts: {
        timeout: 60000, // REQUEST_TIMEOUT - must be a number, in ms
    },
}

const httpConfig = {
    authOpts: {
        auth: { user: 'admin', pass: 'admin' },
        publicAccess: 'READ_ONLY_FILE', // or 'READ_ONLY_PANEL'
    },
    port: 8080,
}

const run = async () => {
    // Create DFs instance
    const dfs = new DFs(DFsConfig)
    // Create HTTP server instance
    const httpServer = HttpServer(dfs, httpConfig)

    return httpServer.listen({ host: '0.0.0.0', port: httpConfig.port })
}

run().then()

```
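The `DFs` instance can also be used on its own as a streams-based store, without the HTTP server. Below is a minimal sketch of a round trip (file names and the single webhook placeholder are illustrative): `write()` consumes a readable stream and resolves with the list of uploaded parts - the same records ddrive normally persists in postgres - and `read()` streams those parts back out:

```javascript
const fs = require('fs')
const { DFs } = require('@forscht/ddrive')

const dfs = new DFs({ webhooks: ['webhookURL1'], secret: 'somerandomsecret' })

const roundTrip = async () => {
    // Upload: split ./backup.zip into chunks and post each one to a webhook
    const parts = await dfs.write(fs.createReadStream('./backup.zip'))
    // parts -> [{ url, size, iv }, ...] - store this wherever you like

    // Download: fetch the same parts and stream them into a local copy
    await dfs.read(fs.createWriteStream('./backup-copy.zip'), parts)
}

roundTrip()
```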
## Migrate from v3 to v4
Migrating DDrive from v3 to v4 is a one-way process: once you migrate to v4 and add new files, you can't migrate those new files back to v3. You can, however, still use v3 with the old files.

1. Clone this project.
2. Create a few webhooks (1 webhook per text channel). Do not create webhooks on the old text channel where you have already stored v3 data.
3. Pull the latest DDrive v3.
4. Start DDrive v3 with the option `--metadata=true`. Ex - `ddrive --channelId {id} --token {token} --metadata=true`
5. Open `localhost:{ddrive-port}/metadata` in a browser.
6. Save the JSON as old_data.json in the cloned ddrive directory.
7. Put a valid `DATABASE_URL` in `config/.env`.
8. Run `node bin/migrate old_data.json`.
9. After a few seconds, once the process is done, you should see the message `Migration is done`.

Feel free to create a [new issue](https://github.com/forscht/ddrive/issues/new) if it's not working for you or if you need any help.

[Discord Support server](https://discord.gg/3TCZRYafhW)
--------------------------------------------------------------------------------
/src/http/html/index.js:
--------------------------------------------------------------------------------
/* eslint-disable no-restricted-syntax,no-loop-func */
const table = document.getElementById('fm-table')
const tbody = document.getElementById('tbody')
const lightModeBtn = document.getElementById('light-mode-btn')
const darkModeBtn = document.getElementById('dark-mode-btn')

const fileInput = document.getElementById('file-input')

const prevBtn = document.getElementById('previous-btn')

const createFolderBtn = document.getElementById('create-btn')
const uploadBtn = document.getElementById('upload-btn')

const trashBtn = document.getElementById('trash-btn')
const renameBtn = document.getElementById('rename-btn')
const clipboardBtn = document.getElementById('clipboard-btn')

let currDirectory = ''
let parentDirectory = ''

//
// Utility functions
//
function makeid(length) {
    let result = ''
    const characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
    const charactersLength = characters.length
    for (let i = 0; i < length; i += 1) {
        result += characters.charAt(Math.floor(Math.random() * charactersLength))
    }

    return result
}

function sort(arr, key) {
    return arr.sort((a, b) => {
        if (a[key] < b[key]) return -1
        if (a[key] > b[key]) return 1

        return 0
    })
}

function download(url) {
    const a = document.createElement('a')
    a.href = url
    document.body.appendChild(a)
    a.click()
    document.body.removeChild(a)
}

function fallbackCopyTextToClipboard(text) {
    const textArea = document.createElement('textarea')
    textArea.value = text

    // Avoid scrolling to bottom
    textArea.style.top = '0'
    textArea.style.left = '0'
    textArea.style.position = 'fixed'

    document.body.appendChild(textArea)
    textArea.focus()
    textArea.select()
    document.execCommand('copy')
    document.body.removeChild(textArea)
}

function copyTextToClipboard(text) {
    // Fall back to the textarea hack when the Clipboard API is unavailable
    if (!navigator.clipboard) {
        fallbackCopyTextToClipboard(text)
        return
    }
    navigator.clipboard.writeText(text).catch(() => fallbackCopyTextToClipboard(text))
}

//
// Helper functions
//
function getSelected() {
    const [selected] = table.getElementsByClassName('selected')
    if (!selected) return undefined
    if (selected.classList.contains('file')) selected.type = 'file'
    else selected.type = 'folder'

    return selected
}

function resetBtns() {
createFolderBtn.disabled = false 85 | uploadBtn.disabled = false 86 | const selected = getSelected() 87 | trashBtn.disabled = !selected 88 | renameBtn.disabled = !selected 89 | clipboardBtn.disabled = !(selected && selected.type === 'file') 90 | } 91 | 92 | function disableAllBtns() { 93 | prevBtn.disabled = true 94 | createFolderBtn.disabled = true 95 | uploadBtn.disabled = true 96 | trashBtn.disabled = true 97 | renameBtn.disabled = true 98 | clipboardBtn.disabled = true 99 | } 100 | 101 | function clearTableAndResetButtons() { 102 | while (tbody.hasChildNodes()) tbody.removeChild(tbody.lastChild) 103 | resetBtns() 104 | } 105 | 106 | function setTheme(mode) { 107 | let currMode = mode 108 | if (!currMode) currMode = window.localStorage.getItem('mode') || 'dark' 109 | document.documentElement.classList.add(currMode) 110 | document.documentElement.classList.remove(currMode === 'dark' ? 'light' : 'dark') 111 | window.localStorage.setItem('mode', currMode) 112 | if (currMode === 'dark') { 113 | lightModeBtn.parentNode.classList.add('active') 114 | darkModeBtn.parentNode.classList.remove('active') 115 | } else { 116 | lightModeBtn.parentNode.classList.remove('active') 117 | darkModeBtn.parentNode.classList.add('active') 118 | } 119 | } 120 | 121 | // 122 | // API Operations 123 | // 124 | async function refreshTable() { 125 | const resp = await fetch(`/api/directories/${currDirectory}`) 126 | const body = await resp.json() 127 | if (resp.status !== 200) { 128 | disableAllBtns() 129 | 130 | return 131 | } 132 | currDirectory = body.id 133 | parentDirectory = body.parentId 134 | prevBtn.disabled = !parentDirectory 135 | clearTableAndResetButtons() 136 | for (const directory of sort(body.child.directories, 'name')) { 137 | tbody.appendChild(prepareFolderTR(directory)) 138 | } 139 | for (const file of sort(body.child.files, 'name')) { 140 | tbody.appendChild(prepareFileTR(file)) 141 | } 142 | } 143 | 144 | async function deleteFileOrFolder(type = 'file', id) { 145 | const url = `/api/${type === 'file' ? 'files' : 'directories'}/${id}` 146 | await fetch(url, { method: 'DELETE' }) 147 | await refreshTable() 148 | } 149 | 150 | async function uploadFile() { 151 | for (const file of fileInput.files) { 152 | // Create unique id for progress bar 153 | const id = makeid(20) 154 | 155 | // Prepare form data and create progress bar 156 | const formData = new FormData() 157 | formData.append('file', file) 158 | createProgressBar(id, `Uploading ${file.name}`) 159 | 160 | // Create XHR request 161 | const xhr = new XMLHttpRequest() 162 | xhr.open('POST', `/api/files/${currDirectory}`, true) 163 | 164 | // handle update progress 165 | xhr.upload.onprogress = (e) => { 166 | const progress = (e.total === 0) ? 
0 : (e.loaded / e.total) * 100 167 | updateProgressBar(id, Math.floor(progress)) 168 | } 169 | 170 | // Non 200 status handler 171 | xhr.onload = async () => { 172 | deleteProgressBar(id) 173 | await refreshTable() 174 | } 175 | 176 | // Network error handler 177 | xhr.onerror = async () => { 178 | deleteProgressBar(id) 179 | await refreshTable() 180 | } 181 | 182 | // Finally send data 183 | xhr.send(formData) 184 | } 185 | } 186 | 187 | async function navigatePrevFolder() { 188 | currDirectory = parentDirectory 189 | await refreshTable() 190 | } 191 | 192 | async function createFolder() { 193 | const id = makeid(20) // Temp id 194 | const input = createFolderTR(id) 195 | input.addEventListener('keypress', async (e) => { 196 | if (e.key === 'Enter') { 197 | await fetch(`/api/directories`, { 198 | method: 'POST', 199 | body: JSON.stringify({ name: input.value, parentId: currDirectory }), 200 | headers: { 'Content-Type': 'application/json' }, 201 | }) 202 | await refreshTable() 203 | } 204 | }) 205 | input.focus() 206 | disableAllBtns() 207 | } 208 | 209 | function copyToClipboard() { 210 | const selected = getSelected() 211 | if (selected && selected.type === 'file') { 212 | // eslint-disable-next-line no-restricted-globals 213 | const url = `${location.href.replace(/\/$/, '')}/api/files/${selected.id}/download` 214 | copyTextToClipboard(url) 215 | } 216 | } 217 | async function renameFileOrFolder() { 218 | const selected = getSelected() 219 | const inputText = document.createElement('input') 220 | inputText.style.height = '1rem' 221 | inputText.style.width = '80%' 222 | inputText.setAttribute('type', 'text') 223 | inputText.setAttribute('value', selected.textContent) 224 | selected.textContent = '' 225 | inputText.addEventListener('keypress', async (e) => { 226 | if (e.key === 'Enter') { 227 | await fetch(`/api/${selected.type === 'file' ? 
'files' : 'directories'}/${selected.id}`, { 228 | method: 'PUT', 229 | body: JSON.stringify({ name: inputText.value }), 230 | headers: { 'Content-Type': 'application/json' }, 231 | }) 232 | await refreshTable() 233 | } 234 | }) 235 | selected.appendChild(inputText) 236 | inputText.focus() 237 | disableAllBtns() 238 | } 239 | 240 | // 241 | // Event handlers 242 | // 243 | async function handleClick(e) { 244 | const selected = getSelected() 245 | if (selected && e.target.id === trashBtn.id) { 246 | await deleteFileOrFolder(selected.type, selected.id) 247 | } 248 | if (e.target.id === prevBtn.id && parentDirectory) { 249 | await navigatePrevFolder() 250 | } 251 | if (selected && e.target.id === renameBtn.id) { 252 | await renameFileOrFolder() 253 | } 254 | if (e.target.id === createFolderBtn.id) { 255 | await createFolder() 256 | } 257 | if (e.target.id === clipboardBtn.id) { 258 | copyToClipboard() 259 | } 260 | if (e.target.id === lightModeBtn.id) { 261 | setTheme('light') 262 | } 263 | if (e.target.id === darkModeBtn.id) { 264 | setTheme('dark') 265 | } 266 | if (selected) { 267 | selected.classList.remove('selected') 268 | resetBtns() 269 | } 270 | const { classList } = e.target 271 | if (classList.contains('file') || classList.contains('folder')) { 272 | classList.add('selected') 273 | resetBtns() 274 | } 275 | } 276 | 277 | async function handleDoubleClick(e) { 278 | const { classList } = e.target 279 | if (classList.contains('file')) { 280 | download(`/api/files/${e.target.id}/download`) 281 | } 282 | if (classList.contains('folder')) { 283 | currDirectory = e.target.id 284 | await refreshTable() 285 | } 286 | } 287 | 288 | function loadDataTable() { 289 | document.onclick = handleClick 290 | document.ondblclick = handleDoubleClick 291 | clearTableAndResetButtons() 292 | refreshTable().then() 293 | fileInput.addEventListener('change', () => uploadFile()) 294 | 295 | setTheme() 296 | } 297 | 298 | loadDataTable() 299 | -------------------------------------------------------------------------------- /src/http/html/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | DDrive - Encrypted Open Source File Sharing 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 |
30 |
31 | 35 | 58 |
59 |

60 | Cloud storage system using discord. 61 |

62 |
63 | 64 |
65 | 66 | 69 | 70 | 71 | 72 | 73 | 141 | 142 | 143 | 144 | 145 | 146 | 147 | 148 | 149 | 150 | 151 | 152 |
67 | DDrive File Manager 68 |
74 | 75 | 84 | 85 | 96 | 97 | 107 | 108 | 117 | 118 | 119 | 129 | 130 | 140 |
TitleSizeCreated At
153 |
154 |
155 | 156 |

Built with ❤️ by Darshan H.
Licensed under the MIT License.

158 |
159 | 160 | 292 |
293 | 294 | 295 | --------------------------------------------------------------------------------