├── .gitignore ├── .replit ├── Makefile ├── Procfile ├── README.md ├── app.json ├── bin ├── rclone.conf └── rclone.js ├── commands ├── download.js ├── fshare.js ├── gdrive.js ├── list.js ├── mirror.js ├── nugu.js ├── rclone.js └── ytdl.js ├── discord.js ├── index.js ├── package-lock.json ├── package.json ├── server.js └── utils.js /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | 3 | node_modules 4 | 5 | .env 6 | .htpasswd 7 | bot.pid 8 | bin/rclone 9 | -------------------------------------------------------------------------------- /.replit: -------------------------------------------------------------------------------- 1 | language = "nodejs" 2 | run = "npm start" 3 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | 2 | 3 | bot.pid: index.js $(wildcard commands/*.js) 4 | @echo "Restarting bot" 5 | @if [ -e bot.pid ]; then \ 6 | kill -TERM $$(cat bot.pid) || true; \ 7 | fi; 8 | 9 | @node . & echo $$! > bot.pid 10 | 11 | watch: ## Simple interval-polling watcher that will run `make` when there is something to be done. 12 | @while true; do $(MAKE) -q || $(MAKE); sleep 0.5; done 13 | -------------------------------------------------------------------------------- /Procfile: -------------------------------------------------------------------------------- 1 | bot: npm start 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Data Hoarder 2 | 3 | A bot for hoarding any data from Discord. 
4 | 5 | [![Deploy to Heroku](https://www.herokucdn.com/deploy/button.svg)](https://heroku.com/deploy) 6 | [![Run on Repl.it](https://repl.it/badge/github/vietcode/DataHoarder)](https://repl.it/github/vietcode/DataHoarder) 7 | 8 | ## Commands 9 | 10 | - `/mirror <url> [destpath]` - download any URL to a remote destination. 11 | - `/download <url> [destpath]` - mirror a direct link. 12 | - `/fshare <url> [destpath]` - mirror a file on Fshare.vn. 13 | - `/ytdl <url>` - mirror a YouTube video. 14 | - `/list <query>` - search for files matching the query. 15 | 16 | ## Getting Started 17 | 18 | - Install Node.js v14+ (for `discord.js@13` that uses optional chaining) 19 | - Set up a bot on Discord. 20 | - Add the bot to your server. 21 | - Clone the repo. 22 | - Create a `.env` file with the following variables, using the format 23 | `export VARIABLE_NAME=variable_value`, one line per variable: 24 | - For Discord and the bot: 25 | - `export DISCORD_TOKEN='...'`: The token of the bot retrieved from Discord. 26 | - `export COMMAND_PREFIX='/'`: (optional) custom prefix for the commands. Default to `/`. 27 | - `export COMMAND_SUFFIX=''`: (optional) custom suffix for the commands. Default to empty string. 28 | - `export COMMAND_MIRROR_ALIASES=''`: (optional) Space-separated list of aliases for `mirror` command. 29 | - `export MAX_JOBS=4`: (optional) maximum number of jobs to run in parallel. Default to 4. 30 | - For Rclone: 31 | - `export RCLONE_DRIVE_CLIENT_ID=''`: (optional) Google's client ID for rclone. 32 | - `export RCLONE_DRIVE_CLIENT_SECRET=''`: (optional) Google's client secret for rclone. 33 | - `export RCLONE_CONFIG_TARGET_TEAM_DRIVE='...'`: The ID of the shared drive to upload file to. 34 | - `export RCLONE_CONFIG_TARGET_ROOT_FOLDER_ID=''`: (optional) The ID of the folder to upload file to. 35 | - `export RCLONE_CONFIG_TARGET_TOKEN=''`: (optional) access token to the target folder. 
36 | - `export RCLONE_CONFIG_TARGET_SERVICE_ACCOUNT_FILE=''`: (optional) path to service account file with access to the target folder. 37 | - `export RCLONE_EXECUTABLE=''`: (optional) path to custom `rclone` executable, such as `gclone`, `fclone`. 38 | - `export RCLONE_CONFIG_TARGET_SERVICE_ACCOUNT_FILE_PATH=''`: (optional) path to the folder contains service account files. This setting is used by the variants of `rclone`. 39 | - For FShare: 40 | - `export FSHARE_USER_EMAIL='.'`: VIP email to login. 41 | - `export FSHARE_PASSWORD=''`: VIP password to login. 42 | - For Usenet: 43 | - `export USENET_POST_HOST=''`: Hostname of the news-server to post to. 44 | - `export USENET_POST_PORT=''`: Port of the news-server to post to. 45 | - `export USENET_POST_USER=''` 46 | - `export USENET_POST_PASSWORD=''` 47 | - `export USENET_POST_FROM=''`: The name and email of the poster. Default to "{Discord display name} <{Discord discriminator}@{Guild's name}>" 48 | - `export USENET_POST_GROUPS='alt.binaries.test'`: Comma-separated group names to post to. Default to "alt.binaries.test". 49 | - Inside the project, `npm install` to install dependencies. 50 | - `npm start` to start the bot. 51 | 52 | `DataHoarder` should appear on your Discord's roster. 
53 | -------------------------------------------------------------------------------- /app.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "Data Hoarder", 3 | "description": "Chatbot for hoarding data", 4 | "repository": "https://github.com/vietcode/DataHoarder", 5 | "keywords": ["bot", "chatbot", "Discord", "fshare", "youtube"] 6 | } 7 | -------------------------------------------------------------------------------- /bin/rclone.conf: -------------------------------------------------------------------------------- 1 | [source] 2 | type = drive 3 | scope = drive 4 | root_folder_id = 5 | stop_on_upload_limit = true 6 | server_side_across_configs = true 7 | 8 | [target] 9 | type = drive 10 | scope = drive 11 | root_folder_id = 12 | stop_on_upload_limit = true 13 | server_side_across_configs = true 14 | -------------------------------------------------------------------------------- /bin/rclone.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | /** 4 | * A wrapper around `rclone.js` to set our own config file. 
5 | */ 6 | 7 | const { join } = require("path"); 8 | 9 | require("nvar")(); 10 | 11 | if (!process.env.RCLONE_CONFIG) { 12 | const CWD = process.cwd(); 13 | const RCLONE_DIR = join(CWD, "bin"); 14 | process.env.RCLONE_CONFIG = join(RCLONE_DIR, "rclone.conf"); 15 | } 16 | 17 | const rclone = require("rclone.js"); 18 | 19 | const [/** node **/, /** file **/, commandName, ...args] = process.argv; 20 | if (commandName) { 21 | require("rclone.js/bin/rclone.js"); 22 | } 23 | 24 | module.exports = rclone; 25 | -------------------------------------------------------------------------------- /commands/download.js: -------------------------------------------------------------------------------- 1 | const { basename, extname, join } = require("path"); 2 | 3 | const debug = require("debug")("hoarder:download"); 4 | const Discord = require("discord.js"); 5 | const fetch = require("node-fetch"); 6 | const ps = require("ps-node"); 7 | 8 | const { bytes, progress, rcat } = require("../utils.js"); 9 | 10 | const { 11 | RCLONE_CONFIG_TARGET_TEAM_DRIVE, 12 | RCLONE_CONFIG_TARGET_ROOT_FOLDER_ID, 13 | } = process.env; 14 | 15 | const folder = RCLONE_CONFIG_TARGET_ROOT_FOLDER_ID || RCLONE_CONFIG_TARGET_TEAM_DRIVE; 16 | 17 | async function editReply(reply, content) { 18 | if (reply.deleted) return; 19 | 20 | return reply.edit(content).catch(error => { 21 | // Reply was deleted but request was still in progress. 22 | // @TODO: Ignore only when it's a unknown message error. 23 | }); 24 | } 25 | 26 | module.exports = { 27 | name: "download", 28 | aliases: [], 29 | description: "Downloads a file at an URL to a Google Drive location", 30 | guildOnly: true, 31 | params: [ 32 | { 33 | name: "url", 34 | type: "url", 35 | }, 36 | { 37 | name: "destpath", 38 | type: "text", 39 | }, 40 | ], 41 | usage: " [destpath]", 42 | /** 43 | * Downloads a link to a Google Drive folder 44 | * @param {Discord.Message} reply - The reply message. 
45 | * @param {URL} url The URL to download 46 | * @param {string} [destpath] Path to Google Drive to save file to. 47 | */ 48 | async execute(reply, url, destpath = basename(url.pathname)) { 49 | const { client, referencedMessage } = reply; 50 | 51 | let remote = "target"; 52 | 53 | // If `destpath` is a folder, append the filename from URL. 54 | if (!extname(destpath)) { 55 | destpath = join(destpath, basename(url.pathname)); 56 | } 57 | 58 | destpath = decodeURIComponent(destpath); 59 | 60 | debug(`download ${ url } ${ destpath }`); 61 | 62 | let header = `**File**: ${ destpath }`; 63 | 64 | editReply(reply, `${ header }\n**Status**: Pending`); 65 | 66 | const response = await fetch(url, { 67 | method: "get", 68 | headers: {}, 69 | }); 70 | 71 | if (!response.ok) { // res.status >= 200 && res.status < 300 72 | throw Error(response.status); 73 | } 74 | 75 | const fileSize = Number(response.headers.get("content-length")); 76 | 77 | if (fileSize) { 78 | header += ` (${ bytes(fileSize) })`; 79 | } 80 | 81 | editReply(reply, `${ header }\n**Status**: Downloading...`); 82 | 83 | function onRequestDeleted(deletedMessage) { 84 | if (deletedMessage.id === referencedMessage.id) { 85 | debug(`request deleted. Cancelling execution.`); 86 | 87 | ps.lookup({ 88 | command: "rclone", 89 | arguments: `${ remote }:${ destpath }`, 90 | }, (error, resultList) => { 91 | if (error) { 92 | console.error(error); 93 | return; 94 | } 95 | 96 | resultList.forEach(async (process) => { 97 | if( process ){ 98 | debug(`Deleting reply.`); 99 | response.body.destroy(); 100 | reply.delete(); 101 | ps.kill(process.pid); 102 | } 103 | }); 104 | }); 105 | 106 | client.off("messageDelete", onRequestDeleted); 107 | } 108 | } 109 | 110 | // If the request message is deleted, we cancel the job. 
111 | client.on("messageDelete", onRequestDeleted); 112 | 113 | progress(response.body, { 114 | delay: 1000, 115 | total: fileSize, 116 | }).on("progress", async ({ doneh, rateh, etaDate }) => { 117 | editReply(reply, `${ header }\n**Status**: ${ doneh } @ ${ rateh }/s. ETA: ${ etaDate.toLocaleString() }.`); 118 | }); 119 | 120 | response.body.on("error", (error) => { 121 | throw error; 122 | }); 123 | 124 | response.body.on("end", () => { 125 | editReply(reply, `${ header }\n**Status**: Finishing last bytes...`); 126 | }); 127 | 128 | const fileId = await rcat(response.body, `${ remote }:${ destpath }`); 129 | 130 | client.off("messageDelete", onRequestDeleted); 131 | 132 | editReply(reply, `${ header }\nhttps://drive.google.com/file/d/${ fileId }`); 133 | }, 134 | }; 135 | -------------------------------------------------------------------------------- /commands/fshare.js: -------------------------------------------------------------------------------- 1 | const debug = require("debug")("hoarder:fshare"); 2 | const Discord = require("discord.js"); 3 | const fetch = require("node-fetch"); 4 | 5 | const { 6 | FSHARE_APP_KEY = "L2S7R6ZMagggC5wWkQhX2+aDi467PPuftWUMRFSn", 7 | FSHARE_USER_EMAIL = "", 8 | FSHARE_PASSWORD = "", 9 | FSHARE_USER_AGENT = "Fshare/1 CFNetwork/1209 Darwin/20.2.0", 10 | } = process.env; 11 | 12 | const API_URL = "https://api.fshare.vn/api"; 13 | 14 | async function checkStatus(response) { 15 | if (response.ok) { // response.status >= 200 && response.status < 300 16 | return response; 17 | } else { 18 | const { msg } = await response.json(); 19 | throw Error(msg); 20 | } 21 | } 22 | 23 | /** 24 | * An FShare session with ID and token. 
25 | * @typedef {Object} Session 26 | * @property {string} session_id - The ID of the session 27 | * @property {string} token - Token 28 | */ 29 | 30 | /** 31 | * Sends a POST request 32 | * @param {string} endpoint - API endpoint to send a POST to 33 | * @param {Object} body - A JSON of payload to send 34 | * @param {Object} [headers={}] - Custom headers 35 | * @returns {Promise} 36 | */ 37 | async function post(endpoint, body, headers = {}) { 38 | return fetch(`${API_URL}${ endpoint }`, { 39 | method: "post", 40 | headers: { 41 | "User-Agent": FSHARE_USER_AGENT, 42 | "Content-Type": "application/json", 43 | ...headers, 44 | }, 45 | body: JSON.stringify(body), 46 | }) 47 | .then(checkStatus) 48 | .then(response => response.json()); 49 | } 50 | 51 | /** 52 | * Logins into Fshare 53 | * @param {Object} credentials 54 | * @returns {Promise} 55 | */ 56 | async function login({ 57 | user_email = FSHARE_USER_EMAIL, 58 | password = FSHARE_PASSWORD, 59 | app_key = FSHARE_APP_KEY, 60 | } = {}) { 61 | return post("/user/login", { 62 | user_email, 63 | password, 64 | app_key, 65 | }); 66 | } 67 | 68 | module.exports = { 69 | name: "fshare", 70 | aliases: [], 71 | description: "Mirroring from fshare.vn", 72 | guildOnly: true, 73 | params: [ 74 | { 75 | name: "url", 76 | type: "url", 77 | }, 78 | { 79 | name: "destpath", 80 | type: "text", 81 | }, 82 | ], 83 | usage: " [destpath]", 84 | /** 85 | * Downloads an FShare link 86 | * @param {Discord.Message} reply - The reply message. 87 | * @param {URL} url The URL to download 88 | * @param {string} [destpath] Path to Google Drive to save file to. 89 | */ 90 | async execute(reply, url, password = "", destpath = "") { 91 | // Removes any search params. 
92 | url.search = ""; 93 | 94 | debug("Logging in..."); 95 | const { token, session_id } = await login(); 96 | debug(`Retrieving download link for ${ url }`); 97 | const { location } = await post("/session/download", { 98 | url, 99 | token, 100 | password, 101 | }, { 102 | // Fshare requires the `session_id` set in cookie. 103 | "Cookie": `session_id=${ session_id }`, 104 | }); 105 | 106 | debug(`Downloading ${ location }`); 107 | const commands = /** @type { Discord.Collection } */(reply.client.commands); 108 | return commands.get("download").execute(reply, new URL(location), destpath); 109 | }, 110 | }; 111 | -------------------------------------------------------------------------------- /commands/gdrive.js: -------------------------------------------------------------------------------- 1 | const debug = require("debug")("hoarder:gdrive"); 2 | 3 | const rclone = require("../bin/rclone.js"); 4 | 5 | const REGEX = /(?:drive\/)?(?:u\/\d\/)?(?:mobile\/)?(file|folders)\/(?:d\/)?([-\w]+)[?+]?\/?(w+)?/; 6 | 7 | module.exports = { 8 | name: "gdrive", 9 | aliases: [], 10 | description: "Mirroring from Google Drive", 11 | guildOnly: true, 12 | params: [ 13 | { 14 | name: "url", 15 | type: "url", 16 | }, 17 | { 18 | name: "destpath", 19 | type: "text", 20 | }, 21 | ], 22 | usage: " [destpath]", 23 | /** 24 | * Downloads a Google Drive link 25 | * @param {Discord.Message} reply - The reply message. 26 | * @param {URL} url The URL to download 27 | * @param {string} [destpath] Path to Google Drive to save file to. 
28 | */ 29 | async execute(reply, url, destpath = "") { 30 | const { pathname } = url; 31 | const [, type, id] = pathname.match(REGEX) || []; 32 | 33 | if (!id) { 34 | throw new Error("Invalid Google Drive link"); 35 | } 36 | 37 | debug(`Copy ${ type }/${ id }`); 38 | 39 | reply.edit(`**Status**: Pending`); 40 | 41 | const args = []; 42 | if (type === "file") { 43 | args.push("backend", "copyid", "source:", id, `target:${ destpath }`); 44 | } else if (type === "folders") { 45 | args.push("copy", "source:", `target:${ destpath }`); 46 | process.env.RCLONE_CONFIG_SOURCE_ROOT_FOLDER_ID = id; 47 | } 48 | 49 | args.push("--stats-one-line", "-P", "--stats", "2s"); 50 | 51 | debug(`rclone ${ args.join(" ") }`); 52 | 53 | return new Promise((resolve, reject) => { 54 | const subprocess = rclone(...args); 55 | let status = ""; 56 | 57 | subprocess.stdout.on("data", (data) => { 58 | status += data; 59 | // Truncate to the last 1997 characters. 60 | status = status.substring(status.length - 1997); 61 | if (status.length === 1997) { 62 | status = "..." + status; 63 | } 64 | reply.edit(`${ status }`); 65 | }); 66 | 67 | // Throws error if there is an issue spawning rclone. 68 | subprocess.on("error", (error) => { 69 | reject(new Error(`rclone ${ args.join(" ") } encountered error ${ error.message }`)); 70 | }); 71 | 72 | subprocess.on("exit", () => { 73 | debug(`rclone exited`); 74 | resolve(); 75 | }); 76 | }); 77 | 78 | } 79 | } 80 | -------------------------------------------------------------------------------- /commands/list.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: "list", 3 | aliases: [], 4 | description: "List files based on a query", 5 | async: true, 6 | params: [ 7 | { 8 | name: "query", 9 | type: "text", 10 | }, 11 | ], 12 | usage: "<query>", 13 | /** 14 | * List files matching a query 15 | * @param {Discord.Message} reply - The reply message. 
16 | * @param {string} query The query text to search for 17 | */ 18 | async execute(reply, query = "") { 19 | const commands = /** @type { Discord.Collection } */(reply.client.commands); 20 | const args = [ 21 | "lsf", 22 | "--separator", " | ", 23 | "-R", "--files-only", 24 | "--format", "tsp", 25 | "--ignore-case", 26 | "--include", `**${ query }**`, 27 | "target:" 28 | ]; 29 | 30 | return commands.get("rclone").execute(reply, ...args); 31 | }, 32 | }; 33 | -------------------------------------------------------------------------------- /commands/mirror.js: -------------------------------------------------------------------------------- 1 | const debug = require("debug")("hoarder:mirror"); 2 | const Discord = require("discord.js"); 3 | 4 | const { 5 | COMMAND_MIRROR_ALIASES = "", 6 | } = process.env; 7 | 8 | module.exports = { 9 | name: "mirror", 10 | aliases: COMMAND_MIRROR_ALIASES.split(/\s+/), 11 | description: "Mirroring a URL", 12 | guildOnly: true, 13 | params: [ 14 | { 15 | name: "url", 16 | type: "url", 17 | }, 18 | ], 19 | usage: "", 20 | /** 21 | * Handles the mirror request 22 | * @param {Discord.Message} reply - The reply message. 23 | * @param {URL} url The URL to download 24 | */ 25 | async execute(reply, url, ...args) { 26 | const commands = /** @type { Discord.Collection } */(reply.client.commands); 27 | const { host } = url; 28 | 29 | if (/(www\.)?fshare\.vn/.test(host)) { 30 | return commands.get("fshare").execute(reply, url, ...args); 31 | } else if (/(www\.)?(youtu\.be|youtube\.com)/.test(host)) { 32 | return commands.get("ytdl").execute(reply, url, ...args); 33 | } else if (/drive\.google\.com/.test(host)) { 34 | return commands.get("gdrive").execute(reply, url, ...args); 35 | } else { 36 | // Direct links. 
37 | return commands.get("download").execute(reply, url, ...args); 38 | } 39 | }, 40 | }; 41 | -------------------------------------------------------------------------------- /commands/nugu.js: -------------------------------------------------------------------------------- 1 | const { basename, extname } = require("path"); 2 | 3 | const debug = require("debug")("hoarder:nugu"); 4 | const Discord = require("discord.js"); 5 | const nugu = require("nugu"); 6 | 7 | const { 8 | USENET_POST_PROGRESS, 9 | USENET_POST_GROUPS, 10 | } = process.env; 11 | 12 | module.exports = { 13 | name: "nugu", 14 | aliases: [], 15 | description: "Posts a file to a Newserver", 16 | guildOnly: true, 17 | params: [ 18 | { 19 | name: "sourcePath", 20 | type: "text", 21 | }, 22 | { 23 | name: "groups", 24 | type: "text", 25 | }, 26 | ], 27 | usage: "", 28 | /** 29 | * Handles the posting request 30 | * @param {Discord.Message} reply - The reply message. 31 | * @param {string} sourcePath The path to the source to post. 32 | * @param {string} [groups] Comma-separated list of groups to post to. 33 | */ 34 | async execute(reply, sourcePath, groups = USENET_POST_GROUPS) { 35 | const referencedMessage = reply.referencedMessage; 36 | const { channel, author } = referencedMessage; 37 | 38 | const { displayName, guild } = channel.members.get(author.id); 39 | 40 | let remote = "target"; 41 | const title = basename(sourcePath); 42 | 43 | debug(`upload ${ sourcePath }`); 44 | 45 | let header = `**Source**: ${ sourcePath }`; 46 | 47 | reply.edit(`${ header }\n**Status**: Uploading`); 48 | 49 | // @TODO: Parse file name to set `nzb-tag` and `nzb-category` and `meta` options. 50 | 51 | const nzb = await nugu(`${ remote }:${ sourcePath }`, { 52 | groups, 53 | // Use the reply's ID in place of filename in subject . 54 | subject: `{comment} [{0filenum}/{files}] - "${ reply.id }" yEnc ({part}/{parts}) {filesize} {comment2}`, 55 | // Use the requester's nickname. 
56 | from: `${ displayName } <${ author.discriminator }@${ guild.name }.>`, 57 | // Places the title in of the NZB. 58 | "nzb-title": title, 59 | // Uses the remote file ID as name, but keeps extension. 60 | filename: ({ ID, Name }) => `${ ID }${ extname(Name) }`, 61 | progress: USENET_POST_PROGRESS || function({ articles, files, totalSize, read, posted, checked }) { 62 | reply.edit(`${ header } (${ totalSize })\n**Status**: ${ posted }/${ articles } article(s) posted. ${ checked } article(s) checked.`); 63 | }, 64 | // Enable 1 connection for post checking. 65 | "check-connections": 1, 66 | }); 67 | 68 | // Create an embed and attach the NZB file to it. 69 | const embed = { 70 | files: [{ 71 | name: `${ title }.nzb`, 72 | attachment: nzb, 73 | }], 74 | } 75 | 76 | // Can't attach a file to an existing reply, so we delete it and create new one. 77 | await reply.delete(); 78 | 79 | referencedMessage.reply({ 80 | embed, 81 | }); 82 | }, 83 | }; 84 | -------------------------------------------------------------------------------- /commands/rclone.js: -------------------------------------------------------------------------------- 1 | const debug = require("debug")("hoarder:rclone"); 2 | const Discord = require("discord.js"); 3 | 4 | const rclone = require("../bin/rclone.js"); 5 | 6 | const COMMANDS = [ 7 | "about", 8 | "check", 9 | "cryptcheck", 10 | "cryptdecode", 11 | "hashsum", 12 | "ls", 13 | "lsd", 14 | "lsf", 15 | "lsjson", 16 | "lsl", 17 | "md5sum", 18 | "sha1sum", 19 | "size", 20 | "tree", 21 | "version", 22 | ]; 23 | 24 | module.exports = { 25 | name: "rclone", 26 | aliases: [], 27 | description: "Executes rclone commands", 28 | async: true, 29 | params: [ 30 | { 31 | name: "command", 32 | type: "text", 33 | }, 34 | ], 35 | usage: " [arg...]", 36 | /** 37 | * Executes rclone commands 38 | * @param {Discord.Message} reply - The reply message. 39 | * @param {string} command the command to execute. 
40 | */ 41 | async execute(reply, command, ...args) { 42 | // Only support a few commands over chat. 43 | if (COMMANDS.indexOf(command) === -1) { 44 | debug(`Attempted rclone ${ command } ${ args.join(" ") }`); 45 | reply.edit("Can't execute that command."); 46 | return; 47 | } 48 | 49 | return new Promise((resolve, reject) => { 50 | debug(`rclone ${ command } ${ args.join(" ") }`); 51 | 52 | const subprocess = rclone[command](...args); 53 | 54 | // Collects both stdout and stderr from rclone to reply with. 55 | let stdout = "", stderr = ""; 56 | subprocess.stdout.on("data", (data) => { 57 | stdout += data; 58 | }); 59 | 60 | subprocess.stderr.on("data", (data) => { 61 | stderr += data; 62 | }); 63 | 64 | subprocess.stdout.on("end", () => { 65 | stdout = stdout.substring(0, 1997); 66 | if (stdout.length === 1997) { 67 | stdout += "..."; 68 | } 69 | stdout && reply.edit(stdout); 70 | }); 71 | 72 | subprocess.stderr.on("end", () => { 73 | stderr = stderr.substring(0, 1997); 74 | if (stderr.length === 1997) { 75 | stderr += "..."; 76 | } 77 | stderr && reply.edit(stderr); 78 | }); 79 | 80 | // Throws error if there is an issue spawning rclone. 
81 | subprocess.on("error", (error) => { 82 | reject(new Error(`rclone ${ command } ${ args.join(" ") } encountered error ${ error.message }`)); 83 | }); 84 | 85 | subprocess.on("exit", resolve); 86 | }); 87 | }, 88 | }; 89 | -------------------------------------------------------------------------------- /commands/ytdl.js: -------------------------------------------------------------------------------- 1 | const { extname } = require("path"); 2 | const { spawn, ChildProcess } = require("child_process"); 3 | 4 | const debug = require("debug")("hoarder:ytdl"); 5 | const Discord = require("discord.js"); 6 | const { getInfo, chooseFormat, downloadFromInfo } = require("ytdl-core"); 7 | const ffmpeg = require("@ffmpeg-installer/ffmpeg"); 8 | 9 | const { bytes, progress, rcat } = require("../utils.js"); 10 | 11 | const { 12 | RCLONE_CONFIG_TARGET_TEAM_DRIVE, 13 | RCLONE_CONFIG_TARGET_ROOT_FOLDER_ID, 14 | USER_AGENT = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36", 15 | } = process.env; 16 | 17 | const folder = RCLONE_CONFIG_TARGET_ROOT_FOLDER_ID || RCLONE_CONFIG_TARGET_TEAM_DRIVE; 18 | 19 | module.exports = { 20 | name: "ytdl", 21 | aliases: [], 22 | description: "Mirroring a YouTube Video", 23 | guildOnly: true, 24 | params: [ 25 | { 26 | name: "url", 27 | type: "url", 28 | }, 29 | { 30 | name: "filename", 31 | type: "text", 32 | }, 33 | ], 34 | usage: "", 35 | /** 36 | * Handles the mirror request 37 | * @param {Discord.Message} reply - The reply message. 
38 | * @param {URL} url The URL to download 39 | */ 40 | async execute(reply, url, filename = "") { 41 | let header = `**File**: ${ filename }`; 42 | reply.edit(`${ header }\n**Status**: Pending`); 43 | 44 | debug(`Retrieving infomation for ${ url }`); 45 | const info = await getInfo(url); 46 | 47 | const { 48 | videoDetails: { 49 | videoId, 50 | publishDate, 51 | }, 52 | formats, 53 | } = info; 54 | 55 | let audioQuality = "highestaudio"; 56 | 57 | // Checks if provided filename contains keyword for video quality, or default to highest. 58 | let [, videoQuality = "highestvideo" ] = filename.match(/\.(4320p|3072p|2160p|1080p|720p|480p|360p|270p|240p|180p|144p)\./) || []; 59 | // Checks if provided filename indicates request for HDR content. 60 | const isHDR = filename.indexOf(".HDR.") > -1; 61 | videoQuality += isHDR ? " HDR" : ""; 62 | 63 | let audioFormat, videoFormat; 64 | 65 | try { 66 | audioFormat = chooseFormat(formats, { 67 | // Only want formats with audio only 68 | filter: "audioonly", 69 | // We just want highest audio quality. 70 | quality: audioQuality, 71 | }); 72 | } catch(error) { 73 | reply.edit(`${ header }\n**Error**: No such format found: ${ audioQuality }`); 74 | return; 75 | }; 76 | 77 | try { 78 | videoFormat = chooseFormat(formats, { 79 | // Filters for video formats that match requesting quality. 80 | filter: ({ qualityLabel, hasVideo, hasAudio }) => { 81 | if (!hasVideo) return false; 82 | if (hasAudio) return false; 83 | if (videoQuality === "highestvideo") return true; 84 | 85 | return qualityLabel.indexOf(videoQuality) > -1; 86 | }, 87 | // We want higest matching video quality. 
88 | quality: "highestvideo", 89 | }); 90 | } catch(error) { 91 | reply.edit(`${ header }\n**Error**: No such format found: ${ videoQuality }`); 92 | return; 93 | }; 94 | 95 | const { audioCodec, contentLength: audioSize } = audioFormat; 96 | const { qualityLabel: resolution, videoCodec, contentLength: videoSize } = videoFormat; 97 | 98 | const fileSize = parseInt(audioSize) + parseInt(videoSize); 99 | 100 | if (!filename) { 101 | filename = [ 102 | videoId, 103 | publishDate.substring(0, 4), // year 104 | resolution.replace(" HDR, HFR", ""), // resolution 105 | "YT.WEB-DL", 106 | videoCodec.toUpperCase(), // VP9, VP8, H.264,... 107 | isHDR? "HDR" : "", 108 | audioCodec.toUpperCase(), // OPUS, VORBIS, AAC,... 109 | "mkv", 110 | ].filter(Boolean).join("."); 111 | } 112 | 113 | debug(`Downloading audio and video for ${ videoId }`); 114 | 115 | header = `**File**: ${ filename } (${ bytes(fileSize) })`; 116 | reply.edit(`${ header }\n**Status**: Downloading...`); 117 | 118 | let container = "matroska"; 119 | 120 | switch (extname(filename)) { 121 | case ".mkv": 122 | container = "matroska"; 123 | break; 124 | case ".mp4": 125 | container = "mp4"; 126 | break; 127 | case ".webm": 128 | container = "webm"; 129 | break; 130 | } 131 | 132 | const audio = downloadFromInfo(info, { 133 | format: audioFormat, 134 | requestOptions: { 135 | headers: { 136 | "User-Agent": USER_AGENT, 137 | }, 138 | }, 139 | }).on("end", () => { 140 | debug(`Finished downloading audio for ${ videoId }`); 141 | }); 142 | const video = downloadFromInfo(info, { 143 | format: videoFormat, 144 | requestOptions: { 145 | headers: { 146 | "User-Agent": USER_AGENT, 147 | }, 148 | }, 149 | }).on("end", () => { 150 | debug(`Finished downloading video for ${ videoId }`); 151 | }); 152 | 153 | const ffmpegProcess = spawn(ffmpeg.path, [ 154 | // Remove ffmpeg's console spamming 155 | "-loglevel", "8", "-hide_banner", 156 | // Set inputs 157 | "-i", "pipe:3", 158 | "-i", "pipe:4", 159 | // Map audio & video from 
streams 160 | "-map", "0:a", 161 | "-map", "1:v", 162 | // Keep encoding 163 | "-c:v", "copy", 164 | // Define output container 165 | "-f", container, "pipe:5", 166 | ], { 167 | windowsHide: true, 168 | stdio: [ 169 | /* Standard: stdin, stdout, stderr */ 170 | "inherit", "inherit", "inherit", 171 | /* Custom: pipe:3, pipe:4, pipe:5 */ 172 | "pipe", "pipe", "pipe", 173 | ], 174 | }); 175 | 176 | const stdout = ffmpegProcess.stdio[5]; 177 | stdout.on("end", () => { 178 | debug(`Finished remuxing video and audio into a container for ${ videoId }`); 179 | }); 180 | 181 | progress(stdout, { 182 | delay: 1000, 183 | total: fileSize, 184 | }).on("progress", ({ doneh, rateh, etaDate }) => { 185 | reply.edit(`${ header }\n**Status**: ${ doneh } @ ${ rateh }/s. ETA: ${ etaDate.toLocaleString() }.`); 186 | }); 187 | 188 | const promise = rcat(stdout, `target:${ filename }`); 189 | 190 | audio.pipe(ffmpegProcess.stdio[3]); 191 | video.pipe(ffmpegProcess.stdio[4]); 192 | 193 | const fileId = await promise; 194 | debug(`Finished uploading ${ filename }`); 195 | reply.edit(`${ header }\nhttps://drive.google.com/file/d/${ fileId }`); 196 | }, 197 | }; 198 | -------------------------------------------------------------------------------- /discord.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | const fs = require("fs"); 4 | 5 | const debug = require("debug")("hoarder:discord"); 6 | const shellParser = require("shell-parser"); 7 | const { Client, Intents, Collection } = require("discord.js"); 8 | const Queue = require("fastq"); 9 | 10 | require("nvar")(); 11 | const { 12 | COMMAND_PREFIX = "/", COMMAND_SUFFIX = "", 13 | DISCORD_TOKEN, 14 | MAX_JOBS = 4, 15 | } = process.env; 16 | 17 | // Regex to check if a message contains command. 
18 | const COMMAND_REGEX = new RegExp(`^${ COMMAND_PREFIX }([a-z-]+)${ COMMAND_SUFFIX }\\s+`); 19 | 20 | const client = new Client({ 21 | intents: [ 22 | Intents.FLAGS.GUILDS, 23 | Intents.FLAGS.GUILD_MESSAGES, 24 | Intents.FLAGS.GUILD_MESSAGE_REACTIONS, 25 | Intents.FLAGS.DIRECT_MESSAGES, 26 | Intents.FLAGS.DIRECT_MESSAGE_REACTIONS, 27 | ], 28 | }); 29 | const commands = client.commands = new Collection(); 30 | 31 | const commandFiles = fs.readdirSync("./commands").filter(file => file.endsWith(".js")); 32 | 33 | for (const file of commandFiles) { 34 | /** @type {object} */ 35 | const command = require(`./commands/${file}`); 36 | 37 | // set a new item in the Collection 38 | // with the key as the command name and the value as the exported module 39 | commands.set(command.name, command); 40 | } 41 | 42 | const jobs = Queue(worker, parseInt(MAX_JOBS)); 43 | 44 | client.on("ready", () => { 45 | console.log(`Logged in as ${client.user.tag}!`); 46 | 47 | // @TODO: enqueues broken or pending tasks from message list. 48 | }); 49 | 50 | client.on("message", async message => { 51 | const [, commandName] = message.content.trim().match(COMMAND_REGEX) || []; 52 | if (!commandName) return; 53 | 54 | // Parse for arguments, ignoring the first item because it's the command name. 55 | const [, ...args] = shellParser(message.content); 56 | 57 | debug(`Received command ${ message.content.trim() }`); 58 | 59 | const command = commands.get(commandName) 60 | || commands.find(cmd => cmd.aliases && cmd.aliases.includes(commandName)); 61 | 62 | if (!command) return; 63 | 64 | message.suppressEmbeds(true); 65 | 66 | if (command.guildOnly && message.channel.type === "dm") { 67 | return message.reply(`I can't execute that command inside DMs!`); 68 | } 69 | 70 | const params = command.params || []; 71 | let errors = []; 72 | 73 | if (params.length && !args.length) { 74 | errors.push(`You didn't provide any arguments, ${message.author}!`); 75 | } 76 | 77 | // Validates arguments. 
78 | args.forEach((arg, index) => { 79 | const { name, type = "text" } = params[index] || {}; 80 | 81 | switch (type) { 82 | case "url": 83 | try { 84 | const url = new URL(arg); 85 | if (url.protocol === "file:") { 86 | throw new Error("File protocol is not supported"); 87 | } 88 | args[index] = url; 89 | } catch(_) { 90 | errors.push(`Invalid argument for ${ name }`); 91 | } 92 | break; 93 | 94 | default: 95 | break; 96 | } 97 | }); 98 | 99 | // Replies with error if any. 100 | if (errors.length) { 101 | if (command.usage) { 102 | errors.push(`Usage: \`${ COMMAND_PREFIX }${command.name}${ COMMAND_SUFFIX} ${command.usage}\``); 103 | } 104 | 105 | return message.channel.send(errors.join("\n")); 106 | } 107 | 108 | // For async commands, we execute them without adding to queue. 109 | if (command.async) { 110 | const reply = await message.reply("Status: Executing"); 111 | await command.execute(reply, ...args); 112 | return; 113 | } 114 | 115 | // Acknowledge request received. 116 | const reply = await message.reply("Status: In Queued"); 117 | 118 | // Push request into our job queue. 119 | // We only push the reply object so each command can update it if need to. 120 | jobs.push({ reply, command, args, }, (error, reply) => { 121 | if (error) { 122 | reply.edit(`Status: Error - ${ error.message }`); 123 | return; 124 | } 125 | }); 126 | }); 127 | 128 | client.login(DISCORD_TOKEN); 129 | 130 | // A simple worker that is run for each job. 131 | async function worker({ reply, command, args, }, cb) { 132 | try { 133 | // Removes the reaction to indicate the request being started. 
134 | reply.edit("Status: Starting"); 135 | await command.execute(reply, ...args); 136 | cb(null, reply); 137 | } catch(error) { 138 | cb(error, reply); 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | require("nvar")(); 4 | 5 | require("./discord.js"); 6 | require("./server.js"); 7 | -------------------------------------------------------------------------------- /package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "data-hoarder", 3 | "version": "0.1.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "@discordjs/collection": { 8 | "version": "0.1.6", 9 | "resolved": "https://registry.npmjs.org/@discordjs/collection/-/collection-0.1.6.tgz", 10 | "integrity": "sha512-utRNxnd9kSS2qhyivo9lMlt5qgAUasH2gb7BEOn6p0efFh24gjGomHzWKMAPn2hEReOPQZCJaRKoURwRotKucQ==" 11 | }, 12 | "@discordjs/form-data": { 13 | "version": "3.0.1", 14 | "resolved": "https://registry.npmjs.org/@discordjs/form-data/-/form-data-3.0.1.tgz", 15 | "integrity": "sha512-ZfFsbgEXW71Rw/6EtBdrP5VxBJy4dthyC0tpQKGKmYFImlmmrykO14Za+BiIVduwjte0jXEBlhSKf0MWbFp9Eg==", 16 | "requires": { 17 | "asynckit": "^0.4.0", 18 | "combined-stream": "^1.0.8", 19 | "mime-types": "^2.1.12" 20 | } 21 | }, 22 | "@ffmpeg-installer/darwin-x64": { 23 | "version": "4.1.0", 24 | "resolved": "https://registry.npmjs.org/@ffmpeg-installer/darwin-x64/-/darwin-x64-4.1.0.tgz", 25 | "integrity": "sha512-Z4EyG3cIFjdhlY8wI9aLUXuH8nVt7E9SlMVZtWvSPnm2sm37/yC2CwjUzyCQbJbySnef1tQwGG2Sx+uWhd9IAw==", 26 | "optional": true 27 | }, 28 | "@ffmpeg-installer/ffmpeg": { 29 | "version": "1.0.20", 30 | "resolved": "https://registry.npmjs.org/@ffmpeg-installer/ffmpeg/-/ffmpeg-1.0.20.tgz", 31 | "integrity": 
"sha512-wbgd//6OdwbFXYgV68ZyKrIcozEQpUKlvV66XHaqO2h3sFbX0jYLzx62Q0v8UcFWN21LoxT98NU2P+K0OWsKNA==", 32 | "requires": { 33 | "@ffmpeg-installer/darwin-x64": "4.1.0", 34 | "@ffmpeg-installer/linux-arm": "4.1.3", 35 | "@ffmpeg-installer/linux-arm64": "4.1.4", 36 | "@ffmpeg-installer/linux-ia32": "4.1.0", 37 | "@ffmpeg-installer/linux-x64": "4.1.0", 38 | "@ffmpeg-installer/win32-ia32": "4.1.0", 39 | "@ffmpeg-installer/win32-x64": "4.1.0" 40 | } 41 | }, 42 | "@ffmpeg-installer/linux-arm": { 43 | "version": "4.1.3", 44 | "resolved": "https://registry.npmjs.org/@ffmpeg-installer/linux-arm/-/linux-arm-4.1.3.tgz", 45 | "integrity": "sha512-NDf5V6l8AfzZ8WzUGZ5mV8O/xMzRag2ETR6+TlGIsMHp81agx51cqpPItXPib/nAZYmo55Bl2L6/WOMI3A5YRg==", 46 | "optional": true 47 | }, 48 | "@ffmpeg-installer/linux-arm64": { 49 | "version": "4.1.4", 50 | "resolved": "https://registry.npmjs.org/@ffmpeg-installer/linux-arm64/-/linux-arm64-4.1.4.tgz", 51 | "integrity": "sha512-dljEqAOD0oIM6O6DxBW9US/FkvqvQwgJ2lGHOwHDDwu/pX8+V0YsDL1xqHbj1DMX/+nP9rxw7G7gcUvGspSoKg==", 52 | "optional": true 53 | }, 54 | "@ffmpeg-installer/linux-ia32": { 55 | "version": "4.1.0", 56 | "resolved": "https://registry.npmjs.org/@ffmpeg-installer/linux-ia32/-/linux-ia32-4.1.0.tgz", 57 | "integrity": "sha512-0LWyFQnPf+Ij9GQGD034hS6A90URNu9HCtQ5cTqo5MxOEc7Rd8gLXrJvn++UmxhU0J5RyRE9KRYstdCVUjkNOQ==", 58 | "optional": true 59 | }, 60 | "@ffmpeg-installer/linux-x64": { 61 | "version": "4.1.0", 62 | "resolved": "https://registry.npmjs.org/@ffmpeg-installer/linux-x64/-/linux-x64-4.1.0.tgz", 63 | "integrity": "sha512-Y5BWhGLU/WpQjOArNIgXD3z5mxxdV8c41C+U15nsE5yF8tVcdCGet5zPs5Zy3Ta6bU7haGpIzryutqCGQA/W8A==", 64 | "optional": true 65 | }, 66 | "@ffmpeg-installer/win32-ia32": { 67 | "version": "4.1.0", 68 | "resolved": "https://registry.npmjs.org/@ffmpeg-installer/win32-ia32/-/win32-ia32-4.1.0.tgz", 69 | "integrity": "sha512-FV2D7RlaZv/lrtdhaQ4oETwoFUsUjlUiasiZLDxhEUPdNDWcH1OU9K1xTvqz+OXLdsmYelUDuBS/zkMOTtlUAw==", 70 | "optional": true 71 | }, 
72 | "@ffmpeg-installer/win32-x64": { 73 | "version": "4.1.0", 74 | "resolved": "https://registry.npmjs.org/@ffmpeg-installer/win32-x64/-/win32-x64-4.1.0.tgz", 75 | "integrity": "sha512-Drt5u2vzDnIONf4ZEkKtFlbvwj6rI3kxw1Ck9fpudmtgaZIHD4ucsWB2lCZBXRxJgXR+2IMSti+4rtM4C4rXgg==", 76 | "optional": true 77 | }, 78 | "abort-controller": { 79 | "version": "3.0.0", 80 | "resolved": "https://registry.npmjs.org/abort-controller/-/abort-controller-3.0.0.tgz", 81 | "integrity": "sha512-h8lQ8tacZYnR3vNQTgibj+tODHI5/+l06Au2Pcriv/Gmet0eaj4TwWH41sO9wnHDiQsEj19q0drzdWdeAHtweg==", 82 | "requires": { 83 | "event-target-shim": "^5.0.0" 84 | } 85 | }, 86 | "adm-zip": { 87 | "version": "0.5.2", 88 | "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.2.tgz", 89 | "integrity": "sha512-lUI3ZSNsfQXNYNzGjt68MdxzCs0eW29lgL74y/Y2h4nARgHmH3poFWuK3LonvFbNHFt4dTb2X/QQ4c1ZUWWsJw==" 90 | }, 91 | "async": { 92 | "version": "2.6.3", 93 | "resolved": "https://registry.npmjs.org/async/-/async-2.6.3.tgz", 94 | "integrity": "sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==", 95 | "requires": { 96 | "lodash": "^4.17.14" 97 | } 98 | }, 99 | "asynckit": { 100 | "version": "0.4.0", 101 | "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", 102 | "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" 103 | }, 104 | "bindings": { 105 | "version": "1.5.0", 106 | "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", 107 | "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", 108 | "optional": true, 109 | "requires": { 110 | "file-uri-to-path": "1.0.0" 111 | } 112 | }, 113 | "bufferutil": { 114 | "version": "4.0.3", 115 | "resolved": "https://registry.npmjs.org/bufferutil/-/bufferutil-4.0.3.tgz", 116 | "integrity": "sha512-yEYTwGndELGvfXsImMBLop58eaGW+YdONi1fNjTINSY98tmMmFijBG6WXgdkfuLNt4imzQNtIE+eBp1PVpMCSw==", 117 | "optional": true, 118 | "requires": { 119 | 
"node-gyp-build": "^4.2.0" 120 | } 121 | }, 122 | "combined-stream": { 123 | "version": "1.0.8", 124 | "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", 125 | "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", 126 | "requires": { 127 | "delayed-stream": "~1.0.0" 128 | } 129 | }, 130 | "connected-domain": { 131 | "version": "1.0.0", 132 | "resolved": "https://registry.npmjs.org/connected-domain/-/connected-domain-1.0.0.tgz", 133 | "integrity": "sha1-v+dyOMdL5FOnnwy2BY3utPI1jpM=" 134 | }, 135 | "data-uri-to-buffer": { 136 | "version": "3.0.1", 137 | "resolved": "https://registry.npmjs.org/data-uri-to-buffer/-/data-uri-to-buffer-3.0.1.tgz", 138 | "integrity": "sha512-WboRycPNsVw3B3TL559F7kuBUM4d8CgMEvk6xEJlOp7OBPjt6G7z8WMWlD2rOFZLk6OYfFIUGsCOWzcQH9K2og==" 139 | }, 140 | "debug": { 141 | "version": "4.3.1", 142 | "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.1.tgz", 143 | "integrity": "sha512-doEwdvm4PCeK4K3RQN2ZC2BYUBaxwLARCqZmMjtF8a51J2Rb0xpVloFRnCODwqjpwnAoao4pelN8l3RJdv3gRQ==", 144 | "requires": { 145 | "ms": "2.1.2" 146 | }, 147 | "dependencies": { 148 | "ms": { 149 | "version": "2.1.2", 150 | "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", 151 | "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==" 152 | } 153 | } 154 | }, 155 | "delayed-stream": { 156 | "version": "1.0.0", 157 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", 158 | "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" 159 | }, 160 | "discord.js": { 161 | "version": "github:discordjs/discord.js#9ffcd83027f0fc06d69df21475865ad55138de01", 162 | "from": "github:discordjs/discord.js", 163 | "requires": { 164 | "@discordjs/collection": "^0.1.6", 165 | "@discordjs/form-data": "^3.0.1", 166 | "abort-controller": "^3.0.0", 167 | "node-fetch": "^2.6.1", 168 | "prism-media": "^1.2.2", 169 | 
"tweetnacl": "^1.0.3", 170 | "ws": "^7.3.1" 171 | }, 172 | "dependencies": { 173 | "node-fetch": { 174 | "version": "2.6.1", 175 | "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", 176 | "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" 177 | } 178 | } 179 | }, 180 | "erlpack": { 181 | "version": "github:discord/erlpack#e27db8f82892bdb9b28a0547cc394d68b5d2242d", 182 | "from": "github:discord/erlpack", 183 | "optional": true, 184 | "requires": { 185 | "bindings": "^1.5.0", 186 | "nan": "^2.14.0" 187 | } 188 | }, 189 | "event-target-shim": { 190 | "version": "5.0.1", 191 | "resolved": "https://registry.npmjs.org/event-target-shim/-/event-target-shim-5.0.1.tgz", 192 | "integrity": "sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==" 193 | }, 194 | "fastq": { 195 | "version": "1.10.1", 196 | "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.10.1.tgz", 197 | "integrity": "sha512-AWuv6Ery3pM+dY7LYS8YIaCiQvUaos9OB1RyNgaOWnaX+Tik7Onvcsf8x8c+YtDeT0maYLniBip2hox5KtEXXA==", 198 | "requires": { 199 | "reusify": "^1.0.4" 200 | } 201 | }, 202 | "fetch-blob": { 203 | "version": "2.1.1", 204 | "resolved": "https://registry.npmjs.org/fetch-blob/-/fetch-blob-2.1.1.tgz", 205 | "integrity": "sha512-Uf+gxPCe1hTOFXwkxYyckn8iUSk6CFXGy5VENZKifovUTZC9eUODWSBhOBS7zICGrAetKzdwLMr85KhIcePMAQ==" 206 | }, 207 | "file-uri-to-path": { 208 | "version": "1.0.0", 209 | "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", 210 | "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==", 211 | "optional": true 212 | }, 213 | "lodash": { 214 | "version": "4.17.21", 215 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", 216 | "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" 217 | }, 218 | 
"m3u8stream": { 219 | "version": "0.8.3", 220 | "resolved": "https://registry.npmjs.org/m3u8stream/-/m3u8stream-0.8.3.tgz", 221 | "integrity": "sha512-0nAcdrF8YJKUkb6PzWdvGftTPyCVWgoiot1AkNVbPKTeIGsWs6DrOjifrJ0Zi8WQfQmD2SuVCjkYIOip12igng==", 222 | "requires": { 223 | "miniget": "^4.0.0", 224 | "sax": "^1.2.4" 225 | } 226 | }, 227 | "mime-db": { 228 | "version": "1.45.0", 229 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.45.0.tgz", 230 | "integrity": "sha512-CkqLUxUk15hofLoLyljJSrukZi8mAtgd+yE5uO4tqRZsdsAJKv0O+rFMhVDRJgozy+yG6md5KwuXhD4ocIoP+w==" 231 | }, 232 | "mime-types": { 233 | "version": "2.1.28", 234 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.28.tgz", 235 | "integrity": "sha512-0TO2yJ5YHYr7M2zzT7gDU1tbwHxEUWBCLt0lscSNpcdAfFyJOVEpRYNS7EXVcTLNj/25QO8gulHC5JtTzSE2UQ==", 236 | "requires": { 237 | "mime-db": "1.45.0" 238 | } 239 | }, 240 | "miniget": { 241 | "version": "4.2.0", 242 | "resolved": "https://registry.npmjs.org/miniget/-/miniget-4.2.0.tgz", 243 | "integrity": "sha512-IzTOaNgBw/qEpzkPTE7X2cUVXQfSKbG8w52Emi93zb+Zya2ZFrbmavpixzebuDJD9Ku4ecbaFlC7Y1cEESzQtQ==" 244 | }, 245 | "mri": { 246 | "version": "1.1.6", 247 | "resolved": "https://registry.npmjs.org/mri/-/mri-1.1.6.tgz", 248 | "integrity": "sha512-oi1b3MfbyGa7FJMP9GmLTttni5JoICpYBRlq+x5V16fZbLsnL9N3wFqqIm/nIG43FjUFkFh9Epzp/kzUGUnJxQ==" 249 | }, 250 | "nan": { 251 | "version": "2.14.2", 252 | "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.2.tgz", 253 | "integrity": "sha512-M2ufzIiINKCuDfBSAUr1vWQ+vuVcA9kqx8JJUsbQi6yf1uGRyb7HfpdfUr5qLXf3B/t8dPvcjhKMmlfnP47EzQ==", 254 | "optional": true 255 | }, 256 | "node": { 257 | "version": "14.15.0", 258 | "resolved": "https://registry.npmjs.org/node/-/node-14.15.0.tgz", 259 | "integrity": "sha512-FrsP5wcA72CXNgQUk7zIdZm4vciBa/ahzaGC5iv3T0coNvz7hGsiI4pMdqqr0OXlVqyvSxDHzUUrhxlY3Hb2Kg==", 260 | "requires": { 261 | "node-bin-setup": "^1.0.0" 262 | } 263 | }, 264 | "node-bin-setup": { 265 | "version": "1.0.6", 266 | 
"resolved": "https://registry.npmjs.org/node-bin-setup/-/node-bin-setup-1.0.6.tgz", 267 | "integrity": "sha512-uPIxXNis1CRbv1DwqAxkgBk5NFV3s7cMN/Gf556jSw6jBvV7ca4F9lRL/8cALcZecRibeqU+5dFYqFFmzv5a0Q==" 268 | }, 269 | "node-fetch": { 270 | "version": "3.0.0-beta.9", 271 | "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-3.0.0-beta.9.tgz", 272 | "integrity": "sha512-RdbZCEynH2tH46+tj0ua9caUHVWrd/RHnRfvly2EVdqGmI3ndS1Vn/xjm5KuGejDt2RNDQsVRLPNd2QPwcewVg==", 273 | "requires": { 274 | "data-uri-to-buffer": "^3.0.1", 275 | "fetch-blob": "^2.1.1" 276 | } 277 | }, 278 | "node-gyp-build": { 279 | "version": "4.2.3", 280 | "resolved": "https://registry.npmjs.org/node-gyp-build/-/node-gyp-build-4.2.3.tgz", 281 | "integrity": "sha512-MN6ZpzmfNCRM+3t57PTJHgHyw/h4OWnZ6mR8P5j/uZtqQr46RRuDE/P+g3n0YR/AiYXeWixZZzaip77gdICfRg==", 282 | "optional": true 283 | }, 284 | "nugu": { 285 | "version": "github:vietcode/nugu#2d011252512089f9e2de57bdcf89fc8fa0224e6d", 286 | "from": "github:vietcode/nugu", 287 | "requires": { 288 | "mri": "^1.1.6", 289 | "nvar": "^1.3.1", 290 | "nyuu": "^0.4.0", 291 | "rclone.js": "^0.5.0" 292 | } 293 | }, 294 | "nvar": { 295 | "version": "1.3.1", 296 | "resolved": "https://registry.npmjs.org/nvar/-/nvar-1.3.1.tgz", 297 | "integrity": "sha512-2U58nVI2o0SXvjiTiQ6UdXpfNu0/A6rqvjCPk7bEsqQujXWDkmLdKppMkh3UXXAvfX9b2LSs1YYYkX2uNE/oAg==" 298 | }, 299 | "nyuu": { 300 | "version": "0.4.0", 301 | "resolved": "https://registry.npmjs.org/nyuu/-/nyuu-0.4.0.tgz", 302 | "integrity": "sha512-MDZPgSJmkMxIKYzAWhVmuPb45/aHpaTjdWT9y66V7RnFVIdB3KkPzH9Am+ZHJl4pMyk5CNO7ujDLrLmujCHhyg==", 303 | "requires": { 304 | "async": "0.2.0 - 2.9999.9999", 305 | "yencode": "1.0.6 - 1.9999.9999" 306 | } 307 | }, 308 | "prism-media": { 309 | "version": "1.2.3", 310 | "resolved": "https://registry.npmjs.org/prism-media/-/prism-media-1.2.3.tgz", 311 | "integrity": "sha512-fSrR66n0l6roW9Rx4rSLMyTPTjRTiXy5RVqDOurACQ6si1rKHHKDU5gwBJoCsIV0R3o9gi+K50akl/qyw1C74A==" 312 | }, 313 | "ps-node": { 
314 | "version": "0.1.6", 315 | "resolved": "https://registry.npmjs.org/ps-node/-/ps-node-0.1.6.tgz", 316 | "integrity": "sha1-mvZ6mdex0BMuUaUDCZ04qNKs4sM=", 317 | "requires": { 318 | "table-parser": "^0.1.3" 319 | } 320 | }, 321 | "rclone.js": { 322 | "version": "0.5.0", 323 | "resolved": "https://registry.npmjs.org/rclone.js/-/rclone.js-0.5.0.tgz", 324 | "integrity": "sha512-S29D87EnyjtgiibqLj6AFubV1BgKtc4iQF74HwKo2++4RaVGehn3AZLIvSghkX5dFKy8RUKGmAo4LHuCMrEJQw==", 325 | "requires": { 326 | "adm-zip": "^0.5.3", 327 | "node-fetch": "^2.6.1" 328 | }, 329 | "dependencies": { 330 | "adm-zip": { 331 | "version": "0.5.4", 332 | "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.4.tgz", 333 | "integrity": "sha512-GMQg1a1cAegh+/EgWbz+XHZrwB467iB/IgtToldvxs7Xa5Br8mPmvCeRfY/Un2fLzrlIPt6Yu7Cej+8Ut9TGPg==" 334 | }, 335 | "node-fetch": { 336 | "version": "2.6.1", 337 | "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.1.tgz", 338 | "integrity": "sha512-V4aYg89jEoVRxRb2fJdAg8FHvI7cEyYdVAh94HH0UIK8oJxUfkjlDQN9RbMx+bEjP7+ggMiFRprSti032Oipxw==" 339 | } 340 | } 341 | }, 342 | "reusify": { 343 | "version": "1.0.4", 344 | "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", 345 | "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==" 346 | }, 347 | "sax": { 348 | "version": "1.2.4", 349 | "resolved": "https://registry.npmjs.org/sax/-/sax-1.2.4.tgz", 350 | "integrity": "sha512-NqVDv9TpANUjFm0N8uM5GxL36UgKi9/atZw+x7YFnQ8ckwFGKrl4xX4yWtrey3UJm5nP1kUbnYgLopqWNSRhWw==" 351 | }, 352 | "shell-parser": { 353 | "version": "1.0.0", 354 | "resolved": "https://registry.npmjs.org/shell-parser/-/shell-parser-1.0.0.tgz", 355 | "integrity": "sha512-RBbMwEVcz+qaj99bzzDMAs/sqhwJS4RgC+PNBLaH1EaUo20ppp3iq4rh3hzplOrbzjDXt5wviIf0B7k9nTBAZA==" 356 | }, 357 | "table-parser": { 358 | "version": "0.1.3", 359 | "resolved": "https://registry.npmjs.org/table-parser/-/table-parser-0.1.3.tgz", 360 | "integrity": 
"sha1-BEHPzhallIFoTCfRtaZ/8VpDx7A=", 361 | "requires": { 362 | "connected-domain": "^1.0.0" 363 | } 364 | }, 365 | "throttle-debounce": { 366 | "version": "3.0.1", 367 | "resolved": "https://registry.npmjs.org/throttle-debounce/-/throttle-debounce-3.0.1.tgz", 368 | "integrity": "sha512-dTEWWNu6JmeVXY0ZYoPuH5cRIwc0MeGbJwah9KUNYSJwommQpCzTySTpEe8Gs1J23aeWEuAobe4Ag7EHVt/LOg==" 369 | }, 370 | "tweetnacl": { 371 | "version": "1.0.3", 372 | "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-1.0.3.tgz", 373 | "integrity": "sha512-6rt+RN7aOi1nGMyC4Xa5DdYiukl2UWCbcJft7YhxReBGQD7OAM8Pbxw6YMo4r2diNEA8FEmu32YOn9rhaiE5yw==" 374 | }, 375 | "utf-8-validate": { 376 | "version": "5.0.4", 377 | "resolved": "https://registry.npmjs.org/utf-8-validate/-/utf-8-validate-5.0.4.tgz", 378 | "integrity": "sha512-MEF05cPSq3AwJ2C7B7sHAA6i53vONoZbMGX8My5auEVm6W+dJ2Jd/TZPyGJ5CH42V2XtbI5FD28HeHeqlPzZ3Q==", 379 | "optional": true, 380 | "requires": { 381 | "node-gyp-build": "^4.2.0" 382 | } 383 | }, 384 | "ws": { 385 | "version": "7.4.2", 386 | "resolved": "https://registry.npmjs.org/ws/-/ws-7.4.2.tgz", 387 | "integrity": "sha512-T4tewALS3+qsrpGI/8dqNMLIVdq/g/85U98HPMa6F0m6xTbvhXU6RCQLqPH3+SlomNV/LdY6RXEbBpMH6EOJnA==" 388 | }, 389 | "yencode": { 390 | "version": "1.1.1", 391 | "resolved": "https://registry.npmjs.org/yencode/-/yencode-1.1.1.tgz", 392 | "integrity": "sha512-DrpXv0x9wVTq6eBsCn4XL+gDesD/ggeciHPiftCV9EAqBna59g6/p63dJ9ygR1O2LLh1qO16e8uCXpOKnG5HKw==" 393 | }, 394 | "ytdl-core": { 395 | "version": "4.4.5", 396 | "resolved": "https://registry.npmjs.org/ytdl-core/-/ytdl-core-4.4.5.tgz", 397 | "integrity": "sha512-/ugilIF2PskxNfQgkYZ/esvkp+fiB/mOPdIExW/En+1QTbq5VeTVqgf2hETTen0Co2lHMM4eRxPaczDHTmkb7A==", 398 | "requires": { 399 | "m3u8stream": "^0.8.3", 400 | "miniget": "^4.0.0", 401 | "sax": "^1.1.3" 402 | } 403 | }, 404 | "zlib-sync": { 405 | "version": "0.1.7", 406 | "resolved": "https://registry.npmjs.org/zlib-sync/-/zlib-sync-0.1.7.tgz", 407 | "integrity": 
"sha512-UmciU6ZrIwtwPC8noMzq+kGMdiWwNRZ3wC0SbED4Ew5Ikqx14MqDPRs/Pbk+3rZPh5SzsOgUBs1WRE0iieddpg==", 408 | "optional": true, 409 | "requires": { 410 | "nan": "^2.14.0" 411 | } 412 | } 413 | } 414 | } 415 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "data-hoarder", 3 | "version": "0.1.0", 4 | "description": "Chatbot for hoarding data", 5 | "main": "index.js", 6 | "scripts": { 7 | "postinstall": "npm run rclone update", 8 | "start": "node index.js", 9 | "discord": "node discord.js", 10 | "server": "node server.js", 11 | "rclone": "node bin/rclone.js", 12 | "test": "echo \"Error: no test specified\" && exit 1" 13 | }, 14 | "author": "", 15 | "license": "MIT", 16 | "repository": "https://github.com/vietcode/DataHoarder.git", 17 | "engines": { 18 | "node": ">=14.0", 19 | "npm": ">=7.6.3" 20 | }, 21 | "dependencies": { 22 | "@ffmpeg-installer/ffmpeg": "^1.0.20", 23 | "adm-zip": "^0.5.2", 24 | "debug": "^4.3.1", 25 | "discord.js": "github:discordjs/discord.js", 26 | "fastq": "^1.10.1", 27 | "node": "^14.15.0", 28 | "node-fetch": "^3.0.0-beta.9", 29 | "nugu": "github:vietcode/nugu", 30 | "nvar": "^1.3.1", 31 | "ps-node": "^0.1.6", 32 | "rclone.js": "^0.5.0", 33 | "shell-parser": "^1.0.0", 34 | "throttle-debounce": "^3.0.1", 35 | "ytdl-core": "^4.4.5" 36 | }, 37 | "devDependencies": {}, 38 | "optionalDependencies": { 39 | "bufferutil": "^4.0.3", 40 | "erlpack": "github:discord/erlpack", 41 | "utf-8-validate": "^5.0.4", 42 | "zlib-sync": "^0.1.7" 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /server.js: -------------------------------------------------------------------------------- 1 | const rclone = require("./bin/rclone.js"); 2 | 3 | require("nvar")(); 4 | const { 5 | HOSTNAME = "", PORT, 6 | } = process.env; 7 | 8 | if (typeof PORT !== "undefined") { 9 | 10 | // Serving 
the target remote as index. 11 | const server = rclone.serve("http", "target:", "--addr", `${ HOSTNAME }:${ PORT }`); 12 | 13 | server.stdout.on("data", (data) => { 14 | console.log(data.toString()); 15 | }); 16 | 17 | server.stderr.on("data", (data) => { 18 | console.error(data.toString()); 19 | }); 20 | 21 | } 22 | -------------------------------------------------------------------------------- /utils.js: -------------------------------------------------------------------------------- 1 | const { throttle } = require("throttle-debounce"); 2 | const debug = require("debug"); 3 | 4 | const rclone = require("./bin/rclone.js"); 5 | 6 | /** 7 | * Formats bytes into human-readable units. 8 | * @param {number} bytes The number of bytes 9 | * @param {number} [decimals=2] Number of decimal points 10 | */ 11 | function bytes(bytes, decimals = 2) { 12 | if (bytes === 0) return "0 Bytes"; 13 | 14 | const k = 1024; 15 | const dm = decimals < 0 ? 0 : decimals; 16 | const sizes = ["Bytes", "KB", "MB", "GB", "TB", "PB", "EB", "ZB", "YB"]; 17 | 18 | const i = Math.floor(Math.log(bytes) / Math.log(k)); 19 | 20 | return parseFloat((bytes / Math.pow(k, i)).toFixed(dm)) + " " + sizes[i]; 21 | } 22 | 23 | /** 24 | * Emits progress of a readable stream 25 | * @param {ReadableStream} stream 26 | * @param {object} options 27 | * @param {number} [options.delay=0] 28 | * @param {number} options.total - Expected total number of bytes to receive. 29 | * @returns {ReadableStream} the input stream for chaining. 
30 | */ 31 | function progress(stream, { delay = 0, total } = {}) { 32 | const startAt = Date.now(); 33 | let done = 0, elapsed, rate, estimated, progress, eta; 34 | 35 | function onProgress() { 36 | const now = new Date(); 37 | elapsed = (now - startAt) / 1000; 38 | rate = done / elapsed; 39 | 40 | if (total) { 41 | estimated = total / rate; 42 | progress = done / total; 43 | eta = estimated - elapsed; 44 | now.setSeconds(now.getSeconds() + eta); 45 | } 46 | 47 | stream.emit("progress", { 48 | total, 49 | done, 50 | totalh: bytes(total), 51 | doneh: bytes(done), 52 | startAt, 53 | elapsed, 54 | rate, 55 | rateh: bytes(rate), 56 | estimated, 57 | progress, 58 | eta, 59 | etaDate: now, 60 | }); 61 | } 62 | 63 | const throttled = throttle(delay, onProgress); 64 | 65 | stream.on("data", (chunk) => { 66 | done += chunk.length; 67 | return throttled(); 68 | }); 69 | 70 | stream.on("end", () => { 71 | onProgress(); 72 | stream.emit("finish"); 73 | }); 74 | 75 | return stream; 76 | } 77 | 78 | /** 79 | * Perform a `rclone rcat` from a stream to a file. 80 | * @param {ReadableStream} stream 81 | * @param {string} filename The file name to store 82 | */ 83 | function rcat(stream, filename) { 84 | const log = debug("hoarder:rcat"); 85 | 86 | return new Promise(async (resolve, reject) => { 87 | // Checks if the file exists and resolves with the file ID immediately. 88 | try { 89 | const fileId = await rclone.promises.lsf(filename, "--format", "i"); 90 | if (fileId) { 91 | stream.destroy(); 92 | // Resolves with the existing file ID after some time to ensure all other edits have been made. 93 | setTimeout(() => resolve(fileId), 2000); 94 | return; 95 | } 96 | } catch (_error) { 97 | log(`File ${ filename } does not exists. 
Continuing...`); 98 | } 99 | 100 | const rcat = rclone.rcat(filename); 101 | 102 | rcat.stderr.on("data", (data) => { 103 | console.log(`stderr: ${ data }`); 104 | reject(data.toString()); 105 | }); 106 | 107 | rcat.stdout.on("end", async () => { 108 | // Retrieves ID of the new file. 109 | const fileId = await rclone.promises.lsf(filename, "--format", "i"); 110 | resolve(fileId); 111 | }); 112 | 113 | stream.pipe(rcat.stdin); 114 | }); 115 | } 116 | 117 | module.exports = { 118 | throttle, 119 | bytes, 120 | progress, 121 | rcat, 122 | }; 123 | --------------------------------------------------------------------------------