├── .github
│   └── workflows
│       ├── heroku-deploy.yml
│       └── release-publish.yml
├── README.md
├── _config.yml
├── assets
│   ├── actions.png
│   ├── cron1.png
│   ├── cron2.png
│   ├── html.zip
│   ├── secrets.png
│   ├── site.jpg
│   └── terminal.png
├── baseimage
│   └── Dockerfile
├── builder
│   └── Dockerfile
├── heroku
│   ├── .netrc
│   ├── Dockerfile
│   ├── deploy.sh
│   └── start.sh
└── src
    ├── SetWebhook
    ├── aria2.js
    ├── backup-db.js
    ├── backup
    │   └── .keep
    ├── bookmark.js
    ├── check.js
    ├── clear-db.js
    ├── copy
    ├── count
    ├── create-table.sql
    ├── db.js
    ├── dedupe
    ├── gdurl.sqlite
    ├── gdutils
    ├── md5
    ├── package.json
    ├── sa
    │   ├── .keep
    │   └── invalid
    │       └── .keep
    ├── server.js
    ├── src
    │   ├── gd.js
    │   ├── router.js
    │   ├── snap2html.js
    │   ├── snap2html.template
    │   ├── summary.js
    │   ├── tg.js
    │   └── tree.js
    └── validate-sa.js

--------------------------------------------------------------------------------
/.github/workflows/heroku-deploy.yml:
--------------------------------------------------------------------------------
1 | name: Deploy to heroku
2 | 
3 | on: workflow_dispatch
4 | 
5 | jobs:
6 |   build:
7 |     runs-on: ubuntu-latest
8 |     steps:
9 |       - uses: actions/checkout@v2
10 |       - name: Deploy
11 |         env:
12 |           HEROKU_EMAIL: ${{ secrets.HEROKU_EMAIL }}
13 |           HEROKU_API_KEY: ${{ secrets.HEROKU_API_KEY }}
14 |           REGION: ${{ secrets.REGION }}
15 |           HEROKU_APP: ${{ secrets.HEROKU_APP }}
16 |           SA_ZIP: ${{ secrets.SA_ZIP }}
17 |           GH_REPO: ${{ secrets.GH_REPO }}
18 |           GH_USER: ${{ secrets.GH_USER }}
19 |           GH_AUTH_TOKEN: ${{ secrets.GH_AUTH_TOKEN }}
20 |           BOT_TOKEN: ${{ secrets.BOT_TOKEN }}
21 |           AUTH_CHATS: ${{ secrets.AUTH_CHATS }}
22 |           HTTP_USER: ${{ secrets.HTTP_USER }}
23 |           HTTP_PASS: ${{ secrets.HTTP_PASS }}
24 |           DEFAULT_DEST: ${{ secrets.DEFAULT_DEST }}
25 |         run: |
26 |           cd heroku
27 |           bash deploy.sh
--------------------------------------------------------------------------------
/.github/workflows/release-publish.yml:
--------------------------------------------------------------------------------
1 | name: Release
2 | 
3 | on:
4 |   push:
5 |     tags:
6 |       - "v*.*.*"
7 | 
8 | jobs:
9 |   ReleaseAssets:
10 |     runs-on: ubuntu-latest
11 | 
12 |     steps:
13 |       - name: Get the version
14 |         id: get_version
15 |         run: echo ::set-output name=VERSION::${GITHUB_REF/refs\/tags\//}
16 |       - uses: actions/checkout@v2
17 |       - name: Docker Build
18 |         run: docker build . -f builder/Dockerfile -t builder:latest
19 |       - name: Run the Docker image
20 |         run: docker run --name=gdutils --rm -dit builder:latest bash
21 |       - name: Copy Built files
22 |         run: docker cp gdutils:/builder/ ./
23 |       - name: Compress Build Outputs
24 |         run: |
25 |           cd builder
26 |           mkdir build
27 |           tar --exclude='./build' -czf build/gdutils.tar.gz .
28 |       - uses: actions/upload-artifact@v2
29 |         with:
30 |           name: Build Location
31 |           path: builder/build/gdutils.tar.gz
32 | 
33 |       - name: Create Release
34 |         uses: softprops/action-gh-release@v1
35 |         env:
36 |           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
37 |         with:
38 |           name: GD-Utils Build ${{ steps.get_version.outputs.VERSION }}
39 |           files: |
40 |             builder/build/gdutils.tar.gz
41 |   DockerImage:
42 |     runs-on: ubuntu-latest
43 |     steps:
44 |       - name: Checkout
45 |         uses: actions/checkout@v2
46 |       - name: Docker meta
47 |         id: meta
48 |         uses: docker/metadata-action@v3
49 |         with:
50 |           images: ghcr.io/nenokkadine/gdutils
51 |       - name: Login to GitHub Container Registry
52 |         uses: docker/login-action@v1
53 |         with:
54 |           registry: ghcr.io
55 |           username: token
56 |           password: ${{ secrets.GH_TOKEN }}
57 |       - name: Build and push
58 |         uses: docker/build-push-action@v2
59 |         with:
60 |           context: .
61 |           file: baseimage/Dockerfile
62 |           platforms: linux/amd64
63 |           push: true
64 |           tags: ${{ steps.meta.outputs.tags }}
65 |           labels: ${{ steps.meta.outputs.labels }}
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # GDUtils Terminal + Bot
2 | 
3 | GD-Utils is just another tool that helps bypass the 750 GB daily limit imposed by Google.
4 | 
5 | 🌟 This repo is just for Docker/Heroku deployment of GDUtils 🌟
6 | 
7 | > Do not overuse it, or your account might be banned by Heroku.
8 | 
9 | > Old deployments need to be updated, as new builds are based on Alpine.
10 | 
11 | ### Prerequisites
12 | 1️⃣ [Github Account](https://github.com)
13 | 
14 | 2️⃣ [Heroku Account](https://heroku.com)
15 | 
16 | 3️⃣ [Telegram Account](https://telegram.org)
17 | 
18 | 4️⃣ [Some Patience](https://www.google.com/search?q=how+to+be+more+patient)
19 | 
20 | ### Deployment
21 | 
22 | 🔷 There is no "Deploy to Heroku" button here; we use Github Actions to deploy the container to Heroku
23 | 
24 | #### Steps
25 | 
26 | 1. Fork this repo
27 | 2. Go to Repository `Settings` -> `Secrets`
28 | 
29 | ![Secrets](assets/secrets.png)
30 | 3. Now set the variables listed under
31 | [Environment Variables](#environment-variables) as Github repository secrets
32 | 
33 | 4. After setting them, go to Actions and run the workflow
34 | 
35 | ![Actions](assets/actions.png)
36 | 
37 | 5. Now go to the app URL and click the link shown below
38 | 
39 | ![Webhook](assets/site.jpg)
40 | 
41 | 6. This opens the web terminal
42 | 
43 | ![Terminal](assets/terminal.png)
44 | 
45 | 7. Setting the webhook for bot mode
46 | 
47 | 🔷 By default the startup script sets the webhook for you
48 | 
49 | 🔷 If the bot doesn't respond, just type `SetWebhook`
50 | 
51 | ```
52 | > bash-4.4$ SetWebhook
53 | {"ok":true,"result":true,"description":"Webhook is already set"}
54 | ```
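
🔷 If the bot still doesn't respond after `SetWebhook`, you can ask Telegram which webhook is currently registered. This is a minimal manual check, not part of this repo's scripts; it queries the same Bot API that the `src/SetWebhook` helper (shown later in this file) calls, using your BOT_TOKEN:

```
# Ask Telegram which webhook URL is currently registered for your bot
curl "https://api.telegram.org/bot${BOT_TOKEN}/getWebhookInfo"
```

The `url` field in the response should point at your Heroku app.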
55 | 8. Done! Your bot is now live. You can still use the terminal whenever you want, and you can generate Snap2HTML and Tree listings.
56 | 
57 | 🔷 To access the files in your working directory, go to the URL below (avoid using the name index.html for both Snap2HTML and Tree)
58 | 
59 | ```
60 | https://YOURAPP.herokuapp.com/out/
61 | ```
62 | 
63 | ### Usage
64 | - **Web Terminal**
65 | 
66 | Just type `gdutils` in the web terminal
67 | ```
68 | bash-5.0$ gdutils
69 | GD-UTILS by iwestlin, English version by Roshanconnor, Polished by Nenokkadine
70 | 1.Copy Files to your Teamdrive
71 | ➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖
72 | 2.Calculate Size
73 | ➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖
74 | 3.Remove Duplicate Files
75 | ➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖
76 | 4.EXIT
77 | 
78 | Choose any Number [1-4]:
79 | 
80 | ```
81 | 
82 | - **Telegram Bot**
83 | 
84 | Just type `/help` in the Telegram bot
85 | 
86 | ### Environment Variables
87 | 
88 | | Variable | Value | Example | Required | Description |
89 | | :---: | :---: | :---: | :---: | :---: |
90 | | HEROKU_EMAIL | Heroku email | abc@abc.com | True | The email you used for your Heroku account |
91 | | HEROKU_API_KEY | Heroku API key | xxxxxxx-xxxx-xxxx-xxxx-xxxxxx | True | Get it from [Heroku](https://dashboard.heroku.com/account/applications/authorizations/new) |
92 | | REGION | Heroku Container Region | EU | False | Needed only if you need an EU container |
93 | | HEROKU_APP | Heroku app name | Name must be unique | True | Heroku app name to be updated or created (should be in lowercase) |
94 | | SA_ZIP | Direct Link | https://abc.com/sa.zip | False* | Service accounts zipped so that a folder named accounts contains the SA JSONs (*required if the GH_* secrets below are not set) |
95 | | GH_REPO | SA repo name | accounts | False** | Git repo name where the SAs are uploaded (**all three GH_* secrets are required together if SA_ZIP is not set) |
96 | | GH_USER | Github Username | Nenokkadine | False** | Github username where the SAs are uploaded |
97 | | GH_AUTH_TOKEN | Github Auth Token | xxxxxxxxxxxxxx | False** | Get this from [Github](https://github.com/settings/tokens/new) with the `repo` scope |
98 | | BOT_TOKEN | Telegram Bot Token | 123456:abcdef | True | Get this from [Telegram](https://t.me/botfather) |
99 | | AUTH_CHATS | Authorized Chats | 'UserA','UserB' | True | Telegram usernames/user IDs, each wrapped in single quotes |
100 | | HTTP_USER | Username | user | False | HTTP Basic auth adds an extra layer of security for your app (recommended) |
101 | | HTTP_PASS | Password | pass | False | HTTP Basic auth adds an extra layer of security for your app (recommended) |
102 | | DEFAULT_DEST | Folder ID | 0ADXXXXXXXMUk9PVA | False | Default destination G-Drive folder ID (TeamDrive ID recommended) |
103 | 
104 | ### How to make the App Never Sleep?
105 | 1. Set up a [CronJob](https://console.cron-job.org) to keep the Heroku app awake
106 |    - First sign up on the site above
107 |    - Then go [here](https://console.cron-job.org/jobs/create) and follow the screenshot below
108 | 
109 | ```
110 | URL:
111 | https://YOURAPPNAME.herokuapp.com/gdutils/check.js
112 | ```
113 | ![cronJob Basic](assets/cron1.png)
114 |    - If you have set up basic HTTP auth, you should fill in these too
115 | 
116 | ![HTTP Auth](assets/cron2.png)
117 |    - Now you can save and enable the cron job, and your app never sleeps again.
118 | 
119 | 2. Use [Kaffeine](https://kaffeine.herokuapp.com/) and just give the app name; apps with basic HTTP auth enabled might not work here
120 | 
121 | ### Note
122 | 
123 | * No need to redeploy when new changes are made to the src folder; it will be built automatically and your apps will fetch the latest release
124 | 
125 | * If the Docker base image is updated, you should redeploy (not gonna happen soon).
126 | 
127 | ### Credits
128 | 👏 [iwestlin](https://github.com/iwestlin) - The original developer of this tool
129 | 
130 | 👏 [Roshan](https://github.com/roshanconnor123) - Translated the entire repo into English and wrote the bash script
131 | 
132 | 👏 [Shuanglei Tao](https://github.com/tsl0922) - Developer of TTYD (Terminal over Web)
133 | 
134 | ## License
135 | [MIT](https://choosealicense.com/licenses/mit/)
136 | 
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | theme: jekyll-theme-cayman
--------------------------------------------------------------------------------
/assets/actions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/assets/actions.png
--------------------------------------------------------------------------------
/assets/cron1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/assets/cron1.png
--------------------------------------------------------------------------------
/assets/cron2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/assets/cron2.png
--------------------------------------------------------------------------------
/assets/html.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/assets/html.zip
--------------------------------------------------------------------------------
/assets/secrets.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/assets/secrets.png -------------------------------------------------------------------------------- /assets/site.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/assets/site.jpg -------------------------------------------------------------------------------- /assets/terminal.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/assets/terminal.png -------------------------------------------------------------------------------- /baseimage/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mhart/alpine-node:slim-12 2 | 3 | LABEL maintainer="Nenokkadine" 4 | 5 | #Dependencies 6 | RUN apk update && \ 7 | apk add --no-cache --update git \ 8 | curl \ 9 | wget \ 10 | tar \ 11 | unzip \ 12 | xz \ 13 | bash && \ 14 | mkdir /gdutils && chmod 777 /gdutils 15 | WORKDIR /gdutils 16 | -------------------------------------------------------------------------------- /builder/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM mhart/alpine-node:12 2 | RUN apk add --no-cache --update \ 3 | bash \ 4 | curl 5 | RUN apk add --no-cache --virtual .build-deps make gcc g++ python3 && mkdir builder 6 | COPY src builder 7 | RUN cd builder && npm install && apk del .build-deps 8 | -------------------------------------------------------------------------------- /heroku/.netrc: -------------------------------------------------------------------------------- 1 | machine api.heroku.com 2 | login 3 | password 4 | machine git.heroku.com 5 | login 6 | password -------------------------------------------------------------------------------- /heroku/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM ghcr.io/nenokkadine/gdutils:latest 2 | CMD bash -c "$(curl -sL "https://github.com/nenokkadine/GD-Utils/raw/master/heroku/start.sh")" 3 | -------------------------------------------------------------------------------- /heroku/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | if [[ -n "$HEROKU_EMAIL" && -n "$HEROKU_API_KEY" ]]; then 3 | sed -Ei "s/login/login "$HEROKU_EMAIL"/g" .netrc 4 | sed -Ei "s/password/password "$HEROKU_API_KEY"/g" .netrc 5 | mv .netrc ~/.netrc 6 | else 7 | echo "Heroku Credentials Not Found, Add them in secrets" 8 | exit 2 9 | fi 10 | 11 | if [[ -n "$REGION" && -n "$HEROKU_APP" ]]; then 12 | heroku container:login 13 | echo "Creating App" 14 | heroku apps:create "$HEROKU_APP" --stack=container --region=eu 15 | if [[ $? -eq 0 ]]; then 16 | echo "Successfully created app" 17 | heroku container:push web -a "$HEROKU_APP" 18 | if [[ $? -eq 0 ]]; then 19 | echo "Deploying" 20 | heroku container:release web -a "$HEROKU_APP" 21 | if [[ $? -eq 0 ]]; then 22 | export APP_SUC=true 23 | echo "Deployment Success" 24 | else 25 | echo "Failed to Release, Try again" 26 | exit 2 27 | fi 28 | else 29 | echo "Failed to deploy, Try again" 30 | exit 2 31 | fi 32 | else 33 | echo "Could not create app, Trying to push to Registry" 34 | echo "Building and pushing the app to Heroku Registry" 35 | heroku container:push web -a "$HEROKU_APP" 36 | if [[ $? 
-eq 0 ]]; then 37 | echo "Deploying" 38 | heroku container:release web -a "$HEROKU_APP" 39 | if [[ $? -eq 0 ]]; then 40 | export APP_SUC=true 41 | echo "Deployment Success" 42 | else 43 | echo "Container Release Failed" 44 | exit 2 45 | fi 46 | else 47 | echo "App Name is not available, Please select another" 48 | exit 2 49 | fi 50 | fi 51 | elif [[ -n "$HEROKU_APP" ]]; then 52 | heroku container:login 53 | echo "Creating App" 54 | heroku apps:create "$HEROKU_APP" --stack=container 55 | if [[ $? -eq 0 ]]; then 56 | echo "Successfully created app" 57 | heroku container:push web -a "$HEROKU_APP" 58 | if [[ $? -eq 0 ]]; then 59 | echo "Deploying" 60 | heroku container:release web -a "$HEROKU_APP" 61 | if [[ $? -eq 0 ]]; then 62 | export APP_SUC=true 63 | echo "Deployment Success" 64 | else 65 | echo "Failed to Release, Try again" 66 | exit 2 67 | fi 68 | else 69 | echo "Failed to deploy, Try again" 70 | exit 2 71 | fi 72 | else 73 | echo "Could not create app, Trying to push to Registry" 74 | echo "Building and pushing the app to Heroku Registry" 75 | heroku container:push web -a "$HEROKU_APP" 76 | echo "Deploying" 77 | if [[ $? -eq 0 ]]; then 78 | heroku container:release web -a "$HEROKU_APP" 79 | if [[ $? -eq 0 ]]; then 80 | export APP_SUC=true 81 | echo "Deployment Success" 82 | else 83 | echo "Container Release Failed" 84 | exit 2 85 | fi 86 | else 87 | echo "App Name is not available, Please select another" 88 | exit 2 89 | fi 90 | fi 91 | else 92 | echo "Heroku App name Not Provided" 93 | fi 94 | 95 | 96 | echo "Setting Config Vars" 97 | if [[ -n "$APP_SUC" ]]; then 98 | # Service Accounts 99 | if [[ -n "$SA_ZIP" ]]; then 100 | heroku config:set -a "$HEROKU_APP" SA_ZIP="$SA_ZIP" 101 | elif [[ -n "$GH_REPO" && -n "$GH_USER" && -n "$GH_AUTH_TOKEN" ]]; then 102 | heroku config:set -a "$HEROKU_APP" GH_REPO="$GH_REPO" GH_USER="$GH_USER" GH_AUTH_TOKEN="$GH_AUTH_TOKEN" 103 | else 104 | echo "Provide Some way to get Service Accounts,for Reference check README" 105 | exit 2 106 | fi 107 | 108 | #Basic Auth 109 | if [[ -n "$HTTP_USER" && -n "$HTTP_PASS" ]]; then 110 | heroku config:set -a "$HEROKU_APP" HTTP_USER="$HTTP_USER" HTTP_PASS="$HTTP_PASS" 111 | else 112 | echo "WARNING : No AUTH Variables provided, HTTP Basic Auth Disabled" 113 | fi 114 | 115 | #Config File 116 | if [[ -n "$BOT_TOKEN" && -n "$AUTH_CHATS" ]]; then 117 | heroku config:set -a "$HEROKU_APP" BOT_TOKEN="$BOT_TOKEN" AUTH_CHATS="$AUTH_CHATS" APP_NAME="$HEROKU_APP" 118 | else 119 | echo "Bot Token, Auth Chats not Provided Exiting , For Info Read Readme" 120 | exit 2 121 | fi 122 | 123 | if [[ -n "$DEFAULT_DEST" ]]; then 124 | heroku config:set -a "$HEROKU_APP" DEFAULT_DEST="$DEFAULT_DEST" 125 | else 126 | echo "WARNING : Default Destination Folder ID Not Provided" 127 | fi 128 | heroku ps:scale web=1 -a "$HEROKU_APP" 129 | echo "Deployment Completed" 130 | else 131 | echo "App Deployment Failed" 132 | exit 1 133 | fi -------------------------------------------------------------------------------- /heroku/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | wget -q https://github.com/nenokkadine/gdutils/releases/latest/download/gdutils.tar.gz && tar -xzf gdutils.tar.gz && rm -rf gdutils.tar.gz 3 | chmod a+x copy count dedupe md5 4 | mkdir /out 5 | wget -q https://github.com/nenokkadine/GD-Utils/raw/master/src/gdutils -O /usr/bin/gdutils && chmod a+x /usr/bin/gdutils 6 | wget -q https://github.com/nenokkadine/GD-Utils/raw/master/src/SetWebhook -O /usr/bin/SetWebhook && chmod 
a+x /usr/bin/SetWebhook && SetWebhook
7 | #Caddy
8 | wget -q https://github.com/caddyserver/caddy/releases/download/v2.3.0/caddy_2.3.0_linux_amd64.tar.gz -O cad.tar.gz && tar xzf cad.tar.gz && rm -rf cad.tar.gz && chmod a+x caddy && mv caddy /usr/bin/caddy
9 | #Service Accounts
10 | if [[ -n $GH_USER && -n $GH_AUTH_TOKEN && -n $GH_REPO ]]; then
11 |     echo "Using Service Accounts (Git), cloning repo"
12 |     git clone -q https://"$GH_AUTH_TOKEN"@github.com/"$GH_USER"/"$GH_REPO" accounts
13 |     mv accounts/*.json sa/
14 |     rm -rf accounts
15 | elif [[ -n $SA_ZIP ]]; then
16 |     echo "Using Service Accounts (Zip URL), downloading"
17 |     wget -q "$SA_ZIP" -O accounts.zip
18 |     unzip -qq accounts.zip
19 |     mv accounts/*.json sa/
20 |     rm -rf accounts
21 | else
22 |     echo "Neither Service Accounts nor Token provided. Exiting..."
23 |     exit 1
24 | fi
25 | 
26 | # Config
27 | if [[ -n "$BOT_TOKEN" && -n "$AUTH_CHATS" ]]; then
28 |     wget -qO- https://gist.github.com/nenokkadine/433284483b9df4e73dfcb90d4310bd65/raw/61775f835e216f992ed25ec71ab9d2310522caef/config.js | sed -e "s/\$BOT_TOKEN/$BOT_TOKEN/g" -e "s/\$AUTH_CHATS/$AUTH_CHATS/g" -e "s/\$DEFAULT_DEST/$DEFAULT_DEST/g" > config.js
29 | else
30 |     echo "Bot Token / Auth Chats not provided, exiting. For more info read the README"
31 |     exit 1
32 | fi
33 | #Start GDutils Server
34 | node server.js &
35 | 
36 | # HTTP Basic Auth
37 | if [[ -n "$HTTP_USER" && -n "$HTTP_PASS" ]]; then
38 |     wget -qO- https://gist.github.com/nenokkadine/5db0fff9216fcedc0dd5862d0a5ab864/raw/4dd7e7b8edb691fc77a78cc174e88baab4ff073c/caddyauth | sed -e "s/\$HTTP_USER/$HTTP_USER/g" -e "s/\$HASHPASS/$(caddy hash-password --plaintext $HTTP_PASS)/g" > /Caddyfile
39 | else
40 |     wget -q https://gist.github.com/nenokkadine/5db0fff9216fcedc0dd5862d0a5ab864/raw/4dd7e7b8edb691fc77a78cc174e88baab4ff073c/caddynoauth -O /Caddyfile
41 | fi
42 | 
43 | #Terminal over Web
44 | wget -q https://github.com/tsl0922/ttyd/releases/download/1.6.3/ttyd.x86_64 -O ttyd && chmod a+x ttyd
45 | ./ttyd -i /usr/ttyd.sock -a -s 9 -b /bash -P 1 -t disableLeaveAlert=true -t rendererType=webgl -t titleFixed='Web Terminal' bash &
46 | cd ..
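# (Editor's note: the steps below wire up the front proxy. The Caddyfile fetched
#  above presumably routes incoming traffic to the gdutils server socket
#  (/usr/gdutils.sock, see src/server.js), the ttyd web-terminal socket and the
#  static /out files; the Caddyfile itself lives in a gist, not in this repo,
#  so this is inferred from the sockets configured above and in server.js.)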
47 | # Caddy Run 48 | wget -q https://github.com/nenokkadine/gdutils/raw/master/assets/html.zip -O assets.zip && unzip -qq /assets.zip && rm -rf /assets.zip 49 | caddy run --config /Caddyfile 50 | -------------------------------------------------------------------------------- /src/SetWebhook: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | if [[ -n "$HTTP_USER" && -n "$HTTP_PASS" ]]; then 4 | curl -F "url=https://${HTTP_USER}:${HTTP_PASS}@${APP_NAME}.herokuapp.com/gutils/api/gdurl/tgbot" "https://api.telegram.org/bot${BOT_TOKEN}/setWebhook" 5 | else 6 | curl -F "url=https://${APP_NAME}.herokuapp.com/gutils/api/gdurl/tgbot" "https://api.telegram.org/bot${BOT_TOKEN}/setWebhook" 7 | fi 8 | -------------------------------------------------------------------------------- /src/aria2.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const fs = require('fs') 4 | const crypto = require('crypto') 5 | 6 | const { format_size } = require('./src/summary') 7 | const { get_name_by_id, get_sa_token, get_access_token, walk_and_save, validate_fid } = require('./src/gd') 8 | 9 | const ID_DIR_MAPPING = {} 10 | const FOLDER_TYPE = 'application/vnd.google-apps.folder' 11 | 12 | const { argv } = require('yargs') 13 | .usage('Usage: ./$0 [options]') 14 | .alias('o', 'output') 15 | .describe('output', 'Specify Output File,Do not fill in the default url.txt') 16 | .alias('u', 'update') 17 | .describe('u', 'Do not use local cache, force to obtain source folder information online') 18 | .alias('S', 'service_account') 19 | .describe('S', 'Use service account to operate, provided that the sa authorized json file must be placed in the ./sa directory') 20 | .alias('k', 'hashkey') 21 | .describe('k', 'Use the hashkey set by the website deployed at https://github.com/iwestlin/gdshare to generate a legal download link') 22 | .alias('c', 'cf') 23 | .describe('cf', 'Website URL deployed using gdshare') 24 | .alias('e', 'expire') 25 | .describe('e', 'gdshare direct link expiration time, unit hour, default value 24') 26 | .help('h') 27 | .alias('h', 'help') 28 | 29 | const [fid] = argv._ 30 | if (validate_fid(fid)) { 31 | let { update, service_account, output, hashkey, cf, expire } = argv 32 | output = output || 'uri.txt' 33 | gen_input_file({ fid, update, service_account, output, hashkey, cf, expire }) 34 | .then(cmd => { 35 | console.log('Generated', output) 36 | console.log('Execute the command to download:\n', cmd) 37 | }) 38 | .catch(console.error) 39 | } else { 40 | console.warn('FolderID is wrong or invalid') 41 | } 42 | 43 | async function gen_input_file ({ fid, service_account, update, output, hashkey, cf, expire }) { 44 | const root = await get_name_by_id(fid, service_account) 45 | const data = await walk_and_save({ fid, service_account, update }) 46 | const files = data.filter(v => v.mimeType !== FOLDER_TYPE) 47 | const folders = data.filter(v => v.mimeType === FOLDER_TYPE) 48 | let result 49 | if (hashkey && cf) { 50 | result = [`# aria2c -c --enable-rpc=false -i ${output}`] 51 | } else { 52 | const access_token = service_account ? (await get_sa_token()).access_token : await get_access_token() 53 | result = [`# aria2c -c --enable-rpc=false --header "Authorization: Bearer ${access_token}" -i ${output}`] 54 | } 55 | result = result.concat(files.map(file => { 56 | const { id, name, parent, size } = file 57 | const dir = get_dir(parent, folders) 58 | const download_uri = (hashkey && cf) ? 
gen_direct_link({ file, hashkey, cf, expire }) : `https://www.googleapis.com/drive/v3/files/${id}?alt=media` 59 | return `# File Size:${format_size(size)} 60 | ${download_uri} 61 | dir=${root}${dir} 62 | out=${name}` 63 | })) 64 | fs.writeFileSync(output, result.join('\n\n')) 65 | return result[0].replace('# ', '') 66 | } 67 | 68 | function gen_direct_link ({ file, hashkey, cf, expire }) { 69 | const { name, id } = file 70 | const expired = Date.now() + (Number(expire) || 24) * 3600 * 1000 71 | const str = `expired=${expired}&id=${id}` 72 | const sig = hmac(str, hashkey) 73 | if (!cf.startsWith('http')) cf = 'https://' + cf 74 | return `${cf}/api/download/${name}?${str}&sig=${sig}` 75 | } 76 | 77 | function hmac (str, hashkey) { 78 | return crypto.createHmac('sha256', hashkey).update(str).digest('hex') 79 | } 80 | 81 | function get_dir (id, folders) { 82 | let result = ID_DIR_MAPPING[id] 83 | if (result !== undefined) return result 84 | result = '' 85 | let temp = id 86 | let folder = folders.filter(v => v.id === temp)[0] 87 | while (folder) { 88 | result = `/${folder.name}` + result 89 | temp = folder.parent 90 | if (ID_DIR_MAPPING[temp]) { 91 | result = ID_DIR_MAPPING[temp] + result 92 | return ID_DIR_MAPPING[id] = result 93 | } 94 | folder = folders.filter(v => v.id === temp)[0] 95 | } 96 | return ID_DIR_MAPPING[id] = result 97 | } 98 | -------------------------------------------------------------------------------- /src/backup-db.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const path = require('path') 4 | const {db} = require('./db') 5 | 6 | const filepath = path.join(__dirname, 'backup', `${Date.now()}.sqlite`) 7 | 8 | db.backup(filepath) 9 | .then(() => { 10 | console.log(filepath) 11 | }) 12 | .catch((err) => { 13 | console.log('backup failed:', err) 14 | }) 15 | -------------------------------------------------------------------------------- /src/backup/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/src/backup/.keep -------------------------------------------------------------------------------- /src/bookmark.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const {db} = require('./db') 3 | 4 | const action = process.argv[2] || 'export' 5 | const filepath = process.argv[3] || 'bookmarks.json' 6 | 7 | if (action === 'export') { 8 | const bookmarks = db.prepare('select * from bookmark').all() 9 | fs.writeFileSync(filepath, JSON.stringify(bookmarks)) 10 | console.log('bookmarks exported', filepath) 11 | } else if (action === 'import') { 12 | let bookmarks = fs.readFileSync(filepath, 'utf8') 13 | bookmarks = JSON.parse(bookmarks) 14 | bookmarks.forEach(v => { 15 | const {alias, target} = v 16 | const exist = db.prepare('select alias from bookmark where alias=?').get(alias) 17 | if (exist) { 18 | db.prepare('update bookmark set target=? 
where alias=?').run(target, alias) 19 | } else { 20 | db.prepare('INSERT INTO bookmark (alias, target) VALUES (?, ?)').run(alias, target) 21 | } 22 | }) 23 | console.log('bookmarks imported', bookmarks) 24 | } else { 25 | console.log('[help info]') 26 | console.log('export: node bookmark.js export bm.json') 27 | console.log('import: node bookmark.js import bm.json') 28 | } 29 | -------------------------------------------------------------------------------- /src/check.js: -------------------------------------------------------------------------------- 1 | const { ls_folder } = require('./src/gd') 2 | 3 | ls_folder({ fid: 'root' }).then(console.log).catch(console.error) 4 | -------------------------------------------------------------------------------- /src/clear-db.js: -------------------------------------------------------------------------------- 1 | const { db } = require('./db') 2 | 3 | const record = db.prepare('select count(*) as c from gd').get() 4 | db.prepare('delete from gd').run() 5 | console.log('Deleted', record.c, 'Data') 6 | 7 | db.exec('vacuum') 8 | db.close() 9 | -------------------------------------------------------------------------------- /src/copy: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const bytes = require('bytes') 4 | 5 | const { argv } = require('yargs') 6 | .usage('Usage: ./$0 [options]\ndestination folderid is Optional,If its not filled, it use DEFAULT_TARGET in config.js') 7 | .alias('u', 'update') 8 | .describe('u', 'Do not use local cache, force to obtain source folder information online') 9 | .alias('y', 'yes') 10 | .describe('yes', 'If a copy record is found, resume without asking') 11 | .alias('f', 'file') 12 | .describe('f', 'Copy a single file') 13 | .alias('n', 'name') 14 | .describe('n', 'Rename the target folder, leave the original folder name blank') 15 | .alias('N', 'not_teamdrive') 16 | .describe('N', 'If it is not a team drive link, you can add this parameter to improve interface query efficiency and reduce latency') 17 | .alias('s', 'size') 18 | .describe('s', 'If you add this, all files are copied by default. 
If this value is set, files smaller than this size will be filtered out - must end with b, such as 10mb') 19 | .alias('S', 'service_account') 20 | .describe('S', 'Specify the service account for operation, provided that the json authorization file must be placed in the /sa Folder, please ensure that the SA account has Proper permissions。') 21 | .alias('D', 'dncnr') 22 | .describe('D', 'do not create new root, Does not create a folder with the same name at the destination, will directly copy the files in the source folder to the destination folder as they are') 23 | .help('h') 24 | .alias('h', 'help') 25 | 26 | const { copy, copy_file, validate_fid } = require('./src/gd') 27 | const { DEFAULT_TARGET } = require('./config') 28 | 29 | let [source, target] = argv._ 30 | 31 | if (validate_fid(source)) { 32 | const { name, update, file, not_teamdrive, size, service_account, dncnr } = argv 33 | if (file) { 34 | target = target || DEFAULT_TARGET 35 | if (!validate_fid(target)) throw new Error('Drive ID is incorrect') 36 | return copy_file(source, target, service_account).then(r => { 37 | const link = 'https://drive.google.com/drive/folders/' + target 38 | console.log('Clone is completed,File Location:\n', link) 39 | }).catch(console.error) 40 | } 41 | let min_size 42 | if (size) { 43 | console.log(`Do not Copy Size File Below ${size} `) 44 | min_size = bytes.parse(size) 45 | } 46 | copy({ source, target, name, min_size, update, not_teamdrive, service_account, dncnr }).then(folder => { 47 | if (!folder) return 48 | const link = 'https://drive.google.com/drive/folders/' + folder.id 49 | console.log('\nClone Completed,Folder Link:\n', link) 50 | }) 51 | } else { 52 | console.warn('FolderID is wrong or invalid') 53 | } 54 | -------------------------------------------------------------------------------- /src/count: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const { argv } = require('yargs') 4 | .usage('Usage: ./$0 [options]') 5 | .example('./$0 1ULY8ISgWSOVc0UrzejykVgXfVL_I4r75', 'Get statistics of all files contained in https://drive.google.com/drive/folders/1ULY8ISgWSOVc0UrzejykVgXfVL_I4r75') 6 | .example('./$0 root -s size -t html -o out.html', 'Get the personal drive root directory statistics, the results are output in HTML form, sorted in reverse order according to the total size, and saved to the out.html file in this directory (create new if it does not exist, overwrite if it exists) ') 7 | .example('./$0 root -s name -t json -o out.json', 'Get the statistics information of the root directory of the personal drive. The results are output in JSON format, sorted by file extension, and saved to the out.json file in this directory') 8 | .example('./$0 root -t all -o all.json', 'Get the statistics of the root Folder of the personal drive, output all file information (including folders) in JSON format, and save it to the all.json file in this Folder') 9 | .alias('u', 'update') 10 | .describe('u', 'Force to get information online (regardless of whether there is a local cache)') 11 | .alias('N', 'not_teamdrive') 12 | .describe('N', 'If it is not a team drive link, you can add this parameter to improve interface query efficiency and reduce latency. 
If you want to count a personal drive and the service account in the ./sa directory does not have relevant permissions, please make sure to add this flag to use personal auth information for query') 13 | .alias('S', 'service_account') 14 | .describe('S', 'Specify the use of service account for statistics,The thing is that the SA json file must be placed in the sa Folder') 15 | .alias('s', 'sort') 16 | .describe('s', 'Sorting method of statistical results,Optional value name or size,If it is not filled in, it will be arranged in reverse order according to the number of files by default') 17 | .alias('t', 'type') 18 | .describe('t', 'The output type of the statistical result, the optional value is html/tree/snap/json/all, all means output the data as a json, it is best to use with -o. If not filled, the command line form will be output by default') 19 | .alias('o', 'output') 20 | .describe('o', 'Statistics output file, suitable to use with -t') 21 | .help('h') 22 | .alias('h', 'help') 23 | 24 | const { count, validate_fid } = require('./src/gd') 25 | const [fid] = argv._ 26 | if (validate_fid(fid)) { 27 | const { update, sort, type, output, not_teamdrive, service_account } = argv 28 | count({ fid, update, sort, type, output, not_teamdrive, service_account }).catch(console.error) 29 | } else { 30 | console.warn('FolderID is wrong or invalid') 31 | } 32 | -------------------------------------------------------------------------------- /src/create-table.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE "gd" ( 2 | "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE, 3 | "fid" TEXT NOT NULL UNIQUE, 4 | "info" TEXT, 5 | "summary" TEXT, 6 | "subf" TEXT, 7 | "ctime" INTEGER, 8 | "mtime" INTEGER 9 | ); 10 | 11 | CREATE UNIQUE INDEX "gd_fid" ON "gd" ( 12 | "fid" 13 | ); 14 | 15 | CREATE TABLE "task" ( 16 | "id" INTEGER NOT NULL PRIMARY KEY AUTOINCREMENT UNIQUE, 17 | "source" TEXT NOT NULL, 18 | "target" TEXT NOT NULL, 19 | "status" TEXT, 20 | "copied" TEXT DEFAULT '', 21 | "mapping" TEXT DEFAULT '', 22 | "ctime" INTEGER, 23 | "ftime" INTEGER 24 | ); 25 | 26 | CREATE UNIQUE INDEX "task_source_target" ON "task" ( 27 | "source", 28 | "target" 29 | ); 30 | 31 | CREATE TABLE "copied" ( 32 | "taskid" INTEGER, 33 | "fileid" TEXT 34 | ); 35 | 36 | CREATE INDEX "copied_taskid" ON "copied" ("taskid"); 37 | 38 | CREATE TABLE "bookmark" ( 39 | "alias" TEXT, 40 | "target" TEXT 41 | ); 42 | 43 | CREATE UNIQUE INDEX "bookmark_alias" ON "bookmark" ( 44 | "alias" 45 | ); 46 | 47 | CREATE TABLE "hash" ( 48 | "md5" TEXT NOT NULL, 49 | "gid" TEXT NOT NULL UNIQUE, 50 | "status" TEXT NOT NULL DEFAULT 'normal' 51 | ); 52 | 53 | CREATE INDEX "hash_md5" ON "hash" ( 54 | "md5" 55 | ); 56 | 57 | CREATE UNIQUE INDEX "hash_gid" ON "hash" ( 58 | "gid" 59 | ); -------------------------------------------------------------------------------- /src/db.js: -------------------------------------------------------------------------------- 1 | const path = require('path') 2 | const db_location = path.join(__dirname, 'gdurl.sqlite') 3 | const db = require('better-sqlite3')(db_location) 4 | 5 | db.pragma('journal_mode = WAL') 6 | 7 | create_table_copied() 8 | function create_table_copied () { 9 | const [exists] = db.prepare('PRAGMA table_info(copied)').all() 10 | if (exists) return 11 | const create_table = `CREATE TABLE "copied" ( 12 | "taskid" INTEGER, 13 | "fileid" TEXT 14 | )` 15 | db.prepare(create_table).run() 16 | const create_index = `CREATE INDEX "copied_taskid" ON "copied" 
("taskid");` 17 | db.prepare(create_index).run() 18 | } 19 | 20 | create_table_bookmark() 21 | function create_table_bookmark () { 22 | const [exists] = db.prepare('PRAGMA table_info(bookmark)').all() 23 | if (exists) return 24 | const create_table = `CREATE TABLE "bookmark" ( 25 | "alias" TEXT, 26 | "target" TEXT 27 | );` 28 | db.prepare(create_table).run() 29 | const create_index = `CREATE UNIQUE INDEX "bookmark_alias" ON "bookmark" ( 30 | "alias" 31 | );` 32 | db.prepare(create_index).run() 33 | } 34 | 35 | create_table_hash() 36 | function create_table_hash () { 37 | const [exists] = db.prepare('PRAGMA table_info(hash)').all() 38 | if (exists) return 39 | const create_table = `CREATE TABLE "hash" ( 40 | "md5" TEXT NOT NULL, 41 | "gid" TEXT NOT NULL UNIQUE, 42 | "status" TEXT NOT NULL DEFAULT 'normal' 43 | );` 44 | db.prepare(create_table).run() 45 | const create_index = 'CREATE INDEX "hash_md5" ON "hash" ("md5");' 46 | db.prepare(create_index).run() 47 | const create_index2 = 'CREATE INDEX "hash_gid" ON "hash" ("gid");' 48 | db.prepare(create_index2).run() 49 | } 50 | 51 | module.exports = { db } 52 | -------------------------------------------------------------------------------- /src/dedupe: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const { argv } = require('yargs') 4 | .usage('Usage: ./$0 [options]') 5 | .alias('y', 'yes') 6 | .describe('yes', 'If duplicate items are found, delete them without asking') 7 | .alias('u', 'update') 8 | .describe('u', 'Do not use local cache, force to obtain source folder information online') 9 | .alias('S', 'service_account') 10 | .describe('S', 'Use service account to operate, provided that the sa authorized json file must be placed in the ./sa directory') 11 | .help('h') 12 | .alias('h', 'help') 13 | 14 | const { dedupe, validate_fid } = require('./src/gd') 15 | 16 | const [fid] = argv._ 17 | if (validate_fid(fid)) { 18 | const { update, service_account, yes } = argv 19 | dedupe({ fid, update, service_account, yes }).then(info => { 20 | if (!info) return 21 | const { file_count, folder_count } = info 22 | console.log('Succesfully Deleted,Total Number Of Deleted Files:', file_count, 'Number of Folders:', folder_count) 23 | }) 24 | } else { 25 | console.warn('FolderID is wrong or invalid') 26 | } 27 | -------------------------------------------------------------------------------- /src/gdurl.sqlite: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/src/gdurl.sqlite -------------------------------------------------------------------------------- /src/gdutils: -------------------------------------------------------------------------------- 1 | 2 | #!/bin/bash 3 | 4 | cecho() { 5 | local code="\033[" 6 | case "$1" in 7 | black | bk) color="${code}0;30m";; 8 | red | r) color="${code}1;31m";; 9 | green | g) color="${code}1;32m";; 10 | yellow | y) color="${code}1;33m";; 11 | blue | b) color="${code}1;34m";; 12 | purple | p) color="${code}1;35m";; 13 | cyan | c) color="${code}1;36m";; 14 | gray | gr) color="${code}0;37m";; 15 | *) local text="$1" 16 | esac 17 | [ -z "$text" ] && local text="$color$2${code}0m" 18 | echo -e "$text" 19 | } 20 | 21 | # ★★★Copy from source to destination★★★ 22 | copy() { 23 | cd /gdutils 24 | cecho r "Remember to add your SAs as Viewer in source TD and as a Contributor in Destination TD" 25 | echo "Provide Source Folder ID" 26 | read SRC 
27 | echo "Provide Destination Folder ID" 28 | read DST 29 | cecho r "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" 30 | echo && echo "Copy mode selected" 31 | 32 | cecho p "A.Create a New Folder with the same name in the destination" 33 | cecho p "B.Do not create a new Folder in the destination" && echo 34 | read -p " Choose A/B:" option 35 | 36 | case "$option" in 37 | A) 38 | cecho p "Use Cache?" 39 | read -p " Choose Y/N:" option1 40 | case "$option1" in 41 | Y) 42 | node copy $SRC $DST -S 43 | ;; 44 | N) 45 | node copy $SRC $DST -S -u 46 | ;; 47 | *) 48 | echo 49 | cecho r "Enter the Correct Option" 50 | ;; 51 | esac 52 | ;; 53 | B) 54 | cecho p "Use Cache?" 55 | read -p " Choose Y/N:" option1 56 | case "$option1" in 57 | Y) 58 | node copy $SRC $DST -S -D 59 | ;; 60 | N) 61 | node copy $SRC $DST -S -D -u 62 | ;; 63 | *) 64 | echo 65 | cecho r "Choose Correct Value" 66 | ;; 67 | esac 68 | ;; 69 | *) 70 | echo 71 | cecho r "Choose the Correct Option" 72 | ;; 73 | esac 74 | } 75 | # ★★★Calculate the size★★★ 76 | count() { 77 | cd /gdutils 78 | cecho r "Remember to add your SAs as Viewer (atleast) in source TD" 79 | echo "Provide Folder ID" 80 | read SRC 81 | cecho r "~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" 82 | echo && echo "Size mode selected" 83 | 84 | cecho p "A.Normal Size Info" 85 | cecho p "B.Create a html file with tree like pattern" 86 | cecho p "C.Create a snap2html file" && echo 87 | read -p "Choose A/B/C:" option 88 | 89 | case "$option" in 90 | A) 91 | cecho p "Use Cache?" 92 | read -p " Choose Y/N:" option2 93 | case "$option2" in 94 | Y) 95 | node count $SRC -S 96 | ;; 97 | N) 98 | node count $SRC -S -u 99 | ;; 100 | *) 101 | echo 102 | cecho r "Choose Correct Value" 103 | ;; 104 | esac 105 | ;; 106 | B) 107 | cecho p "Use Cache?" 108 | read -p " Choose Y/N:" option2 109 | case "$option2" in 110 | Y) 111 | node count $SRC -S -t tree -o /out/tree.html 112 | if [[ $? -eq 0 ]]; then 113 | printf " Your Tree Has been Generated, Go to %s to preview it" "$(cecho g "https://${APP_NAME}.herokuapp.com/out/tree.html")" 114 | else 115 | cecho r "Tree Generation Failed" 116 | fi 117 | ;; 118 | N) 119 | node count $SRC -S -u -t tree -o /out/tree.html 120 | if [[ $? -eq 0 ]]; then 121 | printf " Your Tree Has been Generated, Go to %s to preview it" "$(cecho g "https://${APP_NAME}.herokuapp.com/out/tree.html")" 122 | else 123 | cecho r "Tree Generation Failed" 124 | fi 125 | ;; 126 | *) 127 | echo 128 | cecho r "Choose Correct Value" 129 | ;; 130 | esac 131 | ;; 132 | C) 133 | cecho p "Use Cache?" 134 | read -p " Choose Y/N:" option2 135 | case "$option2" in 136 | Y) 137 | node count $SRC -S -t snap -o /out/snap.html 138 | if [[ $? -eq 0 ]]; then 139 | printf " Your Snap2HTML Has been Generated, Go to %s to preview it" "$(cecho g "https://${APP_NAME}.herokuapp.com/out/snap.html")" 140 | else 141 | cecho r "Snap2HTML Generation Failed" 142 | fi 143 | ;; 144 | N) 145 | node count $SRC -S -u -t snap -o /out/snap.html 146 | if [[ $? 
-eq 0 ]]; then 147 | printf " Your Snap2HTML Has been Generated, Go to %s to preview it" "$(cecho g "https://${APP_NAME}.herokuapp.com/out/snap.html")" 148 | else 149 | cecho r "Snap2HTML Generation Failed" 150 | fi 151 | ;; 152 | *) 153 | echo 154 | cecho r "Choose Correct Value" 155 | ;; 156 | esac 157 | ;; 158 | *) 159 | echo 160 | cecho r "Choose the Correct Option" 161 | ;; 162 | esac 163 | } 164 | # ★★★Dedupe The Folder★★★ 165 | dedupe() { 166 | cd /gdutils 167 | cecho r "Remember to add your SAs as Content manager (atleast) in source TD\n" 168 | echo "Provide Folder ID\n" 169 | read SRC 170 | node dedupe $SRC -S 171 | } 172 | 173 | 174 | # ★★★Running Gdutils★★★ 175 | printf "%s by %s, English version by %s, %s by %s\n" "$(cecho b GD-UTILS)" "$(cecho r iwestlin)" "$(cecho c Roshanconnor)" "$(cecho p Polished)" "$(cecho r Nenokkadine)" 176 | 177 | cecho g "1.Copy Files to your Teamdrive" 178 | echo "➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖" 179 | cecho g "2.Calculate Size" 180 | echo "➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖" 181 | cecho g "3.Remove Duplicate Files" 182 | echo "➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖➖" 183 | cecho y "4.EXIT" && echo 184 | read -p " Choose any Number [1-4]:" option 185 | 186 | case "$option" in 187 | 1) 188 | copy 189 | ;; 190 | 2) 191 | count 192 | ;; 193 | 3) 194 | dedupe 195 | ;; 196 | 4) 197 | exit 198 | ;; 199 | *) 200 | echo 201 | cecho r "Choose Correct Number from the Options" 202 | ;; 203 | esac 204 | -------------------------------------------------------------------------------- /src/md5: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | const { argv } = require('yargs') 4 | .usage('Usage: ./$0 [options]') 5 | .example('./$0 1ULY8ISgWSOVc0UrzejykVgXfVL_I4r75', 'Obtain all file information in https://drive.google.com/drive/folders/1ULY8ISgWSOVc0UrzejykVgXfVL_I4r75, and save the md5 value and ID of each file in the local database') 6 | .example('./$0 1ULY8ISgWSOVc0UrzejykVgXfVL_I4r75 -s 10mb', 'Obtain all file information in the specified directory, and save the md5 value and ID of files not less than 10MB in the local database') 7 | .alias('s', 'size') 8 | .describe('s', 'Do not fill in the md5 records of all files stored by default. 
If this value is set, files smaller than this size will be filtered out, and must end with b, such as 10mb') 9 | .alias('u', 'update') 10 | .describe('u', 'Force to get information online (ignoring whether there is a local cache)') 11 | .alias('N', 'not_teamdrive') 12 | .describe('N', 'If it is not a team disk link, you can add this parameter to improve interface query efficiency and reduce latency') 13 | .alias('S', 'service_account') 14 | .describe('S', 'Use service account to obtain file information, provided that the SA json file must be placed in the ./sa directory') 15 | .help('h') 16 | .alias('h', 'help') 17 | 18 | const { save_md5, validate_fid } = require('./src/gd') 19 | const [fid] = argv._ 20 | if (validate_fid(fid)) { 21 | const { update, size, not_teamdrive, service_account } = argv 22 | save_md5({fid, size, not_teamdrive, update, service_account}).catch(console.error) 23 | } else { 24 | console.warn('Directory ID is missing or incorrectly formatted') 25 | } 26 | -------------------------------------------------------------------------------- /src/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "GD-Utils", 3 | "version": "1.0.2", 4 | "description": "Google Drive Utils", 5 | "repository": "iwestlin/gd-utils", 6 | "keywords": [], 7 | "author": "viegg", 8 | "license": "ISC", 9 | "dependencies": { 10 | "@koa/router": "^10.0.0", 11 | "@viegg/axios": "^1.0.0", 12 | "better-sqlite3": "^7.4.1", 13 | "bytes": "^3.1.0", 14 | "cli-table3": "^0.6.0", 15 | "colors": "^1.4.0", 16 | "dayjs": "^1.10.5", 17 | "gtoken": "^5.3.0", 18 | "html-escaper": "^3.0.3", 19 | "https-proxy-agent": "^5.0.0", 20 | "koa": "^2.13.1", 21 | "koa-bodyparser": "^4.3.0", 22 | "p-limit": "^3.1.0", 23 | "prompts": "^2.4.1", 24 | "proxy-agent": "^4.0.1", 25 | "signal-exit": "^3.0.3", 26 | "yargs": "^17.0.1" 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/sa/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/src/sa/.keep -------------------------------------------------------------------------------- /src/sa/invalid/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/src/sa/invalid/.keep -------------------------------------------------------------------------------- /src/server.js: -------------------------------------------------------------------------------- 1 | const dayjs = require('dayjs') 2 | const Koa = require('koa') 3 | const bodyParser = require('koa-bodyparser') 4 | const router = require('./src/router') 5 | var fs = require('fs'); 6 | 7 | const app = new Koa() 8 | app.proxy = true 9 | 10 | app.use(catcher) 11 | app.use(bodyParser()) 12 | app.use(router.routes()) 13 | app.use(router.allowedMethods()) 14 | 15 | const G_SOCK = "/usr/gdutils.sock" 16 | 17 | if (fs.existsSync(G_SOCK)) { 18 | fs.unlinkSync(G_SOCK) 19 | } 20 | 21 | app.listen(G_SOCK, '0.0.0.0') 22 | 23 | async function catcher (ctx, next) { 24 | try { 25 | return await next() 26 | } catch (e) { 27 | console.error(e) 28 | ctx.status = 500 29 | ctx.body = e.message 30 | } 31 | } -------------------------------------------------------------------------------- /src/src/gd.js: -------------------------------------------------------------------------------- 1 | const fs 
= require('fs') 2 | const path = require('path') 3 | const dayjs = require('dayjs') 4 | const prompts = require('prompts') 5 | const pLimit = require('p-limit') 6 | const axios = require('@viegg/axios') 7 | const { GoogleToken } = require('gtoken') 8 | const handle_exit = require('signal-exit') 9 | const bytes = require('bytes') 10 | const { argv } = require('yargs') 11 | 12 | let { PARALLEL_LIMIT, EXCEED_LIMIT } = require('../config') 13 | PARALLEL_LIMIT = argv.l || argv.limit || PARALLEL_LIMIT 14 | EXCEED_LIMIT = EXCEED_LIMIT || 7 15 | 16 | const { AUTH, RETRY_LIMIT, TIMEOUT_BASE, TIMEOUT_MAX, LOG_DELAY, PAGE_SIZE, DEFAULT_TARGET } = require('../config') 17 | const { db } = require('../db') 18 | const { make_table, make_tg_table, make_html, summary } = require('./summary') 19 | const { gen_tree_html } = require('./tree') 20 | const { snap2html } = require('./snap2html') 21 | 22 | const FILE_EXCEED_MSG = 'The number of files on your team drive has exceeded the limit (400,000), Please move the folder that has not been copied to another team drive, and then run the copy command to resume the transfer' 23 | const FOLDER_TYPE = 'application/vnd.google-apps.folder' 24 | const sleep = ms => new Promise((resolve, reject) => setTimeout(resolve, ms)) 25 | 26 | const { https_proxy, http_proxy, all_proxy } = process.env 27 | const proxy_url = https_proxy || http_proxy || all_proxy 28 | 29 | let axins 30 | if (proxy_url) { 31 | console.log('Use Proxy:', proxy_url) 32 | let ProxyAgent 33 | try { 34 | ProxyAgent = require('proxy-agent') 35 | } catch (e) { // run npm i proxy-agent 36 | ProxyAgent = require('https-proxy-agent') 37 | } 38 | axins = axios.create({ httpsAgent: new ProxyAgent(proxy_url) }) 39 | } else { 40 | axins = axios.create({}) 41 | } 42 | 43 | const SA_LOCATION = argv.sa || 'sa' 44 | const SA_BATCH_SIZE = 1000 45 | const SA_FILES = fs.readdirSync(path.join(__dirname, '..', SA_LOCATION)).filter(v => v.endsWith('.json')) 46 | SA_FILES.flag = 0 47 | let SA_TOKENS = get_sa_batch() 48 | 49 | if (is_pm2()) { 50 | setInterval(() => { 51 | SA_FILES.flag = 0 52 | SA_TOKENS = get_sa_batch() 53 | }, 1000 * 3600 * 2) 54 | } 55 | 56 | // https://github.com/Leelow/is-pm2/blob/master/index.js 57 | function is_pm2 () { 58 | return 'PM2_HOME' in process.env || 'PM2_JSON_PROCESSING' in process.env || 'PM2_CLI' in process.env 59 | } 60 | 61 | function get_sa_batch () { 62 | const new_flag = SA_FILES.flag + SA_BATCH_SIZE 63 | const files = SA_FILES.slice(SA_FILES.flag, new_flag) 64 | SA_FILES.flag = new_flag 65 | return files.map(filename => { 66 | const gtoken = new GoogleToken({ 67 | keyFile: path.join(__dirname, '..', SA_LOCATION, filename), 68 | scope: ['https://www.googleapis.com/auth/drive'] 69 | }) 70 | return { gtoken, expires: 0 } 71 | }) 72 | } 73 | 74 | handle_exit((code, signal) => { 75 | if (code === 0 && !is_pm2()) return // normal exit in command line, do nothing 76 | const records = db.prepare('select id from task where status=?').all('copying') 77 | records.forEach(v => { 78 | db.prepare('update task set status=? 
where id=?').run('interrupt', v.id) 79 | }) 80 | records.length && console.log(records.length, 'task interrupted') 81 | db.close() 82 | }) 83 | 84 | async function save_md5 ({fid, size, not_teamdrive, update, service_account}) { 85 | let files = await walk_and_save({ fid, not_teamdrive, update, service_account }) 86 | files = files.filter(v => v.mimeType !== FOLDER_TYPE) 87 | if (typeof size !== 'number') size = bytes.parse(size) 88 | if (size) files = files.filter(v => v.size >= size) 89 | let cnt = 0 90 | files.forEach(file => { 91 | const {md5Checksum, id} = file 92 | if (!md5Checksum) return 93 | const record = db.prepare('SELECT * FROM hash WHERE gid = ?').get(id) 94 | if (record) return 95 | db.prepare('INSERT INTO hash (gid, md5) VALUES (?, ?)') 96 | .run(id, md5Checksum) 97 | cnt++ 98 | }) 99 | console.log('Added', cnt, 'Md5 records') 100 | } 101 | 102 | function get_gid_by_md5 (md5) { 103 | const records = db.prepare('select * from hash where md5=? and status=?').all(md5, 'normal') 104 | if (!records.length) return null 105 | // console.log('got existed md5 record in db:', md5) 106 | return get_random_element(records).gid 107 | } 108 | 109 | async function gen_count_body ({ fid, type, update, service_account, limit, tg }) { 110 | async function update_info () { 111 | const info = await walk_and_save({ fid, update, service_account, tg }) 112 | return [info, summary(info)] 113 | } 114 | 115 | function render_smy (smy, type, unfinished_number) { 116 | if (!smy) return 117 | if (['html', 'curl', 'tg'].includes(type)) { 118 | smy = (typeof smy === 'object') ? smy : JSON.parse(smy) 119 | const type_func = { 120 | html: make_html, 121 | curl: make_table, 122 | tg: make_tg_table 123 | } 124 | let result = type_func[type](smy, limit) 125 | if (unfinished_number) result += `\nNumber of Folders not read:${unfinished_number}` 126 | return result 127 | } else { // Default output json 128 | return (typeof smy === 'string') ? 
smy : JSON.stringify(smy) 129 | } 130 | } 131 | const file = await get_info_by_id(fid, service_account) 132 | if (file && file.mimeType !== FOLDER_TYPE) return render_smy(summary([file]), type) 133 | 134 | let info, smy 135 | const record = db.prepare('SELECT * FROM gd WHERE fid = ?').get(fid) 136 | if (!file && !record) { 137 | throw new Error(`Unable to access the link, please check if the link is valid and SA has the appropriate permissions:https://drive.google.com/drive/folders/${fid}`) 138 | } 139 | if (!record || update) { 140 | [info, smy] = await update_info() 141 | } 142 | if (type === 'all') { 143 | info = info || get_all_by_fid(fid) 144 | if (!info) { // Explain that the last statistical process was interrupted 145 | [info] = await update_info() 146 | } 147 | return info && JSON.stringify(info) 148 | } 149 | if (smy) return render_smy(smy, type) 150 | if (record && record.summary) return render_smy(record.summary, type) 151 | info = info || get_all_by_fid(fid) 152 | if (info) { 153 | smy = summary(info) 154 | } else { 155 | [info, smy] = await update_info() 156 | } 157 | return render_smy(smy, type, info.unfinished_number) 158 | } 159 | 160 | async function count ({ fid, update, sort, type, output, not_teamdrive, service_account }) { 161 | sort = (sort || '').toLowerCase() 162 | type = (type || '').toLowerCase() 163 | output = (output || '').toLowerCase() 164 | let out_str 165 | if (!update) { 166 | if (!type && !sort && !output) { 167 | const record = db.prepare('SELECT * FROM gd WHERE fid = ?').get(fid) 168 | const smy = record && record.summary && JSON.parse(record.summary) 169 | if (smy) return console.log(make_table(smy)) 170 | } 171 | const info = get_all_by_fid(fid) 172 | if (info) { 173 | console.log('cached data found in local database, cache time:', dayjs(info.mtime).format('YYYY-MM-DD HH:mm:ss')) 174 | if (type === 'snap') { 175 | const name = await get_name_by_id(fid, service_account) 176 | out_str = snap2html({ root: { name, id: fid }, data: info }) 177 | } else { 178 | out_str = get_out_str({ info, type, sort }) 179 | } 180 | if (output) return fs.writeFileSync(output, out_str) 181 | return console.log(out_str) 182 | } 183 | } 184 | const with_modifiedTime = type === 'snap' 185 | const result = await walk_and_save({ fid, not_teamdrive, update, service_account, with_modifiedTime }) 186 | if (type === 'snap') { 187 | const name = await get_name_by_id(fid, service_account) 188 | out_str = snap2html({ root: { name, id: fid }, data: result }) 189 | } else { 190 | out_str = get_out_str({ info: result, type, sort }) 191 | } 192 | if (output) { 193 | fs.writeFileSync(output, out_str) 194 | } else { 195 | console.log(out_str) 196 | } 197 | } 198 | 199 | function get_out_str ({ info, type, sort }) { 200 | const smy = summary(info, sort) 201 | let out_str 202 | if (type === 'tree') { 203 | out_str = gen_tree_html(info) 204 | } else if (type === 'html') { 205 | out_str = make_html(smy) 206 | } else if (type === 'json') { 207 | out_str = JSON.stringify(smy) 208 | } else if (type === 'all') { 209 | out_str = JSON.stringify(info) 210 | } else { 211 | out_str = make_table(smy) 212 | } 213 | return out_str 214 | } 215 | 216 | function get_all_by_fid (fid) { 217 | const record = db.prepare('SELECT * FROM gd WHERE fid = ?').get(fid) 218 | if (!record) return null 219 | const { info, subf } = record 220 | let result = JSON.parse(info) 221 | result = result.map(v => { 222 | v.parent = fid 223 | return v 224 | }) 225 | if (!subf) return result 226 | return recur(result, 
JSON.parse(subf)) 227 | 228 | function recur (result, subf) { 229 | if (!subf.length) return result 230 | const arr = subf.map(v => { 231 | const row = db.prepare('SELECT * FROM gd WHERE fid = ?').get(v) 232 | if (!row) return null // If the corresponding fid record is not found, it means that the process was interrupted last time or the folder was not read completely 233 | let info = JSON.parse(row.info) 234 | info = info.map(vv => { 235 | vv.parent = v 236 | return vv 237 | }) 238 | return { info, subf: JSON.parse(row.subf) } 239 | }) 240 | if (arr.some(v => v === null)) return null 241 | const sub_subf = [].concat(...arr.map(v => v.subf).filter(v => v)) 242 | result = result.concat(...arr.map(v => v.info)) 243 | return recur(result, sub_subf) 244 | } 245 | } 246 | 247 | async function walk_and_save ({ fid, not_teamdrive, update, service_account, with_modifiedTime, tg }) { 248 | let result = [] 249 | const unfinished_folders = [] 250 | const limit = pLimit(PARALLEL_LIMIT) 251 | 252 | if (update) { 253 | const exists = db.prepare('SELECT fid FROM gd WHERE fid = ?').get(fid) 254 | exists && db.prepare('UPDATE gd SET summary=? WHERE fid=?').run(null, fid) 255 | } 256 | 257 | const loop = setInterval(() => { 258 | const now = dayjs().format('HH:mm:ss') 259 | const message = `${now} | Copied ${result.length} | Ongoing ${limit.activeCount} | Pending ${limit.pendingCount}` 260 | print_progress(message) 261 | }, 1000) 262 | 263 | const tg_loop = tg && setInterval(() => { 264 | tg({ 265 | obj_count: result.length, 266 | processing_count: limit.activeCount, 267 | pending_count: limit.pendingCount 268 | }) 269 | }, 10 * 1000) 270 | 271 | async function recur (parent) { 272 | let files, should_save 273 | if (update) { 274 | files = await limit(() => ls_folder({ fid: parent, not_teamdrive, service_account, with_modifiedTime })) 275 | should_save = true 276 | } else { 277 | const record = db.prepare('SELECT * FROM gd WHERE fid = ?').get(parent) 278 | if (record) { 279 | files = JSON.parse(record.info) 280 | } else { 281 | files = await limit(() => ls_folder({ fid: parent, not_teamdrive, service_account, with_modifiedTime })) 282 | should_save = true 283 | } 284 | } 285 | if (!files) return 286 | if (files.unfinished) unfinished_folders.push(parent) 287 | should_save && save_files_to_db(parent, files) 288 | const folders = files.filter(v => v.mimeType === FOLDER_TYPE) 289 | files.forEach(v => v.parent = parent) 290 | result = result.concat(files) 291 | return Promise.all(folders.map(v => recur(v.id))) 292 | } 293 | try { 294 | await recur(fid) 295 | } catch (e) { 296 | console.error(e) 297 | } 298 | console.log('\nInfo obtained') 299 | unfinished_folders.length ? console.log('Unread FolderID:', JSON.stringify(unfinished_folders)) : console.log('All Folders have been read') 300 | clearInterval(loop) 301 | if (tg_loop) { 302 | clearInterval(tg_loop) 303 | tg({ 304 | obj_count: result.length, 305 | processing_count: limit.activeCount, 306 | pending_count: limit.pendingCount 307 | }) 308 | } 309 | const smy = unfinished_folders.length ? null : summary(result) 310 | smy && db.prepare('UPDATE gd SET summary=?, mtime=? 
WHERE fid=?').run(JSON.stringify(smy), Date.now(), fid) 311 | result.unfinished_number = unfinished_folders.length 312 | return result 313 | } 314 | 315 | function save_files_to_db (fid, files) { 316 | // Do not save folders whose listing did not finish; the next call to get_all_by_fid will then return null, and walk_and_save will be called again to retry this folder 317 | if (files.unfinished) return 318 | let subf = files.filter(v => v.mimeType === FOLDER_TYPE).map(v => v.id) 319 | subf = subf.length ? JSON.stringify(subf) : null 320 | const exists = db.prepare('SELECT fid FROM gd WHERE fid = ?').get(fid) 321 | if (exists) { 322 | db.prepare('UPDATE gd SET info=?, subf=?, mtime=? WHERE fid=?') 323 | .run(JSON.stringify(files), subf, Date.now(), fid) 324 | } else { 325 | db.prepare('INSERT INTO gd (fid, info, subf, ctime) VALUES (?, ?, ?, ?)') 326 | .run(fid, JSON.stringify(files), subf, Date.now()) 327 | } 328 | } 329 | 330 | async function ls_folder ({ fid, not_teamdrive, service_account, with_modifiedTime }) { 331 | let files = [] 332 | let pageToken 333 | const search_all = { includeItemsFromAllDrives: true, supportsAllDrives: true } 334 | const params = ((fid === 'root') || not_teamdrive) ? {} : search_all 335 | params.q = `'${fid}' in parents and trashed = false` 336 | params.orderBy = 'folder,name desc' 337 | params.fields = 'nextPageToken, files(id, name, mimeType, size, md5Checksum)' 338 | if (with_modifiedTime) { 339 | params.fields = 'nextPageToken, files(id, name, mimeType, modifiedTime, size, md5Checksum)' 340 | } 341 | params.pageSize = Math.min(PAGE_SIZE, 1000) 342 | // const use_sa = (fid !== 'root') && (service_account || !not_teamdrive) // Without parameters, use SA by default 343 | const use_sa = (fid !== 'root') && service_account 344 | // const headers = await gen_headers(use_sa) 345 | // For folders with a large number of subfolders (e.g. 1ctMwpIaBg8S1lrZDxdynLXJpMsm5guAl), the access_token may expire before listing finishes 346 | // Because nextPageToken is required to fetch the next page, requests cannot be parallelized; testing showed each request for 1000 files usually takes more than 20 seconds 347 | const gtoken = use_sa && (await get_sa_token()).gtoken 348 | do { 349 | if (pageToken) params.pageToken = pageToken 350 | let url = 'https://www.googleapis.com/drive/v3/files' 351 | url += '?' + params_to_query(params) 352 | let retry = 0 353 | let data 354 | const payload = { timeout: TIMEOUT_BASE } 355 | while (!data && (retry < RETRY_LIMIT)) { 356 | const access_token = gtoken ? (await gtoken.getToken()).access_token : (await get_access_token()) 357 | const headers = { authorization: 'Bearer ' + access_token } 358 | payload.headers = headers 359 | try { 360 | data = (await axins.get(url, payload)).data 361 | } catch (err) { 362 | handle_error(err) 363 | retry++ 364 | payload.timeout = Math.min(payload.timeout * 2, TIMEOUT_MAX) 365 | } 366 | } 367 | if (!data) { 368 | console.error('Folder was not read completely, parameters:', params) 369 | files.unfinished = true 370 | return files 371 | } 372 | files = files.concat(data.files) 373 | argv.sfl && console.log('files.length:', files.length) 374 | pageToken = data.nextPageToken 375 | } while (pageToken) 376 | 377 | return files 378 | } 379 | 380 | async function gen_headers (use_sa) { 381 | // use_sa = use_sa && SA_TOKENS.length 382 | const access_token = use_sa ? 
(await get_sa_token()).access_token : (await get_access_token()) 383 | return { authorization: 'Bearer ' + access_token } 384 | } 385 | 386 | function params_to_query (data) { 387 | const ret = [] 388 | for (let d in data) { 389 | ret.push(encodeURIComponent(d) + '=' + encodeURIComponent(data[d])) 390 | } 391 | return ret.join('&') 392 | } 393 | 394 | async function get_access_token () { 395 | const { expires, access_token, client_id, client_secret, refresh_token } = AUTH 396 | if (expires > Date.now()) return access_token 397 | 398 | const url = 'https://www.googleapis.com/oauth2/v4/token' 399 | const headers = { 'Content-Type': 'application/x-www-form-urlencoded' } 400 | const config = { headers } 401 | const params = { client_id, client_secret, refresh_token, grant_type: 'refresh_token' } 402 | const { data } = await axins.post(url, params_to_query(params), config) 403 | // console.log('Got new token:', data) 404 | AUTH.access_token = data.access_token 405 | AUTH.expires = Date.now() + 1000 * data.expires_in 406 | return data.access_token 407 | } 408 | 409 | // get_sa_token().then(console.log).catch(console.error) 410 | async function get_sa_token () { 411 | if (!SA_TOKENS.length) SA_TOKENS = get_sa_batch() 412 | while (SA_TOKENS.length) { 413 | const tk = get_random_element(SA_TOKENS) 414 | try { 415 | return await real_get_sa_token(tk) 416 | } catch (e) { 417 | console.warn('SA failed to get access_token:', e.message) 418 | SA_TOKENS = SA_TOKENS.filter(v => v.gtoken !== tk.gtoken) 419 | if (!SA_TOKENS.length) SA_TOKENS = get_sa_batch() 420 | } 421 | } 422 | throw new Error('No SA available') 423 | } 424 | 425 | async function real_get_sa_token (el) { 426 | const { value, expires, gtoken } = el 427 | // gtoken is passed out so that an exhausted account can be filtered out accordingly 428 | if (Date.now() < expires) return { access_token: value, gtoken } 429 | const { access_token, expires_in } = await gtoken.getToken({ forceRefresh: true }) 430 | el.value = access_token 431 | el.expires = Date.now() + 1000 * (expires_in - 60 * 5) // treat the token as expired 5 minutes early 432 | return { access_token, gtoken } 433 | } 434 | 435 | function get_random_element (arr) { 436 | return arr[~~(arr.length * Math.random())] 437 | } 438 | 439 | function validate_fid (fid) { 440 | if (!fid) return false 441 | fid = String(fid) 442 | const whitelist = ['root', 'appDataFolder', 'photos'] 443 | if (whitelist.includes(fid)) return true 444 | if (fid.length < 10 || fid.length > 100) return false 445 | const reg = /^[a-zA-Z0-9_-]+$/ 446 | return fid.match(reg) 447 | } 448 | 449 | async function create_folder (name, parent, use_sa, limit) { 450 | let url = `https://www.googleapis.com/drive/v3/files` 451 | const params = { supportsAllDrives: true } 452 | url += '?' 
+ params_to_query(params) 453 | const post_data = { 454 | name, 455 | mimeType: FOLDER_TYPE, 456 | parents: [parent] 457 | } 458 | let retry = 0 459 | let err_message 460 | while (retry < RETRY_LIMIT) { 461 | try { 462 | const headers = await gen_headers(use_sa) 463 | return (await axins.post(url, post_data, { headers })).data 464 | } catch (err) { 465 | err_message = err.message 466 | retry++ 467 | handle_error(err) 468 | const data = err && err.response && err.response.data 469 | const message = data && data.error && data.error.message 470 | if (message && message.toLowerCase().includes('file limit')) { 471 | if (limit) limit.clearQueue() 472 | throw new Error(FILE_EXCEED_MSG) 473 | } 474 | console.log('Creating Folder and Retrying:', name, 'No of retries:', retry) 475 | } 476 | } 477 | throw new Error(err_message + ' Folder Name:' + name) 478 | } 479 | 480 | async function get_name_by_id (fid, use_sa) { 481 | const info = await get_info_by_id(fid, use_sa) 482 | return info ? info.name : fid 483 | } 484 | 485 | async function get_info_by_id (fid, use_sa) { 486 | let url = `https://www.googleapis.com/drive/v3/files/${fid}` 487 | let params = { 488 | includeItemsFromAllDrives: true, 489 | supportsAllDrives: true, 490 | corpora: 'allDrives', 491 | fields: 'id, name, size, parents, mimeType, modifiedTime' 492 | } 493 | url += '?' + params_to_query(params) 494 | let retry = 0 495 | while (retry < RETRY_LIMIT) { 496 | try { 497 | const headers = await gen_headers(use_sa) 498 | const { data } = await axins.get(url, { headers }) 499 | return data 500 | } catch (e) { 501 | retry++ 502 | handle_error(e) 503 | } 504 | } 505 | // throw new Error('Unable to access this FolderID:' + fid) 506 | } 507 | 508 | async function user_choose () { 509 | const answer = await prompts({ 510 | type: 'select', 511 | name: 'value', 512 | message: 'Do you wish to resume?', 513 | choices: [ 514 | { title: 'Continue', description: 'Resume the transfer', value: 'continue' }, 515 | { title: 'Restart', description: 'Restart the process', value: 'restart' }, 516 | { title: 'Exit', description: 'Exit', value: 'exit' } 517 | ], 518 | initial: 0 519 | }) 520 | return answer.value 521 | } 522 | 523 | async function copy ({ source, target, name, min_size, update, not_teamdrive, service_account, dncnr, is_server }) { 524 | target = target || DEFAULT_TARGET 525 | if (!target) throw new Error('Destination ID cannot be empty') 526 | 527 | const file = await get_info_by_id(source, service_account) 528 | if (!file) return console.error(`Unable to access the link, please check if the link is valid and SA has the appropriate permissions:https://drive.google.com/drive/folders/${source}`) 529 | if (file && file.mimeType !== FOLDER_TYPE) { 530 | if (argv.hash_server === 'local') source = get_gid_by_md5(file.md5Checksum) 531 | return copy_file(source, target, service_account).catch(console.error) 532 | } 533 | 534 | const record = db.prepare('select id, status from task where source=? and target=?').get(source, target) 535 | if (record && record.status === 'copying') return console.log('This Task is already running. Force Quit') 536 | 537 | try { 538 | return await real_copy({ source, target, name, min_size, update, dncnr, not_teamdrive, service_account, is_server }) 539 | } catch (err) { 540 | console.error('Error copying folder', err) 541 | const record = db.prepare('select id, status from task where source=? and target=?').get(source, target) 542 | if (record) db.prepare('update task set status=? 
where id=?').run('error', record.id) 543 | } 544 | } 545 | 546 | // Known issue: if the user interrupts the process with ctrl+c, requests already in flight will not be recorded in the local database even if they complete, so duplicate files (or folders) may be produced on resume 547 | async function real_copy ({ source, target, name, min_size, update, dncnr, not_teamdrive, service_account, is_server }) { 548 | async function get_new_root () { 549 | if (dncnr) return { id: target } 550 | if (name) { 551 | return create_folder(name, target, service_account) 552 | } else { 553 | const file = await get_info_by_id(source, service_account) 554 | if (!file) throw new Error(`Unable to access the link, please check if the link is valid and the SA has the appropriate permissions: https://drive.google.com/drive/folders/${source}`) 555 | return create_folder(file.name, target, service_account) 556 | } 557 | } 558 | 559 | const record = db.prepare('select * from task where source=? and target=?').get(source, target) 560 | if (record) { 561 | const copied = db.prepare('select fileid from copied where taskid=?').all(record.id).map(v => v.fileid) 562 | const choice = (is_server || argv.yes) ? 'continue' : await user_choose() 563 | if (choice === 'exit') { 564 | return console.log('exit the program') 565 | } else if (choice === 'continue') { 566 | let { mapping } = record 567 | const old_mapping = {} 568 | const copied_ids = {} 569 | copied.forEach(id => copied_ids[id] = true) 570 | mapping = mapping.trim().split('\n').map(line => line.split(' ')) 571 | const root = mapping[0][1] 572 | mapping.forEach(arr => old_mapping[arr[0]] = arr[1]) 573 | db.prepare('update task set status=? where id=?').run('copying', record.id) 574 | const arr = await walk_and_save({ fid: source, update, not_teamdrive, service_account }) 575 | let files = arr.filter(v => v.mimeType !== FOLDER_TYPE).filter(v => !copied_ids[v.id]) 576 | if (min_size) files = files.filter(v => v.size >= min_size) 577 | const folders = arr.filter(v => v.mimeType === FOLDER_TYPE) 578 | const all_mapping = await create_folders({ 579 | old_mapping, 580 | source, 581 | folders, 582 | service_account, 583 | root, 584 | task_id: record.id 585 | }) 586 | await copy_files({ files, service_account, root, mapping: all_mapping, task_id: record.id }) 587 | db.prepare('update task set status=?, ftime=? where id=?').run('finished', Date.now(), record.id) 588 | return { id: root, task_id: record.id } 589 | } else if (choice === 'restart') { 590 | const new_root = await get_new_root() 591 | const root_mapping = source + ' ' + new_root.id + '\n' 592 | db.prepare('update task set status=?, mapping=? 
where id=?').run('copying', root_mapping, record.id) 593 | db.prepare('delete from copied where taskid=?').run(record.id) 594 | // const arr = await walk_and_save({ fid: source, update: true, not_teamdrive, service_account }) 595 | const arr = await walk_and_save({ fid: source, update, not_teamdrive, service_account }) 596 | 597 | let files = arr.filter(v => v.mimeType !== FOLDER_TYPE) 598 | if (min_size) files = files.filter(v => v.size >= min_size) 599 | const folders = arr.filter(v => v.mimeType === FOLDER_TYPE) 600 | console.log('Number of folders to be copied:', folders.length) 601 | console.log('Number of files to be copied:', files.length) 602 | const mapping = await create_folders({ 603 | source, 604 | folders, 605 | service_account, 606 | root: new_root.id, 607 | task_id: record.id 608 | }) 609 | await copy_files({ files, mapping, service_account, root: new_root.id, task_id: record.id }) 610 | db.prepare('update task set status=?, ftime=? where id=?').run('finished', Date.now(), record.id) 611 | return { id: new_root.id, task_id: record.id } 612 | } else { 613 | // ctrl+c Exit 614 | return console.log('Exit') 615 | } 616 | } else { 617 | const new_root = await get_new_root() 618 | const root_mapping = source + ' ' + new_root.id + '\n' 619 | const { lastInsertRowid } = db.prepare('insert into task (source, target, status, mapping, ctime) values (?, ?, ?, ?, ?)').run(source, target, 'copying', root_mapping, Date.now()) 620 | const arr = await walk_and_save({ fid: source, update, not_teamdrive, service_account }) 621 | let files = arr.filter(v => v.mimeType !== FOLDER_TYPE) 622 | if (min_size) files = files.filter(v => v.size >= min_size) 623 | const folders = arr.filter(v => v.mimeType === FOLDER_TYPE) 624 | console.log('Number of folders to be copied:', folders.length) 625 | console.log('Number of files to be copied:', files.length) 626 | const mapping = await create_folders({ 627 | source, 628 | folders, 629 | service_account, 630 | root: new_root.id, 631 | task_id: lastInsertRowid 632 | }) 633 | await copy_files({ files, mapping, service_account, root: new_root.id, task_id: lastInsertRowid }) 634 | db.prepare('update task set status=?, ftime=? where id=?').run('finished', Date.now(), lastInsertRowid) 635 | return { id: new_root.id, task_id: lastInsertRowid } 636 | } 637 | } 638 | 639 | async function copy_files ({ files, mapping, service_account, root, task_id }) { 640 | if (!files.length) return 641 | console.log('\nStarted copying files, total:', files.length) 642 | 643 | const loop = setInterval(() => { 644 | const now = dayjs().format('HH:mm:ss') 645 | const message = `${now} | Number of files copied ${count} | ongoing ${concurrency} | Number of Files Pending ${files.length}` 646 | print_progress(message) 647 | }, 1000) 648 | 649 | let count = 0 650 | let concurrency = 0 651 | let err 652 | do { 653 | if (err) { 654 | clearInterval(loop) 655 | files = null 656 | throw err 657 | } 658 | if (concurrency >= PARALLEL_LIMIT) { 659 | await sleep(100) 660 | continue 661 | } 662 | const file = files.shift() 663 | if (!file) { 664 | await sleep(1000) 665 | continue 666 | } 667 | concurrency++ 668 | let { id, parent, md5Checksum } = file 669 | if (argv.hash_server === 'local') id = get_gid_by_md5(md5Checksum) || id 670 | const target = mapping[parent] || root 671 | const use_sa = (id !== file.id) ? 
true : service_account //If the same md5 record is found in the local database, use sa copy 672 | copy_file(id, target, use_sa, null, task_id).then(new_file => { 673 | if (new_file) { 674 | count++ 675 | db.prepare('INSERT INTO copied (taskid, fileid) VALUES (?, ?)').run(task_id, file.id) 676 | } 677 | }).catch(e => { 678 | err = e 679 | }).finally(() => { 680 | concurrency-- 681 | }) 682 | } while (concurrency || files.length) 683 | clearInterval(loop) 684 | if (err) throw err 685 | // const limit = pLimit(PARALLEL_LIMIT) 686 | // let count = 0 687 | // const loop = setInterval(() => { 688 | // const now = dayjs().format('HH:mm:ss') 689 | // const {activeCount, pendingCount} = limit 690 | // const message = `${now} | Number of files copied ${count} | Ongoing ${activeCount} | Pending ${pendingCount}` 691 | // print_progress(message) 692 | // }, 1000) 693 | // May cause excessive memory usage and be forced to exit by node 694 | // return Promise.all(files.map(async file => { 695 | // const { id, parent } = file 696 | // const target = mapping[parent] || root 697 | // const new_file = await limit(() => copy_file(id, target, service_account, limit, task_id)) 698 | // if (new_file) { 699 | // count++ 700 | // db.prepare('INSERT INTO copied (taskid, fileid) VALUES (?, ?)').run(task_id, id) 701 | // } 702 | // })).finally(() => clearInterval(loop)) 703 | } 704 | 705 | async function copy_file (id, parent, use_sa, limit, task_id) { 706 | let url = `https://www.googleapis.com/drive/v3/files/${id}/copy` 707 | let params = { supportsAllDrives: true } 708 | url += '?' + params_to_query(params) 709 | const config = {} 710 | let retry = 0 711 | while (retry < RETRY_LIMIT) { 712 | let gtoken 713 | if (use_sa) { 714 | const temp = await get_sa_token() 715 | gtoken = temp.gtoken 716 | config.headers = { authorization: 'Bearer ' + temp.access_token } 717 | } else { 718 | config.headers = await gen_headers() 719 | } 720 | try { 721 | const { data } = await axins.post(url, { parents: [parent] }, config) 722 | if (gtoken) gtoken.exceed_count = 0 723 | return data 724 | } catch (err) { 725 | retry++ 726 | handle_error(err) 727 | const data = err && err.response && err.response.data 728 | const message = data && data.error && data.error.message 729 | if (message && message.toLowerCase().includes('file limit')) { 730 | if (limit) limit.clearQueue() 731 | if (task_id) db.prepare('update task set status=? where id=?').run('error', task_id) 732 | throw new Error(FILE_EXCEED_MSG) 733 | } 734 | if (!use_sa && message && message.toLowerCase().includes('rate limit')) { 735 | throw new Error('Personal Drive Limit:' + message) 736 | } 737 | // if (use_sa && message && message.toLowerCase().includes('user rate limit')) { 738 | // if (retry >= RETRY_LIMIT) throw new Error(`This resource triggers a userRateLimitExceeded error for ${EXCEED_LIMIT} consecutive times and stops copying`) 739 | // if (gtoken.exceed_count >= EXCEED_LIMIT) { 740 | // SA_TOKENS = SA_TOKENS.filter(v => v.gtoken !== gtoken) 741 | // if (!SA_TOKENS.length) SA_TOKENS = get_sa_batch() 742 | // console.log(`This account has triggered the daily usage limit${EXCEED_LIMIT} consecutive times, the remaining amount of SA available in this batch:`, SA_TOKENS.length) 743 | // } else { 744 | // console.log('This account triggers its daily usage limit and has been marked. 
If the next request is normal, it will be unmarked, otherwise the SA will be removed') 745 | // if (gtoken.exceed_count) { 746 | // gtoken.exceed_count++ 747 | // } else { 748 | // gtoken.exceed_count = 1 749 | // } 750 | // } 751 | // } 752 | } 753 | } 754 | if (use_sa && !SA_TOKENS.length) { 755 | if (limit) limit.clearQueue() 756 | if (task_id) db.prepare('update task set status=? where id=?').run('error', task_id) 757 | throw new Error('All SA are exhausted') 758 | } else { 759 | console.warn('File creation failed,Fileid: ' + id) 760 | } 761 | } 762 | 763 | async function create_folders ({ source, old_mapping, folders, root, task_id, service_account }) { 764 | if (argv.dncf) return {} // do not copy folders 765 | if (!Array.isArray(folders)) throw new Error('folders must be Array:' + folders) 766 | const mapping = old_mapping || {} 767 | mapping[source] = root 768 | if (!folders.length) return mapping 769 | 770 | const missed_folders = folders.filter(v => !mapping[v.id]) 771 | console.log('Start copying folders, total:', missed_folders.length) 772 | const limit = pLimit(PARALLEL_LIMIT) 773 | let count = 0 774 | let same_levels = folders.filter(v => v.parent === folders[0].parent) 775 | 776 | const loop = setInterval(() => { 777 | const now = dayjs().format('HH:mm:ss') 778 | const message = `${now} | Folders Created ${count} | Ongoing ${limit.activeCount} | Pending ${limit.pendingCount}` 779 | print_progress(message) 780 | }, 1000) 781 | 782 | while (same_levels.length) { 783 | const same_levels_missed = same_levels.filter(v => !mapping[v.id]) 784 | await Promise.all(same_levels_missed.map(async v => { 785 | try { 786 | const { name, id, parent } = v 787 | const target = mapping[parent] || root 788 | const new_folder = await limit(() => create_folder(name, target, service_account, limit)) 789 | count++ 790 | mapping[id] = new_folder.id 791 | const mapping_record = id + ' ' + new_folder.id + '\n' 792 | db.prepare('update task set mapping = mapping || ? 
where id=?').run(mapping_record, task_id) 793 | } catch (e) { 794 | if (e.message === FILE_EXCEED_MSG) { 795 | clearInterval(loop) 796 | throw new Error(FILE_EXCEED_MSG) 797 | } 798 | console.error('Error creating Folder:', e.message) 799 | } 800 | })) 801 | // folders = folders.filter(v => !mapping[v.id]) 802 | same_levels = [].concat(...same_levels.map(v => folders.filter(vv => vv.parent === v.id))) 803 | } 804 | 805 | clearInterval(loop) 806 | return mapping 807 | } 808 | 809 | function find_dupe (arr) { 810 | const files = arr.filter(v => v.mimeType !== FOLDER_TYPE) 811 | const folders = arr.filter(v => v.mimeType === FOLDER_TYPE) 812 | const exists = {} 813 | const dupe_files = [] 814 | const dupe_folder_keys = {} 815 | for (const folder of folders) { 816 | const { parent, name } = folder 817 | const key = parent + '|' + name 818 | if (exists[key]) { 819 | dupe_folder_keys[key] = true 820 | } else { 821 | exists[key] = true 822 | } 823 | } 824 | const dupe_empty_folders = folders.filter(folder => { 825 | const { parent, name } = folder 826 | const key = parent + '|' + name 827 | return dupe_folder_keys[key] 828 | }).filter(folder => { 829 | const has_child = arr.some(v => v.parent === folder.id) 830 | return !has_child 831 | }) 832 | for (const file of files) { 833 | const { md5Checksum, parent, name, size } = file 834 | // Determine duplicates by file location (parent) and md5 value 835 | const key = parent + '|' + md5Checksum 836 | if (exists[key]) { 837 | dupe_files.push(file) 838 | } else { 839 | exists[key] = true 840 | } 841 | } 842 | return dupe_files.concat(dupe_empty_folders) 843 | } 844 | 845 | async function confirm_dedupe ({ file_number, folder_number }) { 846 | const answer = await prompts({ 847 | type: 'select', 848 | name: 'value', 849 | message: `Duplicate files detected: ${file_number}, empty folders detected: ${folder_number}. Delete them?`, 850 | choices: [ 851 | { title: 'Yes', description: 'Confirm deletion', value: 'yes' }, 852 | { title: 'No', description: 'Do not delete', value: 'no' } 853 | ], 854 | initial: 0 855 | }) 856 | return answer.value 857 | } 858 | 859 | // The SA needs to be a manager of the Team Drive where the source folder is located 860 | async function mv_file ({ fid, new_parent, service_account }) { 861 | const file = await get_info_by_id(fid, service_account) 862 | if (!file) return 863 | const removeParents = file.parents[0] 864 | let url = `https://www.googleapis.com/drive/v3/files/${fid}` 865 | const params = { 866 | removeParents, 867 | supportsAllDrives: true, 868 | addParents: new_parent 869 | } 870 | url += '?' 
+ params_to_query(params) 871 | const headers = await gen_headers(service_account) 872 | return axins.patch(url, {}, { headers }) 873 | } 874 | 875 | // To move files or folders to the recycle bin, SA should be content manager or above 876 | async function trash_file ({ fid, service_account }) { 877 | const url = `https://www.googleapis.com/drive/v3/files/${fid}?supportsAllDrives=true` 878 | const headers = await gen_headers(service_account) 879 | return axins.patch(url, { trashed: true }, { headers }) 880 | } 881 | 882 | // Delete files or folders directly without entering the recycle bin, requires SA as manager 883 | async function rm_file ({ fid, service_account }) { 884 | const headers = await gen_headers(service_account) 885 | let retry = 0 886 | const url = `https://www.googleapis.com/drive/v3/files/${fid}?supportsAllDrives=true` 887 | while (retry < RETRY_LIMIT) { 888 | try { 889 | return await axins.delete(url, { headers }) 890 | } catch (err) { 891 | retry++ 892 | handle_error(err) 893 | console.log('retrying to Delete, retry count', retry) 894 | } 895 | } 896 | } 897 | 898 | async function dedupe ({ fid, update, service_account, yes }) { 899 | let arr 900 | if (!update) { 901 | const info = get_all_by_fid(fid) 902 | if (info) { 903 | console.log('Locally cached data Found, cache time:', dayjs(info.mtime).format('YYYY-MM-DD HH:mm:ss')) 904 | arr = info 905 | } 906 | } 907 | arr = arr || await walk_and_save({ fid, update, service_account }) 908 | const dupes = find_dupe(arr) 909 | const folder_number = dupes.filter(v => v.mimeType === FOLDER_TYPE).length 910 | const file_number = dupes.length - folder_number 911 | const choice = yes || await confirm_dedupe({ file_number, folder_number }) 912 | if (choice === 'no') { 913 | return console.log('Exit') 914 | } else if (!choice) { 915 | return // ctrl+c 916 | } 917 | const limit = pLimit(PARALLEL_LIMIT) 918 | let folder_count = 0 919 | let file_count = 0 920 | await Promise.all(dupes.map(async v => { 921 | try { 922 | await limit(() => trash_file({ fid: v.id, service_account })) 923 | if (v.mimeType === FOLDER_TYPE) { 924 | console.log('Folder successfully deleted', v.name) 925 | folder_count++ 926 | } else { 927 | console.log('File successfully deleted', v.name) 928 | file_count++ 929 | } 930 | } catch (e) { 931 | console.log('Failed to delete', v) 932 | handle_error(e) 933 | } 934 | })) 935 | return { file_count, folder_count } 936 | } 937 | 938 | function handle_error (err) { 939 | const data = err && err.response && err.response.data 940 | if (data) { 941 | const message = data.error && data.error.message 942 | if (message && message.toLowerCase().includes('rate limit') && !argv.verbose) return 943 | console.error(JSON.stringify(data)) 944 | } else { 945 | if (!err.message.includes('timeout') || argv.verbose) console.error(err.message) 946 | } 947 | } 948 | 949 | function print_progress (msg) { 950 | if (process.stdout.cursorTo) { 951 | process.stdout.cursorTo(0) 952 | process.stdout.write(msg + ' ') 953 | } else { 954 | console.log(msg) 955 | } 956 | } 957 | 958 | module.exports = { ls_folder, count, validate_fid, copy, dedupe, copy_file, gen_count_body, real_copy, get_name_by_id, get_info_by_id, get_access_token, get_sa_token, walk_and_save, save_md5} 959 | -------------------------------------------------------------------------------- /src/src/router.js: -------------------------------------------------------------------------------- 1 | const Router = require('@koa/router') 2 | 3 | const { db } = require('../db') 4 | const { 
validate_fid, gen_count_body } = require('./gd') 5 | const { send_count, send_help, send_choice, send_task_info, sm, extract_fid, extract_from_text, reply_cb_query, tg_copy, send_all_tasks, send_bm_help, get_target_by_alias, send_all_bookmarks, set_bookmark, unset_bookmark, clear_tasks, send_task_help, rm_task } = require('./tg') 6 | 7 | const { AUTH, ROUTER_PASSKEY, TG_IPLIST } = require('../config') 8 | const { tg_whitelist } = AUTH 9 | 10 | const COPYING_FIDS = {} 11 | const counting = {} 12 | const router = new Router() 13 | 14 | function is_pm2 () { 15 | return 'PM2_HOME' in process.env || 'PM2_JSON_PROCESSING' in process.env || 'PM2_CLI' in process.env 16 | } 17 | 18 | function is_int (n) { 19 | return n === parseInt(n) 20 | } 21 | 22 | router.get('/gutils/api/gdurl/count', async ctx => { 23 | if (!ROUTER_PASSKEY) return ctx.body = 'gd-utils Successfully started' 24 | const { query, headers } = ctx.request 25 | let { fid, type, update, passkey } = query 26 | if (passkey !== ROUTER_PASSKEY) return ctx.body = 'invalid passkey' 27 | if (!validate_fid(fid)) throw new Error('Invalid FolderID') 28 | 29 | let ua = headers['user-agent'] || '' 30 | ua = ua.toLowerCase() 31 | type = (type || '').toLowerCase() 32 | // todo type=tree 33 | if (!type) { 34 | if (ua.includes('curl')) { 35 | type = 'curl' 36 | } else if (ua.includes('mozilla')) { 37 | type = 'html' 38 | } else { 39 | type = 'json' 40 | } 41 | } 42 | if (type === 'html') { 43 | ctx.set('Content-Type', 'text/html; charset=utf-8') 44 | } else if (['json', 'all'].includes(type)) { 45 | ctx.set('Content-Type', 'application/json; charset=UTF-8') 46 | } 47 | ctx.body = await gen_count_body({ fid, type, update, service_account: true }) 48 | }) 49 | 50 | router.post('/gutils/api/gdurl/tgbot', async ctx => { 51 | const { body } = ctx.request 52 | console.log('ctx.ip', ctx.ip) // Could be used to allow only the IPs of Telegram's servers 53 | console.log('tg message:', JSON.stringify(body, null, ' ')) 54 | if (TG_IPLIST && !TG_IPLIST.includes(ctx.ip)) return ctx.body = 'invalid ip' 55 | ctx.body = '' // Release the connection early 56 | const message = body.message || body.edited_message 57 | const message_str = JSON.stringify(message) 58 | 59 | const { callback_query } = body 60 | if (callback_query) { 61 | const { id, message, data } = callback_query 62 | const chat_id = callback_query.from.id 63 | const [action, fid, target] = data.split(' ').filter(v => v) 64 | if (action === 'count') { 65 | if (counting[fid]) return sm({ chat_id, text: fid + ' Counting, please wait a moment' }) 66 | counting[fid] = true 67 | send_count({ fid, chat_id }).catch(err => { 68 | console.error(err) 69 | sm({ chat_id, text: fid + ' Stats Failed:' + err.message }) 70 | }).finally(() => { 71 | delete counting[fid] 72 | }) 73 | } else if (action === 'copy') { 74 | if (COPYING_FIDS[fid + target]) return sm({ chat_id, text: 'Processing copy command with the same source and destination' }) 75 | COPYING_FIDS[fid + target] = true 76 | tg_copy({ fid, target: get_target_by_alias(target), chat_id }).then(task_id => { 77 | is_int(task_id) && sm({ chat_id, text: `Clone Started For Task ID: ${task_id}\nType /task ${task_id} to check the progress` }) 78 | }).finally(() => COPYING_FIDS[fid + target] = false) 79 | } else if (action === 'update') { 80 | if (counting[fid]) return sm({ chat_id, text: fid + ' Counting, please wait a moment' }) 81 | counting[fid] = true 82 | send_count({ fid, chat_id, update: true }).catch(err => { 83 | console.error(err) 84 | sm({ chat_id, text: fid + ' Stats 
Failed:' + err.message }) 85 | }).finally(() => { 86 | delete counting[fid] 87 | }) 88 | } else if (action === 'clear_button') { 89 | const { message_id, text } = message || {} 90 | if (message_id) sm({ chat_id, message_id, text, parse_mode: 'HTML' }, 'editMessageText') 91 | } 92 | return reply_cb_query({ id, data }).catch(console.error) 93 | } 94 | 95 | const chat_id = message && message.chat && message.chat.id 96 | const text = (message && message.text && message.text.trim()) || '' 97 | let username = message && message.from && message.from.username 98 | username = username && String(username).toLowerCase() 99 | let user_id = message && message.from && message.from.id 100 | user_id = user_id && String(user_id).toLowerCase() 101 | if (!chat_id || !tg_whitelist.some(v => { 102 | v = String(v).toLowerCase() 103 | return v === username || v === user_id 104 | })) { 105 | chat_id && sm({ chat_id, text: 'You are not supposed to Message me you idiot, go back to the hole you came from' }) 106 | return console.warn('Received a request from a non-whitelisted user') 107 | } 108 | 109 | const fid = extract_fid(text) || extract_from_text(text) || extract_from_text(message_str) 110 | const no_fid_commands = ['/task', '/help', '/bm', '/reload'] 111 | if (!no_fid_commands.some(cmd => text.startsWith(cmd)) && !validate_fid(fid)) { 112 | return sm({ chat_id, text: 'Folder ID is invalid or not accessible' }) 113 | } 114 | if (text.startsWith('/help')) return send_help(chat_id) 115 | if (text.startsWith('/reload')) { 116 | if (!is_pm2()) return sm({ chat_id, text: 'Process is not a pm2 daemon, will not restart' }) 117 | sm({ chat_id, text: 'Restart' }).then(() => process.exit()) 118 | } else if (text.startsWith('/bm')) { 119 | const [cmd, action, alias, target] = text.split(' ').map(v => v.trim()).filter(v => v) 120 | if (!action) return send_all_bookmarks(chat_id) 121 | if (action === 'set') { 122 | if (!alias || !target) return sm({ chat_id, text: 'Name and Destination FolderID cannot be empty' }) 123 | if (alias.length > 24) return sm({ chat_id, text: 'Name should not be more than 24 letters in length' }) 124 | if (!validate_fid(target)) return sm({ chat_id, text: 'Incorrect Destination FolderID' }) 125 | set_bookmark({ chat_id, alias, target }) 126 | } else if (action === 'unset') { 127 | if (!alias) return sm({ chat_id, text: 'Name cannot be empty' }) 128 | unset_bookmark({ chat_id, alias }) 129 | } else { 130 | send_bm_help(chat_id) 131 | } 132 | } else if (text.startsWith('/count')) { 133 | if (counting[fid]) return sm({ chat_id, text: fid + ' Counting, please wait a moment' }) 134 | try { 135 | counting[fid] = true 136 | const update = text.endsWith(' -u') 137 | await send_count({ fid, chat_id, update }) 138 | } catch (err) { 139 | console.error(err) 140 | sm({ chat_id, text: fid + ' Stats Failed:' + err.message }) 141 | } finally { 142 | delete counting[fid] 143 | } 144 | } else if (text.startsWith('/copy')) { 145 | let target = text.replace('/copy', '').replace(' -u', '').trim().split(' ').map(v => v.trim()).filter(v => v)[1] 146 | target = get_target_by_alias(target) || target 147 | if (target && !validate_fid(target)) return sm({ chat_id, text: `Destination FolderID ${target} is Invalid` }) 148 | if (COPYING_FIDS[fid + target]) return sm({ chat_id, text: 'Processing copy command with the same source and destination' }) 149 | COPYING_FIDS[fid + target] = true 150 | const update = text.endsWith(' -u') 151 | tg_copy({ fid, target, chat_id, update }).then(task_id => { 152 | is_int(task_id) && sm({ chat_id, text: `Clone Started For Task ID: ${task_id}\nType /task ${task_id} to check the progress` }) 153 | }).finally(() => COPYING_FIDS[fid + target] = false) 154 | } else if (text.startsWith('/task')) { 155 | let task_id = text.replace('/task', '').trim() 156 | if (task_id === 'all') { 157 | return send_all_tasks(chat_id) 158 | } else if (task_id === 'clear') { 159 | return clear_tasks(chat_id) 160 | } else if (task_id === '-h') { 161 | return send_task_help(chat_id) 162 | } else if (task_id.startsWith('rm')) { 163 | task_id = task_id.replace('rm', '') 164 | task_id = parseInt(task_id) 165 | if (!task_id) return send_task_help(chat_id) 166 | return rm_task({ task_id, chat_id }) 167 | } 168 | task_id = parseInt(task_id) 169 | if (!task_id) { 170 | const running_tasks = db.prepare('select id from task where status=?').all('copying') 171 | if (!running_tasks.length) return sm({ chat_id, text: 'There are currently no running tasks' }) 172 | return running_tasks.forEach(v => send_task_info({ chat_id, task_id: v.id }).catch(console.error)) 173 | } 174 | send_task_info({ task_id, chat_id }).catch(console.error) 175 | } else if (message_str.includes('drive.google.com/') || validate_fid(text)) { 176 | return send_choice({ fid: fid || text, chat_id }) 177 | } else { 178 | sm({ chat_id, text: 'This command is not currently supported' }) 179 | } 180 | }) 181 | 182 | module.exports = router 183 | 
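A note on the guard used throughout this router: in-flight operations are tracked in plain objects (COPYING_FIDS, counting) keyed by fid (plus target for copies), so a duplicate command is rejected while the first one runs. The same pattern in isolation, as a minimal sketch with start_job as a hypothetical stand-in for tg_copy/send_count:

const IN_FLIGHT = {}

async function start_job (key, run) {
  if (IN_FLIGHT[key]) return 'already running' // duplicate request, reject
  IN_FLIGHT[key] = true
  try {
    return await run() // the actual long-running work
  } finally {
    delete IN_FLIGHT[key] // always release the guard, even if run() throws
  }
}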
-------------------------------------------------------------------------------- /src/src/snap2html.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs') 2 | const path = require('path') 3 | const dayjs = require('dayjs') 4 | 5 | const ID_DIR_MAPPING = {} 6 | /* 7 | Data format: 8 | Each index in the "dirs" array is an array representing a directory: 9 | First item: "directory path*always 0*directory modified date" 10 | Note that forward slashes are used instead of (Windows style) backslashes 11 | Then, for each file in the directory: "filename*size of file*file modified date" 12 | Second-to-last item tells the total size of the directory content 13 | Last item references the IDs of all subdirectories of this dir (if any). 14 | ID is the item index in the dirs array. 15 | const dirs = [ 16 | [ 17 | `C:/WordPress/wp-admin*0*1597318033`, 18 | `widgets.php*18175*1597318033`, 19 | 743642, 20 | // "2*11*12*13*14*15*16" 21 | "1" 22 | ], 23 | [ 24 | `C:/WordPress/wp-admin/test*0*1597318033`, 25 | `test.php*12175*1597318033`, 26 | 12175, 27 | "" 28 | ] 29 | ] */ 30 | 
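Given the field layout described above, a single dirs row can be unpacked by splitting on the `*` delimiter. A minimal reader sketch (hypothetical helper, not part of snap2html.js):

function parse_dir_entry (entry) {
  const [dir_path, , dir_mtime] = entry[0].split('*') // "path*0*mtime"
  const files = entry.slice(1, -2).map(line => {
    const [name, size, mtime] = line.split('*') // "filename*size*mtime"
    return { name, size: Number(size), mtime: Number(mtime) }
  })
  const total_size = Number(entry[entry.length - 2]) // second-to-last item
  const subdirs = entry[entry.length - 1] // e.g. "2*11*12" or ""
  const sub_ids = subdirs ? String(subdirs).split('*').map(Number) : [] // indexes into dirs
  return { dir_path, dir_mtime: Number(dir_mtime), files, total_size, sub_ids }
}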
31 | function snap2html ({ root, data }) { 32 | const total_size = sum_size(data) 33 | const template = fs.readFileSync(path.join(__dirname, './snap2html.template'), 'utf8') 34 | let html = template.replace('var dirs = []', 'var dirs = ' + JSON.stringify(trans(data, root))) 35 | html = html.replace(/\[TITLE\]/g, root.name) 36 | html = html.replace('[GEN DATE]', dayjs().format('YYYY-MM-DD HH:mm:ss')) 37 | const file_numbers = data.filter(v => !is_folder(v)).length 38 | const folder_numbers = data.filter(v => is_folder(v)).length 39 | html = html.replace(/\[NUM FILES\]/g, file_numbers) 40 | html = html.replace('[NUM DIRS]', folder_numbers) 41 | html = html.replace('[TOT SIZE]', total_size) 42 | return html 43 | } 44 | 45 | function sum_size (arr) { 46 | let total = 0 47 | arr.forEach(v => total += Number(v.size) || 0) 48 | return total 49 | } 50 | 51 | function is_folder (v) { 52 | return v.mimeType === 'application/vnd.google-apps.folder' 53 | } 54 | 55 | function unix_time (t) { 56 | if (!t) return 0 57 | t = +new Date(t) 58 | return parseInt(t / 1000, 10) 59 | } 60 | 61 | function escape_name (name) { 62 | return name.replace(/\*/g, '&#42;') // '*' is the field delimiter in the dirs format, so escape it as an HTML entity 63 | } 64 | 65 | function trans (arr, root) { 66 | if (!arr.length) return arr 67 | const first = arr[0] 68 | get_size(root, arr) 69 | let dirs = arr.filter(is_folder) 70 | dirs.unshift(root) 71 | dirs = dirs.map(dir => { 72 | const { name, id, size, modifiedTime } = dir 73 | const dir_path = root.name + get_path(id, arr) 74 | let result = [`${escape_name(dir_path)}*0*${unix_time(modifiedTime)}`] 75 | const children = arr.filter(v => v.parent === id) 76 | const child_files = children.filter(v => !is_folder(v)).map(file => { 77 | return `${escape_name(file.name)}*${file.size}*${unix_time(file.modifiedTime)}` 78 | }) 79 | result = result.concat(child_files) 80 | result.push(size) 81 | const sub_folders = children.filter(is_folder).map(v => dirs.findIndex(vv => vv.id === v.id)) 82 | result.push(sub_folders.join('*')) 83 | return result 84 | }) 85 | return dirs 86 | } 87 | 88 | function get_size (node, arr) { 89 | if (node.size !== undefined) return node.size 90 | const children = arr.filter(v => v.parent === node.id) 91 | const sizes = children.map(child => get_size(child, arr)) 92 | const total_size = sizes.reduce((acc, val) => Number(acc) + Number(val), 0) 93 | return node.size = total_size 94 | } 95 | 96 | function get_path (id, folders) { 97 | let result = ID_DIR_MAPPING[id] 98 | if (result !== undefined) return result 99 | result = '' 100 | let temp = id 101 | let folder = folders.filter(v => v.id === temp)[0] 102 | while (folder) { 103 | result = `/${folder.name}` + result 104 | temp = folder.parent 105 | if (ID_DIR_MAPPING[temp]) { 106 | result = ID_DIR_MAPPING[temp] + result 107 | return ID_DIR_MAPPING[id] = result 108 | } 109 | folder = folders.filter(v => v.id === temp)[0] 110 | } 111 | return ID_DIR_MAPPING[id] = result 112 | } 113 | 114 | module.exports = { snap2html } 115 | -------------------------------------------------------------------------------- /src/src/summary.js: -------------------------------------------------------------------------------- 1 | const Table = require('cli-table3') 2 | const colors = require('colors/safe') 3 | 
const { escape } = require('html-escaper') 4 | 5 | module.exports = { make_table, summary, make_html, make_tg_table, format_size } 6 | 7 | function make_html ({ file_count, folder_count, total_size, details }) { 8 | const head = ['Type', 'Number', 'Size'] 9 | const th = '<tr>' + head.map(k => `<th>${k}</th>`).join('') + '</tr>' 10 | const td = details.map(v => '<tr>' + [escape(v.ext), v.count, v.size].map(k => `<td>${k}</td>`).join('') + '</tr>').join('') 11 | let tail = ['Total', file_count + folder_count, total_size] 12 | tail = '<tr>' + tail.map(k => `<td>${k}</td>`).join('') + '</tr>' 13 | const table = `<table> 14 | ${th} 15 | ${td} 16 | ${tail} 17 | </table>` 18 | return table 19 | } 20 | 
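A quick usage sketch of make_html with a hand-built summary object (the values are made up for illustration):

const sample = {
  file_count: 3,
  folder_count: 1,
  total_size: '1.20 GB',
  details: [
    { ext: 'mkv', count: 2, size: '1.10 GB' },
    { ext: 'srt', count: 1, size: '102.40 KB' }
  ]
}
// Renders a <table> with a header row, one row per extension, and a totals row
console.log(make_html(sample))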
21 | function make_table ({ file_count, folder_count, total_size, details }) { 22 | const tb = new Table() 23 | const hAlign = 'center' 24 | const headers = ['Type', 'Count', 'Size'].map(v => ({ content: colors.bold.brightBlue(v), hAlign })) 25 | const records = details.map(v => [v.ext, v.count, v.size]).map(arr => { 26 | return arr.map(content => ({ content, hAlign })) 27 | }) 28 | const total_count = file_count + folder_count 29 | const tails = ['Total', total_count, total_size].map(v => ({ content: colors.bold(v), hAlign })) 30 | tb.push(headers, ...records) 31 | tb.push(tails) 32 | return tb.toString() + '\n' 33 | } 34 | 35 | function make_tg_table ({ file_count, folder_count, total_size, details }, limit) { 36 | const tb = new Table({ 37 | // chars: { 38 | // 'top': '═', 39 | // 'top-mid': '╤', 40 | // 'top-left': '╔', 41 | // 'top-right': '╗', 42 | // 'bottom': '═', 43 | // 'bottom-mid': '╧', 44 | // 'bottom-left': '╚', 45 | // 'bottom-right': '╝', 46 | // 'left': '║', 47 | // 'left-mid': '╟', 48 | // 'right': '║', 49 | // 'right-mid': '╢' 50 | // }, 51 | style: { 52 | head: [], 53 | border: [] 54 | } 55 | }) 56 | const hAlign = 'center' 57 | const headers = ['Type', 'Count', 'Size'].map(v => ({ content: v, hAlign })) 58 | details.forEach(v => { 59 | if (v.ext === 'Folder') v.ext = '[Folder]' 60 | if (v.ext === 'No Extension') v.ext = '[NoExt]' 61 | }) 62 | let records = details.map(v => [v.ext, v.count, v.size]).map(arr => arr.map(content => ({ content, hAlign }))) 63 | const folder_row = records.pop() 64 | if (limit) records = records.slice(0, limit) 65 | if (folder_row) records.push(folder_row) 66 | const total_count = file_count + folder_count 67 | const tails = ['Total', total_count, total_size].map(v => ({ content: v, hAlign })) 68 | tb.push(headers, ...records) 69 | tb.push(tails) 70 | return tb.toString().replace(/─/g, '—') // Replace the box-drawing dashes so the table does not break on mobile; on PC it looks better without this replace 71 | } 72 | 73 | function summary (info, sort_by) { 74 | const files = info.filter(v => v.mimeType !== 'application/vnd.google-apps.folder') 75 | const file_count = files.length 76 | const folder_count = info.filter(v => v.mimeType === 'application/vnd.google-apps.folder').length 77 | let total_size = info.map(v => Number(v.size) || 0).reduce((acc, val) => acc + val, 0) 78 | total_size = format_size(total_size) 79 | const exts = {} 80 | const sizes = {} 81 | let no_ext = 0; let no_ext_size = 0 82 | files.forEach(v => { 83 | let { name, size } = v 84 | size = Number(size) || 0 85 | const ext = name.split('.').pop().toLowerCase() 86 | if (!name.includes('.') || ext.length > 10) { // If there are more than 10 characters after the dot, treat the file as having no extension 87 | no_ext_size += size 88 | return no_ext++ 89 | } 90 | if (exts[ext]) { 91 | exts[ext]++ 92 | } else { 93 | exts[ext] = 1 94 | } 95 | if (sizes[ext]) { 96 | sizes[ext] += size 97 | } else { 98 | sizes[ext] = size 99 | } 100 | }) 101 | const details = Object.keys(exts).map(ext => { 102 | const count = exts[ext] 103 | const size = sizes[ext] 104 | return { ext, count, size: format_size(size), raw_size: size } 105 | }) 106 | if (sort_by === 'size') { 107 | details.sort((a, b) => b.raw_size - a.raw_size) 108 | } else if (sort_by === 'name') { 109 | details.sort((a, b) => (a.ext > b.ext) ? 1 : -1) 110 | } else { 111 | details.sort((a, b) => b.count - a.count) 112 | } 113 | if (no_ext) details.push({ ext: 'No Extension', count: no_ext, size: format_size(no_ext_size), raw_size: no_ext_size }) 114 | if (folder_count) details.push({ ext: 'Folder', count: folder_count, size: 0, raw_size: 0 }) 115 | return { file_count, folder_count, total_size, details } 116 | } 117 | 
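The extension heuristic in summary treats the text after the last dot as an extension only when it is 10 characters or fewer; a standalone check of the edge cases (same rule, extracted for illustration):

function get_ext (name) {
  const ext = name.split('.').pop().toLowerCase()
  if (!name.includes('.') || ext.length > 10) return null // counted as "No Extension"
  return ext
}

console.log(get_ext('movie.MKV'))          // 'mkv'
console.log(get_ext('README'))             // null: no dot at all
console.log(get_ext('backup.0123456789x')) // null: 11 characters after the dot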
118 | function format_size (n) { 119 | n = Number(n) 120 | if (Number.isNaN(n)) return '' 121 | if (n < 0) return 'invalid size' 122 | const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB'] 123 | let flag = 0 124 | while (n >= 1024) { 125 | n = (n / 1024) 126 | flag++ 127 | } 128 | return n.toFixed(2) + ' ' + units[flag] 129 | } 130 | -------------------------------------------------------------------------------- /src/src/tg.js: -------------------------------------------------------------------------------- 1 | const Table = require('cli-table3') 2 | const dayjs = require('dayjs') 3 | const axios = require('@viegg/axios') 4 | const HttpsProxyAgent = require('https-proxy-agent') 5 | 6 | const { db } = require('../db') 7 | const { gen_count_body, validate_fid, real_copy, get_name_by_id, get_info_by_id, copy_file } = require('./gd') 8 | const { AUTH, DEFAULT_TARGET, USE_PERSONAL_AUTH } = require('../config') 9 | const { tg_token } = AUTH 10 | const gen_link = (fid, text) => `<a href="https://drive.google.com/drive/folders/${fid}">${text || fid}</a>` 11 | 12 | if (!tg_token) throw new Error('Please set Bot_token in config.js first') 13 | const { https_proxy } = process.env 14 | const axins = axios.create(https_proxy ? { httpsAgent: new HttpsProxyAgent(https_proxy) } : {}) 15 | 16 | const FID_TO_NAME = {} 17 | 18 | async function get_folder_name (fid) { 19 | let name = FID_TO_NAME[fid] 20 | if (name) return name 21 | name = await get_name_by_id(fid, !USE_PERSONAL_AUTH) 22 | return FID_TO_NAME[fid] = name 23 | } 24 | 25 | function send_help (chat_id) { 26 | const text = ` 27 | Command | Description 28 | ➖➖➖➖➖➖➖➖➖➖➖➖ 
29 | <code>/reload</code> | Restart the Task 30 | ➖➖➖➖➖➖➖➖➖➖➖➖ 31 | <code>/count FolderID [-u]</code> | Calculates Size 32 | - adding <code>-u</code> at the end is optional (info will be collected online) 33 | ➖➖➖➖➖➖➖➖➖➖➖➖ 34 | <code>/copy sourceID DestID [-u]</code> | Clone Files (Will create a New Folder) 35 | - If targetID is not filled in, it will be copied to the default location (set in <code>config.js</code>) 36 | - adding <code>-u</code> at the end is optional (info will be collected online) 37 | ➖➖➖➖➖➖➖➖➖➖➖➖ 38 | <code>/task</code> | Shows info about the running task 39 | ⁍ Example: 40 | <code>/task</code> | Return Details Of All Running Tasks. 41 | <code>/task [ID]</code> | Return Info Of Specific Task. 42 | <code>/task all</code> | Return The List Of All Tasks. 43 | <code>/task clear</code> | Clear All Completed Tasks. 44 | <code>/task rm [ID]</code> | Delete Specific Task. 45 | ➖➖➖➖➖➖➖➖➖➖➖➖ 46 | <code>/bm [action] [alias] [target]</code> | Add a common FolderID as Bookmark 47 | - Helpful while cloning to same destination folder multiple times 48 | ⁍ Example: 49 | <code>/bm</code> | Shows all bookmarks 50 | <code>/bm set movie folder-id</code> | Add a Bookmark by the name movie 51 | <code>/bm unset movie</code> | Delete this bookmark 52 | ` 53 | return sm({ chat_id, text, parse_mode: 'HTML' }) 54 | } 55 | 
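sm is defined further down in tg.js and does not appear in this excerpt; a minimal sketch of the wrapper these helpers assume, posting to sendMessage by default and to editMessageText when a second argument is given:

// Illustrative reimplementation only; the real sm lives later in tg.js
function sm (data, endpoint) {
  endpoint = endpoint || 'sendMessage'
  const url = `https://api.telegram.org/bot${tg_token}/${endpoint}`
  return axins.post(url, data).catch(err => console.error('sm error:', err.message))
}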
56 | function send_bm_help (chat_id) { 57 | const text = `<code>/bm [action] [alias] [target]</code> | Add a common FolderID as Bookmark 58 | - Helpful while cloning to same destination folder multiple times 59 | ⁍ Example: 60 | <code>/bm</code> | Shows all bookmarks 61 | <code>/bm set movie folder-id</code> | Add a Bookmark by the name movie 62 | <code>/bm unset movie</code> | Delete this bookmark 63 | ` 64 | return sm({ chat_id, text, parse_mode: 'HTML' }) 65 | } 66 | 67 | function send_task_help (chat_id) { 68 | const text = `<code>/task</code> | Shows info about the running task 69 | ⁍ Example: 70 | <code>/task</code> | Return Details Of All Running Tasks. 71 | <code>/task [ID]</code> | Return Info Of Specific Task. 72 | <code>/task all</code> | Return The List Of All Tasks. 73 | <code>/task clear</code> | Clear All Completed Tasks. 74 | <code>/task rm [ID]</code> | Delete Specific Task 75 | ` 76 | return sm({ chat_id, text, parse_mode: 'HTML' }) 77 | } 78 | 79 | function clear_tasks (chat_id) { 80 | const finished_tasks = db.prepare('select id from task where status=?').all('finished') 81 | finished_tasks.forEach(task => rm_task({ task_id: task.id })) 82 | sm({ chat_id, text: 'All completed tasks have been cleared' }) 83 | } 84 | 
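clear_tasks above and rm_task below lean on the task and copied tables created by src/create-table.sql; a rough sketch of the columns they query, reconstructed from the statements in this file and gd.js rather than copied from the schema:

// Reconstructed shape, for orientation only
db.exec(`
  CREATE TABLE IF NOT EXISTS task (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    source TEXT, target TEXT, status TEXT,
    mapping TEXT, ctime INTEGER, ftime INTEGER
  );
  CREATE TABLE IF NOT EXISTS copied (taskid INTEGER, fileid TEXT);
`)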
85 | function rm_task ({ task_id, chat_id }) { 86 | const exist = db.prepare('select id from task where id=?').get(task_id) 87 | if (!exist) return sm({ chat_id, text: `Task ID: <code>${task_id}</code> Does Not Exist`, parse_mode: 'HTML' }) 88 | db.prepare('delete from task where id=?').run(task_id) 89 | db.prepare('delete from copied where taskid=?').run(task_id) 90 | if (chat_id) sm({ chat_id, text: `Task ID: <code>${task_id}</code> Deleted`, parse_mode: 'HTML' }) 91 | } 92 | 93 | function send_all_bookmarks (chat_id) { 94 | let records = db.prepare('select alias, target from bookmark').all() 95 | if (!records.length) return sm({ chat_id, text: 'No Bookmarks Found' }) 96 | const tb = new Table({ style: { head: [], border: [] } }) 97 | const headers = ['Name', 'FolderID'] 98 | records = records.map(v => [v.alias, v.target]) 99 | tb.push(headers, ...records) 100 | const text = tb.toString().replace(/─/g, '—') 101 | return sm({ chat_id, text: `<pre>${text}</pre>`, parse_mode: 'HTML' }) 102 | } 103 | 104 | function set_bookmark ({ chat_id, alias, target }) { 105 | const record = db.prepare('select alias from bookmark where alias=?').get(alias) 106 | if (record) return sm({ chat_id, text: 'There is another Favourite Folder with the same name' }) 107 | db.prepare('INSERT INTO bookmark (alias, target) VALUES (?, ?)').run(alias, target) 108 | return sm({ chat_id, text: `Bookmark Successfully Set <code>${alias}</code> | <code>${target}</code>`, parse_mode: 'HTML' }) 109 | } 110 | 
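send_all_bookmarks above renders a cli-table3 grid, swaps the box-drawing dash for a wider character so Telegram's proportional font keeps the rows aligned, and wraps the result in a pre tag. The same recipe in isolation (the chat_id and rows are made up):

const tb = new Table({ style: { head: [], border: [] } })
tb.push(['Name', 'FolderID'], ['movie', 'folder-id'])
const text = tb.toString().replace(/─/g, '—') // keeps rows from wrapping on mobile
sm({ chat_id: 123456789, text: `<pre>${text}</pre>`, parse_mode: 'HTML' })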
111 | function unset_bookmark ({ chat_id, alias }) { 112 | const record = db.prepare('select alias from bookmark where alias=?').get(alias) 113 | if (!record) return sm({ chat_id, text: 'No Bookmarks found with this Name' }) 114 | db.prepare('delete from bookmark where alias=?').run(alias) 115 | return sm({ chat_id, text: `Bookmark Successfully Deleted: <code>${alias}</code>`, parse_mode: 'HTML' }) 116 | } 117 | 118 | function get_target_by_alias (alias) { 119 | const record = db.prepare('select target from bookmark where alias=?').get(alias) 120 | return record && record.target 121 | } 122 | 123 | function get_alias_by_target (target) { 124 | const record = db.prepare('select alias from bookmark where target=?').get(target) 125 | return record && record.alias 126 | } 127 | 128 | function send_choice ({ fid, chat_id }) { 129 | return sm({ 130 | chat_id, 131 | text: `Drive ID: ${fid}\nChoose what you would like to do`, 132 | reply_markup: { 133 | inline_keyboard: [ 134 | [ 135 | { text: 'Calculate Size', callback_data: `count ${fid}` }, 136 | { text: 'Clone', callback_data: `copy ${fid}` } 137 | ], 138 | [ 139 | { text: 'Refresh', callback_data: `update ${fid}` }, 140 | { text: 'Clear', callback_data: `clear_button` } 141 | ] 142 | ].concat(gen_bookmark_choices(fid)) 143 | } 144 | }) 145 | } 146 | 147 | // console.log(gen_bookmark_choices()) 148 | function gen_bookmark_choices (fid) { 149 | const gen_choice = v => ({ text: `Clone to ${v.alias}`, callback_data: `copy ${fid} ${v.alias}` }) 150 | const records = db.prepare('select * from bookmark').all() 151 | const result = [] 152 | for (let i = 0; i < records.length; i += 2) { 153 | const line = [gen_choice(records[i])] 154 | if (records[i + 1]) line.push(gen_choice(records[i + 1])) 155 | result.push(line) 156 | } 157 | return result 158 | } 159 | 160 | async function send_all_tasks (chat_id) { 161 | let records = db.prepare('select id, status, ctime from task').all() 162 | if (!records.length) return sm({ chat_id, text: 'No task record in the database' }) 163 | const tb = new Table({ style: { head: [], border: [] } }) 164 | const headers = ['ID', 'status', 'ctime'] 165 | records = records.map(v => { 166 | const { id, status, ctime } = v 167 | return [id, status, dayjs(ctime).format('YYYY-MM-DD HH:mm:ss')] 168 | }) 169 | tb.push(headers, ...records) 170 | const text = tb.toString().replace(/─/g, '—') 171 | const url = `https://api.telegram.org/bot${tg_token}/sendMessage` 172 | return axins.post(url, { 173 | chat_id, 174 | parse_mode: 'HTML', 175 | text: `All Clone Tasks:\n<pre>${text}</pre>` 176 | }).catch(err => { 177 | console.error(err.message) 178 | // const description = err.response && err.response.data && err.response.data.description 179 | // if (description && description.includes('message is too long')) { 180 | const text = [headers].concat(records.slice(-100)).map(v => v.join('\t')).join('\n') 181 | return sm({ chat_id, parse_mode: 'HTML', text: `Last 100 tasks:\n${text}` }) 182 | }) 183 | } 184 | 
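The buttons built by send_choice and gen_bookmark_choices encode a whole command into callback_data as space-separated tokens; router.js later splits them back into [action, fid, target]. A minimal round-trip sketch:

// Encoding, as in gen_bookmark_choices
const fid = '1ctMwpIaBg8S1lrZDxdynLXJpMsm5guAl' // sample id borrowed from a comment in gd.js
const callback_data = `copy ${fid} movie`
// Decoding, as in the callback_query branch of router.js
const [action, source, target] = callback_data.split(' ').filter(v => v)
console.log(action, source, target) // copy 1ctMwpIaBg8S1lrZDxdynLXJpMsm5guAl movie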
185 | async function get_task_info (task_id) { 186 | const record = db.prepare('select * from task where id=?').get(task_id) 187 | if (!record) return {} 188 | const { source, target, status, mapping, ctime, ftime } = record 189 | const { copied_files } = db.prepare('select count(fileid) as copied_files from copied where taskid=?').get(task_id) 190 | const folder_mapping = mapping && mapping.trim().split('\n') 191 | const new_folder = folder_mapping && folder_mapping[0].split(' ')[1] 192 | const { summary } = db.prepare('select summary from gd where fid=?').get(source) || {} 193 | const { file_count, folder_count, total_size } = summary ? JSON.parse(summary) : {} 194 | const total_count = (file_count || 0) + (folder_count || 0) 195 | const copied_folders = folder_mapping ? (folder_mapping.length - 1) : 0 196 | let text = 'Task No <code>' + task_id + '</code>\n' 197 | const folder_name = await get_folder_name(source) 198 | text += 'Source Folder:' + gen_link(source, folder_name) + '\n' 199 | text += 'Destination Folder:' + gen_link(target, get_alias_by_target(target)) + '\n' 200 | text += 'New Folder:' + (new_folder ? gen_link(new_folder) : 'Not Created yet') + '\n' 201 | text += 'Task Status <code>' + status + '</code>\n' 202 | text += 'Start Time <code>' + dayjs(ctime).format('YYYY-MM-DD HH:mm:ss') + '</code>\n' 203 | text += 'End Time <code>' + (ftime ? dayjs(ftime).format('YYYY-MM-DD HH:mm:ss') : 'Not Done') + '</code>\n' 204 | text += 'Folder Progress <code>' + copied_folders + '/' + (folder_count === undefined ? 'Unknown' : folder_count) + '</code>\n' 205 | text += 'File Progress <code>' + copied_files + '/' + (file_count === undefined ? 'Unknown' : file_count) + '</code>\n' 206 | text += 'Total Percentage <code>' + ((copied_files + copied_folders) * 100 / total_count).toFixed(2) + '%</code>\n' 207 | text += 'Total Size <code>' + (total_size || 'Unknown') + '</code>' 208 | return { text, status, folder_count } 209 | } 210 | 211 | async function send_task_info ({ task_id, chat_id }) { 212 | const { text, status, folder_count } = await get_task_info(task_id) 213 | if (!text) return sm({ chat_id, text: `Task ID Does Not Exist In The Database: <code>${task_id}</code>`, parse_mode: 'HTML' }) 214 | const url = `https://api.telegram.org/bot${tg_token}/sendMessage` 215 | let message_id 216 | try { 217 | const { data } = await axins.post(url, { chat_id, text, parse_mode: 'HTML' }) 218 | message_id = data && data.result && data.result.message_id 219 | } catch (e) { 220 | console.log('fail to send message to tg', e.message) 221 | } 222 | // get_task_info hogs the CPU when the number of folders is too large; in the future it would be better to store the mapping in a separate table 223 | if (!message_id || status !== 'copying') return 224 | const loop = setInterval(async () => { 225 | const { text, status } = await get_task_info(task_id) 226 | // TODO check if text changed 227 | if (status !== 'copying') clearInterval(loop) 228 | sm({ chat_id, message_id, text, parse_mode: 'HTML' }, 'editMessageText') 229 | }, 10 * 1000) 230 | } 231 | 
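The comment in send_task_info suggests moving the newline-joined mapping column into its own table so progress queries stop re-splitting a huge string. One possible shape for that refactor, as a sketch only (not implemented in this repo):

// Hypothetical normalized layout: one row per copied folder
db.exec('CREATE TABLE IF NOT EXISTS folder_mapping (taskid INTEGER, src TEXT, dest TEXT)')
// Folder progress then becomes a cheap COUNT instead of mapping.trim().split('\n').length
const task_id = 1 // sample id
const { copied_folders } = db.prepare(
  'SELECT count(*) AS copied_folders FROM folder_mapping WHERE taskid = ?'
).get(task_id)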
${e.message}
`, parse_mode: 'HTML' }) 246 | }) 247 | } 248 | 249 | let record = db.prepare('select id, status from task where source=? and target=?').get(fid, target) 250 | if (record) { 251 | if (record.status === 'copying') { 252 | return sm({ chat_id, text: 'Task With The Same SourceID And DestinationID Is Already In Progress,\nType /task ' + record.id }) 253 | } else if (record.status === 'finished') { 254 | sm({ chat_id, text: `Existing Task Detected
${record.id}
,Started Cloning`, parse_mode: 'HTML' }) 255 | } 256 | } 257 | 258 | real_copy({ source: fid, update, target, service_account: !USE_PERSONAL_AUTH, is_server: true }) 259 | .then(async info => { 260 | if (!record) record = {} // Prevent infinite loop 261 | if (!info) return 262 | const { task_id } = info 263 | const { text } = await get_task_info(task_id) 264 | sm({ chat_id, text, parse_mode: 'HTML' }) 265 | }) 266 | .catch(err => { 267 | const task_id = record && record.id 268 | if (task_id) db.prepare('update task set status=? where id=?').run('error', task_id) 269 | if (!record) record = {} 270 | console.error('Copy Failed', fid, '-->', target) 271 | console.error(err) 272 | sm({ chat_id, text: (task_id || '') + `Task Error
${err.message}
`, parse_mode: 'HTML' }) 273 | }) 274 | 275 | while (!record) { 276 | record = db.prepare('select id from task where source=? and target=?').get(fid, target) 277 | await sleep(1000) 278 | } 279 | return record.id 280 | } 281 | 282 | function sleep (ms) { 283 | return new Promise((resolve, reject) => { 284 | setTimeout(resolve, ms) 285 | }) 286 | } 287 | 288 | function reply_cb_query ({ id, data }) { 289 | const url = `https://api.telegram.org/bot${tg_token}/answerCallbackQuery` 290 | return axins.post(url, { 291 | callback_query_id: id, 292 | text: 'Start the Task ' + data 293 | }) 294 | } 295 | 296 | async function send_count ({ fid, chat_id, update }) { 297 | const gen_text = payload => { 298 | const { obj_count, processing_count, pending_count } = payload || {} 299 | const now = dayjs().format('YYYY-MM-DD HH:mm:ss') 300 | return `Size:${gen_link(fid)} 301 | Time:${now} 302 | Number of Files:${obj_count || ''} 303 | ${pending_count ? ('Pending:' + pending_count) : ''} 304 | ${processing_count ? ('Ongoing:' + processing_count) : ''}` 305 | } 306 | 307 | const url = `https://api.telegram.org/bot${tg_token}/sendMessage` 308 | let response 309 | try { 310 | response = await axins.post(url, { chat_id, text: `Started:
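// Why the loop above polls instead of awaiting real_copy: real_copy resolves
// only after the whole clone finishes, while tg_copy must return a task id
// right away. The loop therefore waits (at 1s intervals) for the task row that
// real_copy is expected to insert into the `task` table shortly after it starts.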
281 |
282 | function sleep (ms) {
283 | return new Promise((resolve, reject) => {
284 | setTimeout(resolve, ms)
285 | })
286 | }
287 |
288 | function reply_cb_query ({ id, data }) {
289 | const url = `https://api.telegram.org/bot${tg_token}/answerCallbackQuery`
290 | return axins.post(url, {
291 | callback_query_id: id,
292 | text: 'Start the Task ' + data
293 | })
294 | }
295 |
296 | async function send_count ({ fid, chat_id, update }) {
297 | const gen_text = payload => {
298 | const { obj_count, processing_count, pending_count } = payload || {}
299 | const now = dayjs().format('YYYY-MM-DD HH:mm:ss')
300 | return `Size: ${gen_link(fid)}
301 | Time: ${now}
302 | Number of Files: ${obj_count || ''}
303 | ${pending_count ? ('Pending: ' + pending_count) : ''}
304 | ${processing_count ? ('Ongoing: ' + processing_count) : ''}`
305 | }
306 |
307 | const url = `https://api.telegram.org/bot${tg_token}/sendMessage`
308 | let response
309 | try {
310 | response = await axins.post(url, { chat_id, text: `Started: <code>${fid}</code>.\nCollecting File Stats, Please Wait.\nIt Is Recommended Not To Start Cloning Before The Stats Are Collected.`, parse_mode: 'HTML' })
311 | } catch (e) {}
312 | const { data } = response || {}
313 | const message_id = data && data.result && data.result.message_id
314 | const message_updater = payload => sm({
315 | chat_id,
316 | message_id,
317 | parse_mode: 'HTML',
318 | text: gen_text(payload)
319 | }, 'editMessageText')
320 |
321 | const service_account = !USE_PERSONAL_AUTH
322 | const table = await gen_count_body({ fid, update, service_account, type: 'tg', tg: message_id && message_updater })
323 | if (!table) return sm({ chat_id, parse_mode: 'HTML', text: gen_link(fid) + ' Failed to obtain info' })
324 | const gd_link = `https://drive.google.com/drive/folders/${fid}`
325 | const name = await get_folder_name(fid)
326 | return axins.post(url, {
327 | chat_id,
328 | parse_mode: 'HTML',
329 | text: `Source Folder Name: ${name}
330 | Source Folder Link: ${gd_link}
331 | <pre>${table}</pre>`
332 | }).catch(async err => {
333 | console.log(err.message)
334 | // const description = err.response && err.response.data && err.response.data.description
335 | // const too_long_msgs = ['request entity too large', 'message is too long']
336 | // if (description && too_long_msgs.some(v => description.toLowerCase().includes(v))) {
337 | const limit = 20
338 | const table = await gen_count_body({ fid, type: 'tg', service_account: !USE_PERSONAL_AUTH, limit })
339 | return sm({
340 | chat_id,
341 | parse_mode: 'HTML',
342 | text: `Name: ${name}
343 | Link: ${fid}
344 | The Table Is Too Long, Only Showing The First ${limit} Rows:
345 | <pre>${table}</pre>`
346 | })
347 | })
348 | }
349 |
350 | function sm (data, endpoint) {
351 | endpoint = endpoint || 'sendMessage'
352 | const url = `https://api.telegram.org/bot${tg_token}/${endpoint}`
353 | return axins.post(url, data).catch(err => {
354 | // console.error('fail to post', url, data)
355 | console.error('fail to send message to tg:', err.message)
356 | const err_data = err.response && err.response.data
357 | err_data && console.error(err_data)
358 | })
359 | }
360 |
361 | function extract_fid (text) {
362 | text = text.replace(/^\/count/, '').replace(/^\/copy/, '').replace(/\\n/g, '').replace(/\\/g, '').trim()
363 | const [source, target] = text.split(' ').map(v => v.trim())
364 | if (validate_fid(source)) return source
365 | try {
366 | if (!text.startsWith('http')) text = 'https://' + text
367 | const u = new URL(text)
368 | if (u.pathname.includes('/folders/')) {
369 | return u.pathname.split('/').map(v => v.trim()).filter(v => v).pop()
370 | } else if (u.pathname.includes('/file/')) {
371 | const file_reg = /file\/d\/([a-zA-Z0-9_-]+)/
372 | const file_match = u.pathname.match(file_reg)
373 | return file_match && file_match[1]
374 | }
375 | return u.searchParams.get('id')
376 | } catch (e) {
377 | return ''
378 | }
379 | }
380 |
381 | function extract_from_text (text) {
382 | // const reg = /https?:\/\/drive.google.com\/[^\s]+/g
383 | const reg = /https?:\/\/drive.google.com\/[a-zA-Z0-9_\\/?=&-]+/g
384 | const m = text.match(reg)
385 | return m && extract_fid(m[0])
386 | }
387 |
388 | module.exports = { send_count, send_help, sm, extract_fid, reply_cb_query, send_choice, send_task_info, send_all_tasks, tg_copy, extract_from_text, get_target_by_alias, send_bm_help, send_all_bookmarks, set_bookmark, unset_bookmark, clear_tasks, send_task_help, rm_task }
389 |
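// Illustration (not part of the original file): the Drive link shapes that
// extract_fid above resolves, using a made-up ID. Each call below returns
// '1A2b3C4d5E6f7G8h9I0j':
//
//   extract_fid('/copy https://drive.google.com/drive/folders/1A2b3C4d5E6f7G8h9I0j')
//   extract_fid('https://drive.google.com/file/d/1A2b3C4d5E6f7G8h9I0j/view')
//   extract_fid('https://drive.google.com/open?id=1A2b3C4d5E6f7G8h9I0j')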
--------------------------------------------------------------------------------
/src/src/tree.js:
--------------------------------------------------------------------------------
1 | module.exports = { gen_tree_html }
2 |
3 | function gen_tree_html (arr) {
4 | const data = gen_tree_data(arr, is_gd_folder)
5 | return tree_tpl(JSON.stringify(data))
6 | }
7 |
8 | function tree_tpl (str) {
9 | return `
10 | <!DOCTYPE html>
11 | <html lang="en">
12 | <head>
13 | <meta charset="UTF-8">
14 | <meta name="viewport" content="width=device-width, initial-scale=1.0">
15 | <!-- stylesheet <link> for the tree widget (URL stripped from this copy) -->
16 | <title>Folder Tree</title>
17 | <!-- <script> tags for the tree widget (URLs stripped from this copy) -->
18 | </head>
19 | <body>
20 | <!-- container markup reconstructed; the original body was stripped from this copy -->
21 | <div id="tree"></div>
22 | <script>
23 | const data = ${str}
24 | /* the inline script that renders the injected data into the
25 | tree widget was stripped from this copy */
26 | </script>
27 | </body>
28 | </html>
29 | `
30 | }
31 |
32 | function is_gd_folder (data) {
33 | return data.mimeType === 'application/vnd.google-apps.folder'
34 | }
35 |
36 | function gen_tree_data (data, is_folder) {
37 | if (!data || !data.length) return []
38 | const folders = data.filter(is_folder)
39 | const files = data.filter(v => !is_folder(v))
40 | const total_size = sum(files.map(v => v.size))
41 | const root = {
42 | title: `/Root Folder [Total ${files.length} Files (excluding folders), ${format_size(total_size)}]`,
43 | key: data[0].parent
44 | }
45 | if (!folders.length) return [root]
46 | const sub_folders = folders.filter(v => v.parent === folders[0].parent)
47 | sub_folders.forEach(v => {
48 | sum_files(v, data, is_folder)
49 | count_files(v, data, is_folder)
50 | })
51 | sort_folders(folders, 'count')
52 | sort_folders(sub_folders, 'count')
53 | folders.forEach(v => {
54 | let { name, size, count, id } = v
55 | if (name.length > 50) name = name.slice(0, 48) + '...'
56 | v.title = `${name} | [Total ${count} Files, ${format_size(size)}]`
57 | })
58 | root.children = sub_folders.map(v => gen_node(v, folders))
59 | return [root]
60 | }
61 |
62 | function sort_folders (folders, type) {
63 | if (!folders || !folders.length) return
64 | if (type === 'size') return folders.sort((a, b) => b.size - a.size)
65 | if (type === 'count') return folders.sort((a, b) => b.count - a.count)
66 | }
67 |
68 | function gen_node (v, folders) {
69 | const { id, title, node } = v
70 | if (node) return node
71 | return v.node = {
72 | title,
73 | key: id,
74 | children: v.children || folders.filter(vv => vv.parent === id).map(vv => gen_node(vv, folders))
75 | }
76 | }
77 |
78 | function count_files (folder, arr, is_folder) {
79 | if (folder.count) return folder.count
80 | const children = arr.filter(v => v.parent === folder.id)
81 | return folder.count = sum(children.map(v => {
82 | if (is_folder(v)) return count_files(v, arr, is_folder)
83 | return 1
84 | }))
85 | }
86 |
87 | function sum_files (folder, arr, is_folder) {
88 | if (folder.size) return folder.size
89 | const children = arr.filter(v => v.parent === folder.id)
90 | return folder.size = sum(children.map(v => {
91 | if (is_folder(v)) return sum_files(v, arr, is_folder)
92 | return v.size
93 | }))
94 | }
95 |
96 | function sum (arr) {
97 | let result = 0
98 | for (const v of arr) {
99 | result += Number(v) || 0
100 | }
101 | return result
102 | }
103 |
104 | function format_size (n) {
105 | n = Number(n)
106 | if (Number.isNaN(n)) return ''
107 | if (n < 0) return 'invalid size'
108 | const units = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
109 | let flag = 0
110 | while (n >= 1024) {
111 | n = n / 1024
112 | flag++
113 | }
114 | return n.toFixed(2) + ' ' + units[flag]
115 | }
116 |
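// Illustration (not part of the original file): gen_tree_html takes a flat
// array of Drive objects; the field names follow gen_tree_data above, the
// values here are made up ('ROOT' is the parent id of the top folder):
//
//   gen_tree_html([
//     { id: 'A', parent: 'ROOT', name: 'docs', mimeType: 'application/vnd.google-apps.folder' },
//     { id: 'B', parent: 'A', name: 'a.txt', size: '1536', mimeType: 'text/plain' }
//   ])
//
// count_files/sum_files then aggregate each folder recursively, and
// format_size(1536) renders as '1.50 KB'.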
--------------------------------------------------------------------------------
/src/validate-sa.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | const { argv } = require('yargs')
4 | .usage('Usage: ./$0 folder-id\nfolder-id: the directory ID on which each SA should have read permission')
5 | .help('h')
6 | .alias('h', 'help')
7 |
8 | const fs = require('fs')
9 | const path = require('path')
10 | const prompts = require('prompts')
11 | const { GoogleToken } = require('gtoken')
12 | const axios = require('@viegg/axios')
13 | const HttpsProxyAgent = require('https-proxy-agent')
14 |
15 | const { https_proxy } = process.env
16 | const axins = axios.create(https_proxy ? { httpsAgent: new HttpsProxyAgent(https_proxy) } : {})
17 |
18 | const SA_FILES = fs.readdirSync(path.join(__dirname, 'sa')).filter(v => v.endsWith('.json'))
19 | const SA_TOKENS = SA_FILES.map(filename => {
20 | const gtoken = new GoogleToken({
21 | keyFile: path.join(__dirname, 'sa', filename),
22 | scope: ['https://www.googleapis.com/auth/drive']
23 | })
24 | return { gtoken, filename }
25 | })
26 |
27 | main()
28 | async function main () {
29 | const [fid] = argv._
30 | if (validate_fid(fid)) {
31 | console.log('Start testing', SA_TOKENS.length, 'SA accounts')
32 | const invalid_sa = await get_invalid_sa(SA_TOKENS, fid)
33 | if (!invalid_sa.length) return console.log('Checked', SA_TOKENS.length, 'SAs, no invalid account detected')
34 | const choice = await choose(invalid_sa.length)
35 | if (choice === 'yes') {
36 | mv_sa(invalid_sa)
37 | console.log('Successfully moved')
38 | } else {
39 | console.log('Exited without changes, invalid SAs:', invalid_sa)
40 | }
41 | } else {
42 | console.warn('Folder ID is missing or malformed')
43 | }
44 | }
45 |
46 | function mv_sa (arr) {
47 | for (const filename of arr) {
48 | const oldpath = path.join(__dirname, 'sa', filename)
49 | const new_path = path.join(__dirname, 'sa/invalid', filename)
50 | fs.renameSync(oldpath, new_path)
51 | }
52 | }
53 |
54 | async function choose (count) {
55 | const answer = await prompts({
56 | type: 'select',
57 | name: 'value',
58 | message: `Detected ${count} invalid SAs. Move them to the sa/invalid folder?`,
59 | choices: [
60 | { title: 'Yes', description: 'Confirm Move', value: 'yes' },
61 | { title: 'No', description: 'Exit without making changes', value: 'no' }
62 | ],
63 | initial: 0
64 | })
65 | return answer.value
66 | }
67 |
68 | async function get_invalid_sa (arr, fid) {
69 | if (!fid) throw new Error('Please specify the ID of the directory to check permissions')
70 | const fails = []
71 | let flag = 0
72 | let good = 0
73 | for (const v of arr) {
74 | console.log('Inspection Progress', `${flag++}/${arr.length}`)
75 | console.log('Valid/Invalid', `${good}/${fails.length}`)
76 | const { gtoken, filename } = v
77 | try {
78 | const access_token = await get_sa_token(gtoken)
79 | await get_info(fid, access_token)
80 | good++
81 | } catch (e) {
82 | handle_error(e)
83 | const status = e && e.response && e.response.status
84 | if (Number(status) === 400) fails.push(filename) // failed to obtain an access_token
85 |
86 | const data = e && e.response && e.response.data
87 | const code = data && data.error && data.error.code
88 | if ([404, 403].includes(Number(code))) fails.push(filename) // failed to read the folder's information
89 | }
90 | }
91 | return fails
92 | }
93 |
94 | function handle_error (err) {
95 | const data = err && err.response && err.response.data
96 | if (data) {
97 | console.error(JSON.stringify(data))
98 | } else {
99 | console.error(err.message)
100 | }
101 | }
102 |
103 | async function get_info (fid, access_token) {
104 | let url = `https://www.googleapis.com/drive/v3/files/${fid}`
105 | const params = {
106 | includeItemsFromAllDrives: true,
107 | supportsAllDrives: true,
108 | corpora: 'allDrives',
109 | fields: 'id,name'
110 | }
111 | url += '?' + params_to_query(params)
112 | const headers = { authorization: 'Bearer ' + access_token }
113 | const { data } = await axins.get(url, { headers })
114 | return data
115 | }
116 |
117 | function params_to_query (data) {
118 | const ret = []
119 | for (const d in data) {
120 | ret.push(encodeURIComponent(d) + '=' + encodeURIComponent(data[d]))
121 | }
122 | return ret.join('&')
123 | }
124 |
125 | async function get_sa_token (gtoken) {
126 | return new Promise((resolve, reject) => {
127 | gtoken.getToken((err, tk) => {
128 | err ? reject(err) : resolve(tk.access_token)
129 | })
130 | })
131 | }
132 |
133 | function validate_fid (fid) {
134 | if (!fid) return false
135 | fid = String(fid)
136 | if (fid.length < 10 || fid.length > 100) return false
137 | const reg = /^[a-zA-Z0-9_-]+$/
138 | return fid.match(reg)
139 | }
140 |
--------------------------------------------------------------------------------
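// Usage sketch (not part of the repo), with a made-up folder id:
//
//   node src/validate-sa.js 1A2b3C4d5E6f7G8h9I0j
//
// Every key in src/sa is used to request the folder's metadata; keys that fail
// with HTTP 400 (token rejected) or with error code 403/404 (no read access)
// are reported and can be moved to src/sa/invalid at the interactive prompt.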