[options]')
5 | .example('./$0 1ULY8ISgWSOVc0UrzejykVgXfVL_I4r75', 'Obtain all file information in https://drive.google.com/drive/folders/1ULY8ISgWSOVc0UrzejykVgXfVL_I4r75, and save the md5 value and ID of each file in the local database')
6 | .example('./$0 1ULY8ISgWSOVc0UrzejykVgXfVL_I4r75 -s 10mb', 'Obtain all file information in the specified directory, and save the md5 value and ID of files not less than 10MB in the local database')
7 | .alias('s', 'size')
8 | .describe('s', 'Do not fill in the md5 records of all files stored by default. If this value is set, files smaller than this size will be filtered out, and must end with b, such as 10mb')
9 | .alias('u', 'update')
10 | .describe('u', 'Force to get information online (ignoring whether there is a local cache)')
11 | .alias('N', 'not_teamdrive')
12 | .describe('N', 'If it is not a team disk link, you can add this parameter to improve interface query efficiency and reduce latency')
13 | .alias('S', 'service_account')
14 | .describe('S', 'Use service account to obtain file information, provided that the SA json file must be placed in the ./sa directory')
15 | .help('h')
16 | .alias('h', 'help')
17 |
// Entry point: validate the folder ID supplied on the command line, then
// walk the folder and record md5 hashes into the local database.
const { save_md5, validate_fid } = require('./src/gd')

const [fid] = argv._
if (!validate_fid(fid)) {
  console.warn('Directory ID is missing or incorrectly formatted')
} else {
  const { update, size, not_teamdrive, service_account } = argv
  save_md5({ fid, size, not_teamdrive, update, service_account }).catch(console.error)
}
26 |
--------------------------------------------------------------------------------
/src/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "gd-utils",
3 | "version": "1.0.2",
4 | "description": "Google Drive Utils",
5 | "repository": "iwestlin/gd-utils",
6 | "keywords": [],
7 | "author": "viegg",
8 | "license": "ISC",
9 | "dependencies": {
10 | "@koa/router": "^10.0.0",
11 | "@viegg/axios": "^1.0.0",
12 | "better-sqlite3": "^7.4.1",
13 | "bytes": "^3.1.0",
14 | "cli-table3": "^0.6.0",
15 | "colors": "^1.4.0",
16 | "dayjs": "^1.10.5",
17 | "gtoken": "^5.3.0",
18 | "html-escaper": "^3.0.3",
19 | "https-proxy-agent": "^5.0.0",
20 | "koa": "^2.13.1",
21 | "koa-bodyparser": "^4.3.0",
22 | "p-limit": "^3.1.0",
23 | "prompts": "^2.4.1",
24 | "proxy-agent": "^4.0.1",
25 | "signal-exit": "^3.0.3",
26 | "yargs": "^17.0.1"
27 | }
28 | }
29 |
--------------------------------------------------------------------------------
/src/sa/.keep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/src/sa/.keep
--------------------------------------------------------------------------------
/src/sa/invalid/.keep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/nenokkadine/GD-Utils/f0782a63b5212f7af4b57e92e0fcd752df42c90f/src/sa/invalid/.keep
--------------------------------------------------------------------------------
/src/server.js:
--------------------------------------------------------------------------------
const dayjs = require('dayjs')
const Koa = require('koa')
const bodyParser = require('koa-bodyparser')
const router = require('./src/router')
const fs = require('fs')

const app = new Koa()
// Trust X-Forwarded-* headers (the app sits behind a reverse proxy).
app.proxy = true

app.use(catcher)
app.use(bodyParser())
app.use(router.routes())
app.use(router.allowedMethods())

// Unix domain socket the HTTP server listens on.
const G_SOCK = "/usr/gdutils.sock"

// Remove a stale socket left over from a previous run. Using a plain
// unlink with an ENOENT guard avoids the exists/unlink race of the
// original existsSync + unlinkSync pair.
try {
  fs.unlinkSync(G_SOCK)
} catch (e) {
  if (e.code !== 'ENOENT') throw e
}

// BUG FIX: when listen() is given a socket path, it takes no hostname —
// the previous extra '0.0.0.0' argument was silently interpreted as an
// (invalid) backlog value.
app.listen(G_SOCK)

// Catch-all error middleware: log the error and answer 500 with its message.
async function catcher (ctx, next) {
  try {
    return await next()
  } catch (e) {
    console.error(e)
    ctx.status = 500
    ctx.body = e.message
  }
}
--------------------------------------------------------------------------------
/src/src/gd.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs')
2 | const path = require('path')
3 | const dayjs = require('dayjs')
4 | const prompts = require('prompts')
5 | const pLimit = require('p-limit')
6 | const axios = require('@viegg/axios')
7 | const { GoogleToken } = require('gtoken')
8 | const handle_exit = require('signal-exit')
9 | const bytes = require('bytes')
10 | const { argv } = require('yargs')
11 |
12 | let { PARALLEL_LIMIT, EXCEED_LIMIT } = require('../config')
13 | PARALLEL_LIMIT = argv.l || argv.limit || PARALLEL_LIMIT
14 | EXCEED_LIMIT = EXCEED_LIMIT || 7
15 |
16 | const { AUTH, RETRY_LIMIT, TIMEOUT_BASE, TIMEOUT_MAX, LOG_DELAY, PAGE_SIZE, DEFAULT_TARGET } = require('../config')
17 | const { db } = require('../db')
18 | const { make_table, make_tg_table, make_html, summary } = require('./summary')
19 | const { gen_tree_html } = require('./tree')
20 | const { snap2html } = require('./snap2html')
21 |
22 | const FILE_EXCEED_MSG = 'The number of files on your team drive has exceeded the limit (400,000), Please move the folder that has not been copied to another team drive, and then run the copy command to resume the transfer'
23 | const FOLDER_TYPE = 'application/vnd.google-apps.folder'
24 | const sleep = ms => new Promise((resolve, reject) => setTimeout(resolve, ms))
25 |
26 | const { https_proxy, http_proxy, all_proxy } = process.env
27 | const proxy_url = https_proxy || http_proxy || all_proxy
28 |
// Shared axios instance. When one of the standard proxy environment
// variables is set (read above), all requests are routed through it;
// proxy-agent is preferred, https-proxy-agent is the fallback.
29 | let axins
30 | if (proxy_url) {
31 | console.log('Use Proxy:', proxy_url)
32 | let ProxyAgent
33 | try {
34 | ProxyAgent = require('proxy-agent')
35 | } catch (e) { // run npm i proxy-agent
36 | ProxyAgent = require('https-proxy-agent')
37 | }
38 | axins = axios.create({ httpsAgent: new ProxyAgent(proxy_url) })
39 | } else {
40 | axins = axios.create({})
41 | }
42 |
// Service-account (SA) key files: JSON keys read from ./<SA_LOCATION>,
// consumed in batches of SA_BATCH_SIZE. SA_FILES.flag tracks the batch
// cursor (a property tacked onto the array itself).
43 | const SA_LOCATION = argv.sa || 'sa'
44 | const SA_BATCH_SIZE = 1000
45 | const SA_FILES = fs.readdirSync(path.join(__dirname, '..', SA_LOCATION)).filter(v => v.endsWith('.json'))
46 | SA_FILES.flag = 0
47 | let SA_TOKENS = get_sa_batch()
48 |
// Under pm2 the process is long-lived, so reset the SA pool every two
// hours to give exhausted accounts another chance.
49 | if (is_pm2()) {
50 | setInterval(() => {
51 | SA_FILES.flag = 0
52 | SA_TOKENS = get_sa_batch()
53 | }, 1000 * 3600 * 2)
54 | }
55 |
56 | // https://github.com/Leelow/is-pm2/blob/master/index.js
// Detect whether the process is running under PM2 by probing for any of
// the environment markers PM2 sets for its children.
function is_pm2 () {
  const markers = ['PM2_HOME', 'PM2_JSON_PROCESSING', 'PM2_CLI']
  return markers.some(key => key in process.env)
}
60 |
// Take the next batch (up to SA_BATCH_SIZE) of SA key files, advance the
// SA_FILES.flag cursor, and wrap each key in a GoogleToken with the full
// Drive scope. `expires: 0` forces a refresh on first use.
61 | function get_sa_batch () {
62 | const new_flag = SA_FILES.flag + SA_BATCH_SIZE
63 | const files = SA_FILES.slice(SA_FILES.flag, new_flag)
64 | SA_FILES.flag = new_flag
65 | return files.map(filename => {
66 | const gtoken = new GoogleToken({
67 | keyFile: path.join(__dirname, '..', SA_LOCATION, filename),
68 | scope: ['https://www.googleapis.com/auth/drive']
69 | })
70 | return { gtoken, expires: 0 }
71 | })
72 | }
73 |
// On process exit (signal or abnormal exit, or any exit under pm2), mark
// every still-'copying' task as 'interrupt' so it can be resumed later,
// then close the database cleanly.
74 | handle_exit((code, signal) => {
75 | if (code === 0 && !is_pm2()) return // normal exit in command line, do nothing
76 | const records = db.prepare('select id from task where status=?').all('copying')
77 | records.forEach(v => {
78 | db.prepare('update task set status=? where id=?').run('interrupt', v.id)
79 | })
80 | records.length && console.log(records.length, 'task interrupted')
81 | db.close()
82 | })
83 |
// Walk the folder `fid` and insert an (gid, md5) row into the `hash`
// table for every file that has an md5Checksum and is not already
// recorded. `size` (optionally a bytes string like '10mb') filters out
// smaller files.
84 | async function save_md5 ({fid, size, not_teamdrive, update, service_account}) {
85 | let files = await walk_and_save({ fid, not_teamdrive, update, service_account })
86 | files = files.filter(v => v.mimeType !== FOLDER_TYPE)
87 | if (typeof size !== 'number') size = bytes.parse(size)
// NOTE(review): v.size comes from the Drive API — presumably a numeric
// string, relying on JS coercion in `>=`; confirm against ls_folder output.
88 | if (size) files = files.filter(v => v.size >= size)
89 | let cnt = 0
90 | files.forEach(file => {
91 | const {md5Checksum, id} = file
92 | if (!md5Checksum) return
93 | const record = db.prepare('SELECT * FROM hash WHERE gid = ?').get(id)
94 | if (record) return
95 | db.prepare('INSERT INTO hash (gid, md5) VALUES (?, ?)')
96 | .run(id, md5Checksum)
97 | cnt++
98 | })
99 | console.log('Added', cnt, 'Md5 records')
100 | }
101 |
// Return the Drive file id of a random 'normal' hash record matching
// this md5, or null when no record exists.
102 | function get_gid_by_md5 (md5) {
103 | const records = db.prepare('select * from hash where md5=? and status=?').all(md5, 'normal')
104 | if (!records.length) return null
105 | // console.log('got existed md5 record in db:', md5)
106 | return get_random_element(records).gid
107 | }
108 |
// Produce the body for a "count" request: a summary of the folder `fid`
// rendered as html/curl-table/tg-table/json (or the raw listing for
// type 'all'). Uses the cached summary in the `gd` table when possible,
// otherwise re-walks the folder.
109 | async function gen_count_body ({ fid, type, update, service_account, limit, tg }) {
// Re-walk the folder and return [listing, summary-of-listing].
110 | async function update_info () {
111 | const info = await walk_and_save({ fid, update, service_account, tg })
112 | return [info, summary(info)]
113 | }
114 |
// Render a summary object (or its JSON string form) in the requested type.
115 | function render_smy (smy, type, unfinished_number) {
116 | if (!smy) return
117 | if (['html', 'curl', 'tg'].includes(type)) {
118 | smy = (typeof smy === 'object') ? smy : JSON.parse(smy)
119 | const type_func = {
120 | html: make_html,
121 | curl: make_table,
122 | tg: make_tg_table
123 | }
124 | let result = type_func[type](smy, limit)
125 | if (unfinished_number) result += `\nNumber of Folders not read:${unfinished_number}`
126 | return result
127 | } else { // Default output json
128 | return (typeof smy === 'string') ? smy : JSON.stringify(smy)
129 | }
130 | }
// A single file (not a folder) is summarized directly.
131 | const file = await get_info_by_id(fid, service_account)
132 | if (file && file.mimeType !== FOLDER_TYPE) return render_smy(summary([file]), type)
133 |
134 | let info, smy
135 | const record = db.prepare('SELECT * FROM gd WHERE fid = ?').get(fid)
136 | if (!file && !record) {
137 | throw new Error(`Unable to access the link, please check if the link is valid and SA has the appropriate permissions:https://drive.google.com/drive/folders/${fid}`)
138 | }
139 | if (!record || update) {
140 | [info, smy] = await update_info()
141 | }
142 | if (type === 'all') {
143 | info = info || get_all_by_fid(fid)
144 | if (!info) { // Explain that the last statistical process was interrupted
145 | [info] = await update_info()
146 | }
147 | return info && JSON.stringify(info)
148 | }
149 | if (smy) return render_smy(smy, type)
150 | if (record && record.summary) return render_smy(record.summary, type)
151 | info = info || get_all_by_fid(fid)
152 | if (info) {
153 | smy = summary(info)
154 | } else {
155 | [info, smy] = await update_info()
156 | }
157 | return render_smy(smy, type, info.unfinished_number)
158 | }
159 |
// CLI "count" command: print (or write to `output`) statistics for the
// folder `fid`. Serves cached data from the local db unless `update` is
// set; type 'snap' produces a snap2html snapshot, other types delegate
// to get_out_str.
160 | async function count ({ fid, update, sort, type, output, not_teamdrive, service_account }) {
161 | sort = (sort || '').toLowerCase()
162 | type = (type || '').toLowerCase()
163 | output = (output || '').toLowerCase()
164 | let out_str
165 | if (!update) {
// Fast path: with no formatting options, a cached summary prints directly.
166 | if (!type && !sort && !output) {
167 | const record = db.prepare('SELECT * FROM gd WHERE fid = ?').get(fid)
168 | const smy = record && record.summary && JSON.parse(record.summary)
169 | if (smy) return console.log(make_table(smy))
170 | }
171 | const info = get_all_by_fid(fid)
172 | if (info) {
173 | console.log('cached data found in local database, cache time:', dayjs(info.mtime).format('YYYY-MM-DD HH:mm:ss'))
174 | if (type === 'snap') {
175 | const name = await get_name_by_id(fid, service_account)
176 | out_str = snap2html({ root: { name, id: fid }, data: info })
177 | } else {
178 | out_str = get_out_str({ info, type, sort })
179 | }
180 | if (output) return fs.writeFileSync(output, out_str)
181 | return console.log(out_str)
182 | }
183 | }
// No usable cache (or forced update): walk the folder online.
// 'snap' needs per-file modifiedTime, so request it during the walk.
184 | const with_modifiedTime = type === 'snap'
185 | const result = await walk_and_save({ fid, not_teamdrive, update, service_account, with_modifiedTime })
186 | if (type === 'snap') {
187 | const name = await get_name_by_id(fid, service_account)
188 | out_str = snap2html({ root: { name, id: fid }, data: result })
189 | } else {
190 | out_str = get_out_str({ info: result, type, sort })
191 | }
192 | if (output) {
193 | fs.writeFileSync(output, out_str)
194 | } else {
195 | console.log(out_str)
196 | }
197 | }
198 |
// Render a walked listing in the requested output format.
// type: 'tree' | 'html' | 'json' | 'all'; anything else falls back to a
// plain-text table of the summary.
function get_out_str ({ info, type, sort }) {
  const smy = summary(info, sort)
  switch (type) {
    case 'tree':
      return gen_tree_html(info)
    case 'html':
      return make_html(smy)
    case 'json':
      return JSON.stringify(smy)
    case 'all':
      return JSON.stringify(info)
    default:
      return make_table(smy)
  }
}
215 |
// Reassemble the full recursive listing of `fid` from the local `gd`
// table (each row stores one folder's direct children in `info` and its
// subfolder ids in `subf`). Returns null when any subfolder row is
// missing — i.e. a previous walk was interrupted — so callers know to
// re-walk online.
216 | function get_all_by_fid (fid) {
217 | const record = db.prepare('SELECT * FROM gd WHERE fid = ?').get(fid)
218 | if (!record) return null
219 | const { info, subf } = record
220 | let result = JSON.parse(info)
// Tag every entry with its parent folder id for later mapping.
221 | result = result.map(v => {
222 | v.parent = fid
223 | return v
224 | })
225 | if (!subf) return result
226 | return recur(result, JSON.parse(subf))
227 |
// Breadth-first expansion: fetch each subfolder's row, append its files,
// and recurse into the next level of subfolders.
228 | function recur (result, subf) {
229 | if (!subf.length) return result
230 | const arr = subf.map(v => {
231 | const row = db.prepare('SELECT * FROM gd WHERE fid = ?').get(v)
232 | if (!row) return null // If the corresponding fid record is not found, it means that the process was interrupted last time or the folder was not read completely
233 | let info = JSON.parse(row.info)
234 | info = info.map(vv => {
235 | vv.parent = v
236 | return vv
237 | })
238 | return { info, subf: JSON.parse(row.subf) }
239 | })
240 | if (arr.some(v => v === null)) return null
241 | const sub_subf = [].concat(...arr.map(v => v.subf).filter(v => v))
242 | result = result.concat(...arr.map(v => v.info))
243 | return recur(result, sub_subf)
244 | }
245 | }
246 |
// Recursively list every file under `fid` (parallelism capped at
// PARALLEL_LIMIT via p-limit), caching each folder's listing in the `gd`
// table as it goes. Prints a progress line every second; when a `tg`
// callback is given, reports progress to it every 10s. Returns the flat
// listing with result.unfinished_number set to the count of folders that
// could not be fully read.
247 | async function walk_and_save ({ fid, not_teamdrive, update, service_account, with_modifiedTime, tg }) {
248 | let result = []
249 | const unfinished_folders = []
250 | const limit = pLimit(PARALLEL_LIMIT)
251 |
// A forced update invalidates the cached summary for this root first.
252 | if (update) {
253 | const exists = db.prepare('SELECT fid FROM gd WHERE fid = ?').get(fid)
254 | exists && db.prepare('UPDATE gd SET summary=? WHERE fid=?').run(null, fid)
255 | }
256 |
257 | const loop = setInterval(() => {
258 | const now = dayjs().format('HH:mm:ss')
259 | const message = `${now} | Copied ${result.length} | Ongoing ${limit.activeCount} | Pending ${limit.pendingCount}`
260 | print_progress(message)
261 | }, 1000)
262 |
263 | const tg_loop = tg && setInterval(() => {
264 | tg({
265 | obj_count: result.length,
266 | processing_count: limit.activeCount,
267 | pending_count: limit.pendingCount
268 | })
269 | }, 10 * 1000)
270 |
// Depth-first walk. Without `update`, a cached row short-circuits the
// network call; freshly fetched listings are persisted via
// save_files_to_db before recursing into subfolders.
271 | async function recur (parent) {
272 | let files, should_save
273 | if (update) {
274 | files = await limit(() => ls_folder({ fid: parent, not_teamdrive, service_account, with_modifiedTime }))
275 | should_save = true
276 | } else {
277 | const record = db.prepare('SELECT * FROM gd WHERE fid = ?').get(parent)
278 | if (record) {
279 | files = JSON.parse(record.info)
280 | } else {
281 | files = await limit(() => ls_folder({ fid: parent, not_teamdrive, service_account, with_modifiedTime }))
282 | should_save = true
283 | }
284 | }
285 | if (!files) return
286 | if (files.unfinished) unfinished_folders.push(parent)
287 | should_save && save_files_to_db(parent, files)
288 | const folders = files.filter(v => v.mimeType === FOLDER_TYPE)
289 | files.forEach(v => v.parent = parent)
290 | result = result.concat(files)
291 | return Promise.all(folders.map(v => recur(v.id)))
292 | }
293 | try {
294 | await recur(fid)
295 | } catch (e) {
296 | console.error(e)
297 | }
298 | console.log('\nInfo obtained')
299 | unfinished_folders.length ? console.log('Unread FolderID:', JSON.stringify(unfinished_folders)) : console.log('All Folders have been read')
300 | clearInterval(loop)
301 | if (tg_loop) {
302 | clearInterval(tg_loop)
303 | tg({
304 | obj_count: result.length,
305 | processing_count: limit.activeCount,
306 | pending_count: limit.pendingCount
307 | })
308 | }
// Only a complete walk produces a summary worth caching.
309 | const smy = unfinished_folders.length ? null : summary(result)
310 | smy && db.prepare('UPDATE gd SET summary=?, mtime=? WHERE fid=?').run(JSON.stringify(smy), Date.now(), fid)
311 | result.unfinished_number = unfinished_folders.length
312 | return result
313 | }
314 |
// Upsert one folder's direct listing into the `gd` cache table.
// `subf` stores the child-folder ids (or NULL for a leaf folder).
315 | function save_files_to_db (fid, files) {
316 | // Do not save the folder where the request is not completed, then the next call to get_all_by_id will return null, so call walk_and_save again to try to complete the request for this folder
317 | if (files.unfinished) return
318 | let subf = files.filter(v => v.mimeType === FOLDER_TYPE).map(v => v.id)
319 | subf = subf.length ? JSON.stringify(subf) : null
320 | const exists = db.prepare('SELECT fid FROM gd WHERE fid = ?').get(fid)
321 | if (exists) {
322 | db.prepare('UPDATE gd SET info=?, subf=?, mtime=? WHERE fid=?')
323 | .run(JSON.stringify(files), subf, Date.now(), fid)
324 | } else {
325 | db.prepare('INSERT INTO gd (fid, info, subf, ctime) VALUES (?, ?, ?, ?)')
326 | .run(fid, JSON.stringify(files), subf, Date.now())
327 | }
328 | }
329 |
// List the direct children of folder `fid` via the Drive v3 files API,
// following nextPageToken until exhausted. Each page is retried up to
// RETRY_LIMIT times with exponential timeout backoff; on final failure
// the partial array is returned with `files.unfinished = true`.
330 | async function ls_folder ({ fid, not_teamdrive, service_account, with_modifiedTime }) {
331 | let files = []
332 | let pageToken
333 | const search_all = { includeItemsFromAllDrives: true, supportsAllDrives: true }
334 | const params = ((fid === 'root') || not_teamdrive) ? {} : search_all
335 | params.q = `'${fid}' in parents and trashed = false`
336 | params.orderBy = 'folder,name desc'
337 | params.fields = 'nextPageToken, files(id, name, mimeType, size, md5Checksum)'
338 | if (with_modifiedTime) {
339 | params.fields = 'nextPageToken, files(id, name, mimeType, modifiedTime, size, md5Checksum)'
340 | }
// Drive caps pageSize at 1000 regardless of configuration.
341 | params.pageSize = Math.min(PAGE_SIZE, 1000)
342 | // const use_sa = (fid !== 'root') && (service_account || !not_teamdrive) // Without parameters, use sa by default
343 | const use_sa = (fid !== 'root') && service_account
344 | // const headers = await gen_headers(use_sa)
345 | // For Folders with a large number of subfolders(1ctMwpIaBg8S1lrZDxdynLXJpMsm5guAl),The access_token may have expired before listing
346 | // Because nextPageToken is needed to get the data of the next page,So you cannot use parallel requests,The test found that each request to obtain 1000 files usually takes more than 20 seconds to complete
// Keep one gtoken for the whole listing so the token can be re-fetched
// fresh on every page/retry below.
347 | const gtoken = use_sa && (await get_sa_token()).gtoken
348 | do {
349 | if (pageToken) params.pageToken = pageToken
350 | let url = 'https://www.googleapis.com/drive/v3/files'
351 | url += '?' + params_to_query(params)
352 | let retry = 0
353 | let data
354 | const payload = { timeout: TIMEOUT_BASE }
355 | while (!data && (retry < RETRY_LIMIT)) {
356 | const access_token = gtoken ? (await gtoken.getToken()).access_token : (await get_access_token())
357 | const headers = { authorization: 'Bearer ' + access_token }
358 | payload.headers = headers
359 | try {
360 | data = (await axins.get(url, payload)).data
361 | } catch (err) {
362 | handle_error(err)
363 | retry++
// Double the timeout on each failure, bounded by TIMEOUT_MAX.
364 | payload.timeout = Math.min(payload.timeout * 2, TIMEOUT_MAX)
365 | }
366 | }
367 | if (!data) {
368 | console.error('Folder is not read completely, Parameters:', params)
369 | files.unfinished = true
370 | return files
371 | }
372 | files = files.concat(data.files)
373 | argv.sfl && console.log('files.length:', files.length)
374 | pageToken = data.nextPageToken
375 | } while (pageToken)
376 |
377 | return files
378 | }
379 |
// Build the Authorization header for a Drive API call: a service-account
// token when use_sa is truthy, the regular OAuth refresh token otherwise.
async function gen_headers (use_sa) {
  let token
  if (use_sa) {
    const sa = await get_sa_token()
    token = sa.access_token
  } else {
    token = await get_access_token()
  }
  return { authorization: 'Bearer ' + token }
}
385 |
// Serialize a flat object into a URL query string. Both keys and values
// go through encodeURIComponent (so spaces become %20, not '+').
function params_to_query (data) {
  return Object.keys(data)
    .map(key => encodeURIComponent(key) + '=' + encodeURIComponent(data[key]))
    .join('&')
}
393 |
// Return a valid user OAuth access token, refreshing it via the v4 token
// endpoint when the cached one in AUTH has expired. The fresh token and
// its expiry are written back into AUTH.
394 | async function get_access_token () {
395 | const { expires, access_token, client_id, client_secret, refresh_token } = AUTH
396 | if (expires > Date.now()) return access_token
397 |
398 | const url = 'https://www.googleapis.com/oauth2/v4/token'
399 | const headers = { 'Content-Type': 'application/x-www-form-urlencoded' }
400 | const config = { headers }
401 | const params = { client_id, client_secret, refresh_token, grant_type: 'refresh_token' }
402 | const { data } = await axins.post(url, params_to_query(params), config)
403 | // console.log('Got new token:', data)
404 | AUTH.access_token = data.access_token
405 | AUTH.expires = Date.now() + 1000 * data.expires_in
406 | return data.access_token
407 | }
408 |
409 | // get_sa_token().then(console.log).catch(console.error)
// Pick a random service account from the current pool and return its
// token. Accounts that fail to authenticate are dropped from the pool;
// when the pool empties, the next batch of key files is loaded. Throws
// once every SA has been tried and failed.
410 | async function get_sa_token () {
411 | if (!SA_TOKENS.length) SA_TOKENS = get_sa_batch()
412 | while (SA_TOKENS.length) {
413 | const tk = get_random_element(SA_TOKENS)
414 | try {
415 | return await real_get_sa_token(tk)
416 | } catch (e) {
417 | console.warn('SA failed to get access_token:', e.message)
418 | SA_TOKENS = SA_TOKENS.filter(v => v.gtoken !== tk.gtoken)
419 | if (!SA_TOKENS.length) SA_TOKENS = get_sa_batch()
420 | }
421 | }
422 | throw new Error('No SA available')
423 | }
424 |
// Resolve one pool entry to { access_token, gtoken }, refreshing through
// gtoken when the cached value has expired. The expiry is shortened by
// five minutes to avoid using a token at the edge of its lifetime.
425 | async function real_get_sa_token (el) {
426 | const { value, expires, gtoken } = el
427 | // The reason for passing out gtoken is that when an account is exhausted, it can be filtered accordingly
428 | if (Date.now() < expires) return { access_token: value, gtoken }
429 | const { access_token, expires_in } = await gtoken.getToken({ forceRefresh: true })
430 | el.value = access_token
431 | el.expires = Date.now() + 1000 * (expires_in - 60 * 5) // 5 mins passed is taken as Expired
432 | return { access_token, gtoken }
433 | }
434 |
// Return a uniformly random element of arr (undefined for an empty array).
function get_random_element (arr) {
  const index = Math.floor(Math.random() * arr.length)
  return arr[index]
}
438 |
// Validate a Google Drive file/folder ID.
// Accepts the special aliases 'root', 'appDataFolder' and 'photos';
// otherwise the ID must be 10-100 characters from [a-zA-Z0-9_-].
// Returns a boolean. (The original returned the raw match result —
// an array or null — leaking an implementation detail to callers that
// only need truthiness; RegExp.test gives a clean true/false.)
function validate_fid (fid) {
  if (!fid) return false
  fid = String(fid)
  const whitelist = ['root', 'appDataFolder', 'photos']
  if (whitelist.includes(fid)) return true
  if (fid.length < 10 || fid.length > 100) return false
  const reg = /^[a-zA-Z0-9_-]+$/
  return reg.test(fid)
}
448 |
// Create a folder named `name` under `parent`, retrying up to
// RETRY_LIMIT times. A Drive "file limit" error aborts immediately
// (clearing the optional p-limit queue) with FILE_EXCEED_MSG; any other
// persistent failure throws the last error message plus the folder name.
449 | async function create_folder (name, parent, use_sa, limit) {
450 | let url = `https://www.googleapis.com/drive/v3/files`
451 | const params = { supportsAllDrives: true }
452 | url += '?' + params_to_query(params)
453 | const post_data = {
454 | name,
455 | mimeType: FOLDER_TYPE,
456 | parents: [parent]
457 | }
458 | let retry = 0
459 | let err_message
460 | while (retry < RETRY_LIMIT) {
461 | try {
462 | const headers = await gen_headers(use_sa)
463 | return (await axins.post(url, post_data, { headers })).data
464 | } catch (err) {
465 | err_message = err.message
466 | retry++
467 | handle_error(err)
468 | const data = err && err.response && err.response.data
469 | const message = data && data.error && data.error.message
// The 400k-files-per-drive error is unrecoverable: stop retrying.
470 | if (message && message.toLowerCase().includes('file limit')) {
471 | if (limit) limit.clearQueue()
472 | throw new Error(FILE_EXCEED_MSG)
473 | }
474 | console.log('Creating Folder and Retrying:', name, 'No of retries:', retry)
475 | }
476 | }
477 | throw new Error(err_message + ' Folder Name:' + name)
478 | }
479 |
// Resolve a file/folder ID to its display name. Falls back to returning
// the raw ID when the metadata cannot be fetched.
async function get_name_by_id (fid, use_sa) {
  const info = await get_info_by_id(fid, use_sa)
  if (!info) return fid
  return info.name
}
484 |
// Fetch metadata (id, name, size, parents, mimeType, modifiedTime) for a
// single file/folder, retrying up to RETRY_LIMIT times. Returns
// undefined when every attempt fails — callers treat that as
// "inaccessible" rather than an exception.
485 | async function get_info_by_id (fid, use_sa) {
486 | let url = `https://www.googleapis.com/drive/v3/files/${fid}`
487 | let params = {
488 | includeItemsFromAllDrives: true,
489 | supportsAllDrives: true,
490 | corpora: 'allDrives',
491 | fields: 'id, name, size, parents, mimeType, modifiedTime'
492 | }
493 | url += '?' + params_to_query(params)
494 | let retry = 0
495 | while (retry < RETRY_LIMIT) {
496 | try {
497 | const headers = await gen_headers(use_sa)
498 | const { data } = await axins.get(url, { headers })
499 | return data
500 | } catch (e) {
501 | retry++
502 | handle_error(e)
503 | }
504 | }
505 | // throw new Error('Unable to access this FolderID:' + fid)
506 | }
507 |
// Interactive prompt shown when a previous copy task for the same
// source/target pair exists: resume it, restart from scratch, or exit.
// Resolves to 'continue' | 'restart' | 'exit' (or undefined on ctrl+c).
508 | async function user_choose () {
509 | const answer = await prompts({
510 | type: 'select',
511 | name: 'value',
512 | message: 'Do you wish to resume?',
513 | choices: [
514 | { title: 'Continue', description: 'Resume the transfer', value: 'continue' },
515 | { title: 'Restart', description: 'Restart the process', value: 'restart' },
516 | { title: 'Exit', description: 'Exit', value: 'exit' }
517 | ],
518 | initial: 0
519 | })
520 | return answer.value
521 | }
522 |
// Top-level copy entry point. Single files are copied directly; folders
// are delegated to real_copy. Refuses to start when a task for the same
// source/target is already 'copying', and marks the task 'error' if
// real_copy throws.
523 | async function copy ({ source, target, name, min_size, update, not_teamdrive, service_account, dncnr, is_server }) {
524 | target = target || DEFAULT_TARGET
525 | if (!target) throw new Error('Destination ID cannot be empty')
526 |
527 | const file = await get_info_by_id(source, service_account)
528 | if (!file) return console.error(`Unable to access the link, please check if the link is valid and SA has the appropriate permissions:https://drive.google.com/drive/folders/${source}`)
529 | if (file && file.mimeType !== FOLDER_TYPE) {
// NOTE(review): get_gid_by_md5 can return null (no hash record), which
// would make copy_file receive a null source — confirm intended.
530 | if (argv.hash_server === 'local') source = get_gid_by_md5(file.md5Checksum)
531 | return copy_file(source, target, service_account).catch(console.error)
532 | }
533 |
534 | const record = db.prepare('select id, status from task where source=? and target=?').get(source, target)
535 | if (record && record.status === 'copying') return console.log('This Task is already running. Force Quit')
536 |
537 | try {
538 | return await real_copy({ source, target, name, min_size, update, dncnr, not_teamdrive, service_account, is_server })
539 | } catch (err) {
540 | console.error('Error copying folder', err)
541 | const record = db.prepare('select id, status from task where source=? and target=?').get(source, target)
542 | if (record) db.prepare('update task set status=? where id=?').run('error', record.id)
543 | }
544 | }
545 |
546 | // To be resolved: If the user manually interrupts the process with ctrl+c, the request that has been issued will not be recorded in the local database even if it is completed, so duplicate files (folders) may be generated
// Copy the folder tree `source` into `target`. Task state lives in the
// `task` table; the `mapping` column stores "sourceId newId" lines for
// every folder created, and `copied` records each finished file so an
// interrupted task can resume without re-copying. Returns
// { id: <new root id>, task_id } on completion.
547 | async function real_copy ({ source, target, name, min_size, update, dncnr, not_teamdrive, service_account, is_server }) {
// Determine the destination root: the target itself (dncnr = "do not
// create new root"), a folder with the explicit `name`, or one named
// after the source folder.
548 | async function get_new_root () {
549 | if (dncnr) return { id: target }
550 | if (name) {
551 | return create_folder(name, target, service_account)
552 | } else {
553 | const file = await get_info_by_id(source, service_account)
554 | if (!file) throw new Error(`Unable to access the link, please check if the link is valid and SA has the appropriate permissions:https://drive.google.com/drive/folders/${source}`)
555 | return create_folder(file.name, target, service_account)
556 | }
557 | }
558 |
559 | const record = db.prepare('select * from task where source=? and target=?').get(source, target)
560 | if (record) {
// An earlier task exists: ask (or assume, in server/--yes mode) whether
// to resume, restart, or quit.
561 | const copied = db.prepare('select fileid from copied where taskid=?').all(record.id).map(v => v.fileid)
562 | const choice = (is_server || argv.yes) ? 'continue' : await user_choose()
563 | if (choice === 'exit') {
564 | return console.log('exit the program')
565 | } else if (choice === 'continue') {
// Resume: rebuild the old folder mapping and skip already-copied files.
566 | let { mapping } = record
567 | const old_mapping = {}
568 | const copied_ids = {}
569 | copied.forEach(id => copied_ids[id] = true)
570 | mapping = mapping.trim().split('\n').map(line => line.split(' '))
571 | const root = mapping[0][1]
572 | mapping.forEach(arr => old_mapping[arr[0]] = arr[1])
573 | db.prepare('update task set status=? where id=?').run('copying', record.id)
574 | const arr = await walk_and_save({ fid: source, update, not_teamdrive, service_account })
575 | let files = arr.filter(v => v.mimeType !== FOLDER_TYPE).filter(v => !copied_ids[v.id])
576 | if (min_size) files = files.filter(v => v.size >= min_size)
577 | const folders = arr.filter(v => v.mimeType === FOLDER_TYPE)
578 | const all_mapping = await create_folders({
579 | old_mapping,
580 | source,
581 | folders,
582 | service_account,
583 | root,
584 | task_id: record.id
585 | })
586 | await copy_files({ files, service_account, root, mapping: all_mapping, task_id: record.id })
587 | db.prepare('update task set status=?, ftime=? where id=?').run('finished', Date.now(), record.id)
588 | return { id: root, task_id: record.id }
589 | } else if (choice === 'restart') {
// Restart: wipe the copied-file log, create a fresh destination root,
// and redo the whole copy under the same task row.
590 | const new_root = await get_new_root()
591 | const root_mapping = source + ' ' + new_root.id + '\n'
592 | db.prepare('update task set status=?, mapping=? where id=?').run('copying', root_mapping, record.id)
593 | db.prepare('delete from copied where taskid=?').run(record.id)
594 | // const arr = await walk_and_save({ fid: source, update: true, not_teamdrive, service_account })
595 | const arr = await walk_and_save({ fid: source, update, not_teamdrive, service_account })
596 |
597 | let files = arr.filter(v => v.mimeType !== FOLDER_TYPE)
598 | if (min_size) files = files.filter(v => v.size >= min_size)
599 | const folders = arr.filter(v => v.mimeType === FOLDER_TYPE)
600 | console.log('Number of folders to be copied:', folders.length)
601 | console.log('Number of files to be copied:', files.length)
602 | const mapping = await create_folders({
603 | source,
604 | folders,
605 | service_account,
606 | root: new_root.id,
607 | task_id: record.id
608 | })
609 | await copy_files({ files, mapping, service_account, root: new_root.id, task_id: record.id })
610 | db.prepare('update task set status=?, ftime=? where id=?').run('finished', Date.now(), record.id)
611 | return { id: new_root.id, task_id: record.id }
612 | } else {
613 | // ctrl+c Exit
614 | return console.log('Exit')
615 | }
616 | } else {
// No previous task: insert a new task row and copy everything.
617 | const new_root = await get_new_root()
618 | const root_mapping = source + ' ' + new_root.id + '\n'
619 | const { lastInsertRowid } = db.prepare('insert into task (source, target, status, mapping, ctime) values (?, ?, ?, ?, ?)').run(source, target, 'copying', root_mapping, Date.now())
620 | const arr = await walk_and_save({ fid: source, update, not_teamdrive, service_account })
621 | let files = arr.filter(v => v.mimeType !== FOLDER_TYPE)
622 | if (min_size) files = files.filter(v => v.size >= min_size)
623 | const folders = arr.filter(v => v.mimeType === FOLDER_TYPE)
624 | console.log('Number of folders to be copied:', folders.length)
625 | console.log('Number of files to be copied:', files.length)
626 | const mapping = await create_folders({
627 | source,
628 | folders,
629 | service_account,
630 | root: new_root.id,
631 | task_id: lastInsertRowid
632 | })
633 | await copy_files({ files, mapping, service_account, root: new_root.id, task_id: lastInsertRowid })
634 | db.prepare('update task set status=?, ftime=? where id=?').run('finished', Date.now(), lastInsertRowid)
635 | return { id: new_root.id, task_id: lastInsertRowid }
636 | }
637 | }
638 |
// Copy the given files into their mapped destination folders with a
// hand-rolled concurrency loop bounded by PARALLEL_LIMIT (a deliberate
// replacement for p-limit; see the commented-out version below). Each
// finished file is logged to the `copied` table; the first copy error
// aborts the whole batch.
639 | async function copy_files ({ files, mapping, service_account, root, task_id }) {
640 | if (!files.length) return
641 | console.log('\nStarted copying files, total:', files.length)
642 |
// Progress ticker — reads `count`/`concurrency` declared below; safe
// because the first tick fires well after those declarations run.
643 | const loop = setInterval(() => {
644 | const now = dayjs().format('HH:mm:ss')
645 | const message = `${now} | Number of files copied ${count} | ongoing ${concurrency} | Number of Files Pending ${files.length}`
646 | print_progress(message)
647 | }, 1000)
648 |
649 | let count = 0
650 | let concurrency = 0
651 | let err
652 | do {
653 | if (err) {
654 | clearInterval(loop)
655 | files = null
656 | throw err
657 | }
// Throttle: wait while the number of in-flight copies is at the cap.
658 | if (concurrency >= PARALLEL_LIMIT) {
659 | await sleep(100)
660 | continue
661 | }
662 | const file = files.shift()
663 | if (!file) {
// Queue drained but copies still in flight — idle until they settle.
664 | await sleep(1000)
665 | continue
666 | }
667 | concurrency++
668 | let { id, parent, md5Checksum } = file
669 | if (argv.hash_server === 'local') id = get_gid_by_md5(md5Checksum) || id
670 | const target = mapping[parent] || root
671 | const use_sa = (id !== file.id) ? true : service_account //If the same md5 record is found in the local database, use sa copy
// Fire-and-track: the promise is deliberately not awaited here; the
// loop condition below waits for concurrency to drain.
672 | copy_file(id, target, use_sa, null, task_id).then(new_file => {
673 | if (new_file) {
674 | count++
675 | db.prepare('INSERT INTO copied (taskid, fileid) VALUES (?, ?)').run(task_id, file.id)
676 | }
677 | }).catch(e => {
678 | err = e
679 | }).finally(() => {
680 | concurrency--
681 | })
682 | } while (concurrency || files.length)
683 | clearInterval(loop)
684 | if (err) throw err
685 | // const limit = pLimit(PARALLEL_LIMIT)
686 | // let count = 0
687 | // const loop = setInterval(() => {
688 | // const now = dayjs().format('HH:mm:ss')
689 | // const {activeCount, pendingCount} = limit
690 | // const message = `${now} | Number of files copied ${count} | Ongoing ${activeCount} | Pending ${pendingCount}`
691 | // print_progress(message)
692 | // }, 1000)
693 | // May cause excessive memory usage and be forced to exit by node
694 | // return Promise.all(files.map(async file => {
695 | // const { id, parent } = file
696 | // const target = mapping[parent] || root
697 | // const new_file = await limit(() => copy_file(id, target, service_account, limit, task_id))
698 | // if (new_file) {
699 | // count++
700 | // db.prepare('INSERT INTO copied (taskid, fileid) VALUES (?, ?)').run(task_id, id)
701 | // }
702 | // })).finally(() => clearInterval(loop))
703 | }
704 |
// Copies a single file `id` into folder `parent` through the Drive v3
// files.copy endpoint. Retries up to RETRY_LIMIT times, fetching a fresh
// token (SA-rotating when use_sa) before each attempt.
// Returns the new file's metadata on success, or undefined when all retries
// fail (callers treat a missing return value as a skipped file).
// @param {string} id - source file id
// @param {string} parent - destination folder id
// @param {boolean} use_sa - authenticate with a service account token
// @param {Object|null} limit - optional p-limit instance; its queue is
//   cleared on fatal errors so pending copies are abandoned
// @param {number} [task_id] - task row to mark as 'error' on fatal failure
async function copy_file (id, parent, use_sa, limit, task_id) {
  let url = `https://www.googleapis.com/drive/v3/files/${id}/copy`
  let params = { supportsAllDrives: true }
  url += '?' + params_to_query(params)
  const config = {}
  let retry = 0
  while (retry < RETRY_LIMIT) {
    let gtoken
    if (use_sa) {
      const temp = await get_sa_token()
      gtoken = temp.gtoken
      config.headers = { authorization: 'Bearer ' + temp.access_token }
    } else {
      config.headers = await gen_headers()
    }
    try {
      const { data } = await axins.post(url, { parents: [parent] }, config)
      // Success: clear this SA's daily-limit strike counter.
      if (gtoken) gtoken.exceed_count = 0
      return data
    } catch (err) {
      retry++
      handle_error(err)
      const data = err && err.response && err.response.data
      const message = data && data.error && data.error.message
      // Destination drive is full: abort the whole task immediately.
      if (message && message.toLowerCase().includes('file limit')) {
        if (limit) limit.clearQueue()
        if (task_id) db.prepare('update task set status=? where id=?').run('error', task_id)
        throw new Error(FILE_EXCEED_MSG)
      }
      // Personal-auth rate limits cannot be rotated around: fail fast.
      if (!use_sa && message && message.toLowerCase().includes('rate limit')) {
        throw new Error('Personal Drive Limit:' + message)
      }
      // if (use_sa && message && message.toLowerCase().includes('user rate limit')) {
      //   if (retry >= RETRY_LIMIT) throw new Error(`This resource triggers a userRateLimitExceeded error for ${EXCEED_LIMIT} consecutive times and stops copying`)
      //   if (gtoken.exceed_count >= EXCEED_LIMIT) {
      //     SA_TOKENS = SA_TOKENS.filter(v => v.gtoken !== gtoken)
      //     if (!SA_TOKENS.length) SA_TOKENS = get_sa_batch()
      //     console.log(`This account has triggered the daily usage limit${EXCEED_LIMIT} consecutive times, the remaining amount of SA available in this batch:`, SA_TOKENS.length)
      //   } else {
      //     console.log('This account triggers its daily usage limit and has been marked. If the next request is normal, it will be unmarked, otherwise the SA will be removed')
      //     if (gtoken.exceed_count) {
      //       gtoken.exceed_count++
      //     } else {
      //       gtoken.exceed_count = 1
      //     }
      //   }
      // }
    }
  }
  // Retries exhausted: fatal when every SA token is used up, otherwise the
  // file is skipped with a warning and the task carries on.
  if (use_sa && !SA_TOKENS.length) {
    if (limit) limit.clearQueue()
    if (task_id) db.prepare('update task set status=? where id=?').run('error', task_id)
    throw new Error('All SA are exhausted')
  } else {
    console.warn('File creation failed,Fileid: ' + id)
  }
}
762 |
// Recreates the source folder tree under `root`, level by level so that a
// parent always exists before its children. Returns a mapping of source
// folder id -> created folder id, appending each pair to the task's
// `mapping` column so interrupted tasks can resume.
// @param {string} source - source root folder id
// @param {Object} [old_mapping] - mapping from a previous (resumed) run
// @param {Array<Object>} folders - {id, name, parent} records to recreate
// @param {string} root - destination root folder id
// @param {number} task_id - task table row id
// @param {boolean} service_account - create with SA credentials
async function create_folders ({ source, old_mapping, folders, root, task_id, service_account }) {
  if (argv.dncf) return {} // do not copy folders
  if (!Array.isArray(folders)) throw new Error('folders must be Array:' + folders)
  const mapping = old_mapping || {}
  mapping[source] = root
  if (!folders.length) return mapping

  const missed_folders = folders.filter(v => !mapping[v.id])
  console.log('Start copying folders, total:', missed_folders.length)
  const limit = pLimit(PARALLEL_LIMIT)
  let count = 0
  // Seed the walk with the top level: everything sharing folders[0]'s parent.
  // NOTE(review): assumes folders[0] sits at the top level of the listing —
  // confirm the walker always emits records in that order.
  let same_levels = folders.filter(v => v.parent === folders[0].parent)

  const loop = setInterval(() => {
    const now = dayjs().format('HH:mm:ss')
    const message = `${now} | Folders Created ${count} | Ongoing ${limit.activeCount} | Pending ${limit.pendingCount}`
    print_progress(message)
  }, 1000)

  while (same_levels.length) {
    // Skip folders already created by a previous run (resume support).
    const same_levels_missed = same_levels.filter(v => !mapping[v.id])
    await Promise.all(same_levels_missed.map(async v => {
      try {
        const { name, id, parent } = v
        const target = mapping[parent] || root
        const new_folder = await limit(() => create_folder(name, target, service_account, limit))
        count++
        mapping[id] = new_folder.id
        // Persist the pair immediately so a crash loses at most one folder.
        const mapping_record = id + ' ' + new_folder.id + '\n'
        db.prepare('update task set mapping = mapping || ? where id=?').run(mapping_record, task_id)
      } catch (e) {
        // Drive full is fatal; any other per-folder failure is logged and skipped.
        if (e.message === FILE_EXCEED_MSG) {
          clearInterval(loop)
          throw new Error(FILE_EXCEED_MSG)
        }
        console.error('Error creating Folder:', e.message)
      }
    }))
    // folders = folders.filter(v => !mapping[v.id])
    // Descend one level: children of everything processed in this pass.
    same_levels = [].concat(...same_levels.map(v => folders.filter(vv => vv.parent === v.id)))
  }

  clearInterval(loop)
  return mapping
}
808 |
// Returns the items under one listing that dedupe may delete:
// - files sharing both parent folder and md5 with an earlier file
// - EMPTY folders whose (parent, name) pair occurs more than once
// @param {Array<Object>} arr - flat listing ({id, parent, name, md5Checksum, mimeType})
// @returns {Array<Object>} duplicate files followed by duplicate empty folders
function find_dupe (arr) {
  const files = arr.filter(v => v.mimeType !== FOLDER_TYPE)
  const folders = arr.filter(v => v.mimeType === FOLDER_TYPE)
  const exists = {}
  const dupe_files = []
  const dupe_folder_keys = {}
  for (const folder of folders) {
    const { parent, name } = folder
    const key = parent + '|' + name
    if (exists[key]) {
      dupe_folder_keys[key] = true
    } else {
      exists[key] = true
    }
  }
  // Collect every parent id once so the has-children test below is O(1)
  // per folder (previously arr.some() re-scanned the listing per candidate).
  const parent_ids = new Set(arr.map(v => v.parent))
  const dupe_empty_folders = folders.filter(folder => {
    const { parent, name } = folder
    const key = parent + '|' + name
    return dupe_folder_keys[key]
  }).filter(folder => !parent_ids.has(folder.id))
  for (const file of files) {
    const { md5Checksum, parent } = file
    // Determining Duplicates based on file location and md5 value
    const key = parent + '|' + md5Checksum
    if (exists[key]) {
      dupe_files.push(file)
    } else {
      exists[key] = true
    }
  }
  return dupe_files.concat(dupe_empty_folders)
}
844 |
// Interactive yes/no confirmation before dedupe deletes anything.
// Returns 'yes' | 'no', or undefined when the prompt is aborted (ctrl+c).
// @param {number} file_number - duplicate files found
// @param {number} folder_number - duplicate empty folders found
async function confirm_dedupe ({ file_number, folder_number }) {
  const answer = await prompts({
    type: 'select',
    name: 'value',
    // Fixed message spacing and the "Donot" typo of the original strings.
    message: `Duplicate files detected ${file_number}, Empty Folders detected ${folder_number}, Delete them?`,
    choices: [
      { title: 'Yes', description: 'confirm deletion', value: 'yes' },
      { title: 'No', description: 'Do not delete', value: 'no' }
    ],
    initial: 0
  })
  return answer.value
}
858 |
// Moves a file out of its first parent into new_parent (Drive v3 PATCH with
// removeParents/addParents). The SA must manage the source folder's teamdrive.
async function mv_file ({ fid, new_parent, service_account }) {
  const info = await get_info_by_id(fid, service_account)
  if (!info) return
  const query = params_to_query({
    removeParents: info.parents[0],
    supportsAllDrives: true,
    addParents: new_parent
  })
  const url = `https://www.googleapis.com/drive/v3/files/${fid}?${query}`
  const headers = await gen_headers(service_account)
  return axins.patch(url, {}, { headers })
}
874 |
// Moves a file or folder to the recycle bin (sets trashed=true).
// The SA needs content-manager rights or above on the drive.
async function trash_file ({ fid, service_account }) {
  const headers = await gen_headers(service_account)
  const url = `https://www.googleapis.com/drive/v3/files/${fid}?supportsAllDrives=true`
  return axins.patch(url, { trashed: true }, { headers })
}
881 |
// Permanently deletes a file or folder (bypasses the recycle bin); the SA
// must be a manager. Retries RETRY_LIMIT times, returning undefined if all
// attempts fail.
async function rm_file ({ fid, service_account }) {
  const headers = await gen_headers(service_account)
  const url = `https://www.googleapis.com/drive/v3/files/${fid}?supportsAllDrives=true`
  for (let attempt = 1; attempt <= RETRY_LIMIT; attempt++) {
    try {
      return await axins.delete(url, { headers })
    } catch (err) {
      handle_error(err)
      console.log('retrying to Delete, retry count', attempt)
    }
  }
}
897 |
// Finds duplicate files (same parent + md5) and duplicate empty folders
// under `fid`, asks for confirmation (unless `yes` pre-approves), then moves
// them to the recycle bin.
// Returns {file_count, folder_count} of deleted items, or undefined when
// the user aborted the prompt.
// @param {string} fid - root folder id to scan
// @param {boolean} update - skip the local cache and walk the tree online
// @param {boolean} service_account - authenticate with SA credentials
// @param {string} [yes] - pass 'yes' to skip the interactive prompt
async function dedupe ({ fid, update, service_account, yes }) {
  let arr
  if (!update) {
    const info = get_all_by_fid(fid)
    if (info) {
      console.log('Locally cached data Found, cache time:', dayjs(info.mtime).format('YYYY-MM-DD HH:mm:ss'))
      arr = info
    }
  }
  // Cache miss (or forced update): walk the folder online and cache it.
  arr = arr || await walk_and_save({ fid, update, service_account })
  const dupes = find_dupe(arr)
  const folder_number = dupes.filter(v => v.mimeType === FOLDER_TYPE).length
  const file_number = dupes.length - folder_number
  const choice = yes || await confirm_dedupe({ file_number, folder_number })
  if (choice === 'no') {
    return console.log('Exit')
  } else if (!choice) {
    return // ctrl+c
  }
  // Trash the duplicates with bounded parallelism; per-item failures are
  // logged but do not stop the rest.
  const limit = pLimit(PARALLEL_LIMIT)
  let folder_count = 0
  let file_count = 0
  await Promise.all(dupes.map(async v => {
    try {
      await limit(() => trash_file({ fid: v.id, service_account }))
      if (v.mimeType === FOLDER_TYPE) {
        console.log('Folder successfully deleted', v.name)
        folder_count++
      } else {
        console.log('File successfully deleted', v.name)
        file_count++
      }
    } catch (e) {
      console.log('Failed to delete', v)
      handle_error(e)
    }
  }))
  return { file_count, folder_count }
}
937 |
// Logs an axios/network error compactly: API error payloads are dumped as
// JSON (rate-limit noise suppressed unless --verbose), everything else logs
// its message (timeouts suppressed unless --verbose).
// Fix: the original dereferenced err.message unguarded, so a null/undefined
// error (or one without a message) crashed the error handler itself.
function handle_error (err) {
  const data = err && err.response && err.response.data
  if (data) {
    const message = data.error && data.error.message
    if (message && message.toLowerCase().includes('rate limit') && !argv.verbose) return
    console.error(JSON.stringify(data))
  } else {
    const message = (err && err.message) || String(err)
    if (!message.includes('timeout') || argv.verbose) console.error(message)
  }
}
948 |
// Renders a one-line progress message: rewrites the current terminal line
// when the stream supports cursor movement (TTY), otherwise falls back to a
// plain log line (e.g. when output is piped).
function print_progress (msg) {
  if (!process.stdout.cursorTo) {
    console.log(msg)
    return
  }
  process.stdout.cursorTo(0)
  process.stdout.write(msg + ' ')
}
957 |
// Public API of the gd module (consumed by router.js, tg.js and the CLI scripts).
module.exports = { ls_folder, count, validate_fid, copy, dedupe, copy_file, gen_count_body, real_copy, get_name_by_id, get_info_by_id, get_access_token, get_sa_token, walk_and_save, save_md5}
--------------------------------------------------------------------------------
/src/src/router.js:
--------------------------------------------------------------------------------
1 | const Router = require('@koa/router')
2 |
3 | const { db } = require('../db')
4 | const { validate_fid, gen_count_body } = require('./gd')
5 | const { send_count, send_help, send_choice, send_task_info, sm, extract_fid, extract_from_text, reply_cb_query, tg_copy, send_all_tasks, send_bm_help, get_target_by_alias, send_all_bookmarks, set_bookmark, unset_bookmark, clear_tasks, send_task_help, rm_task } = require('./tg')
6 |
7 | const { AUTH, ROUTER_PASSKEY, TG_IPLIST } = require('../config')
8 | const { tg_whitelist } = AUTH
9 |
// In-flight copy commands keyed by source-fid + target, used to reject
// duplicate /copy requests while one is still running.
const COPYING_FIDS = {}
// Folder ids currently being counted, to reject concurrent /count requests.
const counting = {}
const router = new Router()
13 |
// True when the process appears to run under the pm2 daemon
// (detected through pm2's well-known environment markers).
function is_pm2 () {
  const markers = ['PM2_HOME', 'PM2_JSON_PROCESSING', 'PM2_CLI']
  return markers.some(key => key in process.env)
}
17 |
// True when n is an integer Number (used to validate task ids).
// The original `n === parseInt(n)` round-tripped n through String(), which
// silently misreports huge integers (e.g. 1e21 stringifies to "1e+21" and
// parses back as 1). Number.isInteger is exact and never coerces, so strings
// like '5' still return false, matching the old strict-equality behavior.
function is_int (n) {
  return Number.isInteger(n)
}
21 |
// GET /gutils/api/gdurl/count — folder statistics endpoint.
// Requires ?passkey=ROUTER_PASSKEY and a valid ?fid; the response format
// follows the explicit ?type, or is inferred from the caller's user-agent.
router.get('/gutils/api/gdurl/count', async ctx => {
  // With no passkey configured the route only serves as a liveness check.
  if (!ROUTER_PASSKEY) return ctx.body = 'gd-utils Successfully started'
  const { query, headers } = ctx.request
  let { fid, type, update, passkey } = query
  if (passkey !== ROUTER_PASSKEY) return ctx.body = 'invalid passkey'
  if (!validate_fid(fid)) throw new Error('Invalid FolderID')

  let ua = headers['user-agent'] || ''
  ua = ua.toLowerCase()
  type = (type || '').toLowerCase()
  // todo type=tree
  // Infer the response format from the client when ?type is absent:
  // curl gets plain text, browsers get html, everything else gets json.
  if (!type) {
    if (ua.includes('curl')) {
      type = 'curl'
    } else if (ua.includes('mozilla')) {
      type = 'html'
    } else {
      type = 'json'
    }
  }
  if (type === 'html') {
    ctx.set('Content-Type', 'text/html; charset=utf-8')
  } else if (['json', 'all'].includes(type)) {
    ctx.set('Content-Type', 'application/json; charset=UTF-8')
  }
  ctx.body = await gen_count_body({ fid, type, update, service_account: true })
})
49 |
// POST /gutils/api/gdurl/tgbot — Telegram bot webhook.
// Handles inline-keyboard callback queries first, then the text commands
// /help /reload /bm /count /copy /task, restricted to tg_whitelist users.
router.post('/gutils/api/gdurl/tgbot', async ctx => {
  const { body } = ctx.request
  console.log('ctx.ip', ctx.ip) // You can only allow the ip of the tg server
  console.log('tg message:', JSON.stringify(body, null, '  '))
  if (TG_IPLIST && !TG_IPLIST.includes(ctx.ip)) return ctx.body = 'invalid ip'
  ctx.body = '' // Release the connection early
  const message = body.message || body.edited_message
  const message_str = JSON.stringify(message)

  // Button presses arrive as callback queries; payload format: "<action> <fid> [target]".
  const { callback_query } = body
  if (callback_query) {
    const { id, message, data } = callback_query
    const chat_id = callback_query.from.id
    const [action, fid, target] = data.split(' ').filter(v => v)
    if (action === 'count') {
      if (counting[fid]) return sm({ chat_id, text: fid + ' Counting, please wait a moment' })
      counting[fid] = true
      send_count({ fid, chat_id }).catch(err => {
        console.error(err)
        sm({ chat_id, text: fid + ' Stats Failed:' + err.message })
      }).finally(() => {
        delete counting[fid]
      })
    } else if (action === 'copy') {
      if (COPYING_FIDS[fid + target]) return sm({ chat_id, text: 'Processing copy command with the same source and destination' })
      COPYING_FIDS[fid + target] = true
      tg_copy({ fid, target: get_target_by_alias(target), chat_id }).then(task_id => {
        is_int(task_id) && sm({ chat_id, text: `Clone Started For Task ID: ${task_id}\nType /task ${task_id} to check the progress` })
      }).finally(() => COPYING_FIDS[fid + target] = false)
    } else if (action === 'update') {
      // Same as 'count' but forces a fresh online walk (ignores local cache).
      if (counting[fid]) return sm({ chat_id, text: fid + ' Counting, please wait a moment' })
      counting[fid] = true
      send_count({ fid, chat_id, update: true }).catch(err => {
        console.error(err)
        sm({ chat_id, text: fid + ' Stats Failed:' + err.message })
      }).finally(() => {
        delete counting[fid]
      })
    } else if (action === 'clear_button') {
      // Re-sends the message text without the inline keyboard attached.
      const { message_id, text } = message || {}
      if (message_id) sm({ chat_id, message_id, text, parse_mode: 'HTML' }, 'editMessageText')
    }
    // Acknowledge the callback so the client stops showing a spinner.
    return reply_cb_query({ id, data }).catch(console.error)
  }

  // Plain text message path: authenticate the sender against the whitelist
  // (matched case-insensitively by username or numeric user id).
  const chat_id = message && message.chat && message.chat.id
  const text = (message && message.text && message.text.trim()) || ''
  let username = message && message.from && message.from.username
  username = username && String(username).toLowerCase()
  let user_id = message && message.from && message.from.id
  user_id = user_id && String(user_id).toLowerCase()
  if (!chat_id || !tg_whitelist.some(v => {
    v = String(v).toLowerCase()
    return v === username || v === user_id
  })) {
    chat_id && sm({ chat_id, text: 'You are not supposed to Message me you idiot, go back to the hole you came from' })
    return console.warn('Received a request from a non-whitelisted user')
  }

  // Pull a folder id from the command text or anywhere in the raw message.
  const fid = extract_fid(text) || extract_from_text(text) || extract_from_text(message_str)
  const no_fid_commands = ['/task', '/help', '/bm', '/reload']
  if (!no_fid_commands.some(cmd => text.startsWith(cmd)) && !validate_fid(fid)) {
    return sm({ chat_id, text: 'Folder ID is invalid or not accessible' })
  }
  if (text.startsWith('/help')) return send_help(chat_id)
  if (text.startsWith('/reload')) {
    // Exit and let pm2 respawn the process; refuse when not daemonised.
    if (!is_pm2()) return sm({ chat_id, text: 'Process is not a pm2 daemon,Do not restart' })
    sm({ chat_id, text: 'Restart' }).then(() => process.exit())
  } else if (text.startsWith('/bm')) {
    // Bookmark management: /bm [set|unset] [alias] [target]
    const [cmd, action, alias, target] = text.split(' ').map(v => v.trim()).filter(v => v)
    if (!action) return send_all_bookmarks(chat_id)
    if (action === 'set') {
      if (!alias || !target) return sm({ chat_id, text: 'Name and Destination FolderID cannot be empty ' })
      if (alias.length > 24) return sm({ chat_id, text: 'Name Shouldnt be more than 24 Letters in Length' })
      if (!validate_fid(target)) return sm({ chat_id, text: 'Incorrect Destination FolderID' })
      set_bookmark({ chat_id, alias, target })
    } else if (action === 'unset') {
      if (!alias) return sm({ chat_id, text: 'Name Cannot be empty' })
      unset_bookmark({ chat_id, alias })
    } else {
      send_bm_help(chat_id)
    }
  } else if (text.startsWith('/count')) {
    if (counting[fid]) return sm({ chat_id, text: fid + ' Counting, please wait a moment' })
    try {
      counting[fid] = true
      const update = text.endsWith(' -u')
      await send_count({ fid, chat_id, update })
    } catch (err) {
      console.error(err)
      sm({ chat_id, text: fid + ' Stats Failed:' + err.message })
    } finally {
      delete counting[fid]
    }
  } else if (text.startsWith('/copy')) {
    // Second token (after the source fid) is the destination, possibly an alias.
    let target = text.replace('/copy', '').replace(' -u', '').trim().split(' ').map(v => v.trim()).filter(v => v)[1]
    target = get_target_by_alias(target) || target
    if (target && !validate_fid(target)) return sm({ chat_id, text: `Destination FolderID ${target} is Invalid` })
    if (COPYING_FIDS[fid + target]) return sm({ chat_id, text: 'Processing copy command with the same source and destination' })
    COPYING_FIDS[fid + target] = true
    const update = text.endsWith(' -u')
    tg_copy({ fid, target, chat_id, update }).then(task_id => {
      is_int(task_id) && sm({ chat_id, text: `Clone Started For Task ID: ${task_id}\nType /task ${task_id} to check the progress` })
    }).finally(() => COPYING_FIDS[fid + target] = false)
  } else if (text.startsWith('/task')) {
    // Sub-commands: all | clear | -h | rm <id> | <id> | (none: running tasks)
    let task_id = text.replace('/task', '').trim()
    if (task_id === 'all') {
      return send_all_tasks(chat_id)
    } else if (task_id === 'clear') {
      return clear_tasks(chat_id)
    } else if (task_id === '-h') {
      return send_task_help(chat_id)
    } else if (task_id.startsWith('rm')) {
      task_id = task_id.replace('rm', '')
      task_id = parseInt(task_id)
      if (!task_id) return send_task_help(chat_id)
      return rm_task({ task_id, chat_id })
    }
    task_id = parseInt(task_id)
    if (!task_id) {
      const running_tasks = db.prepare('select id from task where status=?').all('copying')
      if (!running_tasks.length) return sm({ chat_id, text: 'There are currently no running tasks' })
      return running_tasks.forEach(v => send_task_info({ chat_id, task_id: v.id }).catch(console.error))
    }
    send_task_info({ task_id, chat_id }).catch(console.error)
  } else if (message_str.includes('drive.google.com/') || validate_fid(text)) {
    // Bare drive link or folder id: offer the count/copy choice keyboard.
    return send_choice({ fid: fid || text, chat_id })
  } else {
    sm({ chat_id, text: 'This command is not currently supported' })
  }
})
181 |
// Mounted by the Koa app entry point.
module.exports = router
--------------------------------------------------------------------------------
/src/src/snap2html.js:
--------------------------------------------------------------------------------
1 | const fs = require('fs')
2 | const path = require('path')
3 | const dayjs = require('dayjs')
4 |
5 | const ID_DIR_MAPPING = {}
6 | /*
7 | Data format:
8 | Each index in "dirs" array is an array representing a directory:
9 | First item: "directory path*always 0*directory modified date"
10 | Note that forward slashes are used instead of (Windows style) backslashes
Then, for each file in the directory: "filename*size of file*file modified date"
Second to last item tells the total size of directory content
Last item references IDs to all subdirectories of this dir (if any).
14 | ID is the item index in dirs array.
15 | const dirs = [
16 | [
17 | `C:/WordPress/wp-admin*0*1597318033`,
18 | `widgets.php*18175*1597318033`,
19 | 743642,
20 | // "2*11*12*13*14*15*16"
21 | "1"
22 | ],
23 | [
24 | `C:/WordPress/wp-admin/test*0*1597318033`,
25 | `test.php*12175*1597318033`,
26 | 12175,
27 | ""
28 | ]
29 | ] */
30 |
// Fills the snap2html template with the folder listing: injects the `dirs`
// array plus title, generation date, file/dir counts and total size.
function snap2html ({ root, data }) {
  // Must run before trans(): get_size() (called by trans) writes computed
  // totals onto folder nodes, which would otherwise be double-counted here.
  const total_size = sum_size(data)
  const folder_numbers = data.filter(is_folder).length
  const file_numbers = data.length - folder_numbers
  const template = fs.readFileSync(path.join(__dirname, './snap2html.template'), 'utf8')
  return template
    .replace('var dirs = []', 'var dirs = ' + JSON.stringify(trans(data, root)))
    .replace(/\[TITLE\]/g, root.name)
    .replace('[GEN DATE]', dayjs().format('YYYY-MM-DD HH:mm:ss'))
    .replace(/\[NUM FILES\]/g, file_numbers)
    .replace('[NUM DIRS]', folder_numbers)
    .replace('[TOT SIZE]', total_size)
}
44 |
// Total of all numeric `size` fields; missing/non-numeric sizes count as 0.
function sum_size (arr) {
  return arr.reduce((acc, item) => acc + (Number(item.size) || 0), 0)
}
50 |
// Google Drive marks folders with this dedicated mimeType.
function is_folder (v) {
  const FOLDER_MIME = 'application/vnd.google-apps.folder'
  return v.mimeType === FOLDER_MIME
}
54 |
// Converts a date string (or Date/ms value) to whole unix seconds,
// truncating toward zero; falsy input yields 0.
function unix_time (t) {
  if (!t) return 0
  const ms = new Date(t).getTime()
  return Math.trunc(ms / 1000)
}
60 |
// Intended to neutralise '*' in names, since '*' is the field delimiter of
// the snap2html data format (see the dirs layout comment above).
// NOTE(review): `replace(/\*/g, '*')` substitutes '*' with itself — a no-op.
// The replacement was presumably a look-alike glyph that got mangled to a
// plain asterisk in this copy; names containing '*' will currently corrupt
// the generated rows. Confirm against upstream and restore the intended char.
function escape_name (name) {
  return name.replace(/\*/g, '*')
}
64 |
// Builds the snap2html `dirs` array (see the format notes above): one entry
// per folder — ["path*0*mtime", ...child file rows, subtree size, "child dir
// indexes joined by *"] — with `root` as dirs[0].
// Fix: dropped the unused locals `first` (arr[0]) and `name` from the
// original; neither was referenced.
// @param {Array<Object>} arr - flat listing ({id, parent, name, size, mimeType, modifiedTime})
// @param {Object} root - the root folder record
function trans (arr, root) {
  if (!arr.length) return arr
  get_size(root, arr) // fills node.size on every folder (recursive totals)
  let dirs = arr.filter(is_folder)
  dirs.unshift(root)
  dirs = dirs.map(dir => {
    const { id, size, modifiedTime } = dir
    const dir_path = root.name + get_path(id, arr)
    let result = [`${escape_name(dir_path)}*0*${unix_time(modifiedTime)}`]
    const children = arr.filter(v => v.parent === id)
    const child_files = children.filter(v => !is_folder(v)).map(file => {
      return `${escape_name(file.name)}*${file.size}*${unix_time(file.modifiedTime)}`
    })
    result = result.concat(child_files)
    result.push(size)
    // Child directories are referenced by their index within `dirs`.
    const sub_folders = children.filter(is_folder).map(v => dirs.findIndex(vv => vv.id === v.id))
    result.push(sub_folders.join('*'))
    return result
  })
  return dirs
}
87 |
// Recursive total byte size of a node's subtree.
// Memoised: the computed total is written back onto node.size, so any node
// that already carries a size (files, or folders from a previous call) is
// returned as-is.
function get_size (node, arr) {
  if (node.size !== undefined) return node.size
  let total = 0
  for (const child of arr.filter(v => v.parent === node.id)) {
    total += Number(get_size(child, arr))
  }
  return node.size = total
}
95 |
// Resolves the "/parent/.../folder" path for a folder id by walking up the
// parent chain. Full results — and any ancestor prefix hit along the way —
// are memoised in ID_DIR_MAPPING.
function get_path (id, folders) {
  const cached = ID_DIR_MAPPING[id]
  if (cached !== undefined) return cached
  let result = ''
  let cursor = id
  let node = folders.find(v => v.id === cursor)
  while (node) {
    result = `/${node.name}` + result
    cursor = node.parent
    // An already-resolved ancestor lets us stop walking early.
    if (ID_DIR_MAPPING[cursor]) {
      result = ID_DIR_MAPPING[cursor] + result
      return ID_DIR_MAPPING[id] = result
    }
    node = folders.find(v => v.id === cursor)
  }
  return ID_DIR_MAPPING[id] = result
}
113 |
// Only the top-level renderer is public; the helpers above are internal.
module.exports = { snap2html }
--------------------------------------------------------------------------------
/src/src/summary.js:
--------------------------------------------------------------------------------
1 | const Table = require('cli-table3')
2 | const colors = require('colors/safe')
3 | const { escape } = require('html-escaper')
4 |
5 | module.exports = { make_table, summary, make_html, make_tg_table, format_size }
6 |
// Renders the summary as an HTML table string for web responses.
// NOTE(review): despite the th/td/tail names, no <table>/<tr>/<th>/<td>
// tags are emitted here — the markup was presumably stripped somewhere
// upstream of this copy, so the output is just whitespace-joined cells.
// Compare with the original project before relying on this output.
function make_html ({ file_count, folder_count, total_size, details }) {
  const head = ['Type', 'Number', 'Size']
  const th = '' + head.map(k => `${k} `).join('') + ' '
  const td = details.map(v => '' + [escape(v.ext), v.count, v.size].map(k => `${k} `).join('') + ' ').join('')
  let tail = ['Total', file_count + folder_count, total_size]
  tail = '' + tail.map(k => `${k} `).join('') + ' '
  const table = `
${th}
${td}
${tail}

`
  return table
}
20 |
// Renders the summary as a colored console table (cli-table3):
// header row, one row per extension, then a bold totals row.
function make_table ({ file_count, folder_count, total_size, details }) {
  const hAlign = 'center'
  const cell = content => ({ content, hAlign })
  const tb = new Table()
  const headers = ['Type', 'Count', 'Size'].map(v => cell(colors.bold.brightBlue(v)))
  const records = details.map(v => [v.ext, v.count, v.size].map(cell))
  const tails = ['Total', file_count + folder_count, total_size].map(v => cell(colors.bold(v)))
  tb.push(headers, ...records)
  tb.push(tails)
  return tb.toString() + '\n'
}
34 |
// Renders the summary as a monospace table suitable for Telegram messages.
// Fix: the original mutated the shared `details` objects in place (renaming
// 'Folder' -> '[Folder]', 'No Extension' -> '[NoExt]'), which corrupted any
// later make_table/make_html call on the same summary; labels are now
// shortened on copies only.
// @param limit - optional cap on per-extension rows; the trailing folder row
//   (always last when present — see summary()) is kept regardless.
function make_tg_table ({ file_count, folder_count, total_size, details }, limit) {
  const tb = new Table({
    style: {
      head: [],
      border: []
    }
  })
  const hAlign = 'center'
  const headers = ['Type', 'Count', 'Size'].map(v => ({ content: v, hAlign }))
  const short_label = ext => {
    if (ext === 'Folder') return '[Folder]'
    if (ext === 'No Extension') return '[NoExt]'
    return ext
  }
  let records = details.map(v => [short_label(v.ext), v.count, v.size].map(content => ({ content, hAlign })))
  const folder_row = records.pop()
  if (limit) records = records.slice(0, limit)
  if (folder_row) records.push(folder_row)
  const total_count = file_count + folder_count
  const tails = ['Total', total_count, total_size].map(v => ({ content: v, hAlign }))
  tb.push(headers, ...records)
  tb.push(tails)
  return tb.toString().replace(/─/g, '—') // Prevent the table from breaking on the mobile phone and it will look more beautiful in pc after removing the replace
}
72 |
// Aggregates a flat file listing into per-extension counts and sizes plus
// overall totals.
// @param {Array<Object>} info - listing records ({name, size, mimeType})
// @param {string} [sort_by] - 'size' | 'name' | anything else (default: count desc)
// @returns {{file_count, folder_count, total_size, details}} where details
//   rows are {ext, count, size (formatted), raw_size}; a 'No Extension' row
//   and a trailing 'Folder' row are appended when applicable.
function summary (info, sort_by) {
  const FOLDER_MIME = 'application/vnd.google-apps.folder'
  const files = info.filter(v => v.mimeType !== FOLDER_MIME)
  const file_count = files.length
  const folder_count = info.filter(v => v.mimeType === FOLDER_MIME).length
  const total_bytes = info.reduce((acc, v) => acc + (Number(v.size) || 0), 0)
  const total_size = format_size(total_bytes)

  const exts = {}
  const sizes = {}
  let no_ext = 0
  let no_ext_size = 0
  for (const file of files) {
    const size = Number(file.size) || 0
    const ext = file.name.split('.').pop().toLowerCase()
    // More than 10 characters after the last "." (or no "." at all) is
    // treated as "no extension".
    if (!file.name.includes('.') || ext.length > 10) {
      no_ext_size += size
      no_ext++
      continue
    }
    exts[ext] = (exts[ext] || 0) + 1
    sizes[ext] = (sizes[ext] || 0) + size
  }

  const details = Object.keys(exts).map(ext => ({
    ext,
    count: exts[ext],
    size: format_size(sizes[ext]),
    raw_size: sizes[ext]
  }))
  if (sort_by === 'size') {
    details.sort((a, b) => b.raw_size - a.raw_size)
  } else if (sort_by === 'name') {
    details.sort((a, b) => (a.ext > b.ext) ? 1 : -1)
  } else {
    details.sort((a, b) => b.count - a.count)
  }
  if (no_ext) details.push({ ext: 'No Extension', count: no_ext, size: format_size(no_ext_size), raw_size: no_ext_size })
  if (folder_count) details.push({ ext: 'Folder', count: folder_count, size: 0, raw_size: 0 })
  return { file_count, folder_count, total_size, details }
}
117 |
// Human-readable byte size: 1536 -> "1.50 KB".
// Returns '' for non-numeric input and 'invalid size' for negatives.
// Fix: the unit loop is now clamped to the table's last entry, so absurdly
// large values no longer index past `units` and print "… undefined".
function format_size (n) {
  n = Number(n)
  if (Number.isNaN(n)) return ''
  if (n < 0) return 'invalid size'
  const units = ['B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
  let flag = 0
  while (n >= 1024 && flag < units.length - 1) {
    n = (n / 1024)
    flag++
  }
  return n.toFixed(2) + ' ' + units[flag]
}
130 |
--------------------------------------------------------------------------------
/src/src/tg.js:
--------------------------------------------------------------------------------
1 | const Table = require('cli-table3')
2 | const dayjs = require('dayjs')
3 | const axios = require('@viegg/axios')
4 | const HttpsProxyAgent = require('https-proxy-agent')
5 |
6 | const { db } = require('../db')
7 | const { gen_count_body, validate_fid, real_copy, get_name_by_id, get_info_by_id, copy_file } = require('./gd')
8 | const { AUTH, DEFAULT_TARGET, USE_PERSONAL_AUTH } = require('../config')
9 | const { tg_token } = AUTH
// Renders a folder link label for Telegram messages.
// NOTE(review): this only emits "text " — presumably it built an HTML
// <a href> to the Drive folder and the markup was stripped in this copy;
// confirm against upstream before relying on the output.
const gen_link = (fid, text) => `${text || fid} `

if (!tg_token) throw new Error('Please set Bot_token in config.js first')
// Honor an https_proxy environment variable for all Telegram API calls.
const { https_proxy } = process.env
const axins = axios.create(https_proxy ? { httpsAgent: new HttpsProxyAgent(https_proxy) } : {})

// Cache for get_folder_name: folder id -> resolved name.
const FID_TO_NAME = {}
17 |
// Resolves a folder id to its name, memoised in FID_TO_NAME so each id is
// only looked up online once per process lifetime.
async function get_folder_name (fid) {
  const cached = FID_TO_NAME[fid]
  if (cached) return cached
  const name = await get_name_by_id(fid, !USE_PERSONAL_AUTH)
  return FID_TO_NAME[fid] = name
}
24 |
// Sends the top-level command reference to the chat (HTML parse mode).
function send_help (chat_id) {
  const text = `
Command |  Description
➖➖➖➖➖➖➖➖➖➖➖➖
/reload | Restart the Task
➖➖➖➖➖➖➖➖➖➖➖➖
/count FolderID [-u] | Calculates Size 
 - adding -u at the end is optional (info will be collected online)
➖➖➖➖➖➖➖➖➖➖➖➖
/copy sourceID DestID [-u] | Clone Files(Will create a New Folder)
 - If targetID is not filled in, it will be copied to the default location (set in config.js )
 - adding -u at the end is optional (info will be collected online)
➖➖➖➖➖➖➖➖➖➖➖➖
/task | Shows info about the running task
⁍ Example:
/task | Return Details Of All Running Tasks.
/task [ID] | Return Info Of Specific Task.
/task all | Return The List Of All Tasks.
/task clear | Clear All Completed Tasks.
/task rm [ID] | Delete Specific Task.
➖➖➖➖➖➖➖➖➖➖➖➖
/bm [action] [alias] [target] | Add a common FolderID as Bookmark 
 - Helpful while cloning to same destination folder multiple times
⁍ Example:
/bm | Shows all bookmarks
/bm set movie folder-id | Add a Bookmark by the name movie
/bm unset movie | Delete this bookmark
`
  return sm({ chat_id, text, parse_mode: 'HTML' })
}
55 |
// Sends usage help for the /bm (bookmark) command.
function send_bm_help (chat_id) {
  const text = `/bm [action] [alias] [target] | Add a common FolderID as Bookmark 
 - Helpful while cloning to same destination folder multiple times
⁍ Example:
/bm | Shows all bookmarks
/bm set movie folder-id | Add a Bookmark by the name movie
/bm unset movie | Delete this bookmark
`
  return sm({ chat_id, text, parse_mode: 'HTML' })
}
66 |
// Sends usage help for the /task command.
function send_task_help (chat_id) {
  const text = `/task | Shows info about the running task
⁍ Example:
/task | Return Details Of All Running Tasks.
/task [ID] | Return Info Of Specific Task.
/task all | Return The List Of All Tasks.
/task clear | Clear All Completed Tasks.
/task rm [ID] | Delete Specific Task
`
  return sm({ chat_id, text, parse_mode: 'HTML' })
}
78 |
// Deletes every finished task (and its copied-file log rows), then notifies
// the chat. rm_task is called without chat_id so per-task messages are skipped.
function clear_tasks (chat_id) {
  const finished = db.prepare('select id from task where status=?').all('finished')
  for (const { id } of finished) rm_task({ task_id: id })
  sm({ chat_id, text: 'All completed tasks have been cleared' })
}
84 |
// Deletes a task record plus its per-file copy log; optionally confirms to
// chat_id (omitted for bulk deletions driven by clear_tasks).
function rm_task ({ task_id, chat_id }) {
  const found = db.prepare('select id from task where id=?').get(task_id)
  if (!found) {
    return sm({ chat_id, text: `Task ID: ${task_id} . Does Not Exist`, parse_mode: 'HTML' })
  }
  db.prepare('delete from task where id=?').run(task_id)
  db.prepare('delete from copied where taskid=?').run(task_id)
  if (chat_id) {
    sm({ chat_id, text: `Task ID: ${task_id} . Deleted`, parse_mode: 'HTML' })
  }
}
92 |
// Renders every saved bookmark as a text table and sends it to the chat.
function send_all_bookmarks (chat_id) {
  const rows = db.prepare('select alias, target from bookmark').all()
  if (rows.length === 0) return sm({ chat_id, text: 'No Bookmarks Found' })
  const table = new Table({ style: { head: [], border: [] } })
  table.push(['Name', 'FolderID'], ...rows.map(({ alias, target }) => [alias, target]))
  // Swap box-drawing '─' for em dashes (presumably for Telegram rendering).
  const text = table.toString().replace(/─/g, '—')
  return sm({ chat_id, text: `${text} `, parse_mode: 'HTML' })
}
103 |
// Persists a new alias → folder-id bookmark; refuses duplicate aliases.
// (Typo fix in the duplicate message: 'anothe' -> 'another'.)
function set_bookmark ({ chat_id, alias, target }) {
  const record = db.prepare('select alias from bookmark where alias=?').get(alias)
  // Aliases act as unique keys: bail out instead of silently overwriting.
  if (record) return sm({ chat_id, text: 'There is another Favourite Folder with the same name' })
  db.prepare('INSERT INTO bookmark (alias, target) VALUES (?, ?)').run(alias, target)
  return sm({ chat_id, text: `Bookmark Successfully Set : ${alias} | ${target} `, parse_mode: 'HTML' })
}
110 |
// Removes the bookmark stored under `alias`, if one exists.
function unset_bookmark ({ chat_id, alias }) {
  const existing = db.prepare('select alias from bookmark where alias=?').get(alias)
  if (!existing) {
    return sm({ chat_id, text: 'No Bookmarks found with this Name' })
  }
  db.prepare('delete from bookmark where alias=?').run(alias)
  return sm({ chat_id, text: `Bookmark Successfully Deleted : ${alias} `, parse_mode: 'HTML' })
}
117 |
// Looks up the folder ID saved under a bookmark alias; falsy when absent.
function get_target_by_alias (alias) {
  const row = db.prepare('select target from bookmark where alias=?').get(alias)
  return row ? row.target : row
}
122 |
// Reverse lookup: finds the bookmark alias for a folder ID; falsy when absent.
function get_alias_by_target (target) {
  const row = db.prepare('select alias from bookmark where target=?').get(target)
  return row ? row.alias : row
}
127 |
// Presents the inline-button menu for a drive ID: count / clone / refresh /
// clear, plus one "Clone to <alias>" button per saved bookmark.
function send_choice ({ fid, chat_id }) {
  const fixed_rows = [
    [
      { text: 'Calculate Size', callback_data: `count ${fid}` },
      { text: 'Clone', callback_data: `copy ${fid}` }
    ],
    [
      { text: 'Refresh', callback_data: `update ${fid}` },
      { text: 'Clear', callback_data: `clear_button` }
    ]
  ]
  return sm({
    chat_id,
    text: `Drive ID: ${fid}, \nChoose what would you like to do`,
    reply_markup: { inline_keyboard: fixed_rows.concat(gen_bookmark_choices(fid)) }
  })
}
146 |
// Builds inline-keyboard rows — two buttons per row, one "Clone to <alias>"
// button for every saved bookmark.
function gen_bookmark_choices (fid) {
  const to_button = rec => ({ text: `Clone to ${rec.alias}`, callback_data: `copy ${fid} ${rec.alias}` })
  const records = db.prepare('select * from bookmark').all()
  const rows = []
  for (let i = 0; i < records.length; i += 2) {
    rows.push(records.slice(i, i + 2).map(to_button))
  }
  return rows
}
159 |
// Sends a table of every task (id / status / ctime) to the chat. If Telegram
// rejects the message (typically: too long), falls back to a plain
// tab-separated listing of the most recent 100 tasks.
async function send_all_tasks (chat_id) {
  const rows = db.prepare('select id, status, ctime from task').all()
  if (rows.length === 0) return sm({ chat_id, text: 'No task record in the database' })
  const headers = ['ID', 'status', 'ctime']
  const formatted = rows.map(({ id, status, ctime }) => {
    return [id, status, dayjs(ctime).format('YYYY-MM-DD HH:mm:ss')]
  })
  const tb = new Table({ style: { head: [], border: [] } })
  tb.push(headers, ...formatted)
  // Swap box-drawing '─' for em dashes (presumably for Telegram rendering).
  const text = tb.toString().replace(/─/g, '—')
  const url = `https://api.telegram.org/bot${tg_token}/sendMessage`
  try {
    return await axins.post(url, {
      chat_id,
      parse_mode: 'HTML',
      text: `All Clone Tasks :\n${text} `
    })
  } catch (err) {
    console.error(err.message)
    const fallback = [headers].concat(formatted.slice(-100)).map(v => v.join('\t')).join('\n')
    return sm({ chat_id, parse_mode: 'HTML', text: `Last 100 tasks :\n${fallback}` })
  }
}
184 |
// Builds a human-readable status report for task `task_id`.
// Returns { text, status, folder_count }, or {} when the id is unknown.
async function get_task_info (task_id) {
  const record = db.prepare('select * from task where id=?').get(task_id)
  if (!record) return {}
  const { source, target, status, mapping, ctime, ftime } = record
  const { copied_files } = db.prepare('select count(fileid) as copied_files from copied where taskid=?').get(task_id)
  // `mapping` is a newline-separated list of "src dst" id pairs; the first
  // pair's destination is the new top-level folder created for the task.
  const folder_mapping = mapping && mapping.trim().split('\n')
  const new_folder = folder_mapping && folder_mapping[0].split(' ')[1]
  const { summary } = db.prepare('select summary from gd where fid=?').get(source) || {}
  const { file_count, folder_count, total_size } = summary ? JSON.parse(summary) : {}
  const total_count = (file_count || 0) + (folder_count || 0)
  const copied_folders = folder_mapping ? (folder_mapping.length - 1) : 0
  // Fix: avoid printing 'NaN%' when the source has not been counted yet
  // (total_count is 0); also repairs the 'Unkno wn' typo further down.
  const percentage = total_count
    ? ((copied_files + copied_folders) * 100 / total_count).toFixed(2) + '%'
    : 'Unknown'
  let text = 'Task No : ' + task_id + ' \n'
  const folder_name = await get_folder_name(source)
  text += 'Source Folder :' + gen_link(source, folder_name) + '\n'
  text += 'Destination Folder :' + gen_link(target, get_alias_by_target(target)) + '\n'
  text += 'New Folder :' + (new_folder ? gen_link(new_folder) : 'Not Created yet') + '\n'
  text += 'Task Status : ' + status + ' \n'
  text += 'Start Time : ' + dayjs(ctime).format('YYYY-MM-DD HH:mm:ss') + ' \n'
  text += 'End Time : ' + (ftime ? dayjs(ftime).format('YYYY-MM-DD HH:mm:ss') : 'Not Done') + ' \n'
  text += 'Folder Progress : ' + copied_folders + '/' + (folder_count === undefined ? 'Unknown' : folder_count) + ' \n'
  text += 'File Progress : ' + copied_files + '/' + (file_count === undefined ? 'Unknown' : file_count) + ' \n'
  text += 'Total Percentage : ' + percentage + ' \n'
  text += 'Total Size : ' + (total_size || 'Unknown') + ' '
  return { text, status, folder_count }
}
210 |
// Sends a task-status report to the chat and, while the task is still
// copying, edits that same message every 10 seconds with fresh progress.
async function send_task_info ({ task_id, chat_id }) {
  const { text, status, folder_count } = await get_task_info(task_id)
  if (!text) return sm({ chat_id, text: `Task ID Does Not Exist In The Database: ${task_id} `, parse_mode: 'HTML' })
  const url = `https://api.telegram.org/bot${tg_token}/sendMessage`
  let message_id
  try {
    const { data } = await axins.post(url, { chat_id, text, parse_mode: 'HTML' })
    message_id = data && data.result && data.result.message_id
  } catch (e) {
    // Best-effort: if the initial send fails we simply skip live updates.
    console.log('fail to send message to tg', e.message)
  }
  // NOTE(review): get_task_info can hog the CPU when the folder count is very
  // large; storing the mapping in its own table would avoid re-parsing it.
  // Only start polling when the initial send succeeded and work is ongoing.
  if (!message_id || status !== 'copying') return
  const loop = setInterval(async () => {
    const { text, status } = await get_task_info(task_id)
    // TODO check if text changed
    if (status !== 'copying') clearInterval(loop)
    sm({ chat_id, message_id, text, parse_mode: 'HTML' }, 'editMessageText')
  }, 10 * 1000)
}
231 |
// Clones `fid` into `target` (or DEFAULT_TARGET) on behalf of a chat.
// Single files are copied immediately with no task bookkeeping; folders spawn
// a background real_copy task. Resolves with the task id once the task row
// exists in the database.
async function tg_copy ({ fid, target, chat_id, update }) { // return task_id
  target = target || DEFAULT_TARGET
  if (!target) return sm({ chat_id, text: 'Please enter the destination ID or set the default clone destination ID in config.js first(DEFAULT_TARGET)' })

  const file = await get_info_by_id(fid, !USE_PERSONAL_AUTH)
  if (!file) {
    const text = `Unable to get info,Please check if the link is valid and the SAs have appropriate permissions:https://drive.google.com/drive/folders/${fid}`
    return sm({ chat_id, text })
  }
  // A single (non-folder) file is copied directly — no task row is created.
  if (file && file.mimeType !== 'application/vnd.google-apps.folder') {
    return copy_file(fid, target, !USE_PERSONAL_AUTH).then(data => {
      sm({ chat_id, parse_mode: 'HTML', text: `File Copied Succesfully : ${gen_link(target)}` })
    }).catch(e => {
      sm({ chat_id, text: `Failed To Clone The File : ${e.message} `, parse_mode: 'HTML' })
    })
  }

  // Deduplicate: an in-flight task for the same source/target pair is reused.
  let record = db.prepare('select id, status from task where source=? and target=?').get(fid, target)
  if (record) {
    if (record.status === 'copying') {
      return sm({ chat_id, text: 'Task With The Same SourceID And DestinationID Is Already In Progress,\nType /task ' + record.id })
    } else if (record.status === 'finished') {
      sm({ chat_id, text: `Existing Task Detected ${record.id} ,Started Cloning`, parse_mode: 'HTML' })
    }
  }

  // Fire-and-forget: real_copy creates/updates the task row; completion and
  // failure are reported to the chat from the handlers below.
  real_copy({ source: fid, update, target, service_account: !USE_PERSONAL_AUTH, is_server: true })
    .then(async info => {
      if (!record) record = {} // Prevent infinite loop
      if (!info) return
      const { task_id } = info
      const { text } = await get_task_info(task_id)
      sm({ chat_id, text, parse_mode: 'HTML' })
    })
    .catch(err => {
      const task_id = record && record.id
      if (task_id) db.prepare('update task set status=? where id=?').run('error', task_id)
      if (!record) record = {} // Also unblocks the polling loop below on failure
      console.error('Copy Failed', fid, '-->', target)
      console.error(err)
      sm({ chat_id, text: (task_id || '') + `Task Error :${err.message} `, parse_mode: 'HTML' })
    })

  // Poll until real_copy has inserted the task row (or a handler above set
  // record = {} to stop the wait), then hand back its id.
  while (!record) {
    record = db.prepare('select id from task where source=? and target=?').get(fid, target)
    await sleep(1000)
  }
  return record.id
}
281 |
// Promise-based delay: resolves (with undefined) after `ms` milliseconds.
// The unused `reject` parameter was dropped — this promise never rejects.
function sleep (ms) {
  return new Promise(resolve => {
    setTimeout(resolve, ms)
  })
}
287 |
// Acknowledges an inline-keyboard callback query so the Telegram client
// stops showing its loading spinner.
function reply_cb_query ({ id, data }) {
  const payload = {
    callback_query_id: id,
    text: 'Start the Task ' + data
  }
  const url = `https://api.telegram.org/bot${tg_token}/answerCallbackQuery`
  return axins.post(url, payload)
}
295 |
// Counts files/size under `fid` and posts the resulting stats table to the
// chat, live-updating a progress message while the count runs. Falls back to
// a truncated (first 20 rows) table when Telegram rejects the full one.
async function send_count ({ fid, chat_id, update }) {
  // Progress text shown while gen_count_body is still walking the folder.
  const gen_text = payload => {
    const { obj_count, processing_count, pending_count } = payload || {}
    const now = dayjs().format('YYYY-MM-DD HH:mm:ss')
    return `Size:${gen_link(fid)}
Time:${now}
Number of Files:${obj_count || ''}
${pending_count ? ('Pending:' + pending_count) : ''}
${processing_count ? ('Ongoing:' + processing_count) : ''}`
  }

  const url = `https://api.telegram.org/bot${tg_token}/sendMessage`
  let response
  try {
    response = await axins.post(url, { chat_id, text: `Started : ${fid} .\nCollecting Files Stats,Please Wait.\nIt Is Recommended Not To Start Cloning Before The Stats Is Collected.`, parse_mode: 'HTML' })
  } catch (e) {} // best-effort notice: counting proceeds even if this send fails
  const { data } = response || {}
  const message_id = data && data.result && data.result.message_id
  // Edits the progress message in place; only usable when the send succeeded.
  const message_updater = payload => sm({
    chat_id,
    message_id,
    parse_mode: 'HTML',
    text: gen_text(payload)
  }, 'editMessageText')

  const service_account = !USE_PERSONAL_AUTH
  const table = await gen_count_body({ fid, update, service_account, type: 'tg', tg: message_id && message_updater })
  if (!table) return sm({ chat_id, parse_mode: 'HTML', text: gen_link(fid) + ' Failed to obtain info' })
  const gd_link = `https://drive.google.com/drive/folders/${fid}`
  const name = await get_folder_name(fid)
  return axins.post(url, {
    chat_id,
    parse_mode: 'HTML',
    text: `Source Folder Name :${name}
Source Folder Link :${gd_link}
${table} `
  }).catch(async err => {
    console.log(err.message)
    // const description = err.response && err.response.data && err.response.data.description
    // const too_long_msgs = ['request entity too large', 'message is too long']
    // if (description && too_long_msgs.some(v => description.toLowerCase().includes(v))) {
    // NOTE(review): the retry below omits `update` — presumably to reuse the
    // cached count produced by the attempt above; confirm before changing.
    const limit = 20
    const table = await gen_count_body({ fid, type: 'tg', service_account: !USE_PERSONAL_AUTH, limit })
    return sm({
      chat_id,
      parse_mode: 'HTML',
      text: `Name :${name}
Link : ${fid}
The Table Is Too Long, Only Showing The First ${limit}
${table} `
    })
  })
}
349 |
// Thin wrapper around the Telegram Bot API: POSTs `data` to `endpoint`
// (default 'sendMessage') and logs — rather than throws — any failure.
function sm (data, endpoint) {
  const method = endpoint || 'sendMessage'
  const url = `https://api.telegram.org/bot${tg_token}/${method}`
  return axins.post(url, data).catch(err => {
    console.error('fail to send message to tg:', err.message)
    const detail = err.response && err.response.data
    if (detail) console.error(detail)
  })
}
360 |
// Extracts a Drive file/folder ID from free-form text: a bare ID, a
// .../folders/<id> URL, a .../file/d/<id> URL, or an ?id=<id> query param.
// Returns '' when nothing parseable is found.
function extract_fid (text) {
  text = text.replace(/^\/count/, '').replace(/^\/copy/, '').replace(/\\n/g, '').replace(/\\/g, '').trim()
  const [source] = text.split(' ').map(v => v.trim())
  if (validate_fid(source)) return source
  try {
    if (!text.startsWith('http')) text = 'https://' + text
    const u = new URL(text)
    if (u.pathname.includes('/folders/')) {
      // Last non-empty path segment is the folder id.
      const segments = u.pathname.split('/').map(v => v.trim()).filter(Boolean)
      return segments[segments.length - 1]
    }
    if (u.pathname.includes('/file/')) {
      const m = u.pathname.match(/file\/d\/([a-zA-Z0-9_-]+)/)
      return m ? m[1] : m
    }
    return u.searchParams.get('id')
  } catch (e) {
    // Not a parseable URL at all.
    return ''
  }
}
380 |
// Finds the first Google Drive link in `text` and extracts its file/folder
// ID via extract_fid. Returns null when no link is present.
function extract_from_text (text) {
  // Dots in the host are escaped so the pattern cannot match look-alike
  // hosts — the previous /drive.google.com/ form treated '.' as "any char".
  const reg = /https?:\/\/drive\.google\.com\/[a-zA-Z0-9_\\/?=&-]+/g
  const m = text.match(reg)
  return m && extract_fid(m[0])
}
387 |
388 | module.exports = { send_count, send_help, sm, extract_fid, reply_cb_query, send_choice, send_task_info, send_all_tasks, tg_copy, extract_from_text, get_target_by_alias, send_bm_help, send_all_bookmarks, set_bookmark, unset_bookmark, clear_tasks, send_task_help, rm_task }
389 |
--------------------------------------------------------------------------------
/src/src/tree.js:
--------------------------------------------------------------------------------
module.exports = { gen_tree_html }

// Renders the folder-tree HTML page for an array of Drive file records.
function gen_tree_html (arr) {
  const data = gen_tree_data(arr, is_gd_folder)
  return tree_tpl(JSON.stringify(data))
}
7 |
// Wraps the serialized tree data in the HTML page template.
// NOTE(review): the template body below appears truncated/garbled in this
// copy of the file (the markup and script that consume `str` are missing) —
// verify against the upstream source before editing this function.
function tree_tpl (str) {
  return `




Folder Tree







`
}
31 |
// True when a Drive record describes a folder (by its Google mimeType).
function is_gd_folder (data) {
  const FOLDER_MIME = 'application/vnd.google-apps.folder'
  return FOLDER_MIME === data.mimeType
}
35 |
// Converts a flat list of Drive records into the nested node structure the
// tree view consumes. Mutates folder records in place (adds size/count/
// title/node fields). Assumes data[0].parent is the root folder id —
// TODO confirm against the callers that build `data`.
function gen_tree_data (data, is_folder) {
  if (!data || !data.length) return []
  const folders = data.filter(is_folder)
  const files = data.filter(v => !is_folder(v))
  const total_size = sum(files.map(v => v.size))
  const root = {
    title: `/Root Folder [Total${files.length} Files (excluding folders) , ${format_size(total_size)}]`,
    key: data[0].parent
  }
  if (!folders.length) return [root]
  // Direct children of the root: they share the first folder's parent id.
  const sub_folders = folders.filter(v => v.parent === folders[0].parent)
  // Recursing from the top-level folders populates size/count on every
  // nested folder as a side effect.
  sub_folders.forEach(v => {
    sum_files(v, data, is_folder)
    count_files(v, data, is_folder)
  })
  sort_folders(folders, 'count')
  sort_folders(sub_folders, 'count')
  folders.forEach(v => {
    let { name, size, count, id } = v
    // Trim over-long names so node titles stay readable.
    if (name.length > 50) name = name.slice(0, 48) + '...'
    v.title = `${name} | [Total${count}Files ${format_size(size)}]`
  })
  root.children = sub_folders.map(v => gen_node(v, folders))
  return [root]
}
61 |
// In-place descending sort by total 'size' or file 'count'.
// Unknown types (and empty/missing input) leave the array untouched.
function sort_folders (folders, type) {
  if (!folders || !folders.length) return
  switch (type) {
    case 'size':
      return folders.sort((a, b) => b.size - a.size)
    case 'count':
      return folders.sort((a, b) => b.count - a.count)
  }
}
67 |
// Converts a folder record into a tree-view node ({ title, key, children }),
// memoizing the result on the record so shared subtrees are built only once.
function gen_node (v, folders) {
  if (v.node) return v.node
  const children = v.children ||
    folders.filter(f => f.parent === v.id).map(f => gen_node(f, folders))
  v.node = { title: v.title, key: v.id, children }
  return v.node
}
77 |
// Recursively counts the files beneath `folder`, caching the total on
// folder.count. An already-cached (non-zero) count is returned as-is.
function count_files (folder, arr, is_folder) {
  if (folder.count) return folder.count
  const children = arr.filter(item => item.parent === folder.id)
  const counts = children.map(child => {
    return is_folder(child) ? count_files(child, arr, is_folder) : 1
  })
  folder.count = sum(counts)
  return folder.count
}
86 |
// Recursively totals the byte size beneath `folder`, caching the result on
// folder.size. An already-cached (non-zero) size is returned as-is.
function sum_files (folder, arr, is_folder) {
  if (folder.size) return folder.size
  const children = arr.filter(item => item.parent === folder.id)
  const sizes = children.map(child => {
    return is_folder(child) ? sum_files(child, arr, is_folder) : child.size
  })
  folder.size = sum(sizes)
  return folder.size
}
95 |
// Adds up an array of numeric-ish values; non-numeric entries count as 0.
function sum (arr) {
  return arr.reduce((total, item) => total + (Number(item) || 0), 0)
}
103 |
// Formats a byte count as a human-readable string, e.g. 1536 -> '1.50 KB'.
// Returns '' for non-numeric input and 'invalid size' for negatives.
function format_size (n) {
  n = Number(n)
  if (Number.isNaN(n)) return ''
  if (n < 0) return 'invalid size'
  const units = ['Bytes', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB']
  let flag = 0
  // Stop at the last known unit so absurdly large values cannot index past
  // the table (the old loop could yield 'NN undefined').
  while (n >= 1024 && flag < units.length - 1) {
    n = n / 1024
    flag++
  }
  return n.toFixed(2) + ' ' + units[flag]
}
116 |
--------------------------------------------------------------------------------
/src/validate-sa.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | const { argv } = require('yargs')
4 | .usage('Usage: ./$0 folder-id\nfolder-id Does SA has read Pemission to the directory ID you want to detect')
5 | .help('h')
6 | .alias('h', 'help')
7 |
8 | const fs = require('fs')
9 | const path = require('path')
10 | const prompts = require('prompts')
11 | const { GoogleToken } = require('gtoken')
12 | const axios = require('@viegg/axios')
13 | const HttpsProxyAgent = require('https-proxy-agent')
14 |
15 | const { https_proxy } = process.env
16 | const axins = axios.create(https_proxy ? { httpsAgent: new HttpsProxyAgent(https_proxy) } : {})
17 |
// Discover every service-account JSON key file under ./sa and wrap each one
// in a GoogleToken instance (full Drive scope) for later token exchange.
const SA_FILES = fs.readdirSync(path.join(__dirname, 'sa')).filter(v => v.endsWith('.json'))
const SA_TOKENS = SA_FILES.map(filename => {
  const gtoken = new GoogleToken({
    keyFile: path.join(__dirname, 'sa', filename),
    scope: ['https://www.googleapis.com/auth/drive']
  })
  return {gtoken, filename}
})
26 |
main()

// Entry point: validates the folder-id argument, probes every SA against it,
// and optionally moves the failing key files into sa/invalid.
async function main () {
  const [fid] = argv._
  if (!validate_fid(fid)) {
    return console.warn('Folder ID is missing or malformed')
  }
  console.log('Start testing', SA_TOKENS.length, 'SA accounts')
  const invalid_sa = await get_invalid_sa(SA_TOKENS, fid)
  if (!invalid_sa.length) {
    return console.log('Detected', SA_TOKENS.length, 'Individual SA,No invalid account detected')
  }
  const choice = await choose(invalid_sa.length)
  if (choice === 'yes') {
    mv_sa(invalid_sa)
    console.log('Successfully moved')
  } else {
    console.log('Successful exit, invalid SA record:', invalid_sa)
  }
}
45 |
// Moves each named key file from sa/ into sa/invalid/
// (the destination directory is expected to exist).
function mv_sa (arr) {
  arr.forEach(filename => {
    const from = path.join(__dirname, 'sa', filename)
    const to = path.join(__dirname, 'sa/invalid', filename)
    fs.renameSync(from, to)
  })
}
53 |
// Asks the user (interactive prompt) whether the invalid SA key files should
// be moved aside. Resolves to 'yes' or 'no'.
// (Typo fix in the prompt message: 'Detcted' -> 'Detected'.)
async function choose (count) {
  const answer = await prompts({
    type: 'select',
    name: 'value',
    message: `Detected ${count} Invalid SA,Whether to move them to the sa/invalid Folder?`,
    choices: [
      { title: 'Yes', description: 'Confirm Move', value: 'yes' },
      { title: 'No', description: 'Exit without making changes', value: 'no' }
    ],
    initial: 0
  })
  return answer.value
}
67 |
// Tries to read folder `fid` with every SA token in `arr` (sequentially) and
// returns the filenames of the accounts that fail — either because the token
// exchange failed or because the account cannot see the folder.
async function get_invalid_sa (arr, fid) {
  if (!fid) throw new Error('Please specify the ID of the directory to check permissions')
  const fails = []
  let flag = 0
  let good = 0
  for (const v of arr) {
    console.log('Inspection Progress', `${flag++}/${arr.length}`)
    console.log('Normal/Abnormal', `${good}/${fails.length}`)
    const {gtoken, filename} = v
    try {
      const access_token = await get_sa_token(gtoken)
      await get_info(fid, access_token)
      good++
    } catch (e) {
      handle_error(e)
      // HTTP 400 at this stage means the token exchange itself failed.
      const status = e && e.response && e.response.status
      if (Number(status) === 400) fails.push(filename) // access_token Failed

      // A 404/403 error code from the Drive API body means the SA exists but
      // cannot read the folder.
      const data = e && e.response && e.response.data
      const code = data && data.error && data.error.code
      if ([404, 403].includes(Number(code))) fails.push(filename) // Failed to read folder information
    }
  }
  return fails
}
93 |
// Logs the HTTP response body when present, otherwise just the error message.
function handle_error (err) {
  const body = err && err.response && err.response.data
  if (!body) {
    console.error(err.message)
    return
  }
  console.error(JSON.stringify(body))
}
102 |
// Fetches id and name for `fid` with the given bearer token.
// Throws (via axios) when the token is bad or the file is not visible.
async function get_info (fid, access_token) {
  const params = {
    includeItemsFromAllDrives: true,
    supportsAllDrives: true,
    corpora: 'allDrives',
    fields: 'id,name'
  }
  const url = `https://www.googleapis.com/drive/v3/files/${fid}?` + params_to_query(params)
  const headers = { authorization: 'Bearer ' + access_token }
  const { data } = await axins.get(url, { headers })
  return data
}
116 |
// Serializes a flat object into a URL query string with keys and values
// percent-encoded. (Idiom: Object.entries over own properties replaces the
// old for...in loop, which also walked inherited enumerable keys.)
function params_to_query (data) {
  return Object.entries(data)
    .map(([key, value]) => `${encodeURIComponent(key)}=${encodeURIComponent(value)}`)
    .join('&')
}
124 |
// Promisifies gtoken.getToken, resolving with the raw access_token string
// and rejecting with the underlying error on failure.
async function get_sa_token (gtoken) {
  return new Promise((resolve, reject) => {
    gtoken.getToken((err, tokens) => {
      if (err) return reject(err)
      resolve(tokens.access_token)
    })
  })
}
132 |
// Loose sanity check for a Drive file/folder id: 10-100 chars drawn from
// [a-zA-Z0-9_-]. Returns false for empty/bad-length input; otherwise the
// String.match result (a match array when valid, null when not) — callers
// use it as a truthy/falsy test.
function validate_fid (fid) {
  if (!fid) return false
  fid = String(fid)
  const ok_length = fid.length >= 10 && fid.length <= 100
  if (!ok_length) return false
  return fid.match(/^[a-zA-Z0-9_-]+$/)
}
140 |
--------------------------------------------------------------------------------