├── tests
├── static_assets
│ ├── sometext.txt
│ └── mona_lisa
│ │ ├── 1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg
│ │ ├── 1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg.enc
│ │ ├── openssl_command.sh
│ │ └── 1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.LICENSE.txt
├── configPersist.test.ts
├── metadataOnRemote.test.ts
├── encrypt.test.ts
├── presetRules.test.ts
└── misc.test.ts
├── .prettierignore
├── .github
├── ISSUE_TEMPLATE
│ ├── config.yml
│ ├── feature_request.yml
│ └── bug_report.yml
└── workflows
│ ├── auto-build.yml
│ └── release.yml
├── versions.json
├── .env.example.txt
├── .gitmodules
├── esbuild.injecthelper.mjs
├── assets
└── branding
│ ├── LICENSE.txt
│ ├── 256x256.png
│ ├── 50x50.png
│ ├── 64x64.png
│ ├── logo.png
│ ├── logo.svg
│ ├── 300x300.png
│ ├── 300x300.svg
│ ├── logo.excalidraw
│ ├── logo-transparent.svg
│ └── README.md
├── docs
├── dropbox_review_material
│ ├── attachments
│ │ ├── step03.png
│ │ ├── step05.png
│ │ ├── step06.png
│ │ ├── step11.png
│ │ ├── step07.png
│ │ ├── step08.png
│ │ ├── step09.png
│ │ ├── step10.png
│ │ ├── step12.png
│ │ └── step13.png
│ └── README.md
├── encryption.md
├── code_design.md
├── browser_env_oauth2_pkce.md
├── how_to_debug
│ ├── README.md
│ ├── check_console_output.md
│ ├── save_console_output_and_export.md
│ └── export_sync_plans.md
├── browser_env.md
├── browser_env_no_nodejs.md
├── s3_cors_configure.md
├── browser_env_cors.md
├── apache_cors_configure.md
├── minimal_intrusive_design.md
├── sync_ignoring_large_files.md
├── import_export_some_settings.md
├── sync_algorithm_v1.md
├── sync_algorithm_v2.md
└── services_connectable_or_not.md
├── .gitignore
├── .gitattributes
├── manifest-beta.json
├── manifest.json
├── tsconfig.json
├── .editorconfig
├── src
├── moreOnLog.ts
├── i18n.ts
├── syncAlgoV2Notice.ts
├── configPersist.ts
├── presetRules.ts
├── importExport.ts
├── syncSizesConflictNotice.ts
├── metadataOnRemote.ts
├── debugMode.ts
├── obsFolderLister.ts
├── encrypt.ts
├── baseTypes.ts
├── remote.ts
├── misc.ts
├── remoteForWebdav.ts
└── remoteForS3.ts
├── styles.css
├── esbuild.config.mjs
├── webpack.config.js
├── package.json
├── LICENSE
└── README.md
/tests/static_assets/sometext.txt:
--------------------------------------------------------------------------------
1 | A secret text 你好世界
2 |
--------------------------------------------------------------------------------
/.prettierignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | main.js
3 | data.json
4 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | blank_issues_enabled: false
2 |
--------------------------------------------------------------------------------
/versions.json:
--------------------------------------------------------------------------------
1 | {
2 | "0.3.2": "0.12.15",
3 | "0.3.25": "0.13.21"
4 | }
5 |
--------------------------------------------------------------------------------
/.env.example.txt:
--------------------------------------------------------------------------------
1 | DROPBOX_APP_KEY=
2 | ONEDRIVE_CLIENT_ID=
3 | ONEDRIVE_AUTHORITY=https://
4 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "src/langs"]
2 | path = src/langs
3 | url = https://github.com/sboesen/langs.git
4 |
--------------------------------------------------------------------------------
/esbuild.injecthelper.mjs:
--------------------------------------------------------------------------------
1 | export let Buffer = require("buffer").Buffer;
2 | export let process = require("process/browser");
3 |
--------------------------------------------------------------------------------
/assets/branding/LICENSE.txt:
--------------------------------------------------------------------------------
1 | This work is licensed under CC BY-SA 4.0. To view a copy of this license, visit http://creativecommons.org/licenses/by-sa/4.0/
2 |
--------------------------------------------------------------------------------
/assets/branding/256x256.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:a9bf1cc2403c3a32e7a6f5d5829bb065573922dc050ae2fdb8b3d34b4c87ecf5
3 | size 8296
4 |
--------------------------------------------------------------------------------
/assets/branding/50x50.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:fe3b5b8bf9c32e57c0af366c455b11ae68eb9fb8a32f371c9c445b3e7e89cc02
3 | size 1120
4 |
--------------------------------------------------------------------------------
/assets/branding/64x64.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:f68df7bae94eb4c4cddfa6af47d7819d01a6482d4f6afe6ef3236f72f9adf15f
3 | size 1523
4 |
--------------------------------------------------------------------------------
/assets/branding/logo.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:98358aa726dcb5565164ff260634f5c527223d1f2ec1d0410b400625c8dad704
3 | size 14082
4 |
--------------------------------------------------------------------------------
/assets/branding/logo.svg:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:02a5057ebe38e4c3835c8354f53aca01c4c2335d8a0435e22f509b4ca4a73ca4
3 | size 12889
4 |
--------------------------------------------------------------------------------
/assets/branding/300x300.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:6990a598379c457442345e50c3f056c4598a121523bc10980835c08fc7447a54
3 | size 10119
4 |
--------------------------------------------------------------------------------
/assets/branding/300x300.svg:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:432198e63fa9676541a41436917e025d1ea6af3772139c1dea450fcc0155cfd0
3 | size 13054
4 |
--------------------------------------------------------------------------------
/assets/branding/logo.excalidraw:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:6b33b2a71f2d91988416c17015b59ef205dbf6a8c704ecac1cc95ed03ccc2c8e
3 | size 16537
4 |
--------------------------------------------------------------------------------
/assets/branding/logo-transparent.svg:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:daedd262170200cb5e8cc288898b40892771b3962cf2bc17dc443f1c82a4aa94
3 | size 12886
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step03.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:655f6f487cc24af0ab86b8592622a7701c30f3369e7847a1fbb783d0962829c3
3 | size 52910
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step05.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:fe971af94f1f7b042b7313268e4d2d8432d926ce2392e50ca2a4abc21c8a4aab
3 | size 57033
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step06.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:025303053f2b2cedd2bdd61cd71e1f1c0fb12e45bfbc0e81f2c0d70747133505
3 | size 63458
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step11.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:fda4f6e8cf68833f239bdc85c46802fe2131d149011074f747b1829822a38939
3 | size 59041
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step07.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:061707977c8046a6f924881f86aac962e2f05618e201080768246270de792040
3 | size 100162
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step08.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:8add61e6ef34b2470e5a5bea3e10955a86eed5ed1fc4dd398f30835485a6c455
3 | size 158422
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step09.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:021ae1ce29d3932f088f0954787c475be826915b794320bf212d3ba7d58656da
3 | size 110529
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step10.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:b225d21de5778ddc032893e0f52150ab4260b0dfbe0e9141da4aa8b03131a9da
3 | size 147821
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step12.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:576801d5ebd8cf7fcd3c9c0c6df325654338854f7d762a5680e9aab3c9e6508a
3 | size 142155
4 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/attachments/step13.png:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:1c93f681f0f7ce74cee0f2ed193f574ef915554f1a9ef96ba54afdede302f13a
3 | size 112057
4 |
--------------------------------------------------------------------------------
/tests/static_assets/mona_lisa/1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:b332c298be98be9968715952822431a051f9d07e67460cdcf91a5a3a305b5df1
3 | size 1141550
4 |
--------------------------------------------------------------------------------
/tests/static_assets/mona_lisa/1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg.enc:
--------------------------------------------------------------------------------
1 | version https://git-lfs.github.com/spec/v1
2 | oid sha256:7687eaf60ed93f754b107dc86a437aa97fad7bedd40432171b08c22cded7b559
3 | size 1141568
4 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Intellij
2 | *.iml
3 | .idea
4 |
5 | # npm
6 | node_modules
7 | package-lock.json
8 |
9 | # build
10 | main.js
11 | *.js.map
12 |
13 | # obsidian
14 | data.json
15 |
16 | # debug
17 | logs.txt
18 |
19 | # hidden files
20 | .*
21 |
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.png filter=lfs diff=lfs merge=lfs -text
2 | *.jpg filter=lfs diff=lfs merge=lfs -text
3 | *.jpeg filter=lfs diff=lfs merge=lfs -text
4 | *.pdf filter=lfs diff=lfs merge=lfs -text
5 | *.enc filter=lfs diff=lfs merge=lfs -text
6 | *.svg filter=lfs diff=lfs merge=lfs -text
7 | *.excalidraw filter=lfs diff=lfs merge=lfs -text
8 |
--------------------------------------------------------------------------------
/docs/encryption.md:
--------------------------------------------------------------------------------
1 | # Encryption
2 |
3 | If a password is set, the files are encrypted before being sent to the cloud.
4 |
5 | 1. The encryption algorithm is implemented using web-crypto.
6 | 2. The file content is encrypted using AES-GCM with a random IV.
3 | 3. The directory is considered as a special "0-byte" object on remote s3, so this meta information may be easily guessed if some third party can access the remote bucket.
8 |
--------------------------------------------------------------------------------
/tests/static_assets/mona_lisa/openssl_command.sh:
--------------------------------------------------------------------------------
1 | # The encryption file is produced by the following command.
2 | # A salt is explicitly provided because we need reproducible output in tests.
3 | openssl enc -p -aes-256-cbc -S 8302F586FAB491EC -pbkdf2 -iter 10000 -pass pass:somepassword -in '1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg' -out 1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg.enc
4 |
--------------------------------------------------------------------------------
/docs/code_design.md:
--------------------------------------------------------------------------------
1 | # Code Design
2 |
3 | ## Code Organization
4 |
5 | 1. Every function except `main.ts` should be pure. Pass any stateful information in parameters.
6 |
7 | 2. `misc.ts` should not depend on any other written code.
8 |
9 | 3. Each storage code should not depend on `sync.ts`.
10 |
11 | ## File and Folder Representation
12 |
13 | While writing sync codes, folders are always represented by a string ending with `/`.
14 |
--------------------------------------------------------------------------------
/manifest-beta.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "remotely-sync",
3 | "name": "Remotely Sync",
4 | "version": "0.4.49",
5 | "minAppVersion": "1.4.11",
6 | "description": "Security fixes for the remotely-save unofficial plugin allowing users to synchronize notes between local device and the cloud service. Not backwards compatible.",
7 | "author": "sboesen",
8 | "authorUrl": "https://github.com/sboesen",
9 | "isDesktopOnly": false
10 | }
11 |
--------------------------------------------------------------------------------
/manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "id": "remotely-secure",
3 | "name": "Remotely Sync",
4 | "version": "0.4.49",
5 | "minAppVersion": "1.4.11",
6 | "description": "Security and feature updates for the remotely-save unofficial plugin allowing users to synchronize notes between local device and the cloud service. Not backwards compatible.",
7 | "author": "sboesen",
8 | "authorUrl": "https://github.com/sboesen",
9 | "isDesktopOnly": false
10 | }
11 |
--------------------------------------------------------------------------------
/docs/browser_env_oauth2_pkce.md:
--------------------------------------------------------------------------------
1 | # Limitations From The Browser Environment: OAuth2 PKCE
2 |
3 | If the cloud service uses OAuth flow, it needs to support PKCE, because the plugin is released to the public, and no real secrets can be statically kept in the client.
4 |
5 | Luckily, Dropbox and OneDrive support PKCE, making it possible for this plugin to connect to them easily.
6 |
7 | Dropbox has an excellent [article](https://dropbox.tech/developers/pkce--what-and-why-) explaining what is and how to use PKCE.
8 |
--------------------------------------------------------------------------------
/tests/static_assets/mona_lisa/1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.LICENSE.txt:
--------------------------------------------------------------------------------
1 | The file 1374px-Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg is downloaded from Wikimedia Commons: https://commons.wikimedia.org/wiki/File:Mona_Lisa,_by_Leonardo_da_Vinci,_from_C2RMF_retouched.jpg .
2 |
3 | Quoted from the web address: ... This photographic reproduction is therefore also considered to be in the public domain in the United States. In other jurisdictions, re-use of this content may be restricted ...
4 |
--------------------------------------------------------------------------------
/docs/how_to_debug/README.md:
--------------------------------------------------------------------------------
1 | # How To Debug
2 |
3 | Ideally, users do not need to debug anything. But if something bad happened, we have to dig into details.
4 |
5 | ## Easy: Export Sync Plans
6 |
7 | See [here](./export_sync_plans.md).
8 |
9 | ## Advanced: Check Console Output Directly
10 |
11 | ...of desktop and Android.
12 |
13 | See [here](./check_console_output.md).
14 |
15 | ## Advanced: Save Console Output Then Read Them Later
16 |
17 | This method works for desktop and mobile devices (iOS, Android).
18 |
19 | See [here](./save_console_output_and_export.md).
20 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "baseUrl": ".",
4 | "inlineSourceMap": true,
5 | "inlineSources": true,
6 | "module": "ESNext",
7 | "target": "ESNext",
8 | "allowJs": true,
9 | "noImplicitAny": true,
10 | "moduleResolution": "node",
11 | // "allowSyntheticDefaultImports": true,
12 | "resolveJsonModule": true,
13 | "esModuleInterop": true,
14 | "importHelpers": true,
15 | "isolatedModules": true,
16 | "lib": [
17 | "dom",
18 | "es5",
19 | "scripthost",
20 | "es2015"
21 | ]
22 | },
23 | "include": [
24 | "**/*.ts"
25 | ]
26 | }
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | # EditorConfig is awesome: https://EditorConfig.org
2 |
3 | # top-most EditorConfig file
4 | root = true
5 |
6 | # Unix-style newlines with a newline ending every file
7 | [*]
8 | end_of_line = lf
9 | insert_final_newline = true
10 |
11 | # Matches multiple files with brace expansion notation
12 | # Set default charset
13 | [*.{js,jsx,mjs,cjs,ts,tsx,json,py}]
14 | charset = utf-8
15 |
16 | # 4 space indentation
17 | [*.py]
18 | indent_style = space
19 | indent_size = 4
20 |
21 | # Tab indentation (no size specified)
22 | [Makefile]
23 | indent_style = tab
24 |
25 | # Indentation override for all JS under lib directory
26 | [*.{js,jsx,mjs,cjs,ts,tsx,json}]
27 | indent_style = space
28 | indent_size = 2
29 |
--------------------------------------------------------------------------------
/docs/browser_env.md:
--------------------------------------------------------------------------------
1 | # Limitations From The Browser Environment
2 |
3 | Obsidian desktop is developed by using [Electron](https://www.electronjs.org/). And Obsidian mobile is developed by using [Capacitor](https://capacitorjs.com/)
4 |
5 | Technically, the plugin (or any plugin?) runs in the js environment provided by Obsidian. And to support the mobile Obsidian, the plugin is limited to be developed for the browser environment, instead of the Node.js environment.
6 |
7 | Then some limitations are applied:
8 |
9 | 1. [The CORS issue (solved in the new Obsidian version on some platforms).](./browser_env_cors.md)
10 | 2. [No Node.js environment.](./browser_env_no_nodejs.md)
11 | 3. If the cloud service uses an OAuth flow, it needs to support PKCE. More details are [here](./browser_env_oauth2_pkce.md).
12 | 4. Does not run in the background when Obsidian is closed.
13 |
--------------------------------------------------------------------------------
/docs/browser_env_no_nodejs.md:
--------------------------------------------------------------------------------
1 | # Limitations From The Browser Environment: No Node.js
2 |
3 | To support the mobile Obsidian, the plugin is limited to be developed for the browser environment, instead of the Node.js environment.
4 |
5 | Many js libraries are designed to work in both the browser and the Node.js environments. But some are not, because the browser doesn't provide the corresponding abilities.
6 |
7 | For example, there is a popular npm package [`ssh2-sftp-client`](https://www.npmjs.com/package/ssh2-sftp-client) for SFTP. But it relies on the modules (e.g. `http`) from Node.js which cannot be "translated" to the browser environment. So it's impossible to make this plugin support SFTP. The same status applies to FTP / FTPS.
8 |
9 | Likewise, [MEGA](https://mega.nz/) provides a SDK, but the SDK is [for C++ only](https://mega.nz/doc), so it's also impossible to make this plugin support MEGA.
10 |
--------------------------------------------------------------------------------
/docs/s3_cors_configure.md:
--------------------------------------------------------------------------------
1 | # How To Configure S3 CORS Rules
2 |
3 | If you are using the latest version of this plugin AND Obsidian desktop >= 0.13.25, mobile >= 1.1.1, you do not need to configure it any more. If you are using Obsidian desktop < 0.13.25, mobile < 1.1.1, you are required to configure the rules as follows.
4 |
5 | Thanks to [@NAL100 in the Discussion](https://github.com/fyears/remotely-save/discussions/28).
6 |
7 | Please try this config:
8 |
9 | ```json
10 | [
11 | {
12 | "AllowedHeaders": ["*"],
13 | "AllowedMethods": ["GET", "PUT", "POST", "DELETE", "HEAD"],
14 | "AllowedOrigins": [
15 | "app://obsidian.md",
16 | "capacitor://localhost",
17 | "http://localhost"
18 | ],
19 | "ExposeHeaders": [
20 | "Content-Length",
21 | "Content-Type",
22 | "Connection",
23 | "Date",
24 | "ETag",
25 | "Server",
26 | "x-amz-delete-marker",
27 | "x-amz-id-2",
28 | "x-amz-request-id",
29 | "x-amz-version-id"
30 | ]
31 | }
32 | ]
33 | ```
34 |
--------------------------------------------------------------------------------
/tests/configPersist.test.ts:
--------------------------------------------------------------------------------
1 | import * as chai from "chai";
2 | import chaiAsPromised from "chai-as-promised";
3 |
4 | import { RemotelySavePluginSettings } from "../src/baseTypes";
5 | import { messyConfigToNormal, normalConfigToMessy } from "../src/configPersist";
6 |
7 | chai.use(chaiAsPromised);
8 | const expect = chai.expect;
9 |
10 | const DEFAULT_SETTINGS: RemotelySavePluginSettings = {
11 | s3: {
12 | s3AccessKeyID: "acc",
13 | } as any,
14 | webdav: {
15 | address: "addr",
16 | } as any,
17 | dropbox: {
18 | username: "测试中文",
19 | } as any,
20 | onedrive: {
21 | username: "test 🍎 emoji",
22 | } as any,
23 | password: "password",
24 | serviceType: "s3",
25 | enableStatusBarInfo: true,
26 | trashLocal: false,
27 | syncTrash: false,
28 | syncBookmarks: true
29 | };
30 |
31 | describe("Config Persist tests", () => {
32 | it("should encrypt go back and forth conrrectly", async () => {
33 | const k = DEFAULT_SETTINGS;
34 | const k2 = normalConfigToMessy(k);
35 | const k3 = messyConfigToNormal(k2);
36 | expect(k3).to.deep.equal(k);
37 | });
38 | });
39 |
--------------------------------------------------------------------------------
/docs/browser_env_cors.md:
--------------------------------------------------------------------------------
1 | # Limitations From The Browser Environment: CORS Issue
2 |
3 | The plugin is developed for the browser environment. The "fake" browser behind the scenes also follows the CORS policy.
4 |
5 | [MDN has a doc about CORS.](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS)
6 |
7 | 1. From Obsidian desktop >= 0.13.25 or mobile >= 1.1.1, Obsidian [provides a new API `requestUrl`](https://forum.obsidian.md/t/obsidian-release-v0-13-25-insider-build/32701), that allows the plugin to fully bypass the CORS issue. As of Mar 2022, the latest public-released Obsidian desktop has supported this API, but the Obsidian mobile still stays in insider.
8 |
9 | 2. For using this plugin in Obsidian desktop < 0.13.25 or mobile < 1.1.1, we need to configure the server side to return the header `Access-Control-Allow-Origin` allowing the origins `app://obsidian.md` and `capacitor://localhost` and `http://localhost`.
10 |
11 | Example configurations:
12 |
13 | - [Amazon S3](./s3_cors_configure.md)
14 | - [Apache](./apache_cors_configure.md) ([contributed by community](https://github.com/remotely-save/remotely-save/pull/31))
15 |
--------------------------------------------------------------------------------
/src/moreOnLog.ts:
--------------------------------------------------------------------------------
1 | // It's very dangerous for this file to depend on other files in the same project.
2 | // We should avoid this situation as much as possible.
3 |
4 | import { TAbstractFile, TFolder, TFile, Vault } from "obsidian";
5 |
6 | import * as origLog from "loglevel";
7 | import type { LogLevelNumbers, Logger, LogLevel, LogLevelDesc } from "loglevel";
8 | const log2 = origLog.getLogger("rs-default");
9 |
10 | const originalFactory = log2.methodFactory;
11 |
12 | export const applyLogWriterInplace = function (writer: (...msg: any[]) => any) {
13 | log2.methodFactory = function (
14 | methodName: string,
15 | logLevel: LogLevelNumbers,
16 | loggerName: string | symbol
17 | ) {
18 | // @ts-ignore
19 | const rawMethod = originalFactory(methodName, logLevel, loggerName);
20 |
21 | return function (...msg: any[]) {
22 | rawMethod.apply(undefined, msg);
23 | writer(...msg);
24 | };
25 | };
26 |
27 | log2.setLevel(log2.getLevel());
28 | };
29 |
30 | export const restoreLogWritterInplace = () => {
31 | log2.methodFactory = originalFactory;
32 | log2.setLevel(log2.getLevel());
33 | };
34 |
35 | export const log = log2;
36 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.yml:
--------------------------------------------------------------------------------
1 | name: Feature Request
2 | description: Some idea about new features
3 | title: "[Feature Request]: "
4 | labels: ["enhancement"]
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: |
9 | Thanks for taking the time to fill out this bug report!
10 | - type: textarea
11 | id: what-are-you-expecting
12 | attributes:
13 | label: What feature are you suggesting?
14 | description: What feature are you suggesting?
15 | validations:
16 | required: true
17 | - type: dropdown
18 | id: service
19 | attributes:
20 | label: What remote cloud services are you using, or suggesting adding the feature to?
21 | multiple: true
22 | options:
23 | - S3
24 | - OneDrive for personal
25 | - Dropbox
26 | - webdav
27 | - others
28 | validations:
29 | required: false
30 | - type: checkboxes
31 | id: ensure-no-sensitive-information
32 | attributes:
33 | label: Ensure no sensitive information
34 | description: By submitting this issue, you ensure that no sensitive information is submitted in the issue.
35 | options:
36 | - label: I ensure that no sensitive information is submitted in the issue.
37 | required: true
38 |
--------------------------------------------------------------------------------
/docs/apache_cors_configure.md:
--------------------------------------------------------------------------------
1 | # How To Configure Apache CORS Rules for Webdav
2 |
3 | This method is [contributed by community](https://github.com/remotely-save/remotely-save/pull/31).
4 |
5 | You should evaluate the risk by yourself before trying this method.
6 |
7 | **You are strongly advised to backup your original Apache configuration files before making any changes.**
8 |
9 | ```apacheconf
10 |
11 | # Obsidian webdav
12 | SetEnvIf Origin "^app://obsidian.md$" IS_OBSIDIAN
13 | SetEnvIf Origin "^capacitor://localhost$" IS_OBSIDIAN
14 | SetEnvIf Origin "^http://localhost$" IS_OBSIDIAN
15 | Header always set Access-Control-Allow-Origin "*" env=IS_OBSIDIAN
16 | Header always set Access-Control-Allow-Methods "GET, HEAD, POST, PUT, OPTIONS, MOVE, DELETE, COPY, LOCK, UNLOCK, PROPFIND" env=IS_OBSIDIAN
17 | Header always set Access-Control-Allow-Headers "Authorization, Depth, DNT, User-Agent, Keep-Alive, Content-Type, accept, origin, X-Requested-With" env=IS_OBSIDIAN
18 | Header always set Access-Control-Expose-Headers "etag, dav" env=IS_OBSIDIAN
19 |
20 | # Allow OPTION request without authentication and respond with status 200
21 | RewriteCond %{ENV:IS_OBSIDIAN} 1
22 | RewriteCond %{REQUEST_METHOD} OPTIONS
23 | RewriteRule ^(.*)$ $1 [R=200,L]
24 |
25 | ```
26 |
--------------------------------------------------------------------------------
/styles.css:
--------------------------------------------------------------------------------
1 | /* set the styles */
2 |
3 | .password-second-confirm {
4 | font-weight: bold;
5 | }
6 |
7 | .password-disclaimer {
8 | font-weight: bold;
9 | }
10 |
11 | .settings-auth-related {
12 | border-top: 1px solid var(--background-modifier-border);
13 | padding-top: 18px;
14 | }
15 |
16 | .s3-disclaimer {
17 | font-weight: bold;
18 | }
19 | .s3-hide {
20 | display: none;
21 | }
22 |
23 | .dropbox-disclaimer {
24 | font-weight: bold;
25 | }
26 | .dropbox-hide {
27 | display: none;
28 | }
29 |
30 | .dropbox-auth-button-hide {
31 | display: none;
32 | }
33 |
34 | .dropbox-revoke-auth-button-hide {
35 | display: none;
36 | }
37 |
38 | .onedrive-disclaimer {
39 | font-weight: bold;
40 | }
41 | .onedrive-hide {
42 | display: none;
43 | }
44 |
45 | .onedrive-auth-button-hide {
46 | display: none;
47 | }
48 |
49 | .onedrive-revoke-auth-button-hide {
50 | display: none;
51 | }
52 |
53 | .webdav-disclaimer {
54 | font-weight: bold;
55 | }
56 | .webdav-hide {
57 | display: none;
58 | }
59 |
60 | .qrcode-img {
61 | width: 350px;
62 | height: 350px;
63 | }
64 |
65 | /* !important isn't great practice but it needs to overwrite other css classes */
66 | .remotely-sync-hidden {
67 | display: none !important;
68 | }
69 |
70 | .remotely-sync-show-status-bar {
71 | display: flex !important;
72 | }
--------------------------------------------------------------------------------
/docs/how_to_debug/check_console_output.md:
--------------------------------------------------------------------------------
1 | # Check Console Output
2 |
3 | If you are using Obsidian on desktop or Android, you can check the Obsidian console.
4 |
5 | ## Disable Auto Sync Firstly
6 |
7 | You should disable auto sync to avoid any unexpected running.
8 |
9 | ## Set The Output Level To Debug
10 |
11 | Go to the plugin settings, scroll down to the section "Debug" -> "enable debug info", and change it from "false" to "true".
12 |
13 | ## Check The Output
14 |
15 | - If you are on desktop
16 |
17 | Press the keyboard shortcut "ctrl+shift+i" if you are on Windows or Linux, or press "cmd+shift+i" if you are on macOS. You should be able to see the console of Obsidian.
18 |
19 | - If you are using Android
20 |
21 | You need to [enable USB debugging](https://developer.android.com/studio/debug/dev-options#enable) on your Android, then connect your Android to a computer using USB, then open the **desktop** Chrome browser and go to the special web page `chrome://inspect/#devices`. You shall be able to see the "inspect" link inside the special page, then click the link to open the console. After debugging, remember to turn off USB debugging.
22 |
23 | Trigger the sync manually (by clicking the icon on the ribbon sidebar). Something (hopefully) helpful should show up in the console. You could understand what happened and what went wrong more explicitly by checking the output.
24 |
--------------------------------------------------------------------------------
/docs/how_to_debug/save_console_output_and_export.md:
--------------------------------------------------------------------------------
1 | # Save Console Output And Read Them Later
2 |
3 | ## Disable Auto Sync Firstly
4 |
5 | You should disable auto sync to avoid any unexpected running.
6 |
7 | ## Set The Output Level To Debug
8 |
9 | Go to the plugin settings, scroll down to the section "Debug" -> "enable debug info", and change it from "false" to "true".
10 |
11 | ## Enable Saving The Output To DB
12 |
13 | Go to the plugin settings, scroll down to the section "Debug" -> "Save Console Logs Into DB", and change it from "disable" to "enable". **This setting has some performance cost, so do NOT always turn this on when not necessary!**
14 |
15 | ## Run The Sync
16 |
17 | Trigger the sync manually (by clicking the icon on the ribbon sidebar). Something (hopefully) helpful should show up in the console. The console logs are also saved into the DB now.
18 |
19 | ## Export The Output And Read The Logs
20 |
21 | Go to the plugin settings, scroll down to the section "Debug" -> "Export Console Logs From DB", and click the button. A new file `log_hist_exported_on_....md` should be created inside the special folder `_debug_remotely_save/`. You could read it and hopefully find something useful.
22 |
23 | ## Disable Saving The Output To DB
24 |
25 | After debugging, go to the plugin settings, scroll down to the section "Debug" -> "Save Console Logs Into DB", and change it from "enable" to "disable".
26 |
--------------------------------------------------------------------------------
/src/i18n.ts:
--------------------------------------------------------------------------------
1 | import Mustache from "mustache";
2 | import { moment } from "obsidian";
3 |
4 | import { LANGS } from "./langs";
5 |
6 | export type LangType = keyof typeof LANGS;
7 | export type LangTypeAndAuto = LangType | "auto";
8 | export type TransItemType = keyof typeof LANGS["en"];
9 |
10 | export class I18n {
11 | lang: LangTypeAndAuto;
12 | readonly saveSettingFunc: (tolang: LangTypeAndAuto) => Promise;
13 | constructor(
14 | lang: LangTypeAndAuto,
15 | saveSettingFunc: (tolang: LangTypeAndAuto) => Promise
16 | ) {
17 | this.lang = lang;
18 | this.saveSettingFunc = saveSettingFunc;
19 | }
20 | async changeTo(anotherLang: LangTypeAndAuto) {
21 | this.lang = anotherLang;
22 | await this.saveSettingFunc(anotherLang);
23 | }
24 |
25 | _get(key: TransItemType) {
26 | let realLang = this.lang;
27 | if (this.lang === "auto" && moment.locale().replace("-", "_") in LANGS) {
28 | realLang = moment.locale().replace("-", "_") as LangType;
29 | } else {
30 | realLang = "en";
31 | }
32 |
33 | const res: string =
34 | (LANGS[realLang] as typeof LANGS["en"])[key] || LANGS["en"][key] || key;
35 | return res;
36 | }
37 |
38 | t(key: TransItemType, vars?: Record) {
39 | if (vars === undefined) {
40 | return this._get(key);
41 | }
42 | return Mustache.render(this._get(key), vars);
43 | }
44 | }
45 |
--------------------------------------------------------------------------------
/src/syncAlgoV2Notice.ts:
--------------------------------------------------------------------------------
1 | import { App, Modal, Setting } from "obsidian";
2 | import type { I18n } from "./i18n";
3 |
4 | export class SyncAlgoV2Modal extends Modal {
5 | result: boolean;
6 | onSubmit: (result: boolean) => void;
7 | i18n: I18n;
8 |
9 | constructor(app: App, i18n: I18n, onSubmit: (result: boolean) => void) {
10 | super(app);
11 | this.i18n = i18n;
12 | this.result = false;
13 | this.onSubmit = onSubmit;
14 | }
15 |
16 | onOpen() {
17 | let { contentEl } = this;
18 |
19 | contentEl.createEl("h2", {
20 | text: this.i18n.t("syncalgov2_title"),
21 | });
22 |
23 | this.i18n.t("syncalgov2_texts")
24 | .split("\n")
25 | .forEach((val) => {
26 | contentEl.createEl("p", {
27 | text: val,
28 | });
29 | });
30 |
31 | new Setting(contentEl)
32 | .addButton((button) => {
33 | button.setButtonText(this.i18n.t("syncalgov2_button_agree"));
34 | button.onClick(async () => {
35 | this.result = true;
36 | this.close();
37 | });
38 | })
39 | .addButton((button) => {
40 | button.setButtonText(this.i18n.t("syncalgov2_button_disagree"));
41 | button.onClick(() => {
42 | this.close();
43 | });
44 | });
45 | }
46 |
47 | onClose() {
48 | let { contentEl } = this;
49 | contentEl.empty();
50 |
51 | this.onSubmit(this.result);
52 | }
53 | }
54 |
--------------------------------------------------------------------------------
/docs/minimal_intrusive_design.md:
--------------------------------------------------------------------------------
1 | # Minimal Intrusive Design
2 |
3 | Before version 0.3.0, the plugin did not upload additional meta data to the remote.
4 |
5 | From and after version 0.3.0, the plugin uploads just the minimal extra necessary metadata to the remote.
6 |
7 | ## Benefits
8 |
9 | Then the plugin doesn't make more-than-necessary assumptions about information on the remote endpoint.
10 |
11 | For example, it's possible for a user to manually upload a file to s3, and next time the plugin can download that file to the local device.
12 |
13 | And it's also possible to combine another "sync-to-s3" solution (like, another software) on desktops, and this plugin on mobile devices, together.
14 |
15 | ## Necessity Of Uploading Extra Metadata
16 |
17 | The main issue comes from deletions (and renamings, which are actually interpreted as "deletion-then-creation").
18 |
19 | If we don't upload any extra info to the remote, there's usually no way for the second device to know what files / folders have been deleted on the first device.
20 |
21 | To overcome this issue, from and after version 0.3.0, the plugin uploads extra metadata files `_remotely-secure-metadata-on-remote.{json,bin}` to users' configured cloud services. Those files contain some info about what has been deleted on the first device, so that the second device can read the list to apply the deletions to itself. Some other necessary meta info would also be written into the extra files.
22 |
--------------------------------------------------------------------------------
/docs/sync_ignoring_large_files.md:
--------------------------------------------------------------------------------
1 | # Sync Ignoring Large Files
2 |
3 | Initially, the plugin does not ignore large files.
4 |
5 | From the new version in May 2022, it can ignore all files with some sizes. But we need some rules to make the function compatible with existing conditions.
6 |
7 | 1. If users are using E2E password mode, then the file sizes are compared on the **encrypted sizes**, rather than the original unencrypted file sizes. The reasons are: the encrypted ones are what are actually transferred, and the encrypted sizes can be computed from unencrypted sizes but not the reverse.
8 |
9 | 2. Assuming the file A, is already synced between local device and remote service before.
10 |
11 | - If the local size and remote size are both below the threshold, then the file can be synced normally.
12 | - If the local size and remote size are both above the threshold, then the file will be ignored normally.
13 | - If the local size is below the threshold, and the remote size is above the threshold, then the plugin **rejects** the sync, and throws the error to the user.
14 | - If the local size is above the threshold, and the remote size is below the threshold, then the plugin **rejects** the sync, and throws the error to the user.
15 | - When it comes to deletions, the same rules apply.
16 |
17 | The main point is that, if the file sizes "cross the line", the plugin does not introduce any further trouble and just refuses to work on this file.
18 |
--------------------------------------------------------------------------------
/.github/workflows/auto-build.yml:
--------------------------------------------------------------------------------
1 | # This workflow will do a clean install of node dependencies, cache/restore them, build the source code and run tests across different versions of node
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-nodejs-with-github-actions
3 |
4 | name: BuildCI
5 |
6 | on:
7 | push:
8 | branches: ["**"]
9 | pull_request:
10 | branches: [master]
11 |
12 | jobs:
13 | build:
14 | runs-on: ubuntu-latest
15 |
16 | environment: env-for-buildci
17 |
18 | env:
19 | DROPBOX_APP_KEY: ${{secrets.DROPBOX_APP_KEY}}
20 | ONEDRIVE_CLIENT_ID: ${{secrets.ONEDRIVE_CLIENT_ID}}
21 | ONEDRIVE_AUTHORITY: ${{secrets.ONEDRIVE_AUTHORITY}}
22 |
23 | strategy:
24 | matrix:
25 | node-version: [16.x]
26 | # See supported Node.js release schedule at https://nodejs.org/en/about/releases/
27 |
28 | steps:
29 | - name: Checkout codes
30 | uses: actions/checkout@v2
31 | with:
32 | lfs: true
33 | submodules: recursive
34 | - name: Checkout LFS
35 | run: git lfs checkout
36 | - name: Use Node.js ${{ matrix.node-version }}
37 | uses: actions/setup-node@v2
38 | with:
39 | node-version: ${{ matrix.node-version }}
40 | - run: npm install
41 | - run: npm test
42 | - run: npm run build
43 | - uses: actions/upload-artifact@v2
44 | with:
45 | name: my-dist
46 | path: |
47 | main.js
48 | manifest.json
49 | styles.css
50 |
--------------------------------------------------------------------------------
/src/configPersist.ts:
--------------------------------------------------------------------------------
1 | import { base64, base64url } from "rfc4648";
2 | import { reverseString } from "./misc";
3 |
4 | import type { RemotelySavePluginSettings } from "./baseTypes";
5 |
6 | import { log } from "./moreOnLog";
7 |
// Warning embedded into the obfuscated data.json so that anyone who opens the
// file directly knows not to share it or edit it by hand.
const DEFAULT_README: string =
  "The file contains sensitive info, so DO NOT take screenshot of, copy, or share it to anyone! It's also generated automatically, so do not edit it manually.";

// Shape of the obfuscated ("messy") on-disk config:
//   readme - the human-readable warning above
//   d      - reversed, unpadded base64url encoding of the JSON-ified settings
interface MessyConfigType {
  readme: string;
  d: string;
}
15 |
16 | /**
17 | * this should accept the result after loadData();
18 | */
19 | export const messyConfigToNormal = (
20 | x: MessyConfigType | RemotelySavePluginSettings | null | undefined
21 | ): RemotelySavePluginSettings | null | undefined => {
22 | if (x === null || x === undefined) {
23 | return x as any;
24 | }
25 | if ("readme" in x && "d" in x) {
26 | // we should decode
27 | const y = JSON.parse(
28 | (
29 | base64url.parse(reverseString(x["d"]), {
30 | out: Buffer.allocUnsafe as any,
31 | loose: true,
32 | }) as Buffer
33 | ).toString("utf-8")
34 | );
35 | return y;
36 | } else {
37 | return x;
38 | }
39 | };
40 |
41 | /**
42 | * this should accept the result of original config
43 | */
44 | export const normalConfigToMessy = (
45 | x: RemotelySavePluginSettings | null | undefined
46 | ) => {
47 | if (x === null || x === undefined) {
48 | return x;
49 | }
50 | const y = {
51 | readme: DEFAULT_README,
52 | d: reverseString(
53 | base64url.stringify(Buffer.from(JSON.stringify(x), "utf-8"), {
54 | pad: false,
55 | })
56 | ),
57 | };
58 | return y;
59 | };
60 |
--------------------------------------------------------------------------------
/src/presetRules.ts:
--------------------------------------------------------------------------------
1 | import type {
2 | RemotelySavePluginSettings,
3 | WebdavConfig,
4 | WebdavDepthType,
5 | } from "./baseTypes";
6 |
// Provider-specific webdav quirks. Each "depth" rule applies when the server
// address matches `url` (a regex source string): force the given depth
// strategy and whether directory listing must recurse manually.
const RULES = {
  webdav: {
    depth: [
      {
        // TeraCloud — presumably only supports Depth: 1 listings; confirm
        // against the provider before changing.
        url: "^https://(.+).teracloud.jp/.+",
        depth: "auto_1",
        manualRecursive: true,
      },
      {
        // Jianguoyun / Nutstore
        url: "^https://dav.jianguoyun.com/dav/",
        depth: "auto_1",
        manualRecursive: true,
      },
    ],
  },
};
23 |
24 | export const applyWebdavPresetRulesInplace = (
25 | webdav: Partial | undefined
26 | ) => {
27 | if (webdav === undefined) {
28 | return {
29 | changed: false,
30 | webdav: webdav,
31 | };
32 | }
33 | for (const { url, depth, manualRecursive } of RULES.webdav.depth) {
34 | if (
35 | webdav.address !== undefined &&
36 | new RegExp(url).test(webdav.address) &&
37 | webdav.depth !== undefined &&
38 | webdav.depth.startsWith("auto_") &&
39 | webdav.depth !== depth
40 | ) {
41 | webdav.depth = depth as WebdavDepthType;
42 | webdav.manualRecursive = manualRecursive;
43 | return {
44 | changed: true,
45 | webdav: webdav,
46 | };
47 | }
48 | }
49 | return {
50 | changed: false,
51 | webdav: webdav,
52 | };
53 | };
54 |
55 | export const applyPresetRulesInplace = (
56 | settings: RemotelySavePluginSettings | undefined
57 | ) => {
58 | if (settings === undefined) {
59 | return {
60 | changed: false,
61 | settings: settings,
62 | };
63 | }
64 | const webdavRes = applyWebdavPresetRulesInplace(settings.webdav);
65 | return {
66 | changed: webdavRes.changed,
67 | settings: settings,
68 | };
69 | };
70 |
--------------------------------------------------------------------------------
/docs/import_export_some_settings.md:
--------------------------------------------------------------------------------
1 | ## Bonus: Import And Export Not-Oauth2 Plugin Settings By QR Code
2 |
3 | It's often verbose / inconvenient to input not-oauth2 credentials on mobile. (On different desktop computers, it's trivial to copy and paste `data.json` across different devices.)
4 |
5 | So this plugin has a helper function to export those settings as a QR code, then you could use mobile device's camera to import the settings.
6 |
7 | Attention:
8 |
9 | 1. Oauth2 - related information is omitted. It means that Dropbox, OneDrive login credentials are NOT included in the QR Code.
10 | 2. Please, NEVER share the QR Code with others. It's equivalent to your login credentials.
11 |
12 | Steps:
13 |
14 | 1. Configure the plugin settings on one device, and make sure the sync functions work. (E.g, you could sync notes using S3 credentials.)
15 | 2. Open plugin settings page. Then scroll down the page, until the section "Import and Export Settings". Click the button "Get QR Code". A new modal should show up and you should see a QR code.
16 | 3. On a second device, make sure the vault name is the same as the first device's.
17 | 4. On that second device, use its camera app, or any apps that support scanning QR codes, to scan the QR code from the first device. (On latest iOS, the system's built in camera app should work. On Android, at least one open source app [Binary Eye](https://github.com/markusfisch/BinaryEye) is tested to be working.)
18 | 5. A link / url / address should be identified in the scan-QR-code app, and you could follow the instruction in the app, and then you should be redirected to open the Obsidian app.
19 | 6. And finally, there should be a new notice in the Obsidian app saying the settings are successfully imported. Otherwise please check the error message in the notice.
20 |
--------------------------------------------------------------------------------
/esbuild.config.mjs:
--------------------------------------------------------------------------------
// Load .env into process.env purely for its side effect; the module exports
// nothing useful, so the previously unused `dotenv` default binding is dropped.
import "dotenv/config";
import esbuild from "esbuild";
import process from "process";
// import builtins from 'builtin-modules'

// Banner prepended to the generated main.js.
const banner = `/*
THIS IS A GENERATED/BUNDLED FILE BY ESBUILD
if you want to view the source, please visit the github repository of this plugin
*/
`;

console.log(`esbuild version = ${esbuild.version}`);

// "production" (passed by the npm build script) => minified one-shot build;
// anything else => dev build with watch mode and inline sourcemaps.
const prod = process.argv[2] === "production";

// Oauth2 app credentials baked into the bundle at build time via `define`;
// they default to empty strings when the env vars are unset.
const DEFAULT_DROPBOX_APP_KEY = process.env.DROPBOX_APP_KEY || "";
const DEFAULT_ONEDRIVE_CLIENT_ID = process.env.ONEDRIVE_CLIENT_ID || "";
const DEFAULT_ONEDRIVE_AUTHORITY = process.env.ONEDRIVE_AUTHORITY || "";

esbuild
  .build({
    banner: {
      js: banner,
    },
    loader: {
      ".svg": "text",
    },
    entryPoints: ["./src/main.ts"],
    bundle: true,
    // Provided by Obsidian / Electron / Node at runtime; never bundled.
    external: [
      "obsidian",
      "electron",
      "fs",
      "tls",
      "net",
      "http",
      "https",
      // ...builtins
    ],
    inject: ["./esbuild.injecthelper.mjs"],
    format: "cjs",
    watch: !prod,
    target: "esnext",
    logLevel: "info",
    sourcemap: prod ? false : "inline",
    treeShaking: true,
    minify: prod,
    outfile: "main.js",
    define: {
      "process.env.DEFAULT_DROPBOX_APP_KEY": `"${DEFAULT_DROPBOX_APP_KEY}"`,
      "process.env.DEFAULT_ONEDRIVE_CLIENT_ID": `"${DEFAULT_ONEDRIVE_CLIENT_ID}"`,
      "process.env.DEFAULT_ONEDRIVE_AUTHORITY": `"${DEFAULT_ONEDRIVE_AUTHORITY}"`,
      global: "window",
      "process.env.NODE_DEBUG": `undefined`, // ugly fix
      "process.env.DEBUG": `undefined`, // ugly fix
    },
  })
  // esbuild already printed the error itself (logLevel: "info"); just fail.
  .catch(() => process.exit(1));
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.yml:
--------------------------------------------------------------------------------
1 | name: Bug Report
2 | description: Fire a bug report
3 | title: "[Bug]: "
4 | labels: ["bug"]
5 | body:
6 | - type: markdown
7 | attributes:
8 | value: |
9 | Thanks for taking the time to fill out this bug report!
10 | - type: textarea
11 | id: what-happened
12 | attributes:
13 | label: What happened?
14 | description: What did you expect to happen?
15 | placeholder: Tell us what you see!
16 | validations:
17 | required: true
18 | - type: dropdown
19 | id: os
20 | attributes:
21 | label: What OS are you using?
22 | multiple: true
23 | options:
24 | - Windows
25 | - Linux
26 | - macOS
27 | - iOS (iPhone, iPad)
28 | - Android
29 | validations:
30 | required: true
31 | - type: dropdown
32 | id: service
33 | attributes:
34 | label: What remote cloud services are you using?
35 | multiple: true
36 | options:
37 | - S3
38 | - OneDrive for personal
39 | - Dropbox
40 | - webdav
41 | - others
42 | validations:
43 | required: true
44 | - type: input
45 | id: version-plugin
46 | attributes:
47 | label: Version of the plugin
48 | description: What version of the plugin are you running?
49 | validations:
50 | required: false
51 | - type: input
52 | id: version-obsidian
53 | attributes:
54 | label: Version of Obsidian
55 | description: What version of Obsidian are you running?
56 | validations:
57 | required: false
58 | - type: checkboxes
59 | id: using-password
60 | attributes:
61 | label: Using password or not
62 | description: Are you using password (end-to-end encryption) or not?
63 | options:
64 | - label: Yes.
65 | required: false
66 | - type: checkboxes
67 | id: ensure-no-sensitive-information
68 | attributes:
69 | label: Ensure no sensitive information
70 | description: By submitting this issue, you ensure that no sensitive information is submitted in the issue.
71 | options:
72 | - label: I ensure that no sensitive information is submitted in the issue.
73 | required: true
74 |
--------------------------------------------------------------------------------
/assets/branding/README.md:
--------------------------------------------------------------------------------
1 | # the logo
2 |
3 | Some different sizes' logos are needed.
4 |
5 | ## license
6 |
7 | [![CC BY-SA 4.0](https://licensebuttons.net/l/by-sa/4.0/88x31.png)](https://creativecommons.org/licenses/by-sa/4.0/)
This work is licensed under a [Creative Commons Attribution-ShareAlike 4.0 International License](https://creativecommons.org/licenses/by-sa/4.0/).
8 |
9 | ## the process of creation
10 |
11 | No professional designers are here. Thus the following steps involve many programmer styles. :-)
12 |
13 | 1. use excalidraw and export png and svg.
14 |
15 | ```bash
16 | # results
17 | logo.excalidraw
18 | logo.png
19 | logo.svg
20 | ```
21 |
22 | 2. manually edit the `logo.svg` and make background transparent.
23 |
24 | ```bash
25 | # results
26 | logo-transparent.svg
27 | ```
28 |
29 | 3. use python library [`svgutils`](https://github.com/btel/svg_utils) to make a strictly square figure. The [doc](https://svgutils.readthedocs.io/en/latest/tutorials/composing_multipanel_figures.html) is very useful.
30 |
31 | ```python
32 | from svgutils.compose import *
33 | def get_standard_300x300(file_name):
34 | fig = Figure(300, 300,
35 | Panel(
36 | SVG(file_name),
37 | ).move(-3, 12),
38 | )
39 | return fig
40 |
41 | get_standard_300x300('logo-transparent.svg').save('300x300.svg')
42 |
43 | # def get_other_size_from_standard(file_name, px):
44 | # fig = Figure(px, px,
45 | # Panel(
46 | # SVG(file_name).scale(px/300.0),
47 | # ).move(-3*px/300.0, 12*px/300.0),
48 | # )
49 | # return fig
50 |
51 | # get_other_size_from_standard('logo.svg',256).save('256x256.svg')
52 | ```
53 |
54 | ```bash
55 | # results
56 | 300x300.svg
57 | ```
58 |
59 | 4. use `inkscape` command line to get different sizes' `.png` files.
60 |
61 | ```bash
62 | inkscape 300x300.svg -o 300x300.png
63 |
64 | inkscape 300x300.svg -o 50x50.png -w 50 -h 50
65 |
66 | inkscape 300x300.svg -o 64x64.png -w 64 -h 64
67 | inkscape 300x300.svg -o 256x256.png -w 256 -h 256
68 | ```
69 |
70 | ```bash
71 | # results
72 | 50x50.png
73 | 64x64.png
74 | 256x256.png
75 | ```
76 |
--------------------------------------------------------------------------------
/src/importExport.ts:
--------------------------------------------------------------------------------
1 | import QRCode from "qrcode";
2 | import cloneDeep from "lodash/cloneDeep";
3 |
4 | import {
5 | COMMAND_URI,
6 | UriParams,
7 | RemotelySavePluginSettings,
8 | } from "./baseTypes";
9 |
10 | import { log } from "./moreOnLog";
11 |
12 | export const exportQrCodeUri = async (
13 | settings: RemotelySavePluginSettings,
14 | currentVaultName: string,
15 | pluginVersion: string
16 | ) => {
17 | const settings2 = cloneDeep(settings);
18 | delete settings2.dropbox;
19 | delete settings2.onedrive;
20 | delete settings2.vaultRandomID;
21 | const data = encodeURIComponent(JSON.stringify(settings2));
22 | const vault = encodeURIComponent(currentVaultName);
23 | const version = encodeURIComponent(pluginVersion);
24 | const rawUri = `obsidian://${COMMAND_URI}?func=settings&version=${version}&vault=${vault}&data=${data}`;
25 | const imgUri = await QRCode.toDataURL(rawUri);
26 | return {
27 | rawUri,
28 | imgUri,
29 | };
30 | };
31 |
// Outcome of importQrCodeUri: on "ok", `result` carries the parsed settings;
// on "error", `message` explains what was wrong with the uri.
export interface ProcessQrCodeResultType {
  status: "error" | "ok";
  message: string;
  result?: RemotelySavePluginSettings;
}
37 |
38 | export const importQrCodeUri = (
39 | inputParams: any,
40 | currentVaultName: string
41 | ): ProcessQrCodeResultType => {
42 | let params = inputParams as UriParams;
43 | if (
44 | params.func === undefined ||
45 | params.func !== "settings" ||
46 | params.vault === undefined ||
47 | params.data === undefined
48 | ) {
49 | return {
50 | status: "error",
51 | message: `the uri is not for exporting/importing settings: ${JSON.stringify(
52 | inputParams
53 | )}`,
54 | };
55 | }
56 |
57 | if (params.vault !== currentVaultName) {
58 | return {
59 | status: "error",
60 | message: `the target vault is ${
61 | params.vault
62 | } but you are currently in ${currentVaultName}: ${JSON.stringify(
63 | inputParams
64 | )}`,
65 | };
66 | }
67 |
68 | let settings = {} as RemotelySavePluginSettings;
69 | try {
70 | settings = JSON.parse(params.data);
71 | } catch (e) {
72 | return {
73 | status: "error",
74 | message: `errors while parsing settings: ${JSON.stringify(inputParams)}`,
75 | };
76 | }
77 | return {
78 | status: "ok",
79 | message: "ok",
80 | result: settings,
81 | };
82 | };
83 |
--------------------------------------------------------------------------------
/tests/metadataOnRemote.test.ts:
--------------------------------------------------------------------------------
1 | import * as chai from "chai";
2 | import chaiAsPromised from "chai-as-promised";
3 |
4 | import {
5 | isEqualMetadataOnRemote,
6 | MetadataOnRemote,
7 | } from "../src/metadataOnRemote";
8 |
9 | chai.use(chaiAsPromised);
10 | const expect = chai.expect;
11 |
12 | describe("Metadata operations tests", () => {
13 | it("should compare objects deeply", async () => {
14 | const a: MetadataOnRemote = {
15 | deletions: [
16 | { key: "xxx", actionWhen: 1 },
17 | { key: "yyy", actionWhen: 2 },
18 | ],
19 | };
20 | const b: MetadataOnRemote = {
21 | deletions: [
22 | { key: "xxx", actionWhen: 1 },
23 | { key: "yyy", actionWhen: 2 },
24 | ],
25 | };
26 |
27 | expect(isEqualMetadataOnRemote(a, b));
28 | });
29 |
30 | it("should find diff", async () => {
31 | const a: MetadataOnRemote = {
32 | deletions: [
33 | { key: "xxxx", actionWhen: 1 },
34 | { key: "yyy", actionWhen: 2 },
35 | ],
36 | };
37 | const b: MetadataOnRemote = {
38 | deletions: [
39 | { key: "xxx", actionWhen: 1 },
40 | { key: "yyy", actionWhen: 2 },
41 | ],
42 | };
43 |
44 | expect(!isEqualMetadataOnRemote(a, b));
45 | });
46 |
47 | it("should treat undefined correctly", async () => {
48 | const a: MetadataOnRemote = undefined;
49 | let b: MetadataOnRemote = {
50 | deletions: [
51 | { key: "xxx", actionWhen: 1 },
52 | { key: "yyy", actionWhen: 2 },
53 | ],
54 | };
55 |
56 | expect(!isEqualMetadataOnRemote(a, b));
57 |
58 | b = { deletions: [] };
59 | expect(isEqualMetadataOnRemote(a, b));
60 |
61 | b = { deletions: undefined };
62 | expect(isEqualMetadataOnRemote(a, b));
63 |
64 | b = undefined;
65 | expect(isEqualMetadataOnRemote(a, b));
66 | });
67 |
68 | it("should ignore generated at fields", async () => {
69 | const a: MetadataOnRemote = {
70 | deletions: [
71 | { key: "xxxx", actionWhen: 1 },
72 | { key: "yyy", actionWhen: 2 },
73 | ],
74 | generatedWhen: 1,
75 | };
76 | const b: MetadataOnRemote = {
77 | deletions: [
78 | { key: "xxx", actionWhen: 1 },
79 | { key: "yyy", actionWhen: 2 },
80 | ],
81 | generatedWhen: 2,
82 | };
83 |
84 | expect(isEqualMetadataOnRemote(a, b));
85 | });
86 | });
87 |
--------------------------------------------------------------------------------
/src/syncSizesConflictNotice.ts:
--------------------------------------------------------------------------------
1 | import { App, Modal, Notice, PluginSettingTab, Setting } from "obsidian";
2 | import type RemotelySavePlugin from "./main"; // unavoidable
3 | import type { TransItemType } from "./i18n";
4 | import type { FileOrFolderMixedState } from "./baseTypes";
5 |
6 | import { log } from "./moreOnLog";
7 |
8 | export class SizesConflictModal extends Modal {
9 | readonly plugin: RemotelySavePlugin;
10 | readonly skipSizeLargerThan: number;
11 | readonly sizesGoWrong: FileOrFolderMixedState[];
12 | readonly hasPassword: boolean;
13 | constructor(
14 | app: App,
15 | plugin: RemotelySavePlugin,
16 | skipSizeLargerThan: number,
17 | sizesGoWrong: FileOrFolderMixedState[],
18 | hasPassword: boolean
19 | ) {
20 | super(app);
21 | this.plugin = plugin;
22 | this.skipSizeLargerThan = skipSizeLargerThan;
23 | this.sizesGoWrong = sizesGoWrong;
24 | this.hasPassword = hasPassword;
25 | }
26 | onOpen() {
27 | let { contentEl } = this;
28 | const t = (x: TransItemType, vars?: any) => {
29 | return this.plugin.i18n.t(x, vars);
30 | };
31 |
32 | contentEl.createEl("h2", {
33 | text: t("modal_sizesconflict_title"),
34 | });
35 |
36 | t("modal_sizesconflict_desc", {
37 | thresholdMB: `${this.skipSizeLargerThan / 1000 / 1000}`,
38 | thresholdBytes: `${this.skipSizeLargerThan}`,
39 | })
40 | .split("\n")
41 | .forEach((val) => {
42 | contentEl.createEl("p", { text: val });
43 | });
44 |
45 | const info = this.serialize();
46 |
47 | contentEl.createDiv().createEl(
48 | "button",
49 | {
50 | text: t("modal_sizesconflict_copybutton"),
51 | },
52 | (el) => {
53 | el.onclick = async () => {
54 | await navigator.clipboard.writeText(info);
55 | new Notice(t("modal_sizesconflict_copynotice"));
56 | };
57 | }
58 | );
59 |
60 | contentEl.createEl("pre", {
61 | text: info,
62 | });
63 | }
64 |
65 | serialize() {
66 | return this.sizesGoWrong
67 | .map((x) => {
68 | return [
69 | x.key,
70 | this.hasPassword
71 | ? `encrypted name: ${x.remoteEncryptedKey}`
72 | : undefined,
73 | `local ${this.hasPassword ? "encrypted " : ""}bytes: ${
74 | this.hasPassword ? x.sizeLocalEnc : x.sizeLocal
75 | }`,
76 | `remote ${this.hasPassword ? "encrypted " : ""}bytes: ${
77 | this.hasPassword ? x.sizeRemoteEnc : x.sizeRemote
78 | }`,
79 | ]
80 | .filter((tmp) => tmp !== undefined)
81 | .join("\n");
82 | })
83 | .join("\n\n");
84 | }
85 |
86 | onClose() {
87 | let { contentEl } = this;
88 | contentEl.empty();
89 | }
90 | }
91 |
--------------------------------------------------------------------------------
/tests/encrypt.test.ts:
--------------------------------------------------------------------------------
1 | import * as chai from "chai";
2 | import chaiAsPromised from "chai-as-promised";
3 | import * as fs from "fs";
4 | import {
5 | decryptArrayBuffer,
6 | decryptBase32ToString,
7 | encryptArrayBuffer,
8 | encryptStringToBase32,
9 | encryptStringToBase64url,
10 | getSizeFromEncToOrig,
11 | getSizeFromOrigToEnc,
12 | } from "../src/encrypt";
13 | import { base64ToBase64url, bufferToArrayBuffer } from "../src/misc";
14 |
15 | chai.use(chaiAsPromised);
16 | const expect = chai.expect;
17 |
18 | describe("Encryption tests", () => {
19 | beforeEach(function () {
20 | global.window = {
21 | crypto: require("crypto").webcrypto,
22 | } as any;
23 | });
24 |
25 | it("should encrypt string", async () => {
26 | const k = "dkjdhkfhdkjgsdklxxd";
27 | const password = "hey";
28 | expect(await encryptStringToBase32(k, password)).to.not.equal(k);
29 | });
30 |
31 | it("should raise error using different password", async () => {
32 | const k = "secret text";
33 | const password = "hey";
34 | const password2 = "hey2";
35 | const enc = await encryptStringToBase32(k, password);
36 | await expect(decryptBase32ToString(enc, password2)).to.be.rejected;
37 | });
38 |
39 | it("should encrypt and decrypt string and get the same result returned", async () => {
40 | const k = "jfkkjkjbce7983ycdeknkkjckooAIUHIDIBIE((*BII)njD/d/dd/d/sjxhux";
41 | const password = "hfiuibec989###oiu982bj1`";
42 | const enc = await encryptStringToBase32(k, password);
43 | // console.log(enc);
44 | const dec = await decryptBase32ToString(enc, password);
45 | // console.log(dec);
46 | expect(dec).equal(k);
47 | });
48 |
49 | it("should get size from origin to encrypted correctly", () => {
50 | expect(() => getSizeFromOrigToEnc(-1)).to.throw();
51 | expect(() => getSizeFromOrigToEnc(0.5)).to.throw();
52 | expect(getSizeFromOrigToEnc(0)).equals(32);
53 | expect(getSizeFromOrigToEnc(15)).equals(32);
54 | expect(getSizeFromOrigToEnc(16)).equals(48);
55 | expect(getSizeFromOrigToEnc(31)).equals(48);
56 | expect(getSizeFromOrigToEnc(32)).equals(64);
57 | expect(getSizeFromOrigToEnc(14787203)).equals(14787232);
58 | });
59 |
60 | it("should get size from encrypted to origin correctly", () => {
61 | expect(() => getSizeFromEncToOrig(-1)).to.throw();
62 | expect(() => getSizeFromEncToOrig(30)).to.throw();
63 |
64 | expect(getSizeFromEncToOrig(32)).to.deep.equal({
65 | minSize: 0,
66 | maxSize: 15,
67 | });
68 | expect(getSizeFromEncToOrig(48)).to.deep.equal({
69 | minSize: 16,
70 | maxSize: 31,
71 | });
72 |
73 | expect(() => getSizeFromEncToOrig(14787231)).to.throw();
74 |
75 | let { minSize, maxSize } = getSizeFromEncToOrig(14787232);
76 | expect(minSize <= 14787203 && 14787203 <= maxSize).to.be.true;
77 | });
78 | });
79 |
--------------------------------------------------------------------------------
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
# modified from https://github.com/argenos/nldates-obsidian/blob/master/.github/workflows/release.yml
# which is released under MIT License

# trigger:
# modify manifest.json, package.json, versions.json
# git tag
# git push origin --tags

name: Release A New Version

on:
  push:
    tags:
      - "*" # Push events to matching any tag format, i.e. 1.0, 20.15.10

jobs:
  build:
    runs-on: ubuntu-latest

    # environment holding the cloud-service credentials injected below
    environment: env-for-buildci

    env:
      DROPBOX_APP_KEY: ${{secrets.DROPBOX_APP_KEY}}
      ONEDRIVE_CLIENT_ID: ${{secrets.ONEDRIVE_CLIENT_ID}}
      ONEDRIVE_AUTHORITY: ${{secrets.ONEDRIVE_AUTHORITY}}

    strategy:
      matrix:
        node-version: [16.x]

    steps:
      - name: Checkout codes
        # NOTE(review): actions/checkout@v2 and actions/setup-node@v2 run on
        # a deprecated Node runtime on current GitHub runners; consider
        # bumping to @v4 — confirm no LFS/submodule behavior changes.
        uses: actions/checkout@v2
        with:
          lfs: true
          submodules: recursive
      - name: Checkout LFS
        run: git lfs checkout
      - name: Use Node.js ${{ matrix.node-version }}
        uses: actions/setup-node@v2
        with:
          node-version: ${{ matrix.node-version }}
      - run: npm install
      - run: npm test
      - run: npm run build
      - name: Create Release
        id: create_release
        # NOTE(review): actions/create-release is archived/unmaintained;
        # softprops/action-gh-release is the common replacement and can also
        # upload all assets in a single step. Kept as-is to avoid a
        # behavioral change in this pass.
        uses: actions/create-release@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
          VERSION: ${{ github.ref }}
        with:
          tag_name: ${{ github.ref }}
          release_name: ${{ github.ref }}
          draft: false
          prerelease: true
      - name: Upload main.js
        id: upload-main
        # NOTE(review): actions/upload-release-asset is archived too (same
        # replacement applies).
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./main.js
          asset_name: main.js
          asset_content_type: text/javascript
      - name: Upload manifest.json
        id: upload-manifest
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./manifest.json
          asset_name: manifest.json
          asset_content_type: application/json
      - name: Upload styles.css
        id: upload-styles
        uses: actions/upload-release-asset@v1
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        with:
          upload_url: ${{ steps.create_release.outputs.upload_url }}
          asset_path: ./styles.css
          asset_name: styles.css
          asset_content_type: text/css
--------------------------------------------------------------------------------
/webpack.config.js:
--------------------------------------------------------------------------------
1 | require("dotenv").config();
2 | const path = require("path");
3 | const webpack = require("webpack");
4 | const TerserPlugin = require("terser-webpack-plugin");
5 |
6 | const DEFAULT_DROPBOX_APP_KEY = process.env.DROPBOX_APP_KEY || "";
7 | const DEFAULT_ONEDRIVE_CLIENT_ID = process.env.ONEDRIVE_CLIENT_ID || "";
8 | const DEFAULT_ONEDRIVE_AUTHORITY = process.env.ONEDRIVE_AUTHORITY || "";
9 |
10 | module.exports = {
11 | entry: "./src/main.ts",
12 | target: "web",
13 | output: {
14 | filename: "main.js",
15 | path: __dirname,
16 | libraryTarget: "commonjs",
17 | },
18 | plugins: [
19 | new webpack.DefinePlugin({
20 | "process.env.DEFAULT_DROPBOX_APP_KEY": `"${DEFAULT_DROPBOX_APP_KEY}"`,
21 | "process.env.DEFAULT_ONEDRIVE_CLIENT_ID": `"${DEFAULT_ONEDRIVE_CLIENT_ID}"`,
22 | "process.env.DEFAULT_ONEDRIVE_AUTHORITY": `"${DEFAULT_ONEDRIVE_AUTHORITY}"`,
23 | }),
24 | // Work around for Buffer is undefined:
25 | // https://github.com/webpack/changelog-v5/issues/10
26 | new webpack.ProvidePlugin({
27 | Buffer: ["buffer", "Buffer"],
28 | }),
29 | new webpack.ProvidePlugin({
30 | process: "process/browser",
31 | }),
32 | ],
33 | module: {
34 | rules: [
35 | {
36 | test: /\.tsx?$/,
37 | use: "ts-loader",
38 | exclude: /node_modules/,
39 | },
40 | {
41 | test: /\.svg?$/,
42 | type: "asset/source",
43 | },
44 | ],
45 | },
46 | resolve: {
47 | extensions: [".tsx", ".ts", ".js"],
48 | mainFields: ["browser", "module", "main"],
49 | fallback: {
50 | // assert: require.resolve("assert"),
51 | // buffer: require.resolve("buffer/"),
52 | // console: require.resolve("console-browserify"),
53 | // constants: require.resolve("constants-browserify"),
54 | crypto: require.resolve("crypto-browserify"),
55 | // crypto: false,
56 | // domain: require.resolve("domain-browser"),
57 | // events: require.resolve("events"),
58 | // http: require.resolve("stream-http"),
59 | // https: require.resolve("https-browserify"),
60 | net: false,
61 | // os: require.resolve("os-browserify/browser"),
62 | path: require.resolve("path-browserify"),
63 | // punycode: require.resolve("punycode"),
64 | process: require.resolve("process/browser"),
65 | // querystring: require.resolve("querystring-es3"),
66 | stream: require.resolve("stream-browserify"),
67 | // string_decoder: require.resolve("string_decoder"),
68 | // sys: require.resolve("util"),
69 | // timers: require.resolve("timers-browserify"),
70 | tls: false,
71 | // tty: require.resolve("tty-browserify"),
72 | url: require.resolve("url/"),
73 | // util: require.resolve("util"),
74 | // vm: require.resolve("vm-browserify"),
75 | // zlib: require.resolve("browserify-zlib"),
76 | },
77 | },
78 | externals: {
79 | obsidian: "commonjs2 obsidian",
80 | },
81 | optimization: {
82 | minimize: true,
83 | minimizer: [new TerserPlugin({ extractComments: false })],
84 | },
85 | };
86 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "remotely-sync",
3 | "version": "0.4.49",
4 | "description": "Security updates for the remotely-save unofficial plugin allowing users to synchronize notes between local device and the cloud service. Not backwards compatible.",
5 | "scripts": {
6 | "dev2": "node esbuild.config.mjs",
7 | "build2": "tsc -noEmit -skipLibCheck && node esbuild.config.mjs production",
8 | "build": "webpack --mode production",
9 | "dev": "webpack --mode development --watch",
10 | "format": "npx prettier --write .",
11 | "clean": "npx rimraf main.js",
12 | "test": "cross-env TS_NODE_COMPILER_OPTIONS={\\\"module\\\":\\\"commonjs\\\"} mocha -r ts-node/register 'tests/**/*.ts'"
13 | },
14 | "browser": {
15 | "path": "path-browserify",
16 | "process": "process/browser",
17 | "stream": "stream-browserify",
18 | "crypto": "crypto-browserify",
19 | "url": "url/"
20 | },
21 | "source": "main.ts",
22 | "keywords": [],
23 | "author": "",
24 | "license": "Apache-2.0",
25 | "devDependencies": {
26 | "@microsoft/microsoft-graph-types": "^2.19.0",
27 | "@types/chai": "^4.3.1",
28 | "@types/chai-as-promised": "^7.1.5",
29 | "@types/jsdom": "^16.2.14",
30 | "@types/lodash": "^4.14.182",
31 | "@types/mime-types": "^2.1.1",
32 | "@types/mocha": "^9.1.1",
33 | "@types/mustache": "^4.1.2",
34 | "@types/node": "^17.0.30",
35 | "@types/qrcode": "^1.4.2",
36 | "builtin-modules": "^3.2.0",
37 | "chai": "^4.3.6",
38 | "chai-as-promised": "^7.1.1",
39 | "cross-env": "^7.0.3",
40 | "dotenv": "^16.0.0",
41 | "esbuild": "^0.14.38",
42 | "jsdom": "^19.0.0",
43 | "mocha": "^9.2.2",
44 | "prettier": "^2.6.2",
45 | "ts-loader": "^9.2.9",
46 | "ts-node": "^10.7.0",
47 | "tslib": "^2.4.0",
48 | "typescript": "^4.6.4",
49 | "webdav-server": "^2.6.2",
50 | "webpack": "^5.72.0",
51 | "webpack-cli": "^4.9.2"
52 | },
53 | "dependencies": {
54 | "@aws-sdk/client-s3": "^3.427.0",
55 | "@aws-sdk/fetch-http-handler": "3.78.0",
56 | "@aws-sdk/lib-storage": "3.449.0",
57 | "@aws-sdk/protocol-http": "3.78.0",
58 | "@aws-sdk/querystring-builder": "3.78.0",
59 | "@aws-sdk/signature-v4-crt": "3.78.0",
60 | "@aws-sdk/types": "^3.78.0",
61 | "@azure/msal-node": "^2.5.1",
62 | "@fyears/tsqueue": "^1.0.1",
63 | "@microsoft/microsoft-graph-client": "^3.0.2",
64 | "acorn": "^8.7.1",
65 | "aggregate-error": "^4.0.0",
66 | "assert": "^2.0.0",
67 | "aws-crt": "^1.12.1",
68 | "buffer": "^6.0.3",
69 | "crypto-browserify": "^3.12.0",
70 | "delay": "^5.0.0",
71 | "dropbox": "^10.28.0",
72 | "emoji-regex": "^10.1.0",
73 | "http-status-codes": "^2.2.0",
74 | "localforage": "^1.10.0",
75 | "lodash": "^4.17.21",
76 | "loglevel": "^1.8.0",
77 | "mime-types": "^2.1.35",
78 | "mustache": "^4.2.0",
79 | "nanoid": "^3.3.3",
80 | "obsidian": "^1.4.11",
81 | "p-queue": "^7.2.0",
82 | "path-browserify": "^1.0.1",
83 | "process": "^0.11.10",
84 | "qrcode": "^1.5.0",
85 | "rfc4648": "^1.5.1",
86 | "rimraf": "^3.0.2",
87 | "stream-browserify": "^3.0.0",
88 | "url": "^0.11.0",
89 | "util": "^0.12.4",
90 | "webdav": "^4.9.0",
91 | "webdav-fs": "^4.0.1",
92 | "xregexp": "^5.1.0"
93 | }
94 | }
95 |
--------------------------------------------------------------------------------
/src/metadataOnRemote.ts:
--------------------------------------------------------------------------------
1 | import isEqual from "lodash/isEqual";
2 | import { base64url } from "rfc4648";
3 | import { reverseString } from "./misc";
4 | import { log } from "./moreOnLog";
5 |
// Human-readable warning embedded in the metadata file stored remotely.
const DEFAULT_README_FOR_METADATAONREMOTE =
  "Do NOT edit or delete the file manually. This file is for the plugin remotely-sync to store some necessary meta data on the remote services. Its content is slightly obfuscated.";

// Schema version stamp (date-like string) written into new metadata files.
const DEFAULT_VERSION_FOR_METADATAONREMOTE = "20220220";

// File names used on the remote service for the metadata payload.
export const DEFAULT_FILE_NAME_FOR_METADATAONREMOTE =
  "_remotely-secure-metadata-on-remote.json";

export const DEFAULT_FILE_NAME_FOR_METADATAONREMOTE2 =
  "_remotely-secure-metadata-on-remote.bin";

// Standard plugin settings file name.
export const FILE_NAME_FOR_DATA_JSON =
  "data.json";

export const FILE_NAME_FOR_BOOKMARK_FILE =
  "/bookmarks.json";

// One deletion record kept on the remote side: which key was deleted and
// when (presumably epoch ms, matching the Date.now() used for
// generatedWhen — confirm at call sites).
export interface DeletionOnRemote {
  key: string;
  actionWhen: number;
}

// The metadata document synced to the remote service. Only `deletions`
// participates in equality checks (see isEqualMetadataOnRemote).
export interface MetadataOnRemote {
  version?: string;
  generatedWhen?: number;
  deletions?: DeletionOnRemote[];
}
33 |
34 | export const isEqualMetadataOnRemote = (
35 | a: MetadataOnRemote,
36 | b: MetadataOnRemote
37 | ) => {
38 | const m1 = a === undefined ? { deletions: [] } : a;
39 | const m2 = b === undefined ? { deletions: [] } : b;
40 |
41 | // we only need to compare deletions
42 | const d1 = m1.deletions === undefined ? [] : m1.deletions;
43 | const d2 = m2.deletions === undefined ? [] : m2.deletions;
44 | return isEqual(d1, d2);
45 | };
46 |
47 | export const serializeMetadataOnRemote = (x: MetadataOnRemote) => {
48 | const y = x;
49 |
50 | if (y["version"] === undefined) {
51 | y["version"] === DEFAULT_VERSION_FOR_METADATAONREMOTE;
52 | }
53 | if (y["generatedWhen"] === undefined) {
54 | y["generatedWhen"] = Date.now();
55 | }
56 | if (y["deletions"] === undefined) {
57 | y["deletions"] = [];
58 | }
59 |
60 | const z = {
61 | readme: DEFAULT_README_FOR_METADATAONREMOTE,
62 | d: reverseString(
63 | base64url.stringify(Buffer.from(JSON.stringify(x), "utf-8"), {
64 | pad: false,
65 | })
66 | ),
67 | };
68 |
69 | return JSON.stringify(z, null, 2);
70 | };
71 |
72 | export const deserializeMetadataOnRemote = (x: string | ArrayBuffer) => {
73 | let y1 = "";
74 | if (typeof x === "string") {
75 | y1 = x;
76 | } else {
77 | y1 = new TextDecoder().decode(x);
78 | }
79 |
80 | let y2: any;
81 | try {
82 | y2 = JSON.parse(y1);
83 | } catch (e) {
84 | throw new Error(
85 | `invalid remote meta data file with first few chars: ${y1.slice(0, 5)}`
86 | );
87 | }
88 |
89 | if (!("readme" in y2 && "d" in y2)) {
90 | throw new Error(
91 | 'invalid remote meta data file (no "readme" or "d" fields)!'
92 | );
93 | }
94 |
95 | let y3: string;
96 | try {
97 | y3 = (
98 | base64url.parse(reverseString(y2["d"]), {
99 | out: Buffer.allocUnsafe as any,
100 | loose: true,
101 | }) as Buffer
102 | ).toString("utf-8");
103 | } catch (e) {
104 | throw new Error('invalid remote meta data file (invalid "d" field)!');
105 | }
106 |
107 | let y4: MetadataOnRemote;
108 | try {
109 | y4 = JSON.parse(y3) as MetadataOnRemote;
110 | } catch (e) {
111 | throw new Error(
112 | `invalid remote meta data file with \"d\" field with first few chars: ${y3.slice(
113 | 0,
114 | 5
115 | )}`
116 | );
117 | }
118 | return y4;
119 | };
120 |
--------------------------------------------------------------------------------
/tests/presetRules.test.ts:
--------------------------------------------------------------------------------
1 | import { expect } from "chai";
2 | import type { WebdavConfig } from "../src/baseTypes";
3 | import { applyWebdavPresetRulesInplace } from "../src/presetRules";
4 |
5 | describe("Preset rules tests", () => {
6 | it("should check undefined correctly", () => {
7 | let x: Partial | undefined = undefined;
8 | const y = applyWebdavPresetRulesInplace(x);
9 | expect(y.webdav === undefined);
10 | expect(!y.changed);
11 | });
12 |
13 | it("should check empty object", () => {
14 | let x: Partial | undefined = {};
15 | const y = applyWebdavPresetRulesInplace(x);
16 | expect(y.webdav).deep.equals({});
17 | expect(!y.changed);
18 | });
19 |
20 | it("should modify depths correctly", () => {
21 | let x: Partial = {
22 | address: "https://example.teracloud.jp/dav/",
23 | depth: "auto_unknown",
24 | };
25 | let y = applyWebdavPresetRulesInplace(x);
26 | expect(x.depth === "auto_1");
27 | expect(y.changed);
28 |
29 | x = {
30 | address: "https://example.teracloud.jp/dav/example",
31 | depth: "auto_unknown",
32 | };
33 | y = applyWebdavPresetRulesInplace(x);
34 | expect(x.depth === "auto_1");
35 | expect(y.changed);
36 |
37 | x = {
38 | address: "https://dav.jianguoyun.com/dav/",
39 | depth: "auto_unknown",
40 | };
41 | y = applyWebdavPresetRulesInplace(x);
42 | expect(x.depth === "auto_1");
43 | expect(y.changed);
44 |
45 | x = {
46 | address: "https://dav.jianguoyun.com/dav/",
47 | depth: "auto_infinity",
48 | };
49 | y = applyWebdavPresetRulesInplace(x);
50 | expect(x.depth === "auto_1");
51 | expect(y.changed);
52 | });
53 |
54 | it("should not modify depths if depths is set automatically correctly", () => {
55 | let x: Partial = {
56 | address: "https://dav.jianguoyun.com/dav/",
57 | depth: "auto_1",
58 | };
59 | let y = applyWebdavPresetRulesInplace(x);
60 | expect(x.depth === "auto_1");
61 | expect(!y.changed);
62 | });
63 |
64 | it("should not modify depths if depths have been set manually", () => {
65 | let x: Partial = {
66 | address: "https://example.teracloud.jp/dav/",
67 | depth: "manual_infinity",
68 | };
69 | let y = applyWebdavPresetRulesInplace(x);
70 | expect(x.depth === "manual_infinity");
71 | expect(!y.changed);
72 |
73 | x = {
74 | address: "https://example.teracloud.jp/dav/example",
75 | depth: "manual_1",
76 | };
77 | y = applyWebdavPresetRulesInplace(x);
78 | expect(x.depth === "manual_1");
79 | expect(!y.changed);
80 | });
81 |
82 | it("should not modify depths when urls are not in preset rules", () => {
83 | let x: Partial = {
84 | address: "https://teracloud.jp/dav/",
85 | depth: "auto_unknown",
86 | };
87 | applyWebdavPresetRulesInplace(x);
88 | expect(x.depth === "auto_unknown");
89 |
90 | x = {
91 | address: "https://dav.jianguoyun.com/dav_example",
92 | depth: "auto_unknown",
93 | };
94 | applyWebdavPresetRulesInplace(x);
95 | expect(x.depth === "auto_unknown");
96 |
97 | x = {
98 | address: "",
99 | depth: "auto_unknown",
100 | };
101 | applyWebdavPresetRulesInplace(x);
102 | expect(x.depth === "auto_unknown");
103 |
104 | x = {
105 | address: "https://dav.jianguoyun.com/dav/",
106 | depth: "what" as any,
107 | };
108 | applyWebdavPresetRulesInplace(x);
109 | expect(x.depth === ("what" as any));
110 | });
111 | });
112 |
--------------------------------------------------------------------------------
/docs/how_to_debug/export_sync_plans.md:
--------------------------------------------------------------------------------
1 | # Export Sync Plans
2 |
3 | ## What's this?
4 |
Every time the plugin starts a sync, it gathers all the required information together, generates a "sync plan" describing the operation to apply to every file and folder, and assigns the corresponding actual operations.
6 |
7 | Thus, if something goes wrong, we should check the sync plan firstly.
8 |
9 | ## How To Export The Plans?
10 |
11 | Please read through the following instructions.
12 |
13 | ### Disable Auto Sync Firstly
14 |
15 | You should disable auto sync to avoid any unexpected running.
16 |
17 | ### Manual Sync If Not Yet
18 |
You should at least sync once, so that at least one sync plan is generated and saved. If you have synced the vault before, there should be some sync plans already saved.
20 |
21 | ### Export To The File
22 |
Go to the plugin settings, scroll down to the section "Debug" -> "export sync plans", and click the button "Export". It would generate a new folder `_debug_remotely_save/` in your vault, and generate a file `sync_plans_hist_exported_on_{a_timestamp}.md` inside that folder.
24 |
25 | ## How To Read The Plans
26 |
Open the generated `sync_plans_hist_exported_on_{a_timestamp}.md`. You should see one JSON object, or multiple JSON objects. Every JSON object represents a sync plan.
28 |
29 | A sync plan looks like this:
30 |
31 | ```json
32 | {
33 | "ts": 1646960867560,
34 | "remoteType": "onedrive",
35 | "mixedStates": {
36 | "abc.md": {
37 | "key": "abc.md",
38 | "existRemote": true,
39 | "mtimeRemote": 1646566632000,
40 | "sizeRemote": 56797,
41 | "remoteEncryptedKey": "abc.md",
42 | "changeMtimeUsingMapping": true,
43 | "existLocal": true,
44 | "mtimeLocal": 1646566632000,
45 | "sizeLocal": 56797,
46 | "decision": "skipUploading",
47 | "decisionBranch": 1
48 | },
49 | "New folder/": {
50 | "key": "New folder/",
51 | "deltimeRemote": 1646925354372,
52 | "existLocal": false,
53 | "existRemote": false,
54 | "decision": "keepRemoteDelHistFolder",
55 | "decisionBranch": 9
56 | }
57 | }
58 | }
59 | ```
60 |
We usually care about the `mixedStates` property. As you may guess, every item in `mixedStates` represents a file or a folder.
62 |
63 | We should find out the file/folder we are interested in (or we believe something goes wrong), then checkout the following properties:
64 |
65 | ```
66 | decision
67 | What decision is made.
68 |
69 | decisionBranch
70 | It's a mark of the actual logic in the sync code. Useful to debug.
71 |
72 | existRemote
73 | Does the file/folder exist on the remote service?
74 |
75 | mtimeRemote
The "last modified time" on the remote service.
77 |
78 | deltimeRemote
79 | The "deletion time" on the remote record.
80 |
81 | existLocal
82 | Does the file/folder exist locally?
83 |
84 | mtimeLocal
The max of "last modified time" and "creation time" locally.
86 |
87 | deltimeLocal
88 | The "deletion time" locally.
89 | ```
90 |
The `decision` SHOULD be determined by the modified times and deletion times, by the logic described in [the doc of sync algorithm](../sync_algorithm_v2.md). In short, we collect four timestamps (`mtimeRemote`, `deltimeRemote`, `mtimeLocal`, `deltimeLocal`), and respect the max timestamp and its corresponding operation.
92 |
93 | ## Common Issues
94 |
Some users report that their "last modified time"s or "creation time"s are not set correctly by the operating system. In this case, the plugin cannot do anything, because it determines the sync plan by comparing the timestamps. It's suggested to check the settings of the operating system, or check whether other programs are doing something to the files.
96 |
--------------------------------------------------------------------------------
/docs/dropbox_review_material/README.md:
--------------------------------------------------------------------------------
1 | ## Material Source
2 |
3 | To apply for the production use of Dropbox API, some descriptions are needed to be submitted to Dropbox. Coincidently, this can also be served as a "readme" to use this plugin with Dropbox.
4 |
5 | ## Some Backgrounds
6 |
7 | 1. Obsidian allows users to take notes using markdown files.
2. The "app", remotely-save, is an unofficial Obsidian plugin, helping users download and upload ("sync") their notes from and to Dropbox.
9 | 3. Technically, remotely-save is developed using web technologies.
10 |
11 | ## Api Usages
12 |
13 | 1. remotely-save uses "App folder", to avoid unnecessary accessing users' other files.
14 | 2. remotely-save uses "account_info.read", to get the displayed username, so that users know which of their accounts has been logged in after OAuth steps.
15 | 3. remotely-save uses "files.content.read", so that it can read the "last modified time", and the content of files on Dropbox. Under some conditions, the plugin would download the files. For example, the plugin would compare the last modified timestamps of the file in the local device and that on Dropbox, and if the timestamp on Dropbox is larger, the plugin would download the "newer" file from Dropbox to local.
16 | 4. remotely-save uses "files.content.write", so that it can upload or overwrite the content of files on Dropbox. Under some conditions, the plugin would do that. For example, the plugin would compare the last modified timestamps of the file in the local device and that on Dropbox, and if the timestamp in the local device is larger, the plugin would upload the "newer" file from local to Dropbox, and overwrite that file on Dropbox.
17 |
18 | ## Steps
19 |
20 | Here are the steps to see the functionality of remotely-save.
21 |
22 | Most steps have screenshots.
23 |
24 | 1. Download the note-taking app Obsidian (Windows or Mac versions are both ok) from its official website: https://obsidian.md/ . It's free to download and use. Then install it.
25 | 2. Open Obsidian, click the "Create" button under "Create new vault".
26 | 3. Pick a vault name, "example-vault", and choose a location, then click "Create".
27 | 
4. Close any update notice if prompted.
29 | 5. Create a new note by clicking a button on the left. And write something on the note.
30 | 
6. Click "setting" (a gear icon) on the bottom left of the sidebar.
32 | 
33 | 7. In the settings panel, go to the "Community plugins" page, turn off the safe mode, and confirm to turn off the safe mode. Then click the "Browse" button for community plugins.
34 | 
8. Search "Remotely Sync" and install it from the results.
36 | 
9. After successfully installing the plugin, go back to the "Community plugins" page, and enable the plugin.
38 | 
39 | 10. Go to newly added "Remotely Sync" settings, select "Dropbox" in "Choose service", and click the "Auth" button.
40 | 
41 | 11. The standard auth flow address is shown, users should click the address, and finish the auth steps on the website. Finally, the Dropbox website should automatically redirect users back to the Obsidian app.
42 | 
43 | 12. The "Auth" button disappears. A new "Revoke Auth" appears.
44 | 
45 | 13. Go back to the main interface of Obsidian, a new "switch icon" should appear on the left sidebar. Click this, then the plugin would trigger the sync progress. It would compare meta info of local files and remote files (on Dropbox), and decide to download some files and/or upload some files.
46 | 
47 | 14. Create, edit, remove some notes, and repeat step 13, the files on Dropbox should also change to reflect the changes locally.
48 |
--------------------------------------------------------------------------------
/docs/sync_algorithm_v1.md:
--------------------------------------------------------------------------------
1 | # Sync Algorithm
2 |
3 | ## Sources
4 |
5 | We have three record sources:
6 |
7 | 1. Local files. By scanning all files in the vault locally. Actually Obsidian provides an api directly returning this.
8 | 2. Remote files. By scanning all files on the remote service. Some services provide an api directly returning this, and some other services require the plugin scanning the folders recursively.
9 | 3. Local "delete-or-rename" history. It's recorded by using Obsidian's tracking api. So if users delete or rename files/folders outside Obsidian, we could do nothing.
10 |
11 | Assuming all sources are reliable.
12 |
13 | ## Deal with them
14 |
15 | We list all combinations mutually exclusive and collectively exhaustive.
16 |
17 | | ID | Remote Files | Local files | Local delete rename history | Extra | Decision |
18 | | --- | ------------ | ----------- | --------------------------- | ----------------------------------------------------------------------------- | -------------------------------------------------------------------------------------------------------- |
19 | | 1 | exist | exist | ignore | mtime_remote > mtime_local | download remote file, create local folder if not exists, clear local history if exists |
20 | | 2 | exist | exist | ignore | mtime_remote < mtime_local | upload local file, create remote folder if not exists, clear local history if exists |
21 | | 3 | exist | exist | ignore | mtime_remote === mtime_local && password === "" && size_remote === size_local | clear local history if exists (the file was synced and no changes after last sync) |
22 | | 4 | exist | exist | ignore | mtime_remote === mtime_local && password === "" && size_remote !== size_local | upload local file, clear local history if exists (we always prefer local to remote) |
23 | | 5 | exist | exist | ignore | mtime_remote === mtime_local && password !== "" | clear local history if exists (in encryption mode, file sizes are unequal. we can only rely on mtime(s)) |
24 | | 6 | exist | exist | ignore | If local is a folder. mtime_local === undefined | clear local history if exists. TODO: what if a folder and a previous file share the same name? |
25 | | 7 | exist | not exist | exist | mtime_remote >= delete_time_local | download remote file, create folder if not exists |
26 | | 8 | exist | not exist | exist | mtime_remote < delete_time_local | delete remote file, clear local history |
27 | | 9 | exist | not exist | not exist | | download remote file, create folder if not exists |
28 | | 10 | not exist | exist | ignore | local may be folder or file | upload local files recursively, create remote folder if not exists, clear local history if exists |
29 | | 11 | not exist | not exist | ignore | | clear local history if exists |
30 |
--------------------------------------------------------------------------------
/src/debugMode.ts:
--------------------------------------------------------------------------------
1 | import { TAbstractFile, TFolder, TFile, Vault } from "obsidian";
2 |
3 | import type { SyncPlanType } from "./sync";
4 | import {
5 | readAllSyncPlanRecordTextsByVault,
6 | readAllLogRecordTextsByVault,
7 | } from "./localdb";
8 | import type { InternalDBs } from "./localdb";
9 | import { mkdirpInVault } from "./misc";
10 | import {
11 | DEFAULT_DEBUG_FOLDER,
12 | DEFAULT_LOG_HISTORY_FILE_PREFIX,
13 | DEFAULT_SYNC_PLANS_HISTORY_FILE_PREFIX,
14 | FileOrFolderMixedState,
15 | } from "./baseTypes";
16 |
17 | import { log } from "./moreOnLog";
18 |
19 | const turnSyncPlanToTable = (record: string) => {
20 | const syncPlan: SyncPlanType = JSON.parse(record);
21 | const { ts, tsFmt, remoteType, mixedStates } = syncPlan;
22 |
23 | type allowedHeadersType = keyof FileOrFolderMixedState;
24 | const headers: allowedHeadersType[] = [
25 | "key",
26 | "remoteEncryptedKey",
27 | "existLocal",
28 | "sizeLocal",
29 | "sizeLocalEnc",
30 | "mtimeLocal",
31 | "deltimeLocal",
32 | "changeLocalMtimeUsingMapping",
33 | "existRemote",
34 | "sizeRemote",
35 | "sizeRemoteEnc",
36 | "mtimeRemote",
37 | "deltimeRemote",
38 | "changeRemoteMtimeUsingMapping",
39 | "decision",
40 | "decisionBranch",
41 | ];
42 |
43 | const lines = [
44 | `ts: ${ts}${tsFmt !== undefined ? " / " + tsFmt : ""}`,
45 | `remoteType: ${remoteType}`,
46 | `| ${headers.join(" | ")} |`,
47 | `| ${headers.map((x) => "---").join(" | ")} |`,
48 | ];
49 | for (const [k1, v1] of Object.entries(syncPlan.mixedStates)) {
50 | const k = k1 as string;
51 | const v = v1 as FileOrFolderMixedState;
52 | const singleLine = [];
53 | for (const h of headers) {
54 | const field = v[h];
55 | if (field === undefined) {
56 | singleLine.push("");
57 | continue;
58 | }
59 | if (
60 | h === "mtimeLocal" ||
61 | h === "deltimeLocal" ||
62 | h === "mtimeRemote" ||
63 | h === "deltimeRemote"
64 | ) {
65 | const fmt = v[(h + "Fmt") as allowedHeadersType] as string;
66 | const s = `${field}${fmt !== undefined ? " / " + fmt : ""}`;
67 | singleLine.push(s);
68 | } else {
69 | singleLine.push(field);
70 | }
71 | }
72 | lines.push(`| ${singleLine.join(" | ")} |`);
73 | }
74 |
75 | return lines.join("\n");
76 | };
77 |
78 | export const exportVaultSyncPlansToFiles = async (
79 | db: InternalDBs,
80 | vault: Vault,
81 | vaultRandomID: string,
82 | toFormat: "table" | "json" = "json"
83 | ) => {
84 | await mkdirpInVault(DEFAULT_DEBUG_FOLDER, vault);
85 | const records = await readAllSyncPlanRecordTextsByVault(db, vaultRandomID);
86 | let md = "";
87 | if (records.length === 0) {
88 | md = "No sync plans history found";
89 | } else {
90 | if (toFormat === "json") {
91 | md =
92 | "Sync plans found:\n\n" +
93 | records.map((x) => "```json\n" + x + "\n```\n").join("\n");
94 | } else if (toFormat === "table") {
95 | md =
96 | "Sync plans found:\n\n" + records.map(turnSyncPlanToTable).join("\n\n");
97 | } else {
98 | const _: never = toFormat;
99 | }
100 | }
101 | const ts = Date.now();
102 | const filePath = `${DEFAULT_DEBUG_FOLDER}${DEFAULT_SYNC_PLANS_HISTORY_FILE_PREFIX}${ts}.md`;
103 | await vault.create(filePath, md, {
104 | mtime: ts,
105 | });
106 | };
107 |
108 | export const exportVaultLoggerOutputToFiles = async (
109 | db: InternalDBs,
110 | vault: Vault,
111 | vaultRandomID: string
112 | ) => {
113 | await mkdirpInVault(DEFAULT_DEBUG_FOLDER, vault);
114 | const records = await readAllLogRecordTextsByVault(db, vaultRandomID);
115 | let md = "";
116 | if (records.length === 0) {
117 | md = "No logger history found.";
118 | } else {
119 | md =
120 | "Logger history found:\n\n" +
121 | "```text\n" +
122 | records.join("\n") +
123 | "\n```\n";
124 | }
125 | const ts = Date.now();
126 | const filePath = `${DEFAULT_DEBUG_FOLDER}${DEFAULT_LOG_HISTORY_FILE_PREFIX}${ts}.md`;
127 | await vault.create(filePath, md, {
128 | mtime: ts,
129 | });
130 | };
131 |
--------------------------------------------------------------------------------
/src/obsFolderLister.ts:
--------------------------------------------------------------------------------
1 | import { Vault, Stat, ListedFiles } from "obsidian";
2 | import { Queue } from "@fyears/tsqueue";
3 | import chunk from "lodash/chunk";
4 | import flatten from "lodash/flatten";
5 | import { statFix } from "./misc";
6 |
// Stat-like record for one entry discovered under the config / trash
// folders by listFilesInObsFolder.
export interface ObsConfigDirFileType {
  key: string; // vault-relative path; folders carry a trailing "/"
  ctime: number;
  mtime: number;
  size: number;
  type: "folder" | "file";
}
14 |
15 | const isFolderToSkip = (x: string) => {
16 | let specialFolders = [".git", ".github", ".gitlab", ".svn", "node_modules", ".DS_Store"];
17 | for (const iterator of specialFolders) {
18 | if (
19 | x === iterator ||
20 | x === `${iterator}/` ||
21 | x.endsWith(`/${iterator}`) ||
22 | x.endsWith(`/${iterator}/`)
23 | ) {
24 | return true;
25 | }
26 | }
27 | return false;
28 | };
29 |
30 | const isPluginDirItself = (x: string, pluginId: string) => {
31 | return (
32 | x === "remotely-secure" ||
33 | x === "remotely-secure/" ||
34 | x.endsWith("/remotely-secure") ||
35 | x.endsWith("/remotely-secure/")
36 | );
37 | };
38 |
39 | const isLikelyPluginSubFiles = (x: string) => {
40 | const reqFiles = [
41 | "data.json",
42 | "main.js",
43 | "manifest.json",
44 | ".gitignore",
45 | "styles.css",
46 | ];
47 | for (const iterator of reqFiles) {
48 | if (x === iterator || x.endsWith(`/${iterator}`)) {
49 | return true;
50 | }
51 | }
52 | return false;
53 | };
54 |
55 | export const isInsideObsFolder = (x: string, configDir: string) => {
56 | if (!configDir.startsWith(".")) {
57 | throw Error(`configDir should starts with . but we get ${configDir}`);
58 | }
59 | return x === configDir || x.startsWith(`${configDir}/`);
60 | };
61 |
62 | export const isInsideTrashFolder = (path: string) => {
63 | return path.startsWith(".trash");
64 | }
65 |
/**
 * Recursively list everything under the vault's config dir and, optionally,
 * the ".trash" folder, using a BFS queue over vault.adapter. Folder keys
 * get a trailing "/". VCS/system folders are skipped, and inside this
 * plugin's own folder only the standard plugin files are kept.
 */
export const listFilesInObsFolder = async (
  vault: Vault,
  pluginId: string,
  syncTrash: boolean
) => {
  let searchFolders = [vault.configDir]
  // NOTE(review): parses as (await stat('.trash')) != null — presumably
  // stat resolves to null when the path is missing; confirm against the
  // DataAdapter docs.
  if (syncTrash && await vault.adapter.stat('.trash') != null) {
    searchFolders.push('.trash');
  }
  const q = new Queue(searchFolders);
  const CHUNK_SIZE = 10; // max concurrent stat/list requests per chunk
  const contents: ObsConfigDirFileType[] = [];
  while (q.length > 0) {
    // drain everything currently queued into one batch
    const itemsToFetch = [];
    while (q.length > 0) {
      itemsToFetch.push(q.pop());
    }

    // fetch the batch in chunks so at most CHUNK_SIZE requests run at once
    const itemsToFetchChunks = chunk(itemsToFetch, CHUNK_SIZE);
    for (const singleChunk of itemsToFetchChunks) {
      const r = singleChunk.map(async (fsEntry) => {
        const statRes = await statFix(vault, fsEntry);

        const isFolder = statRes.type === "folder";
        let children: ListedFiles = undefined;
        if (isFolder) {
          children = await vault.adapter.list(fsEntry);
        }

        return {
          itself: {
            // folders are keyed with a trailing slash
            key: isFolder ? `${fsEntry}/` : fsEntry,
            ...statRes,
          } as ObsConfigDirFileType,
          children: children,
        };
      });
      const r2 = flatten(await Promise.all(r));

      // record each entry and enqueue its children for the next round
      for (const iter of r2) {
        contents.push(iter.itself);
        const isInsideSelfPlugin = isPluginDirItself(iter.itself.key, pluginId);
        if (iter.children !== undefined) {
          for (const iter2 of iter.children.folders) {
            if (isFolderToSkip(iter2)) {
              continue;
            }

            if (isInsideSelfPlugin && !isLikelyPluginSubFiles(iter2)) {
              // special treatment for remotely-secure folder
              continue;
            }
            q.push(iter2);
          }
          for (const iter2 of iter.children.files) {
            if (isFolderToSkip(iter2)) {
              continue;
            }
            if (isInsideSelfPlugin && !isLikelyPluginSubFiles(iter2)) {
              // special treatment for remotely-secure folder
              continue;
            }
            q.push(iter2);
          }
        }
      }
    }
  }
  return contents;
};
136 |
--------------------------------------------------------------------------------
/src/encrypt.ts:
--------------------------------------------------------------------------------
1 | import { base32, base64url } from "rfc4648";
2 | import { bufferToArrayBuffer, hexStringToTypedArray } from "./misc";
3 |
4 | import { log } from "./moreOnLog";
5 |
6 | const DEFAULT_ITER = 20000;
7 |
8 | const getKeyFromPassword = async (
9 | salt: Uint8Array,
10 | password: string,
11 | rounds: number = DEFAULT_ITER
12 | ) => {
13 | const k1 = await window.crypto.subtle.importKey(
14 | "raw",
15 | new TextEncoder().encode(password),
16 | { name: "PBKDF2" },
17 | false,
18 | ["deriveKey", "deriveBits"]
19 | );
20 |
21 | const k2 = await window.crypto.subtle.deriveBits(
22 | {
23 | name: "PBKDF2",
24 | salt: salt,
25 | iterations: rounds,
26 | hash: "SHA-256",
27 | },
28 | k1,
29 | 256
30 | );
31 |
32 | return k2;
33 | };
34 |
35 | export const encryptArrayBuffer = async (
36 | arrBuf: ArrayBuffer,
37 | password: string,
38 | rounds: number = DEFAULT_ITER
39 | ) => {
40 | let salt = window.crypto.getRandomValues(new Uint8Array(16));
41 |
42 | const derivedKey = await getKeyFromPassword(salt, password, rounds);
43 | // 12 bytes or 96 bits per GCM spec https://developer.mozilla.org/en-US/docs/Web/API/AesGcmParams
44 | const iv = window.crypto.getRandomValues(new Uint8Array(12));
45 |
46 | const keyCrypt = await window.crypto.subtle.importKey(
47 | "raw",
48 | derivedKey,
49 | { name: "AES-GCM" },
50 | false,
51 | ["encrypt", "decrypt"]
52 | );
53 |
54 | const enc = (await window.crypto.subtle.encrypt(
55 | { name: "AES-GCM", iv },
56 | keyCrypt,
57 | arrBuf
58 | )) as ArrayBuffer;
59 |
60 | const res = new Uint8Array([...salt, ...iv, ...new Uint8Array(enc)]);
61 |
62 | return bufferToArrayBuffer(res);
63 | };
64 |
65 | export const decryptArrayBuffer = async (
66 | arrBuf: ArrayBuffer,
67 | password: string,
68 | rounds: number = DEFAULT_ITER
69 | ) => {
70 | const salt = arrBuf.slice(0, 16); // first 16 bytes are salt
71 | const iv = arrBuf.slice(16, 28); // next 12 bytes are IV
72 | const cipherText = arrBuf.slice(28); // final bytes are ciphertext
73 | const key = await getKeyFromPassword(
74 | new Uint8Array(salt),
75 | password,
76 | rounds
77 | );
78 |
79 | const keyCrypt = await window.crypto.subtle.importKey(
80 | "raw",
81 | key,
82 | { name: "AES-GCM" },
83 | false,
84 | ["encrypt", "decrypt"]
85 | );
86 |
87 | const dec = (await window.crypto.subtle.decrypt(
88 | { name: "AES-GCM", iv },
89 | keyCrypt,
90 | cipherText
91 | )) as ArrayBuffer;
92 |
93 | return dec;
94 | };
95 |
96 | export const encryptStringToBase32 = async (
97 | text: string,
98 | password: string,
99 | rounds: number = DEFAULT_ITER
100 | ) => {
101 | const enc = await encryptArrayBuffer(
102 | bufferToArrayBuffer(new TextEncoder().encode(text)),
103 | password,
104 | rounds
105 | );
106 | return base32.stringify(new Uint8Array(enc), { pad: false });
107 | };
108 |
109 | export const decryptBase32ToString = async (
110 | text: string,
111 | password: string,
112 | rounds: number = DEFAULT_ITER
113 | ) => {
114 | return new TextDecoder().decode(
115 | await decryptArrayBuffer(
116 | bufferToArrayBuffer(base32.parse(text, { loose: true })),
117 | password,
118 | rounds
119 | )
120 | );
121 | };
122 |
123 | export const encryptStringToBase64url = async (
124 | text: string,
125 | password: string,
126 | rounds: number = DEFAULT_ITER
127 | ) => {
128 | const enc = await encryptArrayBuffer(
129 | bufferToArrayBuffer(new TextEncoder().encode(text)),
130 | password,
131 | rounds
132 | );
133 | return base64url.stringify(new Uint8Array(enc), { pad: false });
134 | };
135 |
136 | export const decryptBase64urlToString = async (
137 | text: string,
138 | password: string,
139 | rounds: number = DEFAULT_ITER
140 | ) => {
141 | return new TextDecoder().decode(
142 | await decryptArrayBuffer(
143 | bufferToArrayBuffer(base64url.parse(text, { loose: true })),
144 | password,
145 | rounds
146 | )
147 | );
148 | };
149 |
150 | export const getSizeFromOrigToEnc = (x: number) => {
151 | if (x < 0 || Number.isNaN(x) || !Number.isInteger(x)) {
152 | throw Error(`getSizeFromOrigToEnc: x=${x} is not a valid size`);
153 | }
154 | // plaintext size + salt + GCM authN tag + IV
155 | return x + 16 + 16 + 12;
156 | };
157 |
158 | // This is only used in tests, but should be fixed.
159 | export const getSizeFromEncToOrig = (x: number) => {
160 | if (x < 32 || Number.isNaN(x) || !Number.isInteger(x)) {
161 | throw Error(`getSizeFromEncToOrig: ${x} is not a valid size`);
162 | }
163 | if (x % 16 !== 0) {
164 | throw Error(
165 | `getSizeFromEncToOrig: ${x} is not a valid encrypted file size`
166 | );
167 | }
168 | return {
169 | minSize: ((x - 16) / 16 - 1) * 16,
170 | maxSize: ((x - 16) / 16 - 1) * 16 + 15,
171 | };
172 | };
173 |
--------------------------------------------------------------------------------
/src/baseTypes.ts:
--------------------------------------------------------------------------------
1 | /**
2 | * Only type defs here.
3 | * To avoid circular dependency.
4 | */
5 |
6 | import { Platform, requireApiVersion } from "obsidian";
7 | import type { LangType, LangTypeAndAuto } from "./i18n";
8 |
// Fallback MIME type for uploads whose real content type is unknown.
export const DEFAULT_CONTENT_TYPE = "application/octet-stream";

// Every remote service kind the plugin can sync with.
export type SUPPORTED_SERVICES_TYPE = "s3" | "webdav" | "dropbox" | "onedrive";

// Services whose files live under a configurable remote base directory
// (see the remoteBaseDir fields below); S3 is addressed by bucket instead.
export type SUPPORTED_SERVICES_TYPE_WITH_REMOTE_BASE_DIR =
  | "webdav"
  | "dropbox"
  | "onedrive";

// Connection settings for an S3-compatible endpoint.
export interface S3Config {
  s3Endpoint: string;
  s3Region: string;
  s3AccessKeyID: string;
  s3SecretAccessKey: string;
  s3BucketName: string;
  bypassCorsLocally?: boolean;
  partsConcurrency?: number; // presumably multipart-upload concurrency — confirm at call site
  forcePathStyle?: boolean; // path-style instead of virtual-hosted-style URLs
  disableS3MetadataSync: boolean;
}
29 |
// OAuth2 credentials and account state for Dropbox.
export interface DropboxConfig {
  accessToken: string;
  clientID: string;
  refreshToken: string;
  accessTokenExpiresInSeconds: number;
  accessTokenExpiresAtTime: number; // absolute expiry — presumably epoch ms, confirm
  accountID: string;
  username: string;
  credentialsShouldBeDeletedAtTime?: number; // see OAUTH2_FORCE_EXPIRE_MILLISECONDS below
  remoteBaseDir?: string; // falls back to the vault name when unset (see RemoteClient)
}

export type WebdavAuthType = "digest" | "basic";
// PROPFIND listing depth; "auto_*" values are detected, "manual_*" are
// user-chosen — TODO confirm against the webdav client code.
export type WebdavDepthType =
  | "auto_unknown"
  | "auto_1"
  | "auto_infinity"
  | "manual_1"
  | "manual_infinity";

// Connection settings for a WebDAV server.
export interface WebdavConfig {
  address: string;
  username: string;
  password: string;
  authType: WebdavAuthType;
  manualRecursive: boolean; // deprecated in 0.3.6, use depth
  depth?: WebdavDepthType;
  remoteBaseDir?: string; // falls back to the vault name when unset
}
59 |
// OAuth2 credentials and account state for OneDrive (personal).
export interface OnedriveConfig {
  accessToken: string;
  clientID: string;
  authority: string;
  refreshToken: string;
  accessTokenExpiresInSeconds: number;
  accessTokenExpiresAtTime: number; // absolute expiry — presumably epoch ms, confirm
  deltaLink: string; // presumably the OneDrive delta-query resume token — confirm
  username: string;
  credentialsShouldBeDeletedAtTime?: number;
  remoteBaseDir?: string; // falls back to the vault name when unset
}

// Root of the plugin's persisted settings object.
export interface RemotelySavePluginSettings {
  // One config per supported service; `serviceType` selects the active one.
  s3: S3Config;
  webdav: WebdavConfig;
  dropbox: DropboxConfig;
  onedrive: OnedriveConfig;
  password: string; // E2E encryption password — presumably empty disables encryption; confirm
  serviceType: SUPPORTED_SERVICES_TYPE;
  debugEnabled?: boolean;
  // Scheduling knobs for automatic syncs (all in milliseconds).
  autoRunEveryMilliseconds?: number;
  initRunAfterMilliseconds?: number;
  syncOnSaveAfterMilliseconds?: number;
  syncOnRemoteChangesAfterMilliseconds?: number;
  agreeToUploadExtraMetadata?: boolean;
  concurrency?: number;
  syncConfigDir?: boolean;
  syncUnderscoreItems?: boolean;
  lang?: LangTypeAndAuto;
  logToDB?: boolean;
  skipSizeLargerThan?: number; // size threshold in bytes — TODO confirm unit
  enableStatusBarInfo: boolean;
  showLastSyncedOnly?: boolean;
  lastSynced?: number;
  trashLocal: boolean;
  syncTrash: boolean;
  syncBookmarks: boolean;

  /**
   * @deprecated
   */
  vaultRandomID?: string;
}
104 |
// A single file/folder entry as reported by a remote service.
export interface RemoteItem {
  key: string;
  lastModified: number;
  size: number;
  remoteType: SUPPORTED_SERVICES_TYPE;
  etag?: string; // only some services provide one
}

// obsidian:// URI actions registered by the plugin.
export const COMMAND_URI = "remotely-secure";
export const COMMAND_CALLBACK = "remotely-secure-cb";
// NOTE(review): this one uses the "remotely-sync" prefix while its siblings
// use "remotely-secure" — possibly a leftover from a rename, or kept
// deliberately because the OAuth redirect URI is registered externally with
// Microsoft; verify before changing.
export const COMMAND_CALLBACK_ONEDRIVE = "remotely-sync-cb-onedrive";
export const COMMAND_CALLBACK_DROPBOX = "remotely-secure-cb-dropbox";

// Query parameters carried by the callback URIs above.
export interface UriParams {
  func?: string;
  vault?: string;
  ver?: string;
  data?: string;
}

// 80 days — presumably the hard expiry after which stored OAuth2
// credentials are forcibly invalidated; confirm at the usage site.
export const OAUTH2_FORCE_EXPIRE_MILLISECONDS = 1000 * 60 * 60 * 24 * 80;
127 |
// Per-file sync decision, chosen by comparing the four timestamps
// (local/remote mtime, local/remote deltime) — see docs/sync_algorithm_v2.md.
type DecisionTypeForFile =
  | "skipUploading" // special, mtimeLocal === mtimeRemote
  | "uploadLocalDelHistToRemote" // "delLocalIfExists && delRemoteIfExists && cleanLocalDelHist && uploadLocalDelHistToRemote"
  | "keepRemoteDelHist" // "delLocalIfExists && delRemoteIfExists && cleanLocalDelHist && keepRemoteDelHist"
  | "uploadLocalToRemote" // "skipLocal && uploadLocalToRemote && cleanLocalDelHist && cleanRemoteDelHist"
  | "downloadRemoteToLocal"; // "downloadRemoteToLocal && skipRemote && cleanLocalDelHist && cleanRemoteDelHist"

// Decisions forced by the size threshold (skipSizeLargerThan).
type DecisionTypeForFileSize =
  | "skipUploadingTooLarge"
  | "skipDownloadingTooLarge"
  | "skipUsingLocalDelTooLarge"
  | "skipUsingRemoteDelTooLarge"
  | "errorLocalTooLargeConflictRemote"
  | "errorRemoteTooLargeConflictLocal";

// Folder-level decisions (only existence matters for folders).
type DecisionTypeForFolder =
  | "createFolder"
  | "uploadLocalDelHistToRemoteFolder"
  | "keepRemoteDelHistFolder"
  | "skipFolder";

// Union of every decision the sync planner can emit.
export type DecisionType =
  | DecisionTypeForFile
  | DecisionTypeForFileSize
  | DecisionTypeForFolder;

// Working record for one path while computing a sync plan: everything known
// about the entry on both sides plus the resulting decision.
export interface FileOrFolderMixedState {
  key: string;
  existLocal?: boolean;
  existRemote?: boolean;
  mtimeLocal?: number;
  mtimeRemote?: number;
  deltimeLocal?: number; // local deletion-history timestamp
  deltimeRemote?: number; // remote deletion-history timestamp
  sizeLocal?: number;
  sizeLocalEnc?: number; // size after encryption — relevant when a password is set
  sizeRemote?: number;
  sizeRemoteEnc?: number;
  changeRemoteMtimeUsingMapping?: boolean;
  changeLocalMtimeUsingMapping?: boolean;
  decision?: DecisionType;
  decisionBranch?: number; // which planner branch produced the decision
  syncDone?: "done";
  remoteEncryptedKey?: string; // encrypted form of `key` on the remote — confirm

  // Human-readable renderings of the timestamps above (for reports/UI).
  mtimeLocalFmt?: string;
  mtimeRemoteFmt?: string;
  deltimeLocalFmt?: string;
  deltimeRemoteFmt?: string;
}
178 |
// Minimum Obsidian API versions gating optional features.
export const API_VER_STAT_FOLDER = "0.13.27";
export const API_VER_REQURL = "0.13.26"; // desktop ver 0.13.26, iOS ver 1.1.1
export const API_VER_REQURL_ANDROID = "0.14.6"; // Android ver 1.2.1

// Whether this Obsidian build provides requestUrl (the CORS-bypassing HTTP
// API); Android gained it later than desktop/iOS, hence the split check.
export const VALID_REQURL =
  (!Platform.isAndroidApp && requireApiVersion(API_VER_REQURL)) ||
  (Platform.isAndroidApp && requireApiVersion(API_VER_REQURL_ANDROID));

// Locations/prefixes for exported debug artifacts.
export const DEFAULT_DEBUG_FOLDER = "_debug_remotely_save/";
export const DEFAULT_SYNC_PLANS_HISTORY_FILE_PREFIX =
  "sync_plans_hist_exported_on_";
export const DEFAULT_LOG_HISTORY_FILE_PREFIX = "log_hist_exported_on_";

// What initiated a sync run.
export type SyncTriggerSourceType = "manual" | "auto" | "dry" | "autoOnceInit";
193 |
--------------------------------------------------------------------------------
/docs/sync_algorithm_v2.md:
--------------------------------------------------------------------------------
1 | # Sync Algorithm V2
2 |
3 | ## Sources
4 |
5 | We have 4 record sources:
6 |
7 | 1. Local files. By scanning all files in the vault locally. Actually Obsidian provides an api directly returning this.
8 | 2. Remote files. By scanning all files on the remote service. Some services provide an api directly returning this, and some other services require the plugin scanning the folders recursively.
3. Local "delete-or-rename" history. It's recorded by using Obsidian's tracking api. Consequently, if users delete or rename files/folders outside Obsidian, those operations cannot be tracked.
10 | 4. Remote "delete" history. It's uploaded by the plugin in each sync.
11 |
12 | Assuming all sources are reliable.
13 |
14 | ## Deal with them
15 |
16 | We list all combinations mutually exclusive and collectively exhaustive.
17 |
18 | ### Files
19 |
20 | In short, we collect four timestamps, and respect the max timestamp and its corresponding operation.
21 |
22 | | t1 | t2 | t3 | t4 | local file to do | remote file to do | local del history to do | remote del history to do | equal to sync v2 branch |
23 | | -------------- | -------------- | -------------- | -------------- | ---------------- | ----------------- | ----------------------- | ------------------------ | ----------------------- |
24 | | mtime_remote | mtime_local | deltime_remote | deltime_local | del_if_exists | del_if_exists | clean | upload_local_del_history | |
25 | | mtime_local | mtime_remote | deltime_remote | deltime_local | del_if_exists | del_if_exists | clean | upload_local_del_history | |
26 | | mtime_remote | deltime_remote | mtime_local | deltime_local | del_if_exists | del_if_exists | clean | upload_local_del_history | |
27 | | deltime_remote | mtime_remote | mtime_local | deltime_local | del_if_exists | del_if_exists | clean | upload_local_del_history | |
28 | | mtime_local | deltime_remote | mtime_remote | deltime_local | del_if_exists | del_if_exists | clean | upload_local_del_history | |
29 | | deltime_remote | mtime_local | mtime_remote | deltime_local | del_if_exists | del_if_exists | clean | upload_local_del_history | 8 |
30 | | mtime_remote | mtime_local | deltime_local | deltime_remote | del_if_exists | del_if_exists | clean | keep | |
31 | | mtime_local | mtime_remote | deltime_local | deltime_remote | del_if_exists | del_if_exists | clean | keep | |
32 | | mtime_remote | deltime_local | mtime_local | deltime_remote | del_if_exists | del_if_exists | clean | keep | |
33 | | deltime_local | mtime_remote | mtime_local | deltime_remote | del_if_exists | del_if_exists | clean | keep | |
34 | | mtime_local | deltime_local | mtime_remote | deltime_remote | del_if_exists | del_if_exists | clean | keep | |
35 | | deltime_local | mtime_local | mtime_remote | deltime_remote | del_if_exists | del_if_exists | clean | keep | |
36 | | mtime_remote | deltime_remote | deltime_local | mtime_local | skip | upload_local | clean | clean | |
37 | | deltime_remote | mtime_remote | deltime_local | mtime_local | skip | upload_local | clean | clean | 10 |
38 | | mtime_remote | deltime_local | deltime_remote | mtime_local | skip | upload_local | clean | clean | |
39 | | deltime_local | mtime_remote | deltime_remote | mtime_local | skip | upload_local | clean | clean | |
40 | | deltime_remote | deltime_local | mtime_remote | mtime_local | skip | upload_local | clean | clean | 2;3;4;5;6 |
41 | | deltime_local | deltime_remote | mtime_remote | mtime_local | skip | upload_local | clean | clean | |
42 | | mtime_local | deltime_remote | deltime_local | mtime_remote | download_remote | skip | clean | clean | |
43 | | deltime_remote | mtime_local | deltime_local | mtime_remote | download_remote | skip | clean | clean | 7;9 |
44 | | mtime_local | deltime_local | deltime_remote | mtime_remote | download_remote | skip | clean | clean | |
45 | | deltime_local | mtime_local | deltime_remote | mtime_remote | download_remote | skip | clean | clean | |
46 | | deltime_remote | deltime_local | mtime_local | mtime_remote | download_remote | skip | clean | clean | 1;9 |
47 | | deltime_local | deltime_remote | mtime_local | mtime_remote | download_remote | skip | clean | clean | |
48 |
49 | ### Folders
50 |
We actually do not use any folders' metadata. Thus the only relevant information is their names; the mtime can safely be ignored.
52 |
1. Firstly, generate the plan for all files. If any file exists, then all of its parent folders should exist. If a should-exist folder doesn't exist locally, it should be created locally (recursively); if it doesn't exist remotely, it should be created remotely (recursively).
54 | 2. Then, a folder is deletable, if and only if all the following conditions meet:
55 |
56 | - it shows up in the remote deletion history
57 | - it's empty, or all its sub-folders are deletable
58 |
59 | Some examples:
60 |
61 | - A user deletes the folder in device 1, then syncs from the device 1, then creates the same-name folder in device 2, then syncs from the device 2. The folder is deleted (again), on device 2.
62 | - A user deletes the folder in device 1, then syncs from the device 1, then creates the same-name folder in device 2, **then create a new file inside it,** then syncs from the device 2. The folder is **kept** instead of deleted because of the new file, on device 2.
- A user deletes the folder on device 1, then syncs from device 1, then does not touch the same-name folder on device 2, then syncs from device 2. The folder and its untouched sub-files should be deleted on device 2.
64 |
--------------------------------------------------------------------------------
/tests/misc.test.ts:
--------------------------------------------------------------------------------
1 | import { expect } from "chai";
2 | import { JSDOM } from "jsdom";
3 | import * as misc from "../src/misc";
4 |
5 | describe("Misc: hidden file", () => {
6 | it("should find hidden file correctly", () => {
7 | let item = "";
8 | expect(misc.isHiddenPath(item)).to.be.false;
9 |
10 | item = ".";
11 | expect(misc.isHiddenPath(item)).to.be.false;
12 |
13 | item = "..";
14 | expect(misc.isHiddenPath(item)).to.be.false;
15 |
16 | item = "/x/y/z/../././../a/b/c";
17 | expect(misc.isHiddenPath(item)).to.be.false;
18 |
19 | item = ".hidden";
20 | expect(misc.isHiddenPath(item)).to.be.true;
21 |
22 | item = "_hidden_loose";
23 | expect(misc.isHiddenPath(item)).to.be.true;
24 | expect(misc.isHiddenPath(item, true, false)).to.be.false;
25 |
26 | item = "/sdd/_hidden_loose";
27 | expect(misc.isHiddenPath(item)).to.be.true;
28 |
29 | item = "what/../_hidden_loose/what/what/what";
30 | expect(misc.isHiddenPath(item)).to.be.true;
31 |
32 | item = "what/../_hidden_loose/what/what/what";
33 | expect(misc.isHiddenPath(item, true, false)).to.be.false;
34 |
35 | item = "what/../_hidden_loose/../.hidden/what/what/what";
36 | expect(misc.isHiddenPath(item, true, false)).to.be.true;
37 |
38 | item = "what/../_hidden_loose/../.hidden/what/what/what";
39 | expect(misc.isHiddenPath(item, false, true)).to.be.false;
40 |
41 | item = "what/_hidden_loose/what/what/what";
42 | expect(misc.isHiddenPath(item, false, true)).to.be.true;
43 | expect(misc.isHiddenPath(item, true, false)).to.be.false;
44 |
45 | item = "what/.hidden/what/what/what";
46 | expect(misc.isHiddenPath(item, false, true)).to.be.false;
47 | expect(misc.isHiddenPath(item, true, false)).to.be.true;
48 | });
49 | });
50 |
51 | describe("Misc: get folder levels", () => {
52 | it("should ignore empty path", () => {
53 | const item = "";
54 | expect(misc.getFolderLevels(item)).to.be.empty;
55 | });
56 |
57 | it("should ignore single file", () => {
58 | const item = "xxx";
59 | expect(misc.getFolderLevels(item)).to.be.empty;
60 | });
61 |
62 | it("should detect path ending with /", () => {
63 | const item = "xxx/";
64 | const res = ["xxx"];
65 | expect(misc.getFolderLevels(item)).to.deep.equal(res);
66 | });
67 |
68 | it("should correctly split folders and files", () => {
69 | const item = "xxx/yyy/zzz.md";
70 | const res = ["xxx", "xxx/yyy"];
71 | expect(misc.getFolderLevels(item)).to.deep.equal(res);
72 |
73 | const item2 = "xxx/yyy/zzz";
74 | const res2 = ["xxx", "xxx/yyy"];
75 | expect(misc.getFolderLevels(item2)).to.deep.equal(res2);
76 |
77 | const item3 = "xxx/yyy/zzz/";
78 | const res3 = ["xxx", "xxx/yyy", "xxx/yyy/zzz"];
79 | expect(misc.getFolderLevels(item3)).to.deep.equal(res3);
80 | });
81 |
82 | it("should correctly add ending slash if required", () => {
83 | const item = "xxx/yyy/zzz.md";
84 | const res = ["xxx/", "xxx/yyy/"];
85 | expect(misc.getFolderLevels(item, true)).to.deep.equal(res);
86 |
87 | const item2 = "xxx/yyy/zzz";
88 | const res2 = ["xxx/", "xxx/yyy/"];
89 | expect(misc.getFolderLevels(item2, true)).to.deep.equal(res2);
90 |
91 | const item3 = "xxx/yyy/zzz/";
92 | const res3 = ["xxx/", "xxx/yyy/", "xxx/yyy/zzz/"];
93 | expect(misc.getFolderLevels(item3, true)).to.deep.equal(res3);
94 | });
95 |
96 | it("should treat path starting with / correctly", () => {
97 | const item = "/xxx/yyy/zzz.md";
98 | const res = ["/xxx", "/xxx/yyy"];
99 | expect(misc.getFolderLevels(item)).to.deep.equal(res);
100 |
101 | const item2 = "/xxx/yyy/zzz";
102 | const res2 = ["/xxx", "/xxx/yyy"];
103 | expect(misc.getFolderLevels(item2)).to.deep.equal(res2);
104 |
105 | const item3 = "/xxx/yyy/zzz/";
106 | const res3 = ["/xxx", "/xxx/yyy", "/xxx/yyy/zzz"];
107 | expect(misc.getFolderLevels(item3)).to.deep.equal(res3);
108 |
109 | const item4 = "/xxx";
110 | const res4 = [] as string[];
111 | expect(misc.getFolderLevels(item4)).to.deep.equal(res4);
112 |
113 | const item5 = "/";
114 | const res5 = [] as string[];
115 | expect(misc.getFolderLevels(item5)).to.deep.equal(res5);
116 | });
117 | });
118 |
119 | describe("Misc: get parent folder", () => {
120 | it("should treat empty path correctly", () => {
121 | const item = "";
122 | expect(misc.getParentFolder(item)).equals("/");
123 | });
124 |
125 | it("should treat one level path correctly", () => {
126 | let item = "abc/";
127 | expect(misc.getParentFolder(item)).equals("/");
128 | item = "/efg/";
129 | expect(misc.getParentFolder(item)).equals("/");
130 | });
131 |
132 | it("should treat more levels path correctly", () => {
133 | let item = "abc/efg";
134 | expect(misc.getParentFolder(item)).equals("abc/");
135 | item = "/hij/klm/";
136 | expect(misc.getParentFolder(item)).equals("/hij/");
137 | });
138 | });
139 |
140 | describe("Misc: get dirname", () => {
141 | it("should return itself for folder", async () => {
142 | const x = misc.getPathFolder("ssss/");
143 | // console.log(x)
144 | expect(x).to.equal("ssss/");
145 | });
146 |
147 | it("should return folder for file", async () => {
148 | const x = misc.getPathFolder("sss/yyy");
149 | // console.log(x)
150 | expect(x).to.equal("sss/");
151 | });
152 |
153 | it("should treat / specially", async () => {
154 | const x = misc.getPathFolder("/");
155 | expect(x).to.equal("/");
156 |
157 | const y = misc.getPathFolder("/abc");
158 | expect(y).to.equal("/");
159 | });
160 | });
161 |
describe("Misc: extract svg", () => {
  beforeEach(function () {
    // extractSvgSub needs DOM APIs, so provide them via jsdom under node.
    const fakeBrowser = new JSDOM("");
    global.window = fakeBrowser.window as any;
  });

  it("should extract rect from svg correctly", () => {
    // NOTE(review): both literals are empty strings here, so this only
    // asserts extractSvgSub("") === "" — the SVG markup the test name
    // promises appears to have been stripped (likely by markup-unaware
    // tooling); confirm against repository history before relying on it.
    const x = "";
    const y = misc.extractSvgSub(x);
    // console.log(x)
    expect(y).to.equal("");
  });
});
175 |
176 | describe("Misc: get split ranges", () => {
177 | it("should deal with big parts", () => {
178 | const k = misc.getSplitRanges(10, 20);
179 | const k2: misc.SplitRange[] = [
180 | {
181 | partNum: 1,
182 | start: 0,
183 | end: 10,
184 | },
185 | ];
186 | expect(k).to.deep.equal(k2);
187 | });
188 |
189 | it("should deal with 0 remainder", () => {
190 | const k = misc.getSplitRanges(20, 10);
191 | const k2: misc.SplitRange[] = [
192 | {
193 | partNum: 1,
194 | start: 0,
195 | end: 10,
196 | },
197 | {
198 | partNum: 2,
199 | start: 10,
200 | end: 20,
201 | },
202 | ];
203 | expect(k).to.deep.equal(k2);
204 | });
205 |
206 | it("should deal with not-0 remainder", () => {
207 | const k = misc.getSplitRanges(25, 10);
208 | const k2: misc.SplitRange[] = [
209 | {
210 | partNum: 1,
211 | start: 0,
212 | end: 10,
213 | },
214 | {
215 | partNum: 2,
216 | start: 10,
217 | end: 20,
218 | },
219 | {
220 | partNum: 3,
221 | start: 20,
222 | end: 25,
223 | },
224 | ];
225 | expect(k).to.deep.equal(k2);
226 | });
227 | });
228 |
229 | describe("Misc: at which level", () => {
230 | it("should throw error on some parameters", () => {
231 | expect(() => misc.atWhichLevel(undefined)).to.throw();
232 | expect(() => misc.atWhichLevel("")).to.throw();
233 | expect(() => misc.atWhichLevel("..")).to.throw();
234 | expect(() => misc.atWhichLevel(".")).to.throw();
235 | expect(() => misc.atWhichLevel("/")).to.throw();
236 | expect(() => misc.atWhichLevel("/xxyy")).to.throw();
237 | });
238 |
239 | it("should treat folders correctly", () => {
240 | expect(misc.atWhichLevel("x/")).to.be.equal(1);
241 | expect(misc.atWhichLevel("x/y/")).to.be.equal(2);
242 | });
243 |
244 | it("should treat files correctly", () => {
245 | expect(misc.atWhichLevel("x.md")).to.be.equal(1);
246 | expect(misc.atWhichLevel("x/y.md")).to.be.equal(2);
247 | expect(misc.atWhichLevel("x/y/z.md")).to.be.equal(3);
248 | });
249 | });
250 |
251 | describe("Misc: special char for dir", () => {
252 | it("should return false for normal string", () => {
253 | expect(misc.checkHasSpecialCharForDir("")).to.be.false;
254 | expect(misc.checkHasSpecialCharForDir("xxx")).to.be.false;
255 | expect(misc.checkHasSpecialCharForDir("yyy_xxx")).to.be.false;
256 | expect(misc.checkHasSpecialCharForDir("yyy.xxx")).to.be.false;
257 | expect(misc.checkHasSpecialCharForDir("yyy?xxx")).to.be.false;
258 | });
259 |
260 | it("should return true for special cases", () => {
261 | expect(misc.checkHasSpecialCharForDir("?")).to.be.true;
262 | expect(misc.checkHasSpecialCharForDir("/")).to.be.true;
263 | expect(misc.checkHasSpecialCharForDir("\\")).to.be.true;
264 | expect(misc.checkHasSpecialCharForDir("xxx/yyy")).to.be.true;
265 | expect(misc.checkHasSpecialCharForDir("xxx\\yyy")).to.be.true;
266 | expect(misc.checkHasSpecialCharForDir("xxx?yyy")).to.be.true;
267 | });
268 | });
269 |
--------------------------------------------------------------------------------
/docs/services_connectable_or_not.md:
--------------------------------------------------------------------------------
1 | # services connectability
2 |
3 | Here is an overview of the connectability ("connectable" or "not connectable" or "in the plan" or "never") to some services by this plugin.
4 |
5 | The plugin works under the browser environment in Obsidian, so CORS is an issue. Obsidian starts to provide a rich API `requestUrl` for desktop version >= 0.13.25, mobile >= 1.1.1 to bypass the CORS issue. But if the users are still using an older version of Obsidian, they need to configure CORS on server.
6 |
7 | The list is for information purposes only.
8 |
9 | | Service | Connectable | by S3 | by WebDAV | by other protocol | can bypass CORS issue in latest Obsidian | need CORS config in old Obsidian |
10 | | ------------------------------------------------------------------------------------- | ----------- | ----- | --------- | ---------------------------------------------------------------- | ---------------------------------------- | ------------------------------------------------ |
11 | | Amazon S3 | Yes | Yes | | | Yes | [CORS config needed.](./s3_cors_configure.md) |
12 | | Tencent Cloud - Cloud Object Storage (COS) 腾讯云对象存储 | Yes | Yes | | | Yes | CORS config needed. |
13 | | Alibaba Cloud - Object Storage Service 阿里云对象存储 | Yes | Yes | | | Yes | CORS config needed. |
| Backblaze B2 Cloud Storage                                                            | Yes         | Yes   |           |                                                                  | Yes                                      | Its CORS rules don't allow non-http(s) origins.  |
15 | | [Wasabi](https://wasabi.com) | ? | ? | | | | |
16 | | [filebase](https://filebase.com/) | Yes | Yes | | | Yes | CORS config needed. |
17 | | QingStor 青云 | ? | ? | | | | |
18 | | [MinIO](https://min.io/) | ? | ? | | | | |
19 | | [WsgiDAV](https://github.com/mar10/wsgidav) | Yes | | Yes | | Yes | CORS rules can be set. |
20 | | [Nginx `ngx_http_dav_module`](http://nginx.org/en/docs/http/ngx_http_dav_module.html) | Yes? | | Yes? | | Yes? | ? |
21 | | NextCloud | Yes? | | Yes? | | Yes? | No CORS config by default. |
22 | | OwnCloud | Yes? | | Yes? | | Yes? | No CORS config by default. |
23 | | Seafile | Yes | | Yes | | Yes? | No CORS config by default. |
24 | | `rclone serve webdav` | Yes | | Yes | | Yes | No CORS support. |
25 | | [Nutstore 坚果云](https://www.jianguoyun.com/) | Yes | | Yes | | Yes | No CORS support. |
26 | | [TeraCLOUD](https://teracloud.jp/en/) | Yes | | Yes | | Yes | No CORS support. |
27 | | Dropbox | Yes | | | Yes | | |
28 | | OneDrive for personal | Yes | | | Yes | | |
29 | | OneDrive for Business | In the plan | | | ? | | |
30 | | Google Drive | In the plan | | | ? | | |
31 | | [Box](https://www.box.com/) | ? | | | May be possible but needs further development. | | |
32 | | Google Cloud Storage | ? | | | May be possible but needs further development. | | |
33 | | Microsoft Azure Blob Storage | ? | | | May be possible but needs further development. | | |
34 | | [OpenStack Storage (Swift)](https://github.com/openstack/swift) | ? | | | May be possible but needs further development. | | |
35 | | https://put.io/ | ? | | | ? | | |
36 | | Yandex Disk | ? | | | ? | | |
37 | | FTP / FTPS | Never | | | Technically never possible to be implemented. | | |
38 | | SFTP | Never | | | Technically never possible to be implemented. | | |
39 | | Jottacloud | No | | | No. It seems that no open api is available. | | |
40 | | Mega | Never | | | No. No js api is available. | | |
41 | | Git | Never | | | No. Technically very hard, if not impossible, to be implemented. | | |
42 | | | | | | | | |
43 |
--------------------------------------------------------------------------------
/src/remote.ts:
--------------------------------------------------------------------------------
1 | import { Vault } from "obsidian";
2 | import type {
3 | DropboxConfig,
4 | OnedriveConfig,
5 | S3Config,
6 | SUPPORTED_SERVICES_TYPE,
7 | WebdavConfig,
8 | } from "./baseTypes";
9 | import * as dropbox from "./remoteForDropbox";
10 | import * as onedrive from "./remoteForOnedrive";
11 | import * as s3 from "./remoteForS3";
12 | import * as webdav from "./remoteForWebdav";
13 |
14 | import { log } from "./moreOnLog";
15 |
/**
 * Facade over the per-service remote clients; exactly one config/client
 * pair (matching serviceType) is initialized by the constructor.
 */
export class RemoteClient {
  readonly serviceType: SUPPORTED_SERVICES_TYPE;
  // Only the pair matching serviceType is set; the rest stay undefined.
  readonly s3Config?: S3Config;
  readonly webdavClient?: webdav.WrappedWebdavClient;
  readonly webdavConfig?: WebdavConfig;
  readonly dropboxClient?: dropbox.WrappedDropboxClient;
  readonly dropboxConfig?: DropboxConfig;
  readonly onedriveClient?: onedrive.WrappedOnedriveClient;
  readonly onedriveConfig?: OnedriveConfig;

  /**
   * @param serviceType which backend to initialize
   * @param vaultName used as the default remoteBaseDir for webdav/dropbox/
   *                  onedrive when the config doesn't set one; required
   *                  (with saveUpdatedConfigFunc) for those services
   * @param saveUpdatedConfigFunc persists configs the clients mutate
   *        (e.g. refreshed tokens).
   *        NOTE(review): the type reads "() => Promise" with no type
   *        argument — the generic parameter (e.g. Promise<any>) appears to
   *        have been stripped from this copy; confirm against the original.
   * @throws Error for unknown service types or missing vaultName/callback
   *
   * NOTE(review): the matching config (e.g. webdavConfig for "webdav") is
   * dereferenced without a null check — callers are assumed to supply it;
   * confirm that assumption at the call sites.
   */
  constructor(
    serviceType: SUPPORTED_SERVICES_TYPE,
    s3Config?: S3Config,
    webdavConfig?: WebdavConfig,
    dropboxConfig?: DropboxConfig,
    onedriveConfig?: OnedriveConfig,
    vaultName?: string,
    saveUpdatedConfigFunc?: () => Promise
  ) {
    this.serviceType = serviceType;
    // the client may modify the config inplace,
    // so we use a ref not copy of config here
    if (serviceType === "s3") {
      this.s3Config = s3Config;
    } else if (serviceType === "webdav") {
      if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
        throw Error(
          "remember to provide vault name and callback while init webdav client"
        );
      }
      const remoteBaseDir = webdavConfig.remoteBaseDir || vaultName;
      this.webdavConfig = webdavConfig;
      this.webdavClient = webdav.getWebdavClient(
        this.webdavConfig,
        remoteBaseDir,
        saveUpdatedConfigFunc
      );
    } else if (serviceType === "dropbox") {
      if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
        throw Error(
          "remember to provide vault name and callback while init dropbox client"
        );
      }
      const remoteBaseDir = dropboxConfig.remoteBaseDir || vaultName;
      this.dropboxConfig = dropboxConfig;
      this.dropboxClient = dropbox.getDropboxClient(
        this.dropboxConfig,
        remoteBaseDir,
        saveUpdatedConfigFunc
      );
    } else if (serviceType === "onedrive") {
      if (vaultName === undefined || saveUpdatedConfigFunc === undefined) {
        throw Error(
          "remember to provide vault name and callback while init onedrive client"
        );
      }
      const remoteBaseDir = onedriveConfig.remoteBaseDir || vaultName;
      this.onedriveConfig = onedriveConfig;
      this.onedriveClient = onedrive.getOnedriveClient(
        this.onedriveConfig,
        remoteBaseDir,
        saveUpdatedConfigFunc
      );
    } else {
      throw Error(`not supported service type ${this.serviceType}`);
    }
  }
83 |
84 | uploadToRemote = async (
85 | fileOrFolderPath: string,
86 | vault: Vault,
87 | isRecursively: boolean = false,
88 | password: string = "",
89 | remoteEncryptedKey: string = "",
90 | foldersCreatedBefore: Set | undefined = undefined,
91 | uploadRaw: boolean = false,
92 | rawContent: string | ArrayBuffer = ""
93 | ) => {
94 | if (this.serviceType === "s3") {
95 | return await s3.uploadToRemote(
96 | s3.getS3Client(this.s3Config),
97 | this.s3Config,
98 | fileOrFolderPath,
99 | vault,
100 | isRecursively,
101 | password,
102 | remoteEncryptedKey,
103 | uploadRaw,
104 | rawContent
105 | );
106 | } else if (this.serviceType === "webdav") {
107 | return await webdav.uploadToRemote(
108 | this.webdavClient,
109 | fileOrFolderPath,
110 | vault,
111 | isRecursively,
112 | password,
113 | remoteEncryptedKey,
114 | uploadRaw,
115 | rawContent
116 | );
117 | } else if (this.serviceType === "dropbox") {
118 | return await dropbox.uploadToRemote(
119 | this.dropboxClient,
120 | fileOrFolderPath,
121 | vault,
122 | isRecursively,
123 | password,
124 | remoteEncryptedKey,
125 | foldersCreatedBefore,
126 | uploadRaw,
127 | rawContent
128 | );
129 | } else if (this.serviceType === "onedrive") {
130 | return await onedrive.uploadToRemote(
131 | this.onedriveClient,
132 | fileOrFolderPath,
133 | vault,
134 | isRecursively,
135 | password,
136 | remoteEncryptedKey,
137 | foldersCreatedBefore,
138 | uploadRaw,
139 | rawContent
140 | );
141 | } else {
142 | throw Error(`not supported service type ${this.serviceType}`);
143 | }
144 | };
145 |
146 | listFromRemote = async (prefix?: string) => {
147 | if (this.serviceType === "s3") {
148 | return await s3.listFromRemote(
149 | s3.getS3Client(this.s3Config),
150 | this.s3Config,
151 | prefix
152 | );
153 | } else if (this.serviceType === "webdav") {
154 | return await webdav.listFromRemote(this.webdavClient, prefix);
155 | } else if (this.serviceType === "dropbox") {
156 | return await dropbox.listFromRemote(this.dropboxClient, prefix);
157 | } else if (this.serviceType === "onedrive") {
158 | return await onedrive.listFromRemote(this.onedriveClient, prefix);
159 | } else {
160 | throw Error(`not supported service type ${this.serviceType}`);
161 | }
162 | };
163 |
164 | downloadFromRemote = async (
165 | fileOrFolderPath: string,
166 | vault: Vault,
167 | mtime: number,
168 | password: string = "",
169 | remoteEncryptedKey: string = "",
170 | skipSaving: boolean = false
171 | ) => {
172 | if (this.serviceType === "s3") {
173 | return await s3.downloadFromRemote(
174 | s3.getS3Client(this.s3Config),
175 | this.s3Config,
176 | fileOrFolderPath,
177 | vault,
178 | mtime,
179 | password,
180 | remoteEncryptedKey,
181 | skipSaving
182 | );
183 | } else if (this.serviceType === "webdav") {
184 | return await webdav.downloadFromRemote(
185 | this.webdavClient,
186 | fileOrFolderPath,
187 | vault,
188 | mtime,
189 | password,
190 | remoteEncryptedKey,
191 | skipSaving
192 | );
193 | } else if (this.serviceType === "dropbox") {
194 | return await dropbox.downloadFromRemote(
195 | this.dropboxClient,
196 | fileOrFolderPath,
197 | vault,
198 | mtime,
199 | password,
200 | remoteEncryptedKey,
201 | skipSaving
202 | );
203 | } else if (this.serviceType === "onedrive") {
204 | return await onedrive.downloadFromRemote(
205 | this.onedriveClient,
206 | fileOrFolderPath,
207 | vault,
208 | mtime,
209 | password,
210 | remoteEncryptedKey,
211 | skipSaving
212 | );
213 | } else {
214 | throw Error(`not supported service type ${this.serviceType}`);
215 | }
216 | };
217 |
218 | deleteFromRemote = async (
219 | fileOrFolderPath: string,
220 | password: string = "",
221 | remoteEncryptedKey: string = ""
222 | ) => {
223 | if (this.serviceType === "s3") {
224 | return await s3.deleteFromRemote(
225 | s3.getS3Client(this.s3Config),
226 | this.s3Config,
227 | fileOrFolderPath,
228 | password,
229 | remoteEncryptedKey
230 | );
231 | } else if (this.serviceType === "webdav") {
232 | return await webdav.deleteFromRemote(
233 | this.webdavClient,
234 | fileOrFolderPath,
235 | password,
236 | remoteEncryptedKey
237 | );
238 | } else if (this.serviceType === "dropbox") {
239 | return await dropbox.deleteFromRemote(
240 | this.dropboxClient,
241 | fileOrFolderPath,
242 | password,
243 | remoteEncryptedKey
244 | );
245 | } else if (this.serviceType === "onedrive") {
246 | return await onedrive.deleteFromRemote(
247 | this.onedriveClient,
248 | fileOrFolderPath,
249 | password,
250 | remoteEncryptedKey
251 | );
252 | } else {
253 | throw Error(`not supported service type ${this.serviceType}`);
254 | }
255 | };
256 |
257 | checkConnectivity = async (callbackFunc?: any) => {
258 | if (this.serviceType === "s3") {
259 | return await s3.checkConnectivity(
260 | s3.getS3Client(this.s3Config),
261 | this.s3Config,
262 | callbackFunc
263 | );
264 | } else if (this.serviceType === "webdav") {
265 | return await webdav.checkConnectivity(this.webdavClient, callbackFunc);
266 | } else if (this.serviceType === "dropbox") {
267 | return await dropbox.checkConnectivity(this.dropboxClient, callbackFunc);
268 | } else if (this.serviceType === "onedrive") {
269 | return await onedrive.checkConnectivity(
270 | this.onedriveClient,
271 | callbackFunc
272 | );
273 | } else {
274 | throw Error(`not supported service type ${this.serviceType}`);
275 | }
276 | };
277 |
278 | getUser = async () => {
279 | if (this.serviceType === "dropbox") {
280 | return await dropbox.getUserDisplayName(this.dropboxClient);
281 | } else if (this.serviceType === "onedrive") {
282 | return await onedrive.getUserDisplayName(this.onedriveClient);
283 | } else {
284 | throw Error(`not supported service type ${this.serviceType}`);
285 | }
286 | };
287 |
288 | revokeAuth = async () => {
289 | if (this.serviceType === "dropbox") {
290 | return await dropbox.revokeAuth(this.dropboxClient);
291 | } else {
292 | throw Error(`not supported service type ${this.serviceType}`);
293 | }
294 | };
295 | }
296 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/src/misc.ts:
--------------------------------------------------------------------------------
1 | import { Vault, moment } from "obsidian";
2 | import { base32, base64url } from "rfc4648";
3 | import XRegExp from "xregexp";
4 | import emojiRegex from "emoji-regex";
5 |
6 | import type { I18n, LangType, LangTypeAndAuto, TransItemType } from "./i18n";
7 |
8 | import { log } from "./moreOnLog";
9 |
10 | /**
11 | * If any part of the file starts with '.' or '_' then it's a hidden file.
12 | * @param item
13 | * @param dot
14 | * @param underscore
15 | * @returns
16 | */
17 | export const isHiddenPath = (
18 | item: string,
19 | dot: boolean = true,
20 | underscore: boolean = true
21 | ) => {
22 | if (!(dot || underscore)) {
23 | throw Error("parameter error for isHiddenPath");
24 | }
25 | const k = normalizePath(item); // TODO: only unix path now
26 | const k2 = k.split("/"); // TODO: only unix path now
27 | // log.info(k2)
28 | for (const singlePart of k2) {
29 | if (singlePart === "." || singlePart === ".." || singlePart === "") {
30 | continue;
31 | }
32 | if (dot && singlePart[0] === ".") {
33 | return true;
34 | }
35 | if (underscore && singlePart[0] === "_") {
36 | return true;
37 | }
38 | }
39 | return false;
40 | };
41 |
42 | /**
43 | * Normalizes a path
44 | * @param path
45 | */
46 | export const normalizePath = (
47 | path: string
48 | ) => {
49 | if (!(path)) {
50 | throw Error("missing path for normalizePath")
51 | }
52 | // Replace backslashes with forward slashes
53 | path = path.replace(/\\/g, '/');
54 |
55 | // Remove duplicate slashes (e.g., '//' -> '/')
56 | path = path.replace(/\/\/+/g, '/');
57 |
58 | // Resolve '../' and './' in the path
59 | const parts = path.split('/');
60 | const result = [];
61 | for (const part of parts) {
62 | if (part === '..') {
63 | result.pop();
64 | } else if (part !== '.' && part !== '') {
65 | result.push(part);
66 | }
67 | }
68 |
69 | // Join parts back together with '/' as the separator
70 | return result.join('/');
71 | }
72 |
73 | export const dirname = (path: string) => {
74 | // Normalize the path to use forward slashes
75 | path = path.replace(/\\/g, '/');
76 |
77 | // Split the path into parts using forward slash as the separator
78 | const parts = path.split('/');
79 |
80 | // Remove the last part (file or directory name)
81 | parts.pop();
82 |
83 | // Join the remaining parts to get the dirname
84 | const dirname = parts.join('/');
85 |
86 | // Handle the case where the path was empty or only contained a root slash
87 | if (dirname === '' || dirname === '/' || dirname === '.') {
88 | return '/';
89 | }
90 |
91 | return dirname;
92 | }
93 |
94 | /**
95 | * Util func for mkdir -p based on the "path" of original file or folder
96 | * "a/b/c/" => ["a", "a/b", "a/b/c"]
97 | * "a/b/c/d/e.txt" => ["a", "a/b", "a/b/c", "a/b/c/d"]
98 | * @param x string
99 | * @returns string[] might be empty
100 | */
101 | export const getFolderLevels = (x: string, addEndingSlash: boolean = false) => {
102 | const res: string[] = [];
103 |
104 | if (x === "" || x === "/") {
105 | return res;
106 | }
107 |
108 | const y1 = x.split("/");
109 | let i = 0;
110 | for (let index = 0; index + 1 < y1.length; index++) {
111 | let k = y1.slice(0, index + 1).join("/");
112 | if (k === "" || k === "/") {
113 | continue;
114 | }
115 | if (addEndingSlash) {
116 | k = `${k}/`;
117 | }
118 | res.push(k);
119 | }
120 | return res;
121 | };
122 |
123 | export const mkdirpInVault = async (thePath: string, vault: Vault) => {
124 | const foldersToBuild = getFolderLevels(thePath);
125 | for (const folder of foldersToBuild) {
126 | const r = await vault.adapter.exists(folder);
127 | if (!r) {
128 | await vault.adapter.mkdir(folder);
129 | }
130 | }
131 | };
132 |
133 | /**
134 | * https://stackoverflow.com/questions/8609289
135 | * @param b Buffer
136 | * @returns ArrayBuffer
137 | */
138 | export const bufferToArrayBuffer = (
139 | b: Buffer | Uint8Array | ArrayBufferView
140 | ) => {
141 | return b.buffer.slice(b.byteOffset, b.byteOffset + b.byteLength);
142 | };
143 |
144 | /**
145 | * Simple func.
146 | * @param b
147 | * @returns
148 | */
149 | export const arrayBufferToBuffer = (b: ArrayBuffer) => {
150 | return Buffer.from(b);
151 | };
152 |
153 | export const arrayBufferToBase64 = (b: ArrayBuffer) => {
154 | return arrayBufferToBuffer(b).toString("base64");
155 | };
156 |
157 | export const arrayBufferToHex = (b: ArrayBuffer) => {
158 | return arrayBufferToBuffer(b).toString("hex");
159 | };
160 |
161 | export const base64ToArrayBuffer = (b64text: string) => {
162 | return bufferToArrayBuffer(Buffer.from(b64text, "base64"));
163 | };
164 |
165 | /**
166 | * https://stackoverflow.com/questions/43131242
167 | * @param hex
168 | * @returns
169 | */
170 | export const hexStringToTypedArray = (hex: string) => {
171 | return new Uint8Array(
172 | hex.match(/[\da-f]{2}/gi).map(function (h) {
173 | return parseInt(h, 16);
174 | })
175 | );
176 | };
177 |
178 | export const base64ToBase32 = (a: string) => {
179 | return base32.stringify(Buffer.from(a, "base64"));
180 | };
181 |
182 | export const base64ToBase64url = (a: string, pad: boolean = false) => {
183 | let b = a.replace(/\+/g, "-").replace(/\//g, "_");
184 | if (!pad) {
185 | b = b.replace(/=/g, "");
186 | }
187 | return b;
188 | };
189 |
190 | /**
191 | * Use regex to detect a text contains emoji or not.
192 | * @param a
193 | * @returns
194 | */
195 | export const hasEmojiInText = (a: string) => {
196 | const regex = emojiRegex();
197 | return regex.test(a);
198 | };
199 |
200 | /**
201 | * Convert the headers to a normal object.
202 | * @param h
203 | * @param toLower
204 | * @returns
205 | */
206 | export const headersToRecord = (h: Headers, toLower: boolean = true) => {
207 | const res: Record = {};
208 | h.forEach((v, k) => {
209 | if (toLower) {
210 | res[k.toLowerCase()] = v;
211 | } else {
212 | res[k] = v;
213 | }
214 | });
215 | return res;
216 | };
217 |
218 | /**
219 | * If input is already a folder, returns it as is;
220 | * And if input is a file, returns its direname.
221 | * @param a
222 | * @returns
223 | */
224 | export const getPathFolder = (a: string) => {
225 | if (a.endsWith("/")) {
226 | return a;
227 | }
228 | const b = dirname(a);
229 | return b.endsWith("/") ? b : `${b}/`;
230 | };
231 |
232 | /**
233 | * If input is already a folder, returns its folder;
234 | * And if input is a file, returns its direname.
235 | * @param a
236 | * @returns
237 | */
238 | export const getParentFolder = (a: string) => {
239 | const b = dirname(a);
240 | if (b === "." || b === "/") {
241 | // the root
242 | return "/";
243 | }
244 | if (b.endsWith("/")) {
245 | return b;
246 | }
247 | return `${b}/`;
248 | };
249 |
250 | /**
251 | * https://stackoverflow.com/questions/54511144
252 | * @param a
253 | * @param delimiter
254 | * @returns
255 | */
256 | export const setToString = (a: Set, delimiter: string = ",") => {
257 | return [...a].join(delimiter);
258 | };
259 |
260 | export const extractSvgSub = (x: string, subEl: string = "rect") => {
261 | const parser = new window.DOMParser();
262 | const dom = parser.parseFromString(x, "image/svg+xml");
263 | const svg = dom.querySelector("svg");
264 | svg.setAttribute("viewbox", "0 0 10 10");
265 | return svg.innerHTML;
266 | };
267 |
268 | /**
269 | * https://stackoverflow.com/questions/18230217
270 | * @param min
271 | * @param max
272 | * @returns
273 | */
274 | export const getRandomIntInclusive = (min: number, max: number) => {
275 | const randomBuffer = new Uint32Array(1);
276 | window.crypto.getRandomValues(randomBuffer);
277 | let randomNumber = randomBuffer[0] / (0xffffffff + 1);
278 | min = Math.ceil(min);
279 | max = Math.floor(max);
280 | return Math.floor(randomNumber * (max - min + 1)) + min;
281 | };
282 |
283 | /**
284 | * Random buffer
285 | * @param byteLength
286 | * @returns
287 | */
288 | export const getRandomArrayBuffer = (byteLength: number) => {
289 | const k = window.crypto.getRandomValues(new Uint8Array(byteLength));
290 | return bufferToArrayBuffer(k);
291 | };
292 |
293 | /**
294 | * https://stackoverflow.com/questions/958908
295 | * @param x
296 | * @returns
297 | */
298 | export const reverseString = (x: string) => {
299 | return [...x].reverse().join("");
300 | };
301 |
302 | export interface SplitRange {
303 | partNum: number; // startting from 1
304 | start: number;
305 | end: number; // exclusive
306 | }
307 | export const getSplitRanges = (bytesTotal: number, bytesEachPart: number) => {
308 | const res: SplitRange[] = [];
309 | if (bytesEachPart >= bytesTotal) {
310 | res.push({
311 | partNum: 1,
312 | start: 0,
313 | end: bytesTotal,
314 | });
315 | return res;
316 | }
317 | const remainder = bytesTotal % bytesEachPart;
318 | const howMany =
319 | Math.floor(bytesTotal / bytesEachPart) + (remainder === 0 ? 0 : 1);
320 | for (let i = 0; i < howMany; ++i) {
321 | res.push({
322 | partNum: i + 1,
323 | start: bytesEachPart * i,
324 | end: Math.min(bytesEachPart * (i + 1), bytesTotal),
325 | });
326 | }
327 | return res;
328 | };
329 |
330 | /**
331 | * https://stackoverflow.com/questions/332422
332 | * @param obj anything
333 | * @returns string of the name of the object
334 | */
335 | export const getTypeName = (obj: any) => {
336 | return Object.prototype.toString.call(obj).slice(8, -1);
337 | };
338 |
339 | /**
340 | * Startting from 1
341 | * @param x
342 | * @returns
343 | */
344 | export const atWhichLevel = (x: string) => {
345 | if (
346 | x === undefined ||
347 | x === "" ||
348 | x === "." ||
349 | x === ".." ||
350 | x.startsWith("/")
351 | ) {
352 | log.debug(`do not know which level for ${x}`);
353 | }
354 | let y = x;
355 | if (x.endsWith("/")) {
356 | y = x.slice(0, -1);
357 | }
358 | return y.split("/").length;
359 | };
360 |
361 | export const checkHasSpecialCharForDir = (x: string) => {
362 | return /[?/\\]/.test(x);
363 | };
364 |
365 | export const unixTimeToStr = (x: number | undefined | null) => {
366 | if (x === undefined || x === null || Number.isNaN(x)) {
367 | return undefined;
368 | }
369 | return (moment as any)(x).format() as string;
370 | };
371 |
372 | /**
373 | * https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Cyclic_object_value#examples
374 | * @returns
375 | */
376 | const getCircularReplacer = () => {
377 | const seen = new WeakSet();
378 | return (key: any, value: any) => {
379 | if (typeof value === "object" && value !== null) {
380 | if (seen.has(value)) {
381 | return;
382 | }
383 | seen.add(value);
384 | }
385 | return value;
386 | };
387 | };
388 |
389 | /**
390 | * Convert "any" value to string.
391 | * @param x
392 | * @returns
393 | */
394 | export const toText = (x: any) => {
395 | if (x === undefined || x === null) {
396 | return `${x}`;
397 | }
398 | if (typeof x === "string") {
399 | return x;
400 | }
401 | if (
402 | x instanceof String ||
403 | x instanceof Date ||
404 | typeof x === "number" ||
405 | typeof x === "bigint" ||
406 | typeof x === "boolean"
407 | ) {
408 | return `${x}`;
409 | }
410 |
411 | if (
412 | x instanceof Error ||
413 | (x &&
414 | x.stack &&
415 | x.message &&
416 | typeof x.stack === "string" &&
417 | typeof x.message === "string")
418 | ) {
419 | return `ERROR! MESSAGE: ${x.message}, STACK: ${x.stack}`;
420 | }
421 |
422 | try {
423 | const y = JSON.stringify(x, getCircularReplacer(), 2);
424 | if (y !== undefined) {
425 | return y;
426 | }
427 | throw new Error("not jsonable");
428 | } catch {
429 | return `${x}`;
430 | }
431 | };
432 |
433 | /**
434 | * On Android the stat has bugs for folders. So we need a fixed version.
435 | * @param vault
436 | * @param path
437 | */
438 | export const statFix = async (vault: Vault, path: string) => {
439 | const s = await vault.adapter.stat(path);
440 | if (s == undefined) {
441 | return;
442 | }
443 | if (s.ctime === undefined || s.ctime === null || Number.isNaN(s.ctime)) {
444 | s.ctime = undefined;
445 | }
446 | if (s.mtime === undefined || s.mtime === null || Number.isNaN(s.mtime)) {
447 | s.mtime = undefined;
448 | }
449 | if (
450 | (s.size === undefined || s.size === null || Number.isNaN(s.size)) &&
451 | s.type === "folder"
452 | ) {
453 | s.size = 0;
454 | }
455 | return s;
456 | };
457 |
458 | export function getLastSynced(i18n: I18n, lastSuccessSyncMillis?: number): {lastSyncMsg: string, lastSyncLabelMsg: string} {
459 | const t = (x: TransItemType, vars?: any) => {
460 | return i18n.t(x, vars);
461 | };
462 |
463 | let lastSynced = {
464 | lastSyncMsg: t("statusbar_lastsync_never"),
465 | lastSyncLabelMsg: t("statusbar_lastsync_never_label")
466 | };
467 |
468 | if (lastSuccessSyncMillis !== undefined && lastSuccessSyncMillis > 0) {
469 | const deltaTime = Date.now() - lastSuccessSyncMillis;
470 |
471 | // create human readable time
472 | const years = Math.floor(deltaTime / 31556952000);
473 | const months = Math.floor(deltaTime / 2629746000);
474 | const weeks = Math.floor(deltaTime / 604800000);
475 | const days = Math.floor(deltaTime / 86400000);
476 | const hours = Math.floor(deltaTime / 3600000);
477 | const minutes = Math.floor(deltaTime / 60000);
478 | let timeText = "";
479 |
480 | if (years > 0) {
481 | timeText = t("statusbar_time_years", { time: years });
482 | } else if (months > 0) {
483 | timeText = t("statusbar_time_months", { time: months });
484 | } else if (weeks > 0) {
485 | timeText = t("statusbar_time_weeks", { time: weeks });
486 | } else if (days > 0) {
487 | timeText = t("statusbar_time_days", { time: days });
488 | } else if (hours > 0) {
489 | timeText = t("statusbar_time_hours", { time: hours });
490 | } else if (minutes > 0) {
491 | timeText = t("statusbar_time_minutes", { time: minutes });
492 | } else {
493 | timeText = t("statusbar_time_lessminute");
494 | }
495 |
496 | let dateText = new Date(lastSuccessSyncMillis)
497 | .toLocaleTimeString(navigator.language, {
498 | weekday: "long", year: "numeric", month: "long", day: "numeric"
499 | });
500 |
501 | lastSynced.lastSyncMsg = t("statusbar_lastsync", { time: timeText });
502 | lastSynced.lastSyncLabelMsg = t("statusbar_lastsync_label", { date: dateText });
503 | }
504 |
505 | return lastSynced;
506 | }
507 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Remotely Sync
2 |
3 | **Remotely Sync** is a fork of *Remotely Save*, the unofficial sync plugin for Obsidian. At the time of forking the *Remotely Save* plugin was not actively maintained and some security improvements were made to Remotely Save - please see the [list of security updates](#security-updates-from-remotely-save) made to *Remotely Save*. Note this plugin is not backwards compatible with Remotely Save, save your data locally and have a backup before using this plugin. See [migration guide](#migrating-from-remotely-save) instructions.
4 |
5 | Note that some of the features will be merged into Remotely Save over time, and Remotely Sync is likely less stable at any point in time. If you want stability go with [Remotely Save](https://github.com/remotely-save/remotely-save)!
6 |
7 | If you like it or find it useful, please consider giving it a [star](https://github.com/sboesen/remotely-sync) on GitHub.
8 |
9 | Pull requests greatly appreciated! Please see [Contributing](#contributing) to get started.
10 |
11 | ## Disclaimer
12 |
13 | - **This is NOT the [official sync service](https://obsidian.md/sync) provided by Obsidian.**
14 |
15 | ## !!!Caution!!!
16 |
17 | **ALWAYS, ALWAYS, backup your vault before using this plugin.**
18 |
19 |
20 | ## Security Updates from Remotely Save
21 | - Updated encryption to use [AES-GCM](https://github.com/sboesen/remotely-sync/commit/d9ad76e774b0b1cee2b36316058df926f4bfb2bf#diff-6ce8b79e4237671498e2b10caa08b379beaae2cd5e56415167b563d1536f6b74R57) which is more secure and authenticates the ciphertext when decrypting, making it harder to exploit [padding oracle attacks](https://cryptopals.com/sets/3/challenges/17).
22 | - Updated [salt](https://github.com/sboesen/remotely-sync/commit/d9ad76e774b0b1cee2b36316058df926f4bfb2bf#diff-6ce8b79e4237671498e2b10caa08b379beaae2cd5e56415167b563d1536f6b74R45) from 8 -> 16 bytes. [See note](https://github.com/sboesen/remotely-sync/issues/9)
23 | - Updated IV to not be derived from the user's password ([discussion](https://github.com/sboesen/remotely-sync/discussions/76#discussioncomment-7878678))
24 | - **No security guarantees**, but these are the issues I identified when reviewing the end-to-end encryption as implemented in remotely-save.
25 |
26 | ## Features
27 | - Supports:
28 | - Amazon S3 or S3-compatible
29 | - Dropbox
30 | - OneDrive for personal
31 | - Webdav
32 | - [Here](./docs/services_connectable_or_not.md) shows more connectable (or not-connectable) services in details. Need another service added? Please [open a feature request](#questions-suggestions-or-bugs)!
33 | - **Obsidian Mobile supported.** Vaults can be synced across mobile and desktop devices with the cloud service as the "broker".
34 | - **[End-to-end encryption](./docs/encryption.md) supported.** Files are encrypted using AES-256 GCM before being sent to the cloud **if** user specifies a password.
35 | - **Scheduled auto sync supported.** You can also manually trigger the sync using sidebar ribbon, or using the command from the command palette (or even bind the hot key combination to the command then press the hot key combination).
36 | - Sync on Save
37 | - Sync status bar
38 | - Syncing bookmarks by default (and other obsidian configuration files if enabled)
39 | - **[Minimal Intrusive](./docs/minimal_intrusive_design.md).**
40 | - **Fully open source under [Apache-2.0 License](./LICENSE).**
41 | - **[Sync Algorithm open](./docs/sync_algorithm_v2.md) for discussion.**
42 |
43 | ## Limitations
44 |
45 | - **To support syncing deleted files, extra metadata will also be uploaded.** See [Minimal Intrusive](./docs/minimal_intrusive_design.md).
46 | - **No conflict resolution. No content-diff-and-patch algorithm.** All files and folders are compared using their local and remote "last modified time" and those with later "last modified time" win.
47 | - **Cloud services cost you money.** Always be aware of the costs and pricing. Specifically, all the operations, including but not limited to downloading, uploading, listing all files, calling any api, storage sizes, may or may not cost you money.
48 | - **Some limitations from the browser environment.** More technical details are [in the doc](./docs/browser_env.md).
49 | - **You should protect your `data.json` file.** The file contains sensitive information.
50 | - It's strongly advised **NOT** to share your `data.json` file to anyone.
51 | - It's usually **NOT** a good idea to check the file into version control. By default, the plugin tries to create a `.gitignore` file inside the plugin directory if it doesn't exist, for ignoring `data.json` in the `git` version control. If you know exactly what it means and want to remove the setting, please modify the `.gitignore` file or set it to be empty.
52 |
53 | ## Migrating from Remotely Save
54 | The easiest way to migrate from Remotely Save (or other forks) to Remotely Sync is:
55 |
56 | 1. Make a local, unencrypted backup of your files (make sure to synchronize all changes across your devices)
57 | 2. Disable the remotely-save plugin
58 | 3. Enable remotely-sync and set a new encryption password
59 | 4. Delete the encrypted files in your cloud provider (or make a new S3 bucket in this case)
60 | 5. Perform a sync using remotely-sync
61 |
62 | ## Credit
63 | * Thanks to @fyears for the original Remotely Save plugin
64 | * Thanks to @sampurkiszb for sync on save
65 | * Thanks to @zaiziw for Obsidian bookmark sync
66 | * Thanks to @FEI352 & @lyiton for helping translate the plugin
67 | * Thanks to @kadisonm for major code contributions (including lightweight sync), refactoring, & bug fixes
68 | * Thanks to @vpsone for status bar UI fix!
69 |
70 | ## Questions, Suggestions, Or Bugs
71 |
72 | You are greatly welcome to ask questions, post any suggestions, or report any bugs! Pull requests also greatly appreciated. The project is mainly maintained on GitHub:
73 |
74 | - Questions: [GitHub repo Discussions](https://github.com/sboesen/remotely-sync/discussions)
75 | - Suggestions: also in [GitHub repo Discussions](https://github.com/sboesen/remotely-sync/discussions)
76 | - Bugs: [GitHub repo Issues](https://github.com/sboesen/remotely-sync/issues) (NOT Discussion)
77 |
78 | ## Download and Install
79 |
80 | - Option #1: Search in the official "community plugin list", or visit this: [https://obsidian.md/plugins?id=remotely-sync](https://obsidian.md/plugins?id=remotely-sync) (which should redirect you into Obsidian app), then install the plugin.
81 | - Option #2: You can also use [Obsidian42 - BRAT](https://github.com/TfTHacker/obsidian42-brat) to install this plugin. Input `sboesen/remotely-sync` in the configuration of BRAT.
82 | - Option #3: Manually download assets (`main.js`, `manifest.json`, `styles.css`) from the [latest release](https://github.com/sboesen/remotely-sync/releases).
83 |
84 | ## Contributing
85 |
86 | Please see our [GitHub project](https://github.com/users/sboesen/projects/1) for a prioritized list of issues.
87 |
88 | General priorities (may change):
89 | P0: Top priority, sync broken or risk of data loss for all remote providers.
90 | P1: Issue or major feature gap for all providers, usually has workaround
91 | P2: Sync issue for some providers but not all, or for some users but not all
92 | P3: Nice to have, or cosmetic issue. Does not impact sync.
93 |
94 | Building the project:
95 | ```
96 | git clone https://github.com/sboesen/remotely-sync
97 | cd remotely-sync
98 | npm install
99 | ```
100 |
101 | Running development build (watches for changes and recompiles)
102 | ```
103 | npm run dev2
104 | ```
105 |
106 | Building a production build
107 | ```
108 | npm run build2
109 | ```
110 |
111 | Testing:
112 | ```
113 | cp main.js styles.css manifest.json /your/path/to/vault/.obsidian/plugins/remotely-sync
114 | ```
115 | Open development tools and Cmd+r or Ctrl+r to refresh the Obsidian app, quickly reloading the plugin.
116 |
117 | ## Usage
118 |
119 | ### S3
120 |
121 | - Prepare your S3 (-compatible) service information: [endpoint, region](https://docs.aws.amazon.com/general/latest/gr/s3.html), [access key id, secret access key](https://docs.aws.amazon.com/sdk-for-javascript/v3/developer-guide/getting-your-credentials.html), bucket name. The bucket should be empty and solely for syncing a vault.
122 | - About CORS:
123 | - If you are using Obsidian desktop >= 0.13.25 or mobile >= 1.1.1, you can skip this CORS part.
124 | - If you are using Obsidian desktop < 0.13.25 or mobile < 1.1.1, you need to configure (enable) [CORS](https://docs.aws.amazon.com/AmazonS3/latest/userguide/enabling-cors-examples.html) for requests from `app://obsidian.md` and `capacitor://localhost` and `http://localhost`, and add at least `ETag` into exposed headers. Full example is [here](./docs/s3_cors_configure.md). It's unfortunately required, because the plugin sends requests from a browser-like environment. Those addresses have been tested and confirmed working on desktop, iOS, and Android.
125 | - Download and enable this plugin.
126 | - Enter your information to the settings of this plugin.
127 | - If you want to enable end-to-end encryption, also set a password in settings. If you do not specify a password, the files and folders are synced in plain, original content to the cloud.
128 | - Click the new "circle arrow" icon on the ribbon (the left sidebar), **every time** you want to sync your vault between local and remote. (Or, you could configure auto sync in the settings panel (See next chapter).) While syncing, the icon becomes "two half-circle arrows". Besides clicking the icon on the sidebar ribbon, you can also activate the corresponding command in the command palette.
129 | - **Be patient while syncing.** Especially in the first-time sync.
130 |
131 | ### Dropbox
132 |
133 | - **This plugin is NOT an official Dropbox product.** The plugin just uses Dropbox's public API.
134 | - After the authorization, the plugin can read your name and email (which cannot be unselected on Dropbox api), and read and write files in your Dropbox's `/Apps/remotely-sync` folder.
135 | - If you decide to authorize this plugin to connect to Dropbox, please go to plugin's settings, and choose Dropbox then follow the instructions. [More with screenshot is here](./docs/dropbox_review_material/README.md).
136 | - Password-based end-to-end encryption is also supported. But please be aware that **the vault name itself is not encrypted**.
137 |
138 | ### OneDrive for personal
139 |
140 | - **This plugin is NOT an official Microsoft / OneDrive product.** The plugin just uses Microsoft's [OneDrive's public API](https://docs.microsoft.com/en-us/onedrive/developer/rest-api).
141 | - This plugin only works for "OneDrive for personal", and does not work for "OneDrive for Business" (yet). See [#11](https://github.com/fyears/remotely-save/issues/11) for further details.
142 | - After the authorization, the plugin can read your name and email, and read and write files in your OneDrive's `/Apps/remotely-sync` folder.
143 | - If you decide to authorize this plugin to connect to OneDrive, please go to plugin's settings, and choose OneDrive then follow the instructions.
144 | - Password-based end-to-end encryption is also supported. But please be aware that **the vault name itself is not encrypted**.
145 | - Syncing empty files is not supported (see [related issue](https://github.com/sboesen/remotely-sync/issues/67))
146 |
147 | ### webdav
148 |
149 | - About CORS:
150 | - If you are using Obsidian desktop >= 0.13.25 or iOS >= 1.1.1, you can skip this CORS part.
151 | - If you are using Obsidian desktop < 0.13.25 or iOS < 1.1.1 or any Android version:
152 | - The webdav server has to have CORS enabled for requests from `app://obsidian.md` and `capacitor://localhost` and `http://localhost`, **AND** all webdav HTTP methods, **AND** all webdav headers. These are required, because Obsidian mobile works like a browser and mobile plugins are limited by CORS policies unless under an upgraded Obsidian version.
153 | - Popular software NextCloud, OwnCloud, `rclone serve webdav` do **NOT** enable CORS by default. If you are using any of them, you should evaluate the risk, and find a way to enable CORS, before using this plugin, or use an upgraded Obsidian version.
154 | - **Unofficial** workaround: NextCloud users can **evaluate the risk by themselves**, and if decide to accept the risk, they can install [WebAppPassword](https://apps.nextcloud.com/apps/webapppassword) app, and add `app://obsidian.md`, `capacitor://localhost`, `http://localhost` to `Allowed origins`
155 | - **Unofficial** workaround: OwnCloud users can **evaluate the risk by themselves**, and if decide to accept the risk, they can download `.tar.gz` of `WebAppPassword` above and manually install and configure it on their instances.
156 | - The plugin is tested successfully under python package [`wsgidav` (version 4.0)](https://github.com/mar10/wsgidav). See [this issue](https://github.com/mar10/wsgidav/issues/239) for some details.
157 | - Your data would be synced to a `${vaultName}` sub folder on your webdav server.
158 | - Password-based end-to-end encryption is also supported. But please be aware that **the vault name itself is not encrypted**.
159 |
160 | ### Alibaba Cloud OSS and Minio
161 | - Use the S3 configuration
162 | - Enable "Disable S3 metadata sync" if you get 403 or 400 errors. This means not syncing modification time until it is [fixed](https://github.com/sboesen/remotely-sync/issues/70).
163 |
164 |
165 | ## Scheduled Auto Sync
166 |
167 | - You can configure auto syncing every N minutes in settings.
168 | - In auto sync mode, if any error occurs, the plugin would **fail silently**.
169 | - Auto sync only works when Obsidian is being opened. It's **technically impossible** to auto sync while Obsidian is in background, because the plugin just works in the browser environment provided by Obsidian.
170 |
171 | ## How To Deal With Hidden Files Or Folders
172 |
173 | **By default, all files or folder starting with `.` (dot) or `_` (underscore) are treated as hidden files, and would NOT be synced.** It's useful if you have some files just staying locally. But this strategy also means that themes / other plugins / settings of this plugin would neither be synced.
174 |
175 | You can change the settings to allow syncing `_` files or folders, as well as `.obsidian` special config folder (but not any other `.` files or folders).
176 |
177 | ## How To Debug
178 |
179 | See [here](./docs/how_to_debug/README.md) for more details.
180 |
181 | ## Troubleshooting
182 |
183 | ### password_not_matched
184 | If you get a `password_not_matched` error while syncing, try:
185 |
186 | 1. making a backup of your vault
187 | 2. removing the vault folder on your remote sync service
188 | 3. syncing again.
189 |
190 |
191 | ## Bonus: Import And Export Not-Oauth2 Plugin Settings By QR Code
192 |
193 | See [here](./docs/import_export_some_settings.md) for more details.
194 |
--------------------------------------------------------------------------------
/src/remoteForWebdav.ts:
--------------------------------------------------------------------------------
1 | import { Buffer } from "buffer";
2 | import { Vault, requestUrl } from "obsidian";
3 |
4 | import { Queue } from "@fyears/tsqueue";
5 | import chunk from "lodash/chunk";
6 | import flatten from "lodash/flatten";
7 | import { getReasonPhrase } from "http-status-codes";
8 | import { RemoteItem, VALID_REQURL, WebdavConfig } from "./baseTypes";
9 | import { decryptArrayBuffer, encryptArrayBuffer } from "./encrypt";
10 | import { bufferToArrayBuffer, getPathFolder, mkdirpInVault } from "./misc";
11 |
12 | import { log } from "./moreOnLog";
13 |
14 | import type {
15 | FileStat,
16 | WebDAVClient,
17 | RequestOptionsWithState,
18 | Response,
19 | ResponseDataDetailed,
20 | } from "webdav/web";
21 | import { getPatcher } from "webdav/web";
22 | if (VALID_REQURL) {
23 | getPatcher().patch(
24 | "request",
25 | async (
26 | options: RequestOptionsWithState
27 | ): Promise> => {
28 | const transformedHeaders = { ...options.headers };
29 | delete transformedHeaders["host"];
30 | delete transformedHeaders["Host"];
31 | delete transformedHeaders["content-length"];
32 | delete transformedHeaders["Content-Length"];
33 | const r = await requestUrl({
34 | url: options.url,
35 | method: options.method,
36 | body: options.data as string | ArrayBuffer,
37 | headers: transformedHeaders,
38 | });
39 |
40 | let r2: Response | ResponseDataDetailed = undefined;
41 | if (options.responseType === undefined) {
42 | r2 = {
43 | data: undefined,
44 | status: r.status,
45 | statusText: getReasonPhrase(r.status),
46 | headers: r.headers,
47 | };
48 | } else if (options.responseType === "json") {
49 | r2 = {
50 | data: r.json,
51 | status: r.status,
52 | statusText: getReasonPhrase(r.status),
53 | headers: r.headers,
54 | };
55 | } else if (options.responseType === "text") {
56 | r2 = {
57 | data: r.text,
58 | status: r.status,
59 | statusText: getReasonPhrase(r.status),
60 | headers: r.headers,
61 | };
62 | } else if (options.responseType === "arraybuffer") {
63 | r2 = {
64 | data: r.arrayBuffer,
65 | status: r.status,
66 | statusText: getReasonPhrase(r.status),
67 | headers: r.headers,
68 | };
69 | } else {
70 | throw Error(
71 | `do not know how to deal with responseType = ${options.responseType}`
72 | );
73 | }
74 | return r2;
75 | }
76 | );
77 | }
78 | import { AuthType, BufferLike, createClient } from "webdav/web";
79 | export type { WebDAVClient } from "webdav/web";
80 |
// Default (empty) webdav settings. depth "auto_unknown" means "probe the
// server on first init()" — it gets rewritten to auto_infinity or auto_1
// once the supported PROPFIND Depth is detected.
export const DEFAULT_WEBDAV_CONFIG = {
  address: "",
  username: "",
  password: "",
  authType: "basic",
  manualRecursive: false,
  depth: "auto_unknown",
  remoteBaseDir: "",
} as WebdavConfig;
90 |
91 | const getWebdavPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
92 | let key = fileOrFolderPath;
93 | if (fileOrFolderPath === "/" || fileOrFolderPath === "") {
94 | // special
95 | key = `/${remoteBaseDir}/`;
96 | }
97 | if (!fileOrFolderPath.startsWith("/")) {
98 | key = `/${remoteBaseDir}/${fileOrFolderPath}`;
99 | }
100 | return key;
101 | };
102 |
103 | const getNormPath = (fileOrFolderPath: string, remoteBaseDir: string) => {
104 | if (
105 | !(
106 | fileOrFolderPath === `/${remoteBaseDir}` ||
107 | fileOrFolderPath.startsWith(`/${remoteBaseDir}/`)
108 | )
109 | ) {
110 | throw Error(
111 | `"${fileOrFolderPath}" doesn't starts with "/${remoteBaseDir}/"`
112 | );
113 | }
114 | // if (fileOrFolderPath.startsWith("/")) {
115 | // return fileOrFolderPath.slice(1);
116 | // }
117 | return fileOrFolderPath.slice(`/${remoteBaseDir}/`.length);
118 | };
119 |
120 | const fromWebdavItemToRemoteItem = (x: FileStat, remoteBaseDir: string) => {
121 | let key = getNormPath(x.filename, remoteBaseDir);
122 | if (x.type === "directory" && !key.endsWith("/")) {
123 | key = `${key}/`;
124 | }
125 | return {
126 | key: key,
127 | lastModified: Date.parse(x.lastmod).valueOf(),
128 | size: x.size,
129 | remoteType: "webdav",
130 | etag: x.etag || undefined,
131 | } as RemoteItem;
132 | };
133 |
134 | export class WrappedWebdavClient {
135 | webdavConfig: WebdavConfig;
136 | remoteBaseDir: string;
137 | client: WebDAVClient;
138 | vaultFolderExists: boolean;
139 | saveUpdatedConfigFunc: () => Promise;
140 | constructor(
141 | webdavConfig: WebdavConfig,
142 | remoteBaseDir: string,
143 | saveUpdatedConfigFunc: () => Promise
144 | ) {
145 | this.webdavConfig = webdavConfig;
146 | this.remoteBaseDir = remoteBaseDir;
147 | this.vaultFolderExists = false;
148 | this.saveUpdatedConfigFunc = saveUpdatedConfigFunc;
149 | }
150 |
151 | init = async () => {
152 | // init client if not inited
153 | const headers = {
154 | "Cache-Control": "no-cache",
155 | };
156 | if (this.client === undefined) {
157 | if (
158 | this.webdavConfig.username !== "" &&
159 | this.webdavConfig.password !== ""
160 | ) {
161 | this.client = createClient(this.webdavConfig.address, {
162 | username: this.webdavConfig.username,
163 | password: this.webdavConfig.password,
164 | headers: headers,
165 | authType:
166 | this.webdavConfig.authType === "digest"
167 | ? AuthType.Digest
168 | : AuthType.Password,
169 | });
170 | } else {
171 | this.client = createClient(this.webdavConfig.address, {
172 | headers: headers,
173 | });
174 | }
175 | }
176 |
177 | // check vault folder
178 | if (this.vaultFolderExists) {
179 | // pass
180 | } else {
181 | const res = await this.client.exists(`/${this.remoteBaseDir}/`);
182 | if (res) {
183 | this.vaultFolderExists = true;
184 | } else {
185 | await this.client.createDirectory(`/${this.remoteBaseDir}/`);
186 | this.vaultFolderExists = true;
187 | }
188 | }
189 |
190 | // adjust depth parameter
191 | if (this.webdavConfig.depth === "auto_unknown") {
192 | let testPassed = false;
193 | try {
194 | const res = await this.client.customRequest(`/${this.remoteBaseDir}/`, {
195 | method: "PROPFIND",
196 | headers: {
197 | Depth: "infinity",
198 | },
199 | responseType: "text",
200 | });
201 | if (res.status === 403) {
202 | throw Error("not support Infinity, get 403");
203 | } else {
204 | testPassed = true;
205 | this.webdavConfig.depth = "auto_infinity";
206 | this.webdavConfig.manualRecursive = false;
207 | }
208 | } catch (error) {
209 | testPassed = false;
210 | }
211 | if (!testPassed) {
212 | try {
213 | const res = await this.client.customRequest(
214 | `/${this.remoteBaseDir}/`,
215 | {
216 | method: "PROPFIND",
217 | headers: {
218 | Depth: "1",
219 | },
220 | responseType: "text",
221 | }
222 | );
223 | testPassed = true;
224 | this.webdavConfig.depth = "auto_1";
225 | this.webdavConfig.manualRecursive = true;
226 | } catch (error) {
227 | testPassed = false;
228 | }
229 | }
230 | if (testPassed) {
231 | // the depth option has been changed
232 | // save the setting
233 | if (this.saveUpdatedConfigFunc !== undefined) {
234 | await this.saveUpdatedConfigFunc();
235 | }
236 | }
237 | }
238 | };
239 | }
240 |
241 | export const getWebdavClient = (
242 | webdavConfig: WebdavConfig,
243 | remoteBaseDir: string,
244 | saveUpdatedConfigFunc: () => Promise
245 | ) => {
246 | return new WrappedWebdavClient(
247 | webdavConfig,
248 | remoteBaseDir,
249 | saveUpdatedConfigFunc
250 | );
251 | };
252 |
253 | export const getRemoteMeta = async (
254 | client: WrappedWebdavClient,
255 | fileOrFolderPath: string
256 | ) => {
257 | await client.init();
258 | const remotePath = getWebdavPath(fileOrFolderPath, client.remoteBaseDir);
259 | const res = (await client.client.stat(remotePath, {
260 | details: false,
261 | })) as FileStat;
262 | return fromWebdavItemToRemoteItem(res, client.remoteBaseDir);
263 | };
264 |
/**
 * Upload a file or folder to the webdav server.
 *
 * @param client wrapped webdav client (init-ed on demand)
 * @param fileOrFolderPath vault-relative path; a trailing "/" marks a folder
 * @param vault the Obsidian vault, used to read local file bytes
 * @param isRecursively recursive folder upload is NOT implemented (throws)
 * @param password if non-empty, content is encrypted and the remote name
 *   becomes remoteEncryptedKey instead of the plain path
 * @param remoteEncryptedKey encrypted remote name (used with password)
 * @param uploadRaw if true, upload rawContent instead of reading the vault
 * @param rawContent content to upload when uploadRaw is set
 * @returns the RemoteItem metadata of the uploaded entry
 */
export const uploadToRemote = async (
  client: WrappedWebdavClient,
  fileOrFolderPath: string,
  vault: Vault,
  isRecursively: boolean = false,
  password: string = "",
  remoteEncryptedKey: string = "",
  uploadRaw: boolean = false,
  rawContent: string | ArrayBuffer = ""
) => {
  await client.init();
  // with a password set, the remote name is the encrypted key
  let uploadFile = fileOrFolderPath;
  if (password !== "") {
    uploadFile = remoteEncryptedKey;
  }
  uploadFile = getWebdavPath(uploadFile, client.remoteBaseDir);

  const isFolder = fileOrFolderPath.endsWith("/");

  if (isFolder && isRecursively) {
    throw Error("upload function doesn't implement recursive function yet!");
  } else if (isFolder && !isRecursively) {
    if (uploadRaw) {
      throw Error(`you specify uploadRaw, but you also provide a folder key!`);
    }
    // folder
    if (password === "") {
      // if not encrypted, mkdir a remote folder
      await client.client.createDirectory(uploadFile, {
        recursive: false, // the sync algo should guarantee no need to recursive
      });
      const res = await getRemoteMeta(client, uploadFile);
      return res;
    } else {
      // if encrypted, upload a fake file with the encrypted file name
      await client.client.putFileContents(uploadFile, "", {
        overwrite: true,
      });

      return await getRemoteMeta(client, uploadFile);
    }
  } else {
    // file
    // we ignore isRecursively parameter here
    let localContent = undefined;
    if (uploadRaw) {
      // caller supplied the bytes directly; strings get UTF-8 encoded
      if (typeof rawContent === "string") {
        localContent = new TextEncoder().encode(rawContent).buffer;
      } else {
        localContent = rawContent;
      }
    } else {
      localContent = await vault.adapter.readBinary(fileOrFolderPath);
    }
    // encrypt before uploading when a password is configured
    let remoteContent = localContent;
    if (password !== "") {
      remoteContent = await encryptArrayBuffer(localContent, password);
    }
    // updated 20220326: the algorithm guarantee this
    // // we need to create folders before uploading
    // const dir = getPathFolder(uploadFile);
    // if (dir !== "/" && dir !== "") {
    //   await client.client.createDirectory(dir, { recursive: false });
    // }
    await client.client.putFileContents(uploadFile, remoteContent, {
      overwrite: true,
    });

    return await getRemoteMeta(client, uploadFile);
  }
};
336 |
337 | export const listFromRemote = async (
338 | client: WrappedWebdavClient,
339 | prefix?: string
340 | ) => {
341 | if (prefix !== undefined) {
342 | throw Error("prefix not supported");
343 | }
344 | await client.init();
345 |
346 | let contents = [] as FileStat[];
347 | if (
348 | client.webdavConfig.depth === "auto_1" ||
349 | client.webdavConfig.depth === "manual_1"
350 | ) {
351 | // the remote doesn't support infinity propfind,
352 | // we need to do a bfs here
353 | const q = new Queue([`/${client.remoteBaseDir}`]);
354 | const CHUNK_SIZE = 10;
355 | while (q.length > 0) {
356 | const itemsToFetch = [];
357 | while (q.length > 0) {
358 | itemsToFetch.push(q.pop());
359 | }
360 | const itemsToFetchChunks = chunk(itemsToFetch, CHUNK_SIZE);
361 | // log.debug(itemsToFetchChunks);
362 | const subContents = [] as FileStat[];
363 | for (const singleChunk of itemsToFetchChunks) {
364 | const r = singleChunk.map((x) => {
365 | return client.client.getDirectoryContents(x, {
366 | deep: false,
367 | details: false /* no need for verbose details here */,
368 | // TODO: to support .obsidian,
369 | // we need to load all files including dot,
370 | // anyway to reduce the resources?
371 | // glob: "/**" /* avoid dot files by using glob */,
372 | }) as Promise;
373 | });
374 | const r2 = flatten(await Promise.all(r));
375 | subContents.push(...r2);
376 | }
377 | for (let i = 0; i < subContents.length; ++i) {
378 | const f = subContents[i];
379 | contents.push(f);
380 | if (f.type === "directory") {
381 | q.push(f.filename);
382 | }
383 | }
384 | }
385 | } else {
386 | // the remote supports infinity propfind
387 | contents = (await client.client.getDirectoryContents(
388 | `/${client.remoteBaseDir}`,
389 | {
390 | deep: true,
391 | details: false /* no need for verbose details here */,
392 | // TODO: to support .obsidian,
393 | // we need to load all files including dot,
394 | // anyway to reduce the resources?
395 | // glob: "/**" /* avoid dot files by using glob */,
396 | }
397 | )) as FileStat[];
398 | }
399 | return {
400 | Contents: contents.map((x) =>
401 | fromWebdavItemToRemoteItem(x, client.remoteBaseDir)
402 | ),
403 | };
404 | };
405 |
406 | const downloadFromRemoteRaw = async (
407 | client: WrappedWebdavClient,
408 | fileOrFolderPath: string
409 | ) => {
410 | await client.init();
411 | const buff = (await client.client.getFileContents(
412 | getWebdavPath(fileOrFolderPath, client.remoteBaseDir)
413 | )) as BufferLike;
414 | if (buff instanceof ArrayBuffer) {
415 | return buff;
416 | } else if (buff instanceof Buffer) {
417 | return bufferToArrayBuffer(buff);
418 | }
419 | throw Error(`unexpected file content result with type ${typeof buff}`);
420 | };
421 |
/**
 * Download a remote file or folder into the vault.
 *
 * @param client wrapped webdav client
 * @param fileOrFolderPath vault-relative path; a trailing "/" marks a folder
 * @param vault target Obsidian vault
 * @param mtime modification time to stamp on the saved file
 * @param password if non-empty, the remote content is decrypted and the
 *   remote name is remoteEncryptedKey
 * @param remoteEncryptedKey encrypted remote name (used with password)
 * @param skipSaving if true, return the bytes without writing to the vault
 * @returns the (decrypted) content; an empty ArrayBuffer for folders
 */
export const downloadFromRemote = async (
  client: WrappedWebdavClient,
  fileOrFolderPath: string,
  vault: Vault,
  mtime: number,
  password: string = "",
  remoteEncryptedKey: string = "",
  skipSaving: boolean = false
) => {
  await client.init();

  const isFolder = fileOrFolderPath.endsWith("/");

  if (!skipSaving) {
    await mkdirpInVault(fileOrFolderPath, vault);
  }

  // the remote content may be encrypted;
  // decrypt it before saving locally when a password is set

  if (isFolder) {
    // mkdirp locally is enough
    // do nothing here
    return new ArrayBuffer(0);
  } else {
    // with a password set, the remote name is the encrypted key
    let downloadFile = fileOrFolderPath;
    if (password !== "") {
      downloadFile = remoteEncryptedKey;
    }
    downloadFile = getWebdavPath(downloadFile, client.remoteBaseDir);
    const remoteContent = await downloadFromRemoteRaw(client, downloadFile);
    let localContent = remoteContent;
    if (password !== "") {
      localContent = await decryptArrayBuffer(remoteContent, password);
    }
    if (!skipSaving) {
      await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
        mtime: mtime,
      });
    }
    return localContent;
  }
};
465 |
466 | export const deleteFromRemote = async (
467 | client: WrappedWebdavClient,
468 | fileOrFolderPath: string,
469 | password: string = "",
470 | remoteEncryptedKey: string = ""
471 | ) => {
472 | if (fileOrFolderPath === "/") {
473 | return;
474 | }
475 | let remoteFileName = fileOrFolderPath;
476 | if (password !== "") {
477 | remoteFileName = remoteEncryptedKey;
478 | }
479 | remoteFileName = getWebdavPath(remoteFileName, client.remoteBaseDir);
480 |
481 | await client.init();
482 | try {
483 | await client.client.deleteFile(remoteFileName);
484 | } catch (err) {
485 | console.error("some error while deleting");
486 | }
487 | };
488 |
489 | export const checkConnectivity = async (
490 | client: WrappedWebdavClient,
491 | callbackFunc?: any
492 | ) => {
493 | if (
494 | !(
495 | client.webdavConfig.address.startsWith("http://") ||
496 | client.webdavConfig.address.startsWith("https://")
497 | )
498 | ) {
499 | const err = "Error: the url should start with http(s):// but it does not!";
500 | log.debug(err);
501 | if (callbackFunc !== undefined) {
502 | callbackFunc(err);
503 | }
504 | return false;
505 | }
506 | try {
507 | await client.init();
508 | const results = await getRemoteMeta(client, "/");
509 | if (results === undefined) {
510 | const err = "results is undefined";
511 | log.debug(err);
512 | if (callbackFunc !== undefined) {
513 | callbackFunc(err);
514 | }
515 | return false;
516 | }
517 | return true;
518 | } catch (err) {
519 | log.debug(err);
520 | if (callbackFunc !== undefined) {
521 | callbackFunc(err);
522 | }
523 | return false;
524 | }
525 | };
526 |
--------------------------------------------------------------------------------
/src/remoteForS3.ts:
--------------------------------------------------------------------------------
1 | import type { _Object } from "@aws-sdk/client-s3";
2 | import {
3 | DeleteObjectCommand,
4 | GetObjectCommand,
5 | HeadBucketCommand,
6 | HeadObjectCommand,
7 | HeadObjectCommandOutput,
8 | ListObjectsV2Command,
9 | ListObjectsV2CommandInput,
10 | PutObjectCommand,
11 | S3Client,
12 | S3ClientConfig,
13 | } from "@aws-sdk/client-s3";
14 | import { Upload } from "@aws-sdk/lib-storage";
15 | import { HttpHandler, HttpRequest, HttpResponse } from "@aws-sdk/protocol-http";
16 | import {
17 | FetchHttpHandler,
18 | FetchHttpHandlerOptions,
19 | } from "@aws-sdk/fetch-http-handler";
20 | // @ts-ignore
21 | import { requestTimeout } from "@aws-sdk/fetch-http-handler/dist-es/request-timeout";
22 | import { buildQueryString } from "@aws-sdk/querystring-builder";
23 | import { HttpHandlerOptions } from "@aws-sdk/types";
24 | import { Buffer } from "buffer";
25 | import * as mime from "mime-types";
26 | import { Vault, requestUrl, RequestUrlParam } from "obsidian";
27 | import { Readable } from "stream";
28 | import AggregateError from "aggregate-error";
29 | import {
30 | DEFAULT_CONTENT_TYPE,
31 | RemoteItem,
32 | S3Config,
33 | VALID_REQURL,
34 | } from "./baseTypes";
35 | import { decryptArrayBuffer, encryptArrayBuffer } from "./encrypt";
36 | import {
37 | bufferToArrayBuffer,
38 | mkdirpInVault, statFix,
39 | } from "./misc";
40 |
41 | export { S3Client } from "@aws-sdk/client-s3";
42 |
43 | import { log } from "./moreOnLog";
44 |
45 | ////////////////////////////////////////////////////////////////////////////////
46 | // special handler using Obsidian requestUrl
47 | ////////////////////////////////////////////////////////////////////////////////
48 |
49 | /**
50 | * This is close to origin implementation of FetchHttpHandler
51 | * https://github.com/aws/aws-sdk-js-v3/blob/main/packages/fetch-http-handler/src/fetch-http-handler.ts
52 | * that is released under Apache 2 License.
53 | * But this uses Obsidian requestUrl instead.
54 | */
55 | class ObsHttpHandler extends FetchHttpHandler {
56 | requestTimeoutInMs: number;
57 | constructor(options?: FetchHttpHandlerOptions) {
58 | super(options);
59 | this.requestTimeoutInMs =
60 | options === undefined ? undefined : options.requestTimeout;
61 | }
62 | async handle(
63 | request: HttpRequest,
64 | { abortSignal }: HttpHandlerOptions = {}
65 | ): Promise<{ response: HttpResponse }> {
66 | if (abortSignal?.aborted) {
67 | const abortError = new Error("Request aborted");
68 | abortError.name = "AbortError";
69 | return Promise.reject(abortError);
70 | }
71 |
72 | let path = request.path;
73 | if (request.query) {
74 | const queryString = buildQueryString(request.query);
75 | if (queryString) {
76 | path += `?${queryString}`;
77 | }
78 | }
79 |
80 | const { port, method } = request;
81 | const url = `${request.protocol}//${request.hostname}${
82 | port ? `:${port}` : ""
83 | }${path}`;
84 | const body =
85 | method === "GET" || method === "HEAD" ? undefined : request.body;
86 |
87 | const transformedHeaders: Record = {};
88 | for (const key of Object.keys(request.headers)) {
89 | const keyLower = key.toLowerCase();
90 | if (keyLower === "host" || keyLower === "content-length") {
91 | continue;
92 | }
93 | transformedHeaders[keyLower] = request.headers[key];
94 | }
95 |
96 | let contentType: string = undefined;
97 | if (transformedHeaders["content-type"] !== undefined) {
98 | contentType = transformedHeaders["content-type"];
99 | }
100 |
101 | let transformedBody: any = body;
102 | if (ArrayBuffer.isView(body)) {
103 | transformedBody = bufferToArrayBuffer(body);
104 | }
105 |
106 | const param: RequestUrlParam = {
107 | body: transformedBody,
108 | headers: transformedHeaders,
109 | method: method,
110 | url: url,
111 | contentType: contentType,
112 | };
113 |
114 | const raceOfPromises = [
115 | requestUrl(param).then((rsp) => {
116 | const headers = rsp.headers;
117 | const headersLower: Record = {};
118 | for (const key of Object.keys(headers)) {
119 | headersLower[key.toLowerCase()] = headers[key];
120 | }
121 | const stream = new ReadableStream({
122 | start(controller) {
123 | controller.enqueue(new Uint8Array(rsp.arrayBuffer));
124 | controller.close();
125 | },
126 | });
127 | return {
128 | response: new HttpResponse({
129 | headers: headersLower,
130 | statusCode: rsp.status,
131 | body: stream,
132 | }),
133 | };
134 | }),
135 | requestTimeout(this.requestTimeoutInMs),
136 | ];
137 |
138 | if (abortSignal) {
139 | raceOfPromises.push(
140 | new Promise((resolve, reject) => {
141 | abortSignal.onabort = () => {
142 | const abortError = new Error("Request aborted");
143 | abortError.name = "AbortError";
144 | reject(abortError);
145 | };
146 | })
147 | );
148 | }
149 | return Promise.race(raceOfPromises);
150 | }
151 | updateHttpClientConfig(key: never, value: never): void {
152 | // Implement this method if necessary
153 | }
154 |
155 | httpHandlerConfigs(): {} {
156 | // Implement this method if necessary
157 | return {};
158 | }
159 | }
160 |
161 | ////////////////////////////////////////////////////////////////////////////////
162 | // other stuffs
163 | ////////////////////////////////////////////////////////////////////////////////
164 |
/** Default values for every S3 setting; real values come from user config. */
export const DEFAULT_S3_CONFIG = {
  s3Endpoint: "",
  s3Region: "",
  s3AccessKeyID: "",
  s3SecretAccessKey: "",
  s3BucketName: "",
  bypassCorsLocally: true, // route through Obsidian requestUrl (ObsHttpHandler) when available
  partsConcurrency: 20, // parallel parts in multipart upload
  forcePathStyle: false, // path-style addressing, needed by some non-AWS endpoints
  disableS3MetadataSync: false // when true, skip writing modification_time metadata on upload
};
176 |
// re-export the S3 SDK listing-entry shape under a local alias
export type S3ObjectType = _Object;
178 |
179 | const fromS3ObjectToRemoteItem = (x: S3ObjectType) => {
180 | return {
181 | key: x.Key,
182 | lastModified: x.LastModified.valueOf(),
183 | size: x.Size,
184 | remoteType: "s3",
185 | etag: x.ETag,
186 | } as RemoteItem;
187 | };
188 |
189 | const fromS3HeadObjectToRemoteItem = (
190 | key: string,
191 | x: HeadObjectCommandOutput
192 | ) => {
193 | let lastModified = x.LastModified.valueOf();
194 | if (x.Metadata['modification_time'] != null) {
195 | lastModified = parseInt(x.Metadata['modification_time']);
196 | }
197 | return {
198 | key: key,
199 | lastModified: lastModified,
200 | size: x.ContentLength,
201 | remoteType: "s3",
202 | etag: x.ETag,
203 | } as RemoteItem;
204 | };
205 |
206 | export const getS3Client = (s3Config: S3Config) => {
207 | let endpoint = s3Config.s3Endpoint;
208 | if (!(endpoint.startsWith("http://") || endpoint.startsWith("https://"))) {
209 | endpoint = `https://${endpoint}`;
210 | }
211 |
212 | let s3Client: S3Client;
213 |
214 |
215 | const s3ClientConfig: S3ClientConfig = {
216 | region: s3Config.s3Region,
217 | endpoint: endpoint,
218 | forcePathStyle: s3Config.forcePathStyle,
219 | credentials: {
220 | accessKeyId: s3Config.s3AccessKeyID,
221 | secretAccessKey: s3Config.s3SecretAccessKey,
222 | }
223 | };
224 |
225 | if (VALID_REQURL && s3Config.bypassCorsLocally) {
226 | s3ClientConfig.requestHandler = new ObsHttpHandler();
227 | }
228 | s3Client = new S3Client(s3ClientConfig);
229 |
230 | s3Client.middlewareStack.add(
231 | (next, context) => (args) => {
232 | (args.request as any).headers["cache-control"] = "no-cache";
233 | return next(args);
234 | },
235 | {
236 | step: "build",
237 | }
238 | );
239 |
240 | return s3Client;
241 | };
242 |
243 | export const getRemoteMeta = async (
244 | s3Client: S3Client,
245 | s3Config: S3Config,
246 | fileOrFolderPath: string
247 | ) => {
248 | const res = await s3Client.send(
249 | new HeadObjectCommand({
250 | Bucket: s3Config.s3BucketName,
251 | Key: fileOrFolderPath,
252 | })
253 | );
254 |
255 | return fromS3HeadObjectToRemoteItem(fileOrFolderPath, res);
256 | };
257 |
258 | export const uploadToRemote = async (
259 | s3Client: S3Client,
260 | s3Config: S3Config,
261 | fileOrFolderPath: string,
262 | vault: Vault,
263 | isRecursively: boolean = false,
264 | password: string = "",
265 | remoteEncryptedKey: string = "",
266 | uploadRaw: boolean = false,
267 | rawContent: string | ArrayBuffer = ""
268 | ) => {
269 | let uploadFile = fileOrFolderPath;
270 | if (password !== "") {
271 | uploadFile = remoteEncryptedKey;
272 | }
273 | const isFolder = fileOrFolderPath.endsWith("/");
274 |
275 | if (isFolder && isRecursively) {
276 | throw Error("upload function doesn't implement recursive function yet!");
277 | } else if (isFolder && !isRecursively) {
278 | if (uploadRaw) {
279 | throw Error(`you specify uploadRaw, but you also provide a folder key!`);
280 | }
281 | // folder
282 | const contentType = DEFAULT_CONTENT_TYPE;
283 | await s3Client.send(
284 | new PutObjectCommand({
285 | Bucket: s3Config.s3BucketName,
286 | Key: uploadFile,
287 | Body: "",
288 | ContentType: contentType,
289 | })
290 | );
291 | return await getRemoteMeta(s3Client, s3Config, uploadFile);
292 | } else {
293 | // file
294 | // we ignore isRecursively parameter here
295 | let contentType = DEFAULT_CONTENT_TYPE;
296 | if (password === "") {
297 | contentType =
298 | mime.contentType(
299 | mime.lookup(fileOrFolderPath) || DEFAULT_CONTENT_TYPE
300 | ) || DEFAULT_CONTENT_TYPE;
301 | }
302 | let localContent = undefined;
303 | if (uploadRaw) {
304 | if (typeof rawContent === "string") {
305 | localContent = new TextEncoder().encode(rawContent).buffer;
306 | } else {
307 | localContent = rawContent;
308 | }
309 | } else {
310 | localContent = await vault.adapter.readBinary(fileOrFolderPath);
311 | }
312 | let remoteContent = localContent;
313 | if (password !== "") {
314 | remoteContent = await encryptArrayBuffer(localContent, password);
315 | }
316 |
317 | const bytesIn5MB = 5242880;
318 | const body = new Uint8Array(remoteContent);
319 | const fileStat = await statFix(vault, fileOrFolderPath);
320 |
321 | let mtime = fileStat == undefined ? undefined : fileStat.mtime.toString();
322 |
323 | let uploadParams : any = {
324 | Bucket: s3Config.s3BucketName,
325 | Key: uploadFile,
326 | Body: body,
327 | ContentType: contentType
328 | };
329 |
330 | if (!s3Config.disableS3MetadataSync) {
331 | uploadParams["Metadata"] = {modification_time: mtime};
332 | }
333 |
334 | const upload = new Upload({
335 | client: s3Client,
336 | queueSize: s3Config.partsConcurrency, // concurrency
337 | partSize: bytesIn5MB, // minimal 5MB by default
338 | leavePartsOnError: false,
339 | params: uploadParams
340 | });
341 | await upload.done();
342 |
343 | return await getRemoteMeta(s3Client, s3Config, uploadFile);
344 | }
345 | };
346 |
347 | export const listFromRemote = async (
348 | s3Client: S3Client,
349 | s3Config: S3Config,
350 | prefix?: string
351 | ) => {
352 | const confCmd = {
353 | Bucket: s3Config.s3BucketName,
354 | } as ListObjectsV2CommandInput;
355 | if (prefix !== undefined) {
356 | confCmd.Prefix = prefix;
357 | }
358 |
359 | const contents = [] as _Object[];
360 |
361 | let isTruncated = true;
362 | do {
363 | const rsp = await s3Client.send(new ListObjectsV2Command(confCmd));
364 |
365 | if (rsp.$metadata.httpStatusCode !== 200) {
366 | throw Error("some thing bad while listing remote!");
367 | }
368 | if (rsp.Contents === undefined) {
369 | break;
370 | }
371 | contents.push(...rsp.Contents);
372 |
373 | isTruncated = rsp.IsTruncated;
374 | confCmd.ContinuationToken = rsp.NextContinuationToken;
375 | if (
376 | isTruncated &&
377 | (confCmd.ContinuationToken === undefined ||
378 | confCmd.ContinuationToken === "")
379 | ) {
380 | throw Error("isTruncated is true but no continuationToken provided");
381 | }
382 | } while (isTruncated);
383 |
384 | // ensemble fake rsp
385 | return {
386 | Contents: contents.map((x) => fromS3ObjectToRemoteItem(x)),
387 | };
388 | };
389 |
/**
 * The Body of resp of aws GetObject has mix types
 * and we want to get ArrayBuffer here.
 * See https://github.com/aws/aws-sdk-js-v3/issues/1877
 * @param b The Body of GetObject
 * @returns Promise<ArrayBuffer>
 */
const getObjectBodyToArrayBuffer = async (
  b: Readable | ReadableStream | Blob
) => {
  if (b instanceof Readable) {
    // node stream: collect all chunks, then concat into one buffer
    return (await new Promise((resolve, reject) => {
      const chunks: Uint8Array[] = [];
      b.on("data", (chunk) => chunks.push(chunk));
      b.on("error", reject);
      b.on("end", () => resolve(bufferToArrayBuffer(Buffer.concat(chunks))));
    })) as ArrayBuffer;
  } else if (b instanceof ReadableStream) {
    // web stream: let Response drain it into an ArrayBuffer
    return await new Response(b, {}).arrayBuffer();
  } else if (b instanceof Blob) {
    return await b.arrayBuffer();
  } else {
    throw TypeError(`The type of ${b} is not one of the supported types`);
  }
};
415 |
416 | const downloadFromRemoteRaw = async (
417 | s3Client: S3Client,
418 | s3Config: S3Config,
419 | fileOrFolderPath: string
420 | ) => {
421 | const data = await s3Client.send(
422 | new GetObjectCommand({
423 | Bucket: s3Config.s3BucketName,
424 | Key: fileOrFolderPath,
425 | })
426 | );
427 | const bodyContents = await getObjectBodyToArrayBuffer(data.Body);
428 | return bodyContents;
429 | };
430 |
431 | export const downloadFromRemote = async (
432 | s3Client: S3Client,
433 | s3Config: S3Config,
434 | fileOrFolderPath: string,
435 | vault: Vault,
436 | mtime: number,
437 | password: string = "",
438 | remoteEncryptedKey: string = "",
439 | skipSaving: boolean = false
440 | ) => {
441 | const isFolder = fileOrFolderPath.endsWith("/");
442 |
443 | if (!skipSaving) {
444 | await mkdirpInVault(fileOrFolderPath, vault);
445 | }
446 |
447 | // the file is always local file
448 | // we need to encrypt it
449 |
450 | if (isFolder) {
451 | // mkdirp locally is enough
452 | // do nothing here
453 | return new ArrayBuffer(0);
454 | } else {
455 | let downloadFile = fileOrFolderPath;
456 | if (password !== "") {
457 | downloadFile = remoteEncryptedKey;
458 | }
459 | const remoteContent = await downloadFromRemoteRaw(
460 | s3Client,
461 | s3Config,
462 | downloadFile
463 | );
464 | let localContent = remoteContent;
465 | if (password !== "") {
466 | localContent = await decryptArrayBuffer(remoteContent, password);
467 | }
468 | if (!skipSaving) {
469 | await vault.adapter.writeBinary(fileOrFolderPath, localContent, {
470 | mtime: mtime,
471 | });
472 | }
473 | return localContent;
474 | }
475 | };
476 |
477 | /**
478 | * This function deals with file normally and "folder" recursively.
479 | * @param s3Client
480 | * @param s3Config
481 | * @param fileOrFolderPath
482 | * @returns
483 | */
484 | export const deleteFromRemote = async (
485 | s3Client: S3Client,
486 | s3Config: S3Config,
487 | fileOrFolderPath: string,
488 | password: string = "",
489 | remoteEncryptedKey: string = ""
490 | ) => {
491 | if (fileOrFolderPath === "/") {
492 | return;
493 | }
494 | let remoteFileName = fileOrFolderPath;
495 | if (password !== "") {
496 | remoteFileName = remoteEncryptedKey;
497 | }
498 | await s3Client.send(
499 | new DeleteObjectCommand({
500 | Bucket: s3Config.s3BucketName,
501 | Key: remoteFileName,
502 | })
503 | );
504 |
505 | if (fileOrFolderPath.endsWith("/") && password === "") {
506 | const x = await listFromRemote(s3Client, s3Config, fileOrFolderPath);
507 | x.Contents.forEach(async (element) => {
508 | await s3Client.send(
509 | new DeleteObjectCommand({
510 | Bucket: s3Config.s3BucketName,
511 | Key: element.key,
512 | })
513 | );
514 | });
515 | } else if (fileOrFolderPath.endsWith("/") && password !== "") {
516 | // TODO
517 | } else {
518 | // pass
519 | }
520 | };
521 |
522 | /**
523 | * Check the config of S3 by heading bucket
524 | * https://stackoverflow.com/questions/50842835
525 | * @param s3Client
526 | * @param s3Config
527 | * @returns
528 | */
529 | export const checkConnectivity = async (
530 | s3Client: S3Client,
531 | s3Config: S3Config,
532 | callbackFunc?: any
533 | ) => {
534 | try {
535 | const results = await s3Client.send(
536 | new HeadBucketCommand({ Bucket: s3Config.s3BucketName })
537 | );
538 | if (
539 | results === undefined ||
540 | results.$metadata === undefined ||
541 | results.$metadata.httpStatusCode === undefined
542 | ) {
543 | const err = "results or $metadata or httStatusCode is undefined";
544 | log.debug(err);
545 | if (callbackFunc !== undefined) {
546 | callbackFunc(err);
547 | }
548 | return false;
549 | }
550 | return results.$metadata.httpStatusCode === 200;
551 | } catch (err) {
552 | log.debug(err);
553 | if (callbackFunc !== undefined) {
554 | if (s3Config.s3Endpoint.contains(s3Config.s3BucketName)) {
555 | const err2 = new AggregateError([
556 | err,
557 | new Error(
558 | "Maybe you've included the bucket name inside the endpoint setting. Please remove the bucket name and try again."
559 | ),
560 | ]);
561 | callbackFunc(err2);
562 | } else {
563 | callbackFunc(err);
564 | }
565 | }
566 |
567 | return false;
568 | }
569 | };
570 |
--------------------------------------------------------------------------------