├── .npmrc ├── .mise.toml ├── hooks ├── pre-push └── commit-msg ├── .prettierrc.json ├── .vscode └── settings.json ├── jest.config.js ├── .gitignore ├── src ├── errors.ts ├── types.ts ├── util │ ├── glob.ts │ ├── path.ts │ ├── collection-helper.ts │ ├── path.test.ts │ ├── glob.test.ts │ ├── collection-helper.test.ts │ ├── strings.ts │ ├── diacritics-map.ts │ └── strings.test.ts ├── external │ └── chinese-tokenizer.d.ts ├── ui │ ├── component │ │ ├── ObsidianButton.svelte │ │ ├── ObsidianIconButton.svelte │ │ ├── InputDialog.ts │ │ └── CustomDictionaryWordAdd.svelte │ ├── CustomDictionaryWordAddModal.ts │ ├── ProviderStatusBar.ts │ └── popup-commands.ts ├── tokenizer │ ├── tokenizers │ │ ├── ArabicTokenizer.ts │ │ ├── ChineseTokenizer.ts │ │ ├── AbstractTokenizer.ts │ │ ├── JapaneseTokenizer.ts │ │ ├── DefaultTokenizer.ts │ │ ├── EnglishOnlyTokenizer.ts │ │ ├── EnglishOnlyTokenizer.test.ts │ │ ├── JapaneseTokenizer.test.ts │ │ ├── KoreanTokenizer.ts │ │ ├── DefaultTokenizer.test.ts │ │ └── KoreanTokenizer.test.ts │ ├── TokenizeStrategy.ts │ └── tokenizer.ts ├── setting │ └── settings-helper.ts ├── option │ ├── ColumnDelimiter.ts │ └── DescriptionOnSuggestion.ts ├── provider │ ├── MatchStrategy.ts │ ├── SpecificMatchStrategy.ts │ ├── CurrentFileWordProvider.ts │ ├── FrontMatterWordProvider.ts │ ├── CurrentVaultWordProvider.ts │ ├── InternalLinkWordProvider.ts │ ├── CustomDictionaryWordProvider.ts │ └── suggester.ts ├── keys.ts ├── model │ └── Word.ts ├── storage │ └── SelectionHistoryStorage.ts ├── main.ts └── app-helper.ts ├── pull_request_template.md ├── .github ├── copilot-instructions.md ├── workflows │ ├── tests.yaml │ └── release.yaml └── ISSUE_TEMPLATE │ └── bug-report.yaml ├── manifest.json ├── manifest-beta.json ├── tsconfig.json ├── versions.json ├── LICENSE ├── docs └── release.md ├── version-bump.mjs ├── package.json ├── README.md ├── .releaserc.mjs ├── esbuild.config.mjs ├── AGENTS.md └── CLAUDE.md /.npmrc: 
-------------------------------------------------------------------------------- 1 | tag-version-prefix="" 2 | -------------------------------------------------------------------------------- /.mise.toml: -------------------------------------------------------------------------------- 1 | [tools] 2 | node = "22" 3 | -------------------------------------------------------------------------------- /hooks/pre-push: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | pnpm pre:push 3 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": ["prettier-plugin-organize-imports"] 3 | } 4 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "typescript.experimental.useTsgo": true 3 | } 4 | -------------------------------------------------------------------------------- /jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "transform": { 3 | "^.+\\.tsx?$": "esbuild-jest" 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Intellij 2 | *.iml 3 | .idea 4 | 5 | # npm 6 | node_modules 7 | 8 | # build 9 | main.js 10 | *.js.map 11 | 12 | # test 13 | /coverage 14 | -------------------------------------------------------------------------------- /src/errors.ts: -------------------------------------------------------------------------------- 1 | export class ExhaustiveError extends Error { 2 | constructor(value: never, message = `Unsupported type: ${value}`) { 3 | super(message); 4 | } 5 | } 6 | 
-------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | export type PartialRequired = T & { 2 | [P in K]-?: T[P]; 3 | }; 4 | 5 | export function isPresent(arg: T | null | undefined): arg is T { 6 | return arg != null; 7 | } 8 | -------------------------------------------------------------------------------- /pull_request_template.md: -------------------------------------------------------------------------------- 1 | Sorry, I would not accept the pull requests except for the following cases. 2 | 3 | Fix obvious bugs 4 | Fix typo or wrong documentation 5 | If I ask for it in the GitHub issues or the discussions 6 | -------------------------------------------------------------------------------- /.github/copilot-instructions.md: -------------------------------------------------------------------------------- 1 | ## 使用言語について 2 | 3 | - 返答は日本語で行ってください 4 | - 途中の推論過程は英語で問題ありません 5 | 6 | ## 開発プロジェクトについて 7 | 8 | - Obsidianプラグインの開発です 9 | - Windows/macOS/Linuxすべてで動かす必要があります 10 | - 特にmacOSやSafariで動作しないトラブルが多いです 11 | 12 | ### 利用技術 13 | 14 | - Node.js v22 15 | - pnpm (フォーマッター) 16 | - Jest (テスト) -------------------------------------------------------------------------------- /manifest.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "various-complements", 3 | "name": "Various Complements", 4 | "version": "10.8.0", 5 | "minAppVersion": "0.16.0", 6 | "description": "This plugin enables you to complete words like the auto-completion of IDE", 7 | "author": "tadashi-aikawa", 8 | "authorUrl": "https://github.com/tadashi-aikawa", 9 | "isDesktopOnly": false 10 | } -------------------------------------------------------------------------------- /manifest-beta.json: -------------------------------------------------------------------------------- 1 | { 2 | "id": "various-complements", 3 | "name": "Various 
Complements", 4 | "version": "10.8.0", 5 | "minAppVersion": "0.16.0", 6 | "description": "This plugin enables you to complete words like the auto-completion of IDE", 7 | "author": "tadashi-aikawa", 8 | "authorUrl": "https://github.com/tadashi-aikawa", 9 | "isDesktopOnly": false 10 | } -------------------------------------------------------------------------------- /src/util/glob.ts: -------------------------------------------------------------------------------- 1 | import { minimatch } from "minimatch"; 2 | 3 | export function isMatchedGlobPatterns( 4 | path: string, 5 | patterns: string[], 6 | ): boolean { 7 | if (patterns.length === 0) { 8 | return false; 9 | } 10 | 11 | try { 12 | return patterns.some((p) => minimatch(path, p)); 13 | } catch (error) { 14 | console.warn(`Invalid glob pattern detected: ${error}`); 15 | return false; 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/external/chinese-tokenizer.d.ts: -------------------------------------------------------------------------------- 1 | declare module "chinese-tokenizer" { 2 | export interface TokenizedResult { 3 | text: string; 4 | traditional: string; 5 | simplified: string; 6 | position: { 7 | offset: number; 8 | line: number; 9 | column: number; 10 | }; 11 | matches: unknown[]; 12 | } 13 | 14 | export const load: ( 15 | content: string, 16 | ) => (content: string) => TokenizedResult[]; 17 | } 18 | -------------------------------------------------------------------------------- /src/ui/component/ObsidianButton.svelte: -------------------------------------------------------------------------------- 1 | 12 | 13 | 21 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/svelte/tsconfig.json", 3 | "compilerOptions": { 4 | "strict": true, 5 | "types": ["svelte", "jest"], 6 | "inlineSources": 
true, 7 | "module": "ESNext", 8 | "target": "es2018", 9 | "allowJs": false, 10 | "strictPropertyInitialization": false, 11 | "moduleResolution": "bundler", 12 | "importHelpers": true, 13 | "isolatedModules": true, 14 | "lib": ["dom", "es5", "es6", "es7"], 15 | "allowSyntheticDefaultImports": true 16 | }, 17 | "include": ["**/*.ts", "**/*.mts"] 18 | } 19 | -------------------------------------------------------------------------------- /.github/workflows/tests.yaml: -------------------------------------------------------------------------------- 1 | name: "Tests" 2 | 3 | on: 4 | push: 5 | paths: 6 | - ".github/**/*" 7 | - "src/*" 8 | - "src/**/*" 9 | - "*.js" 10 | - "*.json" 11 | 12 | jobs: 13 | test: 14 | runs-on: ubuntu-latest 15 | steps: 16 | - uses: actions/checkout@v4 17 | with: 18 | repo-token: ${{ secrets.GITHUB_TOKEN }} 19 | - uses: actions/setup-node@v4 20 | with: 21 | node-version: "22" 22 | 23 | - run: npm install -g corepack@latest && corepack enable 24 | - run: pnpm run ci 25 | -------------------------------------------------------------------------------- /src/tokenizer/tokenizers/ArabicTokenizer.ts: -------------------------------------------------------------------------------- 1 | import type { FactoryArgs } from "../tokenizer"; 2 | import { DefaultTokenizer } from "./DefaultTokenizer"; 3 | 4 | const INPUT_ARABIC_TRIM_CHAR_PATTERN = /[\r\n\t\[\]/:?!=()<>"'.,|;*~ `،؛]/g; 5 | const INDEXING_ARABIC_TRIM_CHAR_PATTERN = /[\r\n\t\[\]$/:?!=()<>"'.,|;*~ `،؛]/g; 6 | 7 | export class ArabicTokenizer extends DefaultTokenizer { 8 | constructor(_args?: FactoryArgs) { 9 | super(); 10 | this.inputTrimCharPattern = INPUT_ARABIC_TRIM_CHAR_PATTERN; 11 | this.indexingTrimCharPattern = INDEXING_ARABIC_TRIM_CHAR_PATTERN; 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/setting/settings-helper.ts: -------------------------------------------------------------------------------- 1 | import type { TextComponent } from 
"obsidian"; 2 | 3 | export namespace TextComponentEvent { 4 | export function onChange( 5 | component: TextComponent, 6 | handler: (value: string) => void, 7 | option?: { className?: string }, 8 | ): TextComponent { 9 | component.inputEl.addEventListener("change", async (ev) => { 10 | if (!(ev.target instanceof HTMLInputElement)) { 11 | return; 12 | } 13 | 14 | handler(ev.target.value); 15 | }); 16 | if (option?.className) { 17 | component.inputEl.className = option.className; 18 | } 19 | return component; 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: "Release" 2 | 3 | on: workflow_dispatch 4 | permissions: 5 | contents: write 6 | 7 | jobs: 8 | release: 9 | runs-on: ubuntu-latest 10 | steps: 11 | - uses: actions/checkout@v4 12 | - uses: actions/setup-node@v4 13 | with: 14 | node-version: "22" 15 | - run: npm install -g corepack@latest && corepack enable && npm i -D --no-save conventional-changelog-conventionalcommits @semantic-release/git @semantic-release/exec 16 | - uses: cycjimmy/semantic-release-action@v4 17 | with: 18 | # バージョンは固定にしておいたほうがいい 19 | semantic_version: 24.2.1 20 | env: 21 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 22 | -------------------------------------------------------------------------------- /src/util/path.ts: -------------------------------------------------------------------------------- 1 | export function basename(path: string, ext?: string): string { 2 | const name = path.match(/.+[\\/]([^\\/]+)[\\/]?$/)?.[1] ?? path; 3 | return ext && name.endsWith(ext) ? name.replace(ext, "") : name; 4 | } 5 | 6 | export function extname(path: string): string { 7 | const ext = basename(path).split(".").slice(1).pop(); 8 | return ext ? `.${ext}` : ""; 9 | } 10 | 11 | export function dirname(path: string): string { 12 | return path.match(/(.+)[\\/].+$/)?.[1] ?? 
"."; 13 | } 14 | 15 | export function isURL(path: string): boolean { 16 | return Boolean(path.match(new RegExp("^https?://"))); 17 | } 18 | 19 | export const DEFAULT_HISTORIES_PATH = 20 | ".obsidian/plugins/various-complements/histories.json"; 21 | -------------------------------------------------------------------------------- /src/option/ColumnDelimiter.ts: -------------------------------------------------------------------------------- 1 | type Delimiter = "\t" | "," | "|"; 2 | 3 | export class ColumnDelimiter { 4 | private static readonly _values: ColumnDelimiter[] = []; 5 | 6 | static readonly TAB = new ColumnDelimiter("Tab", "\t"); 7 | static readonly COMMA = new ColumnDelimiter("Comma", ","); 8 | static readonly PIPE = new ColumnDelimiter("Pipe", "|"); 9 | 10 | private constructor( 11 | readonly name: string, 12 | readonly value: Delimiter, 13 | ) { 14 | ColumnDelimiter._values.push(this); 15 | } 16 | 17 | static fromName(name: string): ColumnDelimiter { 18 | return ColumnDelimiter._values.find((x) => x.name === name)!; 19 | } 20 | 21 | static values(): ColumnDelimiter[] { 22 | return ColumnDelimiter._values; 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /src/ui/component/ObsidianIconButton.svelte: -------------------------------------------------------------------------------- 1 | 14 | 15 |
16 | 25 |
26 | 27 | 42 | -------------------------------------------------------------------------------- /versions.json: -------------------------------------------------------------------------------- 1 | { 2 | "7.2.0": "0.16.0", 3 | "7.1.1": "0.15.4", 4 | "7.0.2": "0.15.4", 5 | "7.0.1": "0.15.3", 6 | "7.0.0": "0.14.6", 7 | "6.0.0": "0.14.6", 8 | "5.11.0": "0.13.11", 9 | "1.0.1": "0.9.12", 10 | "1.0.0": "0.9.7", 11 | "7.3.0": "0.16.0", 12 | "8.0.0": "0.16.0", 13 | "8.1.0": "0.16.0", 14 | "8.2.0": "0.16.0", 15 | "8.2.1": "0.16.0", 16 | "8.2.2": "0.16.0", 17 | "8.2.3": "0.16.0", 18 | "8.3.0": "0.16.0", 19 | "8.3.1": "0.16.0", 20 | "8.3.2": "0.16.0", 21 | "8.3.3": "0.16.0", 22 | "8.4.0": "0.16.0", 23 | "8.4.1": "0.16.0", 24 | "9.0.0": "0.16.0", 25 | "9.0.1": "0.16.0", 26 | "9.1.0": "0.16.0", 27 | "9.2.0": "0.16.0", 28 | "9.2.1": "0.16.0", 29 | "9.3.0": "0.16.0", 30 | "9.4.0": "0.16.0", 31 | "10.0.0": "0.16.0", 32 | "10.0.1": "0.16.0", 33 | "10.0.2": "0.16.0", 34 | "10.0.3": "0.16.0", 35 | "10.1.0": "0.16.0", 36 | "10.2.0": "0.16.0", 37 | "10.3.0": "0.16.0", 38 | "10.4.0": "0.16.0", 39 | "10.5.0": "0.16.0", 40 | "10.5.1": "0.16.0", 41 | "10.6.0": "0.16.0", 42 | "10.7.0": "0.16.0", 43 | "10.7.1": "0.16.0", 44 | "10.7.2": "0.16.0", 45 | "10.8.0": "0.16.0" 46 | } -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2023 Tadashi Aikawa 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice 
and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /src/option/DescriptionOnSuggestion.ts: -------------------------------------------------------------------------------- 1 | import type { Word } from "../model/Word"; 2 | import { basename } from "../util/path"; 3 | 4 | export class DescriptionOnSuggestion { 5 | private static readonly _values: DescriptionOnSuggestion[] = []; 6 | 7 | static readonly NONE = new DescriptionOnSuggestion("None", () => null); 8 | static readonly SHORT = new DescriptionOnSuggestion("Short", (word) => { 9 | if (!word.description) { 10 | return null; 11 | } 12 | return word.type === "customDictionary" 13 | ? word.description 14 | : basename(word.description); 15 | }); 16 | static readonly FULL = new DescriptionOnSuggestion( 17 | "Full", 18 | (word) => word.description ?? 
null, 19 | ); 20 | 21 | private constructor( 22 | readonly name: string, 23 | readonly toDisplay: (word: Word) => string | null, 24 | ) { 25 | DescriptionOnSuggestion._values.push(this); 26 | } 27 | 28 | static fromName(name: string): DescriptionOnSuggestion { 29 | return DescriptionOnSuggestion._values.find((x) => x.name === name)!; 30 | } 31 | 32 | static values(): DescriptionOnSuggestion[] { 33 | return DescriptionOnSuggestion._values; 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/tokenizer/TokenizeStrategy.ts: -------------------------------------------------------------------------------- 1 | type Name = 2 | | "default" 3 | | "english-only" 4 | | "japanese" 5 | | "arabic" 6 | | "chinese" 7 | | "korean"; 8 | 9 | export class TokenizeStrategy { 10 | private static readonly _values: TokenizeStrategy[] = []; 11 | 12 | static readonly DEFAULT = new TokenizeStrategy("default", 3, 5, true); 13 | static readonly ENGLISH_ONLY = new TokenizeStrategy( 14 | "english-only", 15 | 3, 16 | 5, 17 | true, 18 | ); 19 | static readonly JAPANESE = new TokenizeStrategy("japanese", 2, 2, false); 20 | static readonly ARABIC = new TokenizeStrategy("arabic", 3, 3, false); 21 | static readonly CHINESE = new TokenizeStrategy("chinese", 1, 2, false); 22 | static readonly KOREAN = new TokenizeStrategy("korean", 1, 2, true); 23 | 24 | private constructor( 25 | readonly name: Name, 26 | readonly triggerThreshold: number, 27 | readonly indexingThreshold: number, 28 | readonly canTreatUnderscoreAsPartOfWord: boolean, 29 | ) { 30 | TokenizeStrategy._values.push(this); 31 | } 32 | 33 | static fromName(name: string): TokenizeStrategy { 34 | return TokenizeStrategy._values.find((x) => x.name === name)!; 35 | } 36 | 37 | static values(): TokenizeStrategy[] { 38 | return TokenizeStrategy._values; 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug-report.yaml: 
-------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Bug report form. 3 | title: "[Bug] " 4 | assignees: 5 | - tadashi-aikawa 6 | body: 7 | - type: textarea 8 | id: summary 9 | attributes: 10 | label: Summary 11 | validations: 12 | required: true 13 | - type: textarea 14 | id: steps-to-reproduce 15 | attributes: 16 | label: Steps to Reproduce in the Sandbox Vault 17 | validations: 18 | required: true 19 | - type: textarea 20 | id: expected-behavior 21 | attributes: 22 | label: Expected Behavior 23 | validations: 24 | required: true 25 | - type: textarea 26 | id: actual-behavior 27 | attributes: 28 | label: Actual Behavior 29 | validations: 30 | required: true 31 | - type: input 32 | id: obsidian-version 33 | attributes: 34 | label: Obsidian Version 35 | validations: 36 | required: true 37 | - type: input 38 | id: plugin-version 39 | attributes: 40 | label: Various Complements Version 41 | validations: 42 | required: true 43 | - type: input 44 | id: os 45 | attributes: 46 | label: OS 47 | placeholder: Windows 48 | validations: 49 | required: true 50 | - type: textarea 51 | id: notes 52 | attributes: 53 | label: Notes 54 | -------------------------------------------------------------------------------- /hooks/commit-msg: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | commit_message_first_line="$(cat "$1")" 4 | 5 | if [[ "${commit_message_first_line}" == "fixup! 
"* ]]; then 6 | exit 0 7 | fi 8 | 9 | commit_message_regex='^([^(:!]+)\(?([^():!]*)\)?!?:\ (.+)$' 10 | if [[ $commit_message_first_line =~ $commit_message_regex ]]; then 11 | type="${BASH_REMATCH[1]}" 12 | scope="${BASH_REMATCH[2]}" 13 | description="${BASH_REMATCH[3]}" 14 | fi 15 | 16 | if [[ -z ${type} && -z ${scope} && -z ${description} ]]; then 17 | echo "Invalid commit message format -> ${commit_message_first_line}" 18 | exit 1 19 | fi 20 | 21 | # $1: type 22 | function validateType() { 23 | case $1 in 24 | feat | fix | style | docs | refactor | test | ci | build | dev | chore) ;; 25 | *) 26 | echo "Invalid type: $1" 27 | echo ">> feat | fix | style | docs | refactor | test | ci | build | dev | chore" 28 | exit 1 29 | ;; 30 | esac 31 | } 32 | 33 | # $1: scope 34 | function validateScope() { 35 | case $1 in 36 | 'current file' | 'current vault' | 'custom dictionary' | 'internal link' | 'front matter' | "") ;; 37 | *) 38 | echo "Invalid scope: $1" 39 | echo ">> current file | current vault | custom dictionary | internal link | front matter" 40 | exit 1 41 | ;; 42 | esac 43 | } 44 | 45 | validateType "${type}" 46 | 47 | tr "/" \\n <<<"${scope}" | while read -r f; do validateScope "$f"; done 48 | -------------------------------------------------------------------------------- /src/tokenizer/tokenizers/ChineseTokenizer.ts: -------------------------------------------------------------------------------- 1 | import chineseTokenizer from "chinese-tokenizer"; 2 | import { AbstractTokenizer } from "./AbstractTokenizer"; 3 | 4 | /** 5 | * Chinese needs original logic. 6 | */ 7 | export class ChineseTokenizer extends AbstractTokenizer { 8 | _tokenize: ReturnType; 9 | 10 | static create(dict: string): ChineseTokenizer { 11 | const ins = new ChineseTokenizer(); 12 | ins._tokenize = chineseTokenizer.load(dict); 13 | return ins; 14 | } 15 | 16 | tokenize(content: string, raw?: boolean): string[] { 17 | return content 18 | .split(raw ? 
/ /g : this.getTrimPattern("indexing")) 19 | .filter((x) => x !== "") 20 | .flatMap((x) => this._tokenize(x)) 21 | .map((x) => x.text); 22 | } 23 | 24 | recursiveTokenize(content: string): { word: string; offset: number }[] { 25 | const tokens: string[] = this._tokenize(content).map((x) => x.text); 26 | 27 | const ret = []; 28 | for (let i = 0; i < tokens.length; i++) { 29 | if ( 30 | i === 0 || 31 | tokens[i].length !== 1 || 32 | !Boolean(tokens[i].match(this.getTrimPattern("input"))) 33 | ) { 34 | ret.push({ 35 | word: tokens.slice(i).join(""), 36 | offset: tokens.slice(0, i).join("").length, 37 | }); 38 | } 39 | } 40 | 41 | return ret; 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /docs/release.md: -------------------------------------------------------------------------------- 1 | # Release Guide 2 | 3 | 本リポジトリのリリース手順と補助スクリプトの使い方です。実行は手動で行ってください。 4 | 5 | ## 前提 6 | 7 | - GitHub CLI が認証済みで利用可能であること(`gh auth status`) 8 | - デフォルトブランチ上で作業していること(通常は `main`) 9 | 10 | ## 手順(順序付き) 11 | 12 | 1. ローカルの未push確認: `git status` で未コミットが無いことを確認し、`git fetch` 後に `HEAD` が upstream に対して ahead でないことを確認(ahead なら `git push`)。 13 | 2. CI成功の確認: GitHub Actions の Tests ワークフロー(`.github/workflows/tests.yaml`)が最新コミットで成功していることを確認(必要なら `gh run list --workflow tests.yaml --limit 1` 等で確認)。 14 | 3. リリース実行: GitHub Actions の Release ワークフロー(`.github/workflows/release.yaml`)を `workflow_dispatch` で手動実行(UI から or `gh workflow run release.yaml --ref `)。このリポジトリは semantic‑release によりバージョン決定とリリース作成が自動化されます。 15 | 4. リリース完了確認: Actions の Release ジョブ成功を待ち、GitHub の Releases ページで新しいリリース(タグとリリースノート)が作成されたことを確認。 16 | 5. 関連Issue対応: コミットメッセージで参照された `#番号` の Issue に「リリースした旨」をコメントして Close。コメント例: `Released in vX.Y.Z 🚀`(必要に応じて投稿者へメンション)。 17 | 6. Bluesky投稿準備: プロダクト名・バージョン・主要変更点(箇条書き)・リリースURLで文面を整え、手動で投稿。 18 | 7. 
リポジトリ最新化: semantic‑release が push した `chore(release): x.y.z` を取り込むため `git pull`。 19 | 20 | ## 補助コマンド 21 | 22 | `pnpm release` で以下を順に実施します(非破壊チェック+自動実行)。 23 | 24 | - ローカルの未コミット/未push確認 25 | - Tests ワークフローの最新結果確認 26 | - Release ワークフローの手動トリガー(`workflow_dispatch`) 27 | - Release 実行のウォッチと最新リリース情報の取得 28 | - Issue コメント/Bluesky 投稿用テンプレートの出力 29 | - `git pull` によるローカルの最新化 30 | 31 | 実際の投稿や Issue コメントは出力をコピーして手動で行ってください。 32 | 33 | -------------------------------------------------------------------------------- /version-bump.mjs: -------------------------------------------------------------------------------- 1 | import { readFileSync, writeFileSync } from "fs"; 2 | import { exit } from "process"; 3 | 4 | function updateVersion(version) { 5 | const packageJson = JSON.parse(readFileSync("package.json", "utf8")); 6 | packageJson.version = version; 7 | writeFileSync("package.json", JSON.stringify(packageJson, null, " ")); 8 | 9 | const manifestBeta = JSON.parse(readFileSync("manifest-beta.json", "utf8")); 10 | manifestBeta.version = version; 11 | writeFileSync("manifest-beta.json", JSON.stringify(manifestBeta, null, " ")); 12 | if (version.includes("beta")) { 13 | return; 14 | } 15 | 16 | const manifest = JSON.parse(readFileSync("manifest.json", "utf8")); 17 | const { minAppVersion } = manifest; 18 | manifest.version = version; 19 | writeFileSync("manifest.json", JSON.stringify(manifest, null, " ")); 20 | 21 | // update versions.json with target version and minAppVersion from manifest.json 22 | const versions = JSON.parse(readFileSync("versions.json", "utf8")); 23 | versions[version] = minAppVersion; 24 | writeFileSync("versions.json", JSON.stringify(versions, null, " ")); 25 | } 26 | 27 | const version = process.argv[2]; 28 | if (!version) { 29 | console.error("Required: version (ex: node version-bump.mjs 1.2.3)"); 30 | exit(1); 31 | } 32 | if (!version.match(/\d+\.\d+\.\d+/)) { 33 | console.error("The version is not valid (ex: node version-bump.mjs 1.2.3)"); 34 | exit(1); 35 | } 
36 | 37 | updateVersion(version); 38 | -------------------------------------------------------------------------------- /src/provider/MatchStrategy.ts: -------------------------------------------------------------------------------- 1 | import type { Word } from "../model/Word"; 2 | import type { SelectionHistoryStorage } from "../storage/SelectionHistoryStorage"; 3 | import type { IndexedWords } from "../ui/AutoCompleteSuggest"; 4 | import { suggestWords, suggestWordsByPartialMatch } from "./suggester"; 5 | 6 | type Name = "prefix" | "partial"; 7 | 8 | type Handler = ( 9 | indexedWords: IndexedWords, 10 | query: string, 11 | max: number, 12 | option: { 13 | frontMatter?: string; 14 | selectionHistoryStorage?: SelectionHistoryStorage; 15 | fuzzy?: { 16 | minMatchScore: number; 17 | }; 18 | providerMinChars?: { 19 | currentFile: number; 20 | currentVault: number; 21 | customDictionary: number; 22 | internalLink: number; 23 | }; 24 | globalMinChar?: number; 25 | }, 26 | ) => Word[]; 27 | 28 | export class MatchStrategy { 29 | private static readonly _values: MatchStrategy[] = []; 30 | 31 | static readonly PREFIX = new MatchStrategy("prefix", suggestWords); 32 | static readonly PARTIAL = new MatchStrategy( 33 | "partial", 34 | suggestWordsByPartialMatch, 35 | ); 36 | 37 | private constructor( 38 | readonly name: Name, 39 | readonly handler: Handler, 40 | ) { 41 | MatchStrategy._values.push(this); 42 | } 43 | 44 | static fromName(name: string): MatchStrategy { 45 | return MatchStrategy._values.find((x) => x.name === name)!; 46 | } 47 | 48 | static values(): MatchStrategy[] { 49 | return MatchStrategy._values; 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/provider/SpecificMatchStrategy.ts: -------------------------------------------------------------------------------- 1 | import type { Word } from "../model/Word"; 2 | import type { SelectionHistoryStorage } from "../storage/SelectionHistoryStorage"; 3 | import 
type { IndexedWords } from "../ui/AutoCompleteSuggest"; 4 | import { suggestWords, suggestWordsByPartialMatch } from "./suggester"; 5 | 6 | type Name = "inherit" | "prefix" | "partial"; 7 | 8 | type Handler = ( 9 | indexedWords: IndexedWords, 10 | query: string, 11 | max: number, 12 | option: { 13 | frontMatter?: string; 14 | selectionHistoryStorage?: SelectionHistoryStorage; 15 | fuzzy?: { 16 | minMatchScore: number; 17 | }; 18 | }, 19 | ) => Word[]; 20 | 21 | const neverUsedHandler = (..._args: any[]) => []; 22 | 23 | export class SpecificMatchStrategy { 24 | private static readonly _values: SpecificMatchStrategy[] = []; 25 | 26 | static readonly INHERIT = new SpecificMatchStrategy( 27 | "inherit", 28 | neverUsedHandler, 29 | ); 30 | static readonly PREFIX = new SpecificMatchStrategy("prefix", suggestWords); 31 | static readonly PARTIAL = new SpecificMatchStrategy( 32 | "partial", 33 | suggestWordsByPartialMatch, 34 | ); 35 | 36 | private constructor( 37 | readonly name: Name, 38 | readonly handler: Handler, 39 | ) { 40 | SpecificMatchStrategy._values.push(this); 41 | } 42 | 43 | static fromName(name: string): SpecificMatchStrategy { 44 | return SpecificMatchStrategy._values.find((x) => x.name === name)!; 45 | } 46 | 47 | static values(): SpecificMatchStrategy[] { 48 | return SpecificMatchStrategy._values; 49 | } 50 | } 51 | -------------------------------------------------------------------------------- /src/tokenizer/tokenizers/AbstractTokenizer.ts: -------------------------------------------------------------------------------- 1 | import { ExhaustiveError } from "../../errors"; 2 | import { removeFromPattern } from "../../util/strings"; 3 | import { 4 | type FactoryArgs, 5 | type Tokenizer, 6 | type TrimTarget, 7 | } from "../tokenizer"; 8 | 9 | const INPUT_TRIM_CHAR_PATTERN = /[\r\n\t\[\]$/:?!=()<>"',|;*~ `_“„«»‹›‚‘’”]/g; 10 | const INDEXING_TRIM_CHAR_PATTERN = /[\r\n\t\[\]/:?!=()<>"',|;*~ `_“„«»‹›‚‘’”]/g; 11 | 12 | export abstract class AbstractTokenizer 
implements Tokenizer { 13 | protected inputTrimCharPattern: RegExp; 14 | protected indexingTrimCharPattern: RegExp; 15 | 16 | constructor(args?: FactoryArgs) { 17 | this.inputTrimCharPattern = args?.treatUnderscoreAsPartOfWord 18 | ? removeFromPattern(INPUT_TRIM_CHAR_PATTERN, "_") 19 | : INPUT_TRIM_CHAR_PATTERN; 20 | this.indexingTrimCharPattern = args?.treatUnderscoreAsPartOfWord 21 | ? removeFromPattern(INDEXING_TRIM_CHAR_PATTERN, "_") 22 | : INDEXING_TRIM_CHAR_PATTERN; 23 | } 24 | 25 | getTrimPattern(target: TrimTarget): RegExp { 26 | switch (target) { 27 | case "input": 28 | return this.inputTrimCharPattern; 29 | case "indexing": 30 | return this.indexingTrimCharPattern; 31 | default: 32 | throw new ExhaustiveError(target); 33 | } 34 | } 35 | 36 | shouldIgnoreOnCurrent(_str: string): boolean { 37 | return false; 38 | } 39 | 40 | abstract tokenize(content: string, raw?: boolean): string[]; 41 | 42 | abstract recursiveTokenize( 43 | content: string, 44 | ): { word: string; offset: number }[]; 45 | } 46 | -------------------------------------------------------------------------------- /src/ui/CustomDictionaryWordAddModal.ts: -------------------------------------------------------------------------------- 1 | import { App, Modal, Notice } from "obsidian"; 2 | import { AppHelper } from "../app-helper"; 3 | import type { CustomDictionaryWord } from "../model/Word"; 4 | import CustomDictionaryWordAdd from "./component/CustomDictionaryWordAdd.svelte"; 5 | 6 | export class CustomDictionaryWordAddModal extends Modal { 7 | component: CustomDictionaryWordAdd; 8 | 9 | constructor( 10 | app: App, 11 | dictionaryPaths: string[], 12 | initialValue: string = "", 13 | dividerForDisplay: string = "", 14 | onSubmit: (dictionaryPath: string, word: CustomDictionaryWord) => void, 15 | ) { 16 | super(app); 17 | const appHelper = new AppHelper(app); 18 | 19 | const dictionaries = dictionaryPaths.map((x) => ({ id: x, path: x })); 20 | 21 | const { contentEl } = this; 22 | this.component 
= new CustomDictionaryWordAdd({ 23 | target: contentEl, 24 | props: { 25 | dictionaries, 26 | selectedDictionary: dictionaries[0], 27 | inputWord: initialValue, 28 | dividerForDisplay, 29 | onSubmit, 30 | onClickFileIcon: (dictionaryPath: string) => { 31 | const markdownFile = appHelper.getMarkdownFileByPath(dictionaryPath); 32 | if (!markdownFile) { 33 | // noinspection ObjectAllocationIgnored 34 | new Notice(`Can't open ${dictionaryPath}`); 35 | return; 36 | } 37 | 38 | this.close(); 39 | appHelper.openMarkdownFile(markdownFile, true); 40 | }, 41 | }, 42 | }); 43 | } 44 | 45 | onClose() { 46 | super.onClose(); 47 | this.component.$destroy(); 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/tokenizer/tokenizers/JapaneseTokenizer.ts: -------------------------------------------------------------------------------- 1 | import TinySegmenter from "../../external/tiny-segmenter"; 2 | import { joinNumberWithSymbol } from "../../util/strings"; 3 | import { AbstractTokenizer } from "./AbstractTokenizer"; 4 | // @ts-ignore 5 | const segmenter = new TinySegmenter(); 6 | 7 | function pickTokensAsJapanese(content: string, trimPattern: RegExp): string[] { 8 | return content 9 | .split(trimPattern) 10 | .filter((x) => x !== "") 11 | .flatMap((x) => joinNumberWithSymbol(segmenter.segment(x))); 12 | } 13 | 14 | /** 15 | * Japanese needs original logic. 16 | */ 17 | export class JapaneseTokenizer extends AbstractTokenizer { 18 | tokenize(content: string, raw?: boolean): string[] { 19 | return pickTokensAsJapanese( 20 | content, 21 | raw ? / /g : this.getTrimPattern("indexing"), 22 | ); 23 | } 24 | 25 | recursiveTokenize(content: string): { word: string; offset: number }[] { 26 | const tokens: string[] = joinNumberWithSymbol( 27 | segmenter 28 | .segment(content) 29 | // https://github.com/tadashi-aikawa/obsidian-various-complements-plugin/issues/77 30 | .flatMap((x: string) => 31 | x === " " ? 
/**
 * Japanese needs original logic.
 *
 * Tokenizes Japanese text with TinySegmenter and post-processes the
 * segments (joining numbers with adjacent symbols via joinNumberWithSymbol,
 * and special-casing space handling).
 */
export class JapaneseTokenizer extends AbstractTokenizer {
  /**
   * Splits content into Japanese tokens.
   * @param raw when true, split only on literal spaces instead of the
   *            full indexing trim pattern.
   */
  tokenize(content: string, raw?: boolean): string[] {
    return pickTokensAsJapanese(
      content,
      raw ? / /g : this.getTrimPattern("indexing"),
    );
  }

  /**
   * Returns every suffix of `content` that starts at a token boundary,
   * together with its character offset, so callers can match from any
   * boundary position.
   */
  recursiveTokenize(content: string): { word: string; offset: number }[] {
    const tokens: string[] = joinNumberWithSymbol(
      segmenter
        .segment(content)
        // https://github.com/tadashi-aikawa/obsidian-various-complements-plugin/issues/77
        // Re-split segments on spaces; empty fragments produced by split are
        // turned back into single-space tokens so offsets stay aligned.
        // NOTE(review): relies on TinySegmenter's space handling — confirm.
        .flatMap((x: string) =>
          x === " " ? x : x.split(" ").map((t) => (t === "" ? " " : t)),
        ),
    );

    const ret = [];
    for (let i = 0; i < tokens.length; i++) {
      // Skip suffixes that would begin with a lone space token (except i === 0).
      // NOTE(review): the `length !== 1` check is redundant — a " " token
      // always has length 1, so `tokens[i] !== " "` alone decides.
      if (i === 0 || tokens[i].length !== 1 || tokens[i] !== " ") {
        ret.push({
          word: tokens.slice(i).join(""),
          offset: tokens.slice(0, i).join("").length,
        });
      }
    }

    return ret;
  }

  /**
   * Ignores queries consisting only of hiragana, ASCII letters, the
   * punctuation 。、ー, or spaces — presumably because such fragments would
   * produce noisy suggestions.
   */
  shouldIgnoreOnCurrent(str: string): boolean {
    return Boolean(str.match(/^[ぁ-んa-zA-Z。、ー ]*$/));
  }
}
"typescript": "^5.8.3" 38 | }, 39 | "dependencies": { 40 | "chinese-tokenizer": "github:tadashi-aikawa/chinese-tokenizer", 41 | "emoji-regex": "^10.3.0", 42 | "minimatch": "^10.0.3", 43 | "svelte-lucide-icons": "^0.6.0", 44 | "ts-deepmerge": "^7.0.1" 45 | }, 46 | "packageManager": "pnpm@10.1.0+sha512.c89847b0667ddab50396bbbd008a2a43cf3b581efd59cf5d9aa8923ea1fb4b8106c041d540d08acb095037594d73ebc51e1ec89ee40c88b30b8a66c0fae0ac1b" 47 | } -------------------------------------------------------------------------------- /src/ui/component/InputDialog.ts: -------------------------------------------------------------------------------- 1 | import { Modal } from "obsidian"; 2 | 3 | export class InputDialog extends Modal { 4 | inputEl!: HTMLInputElement; 5 | promise!: Promise; 6 | submitted = false; 7 | 8 | constructor( 9 | public args: { 10 | title: string; 11 | placeholder?: string; 12 | defaultValue?: string; 13 | }, 14 | ) { 15 | super(app); 16 | } 17 | 18 | onOpen(): void { 19 | this.titleEl.setText(this.args.title); 20 | 21 | this.inputEl = this.contentEl.createEl("input", { 22 | type: "text", 23 | placeholder: this.args.placeholder ?? 
"", 24 | cls: "carnelian-input-dialog-input", 25 | value: this.args.defaultValue, 26 | }); 27 | } 28 | 29 | /** 30 | * This function returns 31 | * - Promise if submitted not empty string 32 | * - Promise<""> if submitted empty string 33 | * - Promise if canceled 34 | */ 35 | open(args?: { initialSelect: boolean }): Promise { 36 | super.open(); 37 | 38 | this.promise = new Promise((resolve) => { 39 | const listener = (ev: KeyboardEvent) => { 40 | if (ev.isComposing) { 41 | return; 42 | } 43 | if (ev.code === "Enter") { 44 | ev.preventDefault(); 45 | resolve(this.inputEl.value); 46 | this.submitted = true; 47 | this.close(); 48 | } 49 | }; 50 | 51 | this.inputEl.addEventListener("keydown", listener); 52 | 53 | this.onClose = () => { 54 | super.onClose(); 55 | this.inputEl.removeEventListener("keydown", listener); 56 | if (!this.submitted) { 57 | resolve(null); 58 | } 59 | }; 60 | 61 | if (args?.initialSelect) { 62 | this.inputEl.select(); 63 | } 64 | }); 65 | 66 | return this.promise; 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /src/keys.ts: -------------------------------------------------------------------------------- 1 | import { type Modifier, Platform } from "obsidian"; 2 | import { equalsAsSet } from "./util/collection-helper"; 3 | 4 | export const MOD = Platform.isMacOS ? "Cmd" : "Ctrl"; 5 | export const ALT = Platform.isMacOS ? "Option" : "Alt"; 6 | 7 | export const quickResultSelectionModifier = ( 8 | userAltInsteadOfModForQuickResultSelection: boolean, 9 | ) => (userAltInsteadOfModForQuickResultSelection ? ALT : MOD); 10 | 11 | export type Hotkey = { 12 | modifiers: Modifier[]; 13 | key: string; 14 | hideHotkeyGuide?: boolean; 15 | }; 16 | 17 | export function hotkey2String(hk?: Hotkey): string { 18 | if (!hk) { 19 | return ""; 20 | } 21 | 22 | const hotkey = hk.key === " " ? "Space" : hk.key; 23 | const mods = hk.modifiers.join(" "); 24 | 25 | return mods ? 
`${mods} ${hotkey}` : hotkey; 26 | } 27 | 28 | export function string2Hotkey( 29 | hotKey: string, 30 | hideHotkeyGuide: boolean, 31 | ): Hotkey | null { 32 | const keys = hotKey.split(" "); 33 | 34 | if (keys.length === 0 || keys[0] === "") { 35 | return null; 36 | } 37 | if (keys.length === 1) { 38 | return { 39 | modifiers: [], 40 | key: keys[0].replace("Space", " "), 41 | hideHotkeyGuide, 42 | }; 43 | } 44 | return { 45 | modifiers: keys.slice(0, -1) as Modifier[], 46 | key: keys.last()!.replace("Space", " "), 47 | hideHotkeyGuide, 48 | }; 49 | } 50 | 51 | export function equalsAsHotkey( 52 | hotkey: Hotkey, 53 | keyDownEvent: KeyboardEvent, 54 | ): boolean { 55 | const hk: Hotkey = { modifiers: [], key: keyDownEvent.key }; 56 | if (keyDownEvent.shiftKey) { 57 | hk.modifiers.push("Shift"); 58 | } 59 | if (keyDownEvent.altKey) { 60 | hk.modifiers.push("Alt"); 61 | } 62 | if (keyDownEvent.ctrlKey) { 63 | hk.modifiers.push(Platform.isMacOS ? "Ctrl" : "Mod"); 64 | } 65 | if (keyDownEvent.metaKey) { 66 | hk.modifiers.push(Platform.isMacOS ? 
"Mod" : "Meta"); 67 | } 68 | 69 | return ( 70 | hotkey.key.toLowerCase() === hk.key.toLowerCase() && 71 | equalsAsSet(hotkey.modifiers, hk.modifiers) 72 | ); 73 | } 74 | -------------------------------------------------------------------------------- /src/tokenizer/tokenizer.ts: -------------------------------------------------------------------------------- 1 | import type { App } from "obsidian"; 2 | 3 | import type { Settings } from "../setting/settings"; 4 | import { ArabicTokenizer } from "./tokenizers/ArabicTokenizer"; 5 | import { ChineseTokenizer } from "./tokenizers/ChineseTokenizer"; 6 | import { DefaultTokenizer } from "./tokenizers/DefaultTokenizer"; 7 | import { EnglishOnlyTokenizer } from "./tokenizers/EnglishOnlyTokenizer"; 8 | import { JapaneseTokenizer } from "./tokenizers/JapaneseTokenizer"; 9 | import { KoreanTokenizer } from "./tokenizers/KoreanTokenizer"; 10 | import type { TokenizeStrategy } from "./TokenizeStrategy"; 11 | 12 | export type TrimTarget = "input" | "indexing"; 13 | 14 | export interface FactoryArgs { 15 | treatUnderscoreAsPartOfWord?: boolean; 16 | } 17 | 18 | export interface Tokenizer { 19 | tokenize(content: string, raw?: boolean): string[]; 20 | recursiveTokenize(content: string): { word: string; offset: number }[]; 21 | getTrimPattern(target: TrimTarget): RegExp; 22 | shouldIgnoreOnCurrent(query: string): boolean; 23 | } 24 | 25 | export async function createTokenizer( 26 | strategy: TokenizeStrategy, 27 | app: App, 28 | settings: Settings, 29 | ): Promise { 30 | switch (strategy.name) { 31 | case "default": 32 | return new DefaultTokenizer({ 33 | treatUnderscoreAsPartOfWord: settings.treatUnderscoreAsPartOfWord, 34 | }); 35 | case "english-only": 36 | return new EnglishOnlyTokenizer({ 37 | treatUnderscoreAsPartOfWord: settings.treatUnderscoreAsPartOfWord, 38 | }); 39 | case "arabic": 40 | return new ArabicTokenizer(); 41 | case "japanese": 42 | return new JapaneseTokenizer(); 43 | case "chinese": 44 | const hasCedict = 
await app.vault.adapter.exists(settings.cedictPath); 45 | if (!hasCedict) { 46 | return Promise.reject( 47 | new Error(`cedict_ts.u8 doesn't exist in ${settings.cedictPath}.`), 48 | ); 49 | } 50 | const dict = await app.vault.adapter.read(settings.cedictPath); 51 | return ChineseTokenizer.create(dict); 52 | case "korean": 53 | return new KoreanTokenizer(); 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # obsidian-various-complements-plugin 2 | 3 | [![release](https://img.shields.io/github/release/tadashi-aikawa/obsidian-various-complements-plugin.svg)](https://github.com/tadashi-aikawa/obsidian-various-complements-plugin/releases/latest) 4 | [![Tests](https://github.com/tadashi-aikawa/obsidian-various-complements-plugin/workflows/Tests/badge.svg)](https://github.com/tadashi-aikawa/obsidian-various-complements-plugin/actions) 5 | ![downloads](https://img.shields.io/github/downloads/tadashi-aikawa/obsidian-various-complements-plugin/total) 6 | 7 | This plugin for [Obsidian] enables you complete words like the auto-completion of IDE. 8 | 9 | ![](https://tadashi-aikawa.github.io/docs-obsidian-various-complements-plugin/resources/various-complements.gif) 10 | 11 | ## 📚 Documentation 12 | 13 | - [Official](https://tadashi-aikawa.github.io/docs-obsidian-various-complements-plugin/) 14 | - [![Ask DeepWiki](https://deepwiki.com/badge.svg)](https://deepwiki.com/tadashi-aikawa/obsidian-various-complements-plugin) 15 | 16 | ## 👥 For users 17 | 18 | ### Feature requests / Bugs 19 | 20 | Please create a new [issue]. 21 | 22 | ### Questions / Others 23 | 24 | Please create a new [discussion]. 
25 | 26 | ### Pull requests 27 | 28 | Before creating a pull request, please make an [issue] or a [discussion]😉 29 | 30 | [issue]: https://github.com/tadashi-aikawa/obsidian-various-complements-plugin/issues 31 | [discussion]: https://github.com/tadashi-aikawa/obsidian-various-complements-plugin/discussions 32 | 33 | ## 🖥️ For developers 34 | 35 | - Requirements 36 | - Node.js v22 37 | 38 | ### Development 39 | 40 | #### Set up 41 | 42 | ```bash 43 | git config core.hooksPath hooks 44 | ``` 45 | 46 | #### Install dependencies 47 | 48 | ```bash 49 | corepack enable 50 | pnpm install 51 | ``` 52 | 53 | #### Build for development 54 | 55 | ```bash 56 | pnpm dev 57 | ``` 58 | 59 | #### Test 60 | 61 | ```bash 62 | pnpm test 63 | # or 64 | pnpm test --watch 65 | ``` 66 | 67 | #### CI 68 | 69 | ```bash 70 | pnpm run ci 71 | ``` 72 | 73 | #### Release 74 | 75 | Run [Release Action](https://github.com/tadashi-aikawa/obsidian-various-complements-plugin/actions/workflows/release.yaml). 76 | 77 | [Obsidian]: https://obsidian.md/ 78 | 79 | -------------------------------------------------------------------------------- /.releaserc.mjs: -------------------------------------------------------------------------------- 1 | export default { 2 | branches: ["main"], 3 | // remove "v" 4 | tagFormat: "${version}", 5 | plugins: [ 6 | [ 7 | "@semantic-release/commit-analyzer", 8 | { 9 | preset: "conventionalcommits", 10 | releaseRules: [ 11 | { breaking: true, release: "major" }, 12 | { type: "feat", release: "minor" }, 13 | { type: "build", release: "minor" }, 14 | { type: "style", release: "minor" }, 15 | { type: "fix", release: "patch" }, 16 | { type: "refactor", release: "patch" }, 17 | { revert: true, release: "patch" }, 18 | ], 19 | }, 20 | ], 21 | [ 22 | "@semantic-release/release-notes-generator", 23 | { 24 | preset: "conventionalcommits", 25 | presetConfig: { 26 | types: [ 27 | { type: "feat", section: "✨ Features" }, 28 | { type: "style", section: "🎨 Styles" }, 29 | { type: 
"fix", section: "🛡 Bug Fixes" }, 30 | { type: "build", section: "🤖 Build" }, 31 | { type: "docs", hidden: true }, 32 | { type: "refactor", hidden: true }, 33 | { type: "test", hidden: true }, 34 | { type: "ci", hidden: true }, 35 | { type: "dev", hidden: true }, 36 | { type: "chore", hidden: true }, 37 | ], 38 | }, 39 | }, 40 | ], 41 | [ 42 | "@semantic-release/exec", 43 | { 44 | prepareCmd: 45 | "pnpm run ci && node version-bump.mjs ${nextRelease.version}", 46 | }, 47 | ], 48 | [ 49 | "@semantic-release/github", 50 | { 51 | assets: [ 52 | "main.js", 53 | "styles.css", 54 | "manifest.json", 55 | "manifest-beta.json", 56 | ], 57 | }, 58 | ], 59 | [ 60 | "@semantic-release/git", 61 | { 62 | assets: [ 63 | "package.json", 64 | "manifest-beta.json", 65 | "manifest.json", 66 | "versions.json", 67 | "pnpm-lock.yaml", 68 | ], 69 | message: 70 | "chore(release): ${nextRelease.version} [skip ci]\n\n${nextRelease.notes}", 71 | }, 72 | ], 73 | ], 74 | }; 75 | -------------------------------------------------------------------------------- /src/tokenizer/tokenizers/DefaultTokenizer.ts: -------------------------------------------------------------------------------- 1 | import type { TrimTarget } from "../tokenizer"; 2 | import { AbstractTokenizer } from "./AbstractTokenizer"; 3 | 4 | type PreviousType = "none" | "trim" | "others"; 5 | 6 | export class DefaultTokenizer extends AbstractTokenizer { 7 | tokenize(content: string, raw?: boolean): string[] { 8 | const tokenized = Array.from(this.__tokenize(content, "indexing")); 9 | return raw 10 | ? 
export class DefaultTokenizer extends AbstractTokenizer {
  /**
   * Splits content into words for indexing.
   * @param raw when true, return every scanned token as-is; otherwise drop
   *            tokens made of trim characters and strip trailing periods.
   */
  tokenize(content: string, raw?: boolean): string[] {
    const tokenized = Array.from(this.__tokenize(content, "indexing"));
    return raw
      ? tokenized.map((x) => x.word)
      : tokenized
          .map((x) => x.word)
          .filter((x) => !x.match(this.getTrimPattern("indexing")))
          // Strip trailing dots (e.g. sentence-ending periods).
          .map((x) => x.replace(/\.+$/g, ""));
  }

  /**
   * Returns every suffix of `content` starting at a word boundary, with its
   * offset. Always includes the whole content at offset 0 so the caller can
   * match against the full input as well.
   */
  recursiveTokenize(content: string): { word: string; offset: number }[] {
    const offsets = Array.from(this.__tokenize(content, "input"))
      .filter((x) => !x.word.match(this.getTrimPattern("input")))
      .map((x) => x.offset);

    const results = offsets.map((i) => ({
      word: content.slice(i),
      offset: i,
    }));

    if (results.length === 0) {
      return [{ word: content, offset: 0 }];
    }

    return results[0].offset === 0
      ? results
      : [{ word: content, offset: 0 }, ...results];
  }

  // Different from _tokenize of other tokenizers
  /**
   * Scans content character by character, yielding maximal runs that are
   * either trim characters or non-trim characters, each with its start
   * offset. A trailing (possibly empty) run is always yielded at the end.
   */
  private *__tokenize(
    content: string,
    target: TrimTarget,
  ): Iterable<{ word: string; offset: number }> {
    let startIndex = 0;
    let previousType: PreviousType = "none";

    for (let i = 0; i < content.length; i++) {
      if (content[i].match(super.getTrimPattern(target))) {
        // Boundary: flush the run accumulated so far (if any).
        const word = content.slice(startIndex, i);
        if (word !== "") {
          yield { word, offset: startIndex };
        }
        previousType = "trim";
        startIndex = i;
        continue;
      }

      if (previousType === "others" || previousType === "none") {
        previousType = "others";
        continue;
      }

      // Transition trim -> others: flush the trim run.
      yield { word: content.slice(startIndex, i), offset: startIndex };
      previousType = "others";
      startIndex = i;
    }

    yield {
      word: content.slice(startIndex, content.length),
      offset: startIndex,
    };
  }
}
// Scanner state: before any character, inside a trim-character run, inside an
// English word, or inside other (non-English, non-trim) text.
type PreviousType = "none" | "trim" | "english" | "others";
// Characters treated as part of an "English" word: letters, digits,
// underscore, hyphen, and backslash.
const ENGLISH_PATTERN = /[a-zA-Z0-9_\-\\]/;
export class EnglishOnlyTokenizer extends DefaultTokenizer {
  /**
   * Indexes only tokens containing at least one English-word character.
   * @param raw when true, skip the additional trim-pattern filtering.
   */
  tokenize(content: string, raw?: boolean): string[] {
    const tokenized = Array.from(this._tokenize(content, "indexing")).filter(
      (x) => x.word.match(ENGLISH_PATTERN),
    );
    return raw
      ? tokenized.map((x) => x.word)
      : tokenized
          .map((x) => x.word)
          .filter((x) => !x.match(this.getTrimPattern("indexing")));
  }

  /**
   * Returns every suffix of `content` starting at a non-trim token boundary,
   * with its offset. Unlike DefaultTokenizer, the full content is not
   * force-included at offset 0.
   */
  recursiveTokenize(content: string): { word: string; offset: number }[] {
    const offsets = Array.from(this._tokenize(content, "input"))
      .filter((x) => !x.word.match(this.getTrimPattern("input")))
      .map((x) => x.offset);
    return [
      ...offsets.map((i) => ({
        word: content.slice(i),
        offset: i,
      })),
    ];
  }

  /**
   * Scans character by character, yielding maximal runs of trim characters,
   * English-word characters, or "other" characters (e.g. CJK), each with its
   * start offset. A boundary is emitted whenever the character class changes.
   */
  private *_tokenize(
    content: string,
    target: TrimTarget,
  ): Iterable<{ word: string; offset: number }> {
    let startIndex = 0;
    let previousType: PreviousType = "none";

    for (let i = 0; i < content.length; i++) {
      if (content[i].match(super.getTrimPattern(target))) {
        yield { word: content.slice(startIndex, i), offset: startIndex };
        previousType = "trim";
        startIndex = i;
        continue;
      }

      if (content[i].match(ENGLISH_PATTERN)) {
        if (previousType === "english" || previousType === "none") {
          // Still inside the same English run.
          previousType = "english";
          continue;
        }

        yield { word: content.slice(startIndex, i), offset: startIndex };
        previousType = "english";
        startIndex = i;
        continue;
      }

      if (previousType === "others" || previousType === "none") {
        previousType = "others";
        continue;
      }

      yield { word: content.slice(startIndex, i), offset: startIndex };
      previousType = "others";
      startIndex = i;
    }

    // Flush the trailing run (possibly empty).
    yield {
      word: content.slice(startIndex, content.length),
      offset: startIndex,
    };
  }
}
startIndex, 70 | }; 71 | } 72 | } 73 | -------------------------------------------------------------------------------- /esbuild.config.mjs: -------------------------------------------------------------------------------- 1 | import builtins from "builtin-modules"; 2 | import chokidar from "chokidar"; 3 | import esbuild from "esbuild"; 4 | import esbuildSvelte from "esbuild-svelte"; 5 | import fs from "fs"; 6 | import path from "path"; 7 | import process from "process"; 8 | import sveltePreprocess from "svelte-preprocess"; 9 | 10 | const VAULT_DIR = "/Users/tadashi-aikawa/work/minerva"; 11 | const FILES = ["main.js", "manifest.json", "styles.css"]; 12 | 13 | // --- 14 | 15 | const banner = `/* 16 | THIS IS A GENERATED/BUNDLED FILE BY ESBUILD 17 | if you want to view the source, please visit the github repository of this plugin 18 | */ 19 | `; 20 | 21 | const prod = process.argv[2] === "production"; 22 | 23 | const context = await esbuild.context({ 24 | plugins: [ 25 | esbuildSvelte({ 26 | compilerOptions: { css: "injected" }, 27 | preprocess: sveltePreprocess(), 28 | }), 29 | ], 30 | banner: { 31 | js: banner, 32 | }, 33 | entryPoints: ["src/main.ts"], 34 | bundle: true, 35 | external: [ 36 | "obsidian", 37 | "electron", 38 | "@codemirror/autocomplete", 39 | "@codemirror/collab", 40 | "@codemirror/commands", 41 | "@codemirror/language", 42 | "@codemirror/lint", 43 | "@codemirror/search", 44 | "@codemirror/state", 45 | "@codemirror/view", 46 | "@lezer/common", 47 | "@lezer/highlight", 48 | "@lezer/lr", 49 | ...builtins, 50 | ], 51 | format: "cjs", 52 | target: "es2018", 53 | logLevel: "info", 54 | sourcemap: prod ? 
false : "inline", 55 | treeShaking: true, 56 | outfile: "main.js", 57 | }); 58 | 59 | if (prod) { 60 | await context.rebuild(); 61 | process.exit(0); 62 | } else { 63 | await context.watch(); 64 | 65 | const pluginDir = path.join( 66 | VAULT_DIR, 67 | ".obsidian/plugins/various-complements", 68 | ); 69 | 70 | console.log(`📁 Creating ${pluginDir} (if not existed)`); 71 | fs.mkdirSync(pluginDir, { recursive: true }); 72 | 73 | const hotreloadPath = path.join(pluginDir, ".hotreload", ""); 74 | console.log(`🌶 Creating a ${hotreloadPath}`); 75 | fs.writeFileSync(hotreloadPath, ""); 76 | 77 | const watcher = chokidar.watch(FILES, { persistent: true }); 78 | watcher 79 | .on("add", (p) => { 80 | console.log(`♨ ${p} is added`); 81 | fs.copyFileSync(p, path.join(pluginDir, p)); 82 | }) 83 | .on("change", (p) => { 84 | console.log(`♨ ${p} is changed`); 85 | fs.copyFileSync(p, path.join(pluginDir, p)); 86 | }); 87 | } 88 | -------------------------------------------------------------------------------- /src/util/collection-helper.ts: -------------------------------------------------------------------------------- 1 | export const groupBy = ( 2 | values: T[], 3 | toKey: (t: T) => string, 4 | ): { [key: string]: T[] } => 5 | values.reduce( 6 | (prev, cur, _1, _2, k = toKey(cur)) => ( 7 | (prev[k] || (prev[k] = [])).push(cur), prev 8 | ), 9 | {} as { [key: string]: T[] }, 10 | ); 11 | 12 | export function uniq(values: T[]): T[] { 13 | return [...new Set(values)]; 14 | } 15 | 16 | export function uniqBy(values: T[], fn: (x: T) => string | number): T[] { 17 | const m = new Map(); 18 | values.forEach((x) => { 19 | const k = fn(x); 20 | if (!m.has(k)) { 21 | m.set(k, x); 22 | } 23 | }); 24 | return Array.from(m.values()); 25 | } 26 | 27 | export function uniqWith(arr: T[], fn: (one: T, other: T) => boolean) { 28 | return arr.filter( 29 | (element, index) => arr.findIndex((step) => fn(element, step)) === index, 30 | ); 31 | } 32 | 33 | export function hasSameElement(arr1: unknown[], 
arr2: unknown[]): boolean { 34 | return arr1.some((x) => arr2.includes(x)); 35 | } 36 | 37 | export function arrayEquals( 38 | arr1: unknown[], 39 | arr2: unknown[], 40 | length?: number, 41 | ): boolean { 42 | let l = Math.max(arr1.length, arr2.length); 43 | if (length !== undefined) { 44 | l = Math.min(l, length); 45 | } 46 | 47 | for (let i = 0; i < l; i++) { 48 | if (arr1[i] !== arr2[i]) { 49 | return false; 50 | } 51 | } 52 | 53 | return true; 54 | } 55 | 56 | export function arrayEqualsUntil(arr1: unknown[], arr2: unknown[]): number { 57 | let l = Math.min(arr1.length, arr2.length); 58 | for (let i = 0; i < l; i++) { 59 | if (arr1[i] !== arr2[i]) { 60 | return i - 1; 61 | } 62 | } 63 | 64 | return l - 1; 65 | } 66 | 67 | export function setEquals(set1: Set, set2: Set): boolean { 68 | if (set1.size !== set2.size) { 69 | return false; 70 | } 71 | 72 | return Array.from(set1).every((element) => set2.has(element)); 73 | } 74 | 75 | export function equalsAsSet(ary1: string[], ary2: string[]): boolean { 76 | return setEquals(new Set(ary1), new Set(ary2)); 77 | } 78 | 79 | export function mirrorMap( 80 | collection: T[], 81 | toValue: (t: T) => string, 82 | ): { [key: string]: string } { 83 | return collection.reduce((p, c) => ({ ...p, [toValue(c)]: toValue(c) }), {}); 84 | } 85 | 86 | export function max(collection: number[], emptyValue: number): number { 87 | const select = (a: number, b: number) => (a >= b ? a : b); 88 | return collection.reduce(select, emptyValue); 89 | } 90 | -------------------------------------------------------------------------------- /AGENTS.md: -------------------------------------------------------------------------------- 1 | # Repository Guidelines 2 | 3 | ## Project Structure & Module Organization 4 | - `src/`: TypeScript source. 5 | - `src/main.ts`: Obsidian plugin entry. 6 | - `src/ui/`: Svelte UI components. 7 | - `src/provider/`, `src/tokenizer/`, `src/util/`, `src/setting/`: core logic and helpers. 
8 | - Root artifacts: `manifest.json`, `styles.css`, bundled `main.js`. 9 | - Tests: colocated `*.test.ts` next to sources (e.g., `src/util/strings.test.ts`). 10 | 11 | ## Build, Test, and Development Commands 12 | - `pnpm dev`: Start esbuild in watch mode. Copies `main.js`, `manifest.json`, `styles.css` to your Obsidian vault if `VAULT_DIR` in `esbuild.config.mjs` is set. 13 | - `pnpm build`: Type-check then production bundle to `main.js`. 14 | - `pnpm typecheck`: Run TypeScript in strict mode without emit. 15 | - `pnpm test` | `pnpm test:watch`: Run Jest tests (with coverage by default). 16 | - `pnpm format`: Prettier check for `*.ts` files. 17 | - `pnpm run ci`: Install, build, and test (used by CI/Release). 18 | 19 | ## Coding Style & Naming Conventions 20 | - Language: TypeScript (strict). UI in Svelte where applicable. 21 | - Formatting: Prettier with `prettier-plugin-organize-imports` (run `pnpm format`). 22 | - Indentation: Prettier defaults (2 spaces). Avoid inline `any` and prefer explicit types. 23 | - Names: `PascalCase` for types/classes, `camelCase` for variables/functions, `UPPER_SNAKE_CASE` for constants. 24 | - File names: `kebab-case.ts`; tests as `name.test.ts` colocated with the unit under test. 25 | 26 | ## Testing Guidelines 27 | - Framework: Jest via `esbuild-jest` transform. 28 | - Location: colocated `*.test.ts` near sources. 29 | - Coverage: `pnpm test` generates coverage; keep or improve existing coverage. 30 | - Test style: small, focused cases; prefer deterministic inputs; mock Obsidian APIs as needed. 31 | 32 | ## Commit & Pull Request Guidelines 33 | - Commits: Conventional Commits (e.g., `feat:`, `fix:`, `build:`, `style:`). Releases are automated via semantic‑release. 34 | - PR policy: This project generally does not accept PRs except obvious bug fixes, typos/docs, or items explicitly requested in issues/discussions (see `pull_request_template.md`). Open an issue/discussion first. 
35 | - PR content: clear description, linked issue, reproduction or before/after notes; attach screenshots only for UI-facing changes. 36 | 37 | ## Security & Configuration Tips 38 | - Local dev vault: set `VAULT_DIR` in `esbuild.config.mjs` to your Obsidian vault to enable hot-copying during `pnpm dev`. 39 | - External modules: Obsidian and Node builtins are marked external in bundling; avoid relying on unavailable runtime modules. 40 | - Requirements: Node.js 22 (`.mise.toml`), `pnpm` via `corepack`. 41 | -------------------------------------------------------------------------------- /src/util/path.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, test } from "@jest/globals"; 2 | import { basename, dirname, extname, isURL } from "./path"; 3 | 4 | describe.each<{ path: string; ext?: string; expected: string }>` 5 | path | ext | expected 6 | ${"a\\b\\c.txt"} | ${undefined} | ${"c.txt"} 7 | ${"a/b/c.txt"} | ${undefined} | ${"c.txt"} 8 | ${"a/b/c.txt.bat"} | ${undefined} | ${"c.txt.bat"} 9 | ${"a/b/c"} | ${undefined} | ${"c"} 10 | ${"a/b/"} | ${undefined} | ${"b"} 11 | ${"a/b"} | ${undefined} | ${"b"} 12 | ${"a"} | ${undefined} | ${"a"} 13 | ${"a\\b\\c.txt"} | ${".txt"} | ${"c"} 14 | ${"a/b/c.txt"} | ${".txt"} | ${"c"} 15 | ${"a/b/c.txt.bat"} | ${".txt"} | ${"c.txt.bat"} 16 | ${"a/b/c.txt.bat"} | ${".bat"} | ${"c.txt"} 17 | ${"a/b/c"} | ${".txt"} | ${"c"} 18 | ${"a/b/"} | ${".txt"} | ${"b"} 19 | ${"a/b"} | ${".txt"} | ${"b"} 20 | ${"a.txt"} | ${".txt"} | ${"a"} 21 | ${"a.txt.bat"} | ${".txt"} | ${"a.txt.bat"} 22 | ${"a.txt.bat"} | ${".bat"} | ${"a.txt"} 23 | ${"a"} | ${".txt"} | ${"a"} 24 | `("basename", ({ path, ext, expected }) => { 25 | test(`basename(${path}, ${ext}) = ${expected}`, () => { 26 | expect(basename(path, ext)).toBe(expected); 27 | }); 28 | }); 29 | 30 | describe.each` 31 | path | expected 32 | ${"a\\b\\c.txt"} | ${".txt"} 33 | ${"a/b/c.txt"} | ${".txt"} 34 | ${"a/b/c.txt.bat"} | 
${".bat"} 35 | ${"a/b/c"} | ${""} 36 | ${"a/b/"} | ${""} 37 | ${"a/b"} | ${""} 38 | ${"c.txt"} | ${".txt"} 39 | ${"c.txt.bat"} | ${".bat"} 40 | ${"c"} | ${""} 41 | `("extname", ({ path, expected }) => { 42 | test(`extname(${path}) = ${expected}`, () => { 43 | expect(extname(path)).toBe(expected); 44 | }); 45 | }); 46 | 47 | describe.each` 48 | path | expected 49 | ${"a\\b\\c.txt"} | ${"a\\b"} 50 | ${"a/b/c.txt"} | ${"a/b"} 51 | ${"a/b/c.txt.bat"} | ${"a/b"} 52 | ${"a/b/c"} | ${"a/b"} 53 | ${"a/b/"} | ${"a"} 54 | ${"a/b"} | ${"a"} 55 | ${"a"} | ${"."} 56 | `("dirname", ({ path, expected }) => { 57 | test(`dirname(${path}) = ${expected}`, () => { 58 | expect(dirname(path)).toBe(expected); 59 | }); 60 | }); 61 | 62 | describe.each<{ path: string; expected: boolean }>` 63 | path | expected 64 | ${"http://hoge"} | ${true} 65 | ${"https://hoge"} | ${true} 66 | ${"file:///hoge"} | ${false} 67 | ${"./hoge/hoge"} | ${false} 68 | ${"hoge/hoge"} | ${false} 69 | ${"./http/hoge"} | ${false} 70 | ${"http/hoge"} | ${false} 71 | `("isURL", ({ path, expected }) => { 72 | test(`isURL(${path}) = ${expected}`, () => { 73 | expect(isURL(path)).toBe(expected); 74 | }); 75 | }); 76 | -------------------------------------------------------------------------------- /src/model/Word.ts: -------------------------------------------------------------------------------- 1 | export type WordType = 2 | | "currentFile" 3 | | "currentVault" 4 | | "customDictionary" 5 | | "internalLink" 6 | | "frontMatter"; 7 | 8 | export interface DefaultWord { 9 | value: string; 10 | description?: string; 11 | aliases?: string[]; 12 | type: WordType; 13 | createdPath: string; 14 | // Add after judge 15 | offset?: number; 16 | hit?: string; 17 | fuzzy?: boolean; 18 | query?: string; 19 | valueForHistory?: string; // prioritize this value for history 20 | } 21 | export interface CurrentFileWord extends DefaultWord { 22 | type: "currentFile"; 23 | } 24 | export interface CurrentVaultWord extends DefaultWord { 25 | 
type: "currentVault"; 26 | } 27 | export interface CustomDictionaryWord extends DefaultWord { 28 | type: "customDictionary"; 29 | caretSymbol?: string; 30 | /** Use for inserting instead of value **/ 31 | insertedText?: string; 32 | /** If true, ignore `Insert space after completion` option **/ 33 | ignoreSpaceAfterCompletion?: boolean; 34 | } 35 | export interface InternalLinkWord extends DefaultWord { 36 | type: "internalLink"; 37 | phantom?: boolean; 38 | aliasMeta?: { 39 | // path 40 | origin: string; 41 | }; 42 | } 43 | export interface FrontMatterWord extends DefaultWord { 44 | type: "frontMatter"; 45 | key: string; 46 | } 47 | 48 | export type Word = 49 | | CurrentFileWord 50 | | CurrentVaultWord 51 | | CustomDictionaryWord 52 | | InternalLinkWord 53 | | FrontMatterWord; 54 | 55 | export class WordTypeMeta { 56 | private static readonly _values: WordTypeMeta[] = []; 57 | private static readonly _dict: { [type: string]: WordTypeMeta } = {}; 58 | 59 | static readonly FRONT_MATTER = new WordTypeMeta( 60 | "frontMatter", 61 | 100, 62 | "frontMatter", 63 | ); 64 | static readonly INTERNAL_LINK = new WordTypeMeta( 65 | "internalLink", 66 | 90, 67 | "internalLink", 68 | ); 69 | static readonly CUSTOM_DICTIONARY = new WordTypeMeta( 70 | "customDictionary", 71 | 80, 72 | "suggestion", 73 | ); 74 | static readonly CURRENT_FILE = new WordTypeMeta( 75 | "currentFile", 76 | 70, 77 | "suggestion", 78 | ); 79 | static readonly CURRENT_VAULT = new WordTypeMeta( 80 | "currentVault", 81 | 60, 82 | "suggestion", 83 | ); 84 | 85 | private constructor( 86 | readonly type: WordType, 87 | readonly priority: number, 88 | readonly group: "frontMatter" | "internalLink" | "suggestion", 89 | ) { 90 | WordTypeMeta._values.push(this); 91 | WordTypeMeta._dict[type] = this; 92 | } 93 | 94 | static of(type: WordType): WordTypeMeta { 95 | return WordTypeMeta._dict[type]; 96 | } 97 | 98 | static values(): WordTypeMeta[] { 99 | return WordTypeMeta._values; 100 | } 101 | } 102 | 
-------------------------------------------------------------------------------- /src/provider/CurrentFileWordProvider.ts: -------------------------------------------------------------------------------- 1 | import type { App } from "obsidian"; 2 | import type { AppHelper } from "../app-helper"; 3 | import type { Word } from "../model/Word"; 4 | import type { Tokenizer } from "../tokenizer/tokenizer"; 5 | import { uniq } from "../util/collection-helper"; 6 | import { 7 | allAlphabets, 8 | startsSmallLetterOnlyFirst, 9 | synonymAliases, 10 | } from "../util/strings"; 11 | import { pushWord, type WordsByFirstLetter } from "./suggester"; 12 | 13 | export class CurrentFileWordProvider { 14 | wordsByFirstLetter: WordsByFirstLetter = {}; 15 | private words: Word[] = []; 16 | private tokenizer: Tokenizer; 17 | 18 | constructor( 19 | private app: App, 20 | private appHelper: AppHelper, 21 | ) {} 22 | 23 | async refreshWords(option: { 24 | onlyEnglish: boolean; 25 | minNumberOfCharacters: number; 26 | makeSynonymAboutEmoji: boolean; 27 | makeSynonymAboutAccentsDiacritics: boolean; 28 | excludeWordPatterns: string[]; 29 | }): Promise { 30 | this.clearWords(); 31 | 32 | const editor = this.appHelper.getCurrentEditor(); 33 | if (!editor) { 34 | return; 35 | } 36 | 37 | const file = this.app.workspace.getActiveFile(); 38 | if (!file) { 39 | return; 40 | } 41 | 42 | const currentToken = this.tokenizer 43 | .tokenize( 44 | editor.getLine(editor.getCursor().line).slice(0, editor.getCursor().ch), 45 | ) 46 | .last(); 47 | 48 | const excludePatterns = option.excludeWordPatterns.map( 49 | (x) => new RegExp(`^${x}$`), 50 | ); 51 | const content = await this.app.vault.cachedRead(file); 52 | const tokens = this.tokenizer 53 | .tokenize(content) 54 | .filter((x) => { 55 | if (x.length < option.minNumberOfCharacters) { 56 | return false; 57 | } 58 | if (this.tokenizer.shouldIgnoreOnCurrent(x)) { 59 | return false; 60 | } 61 | return option.onlyEnglish ? 
allAlphabets(x) : true; 62 | }) 63 | .map((x) => (startsSmallLetterOnlyFirst(x) ? x.toLowerCase() : x)) 64 | .filter((x) => !excludePatterns.some((rp) => x.match(rp))); 65 | this.words = uniq(tokens) 66 | .filter((x) => x !== currentToken) 67 | .map((x) => ({ 68 | value: x, 69 | type: "currentFile", 70 | createdPath: file.path, 71 | aliases: synonymAliases(x, { 72 | emoji: option.makeSynonymAboutEmoji, 73 | accentsDiacritics: option.makeSynonymAboutAccentsDiacritics, 74 | }), 75 | })); 76 | 77 | for (const word of this.words) { 78 | pushWord(this.wordsByFirstLetter, word.value.charAt(0), word); 79 | word.aliases?.forEach((a) => 80 | pushWord(this.wordsByFirstLetter, a.charAt(0), word), 81 | ); 82 | } 83 | } 84 | 85 | clearWords(): void { 86 | this.words = []; 87 | this.wordsByFirstLetter = {}; 88 | } 89 | 90 | get wordCount(): number { 91 | return this.words.length; 92 | } 93 | 94 | setSettings(tokenizer: Tokenizer) { 95 | this.tokenizer = tokenizer; 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /src/provider/FrontMatterWordProvider.ts: -------------------------------------------------------------------------------- 1 | import type { App, TFile } from "obsidian"; 2 | import type { AppHelper, FrontMatterValue } from "../app-helper"; 3 | import type { FrontMatterWord } from "../model/Word"; 4 | import { groupBy, uniqBy } from "../util/collection-helper"; 5 | import { excludeEmoji } from "../util/strings"; 6 | import type { WordsByFirstLetter } from "./suggester"; 7 | 8 | function synonymAliases(name: string): string[] { 9 | const lessEmojiValue = excludeEmoji(name); 10 | return name === lessEmojiValue ? 
[] : [lessEmojiValue]; 11 | } 12 | 13 | function frontMatterToWords( 14 | file: TFile, 15 | key: string, 16 | values: FrontMatterValue, 17 | ): FrontMatterWord[] { 18 | return values.map((x) => ({ 19 | key, 20 | value: x, 21 | type: "frontMatter", 22 | createdPath: file.path, 23 | aliases: synonymAliases(x), 24 | })); 25 | } 26 | 27 | function pickWords(file: TFile, fm: { [key: string]: FrontMatterValue }) { 28 | return Object.entries(fm) 29 | .filter( 30 | ([_key, value]) => 31 | value != null && 32 | (typeof value === "string" || typeof value[0] === "string"), 33 | ) 34 | .flatMap(([key, value]) => frontMatterToWords(file, key, value)); 35 | } 36 | 37 | // noinspection FunctionWithMultipleLoopsJS 38 | function extractAndUniqWords( 39 | wordsByCreatedPath: FrontMatterWordProvider["wordsByCreatedPath"], 40 | ): FrontMatterWord[] { 41 | return uniqBy( 42 | Object.values(wordsByCreatedPath).flat(), 43 | (w) => w.key + w.value.toLowerCase(), 44 | ); 45 | } 46 | 47 | function indexingWords( 48 | words: FrontMatterWord[], 49 | ): FrontMatterWordProvider["wordsByFirstLetterByKey"] { 50 | const wordsByKey = groupBy(words, (x) => x.key); 51 | return Object.fromEntries( 52 | Object.entries(wordsByKey).map( 53 | ([key, words]: [string, FrontMatterWord[]]) => [ 54 | key, 55 | groupBy(words, (w) => w.value.charAt(0)), 56 | ], 57 | ), 58 | ); 59 | } 60 | 61 | export class FrontMatterWordProvider { 62 | private wordsByCreatedPath: { [path: string]: FrontMatterWord[] } = {}; 63 | words: FrontMatterWord[]; 64 | wordsByFirstLetterByKey: { [key: string]: WordsByFirstLetter }; 65 | 66 | constructor( 67 | private app: App, 68 | private appHelper: AppHelper, 69 | ) {} 70 | 71 | refreshWords(): void { 72 | this.clearWords(); 73 | 74 | this.app.vault.getMarkdownFiles().forEach((f) => { 75 | const fm = this.appHelper.getFrontMatter(f); 76 | if (!fm) { 77 | return; 78 | } 79 | 80 | this.wordsByCreatedPath[f.path] = pickWords(f, fm); 81 | }); 82 | 83 | this.words = 
extractAndUniqWords(this.wordsByCreatedPath); 84 | this.wordsByFirstLetterByKey = indexingWords(this.words); 85 | } 86 | 87 | updateWordIndex(file: TFile): void { 88 | const fm = this.appHelper.getFrontMatter(file); 89 | if (!fm) { 90 | return; 91 | } 92 | 93 | this.wordsByCreatedPath[file.path] = pickWords(file, fm); 94 | } 95 | 96 | updateWords(): void { 97 | this.words = extractAndUniqWords(this.wordsByCreatedPath); 98 | this.wordsByFirstLetterByKey = indexingWords(this.words); 99 | } 100 | 101 | clearWords(): void { 102 | this.wordsByCreatedPath = {}; 103 | this.words = []; 104 | this.wordsByFirstLetterByKey = {}; 105 | } 106 | 107 | get wordCount(): number { 108 | return this.words.length; 109 | } 110 | } 111 | -------------------------------------------------------------------------------- /src/util/glob.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, test } from "@jest/globals"; 2 | import { isMatchedGlobPatterns } from "./glob"; 3 | 4 | describe("isMatchedGlobPatterns", () => { 5 | test("should return false for empty patterns", () => { 6 | const result = isMatchedGlobPatterns("path/to/file.md", []); 7 | expect(result).toBe(false); 8 | }); 9 | 10 | test("should match attachments folders", () => { 11 | const patterns = ["**/attachments/**"]; 12 | expect( 13 | isMatchedGlobPatterns( 14 | "Database/Entertainment/Books/attachments/image.png", 15 | patterns, 16 | ), 17 | ).toBe(true); 18 | expect( 19 | isMatchedGlobPatterns( 20 | "Education/University/CS300/attachments/doc.pdf", 21 | patterns, 22 | ), 23 | ).toBe(true); 24 | expect( 25 | isMatchedGlobPatterns("Journal/2020/attachments/photo.jpg", patterns), 26 | ).toBe(true); 27 | expect( 28 | isMatchedGlobPatterns("Database/Entertainment/Books/notes.md", patterns), 29 | ).toBe(false); 30 | }); 31 | 32 | test("should match files by extension", () => { 33 | const patterns = ["**/*.{png,jpg,gif}"]; 34 | 
expect(isMatchedGlobPatterns("path/to/image.png", patterns)).toBe(true); 35 | expect(isMatchedGlobPatterns("path/to/photo.jpg", patterns)).toBe(true); 36 | expect(isMatchedGlobPatterns("path/to/animation.gif", patterns)).toBe(true); 37 | expect(isMatchedGlobPatterns("path/to/document.pdf", patterns)).toBe(false); 38 | expect(isMatchedGlobPatterns("path/to/notes.md", patterns)).toBe(false); 39 | }); 40 | 41 | test("should match specific directory patterns", () => { 42 | const patterns = ["Private/**"]; 43 | expect(isMatchedGlobPatterns("Private/secrets.md", patterns)).toBe(true); 44 | expect(isMatchedGlobPatterns("Private/folder/document.txt", patterns)).toBe( 45 | true, 46 | ); 47 | expect(isMatchedGlobPatterns("Public/document.md", patterns)).toBe(false); 48 | }); 49 | 50 | test("should match multiple patterns (OR logic)", () => { 51 | const patterns = ["**/attachments/**", "**/*.tmp", "Private/**"]; 52 | expect(isMatchedGlobPatterns("folder/attachments/file.png", patterns)).toBe( 53 | true, 54 | ); 55 | expect(isMatchedGlobPatterns("folder/temp.tmp", patterns)).toBe(true); 56 | expect(isMatchedGlobPatterns("Private/secret.md", patterns)).toBe(true); 57 | expect(isMatchedGlobPatterns("Public/document.md", patterns)).toBe(false); 58 | }); 59 | 60 | test("should handle exact folder name matches", () => { 61 | const patterns = ["**/attachments"]; 62 | expect(isMatchedGlobPatterns("Database/attachments", patterns)).toBe(true); 63 | expect( 64 | isMatchedGlobPatterns("folder/subfolder/attachments", patterns), 65 | ).toBe(true); 66 | expect(isMatchedGlobPatterns("attachments", patterns)).toBe(true); 67 | expect(isMatchedGlobPatterns("Database/attachments-backup", patterns)).toBe( 68 | false, 69 | ); 70 | expect( 71 | isMatchedGlobPatterns("Database/attachments/file.png", patterns), 72 | ).toBe(false); 73 | }); 74 | 75 | test("should handle invalid patterns gracefully", () => { 76 | const patterns = ["[invalid"]; 77 | const result = isMatchedGlobPatterns("any/path.md", 
patterns); 78 | expect(result).toBe(false); 79 | }); 80 | 81 | test("should handle mixed valid and invalid patterns", () => { 82 | const patterns = ["**/*.md", "[invalid", "**/attachments"]; 83 | const result = isMatchedGlobPatterns("folder/document.md", patterns); 84 | expect(result).toBe(true); 85 | }); 86 | }); 87 | -------------------------------------------------------------------------------- /src/tokenizer/tokenizers/EnglishOnlyTokenizer.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, test } from "@jest/globals"; 2 | import type { FactoryArgs } from "../tokenizer"; 3 | import { EnglishOnlyTokenizer } from "./EnglishOnlyTokenizer"; 4 | 5 | describe.each<{ 6 | constructorArgs: FactoryArgs; 7 | content: string; 8 | raw: boolean; 9 | expected: string[]; 10 | }>` 11 | constructorArgs | content | raw | expected 12 | ${{}} | ${"aa bb cc"} | ${false} | ${["aa", "bb", "cc"]} 13 | ${{}} | ${"Edith旧市街"} | ${false} | ${["Edith"]} 14 | ${{}} | ${"Edith旧city"} | ${false} | ${["Edith", "city"]} 15 | ${{}} | ${"イーディスold city"} | ${false} | ${["old", "city"]} 16 | ${{}} | ${"イーディスold市街"} | ${false} | ${["old"]} 17 | ${{}} | ${"イーディス旧市街"} | ${false} | ${[]} 18 | ${{}} | ${"$\\alpha"} | ${false} | ${["\\alpha"]} 19 | ${{}} | ${"__a _b __c__ d_ e__"} | ${false} | ${["a", "b", "c", "d", "e"]} 20 | ${{}} | ${"let hoge_huga = 1"} | ${false} | ${["let", "hoge", "huga", "1"]} 21 | ${{ treatUnderscoreAsPartOfWord: true }} | ${"__a _b __c__ d_ e__"} | ${false} | ${["__a", "_b", "__c__", "d_", "e__"]} 22 | ${{ treatUnderscoreAsPartOfWord: true }} | ${"let hoge_huga = 1"} | ${false} | ${["let", "hoge_huga", "1"]} 23 | ${{}} | ${"aaa\nbbb"} | ${false} | ${["aaa", "bbb"]} 24 | ${{}} | ${"aaa\r\nbbb"} | ${false} | ${["aaa", "bbb"]} 25 | `("tokenize", ({ constructorArgs, content, raw, expected }) => { 26 | test(`tokenize(${content}, ${raw}) = ${expected}`, () => { 27 | expect( 28 | new 
EnglishOnlyTokenizer(constructorArgs).tokenize(content, raw), 29 | ).toStrictEqual(expected); 30 | }); 31 | }); 32 | 33 | describe.each<{ 34 | content: string; 35 | expected: { word: string; offset: number }[]; 36 | }>` 37 | content | expected 38 | ${"aa bb cc"} | ${[{ word: "aa bb cc", offset: 0 }, { word: "bb cc", offset: 3 }, { word: "cc", offset: 6 }]} 39 | ${"aa:bb:cc"} | ${[{ word: "aa:bb:cc", offset: 0 }, { word: "bb:cc", offset: 3 }, { word: "cc", offset: 6 }]} 40 | ${"## @smi"} | ${[{ word: "## @smi", offset: 0 }, { word: "@smi", offset: 3 }, { word: "smi", offset: 4 }]} 41 | ${"@smi"} | ${[{ word: "@smi", offset: 0 }, { word: "smi", offset: 1 }]} 42 | ${"Edith旧市街"} | ${[{ word: "Edith旧市街", offset: 0 }, { word: "旧市街", offset: 5 }]} 43 | ${"Edith旧city"} | ${[{ word: "Edith旧city", offset: 0 }, { word: "旧city", offset: 5 }, { word: "city", offset: 6 }]} 44 | ${"ヒナold city"} | ${[{ word: "ヒナold city", offset: 0 }, { word: "old city", offset: 2 }, { word: "city", offset: 6 }]} 45 | ${"ヒナold市街"} | ${[{ word: "ヒナold市街", offset: 0 }, { word: "old市街", offset: 2 }, { word: "市街", offset: 5 }]} 46 | ${"ヒナ旧市街"} | ${[{ word: "ヒナ旧市街", offset: 0 }]} 47 | ${"$\\alpha"} | ${[{ word: "$\\alpha", offset: 0 }, { word: "\\alpha", offset: 1 }]} 48 | ${"::one::two"} | ${[{ word: "::one::two", offset: 0 }, { word: "one::two", offset: 2 }, { word: "two", offset: 7 }]} 49 | `("recursiveTokenize", ({ content, expected }) => { 50 | test(`recursiveTokenize(${content}) = ${expected}`, () => { 51 | expect(new EnglishOnlyTokenizer().recursiveTokenize(content)).toStrictEqual( 52 | expected, 53 | ); 54 | }); 55 | }); 56 | -------------------------------------------------------------------------------- /src/ui/component/CustomDictionaryWordAdd.svelte: -------------------------------------------------------------------------------- 1 | 2 | 57 | 58 |
59 |

Add a word to a custom dictionary

60 | 61 |

Dictionary

62 |
63 | 70 | onClickFileIcon(selectedDictionary.path)} 73 | > 74 | 75 | 76 |
77 | 78 |

{firstWordTitle}

79 |