├── .npmrc
├── experiments
│   ├── hugo
│   │   ├── page.md
│   │   ├── posts
│   │   │   ├── slug_test.md
│   │   │   ├── url_test.md
│   │   │   ├── post.md
│   │   │   └── post_bundle
│   │   │       └── index.md
│   │   ├── tags
│   │   │   └── tag
│   │   │       └── _index.md
│   │   └── index.md
│   ├── demo-astro
│   │   ├── .gitignore
│   │   ├── tsconfig.json
│   │   ├── src
│   │   │   ├── styles
│   │   │   │   └── global.css
│   │   │   ├── env.d.ts
│   │   │   ├── components
│   │   │   │   ├── Prose.astro
│   │   │   │   ├── Graph.astro
│   │   │   │   └── graphRenderer.ts
│   │   │   ├── pages
│   │   │   │   ├── index.astro
│   │   │   │   ├── facet.json.ts
│   │   │   │   └── notes
│   │   │   │       └── [...slug].astro
│   │   │   ├── layouts
│   │   │   │   └── main.astro
│   │   │   ├── content
│   │   │   │   └── config.ts
│   │   │   └── lib
│   │   │       ├── graph.ts
│   │   │       └── braindb.mjs
│   │   ├── tailwind.config.mjs
│   │   ├── package.json
│   │   ├── public
│   │   │   └── favicon.svg
│   │   ├── astro.config.mjs
│   │   ├── braindb.config.ts
│   │   └── README.md
│   ├── obsidian
│   │   ├── subfolder
│   │   │   └── Three laws of motion.md
│   │   └── Simple Note.md
│   ├── astro
│   │   └── mdx-page.mdx
│   └── cli
│       ├── README.md
│       ├── tsconfig.json
│       ├── package.json
│       └── src
│           ├── config.ts
│           └── index.ts
├── .tool-versions
├── packages
│   ├── astro
│   │   ├── .gitignore
│   │   ├── env.d.ts
│   │   ├── tsconfig.json
│   │   ├── tsup.config.ts
│   │   ├── package.json
│   │   ├── src
│   │   │   ├── remarkWikiLink.ts
│   │   │   └── index.ts
│   │   └── README.md
│   ├── docs
│   │   ├── README.md
│   │   ├── tsconfig.json
│   │   ├── src
│   │   │   ├── env.d.ts
│   │   │   ├── content
│   │   │   │   ├── config.ts
│   │   │   │   └── docs
│   │   │   │       ├── index.md
│   │   │   │       └── notes
│   │   │   │           ├── alternatives.md
│   │   │   │           ├── parallel.md
│   │   │   │           ├── slug.md
│   │   │   │           ├── unsorted.md
│   │   │   │           ├── use-cases.md
│   │   │   │           ├── remark-wiki-link.md
│   │   │   │           ├── astro-integration.md
│   │   │   │           ├── content-query.md
│   │   │   │           ├── dataview.md
│   │   │   │           ├── metadata.md
│   │   │   │           ├── frontmatter-schema.md
│   │   │   │           └── todo.md
│   │   │   └── styles
│   │   │       └── custom.css
│   │   ├── .gitignore
│   │   ├── package.json
│   │   └── astro.config.mjs
│   ├── core
│   │   ├── drizzle
│   │   │   ├── 0006_icy_iron_monger.sql
│   │   │   ├── 0002_worthless_silk_fever.sql
│   │   │   ├── 0003_jazzy_epoch.sql
│   │   │   ├── 0005_stormy_luckman.sql
│   │   │   ├── 0001_petite_fixer.sql
│   │   │   ├── 0004_ambitious_starbolt.sql
│   │   │   ├── 0000_same_talon.sql
│   │   │   ├── 0007_peaceful_stepford_cuckoos.sql
│   │   │   └── meta
│   │   │       ├── _journal.json
│   │   │       ├── 0000_snapshot.json
│   │   │       ├── 0002_snapshot.json
│   │   │       ├── 0001_snapshot.json
│   │   │       ├── 0003_snapshot.json
│   │   │       └── 0006_snapshot.json
│   │   ├── tsconfig.json
│   │   ├── drizzle.config.ts
│   │   ├── README.md
│   │   ├── src
│   │   │   ├── parser.ts
│   │   │   ├── deleteDocument.ts
│   │   │   ├── defaults.ts
│   │   │   ├── toText.ts
│   │   │   ├── toDot.ts
│   │   │   ├── utils.ts
│   │   │   ├── toJson.ts
│   │   │   ├── Link.ts
│   │   │   ├── types.ts
│   │   │   ├── Task.ts
│   │   │   ├── db.ts
│   │   │   ├── query.ts
│   │   │   ├── resolveLinks.ts
│   │   │   ├── getMarkdown.ts
│   │   │   ├── Document.ts
│   │   │   └── schema.ts
│   │   └── package.json
│   ├── remark-wiki-link
│   │   ├── vitest.config.ts
│   │   ├── tsconfig.json
│   │   ├── src
│   │   │   └── index.ts
│   │   ├── LICENSE
│   │   ├── package.json
│   │   ├── README.md
│   │   └── test
│   │       └── index.test.ts
│   ├── mdast-util-wiki-link
│   │   ├── vitest.config.ts
│   │   ├── src
│   │   │   ├── index.ts
│   │   │   ├── to-markdown.ts
│   │   │   └── from-markdown.ts
│   │   ├── tsconfig.json
│   │   ├── package.json
│   │   ├── README.md
│   │   └── test
│   │       └── index.test.ts
│   ├── micromark-extension-wiki-link
│   │   ├── vitest.config.ts
│   │   ├── src
│   │   │   ├── index.ts
│   │   │   ├── html.ts
│   │   │   └── syntax.ts
│   │   ├── tsconfig.json
│   │   ├── package.json
│   │   ├── README.md
│   │   └── test
│   │       └── micromark.test.js
│   └── remark-dataview
│       ├── tsconfig.json
│       ├── package.json
│       ├── src
│       │   ├── sqlUtils.test.ts
│       │   └── index.ts
│       └── README.md
├── .vscode
│   ├── extensions.json
│   └── launch.json
├── pnpm-workspace.yaml
├── turbo.json
├── package.json
├── tsconfig.json
├── README.md
└── .gitignore

/.npmrc: 
-------------------------------------------------------------------------------- 1 | node-linker=hoisted -------------------------------------------------------------------------------- /experiments/hugo/page.md: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /.tool-versions: -------------------------------------------------------------------------------- 1 | nodejs 18.20.5 2 | -------------------------------------------------------------------------------- /packages/astro/.gitignore: -------------------------------------------------------------------------------- 1 | dist -------------------------------------------------------------------------------- /packages/docs/README.md: -------------------------------------------------------------------------------- 1 | # BrainDB docs 2 | -------------------------------------------------------------------------------- /experiments/demo-astro/.gitignore: -------------------------------------------------------------------------------- 1 | .braindb 2 | src/content/notes -------------------------------------------------------------------------------- /packages/astro/env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | -------------------------------------------------------------------------------- /experiments/demo-astro/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "astro/tsconfigs/base" 3 | } 4 | -------------------------------------------------------------------------------- /packages/core/drizzle/0006_icy_iron_monger.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE `links` DROP COLUMN `revision`; -------------------------------------------------------------------------------- /experiments/hugo/posts/slug_test.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Slug test 3 | # slug: slug-test-1 4 | --- 5 | -------------------------------------------------------------------------------- /experiments/hugo/posts/url_test.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: URL test 3 | url: other/url-test 4 | --- 5 | -------------------------------------------------------------------------------- /packages/core/drizzle/0002_worthless_silk_fever.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE `documents` DROP COLUMN `markdown`; -------------------------------------------------------------------------------- /experiments/hugo/posts/post.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Awesome post 3 | tags: [tag, another-tag] 4 | --- 5 | -------------------------------------------------------------------------------- /experiments/hugo/posts/post_bundle/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Post bundle example 3 | tags: [tag] 4 | --- 5 | -------------------------------------------------------------------------------- /packages/core/drizzle/0003_jazzy_epoch.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE documents ADD `revision` integer DEFAULT 0 NOT NULL; -------------------------------------------------------------------------------- 
/packages/core/drizzle/0005_stormy_luckman.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE documents ADD `cfghash` integer DEFAULT 0 NOT NULL; -------------------------------------------------------------------------------- /packages/docs/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "astro/tsconfigs/strict", 3 | "exclude": ["dist"] 4 | } 5 | -------------------------------------------------------------------------------- /packages/core/drizzle/0001_petite_fixer.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE documents ADD `updated_at` integer DEFAULT 0 NOT NULL; -------------------------------------------------------------------------------- /packages/core/drizzle/0004_ambitious_starbolt.sql: -------------------------------------------------------------------------------- 1 | ALTER TABLE links ADD `revision` integer DEFAULT 0 NOT NULL; -------------------------------------------------------------------------------- /experiments/demo-astro/src/styles/global.css: -------------------------------------------------------------------------------- 1 | @tailwind base; 2 | @tailwind components; 3 | @tailwind utilities; 4 | -------------------------------------------------------------------------------- /experiments/obsidian/subfolder/Three laws of motion.md: -------------------------------------------------------------------------------- 1 | # First law 2 | 3 | ## Second law 4 | 5 | ### Third law 6 | -------------------------------------------------------------------------------- /packages/docs/src/env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | /// 3 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["astro-build.astro-vscode"], 3 | "unwantedRecommendations": [] 4 | } 5 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/env.d.ts: -------------------------------------------------------------------------------- 1 | /// 2 | /// 3 | -------------------------------------------------------------------------------- /pnpm-workspace.yaml: -------------------------------------------------------------------------------- 1 | packages: 2 | - packages/* 3 | onlyBuiltDependencies: 4 | - better-sqlite3 5 | - esbuild 6 | - sharp 7 | -------------------------------------------------------------------------------- /experiments/hugo/tags/tag/_index.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Tag also can be a page 3 | --- 4 | 5 | Some content followed by a list of posts with this tag. 6 | -------------------------------------------------------------------------------- /experiments/astro/mdx-page.mdx: -------------------------------------------------------------------------------- 1 | --- 2 | title: MDX page 3 | tags: [tag, another-tag] 4 | --- 5 | 6 | /* 7 | import a from "b" 8 | 9 | a c {1 + 1} d 10 | */ -------------------------------------------------------------------------------- /experiments/cli/README.md: -------------------------------------------------------------------------------- 1 | # @braindb/cli 2 | 3 | > [!WARNING] 4 | > Work in progress. Expect breaking changes. 5 | 6 | ## What is this? 
7 | 8 | CLI interface for `@braindb/core` 9 | -------------------------------------------------------------------------------- /packages/remark-wiki-link/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitest/config"; 2 | import tsconfigPaths from "vite-tsconfig-paths"; 3 | 4 | export default defineConfig({ 5 | plugins: [tsconfigPaths()], 6 | }); 7 | -------------------------------------------------------------------------------- /packages/mdast-util-wiki-link/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitest/config"; 2 | import tsconfigPaths from "vite-tsconfig-paths"; 3 | 4 | export default defineConfig({ 5 | plugins: [tsconfigPaths()], 6 | }); 7 | -------------------------------------------------------------------------------- /packages/astro/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "astro/tsconfigs/strictest", 3 | "compilerOptions": { 4 | "module": "Node16", 5 | "moduleResolution": "Node16", 6 | "jsx": "preserve" 7 | }, 8 | "exclude": ["dist"] 9 | } 10 | -------------------------------------------------------------------------------- /packages/mdast-util-wiki-link/src/index.ts: -------------------------------------------------------------------------------- 1 | export { 2 | fromMarkdown, 3 | type FromMarkdownOptions, 4 | type WikiLinkNode, 5 | } from "./from-markdown.js"; 6 | export { toMarkdown, type ToMarkdownOptions } from "./to-markdown.js"; 7 | -------------------------------------------------------------------------------- /packages/micromark-extension-wiki-link/vitest.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "vitest/config"; 2 | import tsconfigPaths from "vite-tsconfig-paths"; 3 | 4 | export default defineConfig({ 5 | plugins: [tsconfigPaths()], 6 | }); 7 | -------------------------------------------------------------------------------- /packages/core/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "extends": "../../tsconfig.json", 4 | "compilerOptions": { 5 | "outDir": "./dist" 6 | }, 7 | "include": ["./src"], 8 | "rootDir": "./src" 9 | } 10 | -------------------------------------------------------------------------------- /experiments/cli/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "extends": "../../tsconfig.json", 4 | "compilerOptions": { 5 | "outDir": "./dist" 6 | }, 7 | "include": ["./src"], 8 | "rootDir": "./src" 9 | } 10 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/components/Prose.astro: -------------------------------------------------------------------------------- 1 | --- 2 | 3 | --- 4 | 5 |
12 | -------------------------------------------------------------------------------- /packages/remark-dataview/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "extends": "../../tsconfig.json", 4 | "compilerOptions": { 5 | "outDir": "./dist" 6 | }, 7 | "include": ["./src"], 8 | "rootDir": "./src" 9 | } 10 | -------------------------------------------------------------------------------- /.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "0.2.0", 3 | "configurations": [ 4 | { 5 | "command": "./node_modules/.bin/astro dev", 6 | "name": "Development server", 7 | "request": "launch", 8 | "type": "node-terminal" 9 | } 10 | ] 11 | } 12 | -------------------------------------------------------------------------------- /experiments/demo-astro/tailwind.config.mjs: -------------------------------------------------------------------------------- 1 | /** @type {import('tailwindcss').Config} */ 2 | export default { 3 | content: ["./src/**/*.{astro,html,js,jsx,md,mdx,svelte,ts,tsx,vue}"], 4 | theme: { 5 | extend: {}, 6 | }, 7 | plugins: [require("@tailwindcss/typography")], 8 | }; 9 | -------------------------------------------------------------------------------- /packages/docs/src/content/config.ts: -------------------------------------------------------------------------------- 1 | import { defineCollection } from "astro:content"; 2 | import { docsSchema } from "@astrojs/starlight/schema"; 3 | import { docsLoader } from "@astrojs/starlight/loaders"; 4 | 5 | export const collections = { 6 | docs: defineCollection({ loader: docsLoader(), schema: docsSchema() }), 7 | }; 8 | -------------------------------------------------------------------------------- /packages/micromark-extension-wiki-link/src/index.ts: -------------------------------------------------------------------------------- 1 | import { WikiLinkHtmlOptions } from "./html.js"; 2 | import { WikiLinkSyntaxOptions } from "./syntax.js"; 3 | 4 | export { html } from "./html.js"; 5 | export { syntax } from "./syntax.js"; 6 | 7 | export type WikiLinkOptions = WikiLinkHtmlOptions & WikiLinkSyntaxOptions; 8 | -------------------------------------------------------------------------------- /packages/core/drizzle.config.ts: -------------------------------------------------------------------------------- 1 | import type { Config } from "drizzle-kit"; 2 | 3 | export default { 4 | schema: "./src/schema.ts", 5 | out: "./drizzle", 6 | dialect: "sqlite", 7 | // for `bunx drizzle-kit studio` but it doesn't work 8 | // driver: "better-sqlite", 9 | dbCredentials: { 10 | url: "tmp/db.sqlite3", 11 | }, 12 | } satisfies Config; 13 | -------------------------------------------------------------------------------- /packages/docs/.gitignore: -------------------------------------------------------------------------------- 1 | # build output 2 | dist/ 3 | # generated types 4 | .astro/ 5 | 6 | # dependencies 7 | node_modules/ 8 | 9 | # logs 10 | npm-debug.log* 11 | yarn-debug.log* 12 | yarn-error.log* 13 | pnpm-debug.log* 14 | 15 | 16 | # environment variables 17 | .env 18 | .env.production 19 | 20 | # macOS-specific files 21 | .DS_Store 22 | 23 | .braindb 24 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: BrainDB 
3 | tableOfContents: false 4 | --- 5 | 6 | :::caution 7 | BrainDB docs are not ready. For now, it's just a bunch of notes. 8 | ::: 9 | 10 | - The main demo is here: [astro-digital-garden](https://astro-digital-garden.stereobooster.com/recipes/braindb/). It is probably better than this documentation. 11 | - [[vision]] 12 | - [[architecture]] 13 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/components/Graph.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import { renderer, DEFAULTS } from "./graphRenderer"; 3 | 4 | export interface Props { 5 | graph: any; 6 | } 7 | 8 | const { graph } = Astro.props; 9 | 10 | // probably need to use cache here 11 | const result = renderer(graph, { 12 | ...DEFAULTS, 13 | width: 800, 14 | height: 400, 15 | }); 16 | --- 17 | 18 | 19 | -------------------------------------------------------------------------------- /turbo.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://turbo.build/schema.json", 3 | "tasks": { 4 | "test": {}, 5 | "clean": {}, 6 | "tsc": { 7 | "dependsOn": ["^build"] 8 | }, 9 | "build": { 10 | "dependsOn": ["^build"], 11 | "outputs": ["dist/**"] 12 | }, 13 | "dev": { 14 | "dependsOn": ["^build"], 15 | "cache": false, 16 | "persistent": true 17 | } 18 | } 19 | } -------------------------------------------------------------------------------- /packages/astro/tsup.config.ts: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "tsup"; 2 | import { peerDependencies } from "./package.json"; 3 | 4 | export default defineConfig((options) => { 5 | const dev = !!options.watch; 6 | return { 7 | entry: ["src/**/*.(ts|js)"], 8 | format: ["esm"], 9 | target: "node18", 10 | bundle: true, 11 | dts: true, 12 | sourcemap: true, 13 | clean: true, 14 | splitting: false, 15 | minify: !dev, 16 | external: [...Object.keys(peerDependencies)], 17 | tsconfig: "tsconfig.json", 18 | }; 19 | }); 20 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/alternatives.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: alternatives 3 | draft: true 4 | --- 5 | 6 | - [contentlayer2](https://github.com/timlrx/contentlayer2) 7 | - [Contentlayer2 fork](https://github.com/contentlayerdev/contentlayer/issues/429#issuecomment-2030339798) 8 | - [markdownlayer](https://github.com/mburumaxwell/markdownlayer) 9 | - [markdowndb](https://github.com/datopian/markdowndb) 10 | - [content-structure](https://github.com/MicroWebStacks/content-structure) 11 | - Astro Content Layer 12 | - Nuxt Content 13 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "braindb", 3 | "type": "module", 4 | "private": true, 5 | "workspaces": [ 6 | "packages/*" 7 | ], 8 | "dependencies": { 9 | "turbo": "^2.4.4", 10 | "typescript": "^5.8.2", 11 | "vitest": "^3.0.9" 12 | }, 13 | "scripts": { 14 | "test": "vitest", 15 | "build": "turbo run build", 16 | "dev": "turbo run dev", 17 | "clean": "turbo run clean", 18 | "tsc": "turbo run tsc", 19 | "graph": "cyto-nodejs -s tmp/graph.json -d tmp/graph.svg" 20 | }, 21 | "packageManager": "pnpm@10.7.0" 22 | } 23 | 
-------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "compilerOptions": { 4 | "target": "ES2022", 5 | "useDefineForClassFields": true, 6 | "module": "node16", 7 | "lib": ["ES2022"], 8 | "skipLibCheck": true, 9 | 10 | /* Bundler mode */ 11 | "moduleResolution": "Node16", 12 | "esModuleInterop": true, 13 | "declaration": true, 14 | 15 | /* Linting */ 16 | "strict": true, 17 | "noUnusedLocals": true, 18 | "noUnusedParameters": true, 19 | "noFallthroughCasesInSwitch": true, 20 | 21 | "types": [] 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/pages/index.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from "../layouts/main.astro"; 3 | import Graph from "../components/Graph.astro"; 4 | import { getCollection } from "astro:content"; 5 | import { getGraph } from "../lib/graph"; 6 | 7 | const notes = await getCollection("notes"); 8 | const graph = await getGraph(); 9 | --- 10 | 11 | 12 | 13 | 14 |
23 | 24 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/layouts/main.astro: -------------------------------------------------------------------------------- 1 | --- 2 | import "../styles/global.css"; 3 | import Prose from "../components/Prose.astro"; 4 | 5 | const { content } = Astro.props; 6 | --- 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | {content.title} 15 | 16 | 17 |
22 | 23 | 24 | -------------------------------------------------------------------------------- /packages/core/README.md: -------------------------------------------------------------------------------- 1 | # @braindb/core 2 | 3 | > [!WARNING] 4 | > Work in progress. Expect breaking changes. 5 | 6 | ## What is this? 7 | 8 | **TL;DR:** A markdown graph content layer database. 9 | 10 | Read more [here](https://astro-digital-garden.stereobooster.com/recipes/braindb/) 11 | 12 | ## What does it do? 13 | 14 | - Watches a folder. 15 | - Each Markdown file is parsed to extract links and frontmatter. 16 | - Each Markdown file is saved in the database. 17 | - Links are resolved to form a graph. 18 | 19 | ## What can it do? 20 | 21 | - Find (internal) broken links. 22 | - Show (internal) backlinks. 23 | - Resolve wiki links. 24 | - Etc. 25 | -------------------------------------------------------------------------------- /packages/remark-wiki-link/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "compilerOptions": { 4 | "target": "ES2022", 5 | "useDefineForClassFields": true, 6 | "module": "node16", 7 | "lib": ["ES2022"], 8 | "skipLibCheck": true, 9 | 10 | /* Bundler mode */ 11 | "moduleResolution": "Node16", 12 | "esModuleInterop": true, 13 | "declaration": true, 14 | 15 | /* Linting */ 16 | "strict": true, 17 | "noUnusedLocals": true, 18 | "noUnusedParameters": true, 19 | "noFallthroughCasesInSwitch": true, 20 | 21 | "types": [], 22 | 23 | "outDir": "./dist" 24 | }, 25 | "include": ["./src"] 26 | } 27 | -------------------------------------------------------------------------------- /packages/mdast-util-wiki-link/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/tsconfig", 3 | "compilerOptions": { 4 | "target": "ES2022", 5 | "useDefineForClassFields": true, 6 | "module": "node16", 7 | "lib": ["ES2022"], 8 | "skipLibCheck": true, 9 | 10 | /* Bundler mode */ 11 | "moduleResolution": "Node16", 12 | "esModuleInterop": true, 13 | "declaration": true, 14 | 15 | /* Linting */ 16 | "strict": true, 17 | "noUnusedLocals": true, 18 | "noUnusedParameters": true, 19 | "noFallthroughCasesInSwitch": true, 20 | 21 | "types": [], 22 | 23 | "outDir": "./dist" 24 | }, 25 | "include": ["./src"] 26 | } 27 | -------------------------------------------------------------------------------- /packages/core/src/parser.ts: -------------------------------------------------------------------------------- 1 | // @ts-expect-error https://github.com/microsoft/TypeScript/issues/42873#issuecomment-2037722981 2 | import type { Root } from "mdast"; 3 | 4 | import { unified } from "unified"; 5 | import remarkFrontmatter from "remark-frontmatter"; 6 | import remarkParse from "remark-parse"; 7 | import wikiLinkPlugin from "@braindb/remark-wiki-link"; 8 | import remarkStringify from "remark-stringify"; 9 | import remarkGfm from "remark-gfm"; 10 | // import remarkMdx from "remark-mdx" 11 | 12 | export const mdParser = unified() 13 | .use(remarkParse) 14 | .use(remarkFrontmatter) 15 | .use(wikiLinkPlugin) 16 | .use(remarkGfm) 17 | .use(remarkStringify, { resourceLink: false }); 18 | -------------------------------------------------------------------------------- /packages/micromark-extension-wiki-link/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": 
"https://json.schemastore.org/tsconfig", 3 | "compilerOptions": { 4 | "target": "ES2022", 5 | "useDefineForClassFields": true, 6 | "module": "node16", 7 | "lib": ["ES2022"], 8 | "skipLibCheck": true, 9 | 10 | /* Bundler mode */ 11 | "moduleResolution": "Node16", 12 | "esModuleInterop": true, 13 | "declaration": true, 14 | 15 | /* Linting */ 16 | "strict": true, 17 | "noUnusedLocals": true, 18 | "noUnusedParameters": true, 19 | "noFallthroughCasesInSwitch": true, 20 | 21 | "types": [], 22 | 23 | "outDir": "./dist" 24 | }, 25 | "include": ["./src"] 26 | } 27 | -------------------------------------------------------------------------------- /packages/docs/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@braindb/docs", 3 | "type": "module", 4 | "version": "0.0.1", 5 | "private": true, 6 | "scripts": { 7 | "dev": "astro dev", 8 | "build": "astro check && astro build", 9 | "preview": "astro preview", 10 | "astro": "astro" 11 | }, 12 | "dependencies": { 13 | "@astrojs/check": "^0.9.4", 14 | "@astrojs/starlight": "^0.32.5", 15 | "@beoe/cache": "^0.1.0", 16 | "@beoe/rehype-vizdom": "^0.4.2", 17 | "@braindb/astro": "workspace:^", 18 | "@braindb/remark-dataview": "workspace:^", 19 | "astro": "^5.5.5", 20 | "astro-robots-txt": "^1.0.0", 21 | "sharp": "^0.33.5", 22 | "typescript": "^5.5.2" 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /experiments/demo-astro/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "demo-astro", 3 | "type": "module", 4 | "version": "0.0.1", 5 | "private": true, 6 | "scripts": { 7 | "dev-test": "astro dev", 8 | "build-test": "astro build", 9 | "preview": "astro preview", 10 | "astro": "astro", 11 | "braindb": "braindb" 12 | }, 13 | "dependencies": { 14 | "@astrojs/mdx": "^3.1.2", 15 | "@astrojs/tailwind": "^5.1.0", 16 | "@braindb/core": "workspace:*", 17 | "@tailwindcss/typography": "^0.5.13", 18 | "astro": "^4.11.3", 19 | "github-slugger": "^2.0.0", 20 | "graphology": "^0.25.4", 21 | "graphology-layout": "^0.6.1", 22 | "graphology-svg": "^0.1.3", 23 | "graphology-utils": "^2.5.2", 24 | "tailwindcss": "^3.4.4" 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/content/config.ts: -------------------------------------------------------------------------------- 1 | // 1. Import utilities from `astro:content` 2 | import { z, defineCollection } from "astro:content"; 3 | 4 | // 2. Define a `type` and `schema` for each collection 5 | const notesCollection = defineCollection({ 6 | type: "content", // v2.5.0 and later 7 | schema: z.object({ 8 | title: z.string(), 9 | // url: z.string(), 10 | // backlinks: z.array( 11 | // z.object({ 12 | // url: z.string(), 13 | // title: z.string(), 14 | // }) 15 | // ), 16 | // tags: z.array(z.string()), 17 | // image: z.string().optional(), 18 | }), 19 | }); 20 | 21 | // 3. 
Export a single `collections` object to register your collection(s) 22 | export const collections = { 23 | notes: notesCollection, 24 | }; 25 | -------------------------------------------------------------------------------- /packages/core/src/deleteDocument.ts: -------------------------------------------------------------------------------- 1 | import { document, link, task } from "./schema.js"; 2 | import { eq, not } from "drizzle-orm"; 3 | import { Db } from "./db.js"; 4 | 5 | export function deleteDocument(db: Db, idPath: string) { 6 | db.delete(document).where(eq(document.path, idPath)).run(); 7 | db.delete(link).where(eq(link.from, idPath)).run(); 8 | db.update(link).set({ to: null }).where(eq(link.to, idPath)).run(); 9 | db.delete(task).where(eq(task.from, idPath)).run(); 10 | } 11 | 12 | export function deleteOldRevision(db: Db, revision: number) { 13 | db.select({ 14 | path: document.path, 15 | }) 16 | .from(document) 17 | .where(not(eq(document.revision, revision))) 18 | .all() 19 | .forEach(({ path }) => { 20 | deleteDocument(db, path); 21 | }); 22 | } 23 | -------------------------------------------------------------------------------- /experiments/demo-astro/public/favicon.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 9 | 10 | -------------------------------------------------------------------------------- /experiments/obsidian/Simple Note.md: -------------------------------------------------------------------------------- 1 | Read more here: https://help.obsidian.md/Linking+notes+and+files/Internal+links 2 | 3 | ## Markown links 4 | 5 | - [Three laws of motion](subfolder/Three%20laws%20of%20motion.md) 6 | - [Three laws of motion]() 7 | - [Three laws of motion](/example/obsidian/subfolder/Three%20laws%20of%20motion.md) 8 | - [Three laws of motion](/example/obsidian/subfolder/Three laws of motion.md) (this is wrong markdown) 9 | 10 | ## Wikilink 11 | 12 | - [[Three laws of motion]] 13 | - [[Three laws of motion#Second law]] 14 | - [[Three laws of motion|3lm]] 15 | - [[Three laws of motion#Second law|2ndl]] 16 | 17 | ## Autolink 18 | 19 | - https://example.com 20 | - https://example.com#anchor 21 | - https://example.com?query 22 | 23 | ## Self anchors 24 | 25 | - [anchor](#self-anchors) 26 | -------------------------------------------------------------------------------- /packages/docs/src/styles/custom.css: -------------------------------------------------------------------------------- 1 | .column-list { 2 | column-width: calc(var(--sl-content-width) / 2 - 1.5rem); 3 | 4 | ul { 5 | padding: 0; 6 | list-style: none; 7 | margin-top: 0 !important; 8 | } 9 | } 10 | 11 | @media (min-width: 72rem) { 12 | .sl-container { 13 | margin-inline: var(--sl-content-margin-inline, 0) !important; 14 | } 15 | } 16 | 17 | .vizdom { 18 | :not([fill]) { 19 | fill: var(--sl-color-white); 20 | } 21 | [fill="black"], 22 | [fill="#000"] { 23 | fill: var(--sl-color-white); 24 | } 25 | [stroke="black"], 26 | [stroke="#000"] { 27 | stroke: var(--sl-color-white); 28 | } 29 | [fill="white"], 30 | [fill="#fff"] { 31 | fill: var(--sl-color-black); 32 | } 33 | [stroke="white"], 34 | [stroke="#fff"] { 35 | stroke: var(--sl-color-black); 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /experiments/demo-astro/astro.config.mjs: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "astro/config"; 2 | import tailwind from "@astrojs/tailwind"; 3 | import 
mdx from "@astrojs/mdx"; 4 | // import { bdb } from "./src/lib/braindb.mjs"; 5 | // await bdb.ready(); 6 | 7 | // https://astro.build/config 8 | export default defineConfig({ 9 | integrations: [ 10 | tailwind({ 11 | configFile: "./tailwind.config.mjs", 12 | applyBaseStyles: false, 13 | }), 14 | mdx(), 15 | ], 16 | vite: { 17 | optimizeDeps: { 18 | exclude: [ 19 | "fsevents", 20 | "@node-rs", 21 | "@napi-rs", 22 | // "@braindb/core", 23 | // "@node-rs/xxhash", 24 | // "@node-rs/xxhash-darwin-x64", 25 | // "@node-rs/xxhash-wasm32-wasi", 26 | // "@napi-rs/simple-git-darwin-x64", 27 | ], 28 | }, 29 | }, 30 | }); 31 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/pages/facet.json.ts: -------------------------------------------------------------------------------- 1 | import { bdb } from "../lib/braindb.mjs"; 2 | 3 | export async function GET() { 4 | return new Response( 5 | JSON.stringify( 6 | (await bdb.documents()).map((document) => { 7 | const pathParts = document.url().split("/"); 8 | pathParts.shift(); // '' 9 | pathParts.pop(); // '' 10 | pathParts.shift(); // 'notes' 11 | 12 | return { 13 | // id: document.id(), 14 | url: document.url(), 15 | title: document.frontmatter().title as string, 16 | content: document.text(), 17 | tags: document.frontmatter().tags || [], 18 | path: { 19 | lvl0: pathParts[0], 20 | lvl1: pathParts[1], 21 | lvl2: pathParts[2], 22 | }, 23 | }; 24 | }), 25 | null, 26 | 2 27 | ) 28 | ); 29 | } 30 | -------------------------------------------------------------------------------- /packages/core/src/defaults.ts: -------------------------------------------------------------------------------- 1 | import { Frontmatter } from "./index.js"; 2 | import { basename } from "node:path"; 3 | 4 | export const defaultGetUrl = (filePath: string, _frontmatter: Frontmatter) => { 5 | let url = 6 | filePath.replace(/\/_?index\.mdx?$/, "").replace(/\.mdx?$/, "") || "/"; 7 | 8 | // if (!url.startsWith("/")) url = "/" + url; 9 | if (!url.endsWith("/")) url = url + "/"; 10 | 11 | return url; 12 | }; 13 | 14 | // Hugo style 15 | // export const getSlug = (filePath: string, frontmatter: Frontmatter) => { 16 | // let slug: string; 17 | // if (frontmatter.slug) { 18 | // // no validation - trusting source 19 | // slug = String(frontmatter.slug); 20 | // } else { 21 | // slug = basename(filePath.replace(/\/_?index\.mdx?$/, "")).replace(/\.mdx?$/, "") || "/" 22 | // } 23 | // }; 24 | 25 | export const defaultGetSlug = (filePath: string, _frontmatter: Frontmatter) => 26 | basename(filePath.replace(/\/_?index\.mdx?$/, "")).replace(/\.mdx?$/, "") || 27 | "/"; 28 | -------------------------------------------------------------------------------- /experiments/hugo/index.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Home page 3 | --- 4 | 5 | Read more here: https://gohugo.io/content-management/urls/ 6 | 7 | ## HTML link - not supported 8 | 9 | 1 10 | 11 | ## Markdown link 12 | 13 | ### Web link 14 | 15 | [internal link](posts/post) 16 | [internal link with anchor](posts/post#2) 17 | [internal link](/example/hugo/posts/post) 18 | [internal link with anchor](/example/hugo/posts/post#2) 19 | 20 | #### Unsupported 21 | 22 | [pagevars][] 23 | [pagevars]: /example/hugo/posts/post 24 | 25 | Possible solution: https://github.com/remarkjs/remark-inline-links 26 | 27 | ### Portable link 28 | 29 | [internal link](./posts/post.md) 30 | [internal link](posts/post.md) 31 | [internal 
link](/example/hugo/posts/post.md) 32 | 33 | ### External link 34 | 35 | [external link](http://example.com) 36 | [external link](http://example.com#anchor) 37 | [external link](http://example.com?query) 38 | 39 | ## Autolink 40 | 41 | https://example.com 42 | https://example.com#anchor 43 | https://example.com?query 44 | -------------------------------------------------------------------------------- /packages/core/src/toText.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Alternatives: 3 | * 4 | * - [mdast-util-to-string](https://www.npmjs.com/package/mdast-util-to-string) 5 | * - [remark-mdx-to-plain-text](https://www.npmjs.com/package/remark-mdx-to-plain-text) 6 | * - [remark-plain-text](https://www.npmjs.com/package/remark-plain-text) 7 | * - [strip-markdown](https://www.npmjs.com/package/strip-markdown) 8 | */ 9 | 10 | import { unified } from "unified"; 11 | import stripMarkdown from "strip-markdown"; 12 | import remarkStringify from "remark-stringify"; 13 | import { toString } from "mdast-util-to-string"; 14 | 15 | const processor = unified() 16 | .use(stripMarkdown) 17 | .use(remarkStringify, { resourceLink: false }); 18 | 19 | // TODO: I'm not sure about this one, need to test it more 20 | export function toText(ast: any) { 21 | try { 22 | const root = processor.runSync(ast); 23 | return processor.stringify(root) as string; 24 | } catch (e) { 25 | // sometimes doesn't preserve "readable" formating 26 | // but works for all extensions 27 | return toString(ast); 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /packages/remark-wiki-link/src/index.ts: -------------------------------------------------------------------------------- 1 | import { syntax } from "@braindb/micromark-extension-wiki-link"; 2 | import { 3 | fromMarkdown, 4 | toMarkdown, 5 | FromMarkdownOptions, 6 | ToMarkdownOptions, 7 | } from "@braindb/mdast-util-wiki-link"; 8 | 9 | export type RemarkWikiLinkOptions = FromMarkdownOptions & ToMarkdownOptions; 10 | 11 | export function remarkWikiLink(opts: RemarkWikiLinkOptions = {}) { 12 | // @ts-expect-error: TS is wrong about `this`. 
13 | const self = /** @type {import('unified').Processor} */ this; 14 | const data = self.data(); 15 | 16 | const micromarkExtensions = 17 | data.micromarkExtensions || (data.micromarkExtensions = []); 18 | const fromMarkdownExtensions = 19 | data.fromMarkdownExtensions || (data.fromMarkdownExtensions = []); 20 | const toMarkdownExtensions = 21 | data.toMarkdownExtensions || (data.toMarkdownExtensions = []); 22 | 23 | micromarkExtensions.push(syntax(opts)); 24 | fromMarkdownExtensions.push(fromMarkdown(opts)); 25 | toMarkdownExtensions.push(toMarkdown(opts)); 26 | } 27 | 28 | export default remarkWikiLink; 29 | -------------------------------------------------------------------------------- /packages/core/src/toDot.ts: -------------------------------------------------------------------------------- 1 | // import { isNotNull, sql } from "drizzle-orm"; 2 | // import { document, link } from "./schema.js"; 3 | import { Db } from "./db.js"; 4 | 5 | export function toDot(_db: Db) { 6 | // const edges = db 7 | // .select({ 8 | // from_id: link.from_id, 9 | // to_id: link.to_id, 10 | // }) 11 | // .from(link) 12 | // // need to show broken links on the graph 13 | // .where(isNotNull(link.to)) 14 | // .all(); 15 | 16 | // const nodes = db 17 | // .select({ 18 | // id: sql`json_extract(${document.properties}, '$.id')`, 19 | // title: sql`json_extract(${document.frontmatter}, '$.title')`, 20 | // url: document.url, 21 | // }) 22 | // .from(document) 23 | // .all(); 24 | 25 | // const dot = `digraph G { 26 | // bgcolor=transparent; 27 | 28 | // ${nodes 29 | // .map((node) => `${node.id} [label="${node.title}",href="${node.url}"];`) 30 | // .join("\n")} 31 | 32 | // ${edges 33 | // .map( 34 | // (edge) => `${edge.from_id} -> ${edge.to_id};` /* [label="${edge.label}"]; */ 35 | // ) 36 | // .join("\n")} 37 | // }`; 38 | 39 | // return dot; 40 | } 41 | -------------------------------------------------------------------------------- /packages/remark-wiki-link/LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Mark Hudnall 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/pages/notes/[...slug].astro: -------------------------------------------------------------------------------- 1 | --- 2 | import Layout from "../../layouts/main.astro"; 3 | import { getCollection } from "astro:content"; 4 | import { bdb } from "../../lib/braindb"; 5 | 6 | // 1. Generate a new path for every collection entry 7 | export async function getStaticPaths() { 8 | const notes = await getCollection("notes"); 9 | return notes.map((note) => { 10 | return { 11 | params: { slug: note.slug }, 12 | props: { entry: note }, 13 | }; 14 | }); 15 | } 16 | // 2. For your template, you can get the entry directly from the prop 17 | const { entry } = Astro.props; 18 | const { Content, headings } = await entry.render(); 19 | 20 | const doc = await bdb.findDocument(`/${entry.collection}/${entry.id}`); 21 | --- 22 | 23 | 24 |
Title: {entry.data.title} 25 | 26 | Backlinks: 27 | { 28 | doc.documentsFrom().map((x) => ( 29 | 30 | {x.title()} 31 | 32 | )) 33 | } 34 | 35 | 39 | 40 | 
    41 | ../../lib/braindb.mjs -------------------------------------------------------------------------------- /packages/core/src/utils.ts: -------------------------------------------------------------------------------- 1 | import { xxh64, xxh32 } from "@node-rs/xxhash"; 2 | import { deterministicString } from "deterministic-object-hash"; 3 | 4 | const memoizeSecret: any = {}; 5 | export const memoizeOnce = (f: (x: A) => B) => { 6 | let arg: A = memoizeSecret; 7 | let result: B; 8 | return (x: A) => { 9 | if (x !== arg) { 10 | arg = x; 11 | result = f(x); 12 | } 13 | return result; 14 | }; 15 | }; 16 | 17 | export const cheksumConfig = memoizeOnce((conf: any) => xxh32(deterministicString(conf))); 18 | 19 | // can use streaming instead of reading whole file 20 | export const cheksum64str = (str: string) => xxh64(str).toString(36); 21 | 22 | const externalLinkRegexp = RegExp(`^[a-z]+://`); 23 | 24 | export const isExternalLink = (link: string) => externalLinkRegexp.test(link); 25 | 26 | export const symmetricDifference = (arrayA: T[], arrayB: T[]) => { 27 | if (arrayA.length === 0) return arrayB; 28 | if (arrayB.length === 0) return arrayA; 29 | 30 | const setA = new Set(arrayA); 31 | const setB = new Set(arrayB); 32 | 33 | const diffA = arrayA.filter((x) => !setB.has(x)); 34 | const diffB = arrayB.filter((x) => !setA.has(x)); 35 | 36 | return [...diffA, ...diffB]; 37 | }; 38 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/lib/graph.ts: -------------------------------------------------------------------------------- 1 | import { BrainDB } from "@braindb/core"; 2 | import { bdb } from "./braindb.mjs"; 3 | import circular from "graphology-layout/circular"; 4 | import graphology from "graphology"; 5 | // @ts-ignore 6 | const { MultiGraph } = graphology; 7 | 8 | export async function getGraph() { 9 | const graph = new MultiGraph(); 10 | const data = await toGraphologyJson(bdb); 11 | graph.import(data as any); 12 | circular.assign(graph); 13 | return graph; 14 | } 15 | 16 | async function toGraphologyJson(db: BrainDB) { 17 | const nodes = (await db.documents()).map((document, i) => ({ 18 | key: document.id(), 19 | attributes: { 20 | label: document.frontmatter().title as string, 21 | url: document.url(), 22 | size: 0.05, 23 | // color: "#f00" 24 | }, 25 | })); 26 | 27 | const edges = (await db.links()) 28 | .filter((link) => link.to() !== null) 29 | .map((link) => ({ 30 | source: link.from().id(), 31 | target: link.to()?.id(), 32 | })); 33 | 34 | return { 35 | attributes: { name: "g" }, 36 | options: { 37 | allowSelfLoops: true, 38 | multi: true, 39 | type: "directed", 40 | }, 41 | nodes, 42 | edges, 43 | }; 44 | } 45 | -------------------------------------------------------------------------------- /packages/core/drizzle/0000_same_talon.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE `documents` ( 2 | `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, 3 | `path` text NOT NULL, 4 | `frontmatter` text NOT NULL, 5 | `ast` text NOT NULL, 6 | `markdown` text NOT NULL, 7 | `mtime` real NOT NULL, 8 | `checksum` text NOT NULL, 9 | `slug` text NOT NULL, 10 | `url` text NOT NULL 11 | ); 12 | --> statement-breakpoint 13 | CREATE TABLE `links` ( 14 | `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, 15 | `from` text NOT NULL, 16 | `to` text, 17 | `start` integer NOT NULL, 18 | `to_slug` text, 19 | `to_url` text, 20 | `to_path` text, 21 | `to_anchor` text, 22 | `label` text, 23 | `line` 
integer NOT NULL, 24 | `column` integer NOT NULL 25 | ); 26 | --> statement-breakpoint 27 | CREATE INDEX `slug` ON `documents` (`slug`);--> statement-breakpoint 28 | CREATE INDEX `url` ON `documents` (`url`);--> statement-breakpoint 29 | CREATE UNIQUE INDEX `path` ON `documents` (`path`);--> statement-breakpoint 30 | CREATE INDEX `to_slug` ON `links` (`to_slug`);--> statement-breakpoint 31 | CREATE INDEX `to_url` ON `links` (`to_url`);--> statement-breakpoint 32 | CREATE INDEX `to_path` ON `links` (`to_path`);--> statement-breakpoint 33 | CREATE UNIQUE INDEX `from_start` ON `links` (`from`,`start`); -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # BrainDB 2 | 3 |


    9 | 10 | > [!WARNING] 11 | > Work in progress. Expect breaking changes. 12 | 13 | ## What is this? 14 | 15 | **TL;DR** markdown-graph-content-layer-database. For the full description see [vision](https://braindb.stereobooster.com/notes/vision/). 16 | 17 | ## Packages 18 | 19 | - [`@braindb/core`](/packages/core) - core package ![NPM Version](https://img.shields.io/npm/v/%40braindb%2Fcore) 20 | - [`@braindb/remark-wiki-link`](/packages/remark-wiki-link) ![NPM Version](https://img.shields.io/npm/v/%40braindb%2Fremark-wiki-link) 21 | - [`docs`](/packages/docs) [![Netlify Status](https://api.netlify.com/api/v1/badges/56dc8e11-4317-4801-b722-15e261ca4353/deploy-status)](https://app.netlify.com/sites/braindb/deploys) 22 | 23 | Demos: 24 | 25 | - [astro-digital-garden](https://astro-digital-garden.stereobooster.com/recipes/braindb/) 26 | 27 | ## Logo 28 | 29 | Logo by Dairy Free Design from Noun Project (CC BY 3.0) 30 | -------------------------------------------------------------------------------- /experiments/cli/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@braindb/cli", 3 | "version": "0.0.1", 4 | "description": "markdown-graph-content-layer-database", 5 | "private": true, 6 | "repository": { 7 | "type": "git", 8 | "url": "git+https://github.com/stereobooster/braindb.git", 9 | "directory": "packages/cli" 10 | }, 11 | "author": "stereobooster", 12 | "license": "MIT", 13 | "keywords": [ 14 | "markdown", 15 | "database", 16 | "content", 17 | "obsidian", 18 | "graph", 19 | "contentlayer" 20 | ], 21 | "type": "module", 22 | "exports": { 23 | "types": "./dist/index.d.js", 24 | "default": "./dist/index.js" 25 | }, 26 | "main": "./dist/index.js", 27 | "module": "./dist/index.js", 28 | "files": [ 29 | "./dist/*" 30 | ], 31 | "types": "./dist/index.d.js", 32 | "bin": { 33 | "braindb": "./dist/index.js" 34 | }, 35 | "scripts": { 36 | "test": "vitest", 37 | "prepublishOnly": "npm run build", 38 | "build": "rm -rf dist && tsc && chmod +x dist/index.js", 39 | "dev": "tsc --watch", 40 | "clean": "rm -rf dist", 41 | "tsc": "tsc" 42 | }, 43 | "dependencies": { 44 | "@braindb/core": "workspace:*", 45 | "commander": "^12.0.0", 46 | "cosmiconfig": "^9.0.0" 47 | } 48 | } 49 | -------------------------------------------------------------------------------- /packages/mdast-util-wiki-link/src/to-markdown.ts: -------------------------------------------------------------------------------- 1 | import { type Unsafe, type Handle } from "mdast-util-to-markdown"; 2 | 3 | export interface ToMarkdownOptions { 4 | aliasDivider?: string; 5 | } 6 | 7 | export function toMarkdown(opts: ToMarkdownOptions = {}) { 8 | const aliasDivider = opts.aliasDivider || "|"; 9 | 10 | const unsafe = [ 11 | { 12 | character: "[", 13 | inConstruct: ["phrasing", "label", "reference"], 14 | }, 15 | { 16 | character: "]", 17 | inConstruct: ["label", "reference"], 18 | }, 19 | ] satisfies Unsafe[]; 20 | 21 | const wikiLink: Handle = (node, _parent, state, _info) => { 22 | // @ts-expect-error 23 | const exit = state.enter("wikiLink"); 24 | 25 | const nodeValue = state.safe(node.value, { before: "[", after: "]" }); 26 | 27 | let value: string; 28 | if (node.data.alias != null) { 29 | const nodeAlias = state.safe(node.data.alias, { 30 | before: "[", 31 | after: "]", 32 | }); 33 | value = `[[${nodeValue}${aliasDivider}${nodeAlias}]]`; 34 | } else { 35 | value = `[[${nodeValue}]]`; 36 | } 37 | 38 | exit(); 39 | 40 | return value; 41 | }; 42 | 43 | return { 44 | unsafe: unsafe, 45 
| handlers: { 46 | wikiLink, 47 | }, 48 | }; 49 | } 50 | -------------------------------------------------------------------------------- /packages/remark-dataview/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@braindb/remark-dataview", 3 | "version": "0.0.3", 4 | "description": "markdown-graph-content-layer-database", 5 | "repository": { 6 | "type": "git", 7 | "url": "git+https://github.com/stereobooster/braindb.git", 8 | "directory": "packages/remark-dataview" 9 | }, 10 | "author": "stereobooster", 11 | "license": "MIT", 12 | "keywords": [ 13 | "markdown", 14 | "database", 15 | "content", 16 | "obsidian", 17 | "graph", 18 | "contentlayer" 19 | ], 20 | "type": "module", 21 | "exports": { 22 | "types": "./dist/index.d.js", 23 | "default": "./dist/index.js" 24 | }, 25 | "main": "./dist/index.js", 26 | "module": "./dist/index.js", 27 | "files": [ 28 | "./dist/*" 29 | ], 30 | "types": "./dist/index.d.js", 31 | "scripts": { 32 | "test": "vitest", 33 | "prepublishOnly": "npm run build", 34 | "build": "rm -rf dist && tsc", 35 | "dev": "tsc --watch", 36 | "clean": "rm -rf dist", 37 | "tsc": "tsc" 38 | }, 39 | "dependencies": { 40 | "@beoe/remark-code-hook": "^0.1.0", 41 | "@braindb/core": "workspace:*", 42 | "@types/mdast": "^4.0.4", 43 | "fenceparser": "^2.2.0", 44 | "node-sql-parser": "^5.3.8", 45 | "unified": "^11.0.5" 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /packages/remark-dataview/src/sqlUtils.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, it } from "vitest"; 2 | import { parse, transform } from "./sqlUtils.js"; 3 | 4 | describe("parse", () => { 5 | it("parses select statements", () => { 6 | expect(parse("SELECT * FROM A")).to.toMatchInlineSnapshot(` 7 | { 8 | "columns": [ 9 | { 10 | "as": null, 11 | "expr": { 12 | "column": "*", 13 | "table": null, 14 | "type": "column_ref", 15 | }, 16 | }, 17 | ], 18 | "distinct": null, 19 | "for_update": null, 20 | "from": [ 21 | { 22 | "as": null, 23 | "db": null, 24 | "table": "A", 25 | }, 26 | ], 27 | "groupby": null, 28 | "having": null, 29 | "limit": null, 30 | "options": null, 31 | "orderby": null, 32 | "type": "select", 33 | "where": null, 34 | "with": null, 35 | } 36 | `); 37 | }); 38 | }); 39 | 40 | describe("transform", () => { 41 | it("removes custom functions", () => { 42 | expect(() => 43 | transform( 44 | parse(`SELECT "from", tasks.ast, length("from") - 1, checked as t, dv_md(ast) FROM tasks;`) 45 | ) 46 | ).to.not.throw(); 47 | }); 48 | }); 49 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/parallel.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Parallel execution 3 | draft: true 4 | --- 5 | 6 | ## Profiling 7 | 8 | Need to profile before jumping to threads: 9 | 10 | - https://github.com/StarpTech/profiling-nodejs 11 | - https://nodejs.org/en/learn/getting-started/profiling 12 | - https://nodejs.org/en/learn/diagnostics/memory/using-heap-profiler 13 | - https://blog.appsignal.com/2023/11/29/an-introduction-to-profiling-in-nodejs.html 14 | - https://clinicjs.org/ 15 | 16 | ## Thoughts 17 | 18 | - sqlite is single threaded 19 | - either execute all db operations in main thread 20 | - but passing data between threads is expensive 21 | - zero-copy - "Transferable objects" 22 | - or use some kind of mutex 23 | - 
**upd** it seems [to work with thread](https://github.com/WiseLibs/better-sqlite3/blob/master/docs/threads.md) 24 | - parallelism would not help with IO operations, but it will help with 25 | - parsing 26 | - checksum calculation (hashing) 27 | - traversing AST 28 | 29 | ## Libs 30 | 31 | - https://github.com/josdejong/workerpool 32 | - https://github.com/piscinajs/piscina 33 | - https://github.com/poolifier/poolifier 34 | - https://github.com/Vincit/tarn.js 35 | - https://github.com/SUCHMOKUO/node-worker-threads-pool 36 | - https://threads.js.org/usage-pool 37 | - https://github.com/tim-hub/pambdajs 38 | -------------------------------------------------------------------------------- /packages/core/src/toJson.ts: -------------------------------------------------------------------------------- 1 | import { eq, isNotNull, sql } from "drizzle-orm"; 2 | import { document, link } from "./schema.js"; 3 | import { Db } from "./db.js"; 4 | import { alias } from "drizzle-orm/sqlite-core"; 5 | 6 | const documentFrom = alias(document, "documentFrom"); 7 | const documentTo = alias(document, "documentTo"); 8 | 9 | // https://graphology.github.io/serialization.html#format 10 | export function toGraphology(db: Db) { 11 | const edges = db 12 | .select({ 13 | source: documentFrom.id, 14 | target: documentTo.id, 15 | // key: link.id, 16 | // attributes: { label: link.label }, 17 | }) 18 | .from(link) 19 | .innerJoin(documentFrom, eq(link.from, documentFrom.path)) 20 | .innerJoin(documentTo, eq(link.to, documentTo.path)) 21 | // need to show broken links on the graph 22 | .where(isNotNull(link.to)) 23 | .all(); 24 | 25 | const nodes = db 26 | .select({ 27 | key: document.id, 28 | attributes: { 29 | label: sql`json_extract(${document.frontmatter}, '$.title')`, 30 | url: document.url, 31 | }, 32 | }) 33 | .from(document) 34 | .all(); 35 | 36 | return { 37 | attributes: { name: "g" }, 38 | options: { 39 | allowSelfLoops: true, 40 | multi: true, 41 | type: "directed", 42 | }, 43 | nodes, 44 | edges, 45 | }; 46 | } 47 | -------------------------------------------------------------------------------- /packages/core/drizzle/0007_peaceful_stepford_cuckoos.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE `tasks` ( 2 | `id` integer PRIMARY KEY AUTOINCREMENT NOT NULL, 3 | `from` text NOT NULL, 4 | `start` integer NOT NULL, 5 | `ast` text NOT NULL, 6 | `checked` integer NOT NULL, 7 | `line` integer NOT NULL, 8 | `column` integer NOT NULL 9 | ); 10 | --> statement-breakpoint 11 | DROP INDEX IF EXISTS `slug`;--> statement-breakpoint 12 | DROP INDEX IF EXISTS `url`;--> statement-breakpoint 13 | DROP INDEX IF EXISTS `path`;--> statement-breakpoint 14 | DROP INDEX IF EXISTS `to_slug`;--> statement-breakpoint 15 | DROP INDEX IF EXISTS `to_url`;--> statement-breakpoint 16 | DROP INDEX IF EXISTS `to_path`;--> statement-breakpoint 17 | DROP INDEX IF EXISTS `from_start`;--> statement-breakpoint 18 | CREATE UNIQUE INDEX `tasks_from_start` ON `tasks` (`from`,`start`);--> statement-breakpoint 19 | CREATE INDEX `documents_slug` ON `documents` (`slug`);--> statement-breakpoint 20 | CREATE INDEX `documents_url` ON `documents` (`url`);--> statement-breakpoint 21 | CREATE UNIQUE INDEX `documents_path` ON `documents` (`path`);--> statement-breakpoint 22 | CREATE INDEX `links_to_slug` ON `links` (`to_slug`);--> statement-breakpoint 23 | CREATE INDEX `links_to_url` ON `links` (`to_url`);--> statement-breakpoint 24 | CREATE INDEX `links_to_path` ON `links` (`to_path`);--> 
statement-breakpoint 25 | CREATE UNIQUE INDEX `links_from_start` ON `links` (`from`,`start`); -------------------------------------------------------------------------------- /packages/astro/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@braindb/astro", 3 | "version": "0.1.1", 4 | "description": "BrainDB Astro integration", 5 | "repository": { 6 | "type": "git", 7 | "url": "git+https://github.com/stereobooster/braindb.git", 8 | "directory": "packages/astro" 9 | }, 10 | "author": "stereobooster", 11 | "license": "MIT", 12 | "keywords": [ 13 | "markdown", 14 | "database", 15 | "content", 16 | "obsidian", 17 | "graph", 18 | "contentlayer", 19 | "astro-integration", 20 | "astro-component", 21 | "withastro", 22 | "astro" 23 | ], 24 | "publishConfig": { 25 | "access": "public" 26 | }, 27 | "sideEffects": false, 28 | "exports": { 29 | ".": { 30 | "types": "./dist/index.d.ts", 31 | "default": "./dist/index.js" 32 | } 33 | }, 34 | "files": [ 35 | "dist" 36 | ], 37 | "scripts": { 38 | "dev": "tsup --watch", 39 | "build": "tsup" 40 | }, 41 | "type": "module", 42 | "peerDependencies": { 43 | "astro": "^5.5.3" 44 | }, 45 | "dependencies": { 46 | "@braindb/core": "workspace:*", 47 | "@braindb/remark-wiki-link": "workspace:*", 48 | "@types/mdast": "^4.0.4", 49 | "astro-integration-kit": "^0.18.0", 50 | "github-slugger": "^2.0.0", 51 | "unified": "^11.0.5", 52 | "unist-util-visit": "^5.0.0" 53 | }, 54 | "devDependencies": { 55 | "tsup": "^8.4.0" 56 | } 57 | } 58 | -------------------------------------------------------------------------------- /packages/core/src/Link.ts: -------------------------------------------------------------------------------- 1 | import { and, eq } from "drizzle-orm"; 2 | import { Db } from "./db.js"; 3 | import { LinkProps, link } from "./schema.js"; 4 | import { Document } from "./Document.js"; 5 | 6 | export class Link { 7 | private idPath: string; 8 | private offset: number; 9 | // @ts-expect-error it is lazyly initialized only on the request 10 | private lnk: LinkProps; 11 | private db: Db; 12 | 13 | private getLnk() { 14 | if (!this.lnk) { 15 | const [lnk] = this.db 16 | .select() 17 | .from(link) 18 | .where(and(eq(link.from, this.idPath), eq(link.start, this.offset))) 19 | .all(); 20 | this.lnk = lnk; 21 | } 22 | return this.lnk; 23 | } 24 | 25 | constructor(db: Db, idPath: string, offset: number) { 26 | this.idPath = idPath; 27 | this.offset = offset; 28 | this.db = db; 29 | } 30 | 31 | from() { 32 | return new Document(this.db, this.getLnk().from); 33 | } 34 | 35 | to() { 36 | const to = this.getLnk().to; 37 | return to == null ? 
null : new Document(this.db, to); 38 | } 39 | 40 | anchor() { 41 | return this.getLnk().to_anchor; 42 | } 43 | 44 | line() { 45 | return this.getLnk().line; 46 | } 47 | 48 | column() { 49 | return this.getLnk().column; 50 | } 51 | 52 | label() { 53 | return this.getLnk().label; 54 | } 55 | 56 | id() { 57 | return this.getLnk().id; 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /packages/micromark-extension-wiki-link/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@braindb/micromark-extension-wiki-link", 3 | "version": "0.1.0", 4 | "description": "Parse and render wiki-style links", 5 | "keywords": [ 6 | "remark", 7 | "remark-plugin", 8 | "markdown", 9 | "gfm", 10 | "micromark", 11 | "micromark-plugin" 12 | ], 13 | "repository": { 14 | "type": "git", 15 | "url": "git+https://github.com/stereobooster/braindb.git", 16 | "directory": "packages/micromark-extension-wiki-link" 17 | }, 18 | "contributors": [ 19 | { 20 | "name": "Mark Hudnall", 21 | "email": "me@markhudnall.com", 22 | "url": "https://markhudnall.com" 23 | }, 24 | "stereobooster" 25 | ], 26 | "license": "MIT", 27 | "type": "module", 28 | "sideEffects": false, 29 | "exports": { 30 | "types": "./dist/index.d.js", 31 | "default": "./dist/index.js" 32 | }, 33 | "main": "./dist/index.js", 34 | "module": "./dist/index.js", 35 | "files": [ 36 | "dist" 37 | ], 38 | "types": "./dist/index.d.js", 39 | "scripts": { 40 | "test": "vitest", 41 | "prepublishOnly": "npm run build", 42 | "build": "rm -rf dist && tsc", 43 | "dev": "tsc --watch", 44 | "clean": "rm -rf dist" 45 | }, 46 | "devDependencies": { 47 | "micromark": "^4.0.2", 48 | "vite-tsconfig-paths": "^5.1.4", 49 | "vitest": "^3.0.9" 50 | }, 51 | "dependencies": { 52 | "micromark-util-types": "^2.0.2" 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /packages/core/src/types.ts: -------------------------------------------------------------------------------- 1 | // https://github.com/sindresorhus/type-fest/blob/main/source/basic.d.ts 2 | 3 | /** 4 | Matches a JSON object. 5 | 6 | This type can be useful to enforce some input to be JSON-compatible or as a super-type to be extended from. Don't use this as a direct return type as the user would have to double-cast it: `jsonObject as unknown as CustomResponse`. Instead, you could extend your CustomResponse type from it to ensure your type only uses JSON-compatible types: `interface CustomResponse extends JsonObject { … }`. 7 | 8 | @category JSON 9 | */ 10 | export type JsonObject = { [Key in string]: JsonValue } & { 11 | [Key in string]?: JsonValue | undefined; 12 | }; 13 | 14 | /** 15 | Matches a JSON array. 16 | 17 | @category JSON 18 | */ 19 | export type JsonArray = JsonValue[] | readonly JsonValue[]; 20 | 21 | /** 22 | Matches any valid JSON primitive value. 23 | 24 | @category JSON 25 | */ 26 | export type JsonPrimitive = string | number | boolean | null; 27 | 28 | /** 29 | Matches any valid JSON value. 30 | 31 | @see `Jsonify` if you need to transform a type to one that is assignable to `JsonValue`. 
32 | 33 | @category JSON 34 | */ 35 | export type JsonValue = JsonPrimitive | JsonObject | JsonArray; 36 | 37 | // special case of limited JSON - without arrays 38 | 39 | export type JsonLimitedValue = JsonPrimitive | JsonLimitedObject; 40 | 41 | export type JsonLimitedObject = { [Key in string]: JsonLimitedValue } & { 42 | [Key in string]?: JsonLimitedValue | undefined; 43 | }; 44 | -------------------------------------------------------------------------------- /packages/core/drizzle/meta/_journal.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "5", 3 | "dialect": "sqlite", 4 | "entries": [ 5 | { 6 | "idx": 0, 7 | "version": "5", 8 | "when": 1705892624753, 9 | "tag": "0000_same_talon", 10 | "breakpoints": true 11 | }, 12 | { 13 | "idx": 1, 14 | "version": "5", 15 | "when": 1710975544103, 16 | "tag": "0001_petite_fixer", 17 | "breakpoints": true 18 | }, 19 | { 20 | "idx": 2, 21 | "version": "5", 22 | "when": 1711738431891, 23 | "tag": "0002_worthless_silk_fever", 24 | "breakpoints": true 25 | }, 26 | { 27 | "idx": 3, 28 | "version": "5", 29 | "when": 1711755760856, 30 | "tag": "0003_jazzy_epoch", 31 | "breakpoints": true 32 | }, 33 | { 34 | "idx": 4, 35 | "version": "5", 36 | "when": 1711756629582, 37 | "tag": "0004_ambitious_starbolt", 38 | "breakpoints": true 39 | }, 40 | { 41 | "idx": 5, 42 | "version": "5", 43 | "when": 1711762860250, 44 | "tag": "0005_stormy_luckman", 45 | "breakpoints": true 46 | }, 47 | { 48 | "idx": 6, 49 | "version": "5", 50 | "when": 1711800425368, 51 | "tag": "0006_icy_iron_monger", 52 | "breakpoints": true 53 | }, 54 | { 55 | "idx": 7, 56 | "version": "5", 57 | "when": 1713297143671, 58 | "tag": "0007_peaceful_stepford_cuckoos", 59 | "breakpoints": true 60 | } 61 | ] 62 | } -------------------------------------------------------------------------------- /packages/docs/astro.config.mjs: -------------------------------------------------------------------------------- 1 | import { defineConfig } from "astro/config"; 2 | import starlight from "@astrojs/starlight"; 3 | import robotsTxt from "astro-robots-txt"; 4 | 5 | import { rehypeVizdom } from "@beoe/rehype-vizdom"; 6 | import { getCache } from "@beoe/cache"; 7 | 8 | import { brainDbAstro, getBrainDb } from "@braindb/astro"; 9 | import remarkDataview from "@braindb/remark-dataview"; 10 | 11 | const cache = await getCache(); 12 | 13 | // https://astro.build/config 14 | export default defineConfig({ 15 | site: "https://braindb.stereobooster.com/", 16 | integrations: [ 17 | starlight({ 18 | pagination: false, 19 | lastUpdated: true, 20 | logo: { 21 | light: "./src/assets/logo.svg", 22 | dark: "./src/assets/logo-dark.svg", 23 | }, 24 | customCss: ["./src/styles/custom.css"], 25 | title: "BrainDB", 26 | social: { 27 | github: "https://github.com/stereobooster/braindb", 28 | }, 29 | editLink: { 30 | baseUrl: 31 | "https://github.com/stereobooster/braindb/edit/main/packages/docs/", 32 | }, 33 | sidebar: [ 34 | { label: "Introduction", link: "/" }, 35 | { 36 | label: "Unsorted notes", 37 | autogenerate: { 38 | directory: "notes", 39 | }, 40 | }, 41 | ], 42 | }), 43 | robotsTxt(), 44 | brainDbAstro(), 45 | ], 46 | markdown: { 47 | remarkPlugins: [[remarkDataview, { getBrainDb }]], 48 | rehypePlugins: [[rehypeVizdom, { class: "not-content", cache }]], 49 | }, 50 | }); 51 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/slug.md: 
-------------------------------------------------------------------------------- 1 | --- 2 | title: Slug 3 | --- 4 | 5 | ## How to form slug for root pages? 6 | 7 | Root pages can be `index.md`, `index.mdx` (or `readme.md`, `readme.mdx` in GitHub). 8 | 9 | | path | option 1 | option 2 | option 3 | url-path | "file name" | 10 | | ---------------- | -------- | -------- | ------------ | -------- | ----------- | 11 | | `/index.md` | `/` | `index` | `index` | `/` | `index` | 12 | | `/some/index.md` | `some` | `index` | `some/index` | `/some/` | `some` | 13 | | `/some.md` | `some` | `some` | `some` | `/some/` | `some` | 14 | | `/index.mdx` | `/` | `index` | `index` | `/` | `index` | 15 | | `/Index.md` | `/` | `Index` | `Index` | `/` | `Index` | 16 | 17 | - slug generation is customizable, so if somebody needs any specific behaviour they can implement it 18 | - **option 1** is the current default behaviour 19 | - I think slug should be case sensitive 20 | - Only path gauranteed to be unique. Slugs and urls can repeat - maybe create a function to check if there are duplicates `SELECT count() FROM documents GROUP BY slug HAVING count() > 1` 21 | - do we need option to match slug case insensitively? 22 | - do we need option to match against more than slug if there is ambiguity, like `index`, `other/index`, `some/other/index`? 23 | 24 | Other thoughts: 25 | 26 | - with alias there can be more than one name / url 27 | - see also "against more than slug if there is ambiguity" 28 | - can I implement "did you mean" suggestions? 29 | -------------------------------------------------------------------------------- /packages/remark-dataview/src/index.ts: -------------------------------------------------------------------------------- 1 | import remarkCodeHook from "@beoe/remark-code-hook"; 2 | import { BrainDB } from "@braindb/core"; 3 | // @ts-expect-error required for generated types 4 | import type { Root } from "mdast"; 5 | // @ts-expect-error required for generated types 6 | import type { Plugin } from "unified"; 7 | import { 8 | generateList, 9 | generateTable, 10 | parse, 11 | processMeta, 12 | transform, 13 | } from "./sqlUtils.js"; 14 | 15 | type RemarkDataviewOptions = { 16 | getBrainDb: () => BrainDB; 17 | /** 18 | * @deprecated 19 | */ 20 | bdb?: BrainDB; 21 | }; 22 | 23 | export function remarkDataview(options: RemarkDataviewOptions) { 24 | const { getBrainDb, bdb, ...rest } = options; 25 | // @ts-expect-error 26 | return remarkCodeHook.call(this, { 27 | ...rest, 28 | language: "dataview", 29 | code: async ({ code, meta }) => { 30 | if (getBrainDb == null) { 31 | console.warn( 32 | `[remark-dataview]: "bdb" option is deprecated. Use "getBrainDb" instead` 33 | ); 34 | } 35 | const bdbInstance = getBrainDb == null ? bdb! 
: getBrainDb(); 36 | await bdbInstance.ready(); 37 | 38 | try { 39 | const options = processMeta(meta); 40 | const { query, columns } = transform(parse(code)); 41 | if (options.list) 42 | return generateList(columns, bdbInstance.__rawQuery(query), options); 43 | 44 | return generateTable(columns, bdbInstance.__rawQuery(query)); 45 | } catch (e) { 46 | return String(e); 47 | } 48 | }, 49 | }); 50 | } 51 | 52 | export default remarkDataview; 53 | -------------------------------------------------------------------------------- /packages/mdast-util-wiki-link/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@braindb/mdast-util-wiki-link", 3 | "version": "0.2.0", 4 | "description": "Parse and render wiki-style links", 5 | "keywords": [ 6 | "remark", 7 | "remark-plugin", 8 | "markdown", 9 | "gfm", 10 | "micromark", 11 | "micromark-plugin", 12 | "mdast", 13 | "mdast-util" 14 | ], 15 | "repository": { 16 | "type": "git", 17 | "url": "git+https://github.com/stereobooster/braindb.git", 18 | "directory": "packages/mdast-util-wiki-link" 19 | }, 20 | "contributors": [ 21 | { 22 | "name": "Mark Hudnall", 23 | "email": "me@markhudnall.com", 24 | "url": "https://markhudnall.com" 25 | }, 26 | "stereobooster" 27 | ], 28 | "license": "MIT", 29 | "type": "module", 30 | "sideEffects": false, 31 | "exports": { 32 | "types": "./dist/index.d.js", 33 | "default": "./dist/index.js" 34 | }, 35 | "main": "./dist/index.js", 36 | "module": "./dist/index.js", 37 | "files": [ 38 | "dist" 39 | ], 40 | "types": "./dist/index.d.js", 41 | "scripts": { 42 | "test": "vitest", 43 | "prepublishOnly": "npm run build", 44 | "build": "rm -rf dist && tsc", 45 | "dev": "tsc --watch", 46 | "clean": "rm -rf dist" 47 | }, 48 | "devDependencies": { 49 | "@braindb/micromark-extension-wiki-link": "workspace:^", 50 | "micromark": "^4.0.2", 51 | "typescript": "^5.8.2", 52 | "unist-util-visit": "^5.0.0", 53 | "vite-tsconfig-paths": "^5.1.4", 54 | "vitest": "^3.0.9" 55 | }, 56 | "dependencies": { 57 | "@types/unist": "^3.0.3", 58 | "mdast-util-from-markdown": "^2.0.2", 59 | "mdast-util-to-markdown": "^2.1.2" 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/unsorted.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: unsorted notes 3 | draft: true 4 | --- 5 | 6 | https://keystatic.com/docs/installation-astro 7 | https://github.com/sveltia/sveltia-cms 8 | https://pagescms.org/ 9 | https://payloadcms.com/docs/queries/overview 10 | 11 | ### Test with real life example 12 | 13 | - https://github.com/gohugoio/hugoDocs/tree/master/content/en (Hugo) 14 | - https://github.com/obsidianmd/obsidian-help (Obsidian) 15 | - https://github.com/mdn/content/tree/main/files/en-us (Custom) 16 | - https://github.com/github/docs/tree/main/content (Next.js) 17 | - https://github.com/primer/design/tree/main/content (Gatsby, mdx) 18 | - https://github.com/facebook/docusaurus/tree/main/website (Docuaurus, mdx) 19 | - https://github.com/reactjs/react.dev/blob/main/src/content/ 20 | 21 | ### Other ideas 22 | 23 | API 24 | 25 | - https://github.com/drizzle-team/drizzle-trpc-zod 26 | 27 | frontend 28 | 29 | - show markdown as html 30 | - list of files 31 | - graph of files 32 | - query api 33 | - search api 34 | - graph query api or traverse with recursive 35 | - live reload 36 | 37 | tui to run SQL in watch mode 38 | 39 | ## Examples 40 | 41 | - 
https://github.com/timlrx/contentlayer2 42 | - https://lume.land/ 43 | - tags 44 | - https://github.com/topics/digital-garden 45 | - https://github.com/topics/second-brain 46 | - https://github.com/topics/digital-gardening 47 | - https://github.com/thedevdavid/digital-garden 48 | - https://github.com/pixelart7/chaiyapat-astro-site/blob/main/astro.config.ts 49 | - https://github.com/johackim/remark-obsidian 50 | - https://github.com/wikibonsai/wikibonsai 51 | - https://github.com/bahlo/arnes-notes 52 | - https://github.com/TuanManhCao/digital-garden 53 | -------------------------------------------------------------------------------- /packages/core/src/Task.ts: -------------------------------------------------------------------------------- 1 | import { and, eq } from "drizzle-orm"; 2 | import { Db } from "./db.js"; 3 | import { TaskProps, task } from "./schema.js"; 4 | import { Document } from "./Document.js"; 5 | import { toText } from "./toText.js"; 6 | import { mdParser } from "./parser.js"; 7 | import { Root } from "mdast"; 8 | 9 | export class Task { 10 | private idPath: string; 11 | private offset: number; 12 | // @ts-expect-error it is lazyly initialized only on the request 13 | private lnk: TaskProps; 14 | private db: Db; 15 | 16 | private getDbRecord() { 17 | if (!this.lnk) { 18 | const [lnk] = this.db 19 | .select() 20 | .from(task) 21 | .where(and(eq(task.from, this.idPath), eq(task.start, this.offset))) 22 | .all(); 23 | this.lnk = lnk; 24 | } 25 | return this.lnk; 26 | } 27 | 28 | constructor(db: Db, idPath: string, offset: number) { 29 | this.idPath = idPath; 30 | this.offset = offset; 31 | this.db = db; 32 | } 33 | 34 | from() { 35 | return new Document(this.db, this.getDbRecord().from); 36 | } 37 | 38 | ast() { 39 | return this.getDbRecord().ast; 40 | } 41 | 42 | checked() { 43 | return this.getDbRecord().checked; 44 | } 45 | 46 | line() { 47 | return this.getDbRecord().line; 48 | } 49 | 50 | column() { 51 | return this.getDbRecord().column; 52 | } 53 | 54 | id() { 55 | return this.getDbRecord().id; 56 | } 57 | 58 | /** 59 | * experimental 60 | */ 61 | text() { 62 | return toText(this.getDbRecord().ast); 63 | } 64 | 65 | markdown() { 66 | // to support links/wikilinks need to use `getMarkdown` 67 | return mdParser.stringify(this.getDbRecord().ast as Root); 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /experiments/demo-astro/braindb.config.ts: -------------------------------------------------------------------------------- 1 | import { resolve } from "node:path"; 2 | import { slug as githubSlug } from "github-slugger"; 3 | import path from "node:path"; 4 | 5 | import { type Config } from "@braindb/cli"; 6 | 7 | const generateUrl: Config["url"] = (filePath, _frontmatter) => { 8 | const withoutFileExt = filePath.replace( 9 | new RegExp(path.extname(filePath) + "$"), 10 | "" 11 | ); 12 | const rawSlugSegments = withoutFileExt.split(path.sep); 13 | 14 | if (rawSlugSegments[0] === "example") rawSlugSegments[0] = "notes"; 15 | if (rawSlugSegments[0] === "" && rawSlugSegments[1] === "example") 16 | rawSlugSegments[1] = "notes"; 17 | 18 | const slug = rawSlugSegments 19 | // Slugify each route segment to handle capitalization and spaces. 20 | // Note: using `slug` instead of `new Slugger()` means no slug deduping. 
21 | .map((segment) => githubSlug(segment)) 22 | .join("/") 23 | .replace(/\/index$/, ""); 24 | 25 | return `${slug}/`; 26 | }; 27 | 28 | export default { 29 | root: resolve("../.."), 30 | source: "/example", 31 | destination: resolve("src/content"), 32 | transformPath: (filePath: string) => filePath.replace(/^\/example/, "/notes"), 33 | url: generateUrl, 34 | linkType: "web", 35 | cache: false, 36 | transformUnresolvedLink: (x, y) => console.log(`${x}:${y.position.start.line}:${y.position.start.column}`), 37 | // transformFrontmatter: (doc) => { 38 | // const frontmatter = doc.frontmatter(); 39 | // frontmatter["url"] = doc.url(); 40 | // frontmatter["backlinks"] = doc.documentsFrom().map((bl) => ({ 41 | // url: bl.url(), 42 | // title: bl.title(), 43 | // })); 44 | // return frontmatter; 45 | // }, 46 | } satisfies Config; 47 | -------------------------------------------------------------------------------- /experiments/cli/src/config.ts: -------------------------------------------------------------------------------- 1 | import { cosmiconfig } from "cosmiconfig"; 2 | import { cwd } from "node:process"; 3 | import { BrainDBOptionsIn, BrainDBOptionsOut } from "@braindb/core"; 4 | 5 | // For inspiration https://github.com/vitejs/vite/blob/main/packages/vite/src/node/config.ts#L126 6 | // TODO: root should be optional 7 | export type Config = BrainDBOptionsIn & 8 | BrainDBOptionsOut & { 9 | destination?: string; 10 | }; 11 | 12 | const moduleName = "braindb"; 13 | const explorer = cosmiconfig(moduleName, { 14 | searchPlaces: [ 15 | // "package.json", 16 | `${moduleName}.config.js`, 17 | `${moduleName}.config.ts`, 18 | `${moduleName}.config.mjs`, 19 | `${moduleName}.config.cjs`, 20 | ], 21 | }); 22 | 23 | // shall it use URIEncode or URIDecode? 24 | // just an example, depends on configuation of static site generator 25 | // For example, https://gohugo.io/content-management/urls/#tokens 26 | const generateUrl: (root: string | undefined) => BrainDBOptionsIn["url"] = 27 | (source) => (path, _frontmatter) => { 28 | const dir = source ? 
source : ""; 29 | let url = 30 | path 31 | .replace(dir, "") 32 | .replace(/_?index\.md$/, "") 33 | .replace(/\.md$/, "") || "/"; 34 | 35 | if (!url.startsWith("/")) url = "/" + url; 36 | 37 | return url; 38 | }; 39 | 40 | export async function getConfig() { 41 | const defaultCfg: Config = { 42 | root: cwd(), 43 | // TODO: make true when cache would be improved 44 | cache: false, 45 | }; 46 | 47 | let cfg: Partial = {}; 48 | try { 49 | const res = await explorer.search(); 50 | cfg = res?.config; 51 | } catch (e) {} 52 | 53 | const res = { ...defaultCfg, ...cfg }; 54 | if (!res.url) res.url = generateUrl(res.source); 55 | return res; 56 | } 57 | -------------------------------------------------------------------------------- /packages/remark-wiki-link/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@braindb/remark-wiki-link", 3 | "description": "Parse and render wiki-style links", 4 | "version": "2.1.0", 5 | "keywords": [ 6 | "remark", 7 | "remark-plugin", 8 | "markdown", 9 | "gfm" 10 | ], 11 | "repository": { 12 | "type": "git", 13 | "url": "git+https://github.com/stereobooster/braindb.git", 14 | "directory": "packages/remark-wiki-link" 15 | }, 16 | "contributors": [ 17 | { 18 | "name": "Mark Hudnall", 19 | "email": "me@markhudnall.com", 20 | "url": "https://markhudnall.com" 21 | }, 22 | "stereobooster" 23 | ], 24 | "license": "MIT", 25 | "type": "module", 26 | "sideEffects": false, 27 | "exports": { 28 | "types": "./dist/index.d.js", 29 | "default": "./dist/index.js" 30 | }, 31 | "main": "./dist/index.js", 32 | "module": "./dist/index.js", 33 | "files": [ 34 | "dist" 35 | ], 36 | "types": "./dist/index.d.js", 37 | "scripts": { 38 | "test": "vitest", 39 | "prepublishOnly": "npm run build", 40 | "build": "rm -rf dist && tsc", 41 | "dev": "tsc --watch", 42 | "clean": "rm -rf dist" 43 | }, 44 | "dependencies": { 45 | "@braindb/mdast-util-wiki-link": "workspace:^", 46 | "@braindb/micromark-extension-wiki-link": "workspace:^" 47 | }, 48 | "devDependencies": { 49 | "@types/unist": "^3.0.3", 50 | "mdast-util-from-markdown": "^2.0.2", 51 | "rehype-stringify": "^10.0.1", 52 | "remark-parse": "^11.0.0", 53 | "remark-rehype": "^11.1.1", 54 | "remark-stringify": "^11.0.0", 55 | "typescript": "^5.8.2", 56 | "unified": "^11.0.5", 57 | "unist-util-select": "^5.1.0", 58 | "unist-util-visit": "^5.0.0", 59 | "vite-tsconfig-paths": "^5.1.4", 60 | "vitest": "^3.0.9" 61 | } 62 | } 63 | -------------------------------------------------------------------------------- /packages/astro/src/remarkWikiLink.ts: -------------------------------------------------------------------------------- 1 | import { BrainDB } from "@braindb/core"; 2 | import { visit, SKIP } from "unist-util-visit"; 3 | import type { Plugin } from "unified"; 4 | import type { Root } from "mdast"; 5 | 6 | type WikiLinkNode = { 7 | type: "wikiLink"; 8 | value: string; 9 | data: { 10 | alias?: string; 11 | hName: string; 12 | hProperties: Record; 13 | hChildren: any[]; 14 | }; 15 | }; 16 | 17 | export const remarkWikiLink: Plugin<[{ bdb: BrainDB }], Root> = ({ bdb }) => { 18 | return (ast, _file) => { 19 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 20 | const slug = node.value; 21 | const alias = node.data.alias; 22 | 23 | const [slugWithoutAnchor, anchor] = slug.split("#"); 24 | if (slugWithoutAnchor) { 25 | const doc = bdb.documentsSync({ slug: slugWithoutAnchor })[0]; 26 | if (doc) { 27 | if (!doc.frontmatter().draft || (import.meta.env && import.meta.env.DEV)) { 28 | node.data 
= { 29 | hName: "a", 30 | hProperties: { 31 | href: anchor ? `${doc.url()}#${anchor}` : doc.url(), 32 | class: doc.frontmatter().draft ? "draft-link" : "", 33 | }, 34 | hChildren: [ 35 | { 36 | type: "text", 37 | value: alias == null ? doc.frontmatter().title : alias, 38 | }, 39 | ], 40 | }; 41 | } 42 | return SKIP; 43 | } 44 | } 45 | 46 | node.data = { 47 | hName: "span", 48 | hProperties: { 49 | class: "broken-link", 50 | title: `Can't resolve link to ${slug}`, 51 | }, 52 | hChildren: [{ type: "text", value: alias || slug }], 53 | }; 54 | return SKIP; 55 | }); 56 | }; 57 | }; 58 | -------------------------------------------------------------------------------- /packages/astro/README.md: -------------------------------------------------------------------------------- 1 | # `@braindb/astro` 2 | 3 | > [!WARNING] 4 | > You need to add `@braindb/core` as direct dependency due to [vitejs/vite#14289](https://github.com/vitejs/vite/issues/14289) 5 | 6 | This is an [Astro integration](https://docs.astro.build/en/guides/integrations-guide/) for [BrainDB](https://github.com/stereobooster/braindb). 7 | 8 | ## Usage 9 | 10 | ### Installation 11 | 12 | Install the integration **automatically** using the Astro CLI: 13 | 14 | ```bash 15 | pnpm add @braindb/core 16 | pnpm astro add @braindb/astro 17 | ``` 18 | 19 | ```bash 20 | npm install @braindb/core 21 | npx astro add @braindb/astro 22 | ``` 23 | 24 | ```bash 25 | yarn add @braindb/core 26 | yarn astro add @braindb/astro 27 | ``` 28 | 29 | Or install it **manually**: 30 | 31 | 1. Install the required dependencies 32 | 33 | ```bash 34 | pnpm add @braindb/astro @braindb/core 35 | ``` 36 | 37 | ```bash 38 | npm install @braindb/astro @braindb/core 39 | ``` 40 | 41 | ```bash 42 | yarn add @braindb/astro @braindb/core 43 | ``` 44 | 45 | 2. 
Add the integration to your astro config 46 | 47 | ```diff 48 | +import brainDbAstro from "@braindb/astro"; 49 | 50 | export default defineConfig({ 51 | integrations: [ 52 | + brainDbAstro(), 53 | ], 54 | }); 55 | ``` 56 | 57 | ### if you need BrainDB instance 58 | 59 | ```js 60 | import { brainDbAstro, getBrainDb } from "@braindb/astro"; 61 | 62 | const bdb = getBrainDb(); 63 | ``` 64 | 65 | ### Wiki links 66 | 67 | By default plugin adds [`@braindb/remark-wiki-link`](https://github.com/stereobooster/braindb/tree/main/packages/remark-wiki-link) to support wiki links (`[[]]`) 68 | 69 | You can disable it, like this: 70 | 71 | ```diff 72 | export default defineConfig({ 73 | integrations: [ 74 | + brainDbAstro({ remarkWikiLink: false }), 75 | ], 76 | }); 77 | ``` 78 | 79 | ## TODO 80 | 81 | - [ ] add `@braindb/remark-dataview` when it will be stable 82 | -------------------------------------------------------------------------------- /packages/core/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@braindb/core", 3 | "version": "0.0.17", 4 | "description": "markdown-graph-content-layer-database", 5 | "repository": { 6 | "type": "git", 7 | "url": "git+https://github.com/stereobooster/braindb.git", 8 | "directory": "packages/core" 9 | }, 10 | "author": "stereobooster", 11 | "license": "MIT", 12 | "keywords": [ 13 | "markdown", 14 | "database", 15 | "content", 16 | "obsidian", 17 | "graph", 18 | "contentlayer" 19 | ], 20 | "type": "module", 21 | "exports": { 22 | "types": "./dist/index.d.js", 23 | "default": "./dist/index.js" 24 | }, 25 | "main": "./dist/index.js", 26 | "module": "./dist/index.js", 27 | "files": [ 28 | "./dist/*", 29 | "./drizzle/**/*" 30 | ], 31 | "types": "./dist/index.d.js", 32 | "scripts": { 33 | "test": "vitest", 34 | "prepublishOnly": "npm run build", 35 | "build": "rm -rf dist && tsc", 36 | "dev": "tsc --watch", 37 | "clean": "rm -rf dist", 38 | "tsc": "tsc", 39 | "generate": "drizzle-kit generate:sqlite" 40 | }, 41 | "devDependencies": { 42 | "@types/better-sqlite3": "^7.6.12", 43 | "@types/hast": "^3.0.4", 44 | "@types/mdast": "^4.0.4", 45 | "@types/serialize-javascript": "^5.0.4", 46 | "@types/unist": "^3.0.3", 47 | "drizzle-kit": "^0.30.6" 48 | }, 49 | "dependencies": { 50 | "@braindb/remark-wiki-link": "workspace:*", 51 | "@napi-rs/simple-git": "^0.1.19", 52 | "@node-rs/xxhash": "^1.7.6", 53 | "better-sqlite3": "^11.9.1", 54 | "chokidar": "^4.0.3", 55 | "deterministic-object-hash": "^2.0.2", 56 | "drizzle-orm": "^0.41.0", 57 | "mdast-util-to-string": "^4.0.0", 58 | "mitt": "^3.0.1", 59 | "remark-frontmatter": "^5.0.0", 60 | "remark-gfm": "^4.0.1", 61 | "remark-parse": "^11.0.0", 62 | "remark-stringify": "^11.0.0", 63 | "strip-markdown": "^6.0.0", 64 | "unified": "^11.0.5", 65 | "unist-util-map": "^4.0.0", 66 | "unist-util-visit": "^5.0.0", 67 | "yaml": "^2.7.0" 68 | } 69 | } 70 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/lib/braindb.mjs: -------------------------------------------------------------------------------- 1 | import { slug as githubSlug } from "github-slugger"; 2 | import path from "node:path"; 3 | import process from "node:process"; 4 | import { BrainDB } from "@braindb/core"; 5 | 6 | // slug implementation according to Astro 7 | // see astro/packages/astro/src/content/utils.ts 8 | const generateSlug = (filePath) => { 9 | const withoutFileExt = filePath.replace( 10 | new RegExp(path.extname(filePath) + "$"), 11 | "" 12 | ); 
13 | const rawSlugSegments = withoutFileExt.split(path.sep); 14 | const slug = rawSlugSegments 15 | // Slugify each route segment to handle capitalization and spaces. 16 | // Note: using `slug` instead of `new Slugger()` means no slug deduping. 17 | .map((segment) => githubSlug(segment)) 18 | .join("/") 19 | .replace(/\/index$/, ""); 20 | 21 | return slug; 22 | }; 23 | 24 | const start = new Date().getTime(); 25 | 26 | export const bdb = new BrainDB({ 27 | root: path.resolve(process.cwd(), "src/content"), 28 | url: (filePath, _frontmatter) => `${generateSlug(filePath)}/`, 29 | // source: "/notes", 30 | // dbPath: process.cwd(), 31 | // cache: true, 32 | // git: path.resolve(process.cwd(), "../.."), 33 | storeMarkdown: false, 34 | }); 35 | 36 | bdb.start(); 37 | bdb.on("*", (action, opts) => { 38 | if (action === "ready") { 39 | console.log(`Done: ${new Date().getTime() - start}`); 40 | // console.log( 41 | // bdb.documentsSync({ 42 | // // sort: ["updated_at", "asc"], 43 | // // slug: "mdx-page", 44 | // frontmatter: { 45 | // // test: 1 46 | // tags: "tag", 47 | // }, 48 | // }) 49 | // ); 50 | } 51 | if (opts) { 52 | opts.document 53 | .unresolvedLinks() 54 | .forEach((link) => 55 | console.log( 56 | `Unresolved link: ${link 57 | .from() 58 | .path()}:${link.line()}:${link.column()}` 59 | ) 60 | ); 61 | } 62 | }); 63 | 64 | // await bdb.ready(); 65 | // console.log((await bdb.tasks()).map(x => x.text() )) -------------------------------------------------------------------------------- /packages/core/src/db.ts: -------------------------------------------------------------------------------- 1 | import { BaseSQLiteDatabase } from "drizzle-orm/sqlite-core"; 2 | import { ExtractTablesWithRelations } from "drizzle-orm"; 3 | import * as schema from "./schema.js"; 4 | import { resolve } from "node:path"; 5 | import { RunResult } from "better-sqlite3"; 6 | 7 | import { drizzle } from "drizzle-orm/better-sqlite3"; 8 | import { migrate } from "drizzle-orm/better-sqlite3/migrator"; 9 | import Database from "better-sqlite3"; 10 | 11 | import * as url from 'url'; 12 | const __filename = url.fileURLToPath(import.meta.url); 13 | 14 | export const getDb = (connectionString: string) => { 15 | const sqlite = new Database(connectionString); 16 | const db = drizzle(sqlite, { schema }); 17 | migrate(db, { migrationsFolder: resolve(__filename, "../../drizzle") }); 18 | return db; 19 | }; 20 | 21 | export type Db = BaseSQLiteDatabase< 22 | "sync", 23 | void | RunResult, 24 | typeof schema, 25 | ExtractTablesWithRelations 26 | >; 27 | 28 | // let getDb: (connectionString: string) => Db; 29 | 30 | // if (process.isBun === undefined) { 31 | // const { drizzle } = await import("drizzle-orm/better-sqlite3"); 32 | // const { migrate } = await import("drizzle-orm/better-sqlite3/migrator"); 33 | // // @ts-ignore 34 | // const Database = (await import("better-sqlite3")).default; 35 | 36 | // getDb = (connectionString: string) => { 37 | // const sqlite = new Database(connectionString); 38 | // const db = drizzle(sqlite, { schema }); 39 | // migrate(db, { migrationsFolder: resolve(__filename, "../../drizzle") }); 40 | // return db; 41 | // }; 42 | // } else { 43 | // const { drizzle } = await import("drizzle-orm/bun-sqlite"); 44 | // const { migrate } = await import("drizzle-orm/bun-sqlite/migrator"); 45 | // const { Database } = await import("bun:sqlite"); 46 | 47 | // getDb = (connectionString: string) => { 48 | // const sqlite = new Database(connectionString); 49 | // const db = drizzle(sqlite, { schema }); 50 | // 
migrate(db, { migrationsFolder: resolve(__filename, "../../drizzle") }); 51 | // return db; 52 | // }; 53 | // } 54 | 55 | // export { getDb }; 56 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/use-cases.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Usecases 3 | draft: true 4 | --- 5 | 6 | ## Obvious cases 7 | 8 | | | Output | convert wiki-links | find broken links | build backlinks | build graph | 9 | | ---------------- | --------- | ------------------ | ----------------- | --------------- | ----------- | 10 | | Hugo → Hugo | PML | - | + | + | + | 11 | | Obsidian → Hugo | PML | + | + | + | + | 12 | | Astro → Astro | web-links | - | + | + | + | 13 | | Obsidian → Astro | web-links | + | + | + | + | 14 | 15 | - PML is the easiest option, because Hugo can figure out links itself 16 | - Web-links is the only supported option by Astro 17 | - Astro doesn't allow `slug` in frontmatter, but it has an algorithm to generate one 18 | - **Upd** [Starlight supports `slug`](https://starlight.astro.build/reference/frontmatter/#slug) 19 | - `X → X`, I assume `source` and `output` directories are **the same**, so no need to write `md` (`mdx`) files. Need to write auxilary files only, for example: 20 | - `filename-backlinks.json` 21 | - `filename-graph.json`, `filename-graph.svg` - local graph with distance 1 with outgoing and incoming links 22 | - `backlinks.json`, where `{ [slug]: [...backlinks] }` 23 | - `graph.json`, `graph.svg` 24 | - `X → Y`, I assume `source` and `output` directories are **different**. 25 | - Auxilary data, like backlinks can be placed in frontmatter 26 | - Or can be kept in separate files as in `X → X` scenario 27 | - additionaly BrainDb can be used to typecheck frontmatter (like in Astro) 28 | - adding scheme support would also allow to do faceted search 29 | 30 | ## Out of scope (for now) 31 | 32 | - conversion from one web-links to another web-links 33 | - as a workaround one can convert from one web-link to PML, and then PML to another web-links 34 | - images 35 | - `![](/path)` 36 | - `` 37 | 38 | ## Other 39 | 40 | - From `Foam` 41 | - specifics: wikilinks with long slugs, reference links 42 | -------------------------------------------------------------------------------- /packages/core/src/query.ts: -------------------------------------------------------------------------------- 1 | import { Db } from "./db.js"; 2 | import { Document } from "./Document.js"; 3 | import { document } from "./schema.js"; 4 | import { asc, desc, eq, and, sql, SQLWrapper, SQL } from "drizzle-orm"; 5 | import { JsonLimitedObject, JsonPrimitive } from "./types.js"; 6 | import { SQLiteColumn } from "drizzle-orm/sqlite-core"; 7 | 8 | export type SortDirection = "asc" | "desc"; 9 | 10 | // this would be similar to 11 | // https://github.com/stereobooster/facets/blob/main/packages/facets/src/Facets.ts#L138-L150 12 | export type DocumentsOtions = { 13 | slug?: string; 14 | url?: string; 15 | /** 16 | * Limited search by frontmatter fields - only strict comparison for primitive values, 17 | * like: string, number, boolean 18 | * ATTENTION: arrays in frontammter don't work e.g. 
`tags: [X,Y]` 19 | */ 20 | frontmatter?: JsonLimitedObject; 21 | sort?: ["updated_at", SortDirection]; 22 | }; 23 | 24 | export function documentsSync(db: Db, options?: DocumentsOtions) { 25 | let query = db.select({ path: document.path }).from(document); 26 | 27 | const where: (SQLWrapper | undefined)[] = []; 28 | if (options?.slug !== undefined) { 29 | where.push(eq(document.slug, options?.slug)); 30 | } 31 | if (options?.url !== undefined) { 32 | where.push(eq(document.url, options?.url)); 33 | } 34 | if (options?.frontmatter !== undefined) { 35 | Object.entries(flattenObj(options?.frontmatter)).forEach(([key, value]) => 36 | where.push(eq(sql`${document.frontmatter}->>${"$." + key}`, value)) 37 | ); 38 | } 39 | 40 | const order: (SQLiteColumn | SQL)[] = []; 41 | if (options?.sort !== undefined) { 42 | const dir = options?.sort?.[1] === "asc" ? asc : desc; 43 | order.push(dir(document.updated_at)); 44 | } 45 | 46 | return query 47 | .where(and(...where)) 48 | .orderBy(...order) 49 | .all() 50 | .map(({ path }) => new Document(db, path)); 51 | } 52 | 53 | function flattenObj( 54 | obj: JsonLimitedObject, 55 | parent?: string, 56 | res = Object.create(null) as Record 57 | ) { 58 | for (let key in obj) { 59 | const propName = parent ? parent + "." + key : key; 60 | const value = obj[key]; 61 | if (typeof value == "object" && value !== null) { 62 | flattenObj(value, propName, res); 63 | } else { 64 | res[propName] = value; 65 | } 66 | } 67 | return res; 68 | } 69 | -------------------------------------------------------------------------------- /packages/core/src/resolveLinks.ts: -------------------------------------------------------------------------------- 1 | import { and, eq, isNull, ne, sql, isNotNull } from "drizzle-orm"; 2 | import { link } from "./schema.js"; 3 | import { Db } from "./db.js"; 4 | 5 | export function resolveLinks(db: Db) { 6 | // TODO: check for ambiguous: slugs, urls 7 | // Maybe update would be better than replace? 8 | db.run( 9 | sql` 10 | REPLACE INTO links 11 | SELECT 12 | links.id, 13 | links."from", 14 | documents.path as "to", 15 | links.start, 16 | links.to_slug, 17 | links.to_url, 18 | links.to_path, 19 | links.to_anchor, 20 | links.label, 21 | links.line, 22 | links.column 23 | FROM links INNER JOIN documents ON 24 | links.to_slug = documents.slug OR 25 | links.to_url = documents.url OR 26 | links.to_path = documents.path 27 | WHERE links."to" IS NULL;` 28 | ); 29 | } 30 | 31 | export function unresolvedLinks(db: Db, idPath?: string) { 32 | return db 33 | .select({ from: link.from, start: link.start }) 34 | .from(link) 35 | .where( 36 | idPath === undefined 37 | ? isNull(link.to) 38 | : and(isNull(link.to), eq(link.from, idPath)) 39 | ) 40 | .all(); 41 | } 42 | 43 | type GetDocumentsProps = { 44 | db: Db; 45 | idPath: string; 46 | selfLinks?: boolean; 47 | }; 48 | 49 | /** 50 | * Incoming links 51 | */ 52 | export function getDocumentsFrom({ 53 | db, 54 | idPath, 55 | selfLinks = false, 56 | }: GetDocumentsProps) { 57 | return db 58 | .selectDistinct({ from: link.from }) 59 | .from(link) 60 | .where( 61 | selfLinks 62 | ? eq(link.to, idPath) 63 | : and(eq(link.to, idPath), ne(link.from, idPath)) 64 | ) 65 | .all() 66 | .map((x) => x.from); 67 | } 68 | 69 | /** 70 | * Outgoing links 71 | */ 72 | export function getDocumentsTo({ 73 | db, 74 | idPath, 75 | selfLinks = false, 76 | }: GetDocumentsProps) { 77 | return db 78 | .selectDistinct({ to: link.to }) 79 | .from(link) 80 | .where( 81 | and( 82 | isNotNull(link.to), 83 | selfLinks 84 | ? 
eq(link.from, idPath) 85 | : and(eq(link.from, idPath), ne(link.to, idPath)) 86 | ) 87 | ) 88 | .all() 89 | .map((x) => x.to as string); 90 | } 91 | 92 | /** 93 | * Incoming and Outgoing links 94 | */ 95 | export function getConnectedDocuments(props: GetDocumentsProps) { 96 | return [...new Set([...getDocumentsFrom(props), ...getDocumentsTo(props)])]; 97 | } 98 | -------------------------------------------------------------------------------- /experiments/cli/src/index.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | import { unlinkSync } from "node:fs"; 4 | import { writeFileSync } from "node:fs"; 5 | import { dirname } from "node:path"; 6 | import { mkdirSync } from "node:fs"; 7 | 8 | import { getConfig } from "./config.js"; 9 | export { Config } from "./config.js"; 10 | // import { version } from "./package.json"; 11 | 12 | import { BrainDB } from "@braindb/core"; 13 | 14 | import process from "node:process"; 15 | import { Command } from "commander"; 16 | const program = new Command(); 17 | 18 | program 19 | .name("BrainDB") 20 | // .version(version) 21 | // .command("start", "start", { isDefault: true, }) 22 | .description("Treat your markdown files as database") 23 | .option("--watch", "watch mode"); 24 | 25 | const cmd = program.parse(); 26 | const opts = cmd.opts(); 27 | 28 | getConfig().then((cfg) => { 29 | const { destination, transformPath, linkType, transformFrontmatter, transformUnresolvedLink } = cfg; 30 | 31 | const dbPath = process.cwd(); 32 | const bdb = new BrainDB({ ...cfg, dbPath }); 33 | 34 | bdb 35 | .on("*", (action, option) => { 36 | if (destination) { 37 | if (action === "ready") { 38 | // const jsonPath = 39 | // destination + 40 | // (transformPath ? transformPath(`/graph.json`) : "/graph.json"); 41 | // writeFileSync(jsonPath, JSON.stringify(bdb.toJson(), null, 2), { 42 | // encoding: "utf8", 43 | // }); 44 | 45 | if (opts.watch) { 46 | console.log("Watching files"); 47 | process.on("SIGINT", () => { 48 | bdb.stop(); 49 | }); 50 | } else { 51 | bdb.stop(); 52 | } 53 | } 54 | 55 | if (action === "create" || action === "update") { 56 | const document = option?.document!; 57 | const path = option?.document?.path()!; 58 | const mdPath = 59 | destination + (transformPath ? transformPath(path) : path); 60 | mkdirSync(dirname(mdPath), { recursive: true }); 61 | writeFileSync( 62 | mdPath, 63 | document.markdown({ 64 | transformPath, 65 | linkType, 66 | transformFrontmatter, 67 | transformUnresolvedLink 68 | }), 69 | { 70 | encoding: "utf8", 71 | } 72 | ); 73 | } else if (action === "delete") { 74 | const path = option?.document?.path()!; 75 | unlinkSync(destination + path); 76 | } 77 | } 78 | }) 79 | .start(); 80 | }); 81 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/remark-wiki-link.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: remark-wiki-link 3 | --- 4 | 5 | `remark-wiki-link` consists of 3 projects: 6 | 7 | - `micromark-extension-wiki-link` 8 | - `mdast-util-wiki-link` 9 | - `remark-wiki-link` 10 | 11 | I moved all 3 projects in monorepo - this way it is more convinient to work. Changed all plugins to TypeScript, simplified build process (ES6 only), changed options. 12 | 13 | It is also worth to mention: [wikirefs](https://github.com/wikibonsai/wikirefs). 14 | 15 | ## Open questions 16 | 17 | **But there is more**... 
18 | 19 | ```js 20 | export function remarkWikiLink(options) { 21 | const { bdb } = options; 22 | 23 | return wikiLinkPlugin.call(this, { 24 | linkTemplate: ({ slug, alias }) => { 25 | const [slugWithoutAnchor, anchor] = slug.split("#"); 26 | const doc = bdb.documentsSync({ slug: slugWithoutAnchor })[0]; 27 | if (doc) { 28 | return { 29 | hName: "a", 30 | hProperties: { 31 | href: anchor ? `${doc.url()}#${anchor}` : doc.url(), 32 | }, 33 | hChildren: [ 34 | { 35 | type: "text", 36 | value: alias == null ? doc.frontmatter().title : alias, 37 | }, 38 | ], 39 | }; 40 | } else { 41 | return { 42 | hName: "span", 43 | hProperties: { 44 | class: "broken-link", 45 | title: `Can't resolve link to ${slug}`, 46 | }, 47 | hChildren: [{ type: "text", value: alias || slug }], 48 | }; 49 | } 50 | }, 51 | }); 52 | } 53 | ``` 54 | 55 | - [ ] What about direct integration with BrainDB? (see code above 👆) 56 | - [ ] What about [PML](https://stereobooster.com/posts/portable-markdown-links/)? 57 | - [ ] Related functionality [Icons for external links](https://astro-digital-garden.stereobooster.com/recipes/icons-to-external-links/) 58 | - [ ] support anchors in wikilinks (`[[page#anchor]]`, `[[page#anchor|alias]]`) 59 | - do we need to url-encode anchors? 60 | - do we need to slugify anchors? 61 | - check that anchors correspond to some header in target document 62 | - [ ] what about ambiguous links (`bdb.documentsSync({ slug: permalink }).length > 1`)? 63 | - [ ] image wikilinks (`![[some.jpg]]`) 64 | 65 | ### Options 66 | 67 | - make general plugin for all links, like `remark-code-hook`? 68 | - and based on it resolve both wikilinks and PML 69 | - maybe call it `remark-link-resolver`? 70 | - make another plugin which would take `BrainDB` as option 71 | - use named exports in `remark-wkik-link` to expose "classic" and "new" versions 72 | - where "new" is plugin which would take `BrainDB` as option 73 | -------------------------------------------------------------------------------- /packages/core/src/getMarkdown.ts: -------------------------------------------------------------------------------- 1 | import { map } from "unist-util-map"; 2 | import { stringify as stringifyYaml } from "yaml"; 3 | import { and, eq } from "drizzle-orm"; 4 | import { DocumentProps, document, link } from "./schema.js"; 5 | import { mdParser } from "./parser.js"; 6 | import { Db } from "./db.js"; 7 | import { BrainDBOptionsOut, Frontmatter } from "./index.js"; 8 | import { isExternalLink } from "./utils.js"; 9 | 10 | export function getMarkdown( 11 | db: Db, 12 | frontmatter: Frontmatter, 13 | d: DocumentProps, 14 | options: BrainDBOptionsOut = {} 15 | ): string | Uint8Array { 16 | const { transformPath, linkType, transformUnresolvedLink } = options; 17 | 18 | let frontmatterDetected = false; 19 | const modified = map(d.ast as any, (node) => { 20 | if (node.type == "yaml") { 21 | frontmatterDetected = true; 22 | return { 23 | type: "yaml", 24 | value: stringifyYaml(frontmatter).trim(), 25 | }; 26 | } 27 | if (node.type === "wikiLink" || node.type === "link") { 28 | const label = 29 | node.type === "link" 30 | ? 
(node.children[0].value as string) 31 | : node.data.alias; 32 | 33 | if (isExternalLink(node.url)) return node; 34 | 35 | const [resolvedLink] = db 36 | .select() 37 | .from(link) 38 | .where( 39 | and(eq(link.from, d.path), eq(link.start, node.position.start.offset)) 40 | ) 41 | .all(); 42 | 43 | if (!resolvedLink || !resolvedLink.to) 44 | return ( 45 | (transformUnresolvedLink && transformUnresolvedLink(d.path, node)) || 46 | node 47 | ); 48 | 49 | let url: string; 50 | 51 | if (linkType === "web") { 52 | const toDocument = db 53 | .select() 54 | .from(document) 55 | .where(and(eq(document.path, resolvedLink.to))) 56 | .get(); 57 | if (!toDocument) return node; 58 | url = toDocument.url; 59 | } else { 60 | url = resolvedLink.to; 61 | if (transformPath) url = transformPath(url); 62 | } 63 | 64 | if (!url.startsWith("/")) url = "/" + url; 65 | 66 | if (resolvedLink.to_anchor) url = url + "#" + resolvedLink.to_anchor; 67 | url = encodeURI(url); 68 | 69 | return { 70 | type: "link", 71 | title: node.title, 72 | url, 73 | children: [ 74 | { 75 | type: "text", 76 | value: label, 77 | }, 78 | ], 79 | }; 80 | } 81 | return node; 82 | }); 83 | if (!frontmatterDetected) { 84 | modified.children.unshift({ 85 | type: "yaml", 86 | value: stringifyYaml(frontmatter).trim(), 87 | }); 88 | } 89 | 90 | return mdParser.stringify(modified); 91 | } 92 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/astro-integration.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Astro integration 3 | draft: true 4 | --- 5 | 6 | > [!WARNING] 7 | > This is outdated article. 8 | 9 | ## Options 10 | 11 | ### Option 1: (`@braindb/cli`) 12 | 13 | In this case `BrainDB` acts as [obsidian-export](https://nick.groenen.me/projects/obsidian-export/). 14 | 15 | ```vizdom 16 | digraph { 17 | rankdir=LR 18 | node[shape=rect] 19 | a[label="some/folder"] 20 | c[label="src/content"] 21 | a -> BrainDB -> c 22 | subgraph cluster_0 { 23 | label=Astro 24 | d[label=Astro] 25 | e[label=dist] 26 | c -> d -> e 27 | } 28 | } 29 | ``` 30 | 31 | ### Option 2: augment though Astro components 32 | 33 | In this case `Astro` is repsonsible for rendering and `BrainDB` used to add features on top, for example, backlinks. Basically BrainDB and Astro Content Collections run in parallel and fully independent. 34 | 35 | ```vizdom 36 | digraph { 37 | rankdir=LR 38 | node[shape=rect] 39 | c[label="src/content"] 40 | e[label=dist] 41 | f[label="Astro components"] 42 | 43 | c -> Astro -> e 44 | c -> BrainDB -> f 45 | f -> Astro 46 | } 47 | ``` 48 | 49 | ### Option 3: augment though Remark plugins 50 | 51 | In this case `Astro` is repsonsible for rendering and `BrainDB` hooked in through remark plugins. BrainDB in this case responsible, for example, for wikilinks, datview. 
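For illustration, a minimal sketch of this option (it mirrors the setup already used in `packages/docs/astro.config.mjs`; exact plugin options may differ in your project):

```ts
// astro.config.ts — hook BrainDB in through the integration and a remark plugin
import { defineConfig } from "astro/config";
import { brainDbAstro, getBrainDb } from "@braindb/astro";
import remarkDataview from "@braindb/remark-dataview";

export default defineConfig({
  integrations: [
    // registers BrainDB and (by default) @braindb/remark-wiki-link
    brainDbAstro(),
  ],
  markdown: {
    // dataview-style code blocks are rendered by querying the same BrainDB instance
    remarkPlugins: [[remarkDataview, { getBrainDb }]],
  },
});
```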
52 | 53 | ```vizdom 54 | digraph { 55 | rankdir=LR 56 | node[shape=rect] 57 | c[label="src/content"] 58 | e[label=dist] 59 | f[label="remark plugins"] 60 | 61 | c -> Astro -> e 62 | c -> BrainDB -> f 63 | f -> Astro 64 | } 65 | ``` 66 | 67 | ## Old links 68 | 69 | Ideas from discussions: 70 | 71 | - https://github.com/withastro/roadmap/discussions/424 72 | - https://github.com/withastro/roadmap/discussions/688 73 | - https://github.com/withastro/roadmap/discussions/769 74 | - https://github.com/withastro/roadmap/discussions/434 75 | - https://github.com/withastro/roadmap/discussions/759 76 | - https://github.com/withastro/roadmap/discussions/736 77 | - https://github.com/withastro/roadmap/discussions/739 78 | - https://github.com/withastro/roadmap/discussions/704 79 | - https://github.com/withastro/roadmap/discussions/696 80 | - https://github.com/withastro/roadmap/discussions/686 81 | - https://github.com/withastro/roadmap/discussions/687 82 | - https://github.com/withastro/roadmap/discussions/551 83 | - https://github.com/withastro/roadmap/discussions/423 84 | - https://github.com/withastro/roadmap/discussions/505 85 | - https://github.com/withastro/roadmap/discussions/487 86 | - https://github.com/withastro/roadmap/discussions/470 87 | - https://github.com/withastro/roadmap/discussions/457 88 | - https://github.com/withastro/roadmap/discussions/334 89 | - https://github.com/withastro/roadmap/discussions/76 90 | -------------------------------------------------------------------------------- /packages/core/src/Document.ts: -------------------------------------------------------------------------------- 1 | import { eq } from "drizzle-orm"; 2 | import { Db } from "./db.js"; 3 | import { getMarkdown } from "./getMarkdown.js"; 4 | import { BrainDBOptionsOut } from "./index.js"; 5 | import { DocumentProps, document } from "./schema.js"; 6 | import { getDocumentsFrom, unresolvedLinks } from "./resolveLinks.js"; 7 | import { Link } from "./Link.js"; 8 | import { toText } from "./toText.js"; 9 | 10 | export class Document { 11 | private idPath: string; 12 | // @ts-expect-error it is lazily initialized only on the request 13 | private doc: DocumentProps; 14 | private db: Db; 15 | 16 | private getDoc() { 17 | if (!this.doc) { 18 | const [doc] = this.db 19 | .select() 20 | .from(document) 21 | .where(eq(document.path, this.idPath)) 22 | .all(); 23 | this.doc = doc; 24 | } 25 | return this.doc; 26 | } 27 | 28 | private checkAst() { 29 | const ast = this.getDoc().ast as any; 30 | if (!ast || ast.type !== "root") 31 | throw new Error( 32 | "Do not use `storeMarkdown: false` if you want to use `markdown` and `text`" 33 | ); 34 | } 35 | 36 | constructor(db: Db, idPath: string) { 37 | this.idPath = idPath; 38 | this.db = db; 39 | } 40 | 41 | path() { 42 | return this.idPath; 43 | } 44 | url() { 45 | return this.getDoc().url; 46 | } 47 | slug() { 48 | return this.getDoc().slug; 49 | } 50 | frontmatter() { 51 | return this.getDoc().frontmatter!; 52 | } 53 | markdown(options: BrainDBOptionsOut = {}) { 54 | this.checkAst(); 55 | const { transformFrontmatter } = options; 56 | 57 | const frontmatter = transformFrontmatter 58 | ? 
transformFrontmatter(this) 59 | : this.frontmatter(); 60 | 61 | return getMarkdown(this.db, frontmatter, this.getDoc(), options); 62 | } 63 | title() { 64 | return (this.getDoc().frontmatter!["title"] as string) || this.slug(); 65 | } 66 | id() { 67 | return this.getDoc().id; 68 | } 69 | updatedAt() { 70 | return new Date(this.getDoc().updated_at); 71 | } 72 | 73 | /** 74 | * From which documents there are links to this one 75 | */ 76 | documentsFrom() { 77 | return getDocumentsFrom({ 78 | db: this.db, 79 | idPath: this.idPath, 80 | }).map((from) => new Document(this.db, from)); 81 | } 82 | 83 | /** 84 | * experimental - maybe use instead outgoingLinks(to=null) 85 | */ 86 | unresolvedLinks() { 87 | return unresolvedLinks(this.db, this.idPath).map( 88 | (x) => new Link(this.db, x.from, x.start) 89 | ); 90 | } 91 | 92 | /** 93 | * experimental 94 | */ 95 | text() { 96 | this.checkAst(); 97 | return toText(this.getDoc().ast); 98 | } 99 | } 100 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/content-query.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Content query 3 | --- 4 | 5 | I think to expose querying interface similar to [facets](https://github.com/stereobooster/facets/blob/05e9b0811d5c4ca35fc83fab1c8d2f60e8918599/packages/facets/src/Facets.ts#L138-L150). It is to some extent is limited. But if people need more they can use generated DB directly. 6 | 7 | Related: 8 | 9 | - https://content.nuxt.com/composables/query-content 10 | - https://github.com/datopian/markdowndb#or-using-markdowndb-nodejs-api-in-a-framework-of-your-choice 11 | - https://pagefind.app/docs/js-api-filtering/#using-compound-filters 12 | - https://testing-library.com/docs/react-testing-library/cheatsheet/#queries 13 | - https://payloadcms.com/docs/queries/overview 14 | 15 | ## Curently available 16 | 17 | ```ts 18 | export type DocumentsOtions = { 19 | slug?: string; 20 | url?: string; 21 | frontmatter?: JsonLimitedObject; 22 | sort?: ["updated_at", SortDirection]; 23 | }; 24 | documents(options?: DocumentsOtions) {} 25 | ``` 26 | 27 | ## Open questions 28 | 29 | - SQLite docs 30 | - https://stackoverflow.com/questions/63651913/is-there-a-method-to-check-if-an-array-includes-one-value-in-sqlite 31 | - https://www.sqlite.org/lang_expr.html#the_like_glob_regexp_match_and_extract_operators 32 | - Find all docs with couple values for the same field (OR) `frontmater: { date: [X, Y] }` 33 | - Find all docs with tag - `frontmater: { tags: [tag] }`? 34 | - Find all posts with exactly one tag? 35 | - Find all posts with any tag (OR) - `frontmater: { tags: [tag1, tag2] }`? 36 | - Find all posts with two tags (AND)? 37 | - Find all docs where field exists in frontmatter - `frontmater: { tags: ... }`? 38 | - Find all docs where field doesn't exist in frontmatter - `frontmater: { tags: null }`? 39 | - Pattern match 40 | - Find all docs where path starts with (matches some pattern) 41 | - Find all docs where url starts with (matches some pattern) 42 | - Find all docs where title matches some pattern 43 | 44 | ## Alternative approach 45 | 46 | Use some kind of DSL, like https://orm.drizzle.team/docs/operators 47 | 48 | ## Built-in fields vs frontmatter 49 | 50 | There are built-in fields: `path`, `url`, `updated_at` (will be added `type`). They don't require schema. 51 | 52 | What to do if we have same fields in frontmatter? We can explicitly target those fields with prefix `frontmatter` or `fm`. 
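Before the bullet points below spell out the proposed semantics, here is a hedged sketch of the distinction (only the plain `sort: ["updated_at", …]` form exists today; the `fm.` prefix is a proposal, not an implemented option):

```ts
// today: filter on a frontmatter field, sort on the built-in updated_at column
bdb.documentsSync({
  frontmatter: { tags: "tag" },
  sort: ["updated_at", "desc"],
});

// proposal: disambiguate with an explicit prefix
// sort: ["fm.updated_at", "asc"]  // frontmatter field
// sort: ["updated_at", "asc"]     // built-in column
```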
53 | 54 | - `sort: ["updated_at", "asc"]` will sort by built-in field 55 | - `sort: ["fm.updated_at", "asc"]` will sort by field in frontmatter 56 | - `sort: ["something", "asc"]` will sort by field in frontmatter (because there is no such built-in field) 57 | - `sort: ["something.else", "asc"]` will sort by field `else` nested in object `something` in frontmatter 58 | - which means we can't use fields with `.` in it. In order to support `.` probably need to intrdouce escape sequence `\.` 59 | -------------------------------------------------------------------------------- /packages/micromark-extension-wiki-link/src/html.ts: -------------------------------------------------------------------------------- 1 | import { type CompileContext, type Token } from "micromark-util-types"; 2 | 3 | interface WikiLink { 4 | target: string; 5 | alias?: string; 6 | } 7 | 8 | export type WikiLinkHtmlOptions = { 9 | permalinks?: string[]; 10 | pageResolver?: (name: string) => string[]; 11 | newClassName?: string; 12 | wikiLinkClassName?: string; 13 | hrefTemplate?: (name: string) => string; 14 | }; 15 | 16 | function html(opts: WikiLinkHtmlOptions = {}) { 17 | const permalinks = opts.permalinks || []; 18 | const defaultPageResolver = (name: string) => [ 19 | name.replace(/ /g, "_").toLowerCase(), 20 | ]; 21 | const pageResolver = opts.pageResolver || defaultPageResolver; 22 | const newClassName = opts.newClassName || "new"; 23 | const wikiLinkClassName = opts.wikiLinkClassName || "internal"; 24 | const defaultHrefTemplate = (permalink: string) => `#/page/${permalink}`; 25 | const hrefTemplate = opts.hrefTemplate || defaultHrefTemplate; 26 | 27 | function enterWikiLink(this: CompileContext): void { 28 | // @ts-expect-error 29 | let stack: WikiLink[] = this.getData("wikiLinkStack"); 30 | // @ts-expect-error 31 | if (!stack) this.setData("wikiLinkStack", (stack = [])); 32 | 33 | // @ts-expect-error 34 | stack.push({}); 35 | } 36 | 37 | function top(stack: T[]) { 38 | return stack[stack.length - 1]; 39 | } 40 | 41 | function exitWikiLinkAlias(this: CompileContext, token: Token): void { 42 | const alias = this.sliceSerialize(token); 43 | // @ts-expect-error 44 | const current = top(this.getData("wikiLinkStack") as WikiLink[]); 45 | current.alias = alias; 46 | } 47 | 48 | function exitWikiLinkTarget(this: CompileContext, token: Token): void { 49 | const target = this.sliceSerialize(token); 50 | // @ts-expect-error 51 | const current = top(this.getData("wikiLinkStack") as WikiLink[]); 52 | current.target = target; 53 | } 54 | 55 | function exitWikiLink(this: CompileContext): void { 56 | // @ts-expect-error 57 | const wikiLink = (this.getData("wikiLinkStack") as WikiLink[]).pop()!; 58 | 59 | const pagePermalinks = pageResolver(wikiLink.target!); 60 | let permalink = pagePermalinks.find((p) => permalinks.indexOf(p) !== -1); 61 | const exists = permalink !== undefined; 62 | if (!exists) { 63 | permalink = pagePermalinks[0]; 64 | } 65 | let displayName = wikiLink.target!; 66 | if (wikiLink.alias) { 67 | displayName = wikiLink.alias; 68 | } 69 | 70 | let classNames = wikiLinkClassName; 71 | if (!exists) { 72 | classNames += " " + newClassName; 73 | } 74 | 75 | this.tag( 76 | '' 77 | ); 78 | this.raw(displayName); 79 | this.tag(""); 80 | } 81 | 82 | return { 83 | enter: { 84 | wikiLink: enterWikiLink, 85 | }, 86 | exit: { 87 | wikiLinkTarget: exitWikiLinkTarget, 88 | wikiLinkAlias: exitWikiLinkAlias, 89 | wikiLink: exitWikiLink, 90 | }, 91 | }; 92 | } 93 | 94 | export { html }; 95 | 
-------------------------------------------------------------------------------- /packages/micromark-extension-wiki-link/README.md: -------------------------------------------------------------------------------- 1 | # @braindb/micromark-extension-wiki-link 2 | 3 | fork of https://github.com/landakram/micromark-extension-wiki-link 4 | 5 | --- 6 | 7 | # Old readme 8 | 9 | This [micromark](https://github.com/micromark/micromark) extension parses and renders `[[Wiki Links]]`. 10 | 11 | - Parse wiki-style links and render them as anchors 12 | - Differentiate between "new" and "existing" wiki links by giving the parser a list of existing permalinks 13 | - Parse aliased wiki links i.e `[[Real Page:Page Alias]]` 14 | 15 | Using [remark](https://github.com/remarkjs/remark)? You might want to use 16 | [`remark-wiki-link`](https://github.com/landakram/remark-wiki-link) instead of using this package directly. 17 | 18 | ## Usage 19 | 20 | ```javascript 21 | import { syntax, html } from "@braindb/micromark-extension-wiki-link"; 22 | 23 | let serialized = micromark("[[Wiki Link]]", { 24 | extensions: [syntax()], 25 | htmlExtensions: [html()], 26 | }); 27 | 28 | console.log(serialized); 29 | //

<p><a href="#/page/wiki_link" class="internal new">Wiki Link</a></p>
    30 | ``` 31 | 32 | ### Configuration options 33 | 34 | Both the syntax extension and html extension can be configured by passing an object. 35 | 36 | For example, one may configure the syntax extension like so: 37 | 38 | ```javascript 39 | let serialized = micromark("[[Wiki Link]]", { 40 | extensions: [syntax({ aliasDivider: ":" })], 41 | htmlExtensions: [html()], 42 | }); 43 | ``` 44 | 45 | #### `syntax` 46 | 47 | - `options.aliasDivider [String]`: a string to be used as the divider for aliases. See the section below on [Aliasing pages](#aliasing-pages). Defaults to `"|"`. 48 | 49 | #### `html` 50 | 51 | - `options.permalinks [String]`: An array of permalinks that should be considered existing pages. If a wiki link is parsed and its permalink matches one of these permalinks, `node.data.exists` will be true. 52 | - `options.pageResolver (pageName: String) -> [String]`: A function that maps a page name to an array of possible permalinks. These possible permalinks are cross-referenced with `options.permalinks` to determine whether a page exists. If a page doesn't exist, the first element of the array is considered the permalink. 53 | 54 | The default `pageResolver` is: 55 | 56 | ```javascript 57 | (name) => [name.replace(/ /g, "_").toLowerCase()]; 58 | ``` 59 | 60 | - `options.hrefTemplate (permalink: String) -> String`: A function that maps a permalink to some path. This path is used as the `href` for the rendered `a`. 61 | 62 | The default `hrefTemplate` is: 63 | 64 | ```javascript 65 | (permalink) => `#/page/${permalink}`; 66 | ``` 67 | 68 | - `options.wikiLinkClassName [String]`: a class name that is attached to any rendered wiki links. Defaults to `"internal"`. 69 | - `options.newClassName [String]`: a class name that is attached to any rendered wiki links that do not exist. Defaults to `"new"`. 70 | 71 | ### Aliasing pages 72 | 73 | Aliased pages are supported with the following markdown syntax: 74 | 75 | ```md 76 | [[Real Page|Page Alias]] 77 | ``` 78 | 79 | And will produce this HTML when rendered: 80 | 81 | ```html 82 | Page Alias 83 | ``` 84 | -------------------------------------------------------------------------------- /experiments/demo-astro/README.md: -------------------------------------------------------------------------------- 1 | # Demo: Astro integration 2 | 3 | ## Build fails 4 | 5 | ```sh 6 | pnpm build 7 | Build failed in 970ms 8 | [commonjs--resolver] ../../node_modules/.pnpm/@node-rs+xxhash-darwin-x64@1.7.0/node_modules/@node-rs/xxhash-darwin-x64/xxhash.darwin-x64.node (1:0): Unexpected character '�' (Note that you need plugins to import files that are not JavaScript) 9 | file: /node_modules/.pnpm/@node-rs+xxhash@1.7.0/node_modules/@node-rs/xxhash/index.js:1:0 10 | 1: �������__TEXT�__text... 11 | ``` 12 | 13 | There are several related issues reported in Vite: 14 | 15 | - https://github.com/vitejs/vite/issues/5688 16 | - https://github.com/vitejs/vite/issues/14289 17 | - https://github.com/vitejs/vite/issues/16293 18 | 19 | **Workaround** use `@braindb/core` from npm instead of local version (`"@braindb/core": "workspace:*"`) 20 | 21 | ## TODO 22 | 23 | Those are old TODOs. 
A lot of them covered in https://astro-digital-garden.stereobooster.com/ 24 | 25 | - Layout: navbar, footer, main, sidebar 26 | - navbar 27 | - [dark mode switcher](https://www.kevinzunigacuellar.com/blog/dark-mode-in-astro/) 28 | - [pagefind component](https://blog.otterlord.dev/posts/astro-search/) 29 | - other 30 | - @astrojs/sitemap 31 | - seo 32 | - https://www.npmjs.com/package/@astrolib/seo 33 | - https://www.npmjs.com/package/astro-seo 34 | - components 35 | - https://github.com/natemoo-re/astro-icon 36 | - https://github.com/markteekman/accessible-astro-components#usage 37 | - https://github.com/delucis/astro-embed/tree/main/packages/astro-embed-youtube#readme 38 | - https://github.com/felix-berlin/astro-breadcrumbs 39 | - tailwind themes 40 | - https://github.com/gndx/ev0-astro-theme 41 | - https://github.com/flexdinesh/blogster/tree/main/themes/sleek 42 | - https://github.com/satnaing/astro-paper 43 | - https://github.com/chrismwilliams/astro-theme-cactus 44 | - https://github.com/markteekman/accessible-astro-starter 45 | - https://github.com/michael-andreuzza/astrosaas 46 | - faceted search 47 | - facets, client side astro component 48 | - https://github.com/withastro/starlight/blob/main/packages/starlight/components/Search.astro 49 | - https://daisyui.com/docs/use/ 50 | - https://tanstack.com/table/v8/docs/adapters/solid-table 51 | - https://ui.shadcn.com/docs/components/data-table 52 | - https://flowbite.com/blocks/application/advanced-tables/ 53 | - https://tw-elements.com/docs/standard/data/datatables/ 54 | - https://solid-ui-components.vercel.app/docs/components/table 55 | - start off by implementing concrete example and generalize later 56 | - lists 57 | - latest (by publish date, or by modification date) 58 | - alphabetical 59 | - other 60 | - time line, graph, calendar 61 | - sidebar 62 | - custom 63 | - file tree 64 | - facets 65 | - tags, categories 66 | - publish date (but why) 67 | - file tree can be one of filters (hierarchical) 68 | - custom facets per category 69 | - standalone pages 70 | - what about sidebar? 71 | - Right sidebar 72 | - ToC 73 | - backlinks 74 | - mini graph 75 | - tags pages 76 | - do I need them? I can reuse faceted search instead 77 | - but what about SEO? 78 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/dataview.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: dataview 3 | draft: true 4 | --- 5 | 6 | ## TODO 7 | 8 | - simplest views are 9 | - [x] table (any number of columns) 10 | - [ ] align columns based on type (string left, numbers right) 11 | - [x] list 12 | - [x] `dv_task` 13 | - is it ok that it depends on `ast`? 14 | - [x] if it depends on `tasks` table I can as well put default columns `tasks.ast`, `tasks.checked` 15 | - [ ] potential issue with first column used for grouping 16 | - [ ] nested-list (any number of columns) 17 | - [x] handle `*` 18 | - [x] update readme 19 | - [ ] maybe shortcut like `dv('updated_at')` - if `updated_at` exists in frontmatter than take it, otherwise use built-in value 20 | - I would need to replace this in `WHERE`, `ORDER` and other places 21 | - [ ] maybe rename `dv_link` to `dv_anchor` or `dv_page` ... because there is table `links` which may be confusing 22 | - [ ] add tests 23 | - [ ] shall I rename tables and columns before publishing? 24 | - [ ] Backlinks? 
25 | - I would need special function which would return path of current page `dv_path()` 26 | 27 | ## Examples 28 | 29 | ### Alphabetical index 30 | 31 | ```dataview list root_class=column-list 32 | SELECT upper(substr(frontmatter ->> '$.title', 1, 1)), dv_link() 33 | FROM documents 34 | WHERE frontmatter ->> '$.draft' IS NULL OR frontmatter ->> '$.draft' = false 35 | ORDER BY frontmatter ->> '$.title' 36 | LIMIT 2; 37 | ``` 38 | 39 | ### Recently changed 40 | 41 | ```dataview list root_class=column-list 42 | SELECT date(updated_at / 1000, 'unixepoch'), dv_link() 43 | FROM documents 44 | WHERE frontmatter ->> '$.draft' IS NULL OR frontmatter ->> '$.draft' = false 45 | ORDER BY updated_at DESC 46 | LIMIT 2; 47 | ``` 48 | 49 | ### Task list 50 | 51 | - [ ] testing wikilinks in tasks [[architecture|some]] 52 | 53 | ```dataview 54 | SELECT dv_link(), dv_task() 55 | FROM tasks JOIN documents ON documents.path = tasks.from 56 | WHERE start = 1537 57 | ORDER BY updated_at DESC, path, tasks.start; 58 | ``` 59 | 60 | ### Tags list 61 | 62 | ```dataview list root_class=column-list 63 | SELECT tags.value as tag, dv_link() 64 | FROM documents, json_each(frontmatter, '$.tags') tags 65 | WHERE frontmatter ->> '$.draft' IS NULL OR frontmatter ->> '$.draft' = false 66 | ORDER BY tag 67 | LIMIT 2; 68 | ``` 69 | 70 | ## SQL parsers 71 | 72 | - https://github.com/taozhi8833998/node-sql-parser 73 | - dialect: a lot, parser: pegjs, typescript 74 | - https://github.com/JavaScriptor/js-sql-parser 75 | - dialect: MySQL, parser: jison 76 | - https://alasql.org/ 77 | - dialect: ?, parser: jison 78 | - https://github.com/launchql/pgsql-parser 79 | - dialect: PostgreSQL, parser: ? 80 | - https://github.com/TypeFox/langium-sql/blob/main/packages/langium-sql/ 81 | - no idea how to use it as simple parser 82 | - https://nanosql.io/welcome.html 83 | - https://github.com/kristianmandrup/chevrotain-mini-sql-lang 84 | - https://chevrotain.io/docs/tutorial/step3b_adding_actions_embedded.html#sql-grammar 85 | - https://www.npmjs.com/package/rhombic 86 | - last commit 5 years ago 87 | - https://github.com/forward/sql-parser 88 | - last commit 9 years ago 89 | - https://github.com/DerekStride/tree-sitter-sql 90 | - https://github.com/lezer-parser/import-tree-sitter 91 | -------------------------------------------------------------------------------- /packages/mdast-util-wiki-link/src/from-markdown.ts: -------------------------------------------------------------------------------- 1 | import { 2 | type CompileContext, 3 | type Handle, 4 | type Token, 5 | } from "mdast-util-from-markdown"; 6 | 7 | import { type Node, type Data } from "unist"; 8 | 9 | interface WikiLinkHProperties { 10 | className: string; 11 | href: string; 12 | [key: string]: unknown; 13 | } 14 | 15 | interface WikiLinkData extends Data { 16 | alias: string; 17 | permalink: string | undefined; 18 | hName: string; 19 | hProperties: WikiLinkHProperties; 20 | hChildren: Array<{ type: string; value: string }>; 21 | } 22 | 23 | export interface WikiLinkNode extends Node { 24 | data: WikiLinkData; 25 | value: string; 26 | } 27 | 28 | type LinkTemplateProps = { 29 | slug: string; 30 | permalink?: string; 31 | alias?: string; 32 | }; 33 | 34 | function defaultLinkTemplate({ 35 | slug, 36 | permalink, 37 | alias, 38 | }: LinkTemplateProps): any { 39 | return { 40 | hName: "a", 41 | hProperties: { href: permalink == null ? slug : permalink }, 42 | hChildren: [{ type: "text", value: alias == null ? 
slug : alias }], 43 | }; 44 | } 45 | 46 | export interface FromMarkdownOptions { 47 | linkResolver?: (x: string) => string; 48 | linkTemplate?: typeof defaultLinkTemplate; 49 | } 50 | 51 | export function fromMarkdown(opts: FromMarkdownOptions = {}) { 52 | const linkTemplate = opts.linkTemplate || defaultLinkTemplate; 53 | let node: WikiLinkNode; 54 | 55 | function enterWikiLink(this: CompileContext, token: Token) { 56 | node = { 57 | type: "wikiLink", 58 | value: null, 59 | data: { 60 | // alias: null, 61 | // permalink: null 62 | }, 63 | } as any; 64 | // @ts-expect-error 65 | this.enter(node, token); 66 | } 67 | 68 | function top(stack: T[]) { 69 | return stack[stack.length - 1]; 70 | } 71 | 72 | function exitWikiLinkAlias(this: CompileContext, token: Token) { 73 | const alias = this.sliceSerialize(token); 74 | const current = top(this.stack); 75 | // @ts-expect-error 76 | current.data.alias = alias; 77 | } 78 | 79 | function exitWikiLinkTarget(this: CompileContext, token: Token) { 80 | const target = this.sliceSerialize(token); 81 | const current = top(this.stack); 82 | // @ts-expect-error 83 | current.value = target; 84 | } 85 | 86 | function exitWikiLink(this: CompileContext, token: Token) { 87 | this.exit(token); 88 | const wikiLink = node; 89 | 90 | const data = { 91 | slug: wikiLink.value, 92 | alias: wikiLink.data.alias, 93 | permalink: opts.linkResolver 94 | ? opts.linkResolver(wikiLink.value) 95 | : undefined, 96 | }; 97 | 98 | wikiLink.data = { 99 | // ...wikiLink.data, 100 | alias: data.alias, 101 | permalink: data.permalink, 102 | ...linkTemplate(data), 103 | }; 104 | } 105 | 106 | return { 107 | enter: { 108 | wikiLink: enterWikiLink satisfies Handle, 109 | }, 110 | exit: { 111 | wikiLinkTarget: exitWikiLinkTarget satisfies Handle, 112 | wikiLinkAlias: exitWikiLinkAlias satisfies Handle, 113 | wikiLink: exitWikiLink satisfies Handle, 114 | }, 115 | }; 116 | } 117 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | tmp 2 | 3 | # Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore 4 | 5 | # Logs 6 | 7 | logs 8 | _.log 9 | npm-debug.log_ 10 | yarn-debug.log* 11 | yarn-error.log* 12 | lerna-debug.log* 13 | .pnpm-debug.log* 14 | 15 | # Diagnostic reports (https://nodejs.org/api/report.html) 16 | 17 | report.[0-9]_.[0-9]_.[0-9]_.[0-9]_.json 18 | 19 | # Runtime data 20 | 21 | pids 22 | _.pid 23 | _.seed 24 | \*.pid.lock 25 | 26 | # Directory for instrumented libs generated by jscoverage/JSCover 27 | 28 | lib-cov 29 | 30 | # Coverage directory used by tools like istanbul 31 | 32 | coverage 33 | \*.lcov 34 | 35 | # nyc test coverage 36 | 37 | .nyc_output 38 | 39 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 40 | 41 | .grunt 42 | 43 | # Bower dependency directory (https://bower.io/) 44 | 45 | bower_components 46 | 47 | # node-waf configuration 48 | 49 | .lock-wscript 50 | 51 | # Compiled binary addons (https://nodejs.org/api/addons.html) 52 | 53 | build/Release 54 | 55 | # Dependency directories 56 | 57 | node_modules/ 58 | jspm_packages/ 59 | 60 | # Snowpack dependency directory (https://snowpack.dev/) 61 | 62 | web_modules/ 63 | 64 | # TypeScript cache 65 | 66 | \*.tsbuildinfo 67 | 68 | # Optional npm cache directory 69 | 70 | .npm 71 | 72 | # Optional eslint cache 73 | 74 | .eslintcache 75 | 76 | # Optional stylelint cache 77 | 78 | .stylelintcache 79 | 80 | # Microbundle cache 81 | 
82 | .rpt2_cache/ 83 | .rts2_cache_cjs/ 84 | .rts2_cache_es/ 85 | .rts2_cache_umd/ 86 | 87 | # Optional REPL history 88 | 89 | .node_repl_history 90 | 91 | # Output of 'npm pack' 92 | 93 | \*.tgz 94 | 95 | # Yarn Integrity file 96 | 97 | .yarn-integrity 98 | 99 | # dotenv environment variable files 100 | 101 | .env 102 | .env.development.local 103 | .env.test.local 104 | .env.production.local 105 | .env.local 106 | 107 | # parcel-bundler cache (https://parceljs.org/) 108 | 109 | .cache 110 | .parcel-cache 111 | 112 | # Next.js build output 113 | 114 | .next 115 | out 116 | 117 | # Nuxt.js build / generate output 118 | 119 | .nuxt 120 | dist 121 | 122 | # Gatsby files 123 | 124 | .cache/ 125 | 126 | # Comment in the public line in if your project uses Gatsby and not Next.js 127 | 128 | # https://nextjs.org/blog/next-9-1#public-directory-support 129 | 130 | # public 131 | 132 | # vuepress build output 133 | 134 | .vuepress/dist 135 | 136 | # vuepress v2.x temp and cache directory 137 | 138 | .temp 139 | .cache 140 | 141 | # Docusaurus cache and generated files 142 | 143 | .docusaurus 144 | 145 | # Serverless directories 146 | 147 | .serverless/ 148 | 149 | # FuseBox cache 150 | 151 | .fusebox/ 152 | 153 | # DynamoDB Local files 154 | 155 | .dynamodb/ 156 | 157 | # TernJS port file 158 | 159 | .tern-port 160 | 161 | # Stores VSCode versions used for testing VSCode extensions 162 | 163 | .vscode-test 164 | 165 | # yarn v2 166 | 167 | .yarn/cache 168 | .yarn/unplugged 169 | .yarn/build-state.yml 170 | .yarn/install-state.gz 171 | .pnp.\* 172 | 173 | # IntelliJ based IDEs 174 | .idea 175 | 176 | .DS_Store 177 | .turbo 178 | 179 | # build output 180 | dist/ 181 | 182 | # generated types 183 | .astro/ 184 | 185 | # dependencies 186 | node_modules/ 187 | 188 | # logs 189 | npm-debug.log* 190 | yarn-debug.log* 191 | yarn-error.log* 192 | pnpm-debug.log* 193 | 194 | # environment variables 195 | .env 196 | .env.production 197 | 198 | # macOS-specific files 199 | .DS_Store 200 | -------------------------------------------------------------------------------- /packages/core/src/schema.ts: -------------------------------------------------------------------------------- 1 | import { 2 | text, 3 | sqliteTable, 4 | integer, 5 | unique, 6 | real, 7 | index, 8 | } from "drizzle-orm/sqlite-core"; 9 | import { JsonObject } from "./types.js"; 10 | 11 | // int("updated_at", { mode: "timestamp" }), 12 | // const timestamp = customType<{ 13 | // data: Date; 14 | // driverData: string; 15 | // }>({ 16 | // dataType() { 17 | // return "text"; 18 | // }, 19 | // fromDriver(value: string): Date { 20 | // return new Date(value); 21 | // }, 22 | // }); 23 | 24 | // TODO: better types for JSON columns https://github.com/drizzle-team/drizzle-orm/discussions/386 25 | export const document = sqliteTable( 26 | "documents", 27 | { 28 | // can use Inode number here 29 | id: integer("id").primaryKey({ autoIncrement: true }), 30 | path: text("path").notNull(), 31 | // content 32 | frontmatter: text("frontmatter", { mode: "json" }) 33 | .$type() 34 | .notNull(), 35 | ast: text("ast", { mode: "json" }).notNull(), 36 | // markdown: text("markdown").notNull(), 37 | // to avoide reparse 38 | // file modification time https://man7.org/linux/man-pages/man3/stat.3type.html 39 | mtime: real("mtime").notNull(), 40 | // file hash 41 | checksum: text("checksum").notNull(), 42 | cfghash: integer("cfghash").default(0).notNull(), 43 | // for link resolution 44 | slug: text("slug").notNull(), 45 | url: text("url").notNull(), 46 | // title: 
text("title"), 47 | updated_at: integer("updated_at").default(0).notNull(), 48 | revision: integer("revision").default(0).notNull(), 49 | }, 50 | (t) => ({ 51 | path: unique("documents_path").on(t.path), 52 | slug: index("documents_slug").on(t.slug), 53 | url: index("documents_url").on(t.url), 54 | }) 55 | ); 56 | 57 | export type DocumentProps = typeof document.$inferSelect; 58 | 59 | export const link = sqliteTable( 60 | "links", 61 | { 62 | id: integer("id").primaryKey({ autoIncrement: true }), 63 | // edge for directed graph 64 | from: text("from").notNull(), 65 | to: text("to"), 66 | /** 67 | * Options to uniqlly identify link in the document 68 | * - **path + start.offset** 69 | * - autoincrement 70 | * - uuid-like (random) 71 | * - path + start.column + start.line 72 | */ 73 | start: integer("start").notNull(), 74 | to_slug: text("to_slug"), 75 | to_url: text("to_url"), 76 | to_path: text("to_path"), 77 | to_anchor: text("to_anchor"), 78 | label: text("label"), 79 | line: integer("line").notNull(), 80 | column: integer("column").notNull(), 81 | }, 82 | (t) => ({ 83 | from_start: unique("links_from_start").on(t.from, t.start), 84 | to_slug: index("links_to_slug").on(t.to_slug), 85 | to_url: index("links_to_url").on(t.to_url), 86 | to_path: index("links_to_path").on(t.to_path), 87 | }) 88 | ); 89 | 90 | export type LinkProps = typeof link.$inferSelect; 91 | 92 | export const task = sqliteTable( 93 | "tasks", 94 | { 95 | id: integer("id").primaryKey({ autoIncrement: true }), 96 | from: text("from").notNull(), 97 | /** 98 | * Options to uniqlly identify link in the document 99 | * - **path + start.offset** 100 | * - autoincrement 101 | * - uuid-like (random) 102 | * - path + start.column + start.line 103 | */ 104 | start: integer("start").notNull(), 105 | ast: text("ast", { mode: "json" }).notNull(), 106 | checked: integer("checked", { mode: "boolean" }).notNull(), 107 | line: integer("line").notNull(), 108 | column: integer("column").notNull(), 109 | }, 110 | (t) => ({ 111 | from_start: unique("tasks_from_start").on(t.from, t.start), 112 | }) 113 | ); 114 | 115 | export type TaskProps = typeof task.$inferSelect; 116 | -------------------------------------------------------------------------------- /packages/remark-wiki-link/README.md: -------------------------------------------------------------------------------- 1 | # @braindb/remark-wiki-link 2 | 3 | fork of https://github.com/landakram/remark-wiki-link 4 | 5 | --- 6 | 7 | This [remark](https://github.com/wooorm/remark) plugin parses and renders `[[Wiki Links]]`. 8 | 9 | - Parse wiki-style links and render them as anchors 10 | - Differentiate between "new" and "existing" wiki links by giving the parser a list of existing permalinks 11 | - Parse aliased wiki links i.e `[[Real Page|Page Alias]]` 12 | 13 | Looking for lower level packages? Check out [@braindb/mdast-util-wiki-link](https://github.com/stereobooster/braindb/tree/main/packages/mdast-util-wiki-link) for working with ASTs and [@braindb/micromark-extension-wiki-link](https://github.com/stereobooster/braindb/tree/main/packages/micromark-extension-wiki-link) for working with tokens. 14 | 15 | ## Usage 16 | 17 | ```javascript 18 | const unified = require("unified"); 19 | const markdown = require("remark-parse"); 20 | const remarkWikiLink = require("@braindb/remark-wiki-link"); 21 | 22 | let processor = unified().use(markdown, { gfm: true }).use(remarkWikiLink); 23 | ``` 24 | 25 | When the processor is run, wiki links will be parsed to a `wikiLink` node. 
26 | 27 | If we have this markdown string: 28 | 29 | ```md 30 | [[Test Page]] 31 | ``` 32 | 33 | ```javascript 34 | { 35 | value: 'Test Page', 36 | data: { 37 | alias: 'Test Page', 38 | permalink: 'Test Page', 39 | hName: 'a', 40 | hProperties: { 41 | href: 'Test Page', 42 | }, 43 | hChildren: [{ 44 | type: 'text', 45 | value: 'Test Page' 46 | }] 47 | } 48 | } 49 | ``` 50 | 51 | - `value`: slug of the page 52 | - `data.alias`: The display name for this link 53 | - `data.permalink`: The permalink for this page. This permalink is computed from `node.value` using `options.linkResolver`, which can be passed in when initializing the plugin. 54 | - `data.hProperties.href`: `href` value for the rendered `a`. This `href` is computed using `options.hrefTemplate`. 55 | 56 | The `hName` and other `h` fields provide compatibility with [`rehype`](https://github.com/rehypejs/rehype). 57 | 58 | When rendered to HTML, we get: 59 | 60 | ```html 61 | Test Page 62 | ``` 63 | 64 | ### Configuration options 65 | 66 | - `options.linkResolver (pageName: string) -> string`: A function that maps a page name to an array of possible permalinks. These possible permalinks are cross-referenced with `options.permalinks` to determine whether a page exists. If a page doesn't exist, the first element of the array is considered the permalink. 67 | 68 | The default `linkResolver` is: 69 | 70 | ```javascript 71 | (name) => name; 72 | ``` 73 | 74 | - `options.linkTemplate ({ slug: string, permalink: string, alias: string | null }) -> HAST`: A function that generates "HAST" for link. 75 | 76 | The default `linkTemplate` is: 77 | 78 | ```js 79 | function defaultLinkTemplate({ slug, permalink, alias }) { 80 | return { 81 | hName: "a", 82 | hProperties: { href: permalink == null ? slug : permalink }, 83 | hChildren: [{ type: "text", value: alias == null ? slug : alias }], 84 | }; 85 | } 86 | ``` 87 | 88 | - `options.aliasDivider`: a string for `aliased pages`. . Defaults to `"|"`. 
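For example, a minimal sketch of passing these options when initializing the plugin (the resolver and divider values are illustrative):

```javascript
let processor = unified()
  .use(markdown, { gfm: true })
  .use(remarkWikiLink, {
    linkResolver: (name) => name.replace(/ /g, "_").toLowerCase(),
    aliasDivider: "::",
  });
```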
89 | 90 | #### Aliasing pages 91 | 92 | Aliased pages are supported with the following markdown syntax: 93 | 94 | ```md 95 | [[Real Page|Page Alias]] 96 | ``` 97 | 98 | The AST node will look like: 99 | 100 | ```javascript 101 | { 102 | value: 'Real Page', 103 | data: { 104 | alias: 'Page Alias', 105 | permalink: 'Real Page', 106 | hName: 'a', 107 | hProperties: { 108 | href: 'Real Page' 109 | }, 110 | hChildren: [{ 111 | type: 'text', 112 | value: 'Page Alias' 113 | }] 114 | } 115 | } 116 | ``` 117 | 118 | And will produce this HTML when rendered: 119 | 120 | ```html 121 | Page Alias 122 | ``` 123 | -------------------------------------------------------------------------------- /packages/astro/src/index.ts: -------------------------------------------------------------------------------- 1 | import { defineIntegration } from "astro-integration-kit"; 2 | import { slug as githubSlug } from "github-slugger"; 3 | import path from "node:path"; 4 | import process from "node:process"; 5 | import { BrainDB, type BrainDBOptionsIn } from "@braindb/core"; 6 | import { remarkWikiLink } from "./remarkWikiLink.js"; 7 | import { z } from "astro/zod"; 8 | import wikiLinkPlugin from "@braindb/remark-wiki-link"; 9 | 10 | const brainDBOptionsIn = z 11 | .object({ 12 | dbPath: z.string(), 13 | cache: z.boolean(), 14 | url: z.function( 15 | z.tuple([z.string(), z.record(z.string(), z.any())]), 16 | z.string() 17 | ), 18 | slug: z.function( 19 | z.tuple([z.string(), z.record(z.string(), z.any())]), 20 | z.string() 21 | ), 22 | root: z.string(), 23 | source: z.string(), 24 | git: z.boolean(), 25 | storeMarkdown: z.boolean(), 26 | // need to pass false in order to disable built-in remarkWikiLink plugin 27 | remarkWikiLink: z.boolean(), 28 | }) 29 | .partial(); 30 | 31 | // slug implementation according to Astro 32 | // see astro/packages/astro/src/content/utils.ts 33 | export const generateSlug = (filePath: string) => { 34 | const withoutFileExt = filePath.replace( 35 | new RegExp(path.extname(filePath) + "$"), 36 | "" 37 | ); 38 | const rawSlugSegments = withoutFileExt.split(path.sep); 39 | const slug = rawSlugSegments 40 | // Slugify each route segment to handle capitalization and spaces. 41 | // Note: using `slug` instead of `new Slugger()` means no slug deduping. 42 | .map((segment) => githubSlug(segment)) 43 | .join("/") 44 | .replace(/\/index$/, ""); 45 | 46 | return slug; 47 | }; 48 | 49 | const slugToUrl = (slug: string) => { 50 | if (!slug.startsWith("/")) slug = `/${slug}`; 51 | if (!slug.endsWith("/")) slug = `${slug}/`; 52 | return slug; 53 | }; 54 | 55 | const defaultBrainDBOptions: BrainDBOptionsIn = { 56 | root: path.resolve(process.cwd(), "src/content/docs"), 57 | url: (filePath, frontmatter) => 58 | frontmatter.url 59 | ? String(frontmatter.url) 60 | : slugToUrl( 61 | frontmatter.slug ? 
String(frontmatter.slug) : generateSlug(filePath) 62 | ), 63 | git: true, 64 | }; 65 | 66 | let bdbInstance = new BrainDB(defaultBrainDBOptions); 67 | 68 | export function getBrainDb() { 69 | try { 70 | bdbInstance.start(true); 71 | } catch {} 72 | return bdbInstance; 73 | } 74 | 75 | export const brainDbAstro = defineIntegration({ 76 | name: "@braindb/astro", 77 | optionsSchema: brainDBOptionsIn.optional(), 78 | setup({ options }) { 79 | if (options) { 80 | bdbInstance.stop(); 81 | // @ts-expect-error tsup is getting on my nerves 82 | bdbInstance = new BrainDB({ ...defaultBrainDBOptions, ...options }); 83 | } 84 | 85 | return { 86 | hooks: { 87 | "astro:config:setup": async ({ config, updateConfig }) => { 88 | await getBrainDb().ready(); 89 | 90 | const newConfig = { 91 | markdown: { 92 | remarkPlugins: 93 | options?.remarkWikiLink === false 94 | ? config.markdown.remarkPlugins 95 | : [ 96 | ...(config.markdown.remarkPlugins || []), 97 | // I had to use wikiLinkPlugin without resolver 98 | // and puted resolver in separate plugin using visit 99 | // to support wikilinks in remark-dataview output 100 | wikiLinkPlugin, 101 | [remarkWikiLink, { bdb: getBrainDb() }], 102 | ], 103 | }, 104 | vite: { 105 | // https://github.com/vitejs/vite/issues/14289 106 | optimizeDeps: { 107 | exclude: [ 108 | ...(config.vite.optimizeDeps?.exclude || []), 109 | "@braindb", 110 | // "fsevents", 111 | // "@node-rs", 112 | // "@napi-rs", 113 | ], 114 | }, 115 | }, 116 | }; 117 | updateConfig(newConfig); 118 | }, 119 | }, 120 | }; 121 | }, 122 | }); 123 | 124 | export default brainDbAstro; 125 | -------------------------------------------------------------------------------- /packages/mdast-util-wiki-link/README.md: -------------------------------------------------------------------------------- 1 | # @braindb/mdast-util-wiki-link 2 | 3 | fork of https://github.com/landakram/mdast-util-wiki-link 4 | 5 | --- 6 | 7 | Extension for [`mdast-util-from-markdown`](https://github.com/syntax-tree/mdast-util-from-markdown) and 8 | [`mdast-util-to-markdown`](https://github.com/syntax-tree/mdast-util-to-markdown) to support `[[Wiki Links]]`. 9 | 10 | - Parse wiki-style links and render them as anchors 11 | - Differentiate between "new" and "existing" wiki links by giving the parser a list of existing permalinks 12 | - Parse aliased wiki links i.e `[[Real Page|Page Alias]]` 13 | 14 | Using [remark](https://github.com/remarkjs/remark)? You might want to use 15 | [`@braindb/remark-wiki-link`](https://github.com/stereobooster/braindb/tree/main/packages/remark-wiki-link) instead of using this package directly. 16 | 17 | ## Usage 18 | 19 | ### Markdown to AST 20 | 21 | ```javascript 22 | import fromMarkdown from "mdast-util-from-markdown"; 23 | import { syntax } from "@braindb/micromark-extension-wiki-link"; 24 | import * as wikiLink from "@braindb/mdast-util-wiki-link"; 25 | 26 | let ast = fromMarkdown("[[Test Page]]", { 27 | extensions: [syntax()], 28 | mdastExtensions: [wikiLink.fromMarkdown()], 29 | }); 30 | ``` 31 | 32 | The AST node will look like this: 33 | 34 | ```javascript 35 | { 36 | value: 'Test Page', 37 | data: { 38 | alias: 'Test Page', 39 | permalink: 'Test Page', 40 | hName: 'a', 41 | hProperties: { 42 | href: 'Test Page', 43 | }, 44 | hChildren: [{ 45 | type: 'text', 46 | value: 'Test Page' 47 | }] 48 | } 49 | } 50 | ``` 51 | 52 | - `value`: slug of the page 53 | - `data.alias`: The display name for this link 54 | - `data.permalink`: The permalink for this page. 
This permalink is computed from `node.value` using `options.linkResolver`, which can be passed in when initializing the plugin. 55 | - `data.hProperties.href`: `href` value for the rendered `a`. This `href` is computed using `options.hrefTemplate`. 56 | 57 | The `hName` and other `h` fields provide compatibility with [`rehype`](https://github.com/rehypejs/rehype). 58 | 59 | ### AST to Markdown 60 | 61 | Taking the `ast` from the prior example, let's go back to markdown: 62 | 63 | ```javascript 64 | import { fromMarkdown } from "mdast-util-from-markdown"; 65 | import * as wikiLink from "@braindb/mdast-util-wiki-link"; 66 | 67 | let markdownString = toMarkdown(ast, { 68 | extensions: [wikiLink.toMarkdown()], 69 | }).trim(); 70 | console.log(markdownString); 71 | // [[Wiki Link]] 72 | ``` 73 | 74 | ### Configuration options 75 | 76 | Both `fromMarkdown` and `toMarkdown` accept configuration as an object. 77 | 78 | For example, one may configure `fromMarkdown` like so: 79 | 80 | ```javascript 81 | let ast = fromMarkdown("[[Test Page]]", { 82 | extensions: [syntax()], 83 | mdastExtensions: [ 84 | wikiLink.fromMarkdown({ 85 | linkResolver: (x) => name.replace(/ /g, "_").toLowerCase(), 86 | }), 87 | ], // <-- 88 | }); 89 | ``` 90 | 91 | #### `fromMarkdown` 92 | 93 | - `options.linkResolver (pageName: string) -> string`: A function that maps a page name to an array of possible permalinks. These possible permalinks are cross-referenced with `options.permalinks` to determine whether a page exists. If a page doesn't exist, the first element of the array is considered the permalink. 94 | 95 | The default `linkResolver` is: 96 | 97 | ```javascript 98 | (name) => name; 99 | ``` 100 | 101 | - `options.linkTemplate ({ slug: string, permalink: string, alias: string | null }) -> HAST`: A function that generates "HAST" for link. 102 | 103 | The default `linkTemplate` is: 104 | 105 | ```js 106 | function defaultLinkTemplate({ slug, permalink, alias }) { 107 | return { 108 | hName: "a", 109 | hProperties: { href: permalink == null ? slug : permalink }, 110 | hChildren: [{ type: "text", value: alias == null ? slug : alias }], 111 | }; 112 | } 113 | ``` 114 | 115 | #### `toMarkdown` 116 | 117 | - `options.aliasDivider string`: a string to be used as the divider for aliases. See the section below on [Aliasing pages](#aliasing-pages). Defaults to `"|"`. 
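For example, a sketch of serializing with a custom divider (the divider value is illustrative):

```javascript
import { toMarkdown } from "mdast-util-to-markdown";
import * as wikiLink from "@braindb/mdast-util-wiki-link";

let markdownString = toMarkdown(ast, {
  extensions: [wikiLink.toMarkdown({ aliasDivider: "::" })],
}).trim();
// an aliased node would serialize as [[Real Page::Page Alias]]
```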
118 | 119 | ### Aliasing pages 120 | 121 | Aliased pages are supported with the following markdown syntax: 122 | 123 | ```md 124 | [[Real Page|Page Alias]] 125 | ``` 126 | 127 | And will produce this HTML when rendered: 128 | 129 | ```html 130 | Page Alias 131 | ``` 132 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/metadata.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Metadata 3 | draft: true 4 | --- 5 | 6 | Metadata can come from frontmatter or can be computed 7 | 8 | **Content**: 9 | 10 | | | frontmatter | auto generated | usage | 11 | | ------------ | ------------- | --------------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ | 12 | | Title | `title` | File name | ``, `og:title`, `twitter:title`, social-images-autogenration | 13 | | Description | `description` | Text summarization | `<meta name="description">`, `og:description`, `twitter:description`, social-images-autogenration | 14 | | Image | ? | social-images-autogenration | `og:image`, `twitter:image`, cover image for post, small image for list | 15 | | Slug | `slug` | File name or path | wikilinks, url generation | 16 | | Last Updated | `lastUpdated` | Based on git | "Last updated" on on page or in list, "Recently changed" page, [schema](https://schema.org/dateModified) (`dateModified`), [sitemap](https://docs.astro.build/en/guides/integrations-guide/sitemap/) (`lastmod`), search | 17 | | Tags | `tags` | - | Tag list page, Tags on page or in list, search | 18 | 19 | **Functional**: `tableOfContents`, `banner`, `pagefind`, `draft`, `sidebar` 20 | 21 | **Other**: 22 | 23 | - It should be possible to assign metadata for each page and/or tag. For example, it can be `color` or `icon`. Then this metadata can be reused: 24 | - color and icon for nodes in in content graph 25 | - icons can be shown for tags 26 | - icons can be shown for pages in sidebar and near link to them 27 | - page may have `stage` field (idea, draft, in progress, or finished). Right now I use emojis: 🧠, 🚷, 🚧. Similar idea: [Taxonomy of note types](https://www.ssp.sh/brain/taxonomy-of-note-types/) 28 | - some pages have `aka` field 29 | - for tags `color` can be generated automaticially: ["scale"](https://d3js.org/d3-scale-chromatic/categorical) or [color-hash](https://github.com/zenozeng/color-hash) 30 | - Functional metadata: 31 | - I can implement `alias` with "catch all" `[...path].astro` 32 | - but also need to exclude it from sitemap 33 | ```astro 34 | --- 35 | export function getStaticPaths() { 36 | return [{ params: { path: "test-redirect" } }]; 37 | } 38 | --- 39 | ``` 40 | - `description` 41 | - Can I generate it automatically? 
Let's say take `hast-util-to-string`, `trim()` and slcie first 150 chars 42 | - https://github.com/topics/text-summarization?l=javascript 43 | - exclude from `sitemap` 44 | 45 | ## Links 46 | 47 | - https://gohugo.io/content-management/front-matter/ 48 | - https://starlight.astro.build/reference/frontmatter/ 49 | - https://docusaurus.io/docs/api/plugins/@docusaurus/plugin-content-pages#markdown-front-matter 50 | - https://docusaurus.io/docs/api/plugins/@docusaurus/plugin-content-docs#markdown-front-matter 51 | - https://docusaurus.io/docs/api/plugins/@docusaurus/plugin-content-blog#markdown-front-matter 52 | - https://content.nuxt.com/usage/markdown#front-matter 53 | - https://jekyllrb.com/docs/front-matter/ 54 | - https://hexo.io/docs/front-matter 55 | - https://v1.vuepress.vuejs.org/guide/frontmatter.html#predefined-variables 56 | - https://v1.d.umijs.org/config/frontmatter 57 | - https://vitepress.dev/reference/frontmatter-config 58 | - https://docs.asciidoctor.org/asciidoc/latest/attributes/document-attributes-ref/ 59 | - https://www.mkdocs.org/user-guide/writing-your-docs/#meta-data 60 | -------------------------------------------------------------------------------- /experiments/demo-astro/src/components/graphRenderer.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * Graphology SVG Renderer 3 | * 4 | * Copy-paste from original package to fix some things in place 5 | */ 6 | import helpers from "graphology-svg/helpers"; 7 | import defaults from "graphology-svg/defaults"; 8 | import line from "graphology-svg/components/edges/line"; 9 | // import circle from "graphology-svg/components/nodes/circle"; 10 | // import nodeLabelDefault from "graphology-svg/components/nodeLabels/default"; 11 | 12 | export const { DEFAULTS } = defaults; 13 | 14 | function nodeReducer(settings: any, node: any, attr: any) { 15 | return { 16 | ...defaults.DEFAULT_NODE_REDUCER(settings, node, attr), 17 | url: attr.url, 18 | }; 19 | } 20 | 21 | function drawCircle(settings, data) { 22 | return `<a href="${helpers.escape(data.url)}"> 23 | <circle cx="${data.x}" cy="${data.y}" r="${data.size}" fill="${ 24 | data.color 25 | }" /> 26 | </a>`; 27 | } 28 | 29 | function drawLabel(settings, data) { 30 | return `<a href="${helpers.escape(data.url)}"> 31 | <text x="${data.x + data.size * 1.1}" y="${ 32 | data.y + data.size / 4 33 | }" font-family="${helpers.escape( 34 | settings.font || "sans-serif" 35 | )}" font-size="${data.size}">${helpers.escape(data.label)}</text> 36 | </a>`; 37 | } 38 | 39 | const components = { 40 | nodes: { 41 | circle: drawCircle, 42 | }, 43 | edges: { 44 | line, 45 | }, 46 | nodeLabels: { 47 | default: drawLabel, 48 | }, 49 | }; 50 | 51 | export function renderer(graph: any, settings: any) { 52 | // Reducing nodes 53 | const nodeData = reduceNodes(graph, settings); 54 | 55 | // Drawing edges 56 | const edgesStrings = []; 57 | graph.forEachEdge(function (edge, attr, source, target) { 58 | // Reducing edge 59 | if (typeof settings.edges.reducer === "function") 60 | attr = settings.edges.reducer(settings, edge, attr); 61 | 62 | attr = defaults.DEFAULT_EDGE_REDUCER(settings, edge, attr); 63 | 64 | edgesStrings.push( 65 | components.edges[attr.type]( 66 | settings, 67 | attr, 68 | nodeData[source], 69 | nodeData[target] 70 | ) 71 | ); 72 | }); 73 | 74 | // Drawing nodes and labels 75 | // TODO: should we draw in size order to avoid weird overlaps? Should we run noverlap? 
76 | const nodesStrings = []; 77 | const nodeLabelsStrings = []; 78 | let k; 79 | for (k in nodeData) { 80 | nodesStrings.push( 81 | components.nodes[nodeData[k].type](settings, nodeData[k]) 82 | ); 83 | nodeLabelsStrings.push( 84 | components.nodeLabels[nodeData[k].labelType](settings, nodeData[k]) 85 | ); 86 | } 87 | 88 | return ( 89 | '<?xml version="1.0" encoding="utf-8"?>' + 90 | '<svg width="' + 91 | settings.width + 92 | '" height=" ' + 93 | settings.height + 94 | '" ' + 95 | 'viewBox="0 0 ' + 96 | settings.width + 97 | " " + 98 | settings.height + 99 | '" ' + 100 | 'version="1.1" ' + 101 | 'xmlns="http://www.w3.org/2000/svg">' + 102 | "<g>" + 103 | edgesStrings.join("") + 104 | "</g>" + 105 | "<g>" + 106 | nodesStrings.join("") + 107 | "</g>" + 108 | "<g>" + 109 | nodeLabelsStrings.join("") + 110 | "</g>" + 111 | "</svg>" 112 | ); 113 | } 114 | 115 | function reduceNodes(graph, settings) { 116 | const width = settings.width, 117 | height = settings.height; 118 | 119 | let xBarycenter = 0, 120 | yBarycenter = 0, 121 | totalWeight = 0; 122 | 123 | const data = {}; 124 | 125 | graph.forEachNode(function (node, attr) { 126 | // Applying user's reducing logic 127 | if (typeof settings.nodes.reducer === "function") 128 | attr = settings.nodes.reducer(settings, node, attr); 129 | 130 | attr = nodeReducer(settings, node, attr); 131 | data[node] = attr; 132 | 133 | // Computing rescaling items 134 | xBarycenter += attr.size * attr.x; 135 | yBarycenter += attr.size * attr.y; 136 | totalWeight += attr.size; 137 | }); 138 | 139 | xBarycenter /= totalWeight; 140 | yBarycenter /= totalWeight; 141 | 142 | let d, ratio, n; 143 | let dMax = -Infinity; 144 | 145 | let k; 146 | 147 | for (k in data) { 148 | n = data[k]; 149 | d = Math.pow(n.x - xBarycenter, 2) + Math.pow(n.y - yBarycenter, 2); 150 | 151 | if (d > dMax) dMax = d; 152 | } 153 | 154 | ratio = 155 | (Math.min(width, height) - 2 * settings.margin) / (2 * Math.sqrt(dMax)); 156 | 157 | for (k in data) { 158 | n = data[k]; 159 | 160 | n.x = width / 2 + (n.x - xBarycenter) * ratio; 161 | n.y = height / 2 + (n.y - yBarycenter) * ratio; 162 | 163 | n.size *= ratio; // TODO: keep? 
164 | } 165 | 166 | return data; 167 | } 168 | -------------------------------------------------------------------------------- /packages/micromark-extension-wiki-link/test/micromark.test.js: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { micromark } from "micromark"; 3 | import { syntax, html } from "../src/index.ts"; 4 | 5 | describe("micromark-extension-wiki-link", () => { 6 | it("parses a wiki link that has a matching permalink", () => { 7 | let serialized = micromark("[[Wiki Link]]", { 8 | extensions: [syntax()], 9 | htmlExtensions: [html({ permalinks: ["wiki_link"] })], 10 | }); 11 | 12 | expect(serialized).toEqual( 13 | '<p><a href="#/page/wiki_link" class="internal">Wiki Link</a></p>' 14 | ); 15 | }); 16 | 17 | it("parses a wiki link that has no matching permalink", () => { 18 | let serialized = micromark("[[Wiki Link]]", { 19 | extensions: [syntax()], 20 | htmlExtensions: [html()], 21 | }); 22 | 23 | expect(serialized).toEqual( 24 | '<p><a href="#/page/wiki_link" class="internal new">Wiki Link</a></p>' 25 | ); 26 | }); 27 | 28 | it("handles wiki links with aliases", () => { 29 | let serialized = micromark("[[Real Page|Page Alias]]", { 30 | extensions: [syntax()], 31 | htmlExtensions: [html()], 32 | }); 33 | 34 | expect(serialized).toEqual( 35 | '<p><a href="#/page/real_page" class="internal new">Page Alias</a></p>' 36 | ); 37 | }); 38 | 39 | it("handles wiki links with a custom alias divider", () => { 40 | let serialized = micromark("[[Real Page||Page Alias]]", { 41 | extensions: [syntax({ aliasDivider: "||" })], 42 | htmlExtensions: [html()], 43 | }); 44 | 45 | expect(serialized).toEqual( 46 | '<p><a href="#/page/real_page" class="internal new">Page Alias</a></p>' 47 | ); 48 | }); 49 | 50 | describe("open wiki links", () => { 51 | it("handles open wiki links", () => { 52 | let serialized = micromark("t[[\nt", { 53 | extensions: [syntax()], 54 | htmlExtensions: [html()], 55 | }); 56 | 57 | expect(serialized).toEqual("<p>t[[\nt</p>"); 58 | }); 59 | 60 | it("handles open wiki links at end of file", () => { 61 | let serialized = micromark("t [[", { 62 | extensions: [syntax()], 63 | htmlExtensions: [html()], 64 | }); 65 | 66 | expect(serialized).toEqual("<p>t [[</p>"); 67 | }); 68 | 69 | it("handles open wiki links with partial data", () => { 70 | let serialized = micromark("t [[tt\nt", { 71 | extensions: [syntax()], 72 | htmlExtensions: [html()], 73 | }); 74 | 75 | expect(serialized).toEqual("<p>t [[tt\nt</p>"); 76 | }); 77 | 78 | it("handles open wiki links with partial alias divider", () => { 79 | let serialized = micromark("[[t|\nt", { 80 | extensions: [syntax({ aliasDivider: "||" })], 81 | htmlExtensions: [html()], 82 | }); 83 | 84 | expect(serialized).toEqual("<p>[[t|\nt</p>"); 85 | }); 86 | 87 | it("handles open wiki links with partial alias", () => { 88 | let serialized = micromark("[[t:\nt", { 89 | extensions: [syntax()], 90 | htmlExtensions: [html()], 91 | }); 92 | 93 | expect(serialized).toEqual("<p>[[t:\nt</p>"); 94 | }); 95 | }); 96 | 97 | describe("configuration options", () => { 98 | it("uses pageResolver", () => { 99 | let identity = (name) => [name]; 100 | 101 | let serialized = micromark("[[A Page]]", { 102 | extensions: [syntax()], 103 | htmlExtensions: [ 104 | html({ 105 | pageResolver: identity, 106 | permalinks: ["A Page"], 107 | }), 108 | ], 109 | }); 110 | 111 | expect(serialized).toEqual( 112 | '<p><a href="#/page/A Page" class="internal">A Page</a></p>' 113 | ); 
114 | }); 115 | 116 | it("uses newClassName", () => { 117 | let serialized = micromark("[[A Page]]", { 118 | extensions: [syntax()], 119 | htmlExtensions: [ 120 | html({ 121 | newClassName: "new_page", 122 | }), 123 | ], 124 | }); 125 | 126 | expect(serialized).toEqual( 127 | '<p><a href="#/page/a_page" class="internal new_page">A Page</a></p>' 128 | ); 129 | }); 130 | 131 | it("uses hrefTemplate", () => { 132 | let hrefTemplate = (permalink) => permalink; 133 | let serialized = micromark("[[A Page]]", { 134 | extensions: [syntax()], 135 | htmlExtensions: [ 136 | html({ 137 | hrefTemplate: hrefTemplate, 138 | }), 139 | ], 140 | }); 141 | 142 | expect(serialized).toEqual( 143 | '<p><a href="a_page" class="internal new">A Page</a></p>' 144 | ); 145 | }); 146 | 147 | it("uses wikiLinkClassName", () => { 148 | let serialized = micromark("[[A Page]]", { 149 | extensions: [syntax()], 150 | htmlExtensions: [ 151 | html({ 152 | wikiLinkClassName: "wiki_link", 153 | permalinks: ["a_page"], 154 | }), 155 | ], 156 | }); 157 | 158 | expect(serialized).toEqual( 159 | '<p><a href="#/page/a_page" class="wiki_link">A Page</a></p>' 160 | ); 161 | }); 162 | }); 163 | }); 164 | -------------------------------------------------------------------------------- /packages/micromark-extension-wiki-link/src/syntax.ts: -------------------------------------------------------------------------------- 1 | import { type Tokenizer, type Code } from "micromark-util-types"; 2 | 3 | export type WikiLinkSyntaxOptions = { 4 | aliasDivider?: string; 5 | }; 6 | 7 | const codes = { 8 | horizontalTab: -2, 9 | virtualSpace: -1, 10 | nul: 0, 11 | eof: null, 12 | space: 32, 13 | }; 14 | 15 | function markdownLineEndingOrSpace(code: Code) { 16 | return code !== codes.eof && (code < codes.nul || code === codes.space); 17 | } 18 | 19 | function markdownLineEnding(code: Code) { 20 | return code !== codes.eof && (code === null || code < codes.horizontalTab); 21 | } 22 | 23 | export function syntax(opts: WikiLinkSyntaxOptions = {}) { 24 | const aliasDivider = opts.aliasDivider || "|"; 25 | 26 | const aliasMarker = aliasDivider; 27 | const startMarker = "[["; 28 | const endMarker = "]]"; 29 | 30 | const tokenize: Tokenizer = (effects, ok, nok) => { 31 | let data: boolean; 32 | let alias: boolean; 33 | 34 | let aliasCursor = 0; 35 | let startMarkerCursor = 0; 36 | let endMarkerCursor = 0; 37 | 38 | return start; 39 | 40 | function start(code: Code) { 41 | if (code !== startMarker.charCodeAt(startMarkerCursor)) return nok(code); 42 | 43 | // @ts-expect-error 44 | effects.enter("wikiLink"); 45 | // @ts-expect-error 46 | effects.enter("wikiLinkMarker"); 47 | 48 | return consumeStart(code); 49 | } 50 | 51 | function consumeStart(code: Code) { 52 | if (startMarkerCursor === startMarker.length) { 53 | // @ts-expect-error 54 | effects.exit("wikiLinkMarker"); 55 | return consumeData(code); 56 | } 57 | 58 | if (code !== startMarker.charCodeAt(startMarkerCursor)) { 59 | return nok(code); 60 | } 61 | 62 | effects.consume(code); 63 | startMarkerCursor++; 64 | 65 | return consumeStart; 66 | } 67 | 68 | function consumeData(code: Code) { 69 | if (markdownLineEnding(code) || code === codes.eof) { 70 | return nok(code); 71 | } 72 | 73 | // @ts-expect-error 74 | effects.enter("wikiLinkData"); 75 | // @ts-expect-error 76 | effects.enter("wikiLinkTarget"); 77 | return consumeTarget(code); 78 | } 79 | 80 | function consumeTarget(code: Code) { 81 | if (code === aliasMarker.charCodeAt(aliasCursor)) { 82 | if (!data) return nok(code); 83 | // @ts-expect-error 84 
| effects.exit("wikiLinkTarget"); 85 | // @ts-expect-error 86 | effects.enter("wikiLinkAliasMarker"); 87 | return consumeAliasMarker(code); 88 | } 89 | 90 | if (code === endMarker.charCodeAt(endMarkerCursor)) { 91 | if (!data) return nok(code); 92 | // @ts-expect-error 93 | effects.exit("wikiLinkTarget"); 94 | // @ts-expect-error 95 | effects.exit("wikiLinkData"); 96 | // @ts-expect-error 97 | effects.enter("wikiLinkMarker"); 98 | return consumeEnd(code); 99 | } 100 | 101 | if (markdownLineEnding(code) || code === codes.eof) { 102 | return nok(code); 103 | } 104 | 105 | if (!markdownLineEndingOrSpace(code)) { 106 | data = true; 107 | } 108 | 109 | effects.consume(code); 110 | 111 | return consumeTarget; 112 | } 113 | 114 | function consumeAliasMarker(code: Code) { 115 | if (aliasCursor === aliasMarker.length) { 116 | // @ts-expect-error 117 | effects.exit("wikiLinkAliasMarker"); 118 | // @ts-expect-error 119 | effects.enter("wikiLinkAlias"); 120 | return consumeAlias(code); 121 | } 122 | 123 | if (code !== aliasMarker.charCodeAt(aliasCursor)) { 124 | return nok(code); 125 | } 126 | 127 | effects.consume(code); 128 | aliasCursor++; 129 | 130 | return consumeAliasMarker; 131 | } 132 | 133 | function consumeAlias(code: Code) { 134 | if (code === endMarker.charCodeAt(endMarkerCursor)) { 135 | if (!alias) return nok(code); 136 | // @ts-expect-error 137 | effects.exit("wikiLinkAlias"); 138 | // @ts-expect-error 139 | effects.exit("wikiLinkData"); 140 | // @ts-expect-error 141 | effects.enter("wikiLinkMarker"); 142 | return consumeEnd(code); 143 | } 144 | 145 | if (markdownLineEnding(code) || code === codes.eof) { 146 | return nok(code); 147 | } 148 | 149 | if (!markdownLineEndingOrSpace(code)) { 150 | alias = true; 151 | } 152 | 153 | effects.consume(code); 154 | 155 | return consumeAlias; 156 | } 157 | 158 | function consumeEnd(code: Code) { 159 | if (endMarkerCursor === endMarker.length) { 160 | // @ts-expect-error 161 | effects.exit("wikiLinkMarker"); 162 | // @ts-expect-error 163 | effects.exit("wikiLink"); 164 | return ok(code); 165 | } 166 | 167 | if (code !== endMarker.charCodeAt(endMarkerCursor)) { 168 | return nok(code); 169 | } 170 | 171 | effects.consume(code); 172 | endMarkerCursor++; 173 | 174 | return consumeEnd; 175 | } 176 | }; 177 | 178 | return { 179 | text: { 91: { tokenize: tokenize } }, // left square bracket 180 | }; 181 | } 182 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/frontmatter-schema.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Frontmatter schema 3 | --- 4 | 5 | Main source of inspiration - [Astro Content Collections](https://docs.astro.build/en/guides/content-collections/) 6 | 7 | Related: 8 | 9 | - https://contentlayer.dev/docs/sources/files/mapping-document-types-bf100a10 10 | 11 | Basic idea: 12 | 13 | - folder represents table/collection 14 | - folder name represents table name, or tag in disjoint union 15 | - file represents row/document/tupple 16 | - file path represents global unique ID 17 | - slug represents per table unique ID 18 | 19 | For each selected folder we can define schema. 20 | 21 | ## Error behaviour 22 | 23 | What to do if there is an error in one of documents? 
24 | 25 | - Report error 26 | - then either: 27 | - block whole database until error would be resolved 28 | - exclude document with error from the index (until error would be resolved) 29 | 30 | ## Extra fields 31 | 32 | Options: 33 | 34 | - allow fields that are not in schema 35 | - **remove all extra fields** that are not present in schema 36 | - optionally print warning when that happens 37 | 38 | ### Shall schema be optional? 39 | 40 | - On one side it is easier to get started if schema is optional 41 | - On the other side one can always use `z.any()` 42 | - Astro uses convention of one schema per top level folder. But in our case this is not convinient 43 | - Use one schema for root, with default type `undefined` and default schema `z.any()` 44 | - top level folder = type, but optional. If there is no config it would use default one (from root) 45 | - shall we allow arbitrary paths (glob) for schemas 46 | - what to do if more than one schema matches for file? Throw an error? 47 | 48 | ## Typescript 49 | 50 | [Runtime type validators](https://stereobooster.com/posts/runtime-type-validators/) 51 | 52 | Let's say we have collections: 53 | 54 | ```ts 55 | const A = defineCollection({ 56 | type: "A", 57 | schema: z.object({ 58 | a: z.string(), 59 | }), 60 | }); 61 | 62 | const B = defineCollection({ 63 | type: "B", 64 | schema: z.object({ 65 | b: z.number(), 66 | }), 67 | }); 68 | 69 | export const collections = { 70 | A: A, 71 | B: B, 72 | }; 73 | ``` 74 | 75 | Then `frontmatter()` can be either: 76 | 77 | ```ts 78 | { 79 | a?: string, 80 | b?: number, 81 | } 82 | ``` 83 | 84 | or: 85 | 86 | ```ts 87 | { 88 | A?: { a: string }, 89 | B?: { b: number }, 90 | } 91 | ``` 92 | 93 | or: 94 | 95 | ```ts 96 | { type: 'A', A: { a: string } } | 97 | { type: 'B', B: { b: number } } 98 | ``` 99 | 100 | theoretically it could also be: 101 | 102 | ```ts 103 | { type: 'A', a: string } | 104 | { type: 'B', b: number } 105 | ``` 106 | 107 | But in this case field we can't use field `type` in orginial schemas. 108 | 109 | ### Class 110 | 111 | ```ts 112 | class Document<T, F> { 113 | type(): T {} 114 | frontmatter(): F {} 115 | } 116 | ``` 117 | 118 | and default can be 119 | 120 | ```ts 121 | new Document<undefined, any>(); 122 | ``` 123 | 124 | ## SQL 125 | 126 | for trivial types: 127 | 128 | ``` 129 | query({"frontmatter.a": 1}) 130 | ``` 131 | 132 | would be translated to 133 | 134 | ```sql 135 | SELECT * FROM documents WHERE frontmatter ->> '$.a' = 1; 136 | ``` 137 | 138 | But for arrays we may need to use `json_each()`: 139 | 140 | ``` 141 | query({"frontmatter.tag": "js"}) 142 | ``` 143 | 144 | would be translated to something like this: 145 | 146 | ```sql 147 | SELECT * FROM documents, json_each(frontmatter -> '$.tag') WHERE 148 | WHERE json_each.value = 'js'; 149 | ``` 150 | 151 | Also need to check if there are issue with quoting. 152 | 153 | ## Indexing, sorting, faceting 154 | 155 | ```ts 156 | type A = { 157 | a: string; 158 | c: number; 159 | d: Array<number>; 160 | }; 161 | 162 | type B = { 163 | b: number; 164 | c: number; 165 | d: Record<string, string>; 166 | }; 167 | ``` 168 | 169 | - `a` and `b` - do not conflict, so we can simply put `NULL` (in SQL terms) or `undefined` (in JS terms) where they miss. Related: sorting can provide options for `NULL FIRST`, `NULL LAST` 170 | - `c` - present in both types, but do not conflict. So there are no issues 171 | - `d` - conflicts. Even if we can build index for this mixed type, it is unclear how to sort mixed types. 
We can convert everything to string and sort, but would it make sense? 172 | 173 | The simplest solution: **do not allow to filter, sort, facet by mixed type columns**. 174 | 175 | ### Indexes 176 | 177 | For inspiration: 178 | 179 | - [SQLite + Roaring Bitmaps](https://github.com/oldmoe/roaringlite) 180 | - [pgfaceting](https://github.com/cybertec-postgresql/pgfaceting) + [pg_roaringbitmap](https://github.com/ChenHuajun/pg_roaringbitmap) + [pglite](https://github.com/electric-sql/pglite/issues/18) 181 | - maybe https://www.sqlite.org/expridx.html 182 | - https://dadroit.com/blog/json-querying/#section-6-how-to-use-indexing-for-query-optimization-over-json-data-in-sqlite 183 | - [LiteIndex: Memory-Efficient Schema-Agnostic Indexing for JSON documents in SQLite](https://www.researchgate.net/publication/348889953_LiteIndex_Memory-Efficient_Schema-Agnostic_Indexing_for_JSON_documents_in_SQLite) 184 | 185 | ## Querying 186 | 187 | One of options for querying data is to expose [Drizzle ORM](https://orm.drizzle.team/docs/rqb). But this exposes implementation details. 188 | 189 | [[content-query]] 190 | -------------------------------------------------------------------------------- /packages/docs/src/content/docs/notes/todo.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: TODO 3 | draft: true 4 | --- 5 | 6 | ## Dataview 7 | 8 | See [[dataview]] 9 | 10 | ## Astro integration 11 | 12 | - [x] Astro integration 13 | - [x] Starlight plugin 14 | 15 | ## Core 16 | 17 | - [ ] maybe rename `documents` to `files` 18 | - `slug` to `name`? 19 | - [mddb table structure for comaprison](https://github.com/datopian/markdowndb/blob/main/src/lib/schema.ts): files, tags, file_tags, links, tasks 20 | - `from` column name is conflicting with SQL 21 | - Maybe rename to `start`/`end`? 22 | - Maybe rename to `source`/`target`? 23 | - `frontmatter ->> '$.some.thing'` is very long. Maybe rename to `fm`? 24 | - [ ] Extract Headings 25 | - either need to allow to pass remark plugins (`import { rehypeHeadingIds } from "@astrojs/markdown-remark";`) or pass slug function 26 | - https://github.com/withastro/astro/blob/main/packages/markdown/remark/src/rehype-collect-headings.ts 27 | - separate table 28 | - path to original document, anchor (aka id, aka slug), ast?, text, level 29 | - use for link resolution 30 | - [ ] [[frontmatter-schema]] 31 | - [ ] Extract tags? 32 | - what about other taxonomies? 33 | 34 | ## Components 35 | 36 | - [ ] notes 37 | - write note about [[metadata]] (frontmatter) in different SSG 38 | - sort and rewrite old notes 39 | - [x] core 40 | - [ ] print warning only once `Warning: Error: Failed to get commit for` 41 | - [ ] improve query interface 42 | - [ ] extract headers 43 | - [ ] frontmatter schema 44 | - [ ] allow to pass remark/rehype plugins? 
45 | - [ ] documentation 46 | - need to clearly describe what it is and how it can be used 47 | - document options and API 48 | - provide demos 49 | - check grammar 50 | - [ ] demos 51 | - obsidian 52 | - maybe Next.js 53 | 54 | ## Performance 55 | 56 | - [ ] SQL prepared statements 57 | - [ ] [[parallel|parallel processing]] 58 | - [incremental parsing](https://parsing.stereobooster.com/incremental-parsers/) 59 | - https://github.com/lezer-parser/markdown 60 | - https://github.com/tree-sitter-grammars/tree-sitter-markdown 61 | - https://github.com/ikatyang/tree-sitter-markdown 62 | 63 | ## Support Bun 64 | 65 | - use [`great.db`](https://www.npmjs.com/package/great.db) instead of `better-sqlite3` 66 | - use [`xxhash-wasm`](https://github.com/jungomi/xxhash-wasm) instead of `@node-rs/xxhash` 67 | - what about `@napi-rs/simple-git`? 68 | 69 | ## Ideas for later 70 | 71 | - [ ] CLI 72 | - [ ] respect `.gitignore` and output folder 73 | - https://www.npmjs.com/package/parse-gitignore 74 | - https://git-scm.com/docs/gitignore#_pattern_format 75 | - https://github.com/paulmillr/chokidar#path-filtering 76 | - [ ] copy other files (images) 77 | - [ ] LSP 78 | - https://github.com/microsoft/vscode-languageserver-node/tree/main/server 79 | - https://github.com/foambubble/foam/blob/master/packages/foam-vscode/src/core/model/graph.ts 80 | - https://github.com/lostintangent/wikilens/blob/main/src/store/actions.ts 81 | - https://github.com/kortina/vscode-markdown-notes 82 | - https://github.com/ImperiumMaximus/ts-lsp-client 83 | - [ ] GUI aka "studio" 84 | - maybe https://tauri.app/ 85 | - https://github.com/tauri-apps/tauri-plugin-fs-watch 86 | - for inspiration: [drizzle-studio](https://orm.drizzle.team/drizzle-studio/overview), [docsql](https://github.com/peterbe/docsql) 87 | - it can also be a local server instead of a desktop application 88 | - [ ] semantic wiki 89 | - https://github.com/wikibonsai/wikibonsai 90 | - [ ] graph database 91 | - graph query language, like Cypher or Datalog 92 | - graph algorithms, like PageRank or shortest path 93 | 94 | ## Other 95 | 96 | - read-only mode (for Next.js) 97 | - https://github.com/thedevdavid/digital-garden 98 | - cache 99 | - take into account versions of libraries? 100 | - https://github.com/novemberborn/package-hash 101 | - do I need to take into account `inode`? 102 | - BigInt for hash maybe?
103 | - `UNSIGNED BIG INT` 104 | - https://orm.drizzle.team/docs/column-types/sqlite#bigint 105 | - https://github.com/WiseLibs/better-sqlite3/blob/master/docs/integer.md#the-bigint-primitive-type 106 | - yaml 107 | - https://github.com/biojppm/rapidyaml 108 | - https://philna.sh/blog/2023/02/02/yaml-document-from-hell-javascript-edition/ 109 | - Faceted search 110 | - Pagefind integration either 111 | - generate JSON to build index 112 | - `text()` 113 | - Or [build index based on generated html](https://github.com/withastro/starlight/blob/d2822a1127c622e086ad8877a07adad70d8c3aab/packages/starlight/index.ts#L61-L72) 114 | - Update facets lib to support pagefind 115 | - reactivity/memoization 116 | - [materialite](https://github.com/vlcn-io/materialite) 117 | - [signals](https://preactjs.com/guide/v10/signals/) 118 | - maybe [rxdb](https://rxdb.info) Observable 119 | - [electric-sql: Live queries](https://electric-sql.com/docs/usage/data-access/queries#live-queries) 120 | - https://github.com/wycats/js-reactivity-benchmark 121 | - [Graphology](https://graphology.github.io/) integration 122 | - event listener - when node, edge added or node, edge removed 123 | - show broken links, maybe? 124 | - PageRank, clustering (related) 125 | - `html()` 126 | - use `mdast` to `hast` 127 | - mark broken links with html class 128 | - syntax highlighter for code 129 | - and probably something else 130 | -------------------------------------------------------------------------------- /packages/remark-dataview/README.md: -------------------------------------------------------------------------------- 1 | # @braindb/remark-dataview 2 | 3 | > [!WARNING] 4 | > Work in progress. Expect breaking changes. 5 | 6 | ## Idea 7 | 8 | ### Concept 9 | 10 | > Dataview is a live index and query engine over your personal knowledge base. You can add metadata to your notes and query them with the Dataview Query Language to list, filter, sort or group your data. Dataview keeps your queries always up to date and makes data aggregation a breeze. 11 | > 12 | > -- [Obsidian Dataview](https://blacksmithgu.github.io/obsidian-dataview/) 13 | 14 | The project is inspired by Obsidian DataView, but there are some differences. 15 | 16 | First, I decided to use real SQL instead of the custom query language by DataView ([DQL](https://blacksmithgu.github.io/obsidian-dataview/queries/dql-js-inline/)). 17 | 18 | **Pros**: 19 | 20 | - It is easy to learn due to the large community and extensive documentation. 21 | - It is easy to implement since the project already uses a relational database (SQLite). 22 | 23 | **Cons**: 24 | 25 | - It exposes the data structure, so any changes to the model will break downstream projects. 26 | - It locks the project into using a relational database, though I had considered experimenting with a graph database. 27 | 28 | Real SQL would at least allow the data to be displayed as a table (since all SQL clients output results in tables, even if there's only one cell). Other templates could include: 29 | 30 | - A list, if there's only one column. 31 | - A nested list. 32 | - Etc. 33 | 34 | The template can be customized through a meta string (the "fence meta"): 35 | 36 | ````md 37 | ```dataview <template> <other options> 38 | SELECT a, b, c FROM nodes; 39 | ``` 40 | ```` 41 | 42 | Additionally, I can add a custom function to format output. For example, `dv_link(text, url)` would output a Markdown link: `[text](url)`. 
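For illustration, combining the fence meta with such a function could look something like this (purely hypothetical: the `table` template name and the selected columns are assumptions, not the actual API — see the Examples section below for queries that are known to work):

````md
```dataview table
SELECT frontmatter ->> '$.title', dv_link(frontmatter ->> '$.title', url)
FROM documents
LIMIT 5;
```
````

Rendered with a table template, each row would become a table row whose second cell is a clickable `[text](url)` link.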
43 | 44 | ### Implementation 45 | 46 | Remark plugin will find all code blocks (or "fence blocks") with the language "dataview," treat the content as a query, execute the query using BrainDB, transform the result into MDAST, and replace the original code block. This way, the result can be post-processed by other Remark/Rehype plugins. 47 | 48 | To implement custom functions, I would use an SQL parser to remove custom functions from the SQL before executing it. These functions would then be used to format the resulting data before transforming it into MDAST. 49 | 50 | ```mermaid 51 | flowchart LR 52 | s[SQL parser] --> t[data query] --> e[fetch data] --> tm[transform into MDAST] 53 | s --> t1[formatting config] --> tm 54 | ``` 55 | 56 | That's it. 57 | 58 | ## Future 59 | 60 | ### Extension 61 | 62 | As an improvement, a VSCode extension could be implemented to highlight syntax and provide autocompletion. I believe this should be possible with [langium-sql](https://github.com/TypeFox/langium-sql/blob/main/packages/langium-sql/). 63 | 64 | Related: [VSCode Markdown Fenced Code Block Grammar Injection Example](https://github.com/mjbvz/vscode-fenced-code-block-grammar-injection-example) 65 | 66 | ### Improved Tables 67 | 68 | Perhaps the table template could be combined with [sortable tables](https://astro-digital-garden.stereobooster.com/recipes/sortable-tables/)? 69 | 70 | ### Graph Template 71 | 72 | For example, one could select data from the links table, convert it to DOT format, and output it as a code block, which would then be processed by `@beoe/rehype-graphviz`. 73 | 74 | Alternatively, one could use `@beoe/rehype-gnuplot` to generate plots based on the data. 75 | 76 | This is **another bonus** of this architecture — it is modular and can be integrated with other solutions. 
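As a rough sketch of the DOT idea above (column names follow the `links` table; `"from"` is quoted because it clashes with the SQL keyword), a query like the following could emit one edge per resolved link, which the template would then wrap in `digraph { … }` before handing it over to `@beoe/rehype-graphviz`:

```sql
-- one DOT edge per resolved link, e.g. "notes/a.md" -> "notes/b.md"
SELECT '"' || "from" || '" -> "' || to_path || '"'
FROM links
WHERE to_path IS NOT NULL;
```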
77 | 78 | ## Examples 79 | 80 | ### [Alphabetical index](https://astro-digital-garden.stereobooster.com/alphabetical/) 81 | 82 | ````md 83 | ```dataview list root_class=column-list 84 | SELECT upper(substr(frontmatter ->> '$.title', 1, 1)), dv_link() 85 | FROM documents 86 | WHERE frontmatter ->> '$.draft' IS NULL OR frontmatter ->> '$.draft' = false 87 | ORDER BY frontmatter ->> '$.title' 88 | LIMIT 2; 89 | ``` 90 | ```` 91 | 92 | ### [Recently changed](https://astro-digital-garden.stereobooster.com/recent/) 93 | 94 | ````md 95 | ```dataview list root_class=column-list 96 | SELECT date(updated_at / 1000, 'unixepoch'), dv_link() 97 | FROM documents 98 | WHERE frontmatter ->> '$.draft' IS NULL OR frontmatter ->> '$.draft' = false 99 | ORDER BY updated_at DESC 100 | LIMIT 2; 101 | ``` 102 | ```` 103 | 104 | ### [Task list](https://astro-digital-garden.stereobooster.com/recipes/task-extraction/) 105 | 106 | ````md 107 | ```dataview list 108 | SELECT dv_link(), dv_task() 109 | FROM tasks JOIN documents ON documents.path = tasks.from 110 | WHERE frontmatter ->> '$.draft' IS NULL OR frontmatter ->> '$.draft' = false 111 | ORDER BY updated_at DESC, path, tasks.start 112 | LIMIT 2; 113 | ``` 114 | ```` 115 | 116 | ### [Tags list](https://astro-digital-garden.stereobooster.com/tags/) 117 | 118 | ````md 119 | ```dataview list root_class=column-list 120 | SELECT tags.value as tag, dv_link() 121 | FROM documents, json_each(frontmatter, '$.tags') tags 122 | WHERE frontmatter ->> '$.draft' IS NULL OR frontmatter ->> '$.draft' = false 123 | ORDER BY tag 124 | LIMIT 2; 125 | ``` 126 | ```` 127 | 128 | ## Usage 129 | 130 | ```javascript 131 | import { unified } from "unified"; 132 | import markdown from "remark-parse"; 133 | import remarkDataview from "@braindb/remark-dataview"; 134 | 135 | // bdb is an instance of BrainDB 136 | 137 | let processor = unified() 138 | .use(markdown, { gfm: true }) 139 | .use(remarkDataview, { bdb }); 140 | ``` 141 | -------------------------------------------------------------------------------- /packages/mdast-util-wiki-link/test/index.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | 3 | import { fromMarkdown } from "mdast-util-from-markdown"; 4 | import { toMarkdown } from "mdast-util-to-markdown"; 5 | import { visit } from "unist-util-visit"; 6 | import { syntax } from "@braindb/micromark-extension-wiki-link"; 7 | 8 | import * as wikiLink from "../src/index.js"; 9 | import { type WikiLinkNode } from "../src/index.js"; 10 | 11 | function assertWikiLink(obj: any): asserts obj is WikiLinkNode { 12 | if (!obj.data || !("alias" in obj.data) || !("permalink" in obj.data)) { 13 | throw new Error("Not a wiki link"); 14 | } 15 | } 16 | 17 | describe("mdast-util-wiki-link", () => { 18 | describe("fromMarkdown", () => { 19 | it("parses a wiki link", () => { 20 | const ast = fromMarkdown("[[Wiki Link]]", { 21 | extensions: [syntax()], 22 | mdastExtensions: [wikiLink.fromMarkdown()], 23 | }); 24 | 25 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 26 | assertWikiLink(node); 27 | expect(node.data.permalink).toEqual(undefined); 28 | expect(node.data.hName).toEqual("a"); 29 | expect(node.data.hProperties.href).toEqual("Wiki Link"); 30 | expect(node.data.hChildren[0].value).toEqual("Wiki Link"); 31 | }); 32 | }); 33 | 34 | it("handles wiki links with aliases", () => { 35 | const ast = fromMarkdown("[[Real Page|Page Alias]]", { 36 | extensions: [syntax()], 37 | mdastExtensions:
[wikiLink.fromMarkdown()], 38 | }); 39 | 40 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 41 | assertWikiLink(node); 42 | expect(node.data.alias).toEqual("Page Alias"); 43 | expect(node.data.permalink).toEqual(undefined); 44 | expect(node.value).toEqual("Real Page"); 45 | expect(node.data.hName).toEqual("a"); 46 | expect(node.data.hProperties.href).toEqual("Real Page"); 47 | expect(node.data.hChildren[0].value).toEqual("Page Alias"); 48 | }); 49 | }); 50 | 51 | describe("configuration options", () => { 52 | it("uses linkResolver", () => { 53 | const ast = fromMarkdown("[[A Page]]", { 54 | extensions: [syntax()], 55 | mdastExtensions: [ 56 | wikiLink.fromMarkdown({ 57 | linkResolver: (x) => x.toLowerCase().replace(" ", "_"), 58 | }), 59 | ], 60 | }); 61 | 62 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 63 | assertWikiLink(node); 64 | expect(node.data.permalink).toEqual("a_page"); 65 | expect(node.data.hProperties.href).toEqual("a_page"); 66 | }); 67 | }); 68 | 69 | it("uses linkTemplate", () => { 70 | const ast = fromMarkdown("[[A Page]]", { 71 | extensions: [syntax()], 72 | mdastExtensions: [ 73 | wikiLink.fromMarkdown({ 74 | linkTemplate: ({ slug, permalink, alias }) => ({ 75 | hName: "span", 76 | hProperties: { "data-href": permalink || slug }, 77 | hChildren: [{ type: "text", value: alias || slug }], 78 | }), 79 | }), 80 | ], 81 | }); 82 | 83 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 84 | assertWikiLink(node); 85 | expect(node.data.hName).toEqual("span"); 86 | expect(node.data.hProperties["data-href"]).toEqual("A Page"); 87 | expect(node.data.hChildren[0].value).toEqual("A Page"); 88 | }); 89 | }); 90 | }); 91 | }); 92 | 93 | describe("toMarkdown", () => { 94 | it("stringifies wiki links", () => { 95 | const ast = fromMarkdown("[[Wiki Link]]", { 96 | extensions: [syntax()], 97 | mdastExtensions: [wikiLink.fromMarkdown()], 98 | }); 99 | 100 | const stringified = toMarkdown(ast, { 101 | // @ts-expect-error 102 | extensions: [wikiLink.toMarkdown()], 103 | }).trim(); 104 | 105 | expect(stringified).toEqual("[[Wiki Link]]"); 106 | }); 107 | 108 | it("stringifies aliased wiki links", () => { 109 | const ast = fromMarkdown("[[Real Page|Page Alias]]", { 110 | extensions: [syntax()], 111 | mdastExtensions: [wikiLink.fromMarkdown()], 112 | }); 113 | 114 | const stringified = toMarkdown(ast, { 115 | // @ts-expect-error 116 | extensions: [wikiLink.toMarkdown()], 117 | }).trim(); 118 | 119 | expect(stringified).toEqual("[[Real Page|Page Alias]]"); 120 | }); 121 | 122 | it("stringifies aliased wiki links when alias is the same as slug", () => { 123 | const ast = fromMarkdown("[[Real Page|Real Page]]", { 124 | extensions: [syntax()], 125 | mdastExtensions: [wikiLink.fromMarkdown()], 126 | }); 127 | 128 | const stringified = toMarkdown(ast, { 129 | // @ts-expect-error 130 | extensions: [wikiLink.toMarkdown()], 131 | }).trim(); 132 | 133 | expect(stringified).toEqual("[[Real Page|Real Page]]"); 134 | }); 135 | 136 | describe("configuration options", () => { 137 | it("uses aliasDivider", () => { 138 | const ast = fromMarkdown("[[Real Page|Page Alias]]", { 139 | extensions: [syntax()], 140 | mdastExtensions: [wikiLink.fromMarkdown()], 141 | }); 142 | 143 | const stringified = toMarkdown(ast, { 144 | // @ts-expect-error 145 | extensions: [wikiLink.toMarkdown({ aliasDivider: ":" })], 146 | }).trim(); 147 | 148 | expect(stringified).toEqual("[[Real Page:Page Alias]]"); 149 | }); 150 | }); 151 | }); 152 | }); 153 | 
-------------------------------------------------------------------------------- /packages/core/drizzle/meta/0000_snapshot.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "5", 3 | "dialect": "sqlite", 4 | "id": "b3b143ce-d981-4bb9-9e53-8c89f9379455", 5 | "prevId": "00000000-0000-0000-0000-000000000000", 6 | "tables": { 7 | "documents": { 8 | "name": "documents", 9 | "columns": { 10 | "id": { 11 | "name": "id", 12 | "type": "integer", 13 | "primaryKey": true, 14 | "notNull": true, 15 | "autoincrement": true 16 | }, 17 | "path": { 18 | "name": "path", 19 | "type": "text", 20 | "primaryKey": false, 21 | "notNull": true, 22 | "autoincrement": false 23 | }, 24 | "frontmatter": { 25 | "name": "frontmatter", 26 | "type": "text", 27 | "primaryKey": false, 28 | "notNull": true, 29 | "autoincrement": false 30 | }, 31 | "ast": { 32 | "name": "ast", 33 | "type": "text", 34 | "primaryKey": false, 35 | "notNull": true, 36 | "autoincrement": false 37 | }, 38 | "markdown": { 39 | "name": "markdown", 40 | "type": "text", 41 | "primaryKey": false, 42 | "notNull": true, 43 | "autoincrement": false 44 | }, 45 | "mtime": { 46 | "name": "mtime", 47 | "type": "real", 48 | "primaryKey": false, 49 | "notNull": true, 50 | "autoincrement": false 51 | }, 52 | "checksum": { 53 | "name": "checksum", 54 | "type": "text", 55 | "primaryKey": false, 56 | "notNull": true, 57 | "autoincrement": false 58 | }, 59 | "slug": { 60 | "name": "slug", 61 | "type": "text", 62 | "primaryKey": false, 63 | "notNull": true, 64 | "autoincrement": false 65 | }, 66 | "url": { 67 | "name": "url", 68 | "type": "text", 69 | "primaryKey": false, 70 | "notNull": true, 71 | "autoincrement": false 72 | } 73 | }, 74 | "indexes": { 75 | "slug": { 76 | "name": "slug", 77 | "columns": [ 78 | "slug" 79 | ], 80 | "isUnique": false 81 | }, 82 | "url": { 83 | "name": "url", 84 | "columns": [ 85 | "url" 86 | ], 87 | "isUnique": false 88 | }, 89 | "path": { 90 | "name": "path", 91 | "columns": [ 92 | "path" 93 | ], 94 | "isUnique": true 95 | } 96 | }, 97 | "foreignKeys": {}, 98 | "compositePrimaryKeys": {}, 99 | "uniqueConstraints": {} 100 | }, 101 | "links": { 102 | "name": "links", 103 | "columns": { 104 | "id": { 105 | "name": "id", 106 | "type": "integer", 107 | "primaryKey": true, 108 | "notNull": true, 109 | "autoincrement": true 110 | }, 111 | "from": { 112 | "name": "from", 113 | "type": "text", 114 | "primaryKey": false, 115 | "notNull": true, 116 | "autoincrement": false 117 | }, 118 | "to": { 119 | "name": "to", 120 | "type": "text", 121 | "primaryKey": false, 122 | "notNull": false, 123 | "autoincrement": false 124 | }, 125 | "start": { 126 | "name": "start", 127 | "type": "integer", 128 | "primaryKey": false, 129 | "notNull": true, 130 | "autoincrement": false 131 | }, 132 | "to_slug": { 133 | "name": "to_slug", 134 | "type": "text", 135 | "primaryKey": false, 136 | "notNull": false, 137 | "autoincrement": false 138 | }, 139 | "to_url": { 140 | "name": "to_url", 141 | "type": "text", 142 | "primaryKey": false, 143 | "notNull": false, 144 | "autoincrement": false 145 | }, 146 | "to_path": { 147 | "name": "to_path", 148 | "type": "text", 149 | "primaryKey": false, 150 | "notNull": false, 151 | "autoincrement": false 152 | }, 153 | "to_anchor": { 154 | "name": "to_anchor", 155 | "type": "text", 156 | "primaryKey": false, 157 | "notNull": false, 158 | "autoincrement": false 159 | }, 160 | "label": { 161 | "name": "label", 162 | "type": "text", 163 | "primaryKey": false, 164 | "notNull": 
false, 165 | "autoincrement": false 166 | }, 167 | "line": { 168 | "name": "line", 169 | "type": "integer", 170 | "primaryKey": false, 171 | "notNull": true, 172 | "autoincrement": false 173 | }, 174 | "column": { 175 | "name": "column", 176 | "type": "integer", 177 | "primaryKey": false, 178 | "notNull": true, 179 | "autoincrement": false 180 | } 181 | }, 182 | "indexes": { 183 | "to_slug": { 184 | "name": "to_slug", 185 | "columns": [ 186 | "to_slug" 187 | ], 188 | "isUnique": false 189 | }, 190 | "to_url": { 191 | "name": "to_url", 192 | "columns": [ 193 | "to_url" 194 | ], 195 | "isUnique": false 196 | }, 197 | "to_path": { 198 | "name": "to_path", 199 | "columns": [ 200 | "to_path" 201 | ], 202 | "isUnique": false 203 | }, 204 | "from_start": { 205 | "name": "from_start", 206 | "columns": [ 207 | "from", 208 | "start" 209 | ], 210 | "isUnique": true 211 | } 212 | }, 213 | "foreignKeys": {}, 214 | "compositePrimaryKeys": {}, 215 | "uniqueConstraints": {} 216 | } 217 | }, 218 | "enums": {}, 219 | "_meta": { 220 | "schemas": {}, 221 | "tables": {}, 222 | "columns": {} 223 | } 224 | } -------------------------------------------------------------------------------- /packages/core/drizzle/meta/0002_snapshot.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "5", 3 | "dialect": "sqlite", 4 | "id": "d0aaa3f0-53e1-4a1a-9561-fe7ee1ee4bcc", 5 | "prevId": "5765a267-399b-43de-878f-5703ded6a016", 6 | "tables": { 7 | "documents": { 8 | "name": "documents", 9 | "columns": { 10 | "id": { 11 | "name": "id", 12 | "type": "integer", 13 | "primaryKey": true, 14 | "notNull": true, 15 | "autoincrement": true 16 | }, 17 | "path": { 18 | "name": "path", 19 | "type": "text", 20 | "primaryKey": false, 21 | "notNull": true, 22 | "autoincrement": false 23 | }, 24 | "frontmatter": { 25 | "name": "frontmatter", 26 | "type": "text", 27 | "primaryKey": false, 28 | "notNull": true, 29 | "autoincrement": false 30 | }, 31 | "ast": { 32 | "name": "ast", 33 | "type": "text", 34 | "primaryKey": false, 35 | "notNull": true, 36 | "autoincrement": false 37 | }, 38 | "mtime": { 39 | "name": "mtime", 40 | "type": "real", 41 | "primaryKey": false, 42 | "notNull": true, 43 | "autoincrement": false 44 | }, 45 | "checksum": { 46 | "name": "checksum", 47 | "type": "text", 48 | "primaryKey": false, 49 | "notNull": true, 50 | "autoincrement": false 51 | }, 52 | "slug": { 53 | "name": "slug", 54 | "type": "text", 55 | "primaryKey": false, 56 | "notNull": true, 57 | "autoincrement": false 58 | }, 59 | "url": { 60 | "name": "url", 61 | "type": "text", 62 | "primaryKey": false, 63 | "notNull": true, 64 | "autoincrement": false 65 | }, 66 | "updated_at": { 67 | "name": "updated_at", 68 | "type": "integer", 69 | "primaryKey": false, 70 | "notNull": true, 71 | "autoincrement": false, 72 | "default": 0 73 | } 74 | }, 75 | "indexes": { 76 | "slug": { 77 | "name": "slug", 78 | "columns": [ 79 | "slug" 80 | ], 81 | "isUnique": false 82 | }, 83 | "url": { 84 | "name": "url", 85 | "columns": [ 86 | "url" 87 | ], 88 | "isUnique": false 89 | }, 90 | "path": { 91 | "name": "path", 92 | "columns": [ 93 | "path" 94 | ], 95 | "isUnique": true 96 | } 97 | }, 98 | "foreignKeys": {}, 99 | "compositePrimaryKeys": {}, 100 | "uniqueConstraints": {} 101 | }, 102 | "links": { 103 | "name": "links", 104 | "columns": { 105 | "id": { 106 | "name": "id", 107 | "type": "integer", 108 | "primaryKey": true, 109 | "notNull": true, 110 | "autoincrement": true 111 | }, 112 | "from": { 113 | "name": "from", 114 | 
"type": "text", 115 | "primaryKey": false, 116 | "notNull": true, 117 | "autoincrement": false 118 | }, 119 | "to": { 120 | "name": "to", 121 | "type": "text", 122 | "primaryKey": false, 123 | "notNull": false, 124 | "autoincrement": false 125 | }, 126 | "start": { 127 | "name": "start", 128 | "type": "integer", 129 | "primaryKey": false, 130 | "notNull": true, 131 | "autoincrement": false 132 | }, 133 | "to_slug": { 134 | "name": "to_slug", 135 | "type": "text", 136 | "primaryKey": false, 137 | "notNull": false, 138 | "autoincrement": false 139 | }, 140 | "to_url": { 141 | "name": "to_url", 142 | "type": "text", 143 | "primaryKey": false, 144 | "notNull": false, 145 | "autoincrement": false 146 | }, 147 | "to_path": { 148 | "name": "to_path", 149 | "type": "text", 150 | "primaryKey": false, 151 | "notNull": false, 152 | "autoincrement": false 153 | }, 154 | "to_anchor": { 155 | "name": "to_anchor", 156 | "type": "text", 157 | "primaryKey": false, 158 | "notNull": false, 159 | "autoincrement": false 160 | }, 161 | "label": { 162 | "name": "label", 163 | "type": "text", 164 | "primaryKey": false, 165 | "notNull": false, 166 | "autoincrement": false 167 | }, 168 | "line": { 169 | "name": "line", 170 | "type": "integer", 171 | "primaryKey": false, 172 | "notNull": true, 173 | "autoincrement": false 174 | }, 175 | "column": { 176 | "name": "column", 177 | "type": "integer", 178 | "primaryKey": false, 179 | "notNull": true, 180 | "autoincrement": false 181 | } 182 | }, 183 | "indexes": { 184 | "to_slug": { 185 | "name": "to_slug", 186 | "columns": [ 187 | "to_slug" 188 | ], 189 | "isUnique": false 190 | }, 191 | "to_url": { 192 | "name": "to_url", 193 | "columns": [ 194 | "to_url" 195 | ], 196 | "isUnique": false 197 | }, 198 | "to_path": { 199 | "name": "to_path", 200 | "columns": [ 201 | "to_path" 202 | ], 203 | "isUnique": false 204 | }, 205 | "from_start": { 206 | "name": "from_start", 207 | "columns": [ 208 | "from", 209 | "start" 210 | ], 211 | "isUnique": true 212 | } 213 | }, 214 | "foreignKeys": {}, 215 | "compositePrimaryKeys": {}, 216 | "uniqueConstraints": {} 217 | } 218 | }, 219 | "enums": {}, 220 | "_meta": { 221 | "schemas": {}, 222 | "tables": {}, 223 | "columns": {} 224 | } 225 | } -------------------------------------------------------------------------------- /packages/core/drizzle/meta/0001_snapshot.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "5", 3 | "dialect": "sqlite", 4 | "id": "5765a267-399b-43de-878f-5703ded6a016", 5 | "prevId": "b3b143ce-d981-4bb9-9e53-8c89f9379455", 6 | "tables": { 7 | "documents": { 8 | "name": "documents", 9 | "columns": { 10 | "id": { 11 | "name": "id", 12 | "type": "integer", 13 | "primaryKey": true, 14 | "notNull": true, 15 | "autoincrement": true 16 | }, 17 | "path": { 18 | "name": "path", 19 | "type": "text", 20 | "primaryKey": false, 21 | "notNull": true, 22 | "autoincrement": false 23 | }, 24 | "frontmatter": { 25 | "name": "frontmatter", 26 | "type": "text", 27 | "primaryKey": false, 28 | "notNull": true, 29 | "autoincrement": false 30 | }, 31 | "ast": { 32 | "name": "ast", 33 | "type": "text", 34 | "primaryKey": false, 35 | "notNull": true, 36 | "autoincrement": false 37 | }, 38 | "markdown": { 39 | "name": "markdown", 40 | "type": "text", 41 | "primaryKey": false, 42 | "notNull": true, 43 | "autoincrement": false 44 | }, 45 | "mtime": { 46 | "name": "mtime", 47 | "type": "real", 48 | "primaryKey": false, 49 | "notNull": true, 50 | "autoincrement": false 51 | }, 52 | 
"checksum": { 53 | "name": "checksum", 54 | "type": "text", 55 | "primaryKey": false, 56 | "notNull": true, 57 | "autoincrement": false 58 | }, 59 | "slug": { 60 | "name": "slug", 61 | "type": "text", 62 | "primaryKey": false, 63 | "notNull": true, 64 | "autoincrement": false 65 | }, 66 | "url": { 67 | "name": "url", 68 | "type": "text", 69 | "primaryKey": false, 70 | "notNull": true, 71 | "autoincrement": false 72 | }, 73 | "updated_at": { 74 | "name": "updated_at", 75 | "type": "integer", 76 | "primaryKey": false, 77 | "notNull": true, 78 | "autoincrement": false, 79 | "default": 0 80 | } 81 | }, 82 | "indexes": { 83 | "slug": { 84 | "name": "slug", 85 | "columns": [ 86 | "slug" 87 | ], 88 | "isUnique": false 89 | }, 90 | "url": { 91 | "name": "url", 92 | "columns": [ 93 | "url" 94 | ], 95 | "isUnique": false 96 | }, 97 | "path": { 98 | "name": "path", 99 | "columns": [ 100 | "path" 101 | ], 102 | "isUnique": true 103 | } 104 | }, 105 | "foreignKeys": {}, 106 | "compositePrimaryKeys": {}, 107 | "uniqueConstraints": {} 108 | }, 109 | "links": { 110 | "name": "links", 111 | "columns": { 112 | "id": { 113 | "name": "id", 114 | "type": "integer", 115 | "primaryKey": true, 116 | "notNull": true, 117 | "autoincrement": true 118 | }, 119 | "from": { 120 | "name": "from", 121 | "type": "text", 122 | "primaryKey": false, 123 | "notNull": true, 124 | "autoincrement": false 125 | }, 126 | "to": { 127 | "name": "to", 128 | "type": "text", 129 | "primaryKey": false, 130 | "notNull": false, 131 | "autoincrement": false 132 | }, 133 | "start": { 134 | "name": "start", 135 | "type": "integer", 136 | "primaryKey": false, 137 | "notNull": true, 138 | "autoincrement": false 139 | }, 140 | "to_slug": { 141 | "name": "to_slug", 142 | "type": "text", 143 | "primaryKey": false, 144 | "notNull": false, 145 | "autoincrement": false 146 | }, 147 | "to_url": { 148 | "name": "to_url", 149 | "type": "text", 150 | "primaryKey": false, 151 | "notNull": false, 152 | "autoincrement": false 153 | }, 154 | "to_path": { 155 | "name": "to_path", 156 | "type": "text", 157 | "primaryKey": false, 158 | "notNull": false, 159 | "autoincrement": false 160 | }, 161 | "to_anchor": { 162 | "name": "to_anchor", 163 | "type": "text", 164 | "primaryKey": false, 165 | "notNull": false, 166 | "autoincrement": false 167 | }, 168 | "label": { 169 | "name": "label", 170 | "type": "text", 171 | "primaryKey": false, 172 | "notNull": false, 173 | "autoincrement": false 174 | }, 175 | "line": { 176 | "name": "line", 177 | "type": "integer", 178 | "primaryKey": false, 179 | "notNull": true, 180 | "autoincrement": false 181 | }, 182 | "column": { 183 | "name": "column", 184 | "type": "integer", 185 | "primaryKey": false, 186 | "notNull": true, 187 | "autoincrement": false 188 | } 189 | }, 190 | "indexes": { 191 | "to_slug": { 192 | "name": "to_slug", 193 | "columns": [ 194 | "to_slug" 195 | ], 196 | "isUnique": false 197 | }, 198 | "to_url": { 199 | "name": "to_url", 200 | "columns": [ 201 | "to_url" 202 | ], 203 | "isUnique": false 204 | }, 205 | "to_path": { 206 | "name": "to_path", 207 | "columns": [ 208 | "to_path" 209 | ], 210 | "isUnique": false 211 | }, 212 | "from_start": { 213 | "name": "from_start", 214 | "columns": [ 215 | "from", 216 | "start" 217 | ], 218 | "isUnique": true 219 | } 220 | }, 221 | "foreignKeys": {}, 222 | "compositePrimaryKeys": {}, 223 | "uniqueConstraints": {} 224 | } 225 | }, 226 | "enums": {}, 227 | "_meta": { 228 | "schemas": {}, 229 | "tables": {}, 230 | "columns": {} 231 | } 232 | } 
-------------------------------------------------------------------------------- /packages/core/drizzle/meta/0003_snapshot.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "5", 3 | "dialect": "sqlite", 4 | "id": "23036ee6-9818-446a-9cee-3bc44c17e1a5", 5 | "prevId": "d0aaa3f0-53e1-4a1a-9561-fe7ee1ee4bcc", 6 | "tables": { 7 | "documents": { 8 | "name": "documents", 9 | "columns": { 10 | "id": { 11 | "name": "id", 12 | "type": "integer", 13 | "primaryKey": true, 14 | "notNull": true, 15 | "autoincrement": true 16 | }, 17 | "path": { 18 | "name": "path", 19 | "type": "text", 20 | "primaryKey": false, 21 | "notNull": true, 22 | "autoincrement": false 23 | }, 24 | "frontmatter": { 25 | "name": "frontmatter", 26 | "type": "text", 27 | "primaryKey": false, 28 | "notNull": true, 29 | "autoincrement": false 30 | }, 31 | "ast": { 32 | "name": "ast", 33 | "type": "text", 34 | "primaryKey": false, 35 | "notNull": true, 36 | "autoincrement": false 37 | }, 38 | "mtime": { 39 | "name": "mtime", 40 | "type": "real", 41 | "primaryKey": false, 42 | "notNull": true, 43 | "autoincrement": false 44 | }, 45 | "checksum": { 46 | "name": "checksum", 47 | "type": "text", 48 | "primaryKey": false, 49 | "notNull": true, 50 | "autoincrement": false 51 | }, 52 | "slug": { 53 | "name": "slug", 54 | "type": "text", 55 | "primaryKey": false, 56 | "notNull": true, 57 | "autoincrement": false 58 | }, 59 | "url": { 60 | "name": "url", 61 | "type": "text", 62 | "primaryKey": false, 63 | "notNull": true, 64 | "autoincrement": false 65 | }, 66 | "updated_at": { 67 | "name": "updated_at", 68 | "type": "integer", 69 | "primaryKey": false, 70 | "notNull": true, 71 | "autoincrement": false, 72 | "default": 0 73 | }, 74 | "revision": { 75 | "name": "revision", 76 | "type": "integer", 77 | "primaryKey": false, 78 | "notNull": true, 79 | "autoincrement": false, 80 | "default": 0 81 | } 82 | }, 83 | "indexes": { 84 | "slug": { 85 | "name": "slug", 86 | "columns": [ 87 | "slug" 88 | ], 89 | "isUnique": false 90 | }, 91 | "url": { 92 | "name": "url", 93 | "columns": [ 94 | "url" 95 | ], 96 | "isUnique": false 97 | }, 98 | "path": { 99 | "name": "path", 100 | "columns": [ 101 | "path" 102 | ], 103 | "isUnique": true 104 | } 105 | }, 106 | "foreignKeys": {}, 107 | "compositePrimaryKeys": {}, 108 | "uniqueConstraints": {} 109 | }, 110 | "links": { 111 | "name": "links", 112 | "columns": { 113 | "id": { 114 | "name": "id", 115 | "type": "integer", 116 | "primaryKey": true, 117 | "notNull": true, 118 | "autoincrement": true 119 | }, 120 | "from": { 121 | "name": "from", 122 | "type": "text", 123 | "primaryKey": false, 124 | "notNull": true, 125 | "autoincrement": false 126 | }, 127 | "to": { 128 | "name": "to", 129 | "type": "text", 130 | "primaryKey": false, 131 | "notNull": false, 132 | "autoincrement": false 133 | }, 134 | "start": { 135 | "name": "start", 136 | "type": "integer", 137 | "primaryKey": false, 138 | "notNull": true, 139 | "autoincrement": false 140 | }, 141 | "to_slug": { 142 | "name": "to_slug", 143 | "type": "text", 144 | "primaryKey": false, 145 | "notNull": false, 146 | "autoincrement": false 147 | }, 148 | "to_url": { 149 | "name": "to_url", 150 | "type": "text", 151 | "primaryKey": false, 152 | "notNull": false, 153 | "autoincrement": false 154 | }, 155 | "to_path": { 156 | "name": "to_path", 157 | "type": "text", 158 | "primaryKey": false, 159 | "notNull": false, 160 | "autoincrement": false 161 | }, 162 | "to_anchor": { 163 | "name": "to_anchor", 164 | "type": 
"text", 165 | "primaryKey": false, 166 | "notNull": false, 167 | "autoincrement": false 168 | }, 169 | "label": { 170 | "name": "label", 171 | "type": "text", 172 | "primaryKey": false, 173 | "notNull": false, 174 | "autoincrement": false 175 | }, 176 | "line": { 177 | "name": "line", 178 | "type": "integer", 179 | "primaryKey": false, 180 | "notNull": true, 181 | "autoincrement": false 182 | }, 183 | "column": { 184 | "name": "column", 185 | "type": "integer", 186 | "primaryKey": false, 187 | "notNull": true, 188 | "autoincrement": false 189 | } 190 | }, 191 | "indexes": { 192 | "to_slug": { 193 | "name": "to_slug", 194 | "columns": [ 195 | "to_slug" 196 | ], 197 | "isUnique": false 198 | }, 199 | "to_url": { 200 | "name": "to_url", 201 | "columns": [ 202 | "to_url" 203 | ], 204 | "isUnique": false 205 | }, 206 | "to_path": { 207 | "name": "to_path", 208 | "columns": [ 209 | "to_path" 210 | ], 211 | "isUnique": false 212 | }, 213 | "from_start": { 214 | "name": "from_start", 215 | "columns": [ 216 | "from", 217 | "start" 218 | ], 219 | "isUnique": true 220 | } 221 | }, 222 | "foreignKeys": {}, 223 | "compositePrimaryKeys": {}, 224 | "uniqueConstraints": {} 225 | } 226 | }, 227 | "enums": {}, 228 | "_meta": { 229 | "schemas": {}, 230 | "tables": {}, 231 | "columns": {} 232 | } 233 | } -------------------------------------------------------------------------------- /packages/remark-wiki-link/test/index.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, it, expect } from "vitest"; 2 | import { unified } from "unified"; 3 | import remarkParse from "remark-parse"; 4 | import { visit } from "unist-util-visit"; 5 | import remarkStringify from "remark-stringify"; 6 | 7 | import wikiLinkPlugin, { type RemarkWikiLinkOptions } from "../src"; 8 | import { select } from "unist-util-select"; 9 | 10 | import { type WikiLinkNode } from "@braindb/mdast-util-wiki-link"; 11 | 12 | function assertWikiLink(obj: any): asserts obj is WikiLinkNode { 13 | if (!obj.data || !("alias" in obj.data) || !("permalink" in obj.data)) { 14 | throw new Error("Not a wiki link"); 15 | } 16 | } 17 | 18 | describe("remark-wiki-link", () => { 19 | it("parses a wiki link", () => { 20 | const processor = unified().use(remarkParse).use(wikiLinkPlugin); 21 | 22 | let ast = processor.runSync(processor.parse("[[Wiki Link]]")); 23 | 24 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 25 | assertWikiLink(node); 26 | 27 | expect(node.data.permalink).toEqual(undefined); 28 | expect(node.data.hName).toEqual("a"); 29 | expect(node.data.hProperties.href).toEqual("Wiki Link"); 30 | expect(node.data.hChildren[0].value).toEqual("Wiki Link"); 31 | }); 32 | }); 33 | 34 | it("handles wiki links with aliases", () => { 35 | const processor = unified().use(remarkParse).use(wikiLinkPlugin); 36 | 37 | let ast = processor.runSync(processor.parse("[[Real Page|Page Alias]]")); 38 | 39 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 40 | assertWikiLink(node); 41 | 42 | expect(node.data.permalink).toEqual(undefined); 43 | expect(node.data.hName).toEqual("a"); 44 | expect(node.data.alias).toEqual("Page Alias"); 45 | expect(node.value).toEqual("Real Page"); 46 | expect(node.data.hProperties.href).toEqual("Real Page"); 47 | expect(node.data.hChildren[0].value).toEqual("Page Alias"); 48 | }); 49 | }); 50 | 51 | it("handles wiki alias links with custom divider", () => { 52 | const processor = unified().use(remarkParse).use(wikiLinkPlugin, { 53 | aliasDivider: ":", 54 | }); 55 | 56 | let ast = 
processor.runSync(processor.parse("[[Real Page:Page Alias]]")); 57 | 58 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 59 | assertWikiLink(node); 60 | 61 | expect(node.data.permalink).toEqual(undefined); 62 | expect(node.data.hName).toEqual("a"); 63 | expect(node.data.alias).toEqual("Page Alias"); 64 | expect(node.value).toEqual("Real Page"); 65 | expect(node.data.hProperties.href).toEqual("Real Page"); 66 | expect(node.data.hChildren[0].value).toEqual("Page Alias"); 67 | }); 68 | }); 69 | 70 | it("stringifies wiki links", () => { 71 | const processor = unified() 72 | .use(remarkParse) 73 | .use(remarkStringify) 74 | .use(wikiLinkPlugin); 75 | 76 | const stringified = processor 77 | .processSync("[[Wiki Link]]") 78 | .value.toString() 79 | .trim(); 80 | expect(stringified).toEqual("[[Wiki Link]]"); 81 | }); 82 | 83 | it("stringifies aliased wiki links", () => { 84 | const processor = unified() 85 | .use(remarkParse) 86 | .use(remarkStringify) 87 | .use(wikiLinkPlugin); 88 | 89 | const stringified = processor 90 | .processSync("[[Real Page:Page Alias]]") 91 | .value.toString() 92 | .trim(); 93 | expect(stringified).toEqual("[[Real Page:Page Alias]]"); 94 | }); 95 | 96 | describe("configuration options", () => { 97 | it("uses linkResolver", () => { 98 | const opts: RemarkWikiLinkOptions = { 99 | linkResolver: (x: string) => x.toLowerCase().replace(" ", "_"), 100 | }; 101 | 102 | const processor = unified().use(remarkParse).use(wikiLinkPlugin, opts); 103 | 104 | let ast = processor.runSync(processor.parse("[[A Page]]")); 105 | 106 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 107 | assertWikiLink(node); 108 | expect(node.data.permalink).toEqual("a_page"); 109 | expect(node.data.hProperties.href).toEqual("a_page"); 110 | }); 111 | }); 112 | 113 | it("uses linkTemplate", () => { 114 | const opts: RemarkWikiLinkOptions = { 115 | linkTemplate: ({ slug, permalink, alias }) => ({ 116 | hName: "span", 117 | hProperties: { "data-href": permalink || slug }, 118 | hChildren: [{ type: "text", value: alias || slug }], 119 | }), 120 | }; 121 | const processor = unified().use(remarkParse).use(wikiLinkPlugin, opts); 122 | 123 | let ast = processor.runSync(processor.parse("[[A Page]]")); 124 | 125 | visit(ast, "wikiLink", (node: WikiLinkNode) => { 126 | assertWikiLink(node); 127 | expect(node.data.hName).toEqual("span"); 128 | expect(node.data.hProperties["data-href"]).toEqual("A Page"); 129 | expect(node.data.hChildren[0].value).toEqual("A Page"); 130 | }); 131 | }); 132 | }); 133 | 134 | describe("open wiki links", () => { 135 | it("handles open wiki links", () => { 136 | const processor = unified().use(remarkParse).use(wikiLinkPlugin); 137 | 138 | let ast = processor.runSync(processor.parse("t[[\nt")); 139 | 140 | expect(!select("wikiLink", ast)).toBeTruthy(); 141 | }); 142 | 143 | it("handles open wiki links at end of file", () => { 144 | const processor = unified().use(remarkParse).use(wikiLinkPlugin); 145 | 146 | let ast = processor.runSync(processor.parse("t [[")); 147 | 148 | expect(!select("wikiLink", ast)).toBeTruthy(); 149 | }); 150 | 151 | it("handles open wiki links with partial data", () => { 152 | const processor = unified().use(remarkParse).use(wikiLinkPlugin); 153 | 154 | let ast = processor.runSync(processor.parse("t [[tt\nt")); 155 | 156 | expect(!select("wikiLink", ast)).toBeTruthy(); 157 | }); 158 | 159 | it("handles open wiki links with partial alias divider", () => { 160 | const processor = unified().use(remarkParse).use(wikiLinkPlugin, { 161 | aliasDivider: "::", 162 | }); 
163 | 164 | let ast = processor.runSync(processor.parse("[[t::\n")); 165 | 166 | expect(!select("wikiLink", ast)).toBeTruthy(); 167 | }); 168 | 169 | it("handles open wiki links with partial alias", () => { 170 | const processor = unified().use(remarkParse).use(wikiLinkPlugin); 171 | 172 | let ast = processor.runSync(processor.parse("[[t:\n")); 173 | 174 | expect(!select("wikiLink", ast)).toBeTruthy(); 175 | }); 176 | }); 177 | }); 178 | -------------------------------------------------------------------------------- /packages/core/drizzle/meta/0006_snapshot.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": "5", 3 | "dialect": "sqlite", 4 | "id": "544cb315-a32b-4a6a-9693-f8fc51d21d01", 5 | "prevId": "dc3ca034-8244-4cbf-9062-ef2146a6c398", 6 | "tables": { 7 | "documents": { 8 | "name": "documents", 9 | "columns": { 10 | "id": { 11 | "name": "id", 12 | "type": "integer", 13 | "primaryKey": true, 14 | "notNull": true, 15 | "autoincrement": true 16 | }, 17 | "path": { 18 | "name": "path", 19 | "type": "text", 20 | "primaryKey": false, 21 | "notNull": true, 22 | "autoincrement": false 23 | }, 24 | "frontmatter": { 25 | "name": "frontmatter", 26 | "type": "text", 27 | "primaryKey": false, 28 | "notNull": true, 29 | "autoincrement": false 30 | }, 31 | "ast": { 32 | "name": "ast", 33 | "type": "text", 34 | "primaryKey": false, 35 | "notNull": true, 36 | "autoincrement": false 37 | }, 38 | "mtime": { 39 | "name": "mtime", 40 | "type": "real", 41 | "primaryKey": false, 42 | "notNull": true, 43 | "autoincrement": false 44 | }, 45 | "checksum": { 46 | "name": "checksum", 47 | "type": "text", 48 | "primaryKey": false, 49 | "notNull": true, 50 | "autoincrement": false 51 | }, 52 | "cfghash": { 53 | "name": "cfghash", 54 | "type": "integer", 55 | "primaryKey": false, 56 | "notNull": true, 57 | "autoincrement": false, 58 | "default": 0 59 | }, 60 | "slug": { 61 | "name": "slug", 62 | "type": "text", 63 | "primaryKey": false, 64 | "notNull": true, 65 | "autoincrement": false 66 | }, 67 | "url": { 68 | "name": "url", 69 | "type": "text", 70 | "primaryKey": false, 71 | "notNull": true, 72 | "autoincrement": false 73 | }, 74 | "updated_at": { 75 | "name": "updated_at", 76 | "type": "integer", 77 | "primaryKey": false, 78 | "notNull": true, 79 | "autoincrement": false, 80 | "default": 0 81 | }, 82 | "revision": { 83 | "name": "revision", 84 | "type": "integer", 85 | "primaryKey": false, 86 | "notNull": true, 87 | "autoincrement": false, 88 | "default": 0 89 | } 90 | }, 91 | "indexes": { 92 | "slug": { 93 | "name": "slug", 94 | "columns": [ 95 | "slug" 96 | ], 97 | "isUnique": false 98 | }, 99 | "url": { 100 | "name": "url", 101 | "columns": [ 102 | "url" 103 | ], 104 | "isUnique": false 105 | }, 106 | "path": { 107 | "name": "path", 108 | "columns": [ 109 | "path" 110 | ], 111 | "isUnique": true 112 | } 113 | }, 114 | "foreignKeys": {}, 115 | "compositePrimaryKeys": {}, 116 | "uniqueConstraints": {} 117 | }, 118 | "links": { 119 | "name": "links", 120 | "columns": { 121 | "id": { 122 | "name": "id", 123 | "type": "integer", 124 | "primaryKey": true, 125 | "notNull": true, 126 | "autoincrement": true 127 | }, 128 | "from": { 129 | "name": "from", 130 | "type": "text", 131 | "primaryKey": false, 132 | "notNull": true, 133 | "autoincrement": false 134 | }, 135 | "to": { 136 | "name": "to", 137 | "type": "text", 138 | "primaryKey": false, 139 | "notNull": false, 140 | "autoincrement": false 141 | }, 142 | "start": { 143 | "name": "start", 144 | "type": 
"integer", 145 | "primaryKey": false, 146 | "notNull": true, 147 | "autoincrement": false 148 | }, 149 | "to_slug": { 150 | "name": "to_slug", 151 | "type": "text", 152 | "primaryKey": false, 153 | "notNull": false, 154 | "autoincrement": false 155 | }, 156 | "to_url": { 157 | "name": "to_url", 158 | "type": "text", 159 | "primaryKey": false, 160 | "notNull": false, 161 | "autoincrement": false 162 | }, 163 | "to_path": { 164 | "name": "to_path", 165 | "type": "text", 166 | "primaryKey": false, 167 | "notNull": false, 168 | "autoincrement": false 169 | }, 170 | "to_anchor": { 171 | "name": "to_anchor", 172 | "type": "text", 173 | "primaryKey": false, 174 | "notNull": false, 175 | "autoincrement": false 176 | }, 177 | "label": { 178 | "name": "label", 179 | "type": "text", 180 | "primaryKey": false, 181 | "notNull": false, 182 | "autoincrement": false 183 | }, 184 | "line": { 185 | "name": "line", 186 | "type": "integer", 187 | "primaryKey": false, 188 | "notNull": true, 189 | "autoincrement": false 190 | }, 191 | "column": { 192 | "name": "column", 193 | "type": "integer", 194 | "primaryKey": false, 195 | "notNull": true, 196 | "autoincrement": false 197 | } 198 | }, 199 | "indexes": { 200 | "to_slug": { 201 | "name": "to_slug", 202 | "columns": [ 203 | "to_slug" 204 | ], 205 | "isUnique": false 206 | }, 207 | "to_url": { 208 | "name": "to_url", 209 | "columns": [ 210 | "to_url" 211 | ], 212 | "isUnique": false 213 | }, 214 | "to_path": { 215 | "name": "to_path", 216 | "columns": [ 217 | "to_path" 218 | ], 219 | "isUnique": false 220 | }, 221 | "from_start": { 222 | "name": "from_start", 223 | "columns": [ 224 | "from", 225 | "start" 226 | ], 227 | "isUnique": true 228 | } 229 | }, 230 | "foreignKeys": {}, 231 | "compositePrimaryKeys": {}, 232 | "uniqueConstraints": {} 233 | } 234 | }, 235 | "enums": {}, 236 | "_meta": { 237 | "schemas": {}, 238 | "tables": {}, 239 | "columns": {} 240 | } 241 | } --------------------------------------------------------------------------------