├── .eslintrc ├── .github └── workflows │ ├── ci.yml │ └── release.yml ├── .gitignore ├── .mocharc.js ├── .npmrc ├── CHANGELOG.adoc ├── LICENSE ├── README.adoc ├── bin └── downdoc ├── lib ├── cli.js ├── index.js └── util │ └── read-stream.js ├── npm ├── format.js ├── release.sh └── version.js ├── package-lock.json ├── package.json └── test ├── cli-test.js ├── downdoc-test.js └── harness ├── config.js ├── index.js └── mocha-ci-reporter.js /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "standard", 3 | "rules": { 4 | "array-callback-return": "off", 5 | "arrow-parens": ["error", "always"], 6 | "comma-dangle": ["error", { 7 | "arrays": "always-multiline", 8 | "objects": "always-multiline", 9 | "imports": "always-multiline", 10 | "exports": "always-multiline" 11 | }], 12 | "max-len": ["error", { 13 | "code": 120, 14 | "ignoreTemplateLiterals": true 15 | }], 16 | "prefer-regex-literals": "off", 17 | "spaced-comment": "off", 18 | "radix": ["error", "always"] 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | on: 3 | push: 4 | branches: ['**'] 5 | paths-ignore: ['*.adoc'] 6 | pull_request: 7 | branches: [main] 8 | paths-ignore: ['*.adoc'] 9 | schedule: 10 | - cron: '30 2 * * MON' 11 | concurrency: 12 | group: ${{ github.workflow }}-${{ github.ref }} 13 | cancel-in-progress: true 14 | jobs: 15 | activate: 16 | if: >- 17 | github.event_name == 'push' || 18 | (github.event_name == 'schedule' && github.repository_owner == 'opendevise') || 19 | (github.event_name == 'pull_request' && !startsWith(github.head_ref, 'docs/')) 20 | runs-on: ubuntu-latest 21 | steps: 22 | - name: Proceed 23 | run: echo ok go 24 | build: 25 | needs: activate 26 | runs-on: ubuntu-latest 27 | steps: 28 | - name: Checkout 29 | uses: actions/checkout@v3 30 | - name: Install Node.js 31 | uses: actions/setup-node@v3 32 | with: 33 | node-version: '16' 34 | - name: Install dependencies 35 | run: npm ci 36 | - name: Run linter 37 | run: | 38 | npm run lint 39 | if [ -n "$(npm --silent run format && git --no-pager diff --name-only)" ]; then 40 | echo 'Detected pending code style changes' 41 | exit 1 42 | fi 43 | - name: Run tests with coverage 44 | run: npm run coverage 45 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | run-name: ${{ github.workflow }} ${{ github.event.inputs.release-version }} 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | release-version: 7 | description: Enter version to release (e.g., 1.0.1). 
8 | required: false 9 | jobs: 10 | perform: 11 | if: github.repository_owner == 'opendevise' && github.event_name == 'workflow_dispatch' 12 | runs-on: ubuntu-latest 13 | environment: releases 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v3 17 | - name: Install Node.js 18 | uses: actions/setup-node@v3 19 | with: 20 | node-version: '16' 21 | - name: Install dependencies 22 | run: npm ci 23 | - name: Run linter 24 | run: npm run lint 25 | - name: Run tests 26 | run: npm test 27 | - name: Set up release environment 28 | run: | 29 | echo RELEASE_VERSION=${{ github.event.inputs.release-version }} >> $GITHUB_ENV 30 | echo RELEASE_NPM_TOKEN=${{ secrets[format('NPM_TOKEN_{0}', github.actor)] }} >> $GITHUB_ENV 31 | - name: Build, tag, and publish npm package 32 | run: ./npm/release.sh 33 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | /.nyc_output/ 2 | /node_modules/ 3 | /reports/ 4 | /.nvmrc 5 | -------------------------------------------------------------------------------- /.mocharc.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = require('./test/harness/config.js') 4 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | fund=false 2 | lockfile-version=3 3 | omit=optional 4 | -------------------------------------------------------------------------------- /CHANGELOG.adoc: -------------------------------------------------------------------------------- 1 | = downdoc Changelog 2 | :url-repo: https://github.com/opendevise/downdoc 3 | 4 | This document provides a summary of all notable changes to downdoc by release. 5 | For a detailed view of what's changed, refer to the {url-repo}/commits[commit history] of this project. 
6 | 7 | == 1.0.2-stable (2023-07-25) 8 | 9 | === Changed 10 | 11 | * Only support single hyphen as list marker 12 | 13 | === Fixed 14 | 15 | * Do not process attribute entry if attribute name begins with hyphen 16 | * Unset attribute entry if attribute name begins with exclamation point 17 | * Remove blockquote marker when unwrapping lines in Markdown-style blockquote 18 | * Retain paragraph breaks when unwrapping lines in Markdown-style blockquote 19 | 20 | == 1.0.1-stable (2023-04-12) 21 | 22 | === Fixed 23 | 24 | * Use target as fallback text for external xref (#10) 25 | * Do not consume final letter of cell as part of cell delimiter (#11) 26 | * Consume alignment as part of cellspec on table cell 27 | 28 | == 1.0.0-stable (2023-04-01) 29 | 30 | === Added 31 | 32 | * Add support for `hardbreaks` option on paragraph 33 | * Add support for `markdown-unwrap-prose` document attribute to remove newlines in paragraphs 34 | * Add support for nested description lists (up to 3 levels deep) 35 | * Add support for ordered list that uses explicit arabic numerals (e.g., `1.`) 36 | * Add support for numbers 10-19 in callout list 37 | * Add support for `noheader` option on table 38 | 39 | === Changed 40 | 41 | * Trim extra spaces after heading marker (effectively trimming leading spaces in title) 42 | * Trim extra spaces after list marker (effectively trimming leading spaces in principal text) 43 | 44 | === Fixed 45 | 46 | * Allow preprocessor conditionals to be nested 47 | * Allow description list term to start with list marker if line is not matched as a list item 48 | * Preserve non-space indent in front of empty line in verbatim block (with and without indent=0) 49 | * Restrict maximum (0-based) section level to 5 50 | * Verify in list when looking for list item offset by one or more empty lines 51 | * Don't interpret hard line break in last line of literal paragraph when directly adjacent to another paragraph 52 | * Don't promote first row of table to header if preceded by empty line 53 | * Don't drop cells when number of cells on line in source exceeds number of columns in table 54 | 55 | == 1.0.0-rc.2 (2023-03-07) 56 | 57 | === Added 58 | 59 | * Allow marks/tags used to enclose strikethrough text to be configured using `markdown-strikethrough` document attribute 60 | * Support title along with optional ID on delimited quote block 61 | * Process and drop metadata (ID and roles) on code span 62 | * Process single quoted text 63 | * Use reftext defined on target block for automatic link text of xref if block has ID, title, and reftext 64 | 65 | === Changed 66 | 67 | * Allow space in target of block or inline image as long as it's not the first character of the target 68 | * Ignore `image:` macro prefix if followed by backtick 69 | * Remove file extension from target basename when generating alt text for image 70 | 71 | === Fixed 72 | 73 | * Don't recognize xref macro if fragment in target contains spaces 74 | * Preserve non-space indent (e.g., quote block markers) when resetting indent of code block 75 | * Preserve non-space indent on list continuation line 76 | 77 | == 1.0.0-rc.1 (2023-02-07) 78 | 79 | === Added 80 | 81 | * Allow line break marker to be configured using `markdown-line-break` document attribute 82 | * Add basic support for inline stem macro 83 | * Support ID on lists and description lists 84 | * Support title on admonition block; insert after label 85 | * Support ID on admonition block; insert before label 86 | * Support title with ID on example block 87 | * Support title without 
ID on example and sidebar blocks
88 | * Apply normal substitutions to title/summary of spoiler block
89 |
90 | === Changed
91 |
92 | * Add hard line break after label on admonition paragraph
93 | * Don't add space after hard line break in table cell
94 | * Indent wrapped primary text of dlist entry
95 | * Don't add line break marker to description list term if primary description text is empty or resolves to empty
96 | * Don't add line break marker to admonition label if text resolves to empty
97 | * Replace newline that follows hard break in table cell with single space
98 |
99 | === Fixed
100 |
101 | * Place block title above table when table has no header
102 | * Don't initialize list and container stack to same array
103 |
104 | == 1.0.0-beta.12 (2023-01-27)
105 |
106 | === Added
107 |
108 | * Honor escaped link or URL macro (preceded with backslash)
109 |
110 | === Changed
111 |
112 | * Consider all Unicode-defined letters when checking for constrained boundaries of formatted text
113 |
114 | === Fixed
115 |
116 | * Don't match inline anchor that precedes formatted phrase as boxed attrlist
117 |
118 | == 1.0.0-beta.11 (2023-01-24)
119 |
120 | === Added
121 |
122 | * Drop include directive lines
123 | * Unescape ifdef, ifndef, and include preprocessor directives
124 |
125 | === Changed
126 |
127 | * Look for `+\p{L}+` instead of `+\p{Alpha}+` when scanning for curly apostrophe replacement
128 |
129 | === Fixed
130 |
131 | * Align match for inline anchor with match for block anchor
132 | * Give priority to preprocessor directive lines
133 |
134 | == 1.0.0-beta.10 (2023-01-21)
135 |
136 | === Added
137 |
138 | * Support indent=0 attribute on verbatim blocks
139 | * Support natural xrefs (xref that identifies target by title)
140 | * Support xref macro when target path contains spaces
141 | * Use title on paragraph with ID as automatic text (xreftext) for internal ref
142 | * Support reading input from stdin when input path is `-`
143 |
144 | === Fixed
145 |
146 | * End literal paragraph (and enclosing list, if relevant) when adjacent block attribute line is encountered
147 | * Prevent xref rewriter from matching square brackets elsewhere in the line
148 | * Honor escaped xref macro inside monospace phrase
149 | * Treat underscore as word character when converting marked phrase
150 | * Apply constrained formatting rules when converting bold or italic phrase
151 | * Align constrained formatting rules for marked phrase with those for bold and italic
152 | * Make match for block anchor more accurate
153 | * Don't resolve attribute reference if name begins with hyphen
154 |
155 | == 1.0.0-beta.9 (2023-01-11)
156 |
157 | === Added
158 |
159 | * Support indented list items
160 | * Unescape escaped preprocessor conditional inside verbatim block
161 | * Allow replacement characters for double smart quotes to be controlled using `quotes` attribute
162 |
163 | === Fixed
164 |
165 | * Track dlist type to avoid confusing qanda item with regular ordered list item
166 |
167 | == 1.0.0-beta.8 (2023-01-08)
168 |
169 | === Added
170 |
171 | * Support delimited quote block
172 | * Promote ID on paragraph to inline anchor
173 | * Unescape and skip escaped xref macro
174 | * Define `zwsp` as intrinsic attribute
175 | * Honor horizontal column alignments specified in cols attribute on table
176 |
177 | === Changed
178 |
179 | * Process all block attribute lines above a block rather than just the last one
180 |
181 | === Fixed
182 |
183 | * Don't interpret double colon at start of line or embedded inside term as 
description list entry marker 184 | * Don't process xref macro if target starts with colon 185 | * Correctly process cols attribute that contains both repeating and non-repeating col specs 186 | 187 | == 1.0.0-beta.7 (2023-01-03) 188 | 189 | === Added 190 | 191 | * Support xref to verbatim block with ID and title 192 | * Add rudimentary support for qanda list 193 | * Add support for ID on literal paragraphs (including promoted console blocks) and block images 194 | * Isolate lists inside a delimited block from those outside while still maintaining current indentation 195 | 196 | === Changed 197 | 198 | * Escape less than sign (`<`) in regular text 199 | 200 | === Fixed 201 | 202 | * Clear list context (inList and listStack) at start of non-attached delimited block (adjacent or non-adjacent) 203 | * Reset indent when clearing list context at start of delimited block 204 | * Don't interpret text enclosed in << and >> that contains spaces as an xref shorthand 205 | * Don't process link macro if target starts with colon 206 | 207 | == 1.0.0-beta.6 (2022-12-27) 208 | 209 | === Added 210 | 211 | * Add support for inline anchor (shorthand syntax only, no reftext) 212 | * Implement rudimentary support for literal monospace 213 | 214 | === Changed 215 | 216 | * Dramatically improve parsing performance by guarding use of regular expressions 217 | * Make xref macro parsing more accurate 218 | 219 | === Fixed 220 | 221 | * Don't mangle single quote enclosed in monospaced formatting pair 222 | * Don't attempt to substitute escaped attribute reference at start of monospaced phrase 223 | * Replace backslashes in monospaced phrase when no backslash on the line immediately follows a backtick 224 | * Don't mangle a double-escaped attribute reference 225 | * Allow use of all lower Unicode alphabetic characters in attribute name 226 | 227 | == 1.0.0-beta.5 (2022-12-23) 228 | 229 | === Added 230 | 231 | * Add support for delimited admonition block 232 | * Honor subs attribute on verbatim paragraph (attributes only) 233 | * Switch from filled to circled conums (to extend the range) 234 | * Honor hardbreak at end of wrapped line in table cell 235 | 236 | === Changed 237 | 238 | * Don't process marked (highlight) phrase within a word 239 | * Extend supported conums range to numbers 1-19 240 | * Rename ADMONITION_ICONS constant to ADMONS; switch value to Map 241 | * Enclose entire admonition label in strong phrase and remove colon 242 | 243 | === Fixed 244 | 245 | * Only promote first row of table to header if specified by %header option or by implicit syntax 246 | * Don't interpret multiple character references on line as marked (highlight) phrase 247 | * Restore indent after literal paragraph inside delimited block attached to list item 248 | * Fix overgreedy match in block attribute parsing when attribute values are quoted 249 | * Close promoted console code block at list continuation 250 | * Close verbatim at list item or list continuation 251 | 252 | == 1.0.0-beta.4 (2022-12-19) 253 | 254 | === Added 255 | 256 | * Process all conums per line in verbatim block 257 | * Track stack of containers attached to list item; don't end list item at empty line inside attached container 258 | 259 | === Fixed 260 | 261 | * Indent table header delimiter line when table is inside list 262 | 263 | == 1.0.0-beta.3 (2022-12-13) 264 | 265 | === Added 266 | 267 | * Add support for stem style on passthrough block to create display (block) math 268 | * Pass contents of passthrough block through unprocessed 269 | * Support discrete 
headings, including inside delimited block and at start of document 270 | * Support autonumbering of colist items (i.e., `<.>` syntax) 271 | * Support autonumbering of conums in verbatim block (i.e., `<.>` syntax) 272 | * Support implicit list continuation above literal paragraph inside list item 273 | 274 | === Changed 275 | 276 | * Process colist as a list instead of ad-hoc lines 277 | 278 | == 1.0.0-beta.2 (2022-12-10) 279 | 280 | === Changed 281 | 282 | * Remove leading line if empty after applying subs 283 | * Don't indent empty line 284 | * Implement callouts replacement as a named substitution 285 | * Read closing delimiter for verbatim block from cap property on inContainer object 286 | * Rename convertInline variable to substitutors to better reflect its purpose 287 | * Skip applying subs if no special characters are detected 288 | 289 | === Fixed 290 | 291 | * Reset subs when entering a verbatim block without block metadata 292 | * Clear block attributes after processing section title 293 | * Place title on promoted console literal paragraph above instead of inside block 294 | * Drop open in blank window hint from link text 295 | * Pop container when exiting verbatim block 296 | 297 | == 1.0.0-beta.1 (2022-12-05) 298 | 299 | _Initial prerelease._ 300 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (C) 2022-present Dan Allen 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.adoc: -------------------------------------------------------------------------------- 1 | = downdoc: Down-convert AsciiDoc to Markdown 2 | Dan Allen 3 | ifdef::env-github[] 4 | :toc: preamble 5 | :toc-title: Contents 6 | :toclevels: 1 7 | endif::[] 8 | 9 | > Rapidly down-converts AsciiDoc to Markdown. 10 | 11 | == What is downdoc? 12 | 13 | The downdoc package provides both a CLI (command: `downdoc`) and a JavaScript function (`downdoc`) to down-convert AsciiDoc to Markdown. 14 | "`But why,`" you may ask? 15 | 16 | The reality is, the AsciiDoc format isn't supported everywhere. 17 | For instance, npm package repositories such as https://npmjs.com and https://yarnpkg.com assume the README for a package is in Markdown. 18 | But you're a sensible developer who wants to maintain your README in AsciiDoc. 19 | Enter downdoc. 
20 |
21 | When the situation calls for it, you can draft content in AsciiDoc and convert it to Markdown where only Markdown is accepted.
22 | Typical examples include an issue tracker or chat application.
23 | To post in Markdown, run the AsciiDoc content through downdoc and paste the result it produces.
24 | downdoc handles the context switch to Markdown so your brain doesn't have to.
25 |
26 | This project started out with the goal of transpiling an AsciiDoc README to the Markdown format for npm packaging.
27 | In that regard, it was designed to be just good enough to get the job done.
28 | Over time, downdoc has proven to have broader application and has thus been gradually improved.
29 | It now does a reasonably good job of mapping constructs in AsciiDoc to Markdown for all situations.
30 | It even offers dedicated configuration to target some Markdown flavors and environments.
31 |
32 | == How does it work?
33 |
34 | downdoc employs a minimal approach, which does not make use of Asciidoctor, to map the AsciiDoc syntax to Markdown.
35 | While this gives downdoc the advantage of having no dependencies and being exceptionally fast, it does not support the full AsciiDoc syntax.
36 | Rather, it's intended to be used on AsciiDoc documents that are written with conversion to Markdown in mind.
37 |
38 | == Supported syntax
39 |
40 | In order for downdoc to convert an AsciiDoc document, it must have lines that end with LF (`\n`), not CRLF (`\r\n`).
41 |
42 | Here's a rough list of the AsciiDoc syntax that downdoc currently handles:
43 |
44 | * document title (atx-style only; Markdown-style headings not supported)
45 | * implicit author and revision lines in document header
46 | * common intrinsic attributes (empty, idprefix, idseparator, nbsp, sp, vbar, zwsp)
47 | * document attribute entries (single-line value only; no support for pass macro)
48 | * document attribute references
49 | * part/chapter/section titles and discrete headings (atx-style only; Markdown-style headings not supported)
50 | * block titles
51 | * formatted text (restricted to boundaries of a single line)
52 | ** strong (bold), emphasis (italic), code (monospace), literal code, mark, span w/ line-through role, double and single curly quotes, curly apostrophe
53 | ** metadata (ID and roles) attached to strong, emphasis and code spans are processed and dropped
54 | ** literal code span only escapes attribute references
55 | ** use `quotes` attribute to specify characters (space-separated) for double curly quotes (e.g., `“ ”`)
56 | ** use `markdown-strikethrough` attribute to specify characters to use to enclose strikethrough text
57 | * ordered, unordered (including checklists), and callout lists (1-19 or .)
58 | * nested ordered and unordered lists (use `markdown-list-indent` attribute to control indent size of output)
59 | * description lists with optional qanda style
60 | * list continuation (for current list item only; no support for ancestor list continuation)
61 | * link and URL macros (honors `hide-uri-scheme` attribute to hide URL scheme in visible URL); can be escaped
62 | * escaped link and URL macros and autolinks (bare URLs)
63 | * internal xrefs (honors value of `idprefix` and `idseparator` document attributes)
64 | * block IDs for document, sections, paragraphs
65 | * block IDs for other blocks (e.g., listing, literal, lists, example, sidebar, etc.) 
that have a title
66 | ** anchor is placed at start of title
67 | * auto-generated text for internal xrefs (section and verbatim blocks only) (honors `reftext` attribute on section or on block with ID and title)
68 | * inline anchor (shorthand syntax only, no reftext)
69 | * block and inline image macros
70 | * literal blocks, listing blocks, and source blocks with optional source language
71 | * diagram blocks (literal block with diagram dialect)
72 | * literal paragraphs (auto-detects command prompt and promotes to command code block; first line must be the least indented line)
73 | * callout numbers (i.e., conums) (1-19 or .) in verbatim blocks
74 | * attribute references in literal paragraphs and verbatim blocks when subs=attributes+ or subs=+attributes is specified
75 | * indent=0 attribute on verbatim blocks
76 | * admonition paragraphs and blocks
77 | * quote blocks with optional attribution
78 | * Markdown-style blockquotes (nested blocks are passed through as written; only inline markup is down-converted)
79 | * thematic breaks (including Markdown-style, e.g., `---`)
80 | * hard line breaks
81 | ** supports explicit character or `hardbreaks` option on paragraph
82 | ** uses backslash as line break character by default (use `markdown-line-break` document attribute to override)
83 | * ifdef and ifndef preprocessor conditionals (can be nested)
84 | * escaped ifdef, ifndef, and include preprocessor directives
85 | * tables
86 | ** cols attribute with optional horizontal alignments
87 | ** explicit header row or implicit header row unless noheader option is set
88 | ** no support for cell style (ignores cell style in colspec and cellspec)
89 | * collapsible blocks (set the `markdown-collapsible-variant` attribute to `spoiler` to generate Zulip-compatible spoiler block)
90 | * comment lines and blocks
91 | * passthrough blocks
92 | * stem blocks (i.e., display math) (assumes latexmath notation)
93 | * inline stem macro (no passthrough semantics)
94 | * delimited example, open, and sidebar blocks (delimiter lines removed)
95 |
96 | Here's the list of document attributes unique to this converter that control its behavior:
97 |
98 | * markdown-collapsible-variant (default: `disclosure`; accepts: `disclosure` or `spoiler`)
99 | * markdown-line-break (default: `\`; accepts any characters)
100 | * markdown-list-indent (default: not set; accepts a positive integer)
101 | * markdown-strikethrough (default: `~~`; accepts a mark sequence or a space-separated pair of HTML tags)
102 | * markdown-unwrap-prose (when set, will remove newlines between lines in paragraphs; reverses ventilated prose)
103 | * quotes (default: ` `; accepts a space-separated pair of HTML tags or marks)
104 |
105 | To use a backtick in a code span in Markdown, it must be enclosed in backticks then enclosed in non-backtick characters, such as spaces.
106 | To achieve this, we recommend setting the `backtick` attribute as follows:
107 |
108 | [,asciidoc]
109 | ----
110 | :backtick: {sp}```{sp}
111 | ----
112 |
113 | Then you can reference it anywhere in a code span using the `\{backtick}` attribute reference.
114 | If you need to use a backtick outside of a code span, you may want to split it into two separate attributes.
115 | You may need to play around a bit to get the output you want.
116 |
117 | Currently, include directives are dropped.
118 | However, you can first run the document through https://github.com/asciidoctor/asciidoctor-reducer[Asciidoctor Reducer] to incorporate the content from any included files. 
119 | Add the `--preserve-conditionals` option when running Asciidoctor Reducer to preserve preprocessor conditional directives in the output of this step.
120 | If you then run downdoc on the output produced by Asciidoctor Reducer, it will convert the entire document, includes and all.
121 |
122 | Support for additional syntax may be added in the future.
123 |
124 | == Prerequisites
125 |
126 | In order to use this package, you must have Node.js 16.17.0 or higher installed on your machine.
127 |
128 | == Install
129 |
130 | Use the following command to install the downdoc package into your project:
131 |
132 | [,console]
133 | ----
134 | $ npm i downdoc
135 | ----
136 |
137 | By default, `npm i` will install the latest stable release.
138 | The version number for stable downdoc releases ends with `-stable` because downdoc is a reclaimed package.
139 |
140 | Alternatively, you can defer installation and invoke the CLI using the `npx` command.
141 |
142 | == Usage
143 |
144 | === CLI
145 |
146 | [,console]
147 | ----
148 | $ npx downdoc README.adoc
149 | ----
150 |
151 | The `downdoc` command automatically generates a Markdown file.
152 | By default, the Markdown file has the same name as the AsciiDoc file with the file extension changed to `.md` (e.g., `README.md`).
153 |
154 | You can instruct the command to write to a different file using the `-o` (or `--output`) option.
155 |
156 | [,console]
157 | ----
158 | $ npx downdoc -o out.md README.adoc
159 | ----
160 |
161 | If the value of the `-o` option is `-`, the command will write the output to the console (i.e., stdout).
162 |
163 | [,console]
164 | ----
165 | $ npx downdoc -o - README.adoc
166 | ----
167 |
168 | You can pipe both the input and output by using `-` as the input path.
169 |
170 | [,console]
171 | ----
172 | $ cat README.adoc | npx downdoc -
173 | ----
174 |
175 | You can pass additional runtime AsciiDoc attributes using the `-a` (or `--attribute`) option.
176 |
177 | [,console]
178 | ----
179 | $ npx downdoc -a hide-uri-scheme -a markdown-list-indent=4 README.adoc
180 | ----
181 |
182 | To print a usage statement that includes a complete list of available options, pass the `-h` option.
183 |
184 | === API
185 |
186 | [,js]
187 | ----
188 | const downdoc = require('downdoc')
189 | const fsp = require('node:fs/promises')
190 |
191 | ;(async () => {
192 |   await fsp
193 |     .readFile('README.adoc', 'utf8')
194 |     .then((asciidoc) => fsp.writeFile('README.md', downdoc(asciidoc) + '\n', 'utf8'))
195 | })()
196 | ----
197 |
198 | The `downdoc` function accepts an object (i.e., map) of options as the second argument.
199 |
200 | [,js]
201 | ----
202 | downdoc(asciidoc, { attributes: { 'markdown-list-indent': 4 } })
203 | ----
204 |
205 | Currently the only supported option in the API is `attributes`, which is an object (i.e., map) of runtime AsciiDoc attributes.
206 |
207 | ifndef::env-npm[]
208 | === npm publish
209 |
210 | The prime focus of this tool is to convert an AsciiDoc README to Markdown for npm packaging.
211 | This switch is done by leveraging the pre and post lifecycle hooks of the `publish` task.
212 | In the pre hook, you convert the README to Markdown and hide the AsciiDoc README.
213 | The npm `publish` task will then discover the Markdown README and include it in the package.
214 | In the post hook, you remove the Markdown README and restore the AsciiDoc README.
215 |
216 | Using this technique, the published npm package ends up with a Markdown README, but the README in your repository remains in AsciiDoc. 
217 | We refer to this process as the README dance. 218 | 219 | If that sounds complicated, no need to worry. 220 | downdoc has you covered. 221 | The downdoc CLI provides the helpers you need to call during these lifecycle hooks. 222 | To use them, add the following entries to the `scripts` property in the [.path]_package.json_ at the root of your project. 223 | 224 | [,json] 225 | ---- 226 | "postpublish": "downdoc --postpublish", 227 | "prepublishOnly": "downdoc --prepublish", 228 | ---- 229 | 230 | Let's have a look at where these entries go when we step back and look at a complete file: 231 | 232 | [,json] 233 | ---- 234 | { 235 | "name": "my-package", 236 | "version": "1.0.0", 237 | "scripts": { 238 | "postpublish": "downdoc --postpublish", 239 | "prepublishOnly": "downdoc --prepublish", 240 | "test": "mocha" 241 | } 242 | } 243 | ---- 244 | 245 | If you don't want to declare a dependency on the downdoc package in your project, prepend the call to `downdoc` with `npx -y`: 246 | 247 | [,json] 248 | ---- 249 | "postpublish": "npx -y downdoc --postpublish", 250 | "prepublishOnly": "npx -y downdoc --prepublish", 251 | ---- 252 | 253 | When an AsciiDoc file is converted using the `--prepublish` CLI option, both the `env=npm` and `env-npm` document attributes are set. 254 | This allows you to show or hide content in the README that is displayed in the npm package registry. 255 | 256 | You can find an example of downdoc used for this purpose in the downdoc project itself. 257 | 258 | === Create executables 259 | 260 | Thus far, we've assumed that you're running downdoc using Node.js installed on your system. 261 | However, downdoc is one of those tools you might want to use in any environment. 262 | In that case, what you want is an executable that doesn't require Node.js to be installed. 263 | That's where pkg comes in. 264 | 265 | Using https://github.com/vercel/pkg[pkg], you can bundle Node.js and downdoc into a single executable (i.e., a precompiled binary) per system (OS and architecture). 266 | To do so, clone this project and run the following command: 267 | 268 | $ npx pkg -t node18-linux,node18-macos,node18-win . 269 | 270 | This command will produce `downdoc-linux`, `downdoc-macos`, and `downdoc-win.exe`. 271 | You can transfer any one of these executables to a suitable system and run it without having to install Node.js. 272 | For example: 273 | 274 | $ ./downdoc-linux README.adoc 275 | 276 | The binary includes the package metadata and source code of this project in raw form. 277 | Run `npx pkg -h` or read the https://github.com/vercel/pkg[pkg README] to learn more about how it works. 278 | endif::[] 279 | 280 | == Copyright and License 281 | 282 | Copyright (C) 2022-present Dan Allen (OpenDevise Inc.) and the individual contributors to this project. 283 | 284 | Use of this software is granted under the terms of the MIT License. 
285 | -------------------------------------------------------------------------------- /bin/downdoc: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | 'use strict' 4 | 5 | require('../lib/cli')() 6 | -------------------------------------------------------------------------------- /lib/cli.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const downdoc = require('./index.js') 4 | const fsp = require('node:fs/promises') 5 | const ospath = require('node:path') 6 | const { parseArgs } = require('node:util') 7 | const readStream = require('./util/read-stream') 8 | const { version } = require('../package.json') 9 | 10 | async function run (p = process) { 11 | const options = { 12 | attribute: { type: 'string', multiple: true, short: 'a', desc: 'set an AsciiDoc attribute', hint: 'name=val' }, 13 | output: { type: 'string', short: 'o', desc: 'specify an output file or - for stdout', hint: 'path' }, 14 | postpublish: { type: 'boolean', desc: 'run the postpublish lifecycle routine (restore input file)' }, 15 | prepublish: { type: 'boolean', desc: 'run the prepublish lifecycle routine (convert and hide input file)' }, 16 | help: { type: 'boolean', short: 'h', desc: 'output this help and exit' }, 17 | version: { type: 'boolean', short: 'v', desc: 'output version and exit' }, 18 | } 19 | const { positionals, values } = parseArgs({ args: p.args || (p.argv || []).slice(2), options, strict: false }) 20 | if (values.help) return printUsage.call(p, options) 21 | if (values.version) return printVersion.call(p) 22 | const inputPath = positionals[0] ?? (values.postpublish || values.prepublish ? 'README.adoc' : undefined) 23 | if (!inputPath) return printUsage.call(p, options, true) 24 | if (values.postpublish) return restoreInputFile(inputPath, values.output) 25 | if (values.prepublish) values.attribute = ['env=npm', 'env-npm'].concat(values.attribute || []) 26 | const attributes = values.attribute?.reduce((accum, it) => { 27 | const [name, ...value] = it.split('=') 28 | accum[name] = value.join('=') 29 | return accum 30 | }, {}) 31 | if (!(await validateInputPath.call(p, inputPath))) return 32 | await convertFile.call(p, inputPath, attributes && { attributes }, values.output) 33 | if (values.prepublish) await hideInputFile(inputPath) 34 | } 35 | 36 | function convertFile (inputPath, opts, outputPath = toOutputPath(inputPath)) { 37 | const write = (data) => (outputPath === '-' ? this.stdout.write(data) : fsp.writeFile(outputPath, data, 'utf8')) 38 | const read = () => (inputPath === '-' ? readStream(this.stdin, 'utf8') : fsp.readFile(inputPath, 'utf8')) 39 | return read().then((input) => write(downdoc(input, opts) + '\n')) 40 | } 41 | 42 | function toOutputPath (path) { 43 | return path === '-' ? path : path.replace(/\.adoc$/, '.md') 44 | } 45 | 46 | function hideInputFile (inputPath) { 47 | return fsp.rename(inputPath, toHiddenPath(inputPath)) 48 | } 49 | 50 | function restoreInputFile (inputPath, outputPath = toOutputPath(inputPath)) { 51 | const hiddenInputPath = toHiddenPath(inputPath) 52 | return Promise.all([ 53 | gracefulStat(hiddenInputPath).then((stat) => (stat.isFile() ? fsp.rename(hiddenInputPath, inputPath) : undefined)), 54 | gracefulStat(outputPath).then((stat) => (stat.isFile() ? fsp.unlink(outputPath) : undefined)), 55 | ]) 56 | } 57 | 58 | function toHiddenPath (path) { 59 | const { dir, base } = ospath.parse(path) 60 | return ospath.join(dir, '.' 
+ base) 61 | } 62 | 63 | function gracefulStat (path) { 64 | return fsp.stat(path).catch(() => ({ isDirectory: () => false, isFile: () => false })) 65 | } 66 | 67 | function printUsage (options, error) { 68 | error ? (this.exitCode = 1) : printVersion.call(this, true) 69 | let usage = [ 70 | 'Usage: downdoc [OPTION]... FILE', 71 | 'Convert the specified AsciiDoc FILE to a Markdown file.', 72 | 'Example: downdoc README.adoc', 73 | ] 74 | if (error) { 75 | usage = usage.slice(0, 1).concat("Run 'downdoc --help' for more information.") 76 | } else { 77 | usage.push('') 78 | Object.entries(options).forEach(([long, { short, hint, multiple, desc }]) => { 79 | const option = short ? `-${short}, --${long}${hint ? ' ' + hint : ''}` : `--${long}` 80 | usage.push(` ${option.padEnd(27, ' ')}${desc}${multiple ? '; can be specified multiple times' : ''}`) 81 | }) 82 | usage.push('', 'If --output is not specified, the output file path is derived from FILE (e.g., README.md).') 83 | } 84 | usage.reduce((stream, line) => typeof stream.write(line + '\n') && stream, error ? this.stderr : this.stdout) 85 | } 86 | 87 | function printVersion (withCommandName) { 88 | this.stdout.write(`${withCommandName ? 'downdoc ' : ''}${version}\n`) 89 | } 90 | 91 | function validateInputPath (path) { 92 | if (path === '-') return true 93 | return gracefulStat(path).then((stat) => { 94 | if (stat.isFile()) return true 95 | this.exitCode = 1 96 | this.stderr.write(`downdoc: ${path}: ${stat.isDirectory() ? 'Not a file' : 'No such file'}\n`) 97 | }) 98 | } 99 | 100 | module.exports = run 101 | -------------------------------------------------------------------------------- /lib/index.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const ADMONS = JSON.parse( 4 | '{"CAUTION":"\ud83d\udd25","IMPORTANT":"\u2757","NOTE":"\ud83d\udccc","TIP":"\ud83d\udca1","WARNING":"\u26a0\ufe0f"}' 5 | ) 6 | const ATTRIBUTES = JSON.parse( 7 | '{"empty":"","idprefix":"_","idseparator":"_","markdown-line-break":"\\\\","markdown-strikethrough":"~~",' + 8 | '"nbsp":" ","quotes":" ","sp":" ","vbar":"|","zwsp":"​"}' 9 | ) 10 | const BREAKS = { "'''": '---', '***': '---', '---': '---', '<<<': undefined, 'toc::[]': undefined } 11 | const CONUMS = [...Array(19)].reduce((obj, _, i) => (obj[i + 1] = String.fromCharCode(0x2460 + i)) && obj, {}) 12 | const DELIMS = { '----': 'v', '....': 'v', '====': 'c', '|===': 't', '--': 'c', '****': 'c', ____: 'c', '++++': 'p' } 13 | const LIST_MARKERS = [1, 2, 3, 4, 5, 6, 7, 8, 9, '*', '.', '<', '-'].reduce((obj, c) => (obj['' + c] = true) && obj, {}) 14 | const NORMAL_SUBS = ['quotes', 'attributes', 'macros'] 15 | const SUBSTITUTORS = { quotes, attributes, macros, callouts } 16 | const TDIV = { '': '| --- ', '<': '| :-- ', '^': '| :-: ', '>': '| --: ' } 17 | const AttributeEntryRx = /^:(!)?([^:-][^:]*):(?:$| (.+))/ 18 | const AttributeRefRx = /(\\)?\{([\p{Ll}\d_][\p{Ll}\d_-]*)\}/gu 19 | const AuthorInfoLineRx = /^(?:[\p{Alpha}\d_]+(?: +[\p{Alpha}\d_]+){0,2}(?: +<([^>]+)>)?(?:; |$))+$/u 20 | const BlockAnchorRx = /^\[([\p{L}_][\p{Alpha}\d_\-:.]*)(?:, ?(.+))?\]$/u 21 | const BlockImageMacroRx = /^image::([^\s[][^[]*)\[(.*)\]$/ 22 | const CellDelimiterRx = /(?:(?:^| +)(?:[<>^.]*[a-z]?)|)\| */ 23 | const ConumRx = /(^| )<([.1-9]|1\d)>(?=(?: <(?:[.1-9]|1\d)>)*$)/g 24 | const DlistItemRx = /^(?!\/\/)(\S.*?)(:{2,4})(?: (.+))?($)/ 25 | const ElementAttributeRx = /(?:^|, *)(?:(\w[\w-]*)=)?(?:("|')([^\2]+?)\2|([^,]+|))/g 26 | const EmphasisSpanMetaRx = /(?|-|\d+\.) 
+(.+)/ 32 | const MarkedSpanRx = /(?][^,>]*)(?:, ?([^>]+))?>>/g 41 | 42 | module.exports = function downdoc (asciidoc, { attributes: initialAttrs = {} } = {}) { 43 | const attrs = new Map(Object.entries(Object.assign({}, ATTRIBUTES, initialAttrs))) 44 | const lines = asciidoc.split(attrs.delete('doctitle') ? '\n' : '\n') 45 | if (lines[lines.length - 1] === '') lines.pop() 46 | let inContainer, inHeader, inList, inPara, inTable, indent 47 | inHeader = (inContainer = inPara = (indent = '') || false) || asciidoc[0] === '=' || !!~asciidoc.indexOf('\n= ') 48 | let blockAttrs, blockTitle, chr0, grab, hardbreakNext, listStack, match, next, subs, style, verbatim 49 | const [containerStack, nrefs, refs, skipStack, undef] = [(listStack = []).slice(), new Map(), new Map(), [], () => {}] 50 | return lines 51 | .reduce((accum, line, idx) => { 52 | while ((grab = match = next = style = subs = undefined) === undefined) { 53 | if (skipStack.length && (match = skipStack[skipStack.length - 1] || (line === 'endif::[]' && line))) { 54 | if (line === match) return undef(skipStack.pop()) || accum 55 | if (match === 'endif::[]' && line.startsWith('if') && /^ifn?def::.+\[\]$/.test(line)) skipStack.push(match) 56 | return accum 57 | } else if (!line && !inContainer.verbatim) { 58 | if (inTable || (inHeader = inPara = false)) return accum 59 | inList ? (inPara = undefined) : (line = indent = inContainer.childIndent || '') && (line = line.trimEnd()) 60 | blockAttrs = blockTitle = verbatim = verbatim?.close() 61 | return accum[accum.length - 1] && (accum[accum.length] = line) ? accum : accum 62 | } else if (((grab = (chr0 = line[0]) === '\\') || (chr0 === 'i' && line[1] !== 'm')) && 63 | ((grab && line === '\\endif::[]') || (line[line.length - 1] === ']' && ~line.indexOf('::') && 64 | (match = PreprocessorDirectiveRx.exec(line)))) && !(line = grab ? line.substring(1) : undefined)) { 65 | if (match[1]) { 66 | const [,, negated, name, text, drop = attrs.has(name) ? !!negated : !negated] = match 67 | if (text ? (drop ? false : (line = text)) : !skipStack.push(drop && 'endif::[]')) continue // redo 68 | } 69 | } else if (inContainer.verbatim) { 70 | if (line === inContainer.delimiter) { 71 | ;({ cap: line, indent, inList, listStack } = inContainer) 72 | if (inContainer.outdent && (grab = inContainer.outdent + indent.length) && ~(match = inContainer.at)) { 73 | for (let i = match, l = accum.length; ++i < l;) accum[i] = (indent + accum[i].substring(grab)).trimEnd() 74 | } 75 | inContainer = containerStack.length ? containerStack.pop() : false 76 | } else if ((match = line.length)) { 77 | inContainer.outdent &&= Math.min(match - line.trimStart().length, inContainer.outdent) 78 | if (match > 2 && (~line.indexOf('{') || line[match - 1] === '>')) subs = inContainer.subs 79 | } 80 | } else if (!inHeader && line in DELIMS) { 81 | if (inPara !== false) inPara = !(listStack = (inList = undef((indent = inContainer.childIndent || ''))) || []) 82 | const opening = inContainer === false || (line !== inContainer.delimiter && containerStack.push(inContainer)) 83 | ? (inContainer = { delimiter: line, indent, childIndent: indent, inList, listStack }) 84 | : undefined 85 | if ((grab = DELIMS[line]) === 'v') { 86 | inContainer.subs = (inContainer.verbatim = !!(attrs.coseq = 1)) && ['callouts'] 87 | if (blockAttrs) { 88 | style = ((style = blockAttrs.get(1) || (line === '----' ? 'source' : undefined)) === 'source') 89 | ? blockAttrs.get(2) || attrs.get('source-language') 90 | : style === 'listing' || style === 'literal' ? 
undefined : style 91 | if (blockAttrs.get('indent') === '0') Object.assign(inContainer, { at: accum.length, outdent: Infinity }) 92 | if (blockAttrs.get('subs')?.includes('attributes')) inContainer.subs.push('attributes') 93 | blockTitle &&= writeBlockTitle(accum, blockTitle, blockAttrs, attrs, refs) 94 | } else if (line === '----') style = attrs.get('source-language') 95 | line = style ? (inContainer.cap = '```') + style : (inContainer.cap = '```') 96 | } else if (grab === 'p' && (inContainer.verbatim = true)) { 97 | line = blockAttrs && blockAttrs.get(1) === 'stem' ? (inContainer.cap = '```') + 'math' : undefined 98 | blockTitle = undefined 99 | } else if (opening === undefined) { 100 | ;({ cap: line, indent, inList, listStack } = inContainer) 101 | inTable = blockTitle = undefined 102 | inContainer = containerStack.length ? containerStack.pop() : false 103 | } else if (grab === 't') { 104 | inTable = { header: blockAttrs?.has('header-option') || (!blockAttrs?.has('noheader-option') && undefined) } 105 | if (blockAttrs !== (line = undefined) && (grab = blockAttrs.get('cols'))) { 106 | const cols = (!~grab.indexOf('*') && grab.split(/,|;/)) || grab.split(/,|;/) 107 | .reduce((a, c) => a.push.apply(a, ~c.indexOf('*') ? Array(parseInt(c, 10)).fill(c) : [c]) && a, []) 108 | ;(inTable.div = ~grab.indexOf('<') || ~grab.indexOf('^') || ~grab.indexOf('>') 109 | ? cols.reduce((buf, c) => buf + TDIV[/(?]|$/.exec(c)[0]], '') + '|' 110 | : TDIV[''].repeat(cols.length) + '|') && (inTable.cols = cols.length) 111 | } 112 | } else if (line === '____') { 113 | indent = inContainer.childIndent += '> ' 114 | if ((grab = blockAttrs && blockAttrs.get(2))) inContainer.cap = '>\n' + indent + '\u2014 ' + grab 115 | line = blockTitle &&= writeBlockTitle(accum, blockTitle, blockAttrs, attrs, refs) 116 | } else if (line === '====' && blockAttrs) { 117 | if ((style = blockAttrs.get(1)) in ADMONS) { 118 | line = '
' + (blockAttrs.has('id') ? '' : '') + 119 | ADMONS[style] + ' ' + style + (blockTitle ? ': ' + blockTitle.text : '') + '
\n' 120 | inContainer.cap = '
' 121 | blockTitle = undefined 122 | } else if (blockAttrs.has('collapsible-option')) { 123 | line = attrs.get('markdown-collapsible-variant') === 'spoiler' && (grab = 'spoiler') 124 | ? (inContainer.cap = '```') + (blockTitle ? grab + ' ' + applySubs.call(attrs, blockTitle.text) : grab) 125 | : (inContainer.cap = '') && (blockAttrs.has('open-option') ? '
' : '
') + 126 | '\n' + indent + '' + (blockTitle ? blockTitle.text : 'Details') + '\n' 127 | blockTitle = undefined 128 | } else line = blockTitle &&= writeBlockTitle(accum, blockTitle, blockAttrs, attrs, refs) 129 | } else line = blockTitle &&= writeBlockTitle(accum, blockTitle, blockAttrs, attrs, refs) 130 | if (opening !== (blockAttrs = undefined)) listStack = (inList = undefined) || [] 131 | } else { 132 | let _chr0, _line, indented 133 | if (!(indented = chr0 === ' ')) _line = line 134 | if (inPara === undefined && !(inPara = false) && (_chr0 = indented ? (_line = line.trimStart())[0] : chr0)) { 135 | isAnyListItem(_chr0, _line) && inList 136 | ? accum.pop() 137 | : !indented && (listStack = (inList = undef((indent = inContainer.childIndent || ''))) || []) 138 | } 139 | if (chr0 === '/' && line[1] === '/') { 140 | line = line === '////' ? undef((inPara = !(skipStack.push(line)))) : undefined 141 | } else if (!inPara && chr0 === '=' && (match = isHeading(line, inContainer === false, blockAttrs))) { 142 | let [marker, title, id, autoId] = match 143 | if (inHeader) { 144 | if (marker.length > 1 || (blockAttrs && blockAttrs.get(1) === 'discrete') || attrs.has('doctitle')) { 145 | if (!(inHeader = false) && accum.length) return accum 146 | continue // redo 147 | } 148 | attrs.set('doctitle', title) 149 | if ((id = blockAttrs?.get('id'))) autoId = title.toLowerCase().split(' ').join('-') 150 | } else { 151 | autoId = (grab = (title = attributes.call(attrs, title)).toLowerCase().split(' ')).join('-') 152 | id = blockAttrs?.get('id') || attrs.get('idprefix') + grab.join(attrs.get('idseparator')) 153 | } 154 | if (id) refs.set(title, refs.set(id, { autoId, reftext: blockAttrs?.get('reftext'), title }).get(id)) 155 | blockAttrs = blockTitle = undefined 156 | line = '#'.repeat(marker.length) + ' ' + title 157 | } else if (!inPara && chr0 === ':' && (match = AttributeEntryRx.exec(line))) { 158 | const [, del, name, val = ''] = (line = undefined) || match 159 | if (!(name in initialAttrs)) del ? attrs.delete(name) : attrs.set(name, val && attributes.call(attrs, val)) 160 | } else if (chr0 === '[' && line[line.length - 1] === ']') { 161 | if (verbatim && !(inPara = verbatim = verbatim.close())) continue // redo 162 | blockAttrs = parseAttrlist(line.substring(1, line.length - 1), blockAttrs) 163 | line = (inPara = false) || undefined 164 | } else if (inHeader) { 165 | if ((inHeader = attrs.has('doctitle'))) { 166 | if (!attrs.has('author') && AuthorInfoLineRx.test(line)) { 167 | const authors = line.split('; ').map((it) => it.split(' <')[0]) 168 | attrs.set('author', authors[0]).set('authors', authors.join(', ')) 169 | } else if (!('revdate' in attrs) && !('revnumber' in attrs) && (match = RevisionInfoLineRx.exec(line))) { 170 | const [, revnumber, revdate_, revdate = revdate_] = match 171 | ;(revnumber ? attrs.set('revnumber', revnumber) : true) && revdate && attrs.set('revdate', revdate) 172 | } else inHeader = false 173 | } 174 | if (!inHeader && !accum.length) continue // redo 175 | line = undefined 176 | } else if (inTable) { 177 | const row = inTable.row 178 | const cells = ~line.indexOf('|', 1) 179 | ? line.split(CellDelimiterRx) 180 | : chr0 === '|' ? ['', line.substring(line[1] === ' ' ? 
2 : 1)] : [line] 181 | if (row) { 182 | if (cells[0]) { 183 | if (row.length && (row.wrapped = true)) { 184 | row[row.length - 1] = ((grab = row[row.length - 1]) && hardbreak(grab, ' +\n') + ' ') + cells[0] 185 | } else { 186 | line = (grab = accum[accum.length - 1].split('\n'))[0].substring(0, grab[0].length - 2).trimEnd() 187 | line = hardbreak(line, '
') + ' ' + applySubs.call(attrs, cells[0]) + ' |' 188 | accum[accum.length - 1] = grab.length > 1 ? (grab[0] = line) && grab.join('\n') : line 189 | } 190 | } 191 | if ((cells.length === 1 ? row.length : row.push.apply(row, cells.slice(1))) < inTable.cols) return accum 192 | line = '| ' + applySubs.call(attrs, row.splice(0, inTable.cols).join(' | ')) + ' |' 193 | if (row.wrapped && !(row.wrapped = false) && ~line.indexOf(' +\n')) line = line.replace(/ \+\n/g, '
') 194 | if (row.length && accum.push(indent + line) && (line = '|' + row.splice(0).join(' |'))) continue // redo 195 | } else { 196 | const cols = (inTable.row = []) && (inTable.cols ??= cells.length - 1) 197 | if (!(inTable.header ??= cols === cells.length - 1 && lines[idx + 1] === '' && lines[idx - 1] !== '')) { 198 | blockTitle &&= writeBlockTitle(accum, blockTitle, blockAttrs, attrs, refs) 199 | accum.push(indent + '| '.repeat(cols) + '|', indent + (inTable.div || TDIV[''].repeat(cols) + '|')) 200 | continue // redo 201 | } 202 | subs = NORMAL_SUBS 203 | line = '| ' + cells.slice(1).join(' | ') + ' |\n' + indent + (inTable.div || TDIV[''].repeat(cols) + '|') 204 | } 205 | } else if (line === '+' && inList) { 206 | ;({ indent: line, childIndent: indent } = inList) 207 | verbatim = (inPara = false) || verbatim?.close() 208 | return (accum[accum.length] = line && line.trimEnd()) ? accum : accum // eslint-disable-line no-cond-assign 209 | } else if ((!inPara || inList) && (_chr0 ??= indented ? (_line = line.trimStart())[0] : chr0) && 210 | (match = isAnyListItem(_chr0, _line, 'exec'))) { 211 | let [, marker, text, desc, dlist] = match 212 | if (dlist !== undefined && (dlist = text)) { // marker and text are swapped for dlist item 213 | (blockAttrs && blockAttrs.get(1) === 'qanda') || (inList && inList.dlist === 'qanda') 214 | ? (text = '_' + marker + '_') && (marker = '.'.repeat(dlist.length - 1)) && (dlist = 'qanda') 215 | : (text = '*' + marker + '*') && (marker = dlist.length > 2 ? '-'.repeat(dlist.length - 1) : '-') 216 | if (!(next = desc)) hardbreakNext = 'pending' 217 | } 218 | const ordered = (marker[0] === '<' && !!(marker = '<.>')) || marker[0] === '.' || 219 | (marker[marker.length - 1] === '.' && !!(marker = '1.')) 220 | if (!inList || inList.marker !== marker) { 221 | if ((listStack.length && ~(match = listStack.findIndex((it) => it.marker === marker)))) { 222 | indent = (inList = (listStack = match ? listStack.slice(0, match + 1) : [listStack[0]]).pop()).indent 223 | } else { 224 | indent = (inList ? (listStack[listStack.length] = inList) : inContainer).childIndent || '' 225 | const lindent = (attrs.lindent ??= parseInt(attrs.get('markdown-list-indent'), 10)) || (ordered ? 3 : 2) 226 | inList = { marker, indent, childIndent: indent + ' '.repeat(lindent), numeral: ordered && 0, dlist } 227 | } 228 | } else indent = inList.indent 229 | verbatim = verbatim?.close() 230 | subs = (inPara = true) && NORMAL_SUBS 231 | line = (ordered ? ++inList.numeral + '. ' : '* ') + text 232 | } else if (inPara) { 233 | subs = NORMAL_SUBS 234 | if (verbatim) { 235 | if (indented ? undef((subs = verbatim.subs)) : !(inPara = verbatim = verbatim.close())) continue // redo 236 | line = line.substring(verbatim.outdent) 237 | } else if (hardbreakNext || inPara === 'hardbreaks') { 238 | accum[accum.length - 1] += attrs.get('markdown-line-break') 239 | } else if ((grab = accum[accum.length - 1])?.[grab.length - 1] === '+' && grab[grab.length - 2] === ' ') { 240 | accum[accum.length - 1] = hardbreak(grab, attrs.get('markdown-line-break'), true) 241 | } else if (attrs.has('markdown-unwrap-prose')) { 242 | ;(inPara !== '> ' || ((line = line.trimEnd()) !== '>' && accum[accum.length - 1] !== '>' && 243 | (line = line.substring(2)))) && (indent = accum.pop() + ' ') 244 | } 245 | } else if (chr0 === '.') { 246 | subs = NORMAL_SUBS 247 | if (line !== chr0 && !(line[1] === '.' && line[2] === '.') && (match = line.length - 1)) { 248 | const text = line[1] === '*' && line[match] === '*' ? 
line.substring(2, match) : line.substring(1) 249 | blockTitle = (line = undefined) || { indent, text, subs } 250 | } 251 | } else if (indented) { 252 | if (blockAttrs && blockAttrs.get('subs')?.includes('attributes')) subs = ['attributes'] 253 | const outdent = line.length - (line = _line).length 254 | if ((inPara = true) && _chr0 === '$' && _line[1] === ' ') { 255 | indent = (inList || inContainer).childIndent || '' 256 | verbatim = { cap: indent + '```', close: () => accum.push(verbatim.cap) && undefined, outdent, subs } 257 | line = '```console\n' + indent + line 258 | } else { 259 | indent = ((inList || inContainer).childIndent || '') + ' ' 260 | verbatim = { close: undef, outdent, subs } 261 | } 262 | } else if (~(match = line.indexOf(': ')) && match < 10 && (style = line.substring(0, match)) in ADMONS) { 263 | next = (inPara = true) && line.substring(match + 2) 264 | line = '**' + ADMONS[style] + ' ' + style + '**' 265 | } else if (chr0 === 'i' && line.startsWith('image::') && (match = BlockImageMacroRx.exec(line))) { 266 | line = image.apply(attrs, match, (subs = ['attributes'])) 267 | } else if (line in BREAKS) { 268 | line = BREAKS[line] 269 | } else { 270 | inPara = blockAttrs?.has('hardbreaks-option') ? 'hardbreaks' : chr0 === '>' && line[1] === ' ' ? '> ' : true 271 | if ((grab = blockAttrs?.get('id'))) blockTitle ? (blockTitle.id = grab) : (line = '[[' + grab + ']]' + line) 272 | subs = NORMAL_SUBS 273 | } 274 | } 275 | if (line) { 276 | blockTitle &&= writeBlockTitle(accum, blockTitle, blockAttrs, attrs, refs) 277 | if (subs && !(line = applySubs.call(attrs, line, subs)) && !accum.length) return accum 278 | accum[accum.length] = indent && line ? indent + line : line 279 | ;(next &&= applySubs.call(attrs, next)) && (accum[accum.length - 1] += attrs.get('markdown-line-break')) && 280 | (accum[accum.length] = indent ? indent + next : next) 281 | } else if (line === undefined) { 282 | return accum 283 | } else accum[accum.length] = indent ? indent.trimEnd() : line 284 | hardbreakNext &&= hardbreakNext === 'pending' || undefined 285 | return accum 286 | } 287 | }, []) 288 | .join('\n') 289 | .trimEnd() 290 | .replace(RewriteInternalXrefRx, (_, text, id) => { 291 | const { title = id, reftext = title, autoId = id } = refs.get(id) || nrefs.get(id) || {} 292 | return '[' + (text || reftext) + '](#' + autoId + ')' 293 | }) 294 | .concat(((grab = verbatim?.cap)) ? '\n' + grab : '') // prettier-ignore 295 | } 296 | 297 | function applySubs (str, subs = NORMAL_SUBS) { 298 | return /[{\x60\x27*_:<[#]/.test(str) ? subs.reduce((str, name) => SUBSTITUTORS[name].call(this, str), str) : str 299 | } 300 | 301 | function attributes (str) { 302 | return ~str.indexOf('{') ? str.replace(AttributeRefRx, (m, bs, n) => (bs ? m.substring(1) : this.get(n) ?? m)) : str 303 | } 304 | 305 | function callouts (str, apply = str[str.length - 1] === '>') { 306 | return apply ? str.replace(ConumRx, (_, sp, chr) => sp + CONUMS[chr === '.' ? this.coseq++ : chr]) : str 307 | } 308 | 309 | function hardbreak (str, mark, force, len = str.length) { 310 | return force || (str[len - 1] === '+' && str[len - 2] === ' ') ? str.substring(0, str.length - 2) + mark : str 311 | } 312 | 313 | function image (_, target, attrlist, _idx, _str, alt = attrlist.split(',')[0] || /(.*\/)?(.*?)($|\.)/.exec(target)[2]) { 314 | return '![' + alt + '](' + (this.get('imagesdir') ? 
this.get('imagesdir') + '/' : '') + target + ')' 315 | } 316 | 317 | function isAnyListItem (chr0, str, mode = 'test', match = chr0 in LIST_MARKERS && ListItemRx[mode](str)) { 318 | return match || (str.endsWith('::') || ~str.indexOf(':: ') ? DlistItemRx[mode](str) : undefined) 319 | } 320 | 321 | function isHeading (str, acceptAll, blockAttrs, marker, title, spaceIdx = str.indexOf(' ')) { 322 | if (!(~spaceIdx && str.startsWith((marker = ['=', '==', '===', '====', '=====', '======'][spaceIdx - 1])))) return 323 | if (!(title = str.substring(spaceIdx + 1)) || (title[0] === ' ' && !(title = title.trimStart()))) return 324 | if (acceptAll || (blockAttrs && blockAttrs.get(1) === 'discrete')) return [marker, title] 325 | } 326 | 327 | function macros (str) { 328 | if (!~str.indexOf(':')) return ~str.indexOf('[[') ? str.replace(InlineAnchorRx, '') : str 329 | if (~str.indexOf('m:[')) str = str.replace(InlineStemMacroRx, (_, expr) => '$' + expr.replace(/\\]/g, ']') + '$') 330 | if (~str.indexOf('image:')) str = str.replace(InlineImageMacroRx, image.bind(this)) 331 | if (~str.indexOf(':/') || ~str.indexOf('link:')) { 332 | str = str.replace(LinkMacroRx, (_, esc, scheme = '', url, boxed = '', text, bareScheme = scheme, bareUrl) => { 333 | if (esc) return bareScheme ? '' + bareScheme + '' + (bareUrl ?? url + boxed) : 'link:' + url + boxed 334 | if (!bareUrl) return '[' + (text ||= this.has('hide-uri-scheme') ? url : scheme + url) + '](' + scheme + url + ')' 335 | return this.has('hide-uri-scheme') ? '[' + bareUrl + '](' + bareScheme + bareUrl + ')' : bareScheme + bareUrl 336 | }) 337 | } 338 | if (~str.indexOf('[[')) str = str.replace(InlineAnchorRx, '') 339 | if (!~str.indexOf('xref:')) return str 340 | return str.replace(XrefMacroRx, (m, esc, p, ext, id_, id = id_, txt) => 341 | esc ? m.substring(1) : '[' + (p && (ext === '#' || (p += ext)) ? txt || p : (p = '#!' + id) && txt) + '](' + p + ')' 342 | ) 343 | } 344 | 345 | function parseAttrlist (attrlist, attrs = new Map()) { 346 | if (!attrlist) return attrs 347 | attrs.set(0, attrlist) 348 | let chr0, idx, m, shorthand, style 349 | if ((chr0 = attrlist[0]) === '[' && (m = BlockAnchorRx.exec(attrlist))) { 350 | return m[2] ? attrs.set('id', m[1]).set('reftext', m[2]) : attrs.set('id', m[1]) 351 | } 352 | if (!(idx = 0) && (~attrlist.indexOf('=') || ~attrlist.indexOf('"'))) { 353 | while ((m = ElementAttributeRx.exec(attrlist))) { 354 | attrs.set(m[1] ?? ++idx, m[4] ?? m[3]) 355 | if (!m.index) attrlist = (ElementAttributeRx.lastIndex = 1) && ',' + attrlist 356 | } 357 | } else if (chr0 === ',' || ~attrlist.indexOf(',')) { 358 | for (const it of attrlist.split(',')) attrs.set(++idx, it.trimStart()) 359 | } else attrs.set(1, attrlist) 360 | if (!(shorthand = attrs.get(1)) || (m = shorthand.split(StyleShorthandMarkersRx)).length < 2) return attrs 361 | for (let i = 0, len = m.length, val; i < len; i += 2) { 362 | if ((val = m[i]) && ((chr0 = m[i - 1]) || !(style = val))) { 363 | chr0 === '#' ? attrs.set('id', val) : chr0 === '.' ? attrs.set('role', val) : attrs.set(val + '-option', '') 364 | } 365 | } 366 | return attrs.set(1, style) 367 | } 368 | 369 | function quotes (str, idx) { 370 | const hasLt = ~(~str.indexOf('<<') ? (str = str.replace(XrefShorthandRx, 'xref:$1[$2]')) : str).indexOf('<') 371 | if (hasLt) str = str.replace(/ { 377 | if (hasLt && text.length > 3 && ~text.indexOf('<')) text = text.replace(/</g, '<') 378 | if (pass) return '`' + (~text.indexOf('{') ? 
text.replace(/\{(?=[a-z])/g, '\\{') : text) + '`' 379 | return '`' + (~text.indexOf('\\') ? text.replace(/\\(?=https?:|\.\.\.)/g, '') : text) + '`' 380 | }) 381 | } 382 | } 383 | if (~str.indexOf(']_')) str = str.replace(EmphasisSpanMetaRx, '') 384 | if (~(idx = str.indexOf('#')) && ~str.indexOf('#', idx + 1)) { 385 | str = str.replace(MarkedSpanRx, (_, roles, s, text) => { 386 | s &&= this.s ??= (s = this.get('markdown-strikethrough').split(' ')).length > 1 ? s.slice(0, 2) : [s[0], s[0]] 387 | return roles ? (s ? s[0] + text + s[1] : text) : '' + text + '' 388 | }) 389 | } 390 | if (!~str.indexOf("'")) return str 391 | return str.replace(/.'/g, (m, i, s, l = m[0], r = s[i + 2] || '') => 392 | l === '`' ? (r === '`' ? m : '\u2019') : /\p{L}/u.test(r) && /[\p{L}\d]/u.test(l) ? l + '\u2019' : m 393 | ) 394 | } 395 | 396 | function writeBlockTitle (buffer, blockTitle, blockAttrs, attrs, refs) { 397 | const { id = blockAttrs?.get('id'), indent, text, subs, title = applySubs.call(attrs, text, subs) } = blockTitle 398 | const anchor = id && refs.set(id, { title, reftext: blockAttrs.get('reftext') }) ? '' : '' 399 | buffer.push(indent + anchor + '**' + title + '**', '') 400 | } 401 | -------------------------------------------------------------------------------- /lib/util/read-stream.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | module.exports = function readStream (stream, encoding) { 4 | return new Promise((resolve, reject, buffer = '', chunk) => { 5 | return stream 6 | .setEncoding(encoding) 7 | .on('readable', () => { 8 | while ((chunk = stream.read()) !== null) buffer += chunk 9 | }) 10 | .on('error', reject) 11 | .on('end', () => resolve(buffer.trimEnd())) 12 | }) 13 | } 14 | -------------------------------------------------------------------------------- /npm/format.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { promises: fsp } = require('fs') 4 | const ospath = require('path') 5 | const format = require('prettier-eslint') 6 | 7 | async function formatAll (dirs, ignores, cwd = process.cwd()) { 8 | const result = [] 9 | for (const dir of dirs) { 10 | const subdirs = [] 11 | const absdir = ospath.join(cwd, dir) 12 | for await (const dirent of await fsp.opendir(absdir)) { 13 | const name = dirent.name 14 | if (dirent.isDirectory()) { 15 | if (name !== 'node_modules') subdirs.push(name) 16 | } else if (name.endsWith('.js')) { 17 | const filePath = ospath.join(absdir, name) 18 | if (!~ignores.indexOf(filePath)) { 19 | result.push( 20 | await fsp.readFile(filePath, 'utf8').then(async (text) => { 21 | const formatted = await format({ text, filePath }) 22 | return formatted === text ? 
false : fsp.writeFile(filePath, formatted).then(() => true) 23 | }) 24 | ) 25 | } 26 | } 27 | } 28 | if (subdirs.length) result.push.apply(result, await formatAll(subdirs, ignores, absdir)) 29 | } 30 | return result 31 | } 32 | 33 | ;(async (dirlist) => { 34 | const cwd = process.cwd() 35 | //const ignores = await fsp.readFile('.eslintignore', 'utf8').then((contents) => 36 | // contents 37 | // .trimRight() 38 | // .split('\n') 39 | // .map((it) => ospath.join(cwd, it)) 40 | //) 41 | const ignores = [] 42 | await formatAll(dirlist.split(','), ignores, cwd).then((result) => { 43 | if (process.env.npm_config_loglevel === 'silent') return 44 | const total = result.length 45 | const changed = result.filter((it) => it).length 46 | const unchanged = total - changed 47 | const changedStatus = `changed ${changed} file${changed === 1 ? '' : 's'}` 48 | const unchangedStatus = `left ${unchanged} file${unchanged === 1 ? '' : 's'} unchanged` 49 | console.log(`prettier-eslint ${changedStatus} and ${unchangedStatus}`) 50 | }) 51 | })(process.argv[2] || '') 52 | -------------------------------------------------------------------------------- /npm/release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # required packages (for ubuntu:kinetic): curl git jq nodejs npm 4 | 5 | if [ -z "$RELEASE_NPM_TOKEN" ]; then 6 | echo No npm token specified for publishing to npmjs.com. Stopping release. 7 | exit 1 8 | fi 9 | export RELEASE_BRANCH=${GITHUB_REF_NAME:-main} 10 | if [ ! -v RELEASE_USER ]; then 11 | export RELEASE_USER=$GITHUB_ACTOR 12 | fi 13 | RELEASE_GIT_NAME=$(curl -s https://api.github.com/users/$RELEASE_USER | jq -r .name) 14 | RELEASE_GIT_EMAIL=$RELEASE_USER@users.noreply.github.com 15 | 16 | # RELEASE_VERSION can be a version number (exact) or increment keyword (next in sequence) 17 | if [ -z "$RELEASE_VERSION" ]; then RELEASE_VERSION=prerelease; fi 18 | if [ -z "$RELEASE_NPM_TAG" ]; then 19 | if case $RELEASE_VERSION in major|minor|patch) ;; *) false;; esac; then 20 | RELEASE_NPM_TAG=latest 21 | elif case $RELEASE_VERSION in pre*) ;; *) false;; esac; then 22 | RELEASE_NPM_TAG=testing 23 | elif [ "$RELEASE_VERSION" != "${RELEASE_VERSION/-/}" ] && [ "${RELEASE_VERSION#*-}" != "stable" ]; then 24 | RELEASE_NPM_TAG=testing 25 | else 26 | RELEASE_NPM_TAG=latest 27 | fi 28 | fi 29 | 30 | # configure git to push changes 31 | git config --local user.name "$RELEASE_GIT_NAME" 32 | git config --local user.email "$RELEASE_GIT_EMAIL" 33 | 34 | # configure npm client for publishing 35 | echo -e "//registry.npmjs.org/:_authToken=$RELEASE_NPM_TOKEN" > $HOME/.npmrc 36 | 37 | # release! 38 | ( 39 | set -e 40 | npm version --no-git-tag-version $RELEASE_VERSION 41 | RELEASE_VERSION=$(npm exec -c 'echo -n $npm_package_version') 42 | if case $RELEASE_VERSION in 1.0.0-*) ;; *) false;; esac; then 43 | RELEASE_NPM_TAG=latest 44 | fi 45 | git commit -a -m "release $RELEASE_VERSION [no ci]" 46 | git tag -m "version $RELEASE_VERSION" v$RELEASE_VERSION 47 | git push origin $(git describe --tags --exact-match) 48 | npm publish --access public --tag $RELEASE_NPM_TAG 49 | git push origin $RELEASE_BRANCH 50 | ) 51 | 52 | exit_code=$? 
53 | 54 | # nuke npm settings 55 | rm -f $HOME/.npmrc 56 | 57 | # check for any uncommitted files 58 | git status -s -b 59 | 60 | exit $exit_code 61 | -------------------------------------------------------------------------------- /npm/version.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const fsp = require('node:fs/promises') 4 | const ospath = require('node:path') 5 | 6 | const PROJECT_ROOT_DIR = ospath.join(__dirname, '..') 7 | const CHANGELOG_FILE = ospath.join(PROJECT_ROOT_DIR, 'CHANGELOG.adoc') 8 | const VERSION = process.env.npm_package_version 9 | 10 | function getCurrentDate () { 11 | const now = new Date() 12 | return new Date(now.getTime() - now.getTimezoneOffset() * 60000) 13 | } 14 | 15 | function updateChangelog (releaseDate) { 16 | return fsp.readFile(CHANGELOG_FILE, 'utf8').then((changelog) => 17 | fsp.writeFile( 18 | CHANGELOG_FILE, 19 | changelog.replace(/^== (?:(Unreleased)|\d.*)$/m, (currentLine, replace) => { 20 | const newLine = `== ${VERSION} (${releaseDate})` 21 | return replace ? newLine : [newLine, '_No changes since previous release._', currentLine].join('\n\n') 22 | }) 23 | ) 24 | ) 25 | } 26 | 27 | ;(async () => { 28 | const releaseDate = getCurrentDate().toISOString().split('T')[0] 29 | await updateChangelog(releaseDate) 30 | })() 31 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "downdoc", 3 | "version": "1.0.2-stable", 4 | "description": "Rapidly down-converts AsciiDoc to Markdown. Often used to prepare an AsciiDoc README for npm packaging.", 5 | "license": "MIT", 6 | "author": "Dan Allen", 7 | "contributors": [ 8 | "Dan Allen " 9 | ], 10 | "repository": "https://github.com/opendevise/downdoc", 11 | "bugs": { 12 | "url": "https://github.com/opendevise/downdoc/issues" 13 | }, 14 | "engines": { 15 | "node": ">=16.17.0" 16 | }, 17 | "scripts": { 18 | "coverage": "nyc _mocha", 19 | "build": "npm test && npm run lint", 20 | "format": "node npm/format.js lib,test,npm", 21 | "lint": "eslint \"{lib,test,npm}/**/*.js\"", 22 | "postpublish": "downdoc --postpublish", 23 | "prepublishOnly": "downdoc --prepublish", 24 | "test": "_mocha", 25 | "version": "node npm/version.js" 26 | }, 27 | "bin": { 28 | "downdoc": "bin/downdoc" 29 | }, 30 | "main": "lib/index.js", 31 | "exports": { 32 | ".": "./lib/index.js", 33 | "./package.json": "./package.json" 34 | }, 35 | "imports": { 36 | "#cli": "./lib/cli.js" 37 | }, 38 | "files": [ 39 | "lib" 40 | ], 41 | "workspaces": [ 42 | "." 
43 | ], 44 | "keywords": [ 45 | "asciidoc", 46 | "markdown", 47 | "converter" 48 | ], 49 | "devDependencies": { 50 | "chai": "~4.3", 51 | "chai-fs": "~2.0", 52 | "chai-string": "~1.5", 53 | "dirty-chai": "~2.0", 54 | "eslint": "~8.45", 55 | "eslint-config-standard": "~17.1", 56 | "mocha": "~10.2", 57 | "nyc": "~15.1", 58 | "prettier-eslint": "~15.0" 59 | }, 60 | "nyc": { 61 | "all": true, 62 | "branches": 100, 63 | "cacheDir": "node_modules/.cache/nyc", 64 | "checkCoverage": true, 65 | "include": [ 66 | "lib/**/*.js" 67 | ], 68 | "lines": 100, 69 | "reporter": [ 70 | "cobertura", 71 | "lcov", 72 | "text" 73 | ], 74 | "reportDir": "reports" 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /test/cli-test.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 'use strict' 3 | 4 | const { cleanDir, expect, heredoc, StringIO } = require('./harness') 5 | const downdoc = require('#cli') 6 | const fsp = require('node:fs/promises') 7 | const ospath = require('node:path') 8 | const { Readable } = require('node:stream') 9 | const { version } = require('downdoc/package.json') 10 | 11 | const WORK_DIR = ospath.join(__dirname, 'work') 12 | 13 | describe('downdoc', () => { 14 | const lf = '\n' 15 | const oldcwd = process.cwd() 16 | let stdout, stderr, example 17 | 18 | before(async () => { 19 | await cleanDir(WORK_DIR, { create: true }) 20 | process.chdir(WORK_DIR) 21 | }) 22 | 23 | beforeEach(async () => { 24 | process.chdir(oldcwd) 25 | await cleanDir(WORK_DIR, { create: true }) 26 | process.chdir(WORK_DIR) 27 | stdout = new StringIO() 28 | stderr = new StringIO() 29 | example = { 30 | input: heredoc` 31 | = Document Title 32 | 33 | == Section Title 34 | 35 | Paragraph.${lf} 36 | `, 37 | expected: heredoc` 38 | # Document Title 39 | 40 | ## Section Title 41 | 42 | Paragraph.${lf} 43 | `, 44 | } 45 | }) 46 | 47 | after(async () => { 48 | process.chdir(oldcwd) 49 | await cleanDir(WORK_DIR) 50 | }) 51 | 52 | describe('info', () => { 53 | it('should only print version when -v option is specified', async () => { 54 | const args = ['-v'] 55 | const expected = version + '\n' 56 | await downdoc({ args, stdout }) 57 | expect(stdout.string).to.equal(expected) 58 | }) 59 | 60 | it('should only print version when --version option is specified', async () => { 61 | const args = ['--version'] 62 | const expected = version + '\n' 63 | await downdoc({ args, stdout }) 64 | expect(stdout.string).to.equal(expected) 65 | }) 66 | 67 | it('should only print usage when -h option is specified', async () => { 68 | const args = ['-h'] 69 | const expected = heredoc` 70 | downdoc ${version} 71 | Usage: downdoc [OPTION]... FILE 72 | Convert the specified AsciiDoc FILE to a Markdown file.${lf} 73 | ` 74 | await downdoc({ args, stdout }) 75 | expect(stdout.string).to.startWith(expected) 76 | expect(stdout.string).to.endWith('\n') 77 | }) 78 | 79 | it('should only print usage when --help option is specified', async () => { 80 | const args = ['-h'] 81 | const expectedStart = heredoc` 82 | downdoc ${version} 83 | Usage: downdoc [OPTION]... 
FILE 84 | Convert the specified AsciiDoc FILE to a Markdown file.${lf} 85 | ` 86 | const expectedIn = '\n -a, --attribute name=val set an AsciiDoc attribute; can be specified multiple times\n' 87 | const expectedEnd = 'If --output is not specified, the output file path is derived from FILE (e.g., README.md).\n' 88 | await downdoc({ args, stdout }) 89 | expect(stdout.string).to.startWith(expectedStart) 90 | expect(stdout.string).to.include(expectedIn) 91 | expect(stdout.string).to.endWith(expectedEnd) 92 | expect(stdout.string).to.endWith('\n') 93 | }) 94 | 95 | it('should only print usage to stderr and set exit code when no options or arguments are specified', async () => { 96 | const args = [] 97 | const expected = heredoc` 98 | Usage: downdoc [OPTION]... FILE 99 | Run 'downdoc --help' for more information.${lf} 100 | ` 101 | const p = { args, stdout, stderr } 102 | await downdoc(p) 103 | expect(stdout.string).to.be.empty() 104 | expect(stderr.string).to.equal(expected) 105 | expect(p.exitCode).to.equal(1) 106 | }) 107 | 108 | it('should only print usage to stderr and set exit code when neither args or argv are set on process', async () => { 109 | const expected = heredoc` 110 | Usage: downdoc [OPTION]... FILE 111 | Run 'downdoc --help' for more information.${lf} 112 | ` 113 | const p = { stderr } 114 | await downdoc(p) 115 | expect(stderr.string).to.equal(expected) 116 | expect(p.exitCode).to.equal(1) 117 | }) 118 | }) 119 | 120 | describe('output option', () => { 121 | it('should convert FILE and write output to file in cwd when --output option not specified', async () => { 122 | const { input, expected } = example 123 | await fsp.writeFile('doc.adoc', input, 'utf8') 124 | const args = ['doc.adoc'] 125 | await downdoc({ args, stdout }) 126 | expect(stdout.string).to.be.empty() 127 | expect('doc.md').to.be.a.file().with.contents(expected) 128 | }) 129 | 130 | it('should convert FILE and write output to file in subdir when --output option not specified', async () => { 131 | const { input, expected } = example 132 | await fsp.mkdir('docs') 133 | await fsp.writeFile('docs/doc.adoc', input, 'utf8') 134 | const args = ['docs/doc.adoc'] 135 | await downdoc({ args }) 136 | expect('docs/doc.md').to.be.a.file().with.contents(expected) 137 | }) 138 | 139 | it('should convert FILE and write output to stdout when -o option is -', async () => { 140 | const { input, expected } = example 141 | await fsp.writeFile('doc.adoc', input, 'utf8') 142 | const args = ['-o', '-', 'doc.adoc'] 143 | await downdoc({ args, stdout }) 144 | expect(stdout.string).to.equal(expected) 145 | expect('doc.md').to.not.be.a.path() 146 | }) 147 | 148 | it('should convert FILE and write output to stdout when --output option is -', async () => { 149 | const { input, expected } = example 150 | await fsp.writeFile('doc.adoc', input, 'utf8') 151 | const args = ['--output', '-', 'doc.adoc'] 152 | await downdoc({ args, stdout }) 153 | expect(stdout.string).to.equal(expected) 154 | expect('doc.md').to.not.be.a.path() 155 | }) 156 | 157 | it('should convert FILE and write output to adjacent file specified by -o option', async () => { 158 | const { input, expected } = example 159 | await fsp.writeFile('doc.adoc', input, 'utf8') 160 | const args = ['-o', 'out.md', 'doc.adoc'] 161 | await downdoc({ args }) 162 | expect('out.md').to.be.a.file().with.contents(expected) 163 | }) 164 | 165 | it('should convert FILE and write output to adjacent file specified by --output option', async () => { 166 | const { input, expected } = example 167 | 
await fsp.writeFile('doc.adoc', input, 'utf8') 168 | const args = ['--output', 'out.md', 'doc.adoc'] 169 | await downdoc({ args }) 170 | expect('out.md').to.be.a.file().with.contents(expected) 171 | }) 172 | 173 | it('should convert FILE and write output to file in different folder specified by -o option', async () => { 174 | const { input, expected } = example 175 | await fsp.writeFile('doc.adoc', input, 'utf8') 176 | await fsp.mkdir('build') 177 | const args = ['-o', 'build/doc.md', 'doc.adoc'] 178 | await downdoc({ args }) 179 | expect('build/doc.md').to.be.a.file().with.contents(expected) 180 | }) 181 | }) 182 | 183 | describe('input', () => { 184 | it('should allow input to be specified from stdin by passing - as input path', async () => { 185 | const args = ['-'] 186 | const expected = '**foo** and _bar_\n' 187 | const stdin = Readable.from('*foo* and _bar_') 188 | const p = { args, stdout, stdin } 189 | await downdoc(p) 190 | expect(stdout.string).to.equal(expected) 191 | }) 192 | 193 | it('should print message to stderr and set exit code when FILE is missing', async () => { 194 | const args = ['no-such-file.adoc'] 195 | const p = { args, stderr } 196 | await downdoc(p) 197 | expect(stderr.string).to.equal('downdoc: no-such-file.adoc: No such file\n') 198 | expect(p.exitCode).to.equal(1) 199 | }) 200 | 201 | it('should print message to stderr and set exit code when FILE is directory', async () => { 202 | await fsp.mkdir('docs') 203 | const args = ['docs'] 204 | const p = { args, stderr } 205 | await downdoc(p) 206 | expect(stderr.string).to.equal('downdoc: docs: Not a file\n') 207 | expect(p.exitCode).to.equal(1) 208 | }) 209 | }) 210 | 211 | describe('attribute option', () => { 212 | it('should pass attribute specified by -a option', async () => { 213 | const input = 'Go to {url-order} to purchase your copy.\n' 214 | const expected = 'Go to https://example.org/order to purchase your copy.\n' 215 | await fsp.writeFile('doc.adoc', input, 'utf8') 216 | const args = ['-a', 'url-order=https://example.org/order', 'doc.adoc'] 217 | await downdoc({ args }) 218 | expect('doc.md').to.be.a.file().with.contents(expected) 219 | }) 220 | 221 | it('should pass attribute specified by --attribute option', async () => { 222 | const input = 'Go to {url-order} to purchase your copy.\n' 223 | const expected = 'Go to https://example.org/order to purchase your copy.\n' 224 | await fsp.writeFile('doc.adoc', input, 'utf8') 225 | const args = ['-a', 'url-order=https://example.org/order', 'doc.adoc'] 226 | await downdoc({ args }) 227 | expect('doc.md').to.be.a.file().with.contents(expected) 228 | }) 229 | 230 | it('should allow -a option to be specified multiple times', async () => { 231 | const input = 'Visit {url-site} to learn about {company}.\n' 232 | const expected = 'Visit https://example.org to learn about ACME.\n' 233 | await fsp.writeFile('doc.adoc', input, 'utf8') 234 | const args = ['-a', 'url-site=https://example.org', '-a', 'company=ACME', 'doc.adoc'] 235 | await downdoc({ args }) 236 | expect('doc.md').to.be.a.file().with.contents(expected) 237 | }) 238 | }) 239 | 240 | describe('npm publish', () => { 241 | it('should convert FILE and hide it when --prepublish option is specified', async () => { 242 | const { input, expected } = example 243 | await fsp.writeFile('doc.adoc', input, 'utf8') 244 | const args = ['--prepublish', 'doc.adoc'] 245 | await downdoc({ args }) 246 | expect('doc.md').to.be.a.file().with.contents(expected) 247 | expect('doc.adoc').to.not.be.a.path() 248 | 
expect('.doc.adoc').to.be.a.file().with.contents(input) 249 | }) 250 | 251 | it('should convert FILE in subdir and hide it when --prepublish option is specified', async () => { 252 | const { input, expected } = example 253 | await fsp.mkdir('docs') 254 | await fsp.writeFile('docs/doc.adoc', input, 'utf8') 255 | const args = ['--prepublish', 'docs/doc.adoc'] 256 | await downdoc({ args }) 257 | expect('docs/doc.md').to.be.a.file().with.contents(expected) 258 | expect('docs/doc.adoc').to.not.be.a.path() 259 | expect('docs/.doc.adoc').to.be.a.file().with.contents(input) 260 | }) 261 | 262 | it('should assume FILE is README.adoc when --prepublish option is specified', async () => { 263 | const { input, expected } = example 264 | await fsp.writeFile('README.adoc', input, 'utf8') 265 | const args = ['--prepublish'] 266 | await downdoc({ args }) 267 | expect('README.md').to.be.a.file().with.contents(expected) 268 | expect('README.adoc').to.not.be.a.path() 269 | expect('.README.adoc').to.be.a.file().with.contents(input) 270 | }) 271 | 272 | it('should set env and env-npm attributes when --prepublish option is specified', async () => { 273 | let { input, expected } = example 274 | input += 'ifdef::env-npm[{env}]\n' 275 | expected += 'npm\n' 276 | await fsp.writeFile('README.adoc', input, 'utf8') 277 | const args = ['--prepublish'] 278 | await downdoc({ args }) 279 | expect('README.md').to.be.a.file().with.contents(expected) 280 | }) 281 | 282 | it('should set env and env-npm attributes when --prepublish option and -a options are specified', async () => { 283 | let { input, expected } = example 284 | input += 'ifdef::env-npm[{env} {scope}]\n' 285 | expected += 'npm @org\n' 286 | await fsp.writeFile('README.adoc', input, 'utf8') 287 | const args = ['--prepublish', '-a', 'scope=@org'] 288 | await downdoc({ args }) 289 | expect('README.md').to.be.a.file().with.contents(expected) 290 | }) 291 | 292 | it('should restore FILE when --postpublish option is specified', async () => { 293 | const { input } = example 294 | await fsp.writeFile('doc.adoc', input, 'utf8') 295 | await downdoc({ args: ['--prepublish', 'doc.adoc'] }) 296 | await downdoc({ args: ['--postpublish', 'doc.adoc'] }) 297 | expect('doc.md').to.not.be.a.path() 298 | expect('.doc.adoc').to.not.be.a.path() 299 | expect('doc.adoc').to.be.a.file().with.contents(input) 300 | }) 301 | 302 | it('should assume FILE is README.adoc when --postpublish option is specified', async () => { 303 | const { input } = example 304 | await fsp.writeFile('README.adoc', input, 'utf8') 305 | await downdoc({ args: ['--prepublish'] }) 306 | await downdoc({ args: ['--postpublish'] }) 307 | expect('README.md').to.not.be.a.path() 308 | expect('.README.adoc').to.not.be.a.path() 309 | expect('README.adoc').to.be.a.file().with.contents(input) 310 | }) 311 | 312 | it('should take no action if there is no file to restore', async () => { 313 | const { input } = example 314 | await fsp.writeFile('README.adoc', input, 'utf8') 315 | await downdoc({ args: ['--postpublish'] }) 316 | expect('README.md').to.not.be.a.path() 317 | expect('.README.adoc').to.not.be.a.path() 318 | expect('README.adoc').to.be.a.file().with.contents(input) 319 | }) 320 | }) 321 | 322 | describe('integration', () => { 323 | it('should use global process variable by default', async () => { 324 | const oldprocess = global.process 325 | try { 326 | global.process = { argv: ['node', 'downdoc', '-o', '-', 'doc.adoc'], stdout } 327 | const { input, expected } = example 328 | await fsp.writeFile('doc.adoc', input, 
'utf8') 329 | await downdoc() 330 | expect(stdout.string).to.equal(expected) 331 | expect('doc.md').to.not.be.a.path() 332 | } finally { 333 | global.process = oldprocess 334 | } 335 | }) 336 | 337 | it('should accept options and arguments in any order', async () => { 338 | const input = 'Visit {url-site} to learn about {company}.\n' 339 | const expected = 'Visit https://example.org to learn about ACME.\n' 340 | await fsp.writeFile('doc.adoc', input, 'utf8') 341 | const args = ['-a', 'url-site=https://example.org', 'doc.adoc', '-a', 'company=ACME', '-o', 'out.md'] 342 | await downdoc({ args }) 343 | expect('out.md').to.be.a.file().with.contents(expected) 344 | }) 345 | }) 346 | }) 347 | -------------------------------------------------------------------------------- /test/harness/config.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const config = { 4 | checkLeaks: true, 5 | mochaGlobalTeardown () { 6 | if (!this.failures) logCoverageReportPath() 7 | }, 8 | require: __filename, 9 | spec: resolveSpec(), 10 | timeout: 10 * 60 * 1000, 11 | } 12 | 13 | if (process.env.npm_config_watch) config.watch = true 14 | if (process.env.CI) { 15 | Object.assign(config, { 16 | forbidOnly: true, 17 | reporter: './test/harness/mocha-ci-reporter', 18 | 'reporter-option': ['output=reports/tests-xunit.xml'], 19 | }) 20 | } 21 | 22 | function logCoverageReportPath () { 23 | if (!process.env.NYC_PROCESS_ID) return 24 | const { CI_PROJECT_PATH, CI_JOB_ID } = process.env 25 | const coverageReportRelpath = 'reports/lcov-report/index.html' 26 | const coverageReportURL = CI_JOB_ID 27 | ? `https://gitlab.com/${CI_PROJECT_PATH}/-/jobs/${CI_JOB_ID}/artifacts/file/${coverageReportRelpath}` 28 | : require('node:url').pathToFileURL(coverageReportRelpath) 29 | console.log(`Coverage report: ${coverageReportURL}`) 30 | } 31 | 32 | function resolveSpec () { 33 | const spec = process.argv[2] 34 | if (spec && !spec.startsWith('-')) return spec 35 | return 'test/**/*-test.js' 36 | } 37 | 38 | module.exports = config 39 | -------------------------------------------------------------------------------- /test/harness/index.js: -------------------------------------------------------------------------------- 1 | /* eslint-env mocha */ 2 | 'use strict' 3 | 4 | process.env.NODE_ENV = 'test' 5 | 6 | const chai = require('chai') 7 | const fsp = require('node:fs/promises') 8 | 9 | chai.use(require('chai-fs')) 10 | chai.use(require('chai-string')) 11 | // dirty-chai must be loaded after all other plugins 12 | // see https://github.com/prodatakey/dirty-chai#plugin-assertions 13 | chai.use(require('dirty-chai')) 14 | 15 | const cleanDir = (dir, { create } = {}) => 16 | fsp.rm(dir, { recursive: true, force: true }).then(() => (create ? fsp.mkdir(dir, { recursive: true }) : undefined)) 17 | 18 | function heredoc (strings, ...values) { 19 | const first = strings[0] 20 | if (first[0] !== '\n') { 21 | return values.length ? values.reduce((accum, value, idx) => accum + value + strings[idx + 1], first) : first 22 | } 23 | let string = values.length 24 | ? (strings = strings.slice()).push(strings.pop().trimEnd()) && 25 | values.reduce((accum, _, idx) => accum + '\x1f' + strings[idx + 1], first.substring(1)) 26 | : first.substring(1).trimEnd() 27 | const lines = string.split('\n') 28 | const indentSize = lines.reduce( 29 | (accum, line) => 30 | accum && line ? (line[0] === ' ' ? 
Math.min(accum, line.length - line.trimStart().length) : 0) : accum, 31 | Infinity 32 | ) 33 | if (indentSize) { 34 | string = lines.map((line) => (line && line[0] === ' ' ? line.substring(indentSize) : line)).join('\n') 35 | if (!values.length) return string 36 | strings = string.split('\x1f') 37 | } else if (!values.length) { 38 | return string 39 | } 40 | return values.reduce((accum, value, idx) => accum + value + strings[idx + 1], strings[0]) 41 | } 42 | 43 | class StringIO { 44 | #buffer 45 | 46 | constructor () { 47 | this.#buffer = [] 48 | } 49 | 50 | reopen () { 51 | this.#buffer.length = 0 52 | } 53 | 54 | write (chunk) { 55 | this.#buffer.push(chunk) 56 | } 57 | 58 | get string () { 59 | return this.#buffer.join('') 60 | } 61 | } 62 | 63 | module.exports = { cleanDir, expect: chai.expect, heredoc, StringIO } 64 | -------------------------------------------------------------------------------- /test/harness/mocha-ci-reporter.js: -------------------------------------------------------------------------------- 1 | 'use strict' 2 | 3 | const { Base, Dot, XUnit } = require('mocha').reporters 4 | 5 | // A Mocha reporter that combines the dot and xunit reporters. 6 | class CI extends Base { 7 | constructor (runner, options) { 8 | super(runner, options) 9 | new Dot(runner, options) // eslint-disable-line no-new 10 | new XUnit(runner, options) // eslint-disable-line no-new 11 | } 12 | } 13 | 14 | module.exports = CI 15 | --------------------------------------------------------------------------------
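Usage sketch (illustration only, not a file in the tree above): package.json wires downdoc into the npm lifecycle through "prepublishOnly": "downdoc --prepublish" and "postpublish": "downdoc --postpublish", and test/cli-test.js drives the same CLI entry point in process by requiring the "#cli" import specifier and awaiting it with an args array plus a stdout sink. The snippet below mirrors that test-harness pattern under stated assumptions: the "#cli" specifier only resolves from inside this repository (an external consumer would require the published downdoc package instead), the plain { write } object is a stand-in for the harness's StringIO, and a README.adoc is assumed to exist in the working directory.

'use strict'

// Mirrors how test/cli-test.js invokes the CLI: require the '#cli' import
// specifier (mapped to ./lib/cli.js by the "imports" field in package.json)
// and await it with an options object carrying args and a writable stdout sink.
const downdocCli = require('#cli')

;(async () => {
  const chunks = []
  // Minimal stand-in for the test harness's StringIO: anything with a write(chunk) method.
  const stdout = { write: (chunk) => chunks.push(chunk) }
  // Passing '-' to -o makes the CLI write the converted Markdown to the provided
  // stdout instead of creating README.md, as exercised in the output option tests.
  await downdocCli({ args: ['-o', '-', 'README.adoc'], stdout })
  console.log(chunks.join(''))
})()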