├── tests ├── .gitignore ├── cbackend │ ├── nim.cfg │ └── tFinalizeHCparse.nim ├── nim.cfg ├── config.nims ├── tHaxdocCli.nim ├── tFromSelf.nim ├── tFromCompiler.nim ├── tHtmlGen.nim ├── tFromPackage.nim ├── tLineDiffDb.nim └── tFromSimpleCode.nim ├── src ├── .gitignore ├── s.nim ├── haxdoc │ ├── wrappers │ │ └── mandoc │ │ │ ├── .gitignore │ │ │ ├── tests │ │ │ ├── config.nims │ │ │ ├── test1.nim │ │ │ ├── tUsingC.c │ │ │ ├── build.sh │ │ │ └── tUsingNim.nim │ │ │ ├── nimmandoc.nimble │ │ │ ├── eqn.nim │ │ │ ├── tbl.nim │ │ │ ├── mdoc.nim │ │ │ ├── mandoc_common.nim │ │ │ ├── mansearch.nim │ │ │ ├── roff.nim │ │ │ ├── manconf.nim │ │ │ ├── manconf_mansearch.nim │ │ │ ├── main_manconf.nim │ │ │ ├── mandoc_aux.nim │ │ │ ├── make_wrap.nim │ │ │ ├── mandoc_parse.nim │ │ │ ├── roff_mdoc.nim │ │ │ ├── main.nim │ │ │ ├── mandoc.nim │ │ │ └── nimmandoc.nim │ ├── submodule.nim │ ├── generate │ │ ├── html_full.nim │ │ ├── docentry_hext.nim │ │ ├── sqlite_db.nim │ │ └── sourcetrail_db.nim │ ├── extract │ │ ├── from_doxygen_xml.nim │ │ └── from_manpages.nim │ ├── process │ │ ├── docentry_group.nim │ │ ├── docentry_query.nim │ │ └── docdb_diff.nim │ ├── parse │ │ └── docentry_link.nim │ ├── docentry_io.nim │ └── docentry_types.nim ├── file.nim ├── doc.json └── haxdoc.nim ├── nim.cfg ├── todo.org ├── .gitignore ├── .github └── workflows │ ├── test.yaml │ └── release.yaml ├── haxdoc.nimble ├── assets └── nim.rules └── readme.md /tests/.gitignore: -------------------------------------------------------------------------------- 1 | !cbackend -------------------------------------------------------------------------------- /tests/cbackend/nim.cfg: -------------------------------------------------------------------------------- 1 | --backend:c -------------------------------------------------------------------------------- /tests/nim.cfg: -------------------------------------------------------------------------------- 1 | --backend:cpp 2 | 
-------------------------------------------------------------------------------- /src/.gitignore: -------------------------------------------------------------------------------- 1 | cache 2 | cxxstd 3 | nim.cfg 4 | -------------------------------------------------------------------------------- /src/s.nim: -------------------------------------------------------------------------------- 1 | if isMainModule: 2 | discard() 3 | -------------------------------------------------------------------------------- /nim.cfg: -------------------------------------------------------------------------------- 1 | -d:ssl 2 | -d:nimcore 3 | --path:"$nim" 4 | -------------------------------------------------------------------------------- /tests/config.nims: -------------------------------------------------------------------------------- 1 | switch("path", "$projectDir/../src") -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/.gitignore: -------------------------------------------------------------------------------- 1 | cache/ 2 | **/mandoc-1*/* -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/tests/config.nims: -------------------------------------------------------------------------------- 1 | switch("path", "$projectDir/../src") -------------------------------------------------------------------------------- /todo.org: -------------------------------------------------------------------------------- 1 | * TODO CLI interface 2 | 3 | - [ ] Allow to ignore files via glob 4 | - [ ] Specify main file for project 5 | 6 | https://github.com/nim-lang/Nim/issues/8477 7 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | !*.* 3 | !**/*.* 4 | !/**/ 5 | !.* 6 | !/* 7 | 8 | **/*.bin 9 | **/*.out 10 | 11 | **/cache 12 | bin 13 | **/*.srctrl* 14 
| tests/**/* 15 | !tests/**/*.* 16 | docs 17 | nimdoc.cfg 18 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/nimmandoc.nimble: -------------------------------------------------------------------------------- 1 | # Package 2 | 3 | version = "0.1.0" 4 | author = "haxscramper" 5 | description = "Nim wrapper for mandoc library" 6 | license = "Apache-2.0" 7 | srcDir = "src" 8 | 9 | 10 | # Dependencies 11 | 12 | requires "nim >= 1.4.0" 13 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/eqn.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, ./mandoc_parse_roff_eqn_main_tbl_mdoc, 7 | hmisc / wrappers / wraphelp 8 | 9 | 10 | 11 | export 12 | mandoc_parse_roff_eqn_main_tbl_mdoc, wraphelp 13 | 14 | 15 | 16 | 17 | import 18 | mandoc_common 19 | 20 | 21 | 22 | -------------------------------------------------------------------------------- /tests/tHaxdocCli.nim: -------------------------------------------------------------------------------- 1 | import 2 | ../src/haxdoc 3 | 4 | import 5 | hmisc/other/oswrap 6 | 7 | let file = getAppTempFile("intrail.nim") 8 | file.writeFile("echo 12") 9 | 10 | let dir = getTempDir() / "tHaxdocCliProject" 11 | 12 | mkWithDirStructure(dir): 13 | file "project.nimble": 14 | "srcDir = \"src\"" 15 | 16 | dir "src": 17 | file "project.nim": 18 | "echo 12" 19 | 20 | withDir dir: 21 | haxdocCli(@["nim", "trail", "project.nimble"]) 22 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/tests/test1.nim: -------------------------------------------------------------------------------- 1 | # This is just an example to get you started. You may wish to put all of your 2 | # tests into a single file, or separate them into multiple `test1`, `test2` 3 | # etc. 
files (better names are recommended, just make sure the name starts with 4 | # the letter 't'). 5 | # 6 | # To run these tests, simply execute `nimble test`. 7 | 8 | import unittest 9 | 10 | import nimmandoc 11 | test "can add": 12 | check add(5, 5) == 10 13 | -------------------------------------------------------------------------------- /src/haxdoc/submodule.nim: -------------------------------------------------------------------------------- 1 | # This is just an example to get you started. Users of your library will 2 | # import this file by writing ``import haxdoc/submodule``. Feel free to rename or 3 | # remove this file altogether. You may create additional modules alongside 4 | # this file as required. 5 | 6 | type 7 | Submodule* = object 8 | name*: string 9 | 10 | proc initSubmodule*(): Submodule = 11 | ## Initialises a new ``Submodule`` object. 12 | Submodule(name: "Anonymous") 13 | -------------------------------------------------------------------------------- /.github/workflows/test.yaml: -------------------------------------------------------------------------------- 1 | name: test 2 | on: [push] 3 | jobs: 4 | compile: 5 | if: | 6 | !contains(github.event.head_commit.message, 'WIP') 7 | runs-on: ubuntu-latest 8 | steps: 9 | - uses: actions/checkout@v2 10 | - uses: alaviss/setup-nim@master 11 | with: 12 | path: '../nim' 13 | version: 'version-1-4' 14 | 15 | - name: Run tests 16 | run: | 17 | export PATH=$HOME/.nimble/bin:$PATH 18 | nimble install -y 19 | nimble test -y -d:release 20 | -------------------------------------------------------------------------------- /tests/tFromSelf.nim: -------------------------------------------------------------------------------- 1 | # Generate documentation database for haxdoc itself. 
2 | 3 | import 4 | haxdoc/extract/[from_nim_code], 5 | haxdoc/generate/sourcetrail_db, 6 | hnimast/compiler_aux 7 | 8 | import 9 | hmisc/other/[oswrap, colorlogger], 10 | hmisc/hdebug_misc, 11 | hmisc/algo/hseq_distance 12 | 13 | startHax() 14 | startColorLogger() 15 | 16 | let dir = getNewTempDir("tFromSelf") 17 | 18 | let db = docDbFromPackage( 19 | findPackage("haxdoc", newVRAny()).get(), 20 | ignored = @[**"**/haxdoc.nim"]) 21 | 22 | db.writeSourcetrailDb(dir /. "tFromSelf") 23 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/tbl.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, ./main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 7 | ./mandoc_parse_roff_eqn_main_tbl_mdoc, hmisc / wrappers / wraphelp 8 | 9 | 10 | 11 | export 12 | mandoc_parse_roff_eqn_main_tbl_mdoc, wraphelp, 13 | main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl 14 | 15 | 16 | 17 | 18 | import 19 | mandoc_common 20 | 21 | 22 | 23 | 24 | proc toCInt*(en: TcTbl_cell): cint {.inline.} = 25 | ## Convert proxy enum to integer value 26 | cint(en.int) 27 | 28 | proc toCInt*(en: set[TcTbl_cell]): cint {.inline.} = 29 | ## Convert set of enums to bitmasked integer 30 | for val in en: 31 | result = bitor(result, val.cint) 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/mdoc.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, ./roff_mdoc, 7 | ./main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 8 | ./mandoc_parse_roff_eqn_main_tbl_mdoc, hmisc / wrappers / wraphelp 9 | 10 | 11 | 12 | export 13 | mandoc_parse_roff_eqn_main_tbl_mdoc, roff_mdoc, wraphelp, 14 | main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl 
15 | 16 | 17 | 18 | 19 | import 20 | mandoc_common 21 | 22 | 23 | 24 | 25 | 26 | # Declaration created in: hc_wrapgen.nim(254, 28) 27 | # Wrapper for `mdoc_validate` 28 | # Declared in mdoc.h:158 29 | proc mdocValidate*(a0: ptr RoffMan): void {.importc: r"mdoc_validate", 30 | header: allHeaders.} 31 | ## @import{[[code:proc!proc(ptr[roff_man]): void]]} 32 | 33 | -------------------------------------------------------------------------------- /src/file.nim: -------------------------------------------------------------------------------- 1 | 2 | import std/strutils 3 | 4 | type 5 | Obj = object 6 | fld1: int 7 | case isRaw: bool 8 | of true: 9 | fld2: float 10 | 11 | of false: 12 | fld3: string 13 | 14 | Enum = enum 15 | enFirst 16 | enSecond 17 | 18 | 19 | DistinctAlias = distinct int 20 | Alias = int 21 | 22 | proc hello(): int = 23 | ## Documentation comment 1 24 | return 12 25 | 26 | proc nice(): int = 27 | ## Documentation comment 2 28 | return 200 29 | 30 | proc hello2(arg: int): int = 31 | return hello() + arg + nice() 32 | 33 | proc hello3(obj: Obj): int = 34 | return obj.fld1 35 | 36 | proc hello4(arg1, arg2: int, arg3: string): int = 37 | result = arg1 + hello3(Obj(fld1: arg2)) + hello2(arg3.len) 38 | if result > 10: 39 | echo "result > 10" 40 | 41 | else: 42 | result += hello4(arg1, arg2, arg3) 43 | 44 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/mandoc_common.nim: -------------------------------------------------------------------------------- 1 | type 2 | MdocData* = object 3 | data: pointer 4 | 5 | # It is not possible to use libmandoc as a normal library - it is necessary 6 | # to include *almost all* headers at once, as well as some things from 7 | # sys/types.h like `FILE` etc. 
8 | const allHeaders* = """ 9 | #include 10 | #include 11 | 12 | // clang-format off 13 | #include 14 | #include 15 | #include 16 | #include 17 | #include 18 | #include 19 | #include 20 | #include 21 | 22 | #include 23 | #include 24 | #include 25 | #include 26 | #include 27 | #include 28 | #include 29 | #include 30 | #include 31 | #include 32 | // clang-format on 33 | 34 | // Mandatory things for libmandoc 35 | """ 36 | -------------------------------------------------------------------------------- /src/haxdoc/generate/html_full.nim: -------------------------------------------------------------------------------- 1 | import ../docentry, ../process/[docentry_query, docentry_group] 2 | import 3 | hmisc/[helpers, base_errors, hdebug_misc], 4 | hmisc/hasts/html_ast2, 5 | haxorg/[semorg, ast] 6 | 7 | export html_ast2 8 | 9 | import std/[streams, strformat] 10 | 11 | 12 | 13 | func toLink*(t: DocType): string = 14 | case t.kind: 15 | of dtkProc: 16 | for idx, arg in t.arguments: 17 | if idx > 0: result &= "," 18 | result &= toLink(arg.identType) 19 | 20 | else: 21 | result = $t 22 | 23 | func toLink*(full: DocLink): string = 24 | for part in full.parts: 25 | case part.kind: 26 | of dekModule, dekPackage, dekNewtypeKinds: 27 | result &= part.name & "_" 28 | 29 | of dekProcKinds: 30 | result &= part.name & "(" & part.procType.toLink() & ")" 31 | 32 | else: 33 | raiseImplementKindError(part) 34 | 35 | proc link*(writer: var HtmlWriter, entry: DocEntry; text: string) = 36 | writer.link(entry.fullIdent.toLink, text, $entry.kind) 37 | 38 | proc writeHtml*(sem: SemOrg, writer: var HtmlWriter) = 39 | writer.text("tree") 40 | -------------------------------------------------------------------------------- /src/haxdoc/generate/docentry_hext.nim: -------------------------------------------------------------------------------- 1 | import 2 | hmisc/hasts/hext_template, 3 | ../docentry 4 | 5 | export hext_template 6 | 7 | 8 | 9 | type 10 | DocBoxKind* = enum 11 | dbkDb 12 | dbkEntry 
13 | 14 | DocBox* = object 15 | case kind*: DocBoxKind 16 | of dbkDb: 17 | db*: DocDb 18 | 19 | of dbkEntry: 20 | entry*: DocEntry 21 | 22 | DValue* = HextValue[DocBox] 23 | 24 | 25 | func boxValue*(t: typedesc[DValue], val: DocEntry): DValue = 26 | boxValue(t, DocBox(kind: dbkEntry, entry: val), dbkEntry.int) 27 | 28 | func boxValue*(t: typedesc[DValue], val: DocDb): DValue = 29 | boxValue(t, DocBox(kind: dbkDb, db: val), dbkDb.int) 30 | 31 | proc getField*(t: typedesc[DValue], box: DocBox, name: string): DValue = 32 | case box.kind: 33 | of dbkEntry: 34 | case name: 35 | of "name": return boxValue(t, box.entry.name) 36 | 37 | else: 38 | assert false 39 | 40 | iterator boxedItems*(t: typedesc[DValue], val: DocBox): DValue = 41 | case val.kind: 42 | of dbkDb: 43 | for _, entry in val.db.entries: 44 | yield boxValue(t, entry) 45 | 46 | else: 47 | assert false, "asfd" 48 | -------------------------------------------------------------------------------- /tests/tFromCompiler.nim: -------------------------------------------------------------------------------- 1 | import 2 | hmisc/other/[oswrap, hshell, hlogger], 3 | hmisc/core/all 4 | 5 | 6 | import 7 | haxdoc/extract/from_nim_code, 8 | haxdoc/process/[docentry_group], 9 | haxdoc/[docentry, docentry_io], 10 | haxdoc/generate/sourcetrail_db 11 | 12 | 13 | import cxxstd/cxx_common 14 | import nimtrail/nimtrail_common 15 | import hnimast/[compiler_aux] 16 | 17 | let 18 | outDir = getTempDir() / "tFromCompiler" 19 | 20 | startHax() 21 | 22 | 23 | let 24 | compDir = getStdPath().dir() 25 | compFile = compDir /. 
"compiler/nim.nim" 26 | l = newTermLogger() 27 | 28 | mkDir outDir 29 | 30 | l.info "Using compiler source dir ", compDir 31 | l.info "Starting compilation from ", compFile 32 | 33 | let db = generateDocDb( 34 | compFile, 35 | fileLib = some("compiler"), 36 | defines = @["nimpretty", "haxdoc", "nimdoc"], 37 | logger = l 38 | ) 39 | 40 | echo "Db compilation done" 41 | 42 | let dot = db.inheritDotGraph() 43 | if hasCmd(shellCmd(dot)): 44 | dot.toPng(AbsFile "/tmp/compiler-inherit.png") 45 | echo "Inhertiance graph for compiler done" 46 | else: 47 | echo "no dot installed, skipping graph generation" 48 | 49 | db.writeSourcetrailDb(outDir /. "tFromCompiler") 50 | echo "Registered sourcetrail DB" 51 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/mansearch.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, cstd / stddef, ./manconf_mansearch, 7 | ./main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 8 | hmisc / wrappers / wraphelp 9 | 10 | 11 | 12 | export 13 | wraphelp, main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 14 | manconf_mansearch 15 | 16 | 17 | 18 | 19 | import 20 | mandoc_common 21 | 22 | 23 | 24 | 25 | 26 | # Declaration created in: hc_wrapgen.nim(254, 28) 27 | # Wrapper for `mansearch` 28 | # Declared in mansearch.h:111 29 | proc mansearch*(cfg: ptr Mansearch; paths: ptr Manpaths; argc: cint; 30 | argv: ptr cstring; res: ptr ptr Manpage; ressz: ptr SizeT): cint {. 31 | importc: r"mansearch", header: allHeaders.} 32 | ## @import{[[code:proc!proc(ptr[mansearch], ptr[manpaths], int, tkIncompleteArray, ptr[ptr[manpage]], ptr[tkTypedef]): int]]} 33 | 34 | 35 | 36 | # Declaration created in: hc_wrapgen.nim(254, 28) 37 | # Wrapper for `mansearch_free` 38 | # Declared in mansearch.h:117 39 | proc mansearchFree*(a0: ptr Manpage; a1: SizeT): void {. 
40 | importc: r"mansearch_free", header: allHeaders.} 41 | ## @import{[[code:proc!proc(ptr[manpage], tkTypedef): void]]} 42 | 43 | -------------------------------------------------------------------------------- /.github/workflows/release.yaml: -------------------------------------------------------------------------------- 1 | name: release 2 | on: 3 | push: 4 | tags: 5 | - 'v*' 6 | 7 | jobs: 8 | build: 9 | if: | 10 | !contains(github.event.head_commit.message, 'SKIP') 11 | runs-on: ubuntu-latest 12 | steps: 13 | - uses: actions/checkout@v2 14 | - uses: alaviss/setup-nim@master 15 | with: 16 | path: '../nim' 17 | version: 'version-1-4' 18 | 19 | - name: Compile binary 20 | run: | 21 | export PATH=$HOME/.nimble/bin:$PATH 22 | nimble build -y || nimble build -y 23 | zip --junk-paths haxdoc.zip bin/haxdoc 24 | - name: Create Release 25 | id: create_release 26 | uses: actions/create-release@v1 27 | env: 28 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 29 | with: 30 | tag_name: ${{ github.ref }} 31 | release_name: Release ${{ github.ref }} 32 | draft: false 33 | prerelease: false 34 | 35 | - name: Upload Release Asset 36 | uses: actions/upload-release-asset@v1 37 | env: 38 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 39 | with: 40 | upload_url: ${{ steps.create_release.outputs.upload_url }} 41 | asset_path: ./haxdoc.zip 42 | asset_name: haxdoc.zip 43 | asset_content_type: application/zip 44 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/roff.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, ./main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 7 | ./mandoc_parse_roff_eqn_main_tbl_mdoc, hmisc / wrappers / wraphelp 8 | 9 | 10 | 11 | export 12 | mandoc_parse_roff_eqn_main_tbl_mdoc, wraphelp, 13 | main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl 14 | 15 | 16 | 17 | 18 | import 19 | 
mandoc_common 20 | 21 | 22 | 23 | 24 | proc toCInt*(en: MnNode): cint {.inline.} = 25 | ## Convert proxy enum to integer value 26 | cint(en.int) 27 | 28 | proc toCInt*(en: set[MnNode]): cint {.inline.} = 29 | ## Convert set of enums to bitmasked integer 30 | for val in en: 31 | result = bitor(result, val.cint) 32 | 33 | 34 | 35 | 36 | 37 | # Declaration created in: hc_wrapgen.nim(254, 28) 38 | # Wrapper for `arch_valid` 39 | # Declared in roff.h:551 40 | proc archValid*(a0: cstring; a1: MandocOsC): cint {.importc: r"arch_valid", 41 | header: allHeaders.} 42 | ## @import{[[code:proc!proc(ptr[const[char]], mandoc_os): int]]} 43 | 44 | 45 | 46 | # Declaration created in: hc_wrapgen.nim(254, 28) 47 | # Wrapper for `deroff` 48 | # Declared in roff.h:552 49 | proc deroff*(a0: cstringArray; a1: ptr RoffNode): void {.importc: r"deroff", 50 | header: allHeaders.} 51 | ## @import{[[code:proc!proc(ptr[ptr[char]], ptr[roff_node]): void]]} 52 | 53 | -------------------------------------------------------------------------------- /tests/tHtmlGen.nim: -------------------------------------------------------------------------------- 1 | import hmisc/other/[oswrap] 2 | import hmisc/hdebug_misc 3 | import 4 | haxdoc/[docentry, docentry_io], 5 | haxdoc/process/[docentry_query, docentry_group], 6 | haxdoc/generate/html_full 7 | 8 | startHax() 9 | 10 | let dir = getTempDir() / "tFromSimpleCode" 11 | let db = loadDbXml(dir, "compile-db") 12 | 13 | var w = newHtmlWriter(AbsFile "/tmp/page.html") 14 | 15 | w.start(hHtml) 16 | w.start(hHead) 17 | 18 | w.style({ 19 | "th": @{ 20 | "text-align": "left" 21 | } 22 | }) 23 | 24 | w.finish(hHead) 25 | 26 | w.start hBody 27 | w.wrap0 hCaption, w.text("Module list") 28 | 29 | for module in allItems(db, {dekModule}): 30 | w.wrap hH1: 31 | w.wrap0 hCell, w.link(module, module.name) 32 | w.wrap0 hCell, module.docText.docBrief.writeHtml(w) 33 | 34 | w.wrap hTable: 35 | for t in items(module, dekNewtypeKinds): 36 | w.wrap 2 | #include 3 | 4 | // 
clang-format off 5 | #include 6 | #include 7 | #include 8 | #include 9 | #include 10 | #include 11 | #include 12 | #include 13 | 14 | #include "mandoc_aux.h" 15 | #include "mandoc.h" 16 | #include "roff.h" 17 | #include "mdoc.h" 18 | #include "man.h" 19 | #include "mandoc_parse.h" 20 | #include "main.h" 21 | #include "manconf.h" 22 | #include "mansearch.h" 23 | // clang-format on 24 | 25 | 26 | void tree_mdoc(void* arg, const struct roff_meta* mdoc); 27 | void tree_man(void* arg, const struct roff_meta* mdoc); 28 | void print_mdoc(const struct roff_node* n, int indent); 29 | 30 | 31 | int main() { 32 | mchars_alloc(); 33 | struct mparse* mp = mparse_alloc( 34 | MPARSE_SO | MPARSE_UTF8 | MPARSE_LATIN1 | MPARSE_VALIDATE, 35 | MANDOC_OS_OTHER, 36 | "linux"); 37 | 38 | mparse_readfd(mp, STDIN_FILENO, "STDIN"); 39 | struct roff_meta* meta = mparse_result(mp); 40 | 41 | if (meta == NULL) { 42 | puts("Meta is nil"); 43 | } else { 44 | puts("tree_man >>>>>>>"); 45 | tree_man(NULL, meta); 46 | if (meta->first->child == NULL) { 47 | puts("Meta first is nil"); 48 | } 49 | 50 | 51 | /* puts("tree_mdoc >>>>>>>"); */ 52 | /* tree_mdoc(NULL, meta); */ 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/tests/build.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # -*- coding: utf-8 -*- 3 | # bash 4 | set -o nounset 5 | set -o errexit 6 | 7 | dir=../src/nimmandoc/mandoc-1.14.5 8 | 9 | files=$(find $dir -name "*.c" | 10 | grep -v "test-" | 11 | grep -Ev "(cgi)|(main)|(mandocd)|(demandoc)" | 12 | grep -Ev "(catman)|(soelim)" 13 | xargs) 14 | 15 | echo $files 16 | 17 | clang \ 18 | -Wno-macro-redefined \ 19 | -Wno-implicit-function-declaration \ 20 | -Wno-pointer-bool-conversion \ 21 | -lz -I$dir $files tUsingC.c # $dir/demandoc.c # tUsingC.c $files 22 | 23 | gunzip -c /usr/share/man/man3/write.3p.gz | ./a.out 24 | 25 | # cat << EOF | ./a.out 26 
| # '\" et 27 | # .TH WRITE "3P" 2017 "IEEE/The Open Group" "POSIX Programmer's Manual" 28 | # .\" 29 | # .SH PROLOG 30 | # This manual page is part of the POSIX Programmer's Manual. 31 | # The Linux implementation of this interface may differ (consult 32 | # the corresponding Linux manual page for details of Linux behavior), 33 | # or the interface may not be implemented on Linux. 34 | # .\" 35 | # .SH NAME 36 | # pwrite, 37 | # write 38 | # \(em write on a file 39 | # .SH SYNOPSIS 40 | # .LP 41 | # .nf 42 | # #include 43 | # .P 44 | # ssize_t pwrite(int \fIfildes\fP, const void *\fIbuf\fP, size_t \fInbyte\fP, 45 | # off_t \fIoffset\fP); 46 | # ssize_t write(int \fIfildes\fP, const void *\fIbuf\fP, size_t \fInbyte\fP); 47 | # .fi 48 | # EOF 49 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/tests/tUsingNim.nim: -------------------------------------------------------------------------------- 1 | import 2 | ../nimmandoc, 3 | hmisc/hdebug_misc, 4 | hmisc/other/oswrap 5 | 6 | 7 | var file: AbsFile 8 | 9 | if false: 10 | file = getAppTempFile("true") # "/usr/share/man/man1/true.1.gz" 11 | 12 | file.writeFile(""" 13 | .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.47.3. 14 | .TH TRUE "1" "March 2020" "GNU coreutils 8.32" "User Commands" 15 | .SH NAME 16 | true \- do nothing, successfully 17 | .SH SYNOPSIS 18 | .B true 19 | [\fI\,ignored command line arguments\/\fR] 20 | .br 21 | .B true 22 | 23 | \fI\,OPTION\/\fR \fI\,OPTION\/\fR 24 | 25 | .SH DESCRIPTION 26 | .\" Add any additional description here 27 | .PP 28 | Exit with a status code indicating success. 
29 | .TP 30 | \fB\-\-help\fR 31 | display this help and exit 32 | .TP 33 | \fB\-\-version\fR 34 | output version information and exit 35 | .PP 36 | """) 37 | 38 | else: 39 | file = findManpage("ls") 40 | # file = AbsFile("/usr/share/man/man1/ls.1.gz") 41 | # file = AbsFile("/tmp/Untitled-1.troff") 42 | 43 | echo file 44 | 45 | mcharsAlloc() 46 | var mp = mparseAlloc( 47 | toCInt({mpSO, mpUTF8, mpLatiN1, mpValidate}), 48 | mdosOther, 49 | "linux".cstring 50 | ) 51 | 52 | 53 | var fd = mp.mparseopen(file.string.cstring) 54 | mparsereadfd(mp, fd, file.string.cstring) 55 | var meta = mparseresult(mp) 56 | # treeman(nil, meta) 57 | 58 | starthax() 59 | # echo treerepr(meta.first) 60 | 61 | let nroff = meta.first.tonroffnode() 62 | echo nroff.treeRepr() 63 | -------------------------------------------------------------------------------- /tests/tFromPackage.nim: -------------------------------------------------------------------------------- 1 | ## Create documentation database for a package 2 | 3 | import 4 | haxdoc/extract/[from_nim_code], 5 | haxdoc/generate/[sourcetrail_db], 6 | haxdoc/process/[docentry_group], 7 | haxdoc/[docentry_io], 8 | hnimast/compiler_aux 9 | 10 | import 11 | hmisc/[hdebug_misc], 12 | hmisc/other/[colorlogger, oswrap, hshell] 13 | 14 | import 15 | hpprint 16 | 17 | import std/[tables, sets, options, unittest] 18 | 19 | startHax() 20 | startColorLogger() 21 | 22 | 23 | suite "From project dependency": 24 | test "From hmisc": 25 | let 26 | outDir = getTempDir() / "tFromPackage" 27 | 28 | mkDir outDir 29 | 30 | let db = docDbFromPackage( 31 | findPackage("hmisc", newVRAny()).get()) 32 | 33 | db.writeDbXml(outDir, "package") 34 | writeSourcetrailDb(db, outDir /. "package") 35 | 36 | if hasCmd(shellCmd("dot")): 37 | db.inheritDotGraph().toPng(AbsFile "/tmp/hmisc-inherit.png") 38 | db.structureDotGraph().toPng(outDir /. 
"structure.png") 39 | 40 | suite "From regular package": 41 | test "in /tmp": 42 | let 43 | inDir = getTempDir() / "inPackage" 44 | outDir = getTempDir() / "tFromTmpPackage" 45 | 46 | if exists(inDir): 47 | mkDir outDir 48 | let info = getPackageInfo(inDir) 49 | echo info.projectPath() 50 | echo info.projectImportPath() 51 | let db = docDbFromPackage(info) 52 | 53 | writeSourcetrailDb(db, outDir /. "fromPackage") 54 | writeDbXml(db, outDir, "fromPackage") 55 | 56 | else: 57 | echo "Directory ", inDir, " does not exist, skipping test" 58 | -------------------------------------------------------------------------------- /haxdoc.nimble: -------------------------------------------------------------------------------- 1 | version = "0.1.2" 2 | author = "haxscramper" 3 | description = "Nim documentation generator" 4 | license = "Apache-2.0" 5 | srcDir = "src" 6 | 7 | installExt = @["nim"] 8 | backend = "cpp" 9 | 10 | requires "hnimast#head" 11 | requires "haxorg" 12 | requires "nimtrail >= 0.1.1" 13 | requires "nim >= 1.4.0" 14 | requires "hmisc#head" 15 | requires "hpprint >= 0.2.12" 16 | requires "hcparse#head" 17 | requires "fusion" 18 | requires "cxxstd" 19 | requires "nimtraits#head" 20 | requires "nimble <= 0.13.0" 21 | 22 | requires "flatty >= 0.2.2" 23 | requires "supersnappy >= 2.1.1" 24 | 25 | before install: 26 | # Whatever, I'm too tired of fighting nimble over my local installation 27 | # 'local dependencies' will be added 'stometimes later', so for now I 28 | # just have this hack. Don't care, works for me. 
29 | exec("nimble -y install 'https://github.com/haxscramper/nimspell.git'") 30 | exec("nimble -y install 'https://github.com/haxscramper/cxxstd.git'") 31 | exec("nimble -y install 'https://github.com/haxscramper/nimtrail.git'") 32 | exec("nimble -y install 'https://github.com/haxscramper/haxorg.git'") 33 | 34 | task dockertest, "Run test in docker container": 35 | exec(""" 36 | hmisc-putils \ 37 | dockertest \ 38 | --projectDir:$(pwd) \ 39 | -lfusion \ 40 | -lbenchy \ 41 | -lcligen \ 42 | -lcompiler \ 43 | -lhmisc \ 44 | -lhasts \ 45 | -lhdrawing \ 46 | -lregex \ 47 | -lnimble \ 48 | -lhnimast \ 49 | -lhpprint \ 50 | -lnimtraits \ 51 | -lunicodeplus \ 52 | -lhcparse \ 53 | -lnimspell \ 54 | -lcxxstd \ 55 | -lhaxorg \ 56 | -lnimtrail 57 | """) 58 | 59 | task docgen, "Generate documentation": 60 | exec("hmisc-putils docgen") 61 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/manconf.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, ./manconf_mansearch, ./main_manconf, 7 | ./main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 8 | hmisc / wrappers / wraphelp 9 | 10 | 11 | 12 | export 13 | wraphelp, main_manconf, 14 | main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, manconf_mansearch 15 | 16 | 17 | 18 | 19 | import 20 | mandoc_common 21 | 22 | 23 | 24 | 25 | 26 | # Declaration created in: hc_wrapgen.nim(254, 28) 27 | # Wrapper for `manconf_parse` 28 | # Declared in manconf.h:49 29 | proc manconfParse*(a0: ptr Manconf; a1: cstring; a2: cstring; a3: cstring): void {. 
30 | importc: r"manconf_parse", header: allHeaders.} 31 | ## @import{[[code:proc!proc(ptr[manconf], ptr[const[char]], ptr[char], ptr[char]): void]]} 32 | 33 | 34 | 35 | # Declaration created in: hc_wrapgen.nim(254, 28) 36 | # Wrapper for `manconf_output` 37 | # Declared in manconf.h:50 38 | proc manconfOutput*(a0: ptr Manoutput; a1: cstring; a2: cint): cint {. 39 | importc: r"manconf_output", header: allHeaders.} 40 | ## @import{[[code:proc!proc(ptr[manoutput], ptr[const[char]], int): int]]} 41 | 42 | 43 | 44 | # Declaration created in: hc_wrapgen.nim(254, 28) 45 | # Wrapper for `manconf_free` 46 | # Declared in manconf.h:51 47 | proc manconfFree*(a0: ptr Manconf): void {.importc: r"manconf_free", 48 | header: allHeaders.} 49 | ## @import{[[code:proc!proc(ptr[manconf]): void]]} 50 | 51 | 52 | 53 | # Declaration created in: hc_wrapgen.nim(254, 28) 54 | # Wrapper for `manpath_base` 55 | # Declared in manconf.h:52 56 | proc manpathBase*(a0: ptr Manpaths): void {.importc: r"manpath_base", 57 | header: allHeaders.} 58 | ## @import{[[code:proc!proc(ptr[manpaths]): void]]} 59 | 60 | -------------------------------------------------------------------------------- /src/doc.json: -------------------------------------------------------------------------------- 1 | [{ 2 | "plainName": "hhh", 3 | "doctextBody": {"kind": "onkEmptyNode"}, 4 | "doctextBrief": { 5 | "tree": [ 6 | "onkStmtList", 7 | [{ 8 | "tree": [ 9 | "onkParagraph", 10 | [ 11 | {"kind": "onkWord", "text": "Documentation"}, 12 | {"kind": "onkWord", "text": "for"}, 13 | {"kind": "onkWord", "text": "hhh"} 14 | ]] 15 | }]] 16 | }, 17 | "useKind": "deuDeclaration", 18 | "admonitions": [{ 19 | "kind": "obiNote", 20 | "body": { 21 | "tree": [ 22 | "onkStmtList", 23 | [{ 24 | "tree": [ 25 | "onkParagraph", 26 | [ 27 | {"kind": "onkWord", "text": "prints"}, 28 | {"kind": "onkWord", "text": "out"}, 29 | {"mark": "", "kind": "onkMarkup", "str": "\""}, 30 | {"kind": "onkWord", "text": "123"}, 31 | {"kind": "onkWord", "text": 
"\""} 32 | ]] 33 | }, 34 | {"kind": "onkEmptyNode"} 35 | ]] 36 | } 37 | }], 38 | "kind": "dekProc", 39 | "prSigText": "proc hhh(arg: None[DocSym])", 40 | "prSigTree": { 41 | "kind": "ntkProc", 42 | "arguments": [{ 43 | "ident": "arg", 44 | "kind": "nvdLet", 45 | "vtype": { 46 | "kind": "ntkIdent", 47 | "head": {"plainName": "int", "doctextBriefPlain": "", "useKind": "deuReference", "kind": "dekObject"} 48 | } 49 | }], 50 | "effects": [{ 51 | "plainName": "Eff", 52 | "doctextBriefPlain": "", 53 | "docSym": {"declLink": {"plain": "Eff"}}, 54 | "useKind": "deuReference", 55 | "kind": "dekObject" 56 | }] 57 | }, 58 | "doctextBriefPlain": "Documentation for hhh" 59 | }] -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/manconf_mansearch.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, hmisc / wrappers / wraphelp 7 | 8 | 9 | 10 | export 11 | wraphelp 12 | 13 | 14 | 15 | 16 | import 17 | mandoc_common 18 | 19 | 20 | 21 | 22 | type 23 | 24 | # Declaration created in: hc_wrapgen.nim(743, 20) 25 | # Wrapper for `manpaths` 26 | # Declared in manconf.h:21 27 | Manpaths* {.bycopy, importc: "struct manpaths", header: allHeaders.} = object 28 | ## @import{[[code:struct!manpaths]]} 29 | paths* {.importc: "paths".}: cstringArray ## @import{[[code:struct!manpaths.field!paths]]} 30 | sz* {.importc: "sz".}: SizeT ## @import{[[code:struct!manpaths.field!sz]]} 31 | 32 | 33 | 34 | 35 | 36 | 37 | # Declaration created in: hc_wrapgen.nim(457, 24) 38 | # Wrapper for `manpaths` 39 | # Declared in manconf.h:21 40 | proc destroyManpaths*(obj: ptr Manpaths): void {.importc: r"#.~manpaths()", 41 | header: allHeaders.} 42 | ## @import{[[code:struct!manpaths]]} 43 | 44 | 45 | 46 | # Declaration created in: hc_wrapgen.nim(468, 24) 47 | # Wrapper for `manpaths` 48 | # Declared in manconf.h:21 49 | proc cnewManpaths*(): ptr 
Manpaths {.importc: r"new manpaths()", 50 | header: allHeaders.} 51 | ## @import{[[code:struct!manpaths]]} 52 | 53 | 54 | 55 | # Declaration created in: hc_wrapgen.nim(476, 24) 56 | # Wrapper for `manpaths` 57 | # Declared in manconf.h:21 58 | proc newManpaths*(): ref Manpaths = 59 | ## @import{[[code:struct!manpaths]]} 60 | newImportAux() 61 | new(result, proc (self: ref Manpaths) = 62 | destroyManpaths(addr self[])) 63 | {.emit: "new ((void*)result) manpaths(); /* Placement new */".} 64 | 65 | 66 | 67 | 68 | # Declaration created in: hc_wrapgen.nim(486, 24) 69 | # Wrapper for `manpaths` 70 | # Declared in manconf.h:21 71 | proc initManpaths*(): Manpaths {.importc: r"{className}()", header: allHeaders.} 72 | ## @import{[[code:struct!manpaths]]} 73 | 74 | -------------------------------------------------------------------------------- /tests/tLineDiffDb.nim: -------------------------------------------------------------------------------- 1 | import 2 | ../src/haxdoc, 3 | ../src/haxdoc/[ 4 | extract/from_nim_code, 5 | process/docdb_diff, 6 | docentry, 7 | docentry_io 8 | ] 9 | 10 | import 11 | hmisc/other/[oswrap, hpprint], 12 | hmisc/hdebug_misc 13 | 14 | import 15 | std/unittest 16 | 17 | startHax() 18 | 19 | let 20 | dir = getAppTempDir() 21 | oldDir = dir / "old" 22 | newDir = dir / "new" 23 | oldFile = oldDir /. "file.nim" 24 | newFile = newDir /. 
"file.nim" 25 | oldDbDir = oldDir / "oldDb" 26 | newDbDir = newDir / "newDb" 27 | commonMain = """ 28 | 29 | proc main() = 30 | changeSideEffect() 31 | changeRaiseAnnotation() 32 | changeImplementation() 33 | # proc1(); proc2(); proc3() 34 | """ 35 | 36 | oldCode = """ 37 | proc writeIoEffect() {.tags: [ReadIoEffect].} = 38 | discard 39 | 40 | proc changeSideEffect() = discard 41 | 42 | proc changeRaiseAnnotation() = discard 43 | 44 | proc changeImplementation() = discard 45 | """ 46 | 47 | # proc proc1() = discard 48 | # proc proc2() = discard 49 | # proc proc3() = discard 50 | 51 | 52 | newCode = """ 53 | proc writeIoEffect() {.tags: [ReadIoEffect].} = 54 | discard 55 | 56 | proc changeSideEffect() = 57 | writeIoEffect() 58 | echo 12 #< Does not track write io effect??? 59 | 60 | proc changeRaiseAnnotation() = 61 | raise newException(OsError, "w23423") 62 | 63 | proc changeImplementation() = 64 | for i in [0, 1, 3]: 65 | discard i 66 | """ 67 | 68 | # proc proc1() {.raises: [OsError].} = discard 69 | # proc proc2() {.tags: [IOEffect].} = discard 70 | # proc proc3() = ##[ Documentation update ]## discard 71 | 72 | 73 | mkDir dir 74 | mkDir oldDir 75 | mkDir newDir 76 | oldFile.writeFile(oldCode & commonMain) 77 | newFile.writeFile(newCode & commonMain) 78 | 79 | 80 | suite "API usage": 81 | test "Simple comparison": 82 | let 83 | oldDb = generateDocDb(oldFile) 84 | newDb = generateDocDb(newFile) 85 | diffDb = diffDb(oldDb, newDb) 86 | 87 | let 88 | diffLines = diffDb.diffFile( 89 | oldDb.getFile(oldFile), 90 | newDb.getFile(newFile) 91 | ) 92 | 93 | # pprint oldDb.getFile(oldFile).body.codeLines, ignore = matchField("parts") 94 | # pprint newDb.getFile(newFile).body.codeLines, ignore = matchField("parts") 95 | 96 | # pprint diffLines, ignore = matchField("parts") 97 | 98 | echo formatDiff(diffDb, diffLines) 99 | 100 | 101 | suite "Command line usage": 102 | test "Simple comparison": 103 | rmDir oldDbDir 104 | rmDir newDbDir 105 | withDir dir: 106 | 
haxdocCli(@["nim", "xml", "--outdir=" & $oldDbDir, $oldFile]) 107 | haxdocCli(@["nim", "xml", "--outdir=" & $newDbDir, $newFile]) 108 | haxdocCli(@["diff", $oldDbDir, $newDbDir]) 109 | -------------------------------------------------------------------------------- /assets/nim.rules: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "type": "quotation", 4 | "range": { 5 | "start": "\"\"\"", 6 | "end": "\"\"\"" 7 | }, 8 | "priority": true 9 | }, 10 | { 11 | "type": "quotation", 12 | "range": { 13 | "start": "\"\"\"", 14 | "end": "\"\"\"" 15 | }, 16 | "priority": true 17 | }, 18 | { 19 | "type": "comment", 20 | "range": { 21 | "start": "#\\[", 22 | "end": "\\]#" 23 | }, 24 | "priority": "true" 25 | }, 26 | { 27 | "type": "comment", 28 | "range": { 29 | "start": "##\\[", 30 | "end": "\\]##" 31 | }, 32 | "priority": "true" 33 | }, 34 | { 35 | "type": "quotation", 36 | "patterns": [ 37 | "(?:[^\"]|^)(\"(?:[^\"]|\\\\.)*\")(?:[^\"]|$)", 38 | "(?:[^']|^)('(?:[^']|\\\\.)*')(?:[^']|$)" 39 | ], 40 | "priority": true 41 | }, 42 | { 43 | "type": "comment", 44 | "patterns": [ 45 | "#[^\n]*" 46 | ], 47 | "priority": true 48 | }, 49 | { 50 | "type": "comment", 51 | "patterns": [ 52 | "##[^\n]*" 53 | ], 54 | "priority": true 55 | }, 56 | { 57 | "type": "keyword", 58 | "patterns": [ 59 | "\\bself\\b", 60 | "\\bFalse\\b", 61 | "\\bclass\\b", 62 | "\\bfinally\\b", 63 | "\\bis\\b", 64 | "\\breturn\\b", 65 | "\\bNone\\b", 66 | "\\bcontinue\\b", 67 | "\\bfor\\b", 68 | "\\blambda\\b", 69 | "\\btry\\b", 70 | "\\btrue\\b", 71 | "\\bproc\\b", 72 | "\\bfunc\\b", 73 | "\\biterator\\b", 74 | "\\bconverter\\b", 75 | "\\bmethod\\b", 76 | "\\bmacro\\b", 77 | "\\btemplate\\b", 78 | "\\bfrom\\b", 79 | "\\bnonlocal\\b", 80 | "\\bwhile\\b", 81 | "\\band\\b", 82 | "\\bdel\\b", 83 | "\\bglobal\\b", 84 | "\\bnot\\b", 85 | "\\bwith\\b", 86 | "\\bas\\b", 87 | "\\belif\\b", 88 | "\\bif\\b", 89 | "\\bor\\b", 90 | "\\byield\\b", 91 | "\\bassert\\b", 92 | 
"\\belse\\b", 93 | "\\bimport\\b", 94 | "\\bpass\\b", 95 | "\\bbreak\\b", 96 | "\\bexcept\\b", 97 | "\\bin\\b", 98 | "\\braise\\b" 99 | ] 100 | }, 101 | { 102 | "type": "number", 103 | "patterns": [ 104 | "\\b[0-9]+\\b" 105 | ] 106 | }, 107 | { 108 | "type": "function", 109 | "patterns": [ 110 | "\\b[A-Za-z0-9_]+(?=\\()" 111 | ] 112 | }, 113 | { 114 | "type": "type", 115 | "patterns": [ 116 | "\\b[A-Z][A-Za-z0-9_]*\\b" 117 | ] 118 | }, 119 | { 120 | "type": "type", 121 | "patterns": [ 122 | "\\bint\\b", 123 | "\\bfloat\\b", 124 | "\\bstring\\b", 125 | "\\bint16\\b", 126 | "\\bint8\\b" 127 | ] 128 | } 129 | ] 130 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/main_manconf.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, hmisc / wrappers / wraphelp 7 | 8 | 9 | 10 | export 11 | wraphelp 12 | 13 | 14 | 15 | 16 | import 17 | mandoc_common 18 | 19 | 20 | 21 | 22 | type 23 | 24 | # Declaration created in: hc_wrapgen.nim(743, 20) 25 | # Wrapper for `manoutput` 26 | # Declared in manconf.h:28 27 | Manoutput* {.bycopy, importc: "struct manoutput", header: allHeaders.} = object 28 | ## @import{[[code:struct!manoutput]]} 29 | includes* {.importc: "includes".}: cstring ## @import{[[code:struct!manoutput.field!includes]]} 30 | man* {.importc: "man".}: cstring ## @import{[[code:struct!manoutput.field!man]]} 31 | paper* {.importc: "paper".}: cstring ## @import{[[code:struct!manoutput.field!paper]]} 32 | style* {.importc: "style".}: cstring ## @import{[[code:struct!manoutput.field!style]]} 33 | tag* {.importc: "tag".}: cstring ## @import{[[code:struct!manoutput.field!tag]]} 34 | indent* {.importc: "indent".}: SizeT ## @import{[[code:struct!manoutput.field!indent]]} 35 | width* {.importc: "width".}: SizeT ## @import{[[code:struct!manoutput.field!width]]} 36 | fragment* {.importc: "fragment".}: cint ## 
@import{[[code:struct!manoutput.field!fragment]]} 37 | mdoc* {.importc: "mdoc".}: cint ## @import{[[code:struct!manoutput.field!mdoc]]} 38 | noval* {.importc: "noval".}: cint ## @import{[[code:struct!manoutput.field!noval]]} 39 | synopsisonly* {.importc: "synopsisonly".}: cint ## @import{[[code:struct!manoutput.field!synopsisonly]]} 40 | toc* {.importc: "toc".}: cint ## @import{[[code:struct!manoutput.field!toc]]} 41 | 42 | 43 | 44 | 45 | 46 | 47 | # Declaration created in: hc_wrapgen.nim(457, 24) 48 | # Wrapper for `manoutput` 49 | # Declared in manconf.h:28 50 | proc destroyManoutput*(obj: ptr Manoutput): void {.importc: r"#.~manoutput()", 51 | header: allHeaders.} 52 | ## @import{[[code:struct!manoutput]]} 53 | 54 | 55 | 56 | # Declaration created in: hc_wrapgen.nim(468, 24) 57 | # Wrapper for `manoutput` 58 | # Declared in manconf.h:28 59 | proc cnewManoutput*(): ptr Manoutput {.importc: r"new manoutput()", 60 | header: allHeaders.} 61 | ## @import{[[code:struct!manoutput]]} 62 | 63 | 64 | 65 | # Declaration created in: hc_wrapgen.nim(476, 24) 66 | # Wrapper for `manoutput` 67 | # Declared in manconf.h:28 68 | proc newManoutput*(): ref Manoutput = 69 | ## @import{[[code:struct!manoutput]]} 70 | newImportAux() 71 | new(result, proc (self: ref Manoutput) = 72 | destroyManoutput(addr self[])) 73 | {.emit: "new ((void*)result) manoutput(); /* Placement new */".} 74 | 75 | 76 | 77 | 78 | # Declaration created in: hc_wrapgen.nim(486, 24) 79 | # Wrapper for `manoutput` 80 | # Declared in manconf.h:28 81 | proc initManoutput*(): Manoutput {.importc: r"{className}()", header: allHeaders.} 82 | ## @import{[[code:struct!manoutput]]} 83 | 84 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/mandoc_aux.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, cstd / stddef, hmisc / wrappers / wraphelp 7 | 
8 | 9 | 10 | export 11 | wraphelp 12 | 13 | 14 | 15 | 16 | import 17 | mandoc_common 18 | 19 | 20 | 21 | 22 | 23 | # Declaration created in: hc_wrapgen.nim(254, 28) 24 | # Wrapper for `mandoc_asprintf` 25 | # Declared in mandoc_aux.h:19 26 | proc mandocAsprintf*(a1: cstringArray; a2: cstring): cint {.varargs, 27 | importc: r"mandoc_asprintf", header: allHeaders.} 28 | ## @import{[[code:proc!proc(ptr[ptr[char]], ptr[const[char]]): int]]} 29 | 30 | 31 | 32 | # Declaration created in: hc_wrapgen.nim(254, 28) 33 | # Wrapper for `mandoc_calloc` 34 | # Declared in mandoc_aux.h:21 35 | proc mandocCalloc*(a0: SizeT; a1: SizeT): pointer {.importc: r"mandoc_calloc", 36 | header: allHeaders.} 37 | ## @import{[[code:proc!proc(tkTypedef, tkTypedef): ptr[void]]]} 38 | 39 | 40 | 41 | # Declaration created in: hc_wrapgen.nim(254, 28) 42 | # Wrapper for `mandoc_malloc` 43 | # Declared in mandoc_aux.h:22 44 | proc mandocMalloc*(a0: SizeT): pointer {.importc: r"mandoc_malloc", 45 | header: allHeaders.} 46 | ## @import{[[code:proc!proc(tkTypedef): ptr[void]]]} 47 | 48 | 49 | 50 | # Declaration created in: hc_wrapgen.nim(254, 28) 51 | # Wrapper for `mandoc_realloc` 52 | # Declared in mandoc_aux.h:23 53 | proc mandocRealloc*(a0: pointer; a1: SizeT): pointer {. 54 | importc: r"mandoc_realloc", header: allHeaders.} 55 | ## @import{[[code:proc!proc(ptr[void], tkTypedef): ptr[void]]]} 56 | 57 | 58 | 59 | # Declaration created in: hc_wrapgen.nim(254, 28) 60 | # Wrapper for `mandoc_reallocarray` 61 | # Declared in mandoc_aux.h:24 62 | proc mandocReallocarray*(a0: pointer; a1: SizeT; a2: SizeT): pointer {. 63 | importc: r"mandoc_reallocarray", header: allHeaders.} 64 | ## @import{[[code:proc!proc(ptr[void], tkTypedef, tkTypedef): ptr[void]]]} 65 | 66 | 67 | 68 | # Declaration created in: hc_wrapgen.nim(254, 28) 69 | # Wrapper for `mandoc_recallocarray` 70 | # Declared in mandoc_aux.h:25 71 | proc mandocRecallocarray*(a0: pointer; a1: SizeT; a2: SizeT; a3: SizeT): pointer {. 
72 | importc: r"mandoc_recallocarray", header: allHeaders.} 73 | ## @import{[[code:proc!proc(ptr[void], tkTypedef, tkTypedef, tkTypedef): ptr[void]]]} 74 | 75 | 76 | 77 | # Declaration created in: hc_wrapgen.nim(254, 28) 78 | # Wrapper for `mandoc_strdup` 79 | # Declared in mandoc_aux.h:26 80 | proc mandocStrdup*(a0: cstring): cstring {.importc: r"mandoc_strdup", 81 | header: allHeaders.} 82 | ## @import{[[code:proc!proc(ptr[const[char]]): ptr[char]]]} 83 | 84 | 85 | 86 | # Declaration created in: hc_wrapgen.nim(254, 28) 87 | # Wrapper for `mandoc_strndup` 88 | # Declared in mandoc_aux.h:27 89 | proc mandocStrndup*(a0: cstring; a1: SizeT): cstring {. 90 | importc: r"mandoc_strndup", header: allHeaders.} 91 | ## @import{[[code:proc!proc(ptr[const[char]], tkTypedef): ptr[char]]]} 92 | 93 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/make_wrap.nim: -------------------------------------------------------------------------------- 1 | import hcparse/[wrap_common] 2 | import std/[sugar, random] 3 | 4 | import cstd/make_wrap as cstd_wrap 5 | 6 | const sourceDir* = AbsDir(currentSourcePath()).splitDir().head 7 | 8 | let 9 | resdir = cwd() 10 | inDir = toAbsDir("mandoc-1.14.5") 11 | 12 | let parseConf = baseCppParseConf.withIt do: 13 | it.globalFlags = @[ 14 | "-xc", 15 | "--include-with-prefix=/usr/include/c++/10.2.0/bits/", 16 | "--include=sys/types.h", 17 | "--include=stdio.h" 18 | ] 19 | 20 | let wrapConf = baseCWrapConf.withDeepIt do: 21 | it.baseDir = toAbsDir(inDir) 22 | it.nimOutDir = resDir 23 | it.wrapName = "mandoc" 24 | it.depsConf = @[cstd_wrap.wrapConf, baseCWrapConf] 25 | 26 | it.getSavePathImpl = ( 27 | proc(file: AbsFile, conf: WrapConf): LibImport = 28 | result = baseCWrapConf.getSavePathImpl(file, conf) 29 | result.addPathPrefix "tmp" 30 | ) 31 | 32 | it.ignoreCursor = ( 33 | proc(cursor: CXCursor, conf: WrapConf): bool {.closure.} = 34 | if cursor.isFromFile(inDir /. 
"main.h"): 35 | return $cursor in ["manoutput", "roff_meta"] 36 | 37 | if cursor.isFromFile(inDir /. "mdoc.h"): 38 | return $cursor in ["roff_node"] 39 | 40 | elif cursor.isFromFile(inDir /. "mandoc_parse.h"): 41 | return $cursor in ["roff_meta"] 42 | 43 | elif cursor.isFromFile(inDir /. "roff.h"): 44 | return $cursor in ["tbl_span"] 45 | 46 | else: 47 | baseCWrapConf.ignoreCursor(cursor, conf) 48 | ) 49 | 50 | it.makeHeader = ( 51 | proc(cursor: CXCursor, conf: WrapConf): NimHeaderSpec {.closure.} = 52 | if cursor.isFromDir(inDir): 53 | initHeaderSpec newPIdent("allHeaders") 54 | 55 | else: 56 | initHeaderSpec cursor.asIncludeFromDir(conf, inDir) 57 | ) 58 | 59 | it.setPrefixForEnum @{ 60 | "mandoclevel" : "ml", 61 | "mandocerr" : "me", 62 | "mandoc_esc" : "msc", 63 | "mandoc_os" : "mdos", 64 | "MANDOC": "mdc", 65 | "ASCII": "ma", 66 | "MODE": "mode", 67 | "TYPE": "mtype", 68 | "MPARSE": "mp", 69 | "NODE": "mn" 70 | } 71 | 72 | it.newProcCb = ( 73 | proc( 74 | genProc: var GenProc, conf: WrapConf, cache: var WrapCache 75 | ): seq[WrappedEntry] = 76 | return errorCodesToException(genProc, conf, cache, @{ 77 | toScopedIdent("mparse_open") : negativeError("Cannot open file", {1}), 78 | }) 79 | ) 80 | 81 | it.userCode = ( 82 | proc(file: WrappedFile): tuple[node: PNode, position: WrappedEntryPos] = 83 | result.position = wepBeforeAll 84 | result.node = pquote do: 85 | import mandoc_common 86 | ) 87 | 88 | 89 | when isMainModule: 90 | randomize() 91 | 92 | wrapConf.logger = newTermLogger(file = true, line = true) 93 | wrapConf.logger.leftAlignFiles = 18 94 | 95 | var files: seq[AbsFile] 96 | for file in walkDir(inDir, AbsFile, exts = @["h"]): 97 | if file.name() in [ 98 | "mandoc", "roff", "mdoc", "manconf", "mansearch", 99 | "mandoc_aux", "mandoc_parse", "main", "tbl", "eqn" 100 | ]: 101 | files.add file 102 | 103 | discard wrapAllFiles(files, wrapConf, parseConf) 104 | 105 | wrapConf.notice "Conversion done" 106 | execShell shellCmd( 107 | nim, c, -r, warnings = off, 
"tests/tUsingNim.nim") 108 | 109 | wrapConf.notice "compilation ok" 110 | -------------------------------------------------------------------------------- /tests/cbackend/tFinalizeHCparse.nim: -------------------------------------------------------------------------------- 1 | import 2 | haxdoc/extract/[from_doxygen_xml, from_nim_code], 3 | haxdoc/[docentry_io, docentry], 4 | hmisc/other/[oswrap, colorlogger], 5 | hmisc/algo/htemplates, 6 | hmisc/[hdebug_misc], 7 | hnimast/compiler_aux, 8 | hcparse, hcparse/hc_docwrap 9 | 10 | import std/[unittest, options] 11 | 12 | startHax() 13 | startColorlogger() 14 | 15 | let 16 | dir = getAppTempDir() 17 | toDir = dir / "doxygen_xml" 18 | codegenDir = dir / "codegen" 19 | inputFile = dir /. "file.cpp" 20 | 21 | mkDir codegenDir 22 | 23 | suite "From doxygen for simple code sample": 24 | test "Generate": 25 | mkWithDirStructure dir: 26 | file inputFile: 27 | """ 28 | struct LocForward1; 29 | struct LocForward2; 30 | 31 | struct LocForwardUser { LocForward2* forward2; LocForward1* forward1; }; 32 | 33 | struct LocForward1 {}; 34 | struct LocForward2 {}; 35 | 36 | struct LocUser { LocForward2* forward2; LocForward1* forward1; }; 37 | 38 | struct Forward2; 39 | struct Forward1; 40 | 41 | struct ForwardUser { Forward2* forward2; Forward1* forward1; }; 42 | 43 | #include "forward1.hpp" 44 | #include "forward2.hpp" 45 | 46 | struct User { Forward2 forward2; Forward1 forward1; }; 47 | 48 | 49 | /// Documentation for main class 50 | class Main { 51 | public: 52 | int field; ///< Field documentation 53 | }; 54 | 55 | 56 | /*! 
57 | \param arg1 Documentation for second argument 58 | \param arg2 Documentation for the first argument 59 | */ 60 | Main method(int arg1, int arg2) {} 61 | 62 | enum test { FIRST, SECOND }; 63 | 64 | """ 65 | file "forward2.hpp": 66 | """ 67 | struct Forward1; 68 | struct Forward2 { Forward1* forward1; }; 69 | 70 | Forward2 forward2Proc() {} 71 | Forward1* forward1PtrProc() {} 72 | """ 73 | file "forward1.hpp": 74 | """ 75 | struct Forward2; 76 | struct Forward1 { Forward2* forward2; }; 77 | 78 | Forward1 forward1Proc() {} 79 | Forward2* forward2PtrProc() {} 80 | """ 81 | 82 | doxygenXmlForDir(dir, toDir, doxyfilePattern = "Doxyfile") 83 | 84 | test "Generate C++ wrappers": 85 | let wrapConf = baseCppWrapConf.withDeepIt do: 86 | it.baseDir = dir 87 | it.nimOutDir = dir / "nimout" 88 | # it.refidMap = getRefidLocations(toDir) 89 | it.codegenDir = some codegenDir 90 | 91 | mkDir wrapConf.nimOutDir 92 | 93 | let files = listFiles(dir, @["hpp", "cpp"]) 94 | echov files 95 | wrapWithConf(files, wrapConf, baseCppParseConf) 96 | 97 | test "Create DB": 98 | let db = generateDocDb(toDir, loadLocationMap( 99 | codegenDir / baseCppWrapConf.refidFile)) 100 | 101 | discard generateDocDb( 102 | dir / "nimout" /. 
"file.nim", 103 | startDb = db, 104 | fileLib = some("finalize"), 105 | extraLibs = @{ 106 | findPackage( 107 | "hcparse", newVRAny()).get().projectImportPath(): "hcparse", 108 | findPackage("hmisc", newVRAny()).get().projectImportPath(): "hmisc" 109 | }, 110 | orgComments = @["finalize"] 111 | ) 112 | 113 | db.writeDbXml(dir, "doxygen") 114 | 115 | # suite "From doxygen for sourcetrail": 116 | # test "Generate": 117 | # let 118 | # package = findPackage("nimtrail", newVRAny()).get() 119 | # strail = package.projectPath() / "SourcetrailDB" / "core" 120 | # toDir = strail / "doxygen_xml" 121 | 122 | # doxygenXmlForDir(strail, toDir) 123 | 124 | # let db = generateDocDb(toDir) 125 | # db.writeDbXml(dir, "doxygen") 126 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/mandoc_parse.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, ./main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 7 | ./mandoc_parse_roff_eqn_main_tbl_mdoc, hmisc / wrappers / wraphelp 8 | 9 | 10 | 11 | export 12 | mandoc_parse_roff_eqn_main_tbl_mdoc, wraphelp, 13 | main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl 14 | 15 | 16 | 17 | 18 | import 19 | mandoc_common 20 | 21 | 22 | 23 | 24 | proc toCInt*(en: MpMparse): cint {.inline.} = 25 | ## Convert proxy enum to integer value 26 | cint(en.int) 27 | 28 | proc toCInt*(en: set[MpMparse]): cint {.inline.} = 29 | ## Convert set of enums to bitmasked integer 30 | for val in en: 31 | result = bitor(result, val.cint) 32 | 33 | 34 | 35 | 36 | 37 | # Declaration created in: hc_wrapgen.nim(254, 28) 38 | # Wrapper for `mparse_alloc` 39 | # Declared in mandoc_parse.h:37 40 | proc mparseAlloc*(a1: cint; a2: MandocOsC; a3: cstring): ptr Mparse {. 
41 | importc: r"mparse_alloc", header: allHeaders.} 42 | ## @import{[[code:proc!proc(int, mandoc_os, ptr[const[char]]): ptr[mparse]]]} 43 | 44 | 45 | 46 | # Declaration created in: hc_wrapgen.nim(254, 28) 47 | # Wrapper for `mparse_copy` 48 | # Declared in mandoc_parse.h:38 49 | proc mparseCopy*(a0: ptr Mparse): void {.importc: r"mparse_copy", 50 | header: allHeaders.} 51 | ## @import{[[code:proc!proc(ptr[mparse]): void]]} 52 | 53 | 54 | 55 | # Declaration created in: hc_wrapgen.nim(254, 28) 56 | # Wrapper for `mparse_free` 57 | # Declared in mandoc_parse.h:39 58 | proc mparseFree*(a0: ptr Mparse): void {.importc: r"mparse_free", 59 | header: allHeaders.} 60 | ## @import{[[code:proc!proc(ptr[mparse]): void]]} 61 | 62 | 63 | 64 | # Declaration created in: hc_wrapgen.nim(254, 28) 65 | # Wrapper for `mparse_open` 66 | # Declared in mandoc_parse.h:40 67 | proc mparseOpenRaw*(a0: ptr Mparse; a1: cstring): cint {. 68 | importc: r"mparse_open", header: allHeaders.} 69 | ## @import{[[code:proc!proc(ptr[mparse], ptr[const[char]]): int]]} 70 | 71 | 72 | 73 | # Declaration created in: hc_impls.nim(117, 68) 74 | # Wrapper for `mparse_open` 75 | # Declared in mandoc_parse.h:40 76 | proc mparseOpen*(a0: ptr Mparse; a1: cstring): cint = 77 | ## @import{[[code:proc!proc(ptr[mparse], ptr[const[char]]): int]]} 78 | result = mparse_openRaw(a0, a1) 79 | if result notin cint(0) .. cint(2147483647): 80 | var errMsg = "Return value of the mparse_open is not in valid range - expected [0 .. high(cint)], but got " & 81 | $result & 82 | ". Cannot open file. Arguments were \'" 83 | errMsg &= $(a1) 84 | errMsg &= "\'." 85 | raise newException(ValueError, errMsg) 86 | 87 | 88 | 89 | 90 | # Declaration created in: hc_wrapgen.nim(254, 28) 91 | # Wrapper for `mparse_readfd` 92 | # Declared in mandoc_parse.h:41 93 | proc mparseReadfd*(a0: ptr Mparse; a1: cint; a2: cstring): void {. 
94 | importc: r"mparse_readfd", header: allHeaders.} 95 | ## @import{[[code:proc!proc(ptr[mparse], int, ptr[const[char]]): void]]} 96 | 97 | 98 | 99 | # Declaration created in: hc_wrapgen.nim(254, 28) 100 | # Wrapper for `mparse_reset` 101 | # Declared in mandoc_parse.h:42 102 | proc mparseReset*(a0: ptr Mparse): void {.importc: r"mparse_reset", 103 | header: allHeaders.} 104 | ## @import{[[code:proc!proc(ptr[mparse]): void]]} 105 | 106 | 107 | 108 | # Declaration created in: hc_wrapgen.nim(254, 28) 109 | # Wrapper for `mparse_result` 110 | # Declared in mandoc_parse.h:43 111 | proc mparseResult*(a1: ptr Mparse): ptr RoffMeta {.importc: r"mparse_result", 112 | header: allHeaders.} 113 | ## @import{[[code:proc!proc(ptr[mparse]): ptr[roff_meta]]]} 114 | 115 | -------------------------------------------------------------------------------- /src/haxdoc/extract/from_doxygen_xml.nim: -------------------------------------------------------------------------------- 1 | import 2 | hcparse/dox_compound as DoxCompound, 3 | hcparse/dox_index as DoxIndex, 4 | hcparse/dox_xml, 5 | ../docentry, 6 | std/[strtabs, tables, strformat], 7 | haxorg/[semorg, ast], 8 | hmisc/hasts/[xml_ast], 9 | hmisc/other/[oswrap, hshell, colorlogger, hjson], 10 | hmisc/algo/halgorithm, 11 | hmisc/hdebug_misc, 12 | hpprint 13 | 14 | 15 | export dox_xml 16 | 17 | type 18 | ConvertContext = object 19 | db: DocDb 20 | refidMap: DocLocationMap 21 | doctext: Table[string, SemOrg] 22 | 23 | using ctx: var ConvertContext 24 | 25 | 26 | proc toOrg(ctx; dt: DescriptionType): OrgNode 27 | 28 | proc toOrg(ctx; body: DocParamListType): OrgNode = 29 | result = onkList.newTree() 30 | for item in body.parameterItem: 31 | var listItem = onkListItem.newTree() 32 | listItem["tag"] = onkInlineStmtList.newTree() 33 | for param in item.parameterNameList: 34 | listItem["tag"].add onkMetaTag.newTree( 35 | newOrgIdent("arg"), 36 | onkRawText.newTree(param.parameterName[0][0].mixedStr) 37 | ) 38 | 39 | listItem["body"] = 
ctx.toOrg(item.parameterDescription) 40 | 41 | result.add listItem 42 | 43 | proc toOrg(ctx; body: DocParaTypeBody): OrgNode = 44 | case body.kind: 45 | of dptParameterList: result = ctx.toOrg(body.docParamListType) 46 | of dptMixedStr: result = onkWord.newTree(body.mixedStr) 47 | 48 | else: 49 | raise newUnexpectedKindError(body, pstring(body)) 50 | 51 | proc toOrg(ctx; dtb: DescriptionTypeBody): OrgNode = 52 | case dtb.kind: 53 | of dtPara: 54 | result = onkStmtList.newTree() 55 | for item in dtb.docParaType: 56 | result.add ctx.toOrg(item) 57 | 58 | else: 59 | raise newUnexpectedKindError(dtb, pstring(dtb)) 60 | 61 | proc toOrg(ctx; dt: DescriptionType): OrgNode = 62 | result = onkStmtList.newTree() 63 | for sub in dt.xsdChoice: 64 | result.add ctx.toOrg(sub) 65 | 66 | 67 | 68 | proc newEntryForLocation( 69 | ctx; loc: LocationType, name: string): DocEntry = 70 | let link = ctx.refidMap.findLinkForLocation( 71 | initDocLocation(AbsFile(loc.file), loc.line, loc.column.get(0)), 72 | name 73 | ) 74 | 75 | if link.isNone(): 76 | raise newArgumentError( 77 | "Could not find corresponding documentable entry for location", 78 | &"{loc.file}:{loc.line}:{loc.column}" 79 | ) 80 | 81 | return ctx.db.newDocEntry(link.get()) 82 | 83 | proc register(ctx; dox: SectionDefType) = 84 | for member in dox.memberdef: 85 | var entry = ctx.newEntryForLocation( 86 | member.location, $member.name[0]) 87 | if member.detailedDescription.getSome(desc): 88 | entry.docText.docBody = ctx.toOrg(desc).toSemOrg() 89 | 90 | 91 | proc register(ctx; dox: DoxCompound.CompoundDefType) = 92 | case dox.kind: 93 | of dckFile: 94 | for section in dox.sectiondef: 95 | ctx.register(section) 96 | 97 | of dckClass, dckStruct: 98 | for section in dox.sectionDef: 99 | ctx.register(section) 100 | 101 | of dckDir: 102 | discard 103 | 104 | else: 105 | err dox.kind 106 | 107 | proc generateDocDb*( 108 | doxygenDir: AbsDir, refidMap: DocLocationMap): DocDb = 109 | var ctx = ConvertContext(refidMap: refidMap, db: 
newDocDb()) 110 | assertExists( 111 | doxygenDir / "xml", 112 | "Could not find generated doxygen XML directory") 113 | 114 | let index = indexForDir(doxygenDir) 115 | for item in index.compound: 116 | let file = item.fileForItem(doxygenDir) 117 | 118 | let parsed = parseDoxygenFile(file) 119 | for comp in parsed.compounddef: 120 | ctx.register(comp) 121 | 122 | return ctx.db 123 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/roff_mdoc.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, hmisc / wrappers / wraphelp 7 | 8 | 9 | 10 | export 11 | wraphelp 12 | 13 | 14 | 15 | 16 | import 17 | mandoc_common 18 | 19 | 20 | 21 | 22 | type 23 | 24 | # Declaration created in: hc_wrapgen.nim(743, 20) 25 | # Wrapper for `mdoc_argv` 26 | # Declared in mdoc.h:56 27 | MdocArgv* {.bycopy, importc: "struct mdoc_argv", header: allHeaders.} = object 28 | ## @import{[[code:struct!mdoc_argv]]} 29 | arg* {.importc: "arg".}: MdocargtC ## @import{[[code:struct!mdoc_argv.field!arg]]} 30 | line* {.importc: "line".}: cint ## @import{[[code:struct!mdoc_argv.field!line]]} 31 | pos* {.importc: "pos".}: cint ## @import{[[code:struct!mdoc_argv.field!pos]]} 32 | sz* {.importc: "sz".}: SizeT ## @import{[[code:struct!mdoc_argv.field!sz]]} 33 | value* {.importc: "value".}: cstringArray ## @import{[[code:struct!mdoc_argv.field!value]]} 34 | 35 | 36 | 37 | 38 | # Declaration created in: hc_wrapgen.nim(743, 20) 39 | # Wrapper for `mdoc_arg` 40 | # Declared in mdoc.h:69 41 | MdocArg* {.bycopy, importc: "struct mdoc_arg", header: allHeaders.} = object 42 | ## @import{[[code:struct!mdoc_arg]]} 43 | argc* {.importc: "argc".}: SizeT ## @import{[[code:struct!mdoc_arg.field!argc]]} 44 | argv* {.importc: "argv".}: ptr MdocArgv ## @import{[[code:struct!mdoc_arg.field!argv]]} 45 | refcnt* {.importc: "refcnt".}: cuint ## 
@import{[[code:struct!mdoc_arg.field!refcnt]]} 46 | 47 | 48 | 49 | 50 | 51 | 52 | # Declaration created in: hc_wrapgen.nim(457, 24) 53 | # Wrapper for `mdoc_argv` 54 | # Declared in mdoc.h:56 55 | proc destroyMdocArgv*(obj: ptr MdocArgv): void {.importc: r"#.~mdoc_argv()", 56 | header: allHeaders.} 57 | ## @import{[[code:struct!mdoc_argv]]} 58 | 59 | 60 | 61 | # Declaration created in: hc_wrapgen.nim(468, 24) 62 | # Wrapper for `mdoc_argv` 63 | # Declared in mdoc.h:56 64 | proc cnewMdocArgv*(): ptr MdocArgv {.importc: r"new mdoc_argv()", 65 | header: allHeaders.} 66 | ## @import{[[code:struct!mdoc_argv]]} 67 | 68 | 69 | 70 | # Declaration created in: hc_wrapgen.nim(476, 24) 71 | # Wrapper for `mdoc_argv` 72 | # Declared in mdoc.h:56 73 | proc newMdocArgv*(): ref MdocArgv = 74 | ## @import{[[code:struct!mdoc_argv]]} 75 | newImportAux() 76 | new(result, proc (self: ref MdocArgv) = 77 | destroyMdocArgv(addr self[])) 78 | {.emit: "new ((void*)result) mdoc_argv(); /* Placement new */".} 79 | 80 | 81 | 82 | 83 | # Declaration created in: hc_wrapgen.nim(486, 24) 84 | # Wrapper for `mdoc_argv` 85 | # Declared in mdoc.h:56 86 | proc initMdocArgv*(): MdocArgv {.importc: r"{className}()", header: allHeaders.} 87 | ## @import{[[code:struct!mdoc_argv]]} 88 | 89 | 90 | 91 | # Declaration created in: hc_wrapgen.nim(457, 24) 92 | # Wrapper for `mdoc_arg` 93 | # Declared in mdoc.h:69 94 | proc destroyMdocArg*(obj: ptr MdocArg): void {.importc: r"#.~mdoc_arg()", 95 | header: allHeaders.} 96 | ## @import{[[code:struct!mdoc_arg]]} 97 | 98 | 99 | 100 | # Declaration created in: hc_wrapgen.nim(468, 24) 101 | # Wrapper for `mdoc_arg` 102 | # Declared in mdoc.h:69 103 | proc cnewMdocArg*(): ptr MdocArg {.importc: r"new mdoc_arg()", 104 | header: allHeaders.} 105 | ## @import{[[code:struct!mdoc_arg]]} 106 | 107 | 108 | 109 | # Declaration created in: hc_wrapgen.nim(476, 24) 110 | # Wrapper for `mdoc_arg` 111 | # Declared in mdoc.h:69 112 | proc newMdocArg*(): ref MdocArg = 113 | ## 
@import{[[code:struct!mdoc_arg]]} 114 | newImportAux() 115 | new(result, proc (self: ref MdocArg) = 116 | destroyMdocArg(addr self[])) 117 | {.emit: "new ((void*)result) mdoc_arg(); /* Placement new */".} 118 | 119 | 120 | 121 | 122 | # Declaration created in: hc_wrapgen.nim(486, 24) 123 | # Wrapper for `mdoc_arg` 124 | # Declared in mdoc.h:69 125 | proc initMdocArg*(): MdocArg {.importc: r"{className}()", header: allHeaders.} 126 | ## @import{[[code:struct!mdoc_arg]]} 127 | 128 | -------------------------------------------------------------------------------- /readme.md: -------------------------------------------------------------------------------- 1 | **note**: *very* work-in-progress 2 | 3 | Documentation generator and source code analysis system for nim. 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | 58 | # Command-line tool 59 | 60 | - [ ] Command-line tool to generate documentation in different formats 61 | (html/xml) 62 | 63 | # Documentation extractor as-a-library 64 | 65 | - [ ] Documentation extractor-as-a-libary. Provide API for writing 66 | documentation extraction, analysis and generation tools. 67 | - [ ] Use unit tests as use examples for library. 68 | - [ ] Extracting additional semantic information from haxorg documentation 69 | comments (metadata information like `@ingroup{}` annotations, 70 | documentation for comments, fields and so on). 71 | - [ ] User-defined higlighting logic (special side effects, exceptions that 72 | developer might want to make accent on) 73 | - [ ] Show resolved links in documentation in sourcetrail tool. Index 74 | `.org` (`.md`, `.rst` etc.) documents as well to allow full 75 | interoperability between source code in documentation. 
It is somewhat 76 | annoying that =sourcetrail= does not allow to open two split panes, 77 | or edit source code directly in the same window, but this could be 78 | mitigated with support for suchronization between editors and 79 | sourcetrail viewers. 80 | 81 | # Static site generation for documentation 82 | 83 | - [ ] Basic implementation of simple documentation - no pretty 84 | configuration, can be just bare HTML 85 | - [ ] Search implementation - can use fulltext search like 86 | [flexsearch](https://github.com/nextapps-de/flexsearch) in addition 87 | to something closer to [hogle](https://hoogle.haskell.org/) for 88 | API/error search. 89 | 90 | # Automatic change detection 91 | 92 | - [ ] Automatic change detection for API/implementation 93 | - [ ] Automatic changelog documentation 94 | -------------------------------------------------------------------------------- /src/haxdoc/extract/from_manpages.nim: -------------------------------------------------------------------------------- 1 | import 2 | ../wrappers/mandoc/nimmandoc, 3 | ../docentry, 4 | ../docentry_io, 5 | std/[strutils, sequtils, tables, sets], 6 | hmisc/other/[oswrap, cliparse], 7 | hmisc/[base_errors, hdebug_misc], 8 | hmisc/algo/[hlex_base, htemplates, hstring_algo], 9 | haxorg/semorg, 10 | hpprint 11 | 12 | type 13 | ConvertContext = enum 14 | ccNone 15 | ccDescription 16 | 17 | RegStack = object 18 | context: seq[ConvertContext] 19 | 20 | ManContext = object 21 | file: AbsFile 22 | db: DocDB 23 | top: DocEntry 24 | 25 | using 26 | ctx: var ManContext 27 | node: NRoffNode 28 | 29 | 30 | 31 | func `+`(reg: RegStack, conv: ConvertContext): RegStack = 32 | result = reg 33 | result.context.add conv 34 | 35 | func top(reg: RegStack): ConvertContext = reg.context[^1] 36 | 37 | 38 | proc toSemOrg(node): SemOrg = discard 39 | 40 | func isOptStart(node): bool = 41 | case node.kind: 42 | of rnkText: 43 | # echov node.textVal 44 | node.textVal.startsWith("-"): 45 | 46 | else: 47 | node[0].isOptStart() 
48 | 49 | func isPunctuation(node): bool = 50 | if node.kind == rnkText and 51 | node.textVal.allIt(it in PunctSentenceChars + AllSpace): 52 | true 53 | 54 | else: 55 | false 56 | 57 | func isKvSeparator(node): bool = 58 | node.kind == rnkText and 59 | node.textVal.allIt(it in {'=', '['} + AllSpace) and 60 | node.textVal.anyIt(it in {'=', '[', ':'}) 61 | 62 | func isOptionalKv(node): bool = 63 | node.kind == rnkText and 64 | node.textVal.startsWith("[=") 65 | 66 | func findLikeOpt(node): int = 67 | var 68 | failColumn = 0 69 | failLine = 0 70 | 71 | for idx, sub in pairs(node): 72 | if sub.kind == rnkText: 73 | if isOptStart(sub): 74 | return idx 75 | 76 | else: 77 | failColumn = sub.column 78 | failLine = sub.line 79 | echov failColumn 80 | result = -1 81 | 82 | else: 83 | if sub.findLikeOpt() != -1: 84 | return idx 85 | 86 | 87 | func toOpt(node): CliOpt = 88 | func aux(n: NRoffNode, opt: var CliOpt) = 89 | case n.kind: 90 | of rnkText: 91 | opt.keyPath = @[n.textVal] 92 | 93 | else: 94 | for sub in n: 95 | aux(sub, opt) 96 | 97 | aux(node, result) 98 | 99 | func flatPlainText(node): string = 100 | case node.kind: 101 | of rnkText: result = node.textVal 102 | else: 103 | for sub in node: 104 | result &= sub.flatPlaintext() 105 | 106 | func splitFlags(node): tuple[opts: seq[CliOpt], docs: NRoffNode] = 107 | var idx = 0 108 | while idx < node[0].len: 109 | let it = node[0][idx] 110 | if it.isOptStart(): 111 | result.opts.add it.toOpt() 112 | 113 | elif it.isPunctuation(): 114 | discard 115 | 116 | elif it.isKvSeparator(): 117 | inc idx 118 | result.opts[^1].valStr = node[0][idx].flatPlaintext() 119 | if it.isOptionalKv(): 120 | inc idx 121 | 122 | else: 123 | raise newUnexpectedKindError( 124 | it, node[0].treeRepr()) 125 | 126 | inc idx 127 | 128 | result.docs = node[1] 129 | 130 | 131 | 132 | func newOpts(ctx; node): DocEntry = 133 | let (opts, docs) = node.splitFlags() 134 | let name = opts.getMaxIt(it.key.len()).key.dropPrefix( 135 | toStrPart ["--", "-"]) 136 
| 137 | result = ctx.top.newDocEntry(dekShellOption, name) 138 | 139 | 140 | 141 | proc registerDesc(ctx; node; reg: RegStack) = 142 | var lastOpt: DocEntry 143 | for sub in node: 144 | case sub.kind: 145 | of rnkText: discard # TODO add text 146 | of rnkParagraph: discard 147 | of rnkSection, rnkStmtList: 148 | if sub.isOptStart(): 149 | lastOpt = ctx.newOpts(sub) 150 | 151 | else: 152 | registerDesc(ctx, sub, reg) 153 | 154 | of rnkIndented: 155 | let org = sub.toSemOrg() 156 | # TODO append documentation to last option 157 | 158 | else: 159 | raise newUnexpectedKindError( 160 | sub.kind, sub.treeREpr()) 161 | 162 | 163 | proc register(ctx; node; reg: RegStack) = 164 | case node.kind: 165 | of rnkStmtList: 166 | for sub in node: 167 | ctx.register(sub, reg) 168 | 169 | of rnkComment: 170 | discard 171 | 172 | of rnkSection: 173 | case node[0].textVal: 174 | of "NAME": ctx.top.docText.docBrief = node[1].toSemOrg() 175 | of "SYNOPSIS": discard 176 | of "SEE ALSO": discard # TODO implement mapping to code links 177 | of "AUTHOR", "REPORTING BUGS", "COPYRIGHT": 178 | discard 179 | of "DESCRIPTION": 180 | ctx.registerDesc(node, reg + ccDescription) 181 | 182 | else: 183 | raise newImplementKindError( 184 | node[0].textVal, node.treeRepr()) 185 | 186 | 187 | else: 188 | raise newImplementKindError(node, node.treeREpr()) 189 | 190 | proc generateDocDb*(file: AbsFile, startDb: DocDb = newDocDb()): DocDb = 191 | let node = parseNRoff(file) 192 | 193 | var ctx = ManContext(db: startDb, file: file) 194 | 195 | ctx.top = ctx.db.newDocEntry(dekShellCmd, "zzzz") 196 | 197 | ctx.register(node, RegStack(context: @[ccNone])) 198 | 199 | return ctx.db 200 | 201 | when isMainModule: 202 | startHax() 203 | let db = findManpage("ls").generateDocDb() 204 | # pprint db 205 | db.writeDbXml(getAppTempDir(), "ls") 206 | echov "ok" 207 | -------------------------------------------------------------------------------- /src/haxdoc/process/docentry_group.nim: 
-------------------------------------------------------------------------------- 1 | import ../docentry 2 | import ./docentry_query 3 | import std/[tables, options, colors, strformat] 4 | 5 | import 6 | hmisc/core/all, 7 | hmisc/types/hgraph, 8 | hmisc/hasts/graphviz_ast 9 | 10 | export hgraph, graphviz_ast 11 | 12 | 13 | converter toDotNodeId*(id: DocId): DotNodeId = 14 | toDotNodeId(id.id.int) 15 | 16 | type 17 | DocTypeGroup = object 18 | typeEntry*: DocEntry 19 | procs*: DocEntryGroup 20 | 21 | # proc 22 | 23 | proc newTypeGroup*(entry: DocEntry): DocTypeGroup = 24 | DocTypeGroup(typeEntry: entry) 25 | 26 | proc procsByTypes*(db: DocDb): tuple[byType: seq[DocTypeGroup], other: DocEntryGroup] = 27 | var table: Table[DocId, DocTypeGroup] 28 | for _, entry in db.entries: 29 | if entry.kind in dekProcKinds: 30 | let id = entry.firstTypeId() 31 | if id in db: 32 | table.mgetOrPut(id, newTypeGroup(db[id])).procs.add entry 33 | 34 | else: 35 | # echov entry.name, entry.procType(), id 36 | result.other.add entry 37 | 38 | 39 | for _, group in table: 40 | result.byType.add group 41 | 42 | proc splitCommonProcs*(group: DocTypeGroup): DocTypeGroup = 43 | result.typeEntry = group.typeEntry 44 | result.procs = newEntryGroup(@[newEntryGroup(), newEntryGroup()]) 45 | for entry in group.procs: 46 | if entry.name in ["$", "==", "<", "items", "pairs", ">", "!=", "[]", "[]="]: 47 | result.procs.nested[0].add entry 48 | 49 | else: 50 | result.procs.nested[1].add entry 51 | 52 | 53 | proc inheritGraph*(db: DocDb): HGraph[DocId, NoProperty] = 54 | result = newHGraph[DocId, NoProperty]() 55 | for id, entry in db.entries: 56 | if entry.kind in dekStructKinds: 57 | for super in entry.superTypes: 58 | result.addOrGetEdge(id, super) 59 | 60 | proc inheritDotGraph*(db: DocDb): DotGraph = 61 | let inherit = db.inheritGraph() 62 | 63 | result = inherit.dotRepr( 64 | proc(id: DocId, _: HNode): DotNode = 65 | if not id.isValid(): 66 | result = makeDotNode(0, "--") 67 | 68 | else: 69 | result 
= makeDotNode(0, db[id].name) 70 | case db[id].kind: 71 | of dekException: result.color = some colRed 72 | of dekEffect: result.color = some colGreen 73 | of dekDefect: result.color = some colBlue 74 | else: 75 | discard 76 | 77 | ) 78 | 79 | result.rankDir = grdLeftRight 80 | 81 | 82 | proc usageDotGraph*(db: DocDb): DotGraph = 83 | result = makeDotGraph() 84 | result.rankDir = grdLeftRight 85 | for id, entry in db.entries: 86 | let userId = toDotNodeId(entry.id()) 87 | case entry.kind: 88 | of dekBuiltin, dekEnum: 89 | result.add makeDotNode(userId, entry.name).withIt do: 90 | it.color = some colRed 91 | 92 | of dekAliasKinds: 93 | result.add makeDotNode(userId, &"{entry.name} = {entry.baseType}") 94 | for target in entry.baseType.allId(): 95 | if target.isValid(): 96 | result.add makeDotEdge(userId, target).withIt do: 97 | it.style = edsDashed 98 | 99 | of dekStructKinds: 100 | var typeFields: seq[RecordField] = @[makeDotRecord(0, &"[[ {entry.name} ]]")] 101 | for nested in entry: 102 | if nested.kind == dekField and nested.identType.isSome(): 103 | let ftype = nested.identType.get() 104 | 105 | for ftypeId in ftype.allId(): 106 | if ftypeId.isValid(): 107 | let 108 | fieldPath = toDotPath(userId, nested.id()) 109 | targetPath = toDotPath(ftypeId, 0) 110 | 111 | if db[ftypeId].kind != dekBuiltin: 112 | result.add makeDotEdge(fieldPath, targetPath) 113 | 114 | typeFields.add makeDotRecord( 115 | nested.id(), &"{nested.name}: {ftype}") 116 | 117 | result.add makeRecordDotNode(userId, typeFields).withIt do: 118 | it.color = some colBlue 119 | 120 | for super in entry.superTypes: 121 | result.add makeDotEdge(userId, super).withIt do: 122 | it.style = edsBold 123 | 124 | else: 125 | discard 126 | 127 | proc structureDotGraph*(db: DocDb): DotGraph = 128 | var sub: Table[DocId, DotGraph] 129 | for pack in allItems(db, {dekPackage}): 130 | sub[pack.id()] = makeDotGraph().withIt do: 131 | it.color = some colRed 132 | it.isCluster = true 133 | it.name = pack.name 134 | 
it.label = pack.name 135 | it.add makeDotNode(pack.id(), &""" 136 | name: {pack.name} 137 | auth: {pack.author} 138 | vers: {pack.version}""").withIt do: 139 | it.labelAlign = nlaLeft 140 | 141 | 142 | for module in allItems(db, {dekModule}): 143 | let userId = toDotNodeId(module.id()) 144 | let package = module.fullIdent.parts[0].id 145 | sub[package].add makeDotNode(userId, module.name) 146 | for imp in module.imports: 147 | sub[package].add makeDotEdge(userId, imp) 148 | 149 | result = makeDotGraph() 150 | result.compound = some true 151 | 152 | for _, graph in sub: 153 | result.add graph 154 | 155 | for pack in topItems(db, {dekPackage}): 156 | for req in pack.requires: 157 | if req.resolved.isSome(): 158 | result.add makeDotEdge(pack.id(), req.resolved.get()).withIt do: 159 | it.ltail = some pack.name 160 | it.lhead = some db[db[req.resolved.get()].getPackage()].name 161 | it.label = some req.version 162 | it.style = edsBold 163 | -------------------------------------------------------------------------------- /src/haxdoc/parse/docentry_link.nim: -------------------------------------------------------------------------------- 1 | import 2 | ../docentry_types, 3 | ../docentry, 4 | hmisc/algo/[hparse_base, hlex_base], 5 | hmisc/core/all, 6 | std/[options], 7 | haxorg/defs/defs_all 8 | 9 | type 10 | LinkTokens = enum 11 | ltIdent 12 | ltComma 13 | ltLBrace 14 | ltRBRace 15 | ltRPar 16 | ltLPar 17 | ltLCurly 18 | ltRCurly 19 | ltSemicolon 20 | ltColon 21 | ltEqual 22 | ltDot 23 | ltKindSelector 24 | ltNamespace 25 | 26 | ltEof 27 | 28 | LinkTok = HsTok[LinkTokens] 29 | LinkLex = HsLexer[LinkTok] 30 | 31 | proc lexLink(str: var PosStr): seq[LinkTok] = 32 | if str.finished(): 33 | result.add str.initEof(ltEOF) 34 | 35 | else: 36 | case str[]: 37 | of IdentStartChars: 38 | result.add initTok(str.popIdent(), ltIdent) 39 | 40 | of ',', '[', ']', '(', ')', '{', '}', '.', '!': 41 | result.add initCharTok(str, { 42 | ',': ltComma, 43 | '[': ltLBrace, 44 | ']': ltRBrace, 
45 | '(': ltLPar, 46 | ')': ltRPar, 47 | ':': ltColon, 48 | '}': ltRCurly, 49 | '{': ltLCurly, 50 | '=': ltEqual, 51 | '.': ltDot, 52 | '!': ltKindSelector 53 | }) 54 | 55 | of ':': 56 | if str[':', ':', not {':'}]: 57 | result.add str.initTok(ltNamespace, false) 58 | str.next(2) 59 | 60 | else: 61 | result.add str.initTok(ltColon, false) 62 | str.next(1) 63 | 64 | of HorizontalSpace: 65 | str.space() 66 | result = lexLink(str) 67 | 68 | else: 69 | raise newUnexpectedCharError(str) 70 | 71 | proc initSelectorPart*(kinds: set[DocEntryKind], name: string): DocSelectorPart = 72 | DocSelectorPart(expected: kinds, name: name) 73 | 74 | proc initSelectorPart*( 75 | kinds: set[DocEntryKind], name: string, procType: DocType): DocSelectorPart = 76 | DocSelectorPart(expected: kinds, name: name, procType: procType) 77 | 78 | func selectorToKinds*(str: string): set[DocEntryKind] = 79 | var r = result 80 | 81 | case str.normalize(): 82 | of "class": r.incl dekClass 83 | of "enum": r.incl dekEnum 84 | of "enumfield": r.incl dekEnumField 85 | of "field": r.incl dekField 86 | of "proc": r.incl dekProc 87 | of "struct": r.incl dekStruct 88 | else: 89 | raise newUnexpectedKindError(str.normalize()) 90 | 91 | return r 92 | 93 | proc parseDocType(lex: var LinkLex): DocType = 94 | case lex[].kind: 95 | of ltIdent: 96 | result = newDocType(dtkIdent, lex[].strVal()); lex.next() 97 | else: 98 | raise newImplementKindError(lex[]) 99 | 100 | proc parseProcArglist(lex: var LinkLex): tuple[ 101 | arguments: seq[DocType], returnType: Option[DocType]] = 102 | 103 | var balance = 1 104 | lex.skip(ltLPar) 105 | while balance > 0 and not lex[ltEof]: 106 | case lex[].kind: 107 | of ltLPar: inc balance; lex.next() 108 | of ltRPar: dec balance; lex.next() 109 | of ltIdent: result.arguments.add lex.parseDocType() 110 | of ltComma: lex.next() 111 | else: 112 | raise newUnexpectedKindError(lex[]) 113 | 114 | if lex[ltColon]: 115 | lex.next() 116 | result.returnType = some lex.parseDocType() 117 | 118 | 
proc parseIdentPart(lex: var LinkLex): DocSelectorPart = 119 | case lex[].kind: 120 | of ltIdent: 121 | var 122 | kinds: set[DocEntryKind] 123 | name: string 124 | if lex[ltIdent, ltKindSelector]: 125 | kinds = lex[].strVal().selectorToKinds() 126 | lex.next(2) 127 | name = lex[].strVal() 128 | lex.next(1) 129 | 130 | 131 | if lex[ltLPar]: 132 | let (args, ret) = parseProcArgList(lex) 133 | result = initSelectorPart( 134 | kinds, name, newDocType(args, ret)) 135 | 136 | else: 137 | result = initSelectorPart(kinds, name) 138 | 139 | else: 140 | raise newUnexpectedKindError(lex[]) 141 | 142 | import hpprint 143 | 144 | 145 | proc parseFullIdent*(pos: PosStr): DocSelector = 146 | var str = pos 147 | var lex = initLexer(str, lexLink, some initTok(ltEof)) 148 | while not lex[ltEof]: 149 | result.parts.add parseIdentPart(lex) 150 | if lex[{ltDot, ltNamespace}]: 151 | lex.next() 152 | 153 | proc unif*(t1, t2: DocType): bool = 154 | t1 == t2 155 | 156 | proc matches(entry: DocEntry, part: DocSelectorPart): bool = 157 | if entry.kind in part.expected and 158 | entry.name == part.name: 159 | if len(part.expected * dekProcKinds) > 0: 160 | result = unif(entry.procType(), part.procType) 161 | 162 | else: 163 | result = true 164 | 165 | 166 | proc resolveFullIdent*(db: DocDb, selector: DocSelector): DocId = 167 | var seed: DocIdSet 168 | 169 | for item in topItems(db, selector.parts[0].expected): 170 | if item.matches(selector.parts[0]): 171 | seed.incl item 172 | 173 | for part in selector.parts[1..^1]: 174 | var next: DocIdSet 175 | for item in seed: 176 | for nested in db[item]: 177 | if nested.matches(part): 178 | next.incl nested 179 | 180 | seed = next 181 | 182 | if len(seed) == 1: 183 | result = seed.pop 184 | 185 | else: 186 | raise newImplementError() 187 | 188 | 189 | 190 | type 191 | DocCodeLink* = ref object of OrgUserLink 192 | id*: DocId 193 | 194 | proc newOrgLink*(id: DocId): OrgLink = 195 | OrgLink(kind: olkCode, codeLink: DocCodeLink(id: id)) 196 | 197 | 198 
| when isMainModule: 199 | for s in [ 200 | "enum!test.enumField!FIRST", 201 | "class!Main", 202 | "proc!method(int, int): Main" 203 | ]: 204 | pprint parseFullIdent(initPosStr(s)) 205 | 206 | echo "done" 207 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/main.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, ./main_manconf, 7 | ./main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 8 | ./mandoc_parse_roff_eqn_main_tbl_mdoc, hmisc / wrappers / wraphelp 9 | 10 | 11 | 12 | export 13 | mandoc_parse_roff_eqn_main_tbl_mdoc, wraphelp, main_manconf, 14 | main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl 15 | 16 | 17 | 18 | 19 | import 20 | mandoc_common 21 | 22 | 23 | 24 | 25 | 26 | # Declaration created in: hc_wrapgen.nim(254, 28) 27 | # Wrapper for `html_alloc` 28 | # Declared in main.h:29 29 | proc htmlAlloc*(a0: ptr Manoutput): pointer {.importc: r"html_alloc", 30 | header: allHeaders.} 31 | ## @import{[[code:proc!proc(ptr[manoutput]): ptr[void]]]} 32 | 33 | 34 | 35 | # Declaration created in: hc_wrapgen.nim(254, 28) 36 | # Wrapper for `html_mdoc` 37 | # Declared in main.h:30 38 | proc htmlMdoc*(a0: pointer; a1: ptr RoffMeta): void {.importc: r"html_mdoc", 39 | header: allHeaders.} 40 | ## @import{[[code:proc!proc(ptr[void], ptr[roff_meta]): void]]} 41 | 42 | 43 | 44 | # Declaration created in: hc_wrapgen.nim(254, 28) 45 | # Wrapper for `html_man` 46 | # Declared in main.h:31 47 | proc htmlMan*(a0: pointer; a1: ptr RoffMeta): void {.importc: r"html_man", 48 | header: allHeaders.} 49 | ## @import{[[code:proc!proc(ptr[void], ptr[roff_meta]): void]]} 50 | 51 | 52 | 53 | # Declaration created in: hc_wrapgen.nim(254, 28) 54 | # Wrapper for `html_reset` 55 | # Declared in main.h:32 56 | proc htmlReset*(a0: pointer): void {.importc: r"html_reset", header: allHeaders.} 57 | ## 
@import{[[code:proc!proc(ptr[void]): void]]} 58 | 59 | 60 | 61 | # Declaration created in: hc_wrapgen.nim(254, 28) 62 | # Wrapper for `html_free` 63 | # Declared in main.h:33 64 | proc htmlFree*(a0: pointer): void {.importc: r"html_free", header: allHeaders.} 65 | ## @import{[[code:proc!proc(ptr[void]): void]]} 66 | 67 | 68 | 69 | # Declaration created in: hc_wrapgen.nim(254, 28) 70 | # Wrapper for `tree_mdoc` 71 | # Declared in main.h:35 72 | proc treeMdoc*(a0: pointer; a1: ptr RoffMeta): void {.importc: r"tree_mdoc", 73 | header: allHeaders.} 74 | ## @import{[[code:proc!proc(ptr[void], ptr[roff_meta]): void]]} 75 | 76 | 77 | 78 | # Declaration created in: hc_wrapgen.nim(254, 28) 79 | # Wrapper for `tree_man` 80 | # Declared in main.h:36 81 | proc treeMan*(a0: pointer; a1: ptr RoffMeta): void {.importc: r"tree_man", 82 | header: allHeaders.} 83 | ## @import{[[code:proc!proc(ptr[void], ptr[roff_meta]): void]]} 84 | 85 | 86 | 87 | # Declaration created in: hc_wrapgen.nim(254, 28) 88 | # Wrapper for `man_mdoc` 89 | # Declared in main.h:38 90 | proc manMdoc*(a0: pointer; a1: ptr RoffMeta): void {.importc: r"man_mdoc", 91 | header: allHeaders.} 92 | ## @import{[[code:proc!proc(ptr[void], ptr[roff_meta]): void]]} 93 | 94 | 95 | 96 | # Declaration created in: hc_wrapgen.nim(254, 28) 97 | # Wrapper for `locale_alloc` 98 | # Declared in main.h:40 99 | proc localeAlloc*(a0: ptr Manoutput): pointer {.importc: r"locale_alloc", 100 | header: allHeaders.} 101 | ## @import{[[code:proc!proc(ptr[manoutput]): ptr[void]]]} 102 | 103 | 104 | 105 | # Declaration created in: hc_wrapgen.nim(254, 28) 106 | # Wrapper for `utf8_alloc` 107 | # Declared in main.h:41 108 | proc utf8Alloc*(a0: ptr Manoutput): pointer {.importc: r"utf8_alloc", 109 | header: allHeaders.} 110 | ## @import{[[code:proc!proc(ptr[manoutput]): ptr[void]]]} 111 | 112 | 113 | 114 | # Declaration created in: hc_wrapgen.nim(254, 28) 115 | # Wrapper for `ascii_alloc` 116 | # Declared in main.h:42 117 | proc asciiAlloc*(a0: 
ptr Manoutput): pointer {.importc: r"ascii_alloc", 118 | header: allHeaders.} 119 | ## @import{[[code:proc!proc(ptr[manoutput]): ptr[void]]]} 120 | 121 | 122 | 123 | # Declaration created in: hc_wrapgen.nim(254, 28) 124 | # Wrapper for `ascii_free` 125 | # Declared in main.h:43 126 | proc asciiFree*(a0: pointer): void {.importc: r"ascii_free", header: allHeaders.} 127 | ## @import{[[code:proc!proc(ptr[void]): void]]} 128 | 129 | 130 | 131 | # Declaration created in: hc_wrapgen.nim(254, 28) 132 | # Wrapper for `pdf_alloc` 133 | # Declared in main.h:45 134 | proc pdfAlloc*(a0: ptr Manoutput): pointer {.importc: r"pdf_alloc", 135 | header: allHeaders.} 136 | ## @import{[[code:proc!proc(ptr[manoutput]): ptr[void]]]} 137 | 138 | 139 | 140 | # Declaration created in: hc_wrapgen.nim(254, 28) 141 | # Wrapper for `ps_alloc` 142 | # Declared in main.h:46 143 | proc psAlloc*(a0: ptr Manoutput): pointer {.importc: r"ps_alloc", 144 | header: allHeaders.} 145 | ## @import{[[code:proc!proc(ptr[manoutput]): ptr[void]]]} 146 | 147 | 148 | 149 | # Declaration created in: hc_wrapgen.nim(254, 28) 150 | # Wrapper for `pspdf_free` 151 | # Declared in main.h:47 152 | proc pspdfFree*(a0: pointer): void {.importc: r"pspdf_free", header: allHeaders.} 153 | ## @import{[[code:proc!proc(ptr[void]): void]]} 154 | 155 | 156 | 157 | # Declaration created in: hc_wrapgen.nim(254, 28) 158 | # Wrapper for `terminal_mdoc` 159 | # Declared in main.h:49 160 | proc terminalMdoc*(a0: pointer; a1: ptr RoffMeta): void {. 161 | importc: r"terminal_mdoc", header: allHeaders.} 162 | ## @import{[[code:proc!proc(ptr[void], ptr[roff_meta]): void]]} 163 | 164 | 165 | 166 | # Declaration created in: hc_wrapgen.nim(254, 28) 167 | # Wrapper for `terminal_man` 168 | # Declared in main.h:50 169 | proc terminalMan*(a0: pointer; a1: ptr RoffMeta): void {. 
170 | importc: r"terminal_man", header: allHeaders.} 171 | ## @import{[[code:proc!proc(ptr[void], ptr[roff_meta]): void]]} 172 | 173 | 174 | 175 | # Declaration created in: hc_wrapgen.nim(254, 28) 176 | # Wrapper for `terminal_sepline` 177 | # Declared in main.h:51 178 | proc terminalSepline*(a0: pointer): void {.importc: r"terminal_sepline", 179 | header: allHeaders.} 180 | ## @import{[[code:proc!proc(ptr[void]): void]]} 181 | 182 | 183 | 184 | # Declaration created in: hc_wrapgen.nim(254, 28) 185 | # Wrapper for `markdown_mdoc` 186 | # Declared in main.h:53 187 | proc markdownMdoc*(a0: pointer; a1: ptr RoffMeta): void {. 188 | importc: r"markdown_mdoc", header: allHeaders.} 189 | ## @import{[[code:proc!proc(ptr[void], ptr[roff_meta]): void]]} 190 | 191 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/mandoc.nim: -------------------------------------------------------------------------------- 1 | 2 | {.push, warning[UnusedImport]: off.} 3 | 4 | 5 | import 6 | std / bitops, cstd / types / FILE, cstd / stddef, 7 | ./main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl, 8 | hmisc / wrappers / wraphelp 9 | 10 | 11 | 12 | export 13 | wraphelp, main_manconf_mandoc_mandoc_parse_mansearch_mdoc_roff_tbl 14 | 15 | 16 | 17 | 18 | import 19 | mandoc_common 20 | 21 | 22 | 23 | 24 | proc toCInt*(en: MaAscii): cint {.inline.} = 25 | ## Convert proxy enum to integer value 26 | cint(en.int) 27 | 28 | proc toCInt*(en: set[MaAscii]): cint {.inline.} = 29 | ## Convert set of enums to bitmasked integer 30 | for val in en: 31 | result = bitor(result, val.cint) 32 | 33 | 34 | 35 | 36 | 37 | # Declaration created in: hc_wrapgen.nim(254, 28) 38 | # Wrapper for `mandoc_font` 39 | # Declared in mandoc.h:273 40 | proc mandocFont*(a1: cstring; sz: cint): MandocEsc {.importc: r"mandoc_font", 41 | header: allHeaders.} 42 | ## @import{[[code:proc!proc(ptr[const[char]], int): mandoc_esc]]} 43 | 44 | 45 | 46 | # Declaration created 
in: hc_wrapgen.nim(254, 28) 47 | # Wrapper for `mandoc_escape` 48 | # Declared in mandoc.h:274 49 | proc mandocEscape*(a1: cstringArray; a2: cstringArray; a3: ptr cint): MandocEsc {. 50 | importc: r"mandoc_escape", header: allHeaders.} 51 | ## @import{[[code:proc!proc(ptr[ptr[const[char]]], ptr[ptr[const[char]]], ptr[int]): mandoc_esc]]} 52 | 53 | 54 | 55 | # Declaration created in: hc_wrapgen.nim(254, 28) 56 | # Wrapper for `mandoc_msg_setoutfile` 57 | # Declared in mandoc.h:275 58 | proc mandocMsgSetoutfile*(a0: ptr FILE): void {. 59 | importc: r"mandoc_msg_setoutfile", header: allHeaders.} 60 | ## @import{[[code:proc!proc(ptr[tkTypedef]): void]]} 61 | 62 | 63 | 64 | # Declaration created in: hc_wrapgen.nim(254, 28) 65 | # Wrapper for `mandoc_msg_getinfilename` 66 | # Declared in mandoc.h:276 67 | proc mandocMsgGetinfilename*(): cstring {.importc: r"mandoc_msg_getinfilename", 68 | header: allHeaders.} 69 | ## @import{[[code:proc!proc(): ptr[const[char]]]]} 70 | 71 | 72 | 73 | # Declaration created in: hc_wrapgen.nim(254, 28) 74 | # Wrapper for `mandoc_msg_setinfilename` 75 | # Declared in mandoc.h:277 76 | proc mandocMsgSetinfilename*(a0: cstring): void {. 
77 | importc: r"mandoc_msg_setinfilename", header: allHeaders.} 78 | ## @import{[[code:proc!proc(ptr[const[char]]): void]]} 79 | 80 | 81 | 82 | # Declaration created in: hc_wrapgen.nim(254, 28) 83 | # Wrapper for `mandoc_msg_getmin` 84 | # Declared in mandoc.h:278 85 | proc mandocMsgGetmin*(): Mandocerr {.importc: r"mandoc_msg_getmin", 86 | header: allHeaders.} 87 | ## @import{[[code:proc!proc(): mandocerr]]} 88 | 89 | 90 | 91 | # Declaration created in: hc_wrapgen.nim(254, 28) 92 | # Wrapper for `mandoc_msg_setmin` 93 | # Declared in mandoc.h:279 94 | proc mandocMsgSetmin*(a0: MandocerrC): void {.importc: r"mandoc_msg_setmin", 95 | header: allHeaders.} 96 | ## @import{[[code:proc!proc(mandocerr): void]]} 97 | 98 | 99 | 100 | # Declaration created in: hc_wrapgen.nim(254, 28) 101 | # Wrapper for `mandoc_msg_getrc` 102 | # Declared in mandoc.h:280 103 | proc mandocMsgGetrc*(): Mandoclevel {.importc: r"mandoc_msg_getrc", 104 | header: allHeaders.} 105 | ## @import{[[code:proc!proc(): mandoclevel]]} 106 | 107 | 108 | 109 | # Declaration created in: hc_wrapgen.nim(254, 28) 110 | # Wrapper for `mandoc_msg_setrc` 111 | # Declared in mandoc.h:281 112 | proc mandocMsgSetrc*(a0: MandoclevelC): void {.importc: r"mandoc_msg_setrc", 113 | header: allHeaders.} 114 | ## @import{[[code:proc!proc(mandoclevel): void]]} 115 | 116 | 117 | 118 | # Declaration created in: hc_wrapgen.nim(254, 28) 119 | # Wrapper for `mandoc_msg` 120 | # Declared in mandoc.h:282 121 | proc mandocMsg*(a1: MandocerrC; a2: cint; a3: cint; a4: cstring): void {. 
122 | varargs, importc: r"mandoc_msg", header: allHeaders.} 123 | ## @import{[[code:proc!proc(mandocerr, int, int, ptr[const[char]]): void]]} 124 | 125 | 126 | 127 | # Declaration created in: hc_wrapgen.nim(254, 28) 128 | # Wrapper for `mchars_alloc` 129 | # Declared in mandoc.h:284 130 | proc mcharsAlloc*(): void {.importc: r"mchars_alloc", header: allHeaders.} 131 | ## @import{[[code:proc!proc(): void]]} 132 | 133 | 134 | 135 | # Declaration created in: hc_wrapgen.nim(254, 28) 136 | # Wrapper for `mchars_free` 137 | # Declared in mandoc.h:285 138 | proc mcharsFree*(): void {.importc: r"mchars_free", header: allHeaders.} 139 | ## @import{[[code:proc!proc(): void]]} 140 | 141 | 142 | 143 | # Declaration created in: hc_wrapgen.nim(254, 28) 144 | # Wrapper for `mchars_num2char` 145 | # Declared in mandoc.h:286 146 | proc mcharsNum2char*(a0: cstring; a1: SizeT): cint {. 147 | importc: r"mchars_num2char", header: allHeaders.} 148 | ## @import{[[code:proc!proc(ptr[const[char]], tkTypedef): int]]} 149 | 150 | 151 | 152 | # Declaration created in: hc_wrapgen.nim(254, 28) 153 | # Wrapper for `mchars_uc2str` 154 | # Declared in mandoc.h:287 155 | proc mcharsUc2str*(a0: cint): cstring {.importc: r"mchars_uc2str", 156 | header: allHeaders.} 157 | ## @import{[[code:proc!proc(int): ptr[const[char]]]]} 158 | 159 | 160 | 161 | # Declaration created in: hc_wrapgen.nim(254, 28) 162 | # Wrapper for `mchars_num2uc` 163 | # Declared in mandoc.h:288 164 | proc mcharsNum2uc*(a0: cstring; a1: SizeT): cint {.importc: r"mchars_num2uc", 165 | header: allHeaders.} 166 | ## @import{[[code:proc!proc(ptr[const[char]], tkTypedef): int]]} 167 | 168 | 169 | 170 | # Declaration created in: hc_wrapgen.nim(254, 28) 171 | # Wrapper for `mchars_spec2cp` 172 | # Declared in mandoc.h:289 173 | proc mcharsSpec2cp*(a0: cstring; a1: SizeT): cint {.importc: r"mchars_spec2cp", 174 | header: allHeaders.} 175 | ## @import{[[code:proc!proc(ptr[const[char]], tkTypedef): int]]} 176 | 177 | 178 | 179 | # 
Declaration created in: hc_wrapgen.nim(254, 28) 180 | # Wrapper for `mchars_spec2str` 181 | # Declared in mandoc.h:290 182 | proc mcharsSpec2str*(a0: cstring; a1: SizeT; a2: ptr SizeT): cstring {. 183 | importc: r"mchars_spec2str", header: allHeaders.} 184 | ## @import{[[code:proc!proc(ptr[const[char]], tkTypedef, ptr[tkTypedef]): ptr[const[char]]]]} 185 | 186 | -------------------------------------------------------------------------------- /src/haxdoc.nim: -------------------------------------------------------------------------------- 1 | import 2 | hmisc/preludes/cli_app, 3 | hmisc/helpers 4 | 5 | import 6 | hnimast/[nimble_aux, compiler_aux] 7 | 8 | import 9 | ./haxdoc/extract/from_nim_code, 10 | ./haxdoc/generate/sourcetrail_db, 11 | ./haxdoc/[docentry_io, docentry] 12 | 13 | startHax() 14 | 15 | proc addNimCmd(app: var CliApp) = 16 | var cmd = cmd("nim", "Process nim code") 17 | cmd.add opt( 18 | "define", "Define nim compilation symbol", 19 | alt = @["d"], 20 | default = toCliValue(newSeq[string]()).cliDefault(), 21 | check = cliCheckFor(string), 22 | maxRepeat = high(int) 23 | ) 24 | 25 | cmd.add opt( 26 | "stdpath", "Location of the nim standard library", 27 | default = toCliValue(getStdPath(), "choosenim stdlib installation").cliDefault(), 28 | check = checkDirExists() 29 | ) 30 | 31 | block: 32 | var trail = cmd("trail", "Generate sourcetrail database") 33 | trail.add arg("file", "Main nim file", check = checkAnd( 34 | checkFileReadable(), 35 | checkExtensions({ 36 | "nim", "nims": "Generate database for file and it's imports", 37 | "nimble": "Generate database for a full project and dependencies" 38 | }) 39 | )) 40 | 41 | trail.add flag( 42 | "launch", "Automatically launch `sourcetrail` when project is completed") 43 | 44 | trail.add opt( 45 | "outdir", "Directory to write sourcetrail database files.", 46 | default = cliDefaultFromArg( 47 | "file", "new '/haxdoc' directory for parent dir", 48 | proc(val: CliValue): CliValue = toCliValue( 49 | 
val.as(FsFile).dir() / RelDir("haxdoc")))) 50 | 51 | cmd.add trail 52 | 53 | block: 54 | cmd.add cmd("xml", "Generate xml database", [ 55 | arg("file", "Main nim file", check = checkFileReadable()), 56 | opt( 57 | "outdir", 58 | "Directory to write XML database to", 59 | check = checkDirCreatable(), 60 | default = cliDefaultFromArg( 61 | "file", "new '/haxdoc' directory for parent dir", 62 | proc(val: CliValue): CliValue = 63 | result = toCliValue(val.as(FsFile).dir() / RelDir("haxdoc")) 64 | echo result.kind))]) 65 | 66 | block: 67 | var project = cmd("project", "Generate documentation for a project") 68 | project.add arg("project", "Project directory or input file", check = checkOr( 69 | checkDirExists(), 70 | checkFileReadable() 71 | )) 72 | 73 | cmd.add project 74 | 75 | app.root.add cmd 76 | 77 | proc addDiffCmd*(app: var CliApp) = 78 | app.add cmd("diff", "Compare two documentation databases and annotated source code", [ 79 | arg("old-db", "Old input database", check = checkDirExists()), 80 | arg("new-db", "New input database", check = checkDirExists()) 81 | ]) 82 | 83 | proc haxdocMain*(app: var CliApp, l: HLogger) = 84 | case app.getCmdName(): 85 | of "nim": 86 | let 87 | nimcmd = app.getCmd() 88 | trail = nimcmd.getCmd() 89 | file = trail.getArg() as AbsFile 90 | define = (nimcmd.getOpt("define") as seq[string]) & @["haxdoc", "nimdoc"] 91 | stdpath = nimcmd.getOpt("stdpath") as AbsDir 92 | dryRun = app.getOpt("dry-run") as bool 93 | 94 | 95 | template compileDb(): untyped = 96 | l.wait "Running trail compilation with file", file 97 | let db {.inject.} = 98 | if dryRun: 99 | DocDb() 100 | 101 | else: 102 | if file.ext() in ["nim", "nims"]: 103 | file. 104 | generateDocDb( 105 | logger = l, 106 | fileLib = some(file.name()), 107 | defines = define, 108 | stdpath = stdpath 109 | ) 110 | 111 | else: 112 | getPackageInfo(file). 
113 | docDbFromPackage( 114 | stdpath = stdpath, 115 | defines = define, 116 | logger = l 117 | ) 118 | 119 | 120 | l.success "Finished documentation database generation" 121 | 122 | case nimcmd.getCmdName(): 123 | of "trail": 124 | let 125 | trailcmd = nimcmd.getCmd() 126 | outdir = trailcmd.getOpt("outdir") as AbsDir 127 | outfile = outdir /. file.name() 128 | project = outfile.withExt(sourcetrailProjectExt) 129 | 130 | compileDb() 131 | mkDir(outDir) 132 | 133 | l.wait "Writing sourcetrail db file" 134 | 135 | 136 | if not dryRun: 137 | db.writeSourcetrailDb(outfile) 138 | l.success "Wrote sourcetrail project to", project 139 | 140 | if trailcmd.getOpt("launch") as bool: 141 | l.execShell shellCmd("sourcetrail", $project) 142 | 143 | of "xml": 144 | let 145 | xmlCmd = nimCmd.getCmd() 146 | outdir = xmlCmd.getOpt("outdir") as AbsDir 147 | 148 | mkDir(outDir) 149 | compileDb() 150 | 151 | l.wait "Writing xml database" 152 | if not dryRun: 153 | db.writeDbXml(outdir, "haxdoc-xml") 154 | 155 | l.success "Wrote sourcetrail db to", outdir 156 | 157 | of "diff": 158 | let 159 | diffcmd = app.getCmd() 160 | oldDir = diffCmd.getArg("old-db") as AbsDir 161 | newDir = diffCmd.getArg("new-db") as AbsDir 162 | oldDb = oldDir.loadDbXml("haxdoc-xml", loadFiles = true) 163 | newDb = newDir.loadDbXml("haxdoc-xml", loadFiles = true) 164 | 165 | 166 | 167 | proc haxdocCli*(args: seq[string], doRaise = true) = 168 | var 169 | app = newCliApp( 170 | "haxdoc", (0, 1, 0), "haxscramper", "documentation generator") 171 | 172 | 173 | logger = newTermLogger() 174 | 175 | app.addNimCmd() 176 | app.addDiffCmd() 177 | 178 | app.raisesAsExit(haxdocMain, { 179 | "OsError": (1, "zz"), 180 | # "ShellError": () 181 | }) 182 | 183 | if app.acceptArgs(args): 184 | if app.builtinActionRequested(): 185 | app.showBuiltin(logger) 186 | 187 | else: 188 | app.runMain(haxdocMain, logger, not doRaise) 189 | 190 | else: 191 | app.showErrors(logger) 192 | if doRaise: 193 | raise app.errors[0].toRef() 194 | 
195 | 196 | when isMainModule: 197 | haxdocCli(paramStrs(), false) 198 | -------------------------------------------------------------------------------- /src/haxdoc/generate/sqlite_db.nim: -------------------------------------------------------------------------------- 1 | import 2 | ../docentry 3 | 4 | import 5 | std/[ 6 | strformat, with, macros, hashes, options, 7 | sequtils, intsets, sugar, strutils 8 | ] 9 | 10 | import 11 | hmisc/other/[oswrap, sqlite_extra], 12 | hmisc/core/all 13 | 14 | import 15 | hnimast, hnimast/store_decl 16 | 17 | import 18 | haxorg/[semorg] 19 | 20 | startHax() 21 | 22 | 23 | proc bindParam(ps: SqlPrepared, idx: int, id: DocId) = 24 | bindParam(ps, idx, id.id.int) 25 | 26 | # proc bindParam(ps: SqlPrepared, idx: int, id: Hash) = 27 | # bindParam(ps, idx, id.int) 28 | 29 | const 30 | entriesTable = "entries" 31 | typeInstanceTable = "typeInstances" 32 | doctextTable = "doctext" 33 | occurTable = "occurs" 34 | signaturesTable = "procedureSignatures" 35 | pragmaListTable = "pragmaLists" 36 | genericInstTable = "genericSpecializations" 37 | argListTable = "argLists" 38 | idKey = "integer primary key unique not null" 39 | 40 | var 41 | registerPrep: SqlPrepared 42 | 43 | proc finalizePrepared() = 44 | finalize(registerPrep) 45 | 46 | 47 | 48 | 49 | 50 | proc toSQLite*(t: typedesc[DocType]): string = 51 | &"integer references {typeInstanceTable}(id)" 52 | 53 | proc toSqlite*(t: typedesc[DocText]): string = 54 | &"integer references {doctextTable}(id)" 55 | 56 | proc toSqlite*(t: typedesc[SemOrg]): string = "blob" 57 | 58 | proc toSqlite*(t: typedesc[DocId]): string = 59 | &"integer references {entriesTable}(id)" 60 | 61 | 62 | proc createTables(sq: DbConn) = 63 | let q = 64 | @[ 65 | sql fmt(""" 66 | create table {doctextTable} ( 67 | id {idKey}, 68 | docBrief {sq(DocText().docBrief)}, 69 | docBody {sq(DocText().docBody)}, 70 | rawDoc text 71 | ); 72 | """), 73 | sql fmt("""create table {entriesTable} ( 74 | id {idKey}, 75 | parentId 
{toSqlite(typeof DocEntry().id)}, 76 | depth integer not null, 77 | name {toSqlite(typeof DocEntry().name)}, 78 | kind {toSqlite(typeof DocEntry().kind)}, 79 | docText {toSqlite(typeof DocEntry().docText)}, 80 | docType {sq(DocType())} 81 | ); 82 | """), 83 | sql fmt("""create table {occurTable} ( 84 | user {sq(DocOccur().user)}, 85 | kind {sq(DocOccur().kind)} not null, 86 | localId {sq(DocOccur().localId)}, 87 | withInit {sq(DocOccur().withInit)}, 88 | refid {sq(DocOccur().refid)} 89 | ); 90 | """), 91 | sql fmt("""create table {typeInstanceTable} ( 92 | id {idKey}, 93 | kind {sq(DocType().kind)}, 94 | headEntry {sq(DocType().head)}, 95 | procType integer references {signaturesTable}(id), 96 | paramTypes integer references {genericInstTable}(id) 97 | ); 98 | """), 99 | sql fmt("""create table {genericInstTable} ( 100 | id integer not null, 101 | type {sq(DocType)}, 102 | pos integer not null 103 | ); 104 | """), 105 | sql fmt("""create table {signaturesTable} ( 106 | id {idKey}, 107 | returnType {sq(DocType)}, 108 | argList integer references {arglistTable}(id), 109 | pragmaList integer references {pragmaListTable}(id) 110 | ); 111 | """), 112 | sql fmt("""create table {argListTable} ( 113 | id integer not null, 114 | pos integer not null, 115 | name text, 116 | type {sq(DocType)} 117 | ); 118 | """) 119 | 120 | ] 121 | 122 | for q in q: 123 | sq.exec(q) 124 | 125 | 126 | type 127 | PrepStore = object 128 | entry, docText, docType, sig, docProc, docGeneric: SqlPrepared 129 | 130 | 131 | proc register(sq: DbConn, text: DocText, prep: var PrepStore): int = 132 | var docIdx {.global.}: int 133 | result = docIdx 134 | inc docIdx 135 | once: 136 | let q = &""" 137 | insert into {doctextTable} ( 138 | rawDoc 139 | ) values ( 140 | ?1 -- rawDoc 141 | ) 142 | """ 143 | 144 | prep.docText = sq.prepare(q) 145 | 146 | prep.docText.bindParam(1, text.rawDoc.join("\n")) 147 | 148 | 149 | sq.doExec(prep.docText) 150 | 151 | template checkSeen(hash: Hash): bool = 152 | var store 
{.global.}: IntSet 153 | if hash in store: 154 | true 155 | 156 | else: 157 | store.incl hash 158 | false 159 | 160 | 161 | 162 | 163 | 164 | proc registerProc(sq: DbConn, sig: DocType, prep: var PrepStore): Hash = 165 | result = hash(sig) 166 | if checkSeen(result): return 167 | 168 | once: 169 | let q = argListTable.newInsert({ 170 | "id": 1, 171 | "pos": 2, 172 | "name": 3, 173 | "type": 4 174 | }) 175 | 176 | prep.docProc = sq.prepare(q) 177 | 178 | 179 | for idx, arg in sig.arguments: 180 | with prep.docProc: 181 | bindParam(1, result) 182 | bindParam(2, idx) 183 | bindParam(3, arg.ident) 184 | bindParam(4, hash(arg.identType)) 185 | 186 | sq.doExec(prep.docProc) 187 | 188 | 189 | 190 | proc registerGeneric(sq: DbConn, sig: DocType, prep: var PrepStore): Hash = 191 | result = hash(sig) 192 | if checkSeen(result): return 193 | 194 | once: 195 | let q = genericInstTable.newInsert({ 196 | "id": 1, "type": 2, "pos": 3}) 197 | 198 | prep.docGeneric = sq.prepare(q) 199 | 200 | for idx, arg in sig.genParams: 201 | with prep.docGeneric: 202 | bindParam(1, result) 203 | bindParam(2, arg.hash()) 204 | bindParam(3, idx) 205 | 206 | sq.doExec(prep.docGeneric) 207 | 208 | 209 | 210 | 211 | proc register(sq: DbConn, dtype: DocType, prep: var PrepStore): Hash = 212 | if isNil(dtype): return 213 | 214 | result = dtype.hash() 215 | if checkSeen(result): return 216 | 217 | 218 | once: 219 | let q = typeInstanceTable.newInsert({ 220 | "id": 1, 221 | "kind": 2, 222 | "headEntry": 3, 223 | "procType": 4, 224 | "paramTypes": 5 225 | }) 226 | 227 | prep.docType = sq.prepare(q) 228 | 229 | with prep.docType: 230 | bindParam(1, result) 231 | bindParam(2, dtype.kind) 232 | 233 | case dtype.kind: 234 | of dtkProc: 235 | prep.docType.bindParam(4, sq.registerProc(dtype, prep)) 236 | 237 | of dtkIdent: 238 | prep.docType.bindParam(3, dtype.head) 239 | prep.docType.bindParam(5, sq.registerGeneric(dtype, prep)) 240 | 241 | else: 242 | discard 243 | 244 | sq.doExec(prep.docType) 245 | 246 | 
proc register(sq: DbConn, entry: DocEntry, prep: var PrepStore) =
  ## Insert `entry` into the entries table together with its doc text and,
  ## for procs/aliases, its type. Entries already written by this process
  ## are skipped.
  # FIX: `registered` was a plain local and therefore reset on every call,
  # so the "already registered" check could never fire and duplicate rows
  # were inserted. Make it `{.global.}`, matching the `checkSeen` store
  # used by the other `register` overloads in this module.
  var registered {.global.}: DocIdSet
  if entry.id in registered:
    echov entry, "already registered"
    return

  else:
    registered.incl entry.id()

  once:
    # Statement is prepared once per process and reused for every entry.
    let q = &"""
insert into {entriesTable} (
  id, name, kind, depth, parentId, doctext, docType
) values (
  ?1, -- id
  ?2, -- name
  ?3, -- kind
  ?4, -- depth
  ?5, -- parent id
  ?6, -- docText
  ?7
);
"""

    prep.entry = sq.prepare(q)

  with prep.entry:
    bindParam(1, entry.id())
    bindParam(2, entry.name)
    bindParam(3, entry.kind)
    # depth is approximated by the length of the fully qualified ident path
    bindParam(4, entry.fullIdent.len())
    bindParam(6, sq.register(entry.docText, prep))

  if entry.fullIdent.hasParent():
    prep.entry.bindParam(5, entry.parentIdentPart.id)

  case entry.kind:
    of dekProcKinds:
      prep.entry.bindParam(7, sq.register(entry.procType, prep))

    of dekAliasKinds:
      prep.entry.bindParam(7, sq.register(entry.baseType, prep))

    else:
      discard

  sq.doExec(prep.entry)

proc store*[E: enum](sq: DbConn, name: string, en: typedesc[E]) =
  ## Store the `ordinal -> name` mapping for every value of enum `E`
  ## into a two-column lookup table called `name`.
  sq.exec(sql &"create table {name} (kind int, name text);")
  for kind in low(E) .. 
high(E): 297 | sq.exec(sql &"insert into {name} (kind, name) values ({kind.int}, \"{kind}\");") 298 | 299 | 300 | proc registerFullDb*(db: DocDb, sq: DbConn) = 301 | var prep: PrepStore 302 | for entry in allItems(db): 303 | sq.register(entry, prep) 304 | 305 | for field in fields(prep): 306 | field.finalize() 307 | 308 | sq.store("entryKinds", DocEntryKind) 309 | sq.store("occurKinds", DocOccurKind) 310 | sq.store("typeKinds", DocTypeKind) 311 | 312 | 313 | proc writeDbSqlite*(db: DocDb, outFile: AbsFile) = 314 | if exists(outFile): rmFile outFile 315 | 316 | let conn = open(outFile.string, "", "", "") 317 | createTables(conn) 318 | try: 319 | db.registerFullDb(conn) 320 | 321 | except DbError: 322 | echo connError(conn) 323 | raise 324 | 325 | finalizePrepared() 326 | close(conn) 327 | -------------------------------------------------------------------------------- /src/haxdoc/process/docentry_query.nim: -------------------------------------------------------------------------------- 1 | import ../docentry 2 | import std/[tables, sequtils, pegs] 3 | import hmisc/core/all 4 | 5 | type 6 | DocFilterKind = enum 7 | dfkKindFilter 8 | dfkNameFilter 9 | dfkSigFilter 10 | dfkSetFilter 11 | 12 | DocSigPatternKind = enum 13 | dspkTrail 14 | dspkTypeId 15 | dspkGenPattern 16 | dspkProcPattern 17 | dspkChoice 18 | 19 | DocSigPattern = object 20 | elements*: seq[DocSigPattern] 21 | case kind*: DocSigPatternKind 22 | of dspkTrail: 23 | discard 24 | 25 | of dspkTypeId: 26 | typeId*: DocId 27 | 28 | of dspkChoice, dspkProcPattern: 29 | discard 30 | 31 | of dspkGenPattern: 32 | headName*: Peg 33 | 34 | DocFilter = object 35 | isInverted*: bool 36 | case kind*: DocFilterKind 37 | of dfkKindFilter: 38 | targetKinds*: set[DocEntryKind] 39 | 40 | of dfkNameFilter: 41 | targetName*: string 42 | 43 | of dfkSigFilter: 44 | targetSig*: DocSigPattern 45 | 46 | of dfkSetFilter: 47 | idSet*: DocIdSet 48 | 49 | DocFilterGroupKind = enum 50 | dfgkOrGroup 51 | dfgkAndGroup 52 | 53 | 
DocFilterGroup = object 54 | kind*: DocFilterGroupKind 55 | filters*: seq[DocFilter] 56 | 57 | DocFilterPath = object 58 | path*: seq[DocFilterGroup] 59 | 60 | 61 | using 62 | entry: DocEntry 63 | group: DocFilterGroup 64 | 65 | iterator items*(group): DocFilter = 66 | for it in group.filters: 67 | yield it 68 | 69 | func len*(group): int = len(group.filters) 70 | 71 | func isAnd*(group): bool = group.kind == dfgkAndGroup 72 | func isOr*(group): bool = group.kind == dfgkOrGroup 73 | 74 | func targetKinds*(group): set[DocEntryKind] = 75 | for filter in group: 76 | if filter.kind == dfkKindFilter: 77 | result.incl filter.targetKinds 78 | 79 | if len(result) == 0: 80 | result = { low(DocEntryKind) .. high(DocEntryKind) } 81 | 82 | func matches*(etype: DocType, sig: DocSigPattern): bool = 83 | case sig.kind: 84 | of dspkTrail: 85 | result = true 86 | 87 | of dspkChoice: 88 | for patt in sig.elements: 89 | if etype.matches(patt): 90 | return true 91 | 92 | return false 93 | 94 | of dspkTypeId: 95 | case etype.kind: 96 | of dtkIDent: 97 | result = etype.head == sig.typeId 98 | 99 | of dtkVarargs: 100 | result = etype.vaType.matches(sig) 101 | 102 | else: 103 | result = false 104 | 105 | of dspkGenPattern: 106 | case etype.kind: 107 | of {dtkIdent, dtkVarargs}: 108 | result = (etype.name =~ sig.headName) 109 | 110 | var argIdx = 0 111 | var sigIdx = 0 112 | while argIdx < etype.genParams.len and 113 | sigIdx < sig.elements.len: 114 | 115 | if sig.elements[sigIdx].kind == dspkTrail: 116 | return true 117 | 118 | elif not etype.genParams[argIdx].matches( 119 | sig.elements[sigIdx]): 120 | return false 121 | 122 | inc argIdx 123 | inc sigIdx 124 | 125 | return etype.genParams.len == sig.elements.len 126 | 127 | else: 128 | result = false 129 | 130 | of dspkProcPattern: 131 | etype.assertKind(dtkProc) 132 | 133 | var argIdx = 0 134 | var sigIdx = 0 135 | while argIdx < etype.arguments.len and 136 | sigIdx < sig.elements.len: 137 | 138 | if sig.elements[sigIdx].kind == 
dspkTrail:
          return true

        elif not etype.arguments[argIdx].identType.matches(
          sig.elements[sigIdx]):
          return false

        inc argIdx
        inc sigIdx

      return etype.arguments.len == sig.elements.len

func matches*(entry; filter: DocFilter): bool =
  ## Check whether a single `filter` accepts `entry`.
  ## `isInverted` is honored here, uniformly for every filter kind.
  case filter.kind:
    of dfkNameFilter:
      result = (entry.name == filter.targetName)

    of dfkSigFilter:
      case entry.kind:
        of dekProcKinds:
          result = entry.procType.matches(filter.targetSig)

        else:
          result = false

    of dfkSetFilter:
      result = (entry.id() in filter.idSet)

    of dfkKindFilter:
      result = (entry.kind in filter.targetKinds)

  # FIX: inversion used to be applied twice for set/kind filters - once
  # inside this proc (via `xor`) and once again by the group-level
  # `matches` below - which canceled out. It is now applied exactly once,
  # here, for all filter kinds.
  if filter.isInverted:
    result = not result

func matches*(entry; group): bool =
  ## `and`-groups require every filter to match, `or`-groups at least one.
  ## An empty group vacuously matches everything.
  if len(group) == 0:
    return true

  for filter in group:
    result = entry.matches(filter)

    if not result and group.isAnd():
      break

    if result and group.isOr():
      break


iterator matching*(entry; group): DocEntry =
  ## Yield the sub-entries of `entry` that are accepted by `group`.
  # FIX: the original tested `entry.matches(group)` (the parent) instead of
  # the iterated `nested` entry, so the filter yielded either every child
  # or none. The unused `targetKinds` pre-filter was dropped: its union of
  # kind-filter kinds is not a sound pre-filter for `or`-groups.
  for nested in entry:
    if nested.matches(group):
      yield nested

let
  nimTyHeadChoice*: Peg = `/`(
    term("var"),
    term("sink"),
    term("ptr"),
    term("ref")
  )

func procPatt*(args: varargs[DocSigPattern]): DocSigPattern =
  DocSigPattern(kind: dspkProcPattern, elements: toSeq(args))

func genPatt*(head: string, args: varargs[DocSigPattern]): DocSigPattern =
  DocSigPattern(kind: dspkGenPattern, elements: toSeq(args), headName: term(head))

func genPatt*(head: Peg, args: varargs[DocSigPattern]): DocSigPattern =
  DocSigPattern(kind: dspkGenPattern, elements: toSeq(args), headName: head)

func sigPatt*(id: DocId): DocSigPattern =
  DocSigPattern(kind: dspkTypeId, typeId: id)

func sigPatt*(kind: DocSigPatternKind): DocSigPattern =
  DocSigPattern(kind: kind)

func choice*(alts: varargs[DocSigPattern]): DocSigPattern =
  DocSigPattern(kind: dspkChoice, elements: toSeq(alts))

func docFilter*(kind: set[DocEntryKind]): DocFilter =
  DocFilter(kind: dfkKindFilter, targetKinds: kind)

func docFilter*(kind: DocEntryKind): DocFilter =
  DocFilter(kind: dfkKindFilter, targetKinds: { kind })

func docFilter*(name: string): DocFilter =
  DocFilter(kind: dfkNameFilter, targetName: name)

func toFilter*(patt: DocSigPattern): DocFilter =
  DocFilter(kind: dfkSigFilter, targetSig: patt)

func toGroup*(patt: DocSigPattern): DocFilterGroup =
  DocFilterGroup(kind: dfgkAndGroup, filters: @[toFilter(patt)])

func toGroup*(filter: DocFilter): DocFilterGroup =
  DocFilterGroup(kind: dfgkAndGroup, filters: @[filter])

func toGroup*(group: sink DocFilterGroup): DocFilterGroup = group

func firstTypeId*(docType: DocType): DocId =
  ## Id of the first concrete type mentioned by `docType` - the type
  ## itself for identifier-like types, the first argument's type for
  ## procs. Returns a default `DocId` for zero-argument procs.
  case docType.kind:
    of dtkIdent, dtkAnonTuple, dtkGenericSpec, dtkVarargs:
      result = docType.id()

    of dtkProc:
      if docType.arguments.len > 0:
        result = docType.arguments[0].identType.firstTypeId()

    else:
      discard

func firstTypeId*(entry): DocId = entry.procType().firstTypeId()

# func firstTypeEntry*()

func procsByFirstId*(entries: seq[DocEntry]): seq[DocEntryGroup] =
  ## Group proc entries by the id of their first argument's type.
  var tmp: Table[DocId, DocEntryGroup]
  for e in entries:
    if e.kind == dekProc:
      let id = e.procType().firstTypeId()
      # FIX: membership was tested with `e.id` (the proc's own id) while
      # the table is keyed by the first-argument type id, so existing
      # groups could be overwritten and `tmp[id].add` could hit a missing
      # key.
      if id notin tmp:
        tmp[id] = newEntryGroup(e)

      else:
        tmp[id].add e

  for _, group in tmp:
    result.add group

# func procsByClass*



iterator topMatching*(db: DocDb; group: DocFilterGroup | DocFilter): DocEntry =
  for _, entry in db.top:
| if entry.matches(toGroup(group)): 274 | yield entry 275 | 276 | iterator allMatching*(db: DocDb; group: DocFilterGroup | DocFilter): DocEntry = 277 | for _, entry in db.entries: 278 | if entry.matches(toGroup(group)): 279 | yield entry 280 | 281 | proc getProcsForType*(entry): seq[DocEntry] = 282 | for entry in entry.db.allMatching(toGroup( 283 | procPatt(choice( 284 | sigPatt(entry.id()), 285 | genPatt(nimTyHeadChoice, sigPatt(entry.id())) 286 | ), sigPatt(dspkTrail)) 287 | )): 288 | result.add entry 289 | -------------------------------------------------------------------------------- /tests/tFromSimpleCode.nim: -------------------------------------------------------------------------------- 1 | import 2 | hmisc/other/[oswrap, hshell, hlogger], 3 | hmisc/algo/halgorithm, 4 | hmisc/preludes/unittest 5 | 6 | import 7 | haxdoc/extract/from_nim_code, 8 | haxdoc/[docentry, docentry_io], 9 | haxdoc/generate/[sourcetrail_db, docentry_hext, sqlite_db], 10 | haxdoc/process/[docentry_query, docentry_group], 11 | nimtrail/nimtrail_common 12 | 13 | import std/[ 14 | options, streams, strformat, strutils, sequtils 15 | ] 16 | 17 | import hnimast/[compiler_aux, nimble_aux] 18 | 19 | const code = """ 20 | type 21 | MalTypeKind* = enum Nil, True, False, Number, Symbol, String, 22 | List, Vector, HashMap, Fun, MalFun, Atom 23 | 24 | type 25 | FunType = proc(a: varargs[MalType]): MalType 26 | 27 | MalFunType* = ref object 28 | fn*: FunType 29 | ast*: MalType 30 | params*: MalType 31 | is_macro*: bool 32 | 33 | MalType* = ref object 34 | case kind*: MalTypeKind 35 | of Nil, True, False: nil 36 | of Number: number*: int 37 | of String, Symbol: str*: string 38 | of List, Vector: list*: seq[MalType] 39 | of HashMap: hash_map*: int 40 | of Fun: 41 | fun*: FunType 42 | is_macro*: bool 43 | of MalFun: malfun*: MalFunType 44 | of Atom: val*: MalType 45 | 46 | meta*: MalType 47 | 48 | type 49 | Base = ref object of RootObj 50 | 51 | A = ref object of Base 52 | f1*, fDebugTrigger*: string 
53 | debugTestB: B 54 | b: B 55 | 56 | B = ref object of Base 57 | a: A 58 | debugTestA: A 59 | 60 | 61 | Exc = object of CatchableError 62 | Exc2 = object of Exc 63 | Def = object of Defect 64 | 65 | proc canRaise() {.raises: [Exc].} = discard 66 | proc canDefect() {.raises: [Def].} = discard 67 | 68 | proc newExc2(): ref Exc2 = new(result) 69 | 70 | proc zz(b: B) {.deprecated("use somethign else").} = 71 | canRaise() 72 | canDefect() 73 | echo b.debugTestA.debugTestB.debugTestA[] 74 | let z = b.debugTestA.debugTestB 75 | let q = b.debugTestA 76 | 77 | let tmp: int = (@[0, 1, 3])[40] 78 | echo $tmp 79 | 80 | if 2 > 4: 81 | raise newException(Exc, "Some message") 82 | 83 | elif 3 > 4: 84 | raise newExc2() 85 | 86 | 87 | var globalVar = 10 88 | let globalLet = 20 89 | const globalConst = 30 90 | 91 | proc zz(a: A) = 92 | echo globalVar 93 | echo globalConst 94 | echo globalLet 95 | 96 | zz(A()) 97 | zz(B()) 98 | 99 | proc qew() = 100 | var test: MalTypeKind 101 | test = Fun 102 | echo(test) 103 | 104 | type Dist = distinct int 105 | 106 | let 107 | aa = Dist(123) 108 | bb = 123.Dist 109 | cc = aa.int 110 | 111 | type ObjectWithMethods = ref object of RootObj 112 | method exampleMethod(obj: ObjectWithMethods) = 113 | discard 114 | 115 | proc recFirst() 116 | proc recFirst(a: int) = recFirst() 117 | 118 | 119 | proc recOther() = echo "123123" 120 | 121 | proc recFirst() = 122 | recFirst(1) 123 | recOther() 124 | 125 | proc vDist(a: var Dist) = discard 126 | proc vBase(a: var Base) = discard 127 | 128 | const FooBar {.intdefine.}: int = 5 129 | echo FooBar 130 | """ 131 | 132 | startHax() 133 | 134 | let dir = getTempDir() / "tFromSimpleCode" 135 | 136 | var l = newTermLogger() 137 | 138 | suite "Generate DB": 139 | test "Generate DB": 140 | mkDir dir 141 | let file = dir /. 
"a.nim" 142 | file.writeFile code 143 | 144 | block: # Generate initial DB 145 | let db = generateDocDb( 146 | file, fileLIb = some("main"), 147 | logger = l) 148 | 149 | db.writeDbXml(dir, "compile-db") 150 | 151 | 152 | var inDb = newDocDb({file.dir(): "main"}) 153 | 154 | block: # Load DB from xml 155 | for file in walkDir(dir, AbsFile, exts = @["hxde"]): 156 | l.info "Loading DB", file 157 | block: 158 | var reader = newHXmlParser(file) 159 | reader.loadXml(inDb, "dbmain") 160 | 161 | block: 162 | var writer = newXmlWriter(file.withExt("xml")) 163 | writer.writeXml(inDb, "file") 164 | 165 | 166 | var writer: SourcetrailDbWriter 167 | 168 | block: # Open sourcetrail DB 169 | inDb.addKnownLib(getStdPath().dropSuffix("lib"), "std") 170 | let trailFile = dir /. "fromSimpleCode" &. sourcetrailDbExt 171 | rmFile trailFile 172 | writer.open(trailFile) 173 | discard writer.beginTransaction() 174 | 175 | block: # Generate sourcetrail database 176 | let idMap = writer.registerDb(inDb) 177 | 178 | for file in walkDir(dir, AbsFile, exts = @["hxda"]): 179 | var inFile: DocFile 180 | var reader = newHXmlParser(file) 181 | reader.loadXml(inFile, "file") 182 | 183 | writer.registerUses(inFile, idMap, inDb) 184 | 185 | block: # Close sourcetrail DB 186 | discard writer.commitTransaction() 187 | discard writer.close() 188 | 189 | 190 | echo "done" 191 | 192 | 193 | suite "Filter DB": 194 | test "Filter db": 195 | if not exists(dir /. "compile-db" &. 
"hxde"): quit 0 196 | 197 | let db = loadDbXml(dir, "compile-db") 198 | 199 | for entry in db.allMatching(docFilter("seq")): 200 | let procs = entry.getProcsForType() 201 | for pr in procs: 202 | echo pr.name, " ", pr.procType() 203 | 204 | let (groups, other) = db.procsByTypes() 205 | 206 | for group in groups: 207 | echo group.typeEntry.name 208 | let groups = group.splitCommonProcs() 209 | echo " common procs" 210 | for pr in groups.procs.nested[0]: 211 | echo " ", pr.name, " ", pr.procType() 212 | 213 | echo " other procs" 214 | for pr in groups.procs.nested[1]: 215 | echo " ", pr.name, " ", pr.procType() 216 | 217 | 218 | if hasCmd(shellCmd("dot")): 219 | db.inheritDotGraph().toPng(AbsFile "/tmp/inherit.png") 220 | db.usageDotGraph().toPng(AbsFile "/tmp/usage.png") 221 | 222 | 223 | suite "Generate sqlite db": 224 | test "SQlite from xml": 225 | if not exists(dir /. "compile-db" &. "hxde"): quit 0 226 | let db = loadDbXml(dir, "compile-db") 227 | db.writeDbSqlite(dir /. "compile.sqlite") 228 | 229 | 230 | suite "Generate HTML": 231 | test "Generate HTML": 232 | if not exists(dir /. "compile-db" &. "hxde"): quit 0 233 | let db = loadDbXml(dir, "compile-db") 234 | 235 | const genTemplate = """ 236 | 237 | 238 | 239 | 240 | The HTML5 Herald 241 | 242 | 243 | 244 | 245 |
    246 | {% for entry in db %} 247 |
  • {{entry.name}}
  • 248 | {% end %} 249 |
250 | 251 | 252 | 253 | """ 254 | 255 | evalHext(genTemplate, newWriteStream(dir /. "page.html"), { 256 | "db": boxValue(DValue, db) 257 | }) 258 | 259 | 260 | 261 | suite "Multiple packages": 262 | test "Multiple packages": 263 | let dir = getNewTempDir("tFromMultiPackage") 264 | var 265 | imports: seq[string] 266 | requires: seq[string] 267 | 268 | let count = 2 269 | for i in 0 .. count: 270 | let p = &"package{i}" 271 | imports.add &"import {p}/{p}_file1" 272 | requires.add &"{p}" 273 | mkWithDirStructure dir: 274 | dir &"{p}": 275 | file &"{p}.nimble": 276 | &""" 277 | version = "0.1.0" 278 | author = "haxscramper" 279 | description = "Brief documentation for a package {i}" 280 | license = "Apache-2.0" 281 | srcDir = "src" 282 | packageName = "package{i}" 283 | """ 284 | 285 | # TODO test with package name that does not correspond to 286 | # existing package `packageName = "package{i}"` 287 | 288 | 289 | 290 | # no 'src/' dir because I'm emulating globally installed packages 291 | file &"{p}_main.nim" 292 | dir &"{p}": 293 | file &"{p}_file1.nim": &"import {p}_file2; export {p}_file2" 294 | file &"{p}_file2.nim": &"import {p}_file3; export {p}_file3" 295 | file &"{p}_file3.nim": &"proc {p}_proc*() = discard" 296 | 297 | let req = requires.joinq(", ") 298 | mkWithDirStructure dir: 299 | dir "main": 300 | # have 'src/' because I'm emulating package in development 301 | dir "src": 302 | file "main.nim": 303 | file.writeLine imports.joinl() 304 | for pack in 0 .. count: 305 | &"package{pack}_proc()\n" 306 | 307 | file "readme.org": 308 | "Sample readme for a package" 309 | file "main.nimble": 310 | &""" 311 | version = "0.1.0" 312 | author = "haxscramper" 313 | description = "Description of the main package" 314 | license = "Apache-2.0" 315 | packageName = "main" 316 | requires {req} 317 | """ 318 | 319 | let db = docDbFromPackage( 320 | getPackageInfo(dir / "main"), 321 | searchDir = dir) 322 | 323 | db.writeDbSourcetrail(dir /. 
"multiPackage") 324 | db.writeDbXml(dir, "multiPackage") 325 | 326 | if hasCmd shellCmd("dot"): 327 | let graph = db.structureDotGraph() 328 | graph.toPng(dir /. "structure.png") 329 | -------------------------------------------------------------------------------- /src/haxdoc/generate/sourcetrail_db.nim: -------------------------------------------------------------------------------- 1 | import ../docentry 2 | import ../docentry_io 3 | 4 | import cxxstd/cxx_common 5 | import nimtrail/nimtrail_common 6 | 7 | import 8 | hmisc/other/[oswrap, colorlogger], 9 | hmisc/core/all, 10 | hmisc/hasts/xml_ast 11 | 12 | import std/[with, options, tables, strutils] 13 | 14 | 15 | proc getFile(writer: var SourcetrailDbWriter, path, lang: string): cint = 16 | result = writer.recordFile(path) 17 | discard writer.recordFileLanguage(result, lang) 18 | 19 | 20 | proc toTrailName(ident: DocLink): SourcetrailNameHierarchy = 21 | var parts: seq[tuple[prefix, name, postfix: string]] 22 | for part in ident.parts: 23 | if part.kind in dekProcKinds: 24 | var buf = "(" 25 | for idx, arg in part.procType.arguments: 26 | if idx > 0: buf &= ", " 27 | buf &= $arg.identType 28 | 29 | buf &= ")" 30 | 31 | if part.procType.returnType.isSome(): 32 | parts.add ($part.procType.returnType.get(), part.name, buf) 33 | 34 | else: 35 | parts.add ("", part.name, buf) 36 | 37 | else: 38 | parts.add ("", part.name, "") 39 | 40 | 41 | 42 | return initSourcetrailNameHierarchy(("::", parts)) 43 | 44 | proc toRange(fileId: cint, extent: DocExtent): SourcetrailSourceRange = 45 | with result: 46 | fileId = fileId 47 | startLine = extent.start.line.cint 48 | startColumn = extent.start.column.cint + 1 49 | endLine = extent.finish.line.cint 50 | endColumn = extent.finish.column.cint + 1 51 | 52 | proc toRange(fileId: cint, codeRange: DocCodeSlice): SourcetrailSourceRange = 53 | with result: 54 | fileId = fileId 55 | startLine = codeRange.line.cint 56 | endLine = codeRange.line.cint 57 | startColumn = 
    codeRange.column.a.cint + 1
  endColumn = codeRange.column.b.cint + 1

type
  IdMap* = object
    ## Mapping from haxdoc documentable entry ids to the integer symbol
    ## ids assigned by the sourcetrail database writer.
    docToTrail: Table[DocId, cint]
    db: DocDb

using
  writer: var SourcetrailDbWriter

proc registerUses*(writer; file: DocFile, idMap: IdMap, db: DocDb) =
  ## Record every symbol occurrence (local symbols, declarations, imports
  ## and regular uses) annotated in `file` into the sourcetrail database,
  ## resolving haxdoc ids to sourcetrail ids through `idMap`.
  let fileID = writer.getFile(file.path.string, "nim")
  var lastDeclare: DocOccurKind
  for line in file.body.codeLines:
    for part in line.parts:
      if not part.occur.isSome():
        continue

      let occur = part.occur.get()
      # NOTE(review): `userId` stays default-initialized when the
      # occurrence has no valid user and no matching declaration refid;
      # `recordReference` below then receives a zero source id - confirm
      # the writer tolerates this.
      var userID: cint
      if occur.user.isSome():
        if occur.user.get().isValid:
          userId = idMap.docToTrail[occur.user.get()]

        else:
          echov file.path
          echov part, "has invalid user"

      if occur.kind in dokLocalKinds:
        # Local symbols (arguments, local variables) use file-local ids.
        discard writer.recordLocalSymbolLocation(
          writer.recordLocalSymbol(occur.localId),
          toRange(fileId, part.slice))

      elif not occur.refid.isValid():
        discard

      elif occur.kind in {
        dokObjectDeclare, dokCallDeclare,
        dokAliasDeclare, dokEnumDeclare,
        dokGlobalDeclare, dokEnumFieldDeclare,
        dokFieldDeclare
      }:
        # Declaration occurrence - record the declared symbol's location
        # and remember the declaration kind for specialization handling.
        if occur.refid in idMap.docToTrail:
          userId = idMap.docToTrail[occur.refid]
          lastDeclare = occur.kind
          discard writer.recordSymbolLocation(
            userId, toRange(fileId, part.slice))

        else:
          echov occur, "id is not in the doctrail map"

      elif occur.kind in {dokImport}:
        if file.moduleId.isSome():
          if true:
            # Record module imports as file-file relations
            let target = db[occur.refid]
            if target.location.isSome():
              discard writer.recordReferenceLocation(
                writer.recordReference(
                  fileId,
                  writer.getFile(target.getPathInPackage().string, "nim"),
                  srkInclude
                ),
                toRange(fileId, part.slice))

          elif false:
            # Record module relationship as 'include' between file and
            # module inside another file.
            discard writer.recordReferenceLocation(
              writer.recordReference(
                fileId,
                idMap.docToTrail[occur.refid],
                srkInclude
              ),
              toRange(fileId, part.slice))

          elif false:
            # Record relations between modules as imports
            discard writer.recordReferenceLocation(
              writer.recordReference(
                idMap.docToTrail[file.moduleId.get()],
                idMap.docToTrail[occur.refid],
                srkImport
              ),
              toRange(fileId, part.slice))

      else:
        # Regular use occurrence - map the haxdoc occurrence kind onto a
        # sourcetrail reference kind and record user -> target relation.
        if occur.refid notin idMap.docToTrail:
          warn "Occur at", part, "does not refere to any exiting entry"
          continue

        let targetId = idMap.docToTrail[occur.refid]
        let useKind =
          case occur.kind:
            of dokLocalKinds:
              raise newUnexpectedKindError(occur.kind)

            of dokTypeAsFieldUse, dokTypeAsReturnUse, dokTypeDirectUse,
               dokTypeAsParameterUse, dokTypeAsArgUse, dokTypeConversionUse:
              srkTypeUsage

            of dokTypeSpecializationUse:
              if lastDeclare == dokAliasDeclare:
                srkTemplateSpecialization

              else:
                # sourcetrail 'template specialization' relations is used
                # in order to show that one type is a generic
                # specialization of another type. In haxdoc 'generic
                # specialization' is used that in this particular case
                # generic type was specialized with some parameter -
                # without describing /context/ in which declaration
                # occurred. Maybe later I will add support for 'context
                # ranges' in annotation sources and differentiate between
                # 'specialized generic used as a field' and 'inherited
                # from specialized generic'
                srkTypeUsage

            of dokInheritFrom:
              srkInheritance

            of dokCall:
              srkCall

            of dokEnumFieldUse, dokGlobalRead, dokGlobalWrite,
               dokFieldUse, dokFieldSet:
              srkUsage

            of dokAnnotationUsage, dokDefineCheck:
              srkMacroUsage

            else:
              raise newUnexpectedKindError(occur)

        let refSym = writer.recordReference(
          userId, targetId, useKind)

        discard writer.recordReferenceLocation(
          refSym, toRange(fileId, part.slice))


proc registerDb*(writer; db: DocDb): IdMap =
  ## Register every documentable entry of `db` as a sourcetrail symbol
  ## (with location where available) and return the id mapping used by
  ## `registerUses`.
  for full, id in db.fullIdents:
    let entry = db[id]

    # Skip anonymous packages, procedure arguments and the synthetic
    # 'ignored' module.
    if (entry.kind in {dekPackage} and entry.name == "") or
       (entry.kind in {dekArg}) or
       (entry.kind in {dekModule} and entry.name == ignoredAbsFile.string):
      continue

    let name = full.toTrailName()

    let defKind =
      case entry.kind:
        of dekNewtypeKinds - { dekAlias, dekDistinctAlias, dekEnum }:
          sskStruct

        of dekProc, dekFunc, dekConverter, dekIterator:
          sskFunction

        of dekMacro, dekTemplate:
          sskMacro

        of dekAlias, dekDistinctAlias:
          sskTypedef

        of dekGlobalConst, dekGlobalVar, dekGlobalLet:
          sskGlobalVariable

        of dekCompileDefine:
          # compile-time defines might be treated as macros or as global
          # variables. I'm not exactly sure how to classify them, but for
          # now I think global variable describes semantics a little better.
          sskGlobalVariable

        of dekEnum: sskEnum
        of dekField: sskField
        of dekEnumField: sskEnumConstant
        of dekBuiltin: sskBuiltinType
        of dekPragma: sskAnnotation
        of dekModule: sskModule
        of dekPackage: sskPackage
        of dekMethod: sskMethod

        else:
          raise newImplementKindError(entry)

    result.docToTrail[entry.id()] = writer.recordSymbol(name, defKind)

    if entry.location.isSome():
      let fileId = writer.getFile(entry.getPathInPackage().string, "nim")
      # NOTE(review): `recordSymbol` is called a second time here and the
      # map entry is overwritten - presumably the writer returns the same
      # id for an identical name/kind pair; confirm this is intended.
      let symId = writer.recordSymbol(name, defKind)

      result.docToTrail[entry.id()] = symId

      if entry.declHeadExtent.isSome():
        let extent = toRange(fileId, entry.declHeadExtent.get())
        discard writer.recordSymbolLocation(symId, extent)
        discard writer.recordSymbolScopeLocation(symId, extent)

      elif entry.kind == dekModule:
        # Modules have no declaration head extent - anchor them to the
        # very start of the file instead.
        let extent = toRange(fileId, initDocSlice(1, 0, 0))
        discard writer.recordSymbolLocation(symId, extent)




# Open the sourcetrail database file for writing (status code discarded).
proc open*(writer; file: AbsFile) = discard writer.open(file.string)

proc registerFullDb*(writer; db: DocDb) =
  ## Register all entries of `db`, then all per-file symbol occurrences;
  ## each stage runs in its own writer transaction.
  discard writer.beginTransaction()
  let idMap = writer.registerDb(db)
  discard writer.commitTransaction()

  for file in db.files:
    discard writer.beginTransaction()
    writer.registerUses(file, idMap, db)
    discard writer.commitTransaction()

const
  sourcetrailDbExt* = "srctrldb" ## Sourcetrail database file extension
  sourcetrailProjectExt* = "srctrlprj" ## Sourcetrail project file extension


proc writeDbSourcetrail*(db: DocDb, outFile: AbsFile) =
  ## Serialize `db` into a sourcetrail database at `outFile` (extension is
  ## forced to `srctrldb`); any pre-existing database file is removed.
  var writer: SourcetrailDbWriter
  let outFile = outFile.withExt(sourcetrailDbExt)
  assertExists outFile.dir()
  rmFile outFile
  writer.open(outFile)
  registerFullDb(writer, db)
  discard writer.close()

proc writeSourcetrailDb*(db: DocDb, outFile: AbsFile) {.deprecated.} =
  ## Deprecated alias for `writeDbSourcetrail`.
  writeDbSourcetrail(db, outFile)

when isMainModule:
  ## Ad-hoc driver: load a previously serialized documentation database
  ## (`.hxde`) and per-file annotations (`.hxda`) from a temp directory
  ## and write them out as a sourcetrail database.
  startHax()
  let dir = getTempDir() / "from_nim_code2"

  let trailFile = dir /. "trail.srctrldb"
  rmFile trailFile

  var db = newDocDb()
  db.addKnownLib(getStdPath().dropSuffix("lib"), "std")

  var writer = newSourcetrailWriter(trailFile)

  for file in walkDir(dir, AbsFile, exts = @["hxde"]):
    var reader = newHXmlParser(file)
    reader.loadXml(db, "main")


  discard writer.beginTransaction()
  let idMap = writer.registerDb(db)
  discard writer.commitTransaction()


  for file in walkDir(dir, AbsFile, exts = @["hxda"]):
    var inFile: DocFile
    var reader = newHXmlParser(file)
    reader.loadXml(inFile, "file")

    discard writer.beginTransaction()
    # FIX: `registerUses` takes the database as its final argument
    # (see its signature above); the original call omitted `db`, which
    # cannot compile - there is no overload without it.
    writer.registerUses(inFile, idMap, db)
    discard writer.commitTransaction()

  echov trailFile
  discard writer.close()
  echo "done"
--------------------------------------------------------------------------------
/src/haxdoc/process/docdb_diff.nim:
--------------------------------------------------------------------------------
import
  std/[tables, sequtils, with, strformat, options, strutils]

import
  hmisc/algo/[hseq_distance, htemplates, halgorithm],
  hmisc/types/[colorstring],
  hmisc/[base_errors, hdebug_misc]

import
  ../docentry_types,
  ../docentry


type
  DocDiffKind* = enum
    ldkNoChanges ## Line does not have syntactical or semantical changes
    ldkNewText ## Code was added
    ldkDeletedText ## Code was removed
    ldkTextChanged ## Line had text (syntactical) changes
    ldkSemChanged ## Line did not change syntactically, but one of the
    ## used symbols changed its meaning (can raise new
    ## exception, changed its implementation)

    ldkDocChanged ## Used entry did not change its semantical meaning,
    ## but documentation was modified.


  DocLineDiffPart = object
    ## Pairing of old/new code parts on a matched line, plus the entry
    ## diff when the referenced symbol changed between versions.
    oldPart*: DocCodePart
    newPart*: DocCodePart
    diff*: Option[DocEntryDiff]

  DocLineDiff* = object
    ## Diff status of a single annotated source line.
    kind*: DocDiffKind
    # REVIEW maybe storing all the lines in duplicate form is not worth
    # it, or at least should be made via `case` variant to reduce space?
    oldLine*: DocCodeLine
    newLine*: DocCodeLine
    diffParts*: seq[DocLineDiffPart]

  DocFileDiff* = object
    ## Line-by-line diff of a whole annotated file.
    diffLines*: seq[DocLineDiff]

  DocEntryDiffKind* = enum
    ## Type of documentable entry diffs that directly affect its signature
    ## or internal implementation

    dedProcedureKindChanged ## Signature different due to procedure kind
    ## change - `template` was converted to a `macro`, `func` became a
    ## procedure etc.
    dedProcedureRenamed ## Signature different due to rename
    dedNewArgument ## Different signature due to new argument
    dedRemovedArgument ## Different signature due to removed argument
    dedChangedArgument ## Different signature due to argument changed type
    dedChangedReturnType ## Different signature due to changed return type

    dedImplementationChanged ## Implementation changed between versions
    # IDEA in theory I should be able to provide more granular information
    # about implementation changes - I can track how passed parameters are
    # handled for example. And then track argument usage in different
    # expressions. For example when I process a procedure call, like
    # `procname("positional",named=1231)` - I can keep record of which
    # procedure symbol I'm currently processing, and what named parameters
    # I use specifically. I don't think this qualifies as `user` kind of
    # use, but something close to that might work out. In this case example
    # above would be annotated like
    #
    # ```
    # procname("positional", named = 1234)
    # ^^^^^^^^ ^^^^^^^^^^^ ^^^^^^^^^^^^
    # |        |           |
    # |        |           refers to use of proc argument 'named'
    # |        |
    # |        Usage of 0th procedure argument
    # |
    # Regular procedure call
    # ```



    dedSideEffectsChanged ## Entry has different side effects
    dedRaisesChanged ## Entry has different list of raised exceptions


    dedTypeNameChanged ## Entry has different type name
    dedNewField ## Record entry has new field
    dedRemovedField ## Record entry lacks field
    dedChangedField ## Record entry field changed its type

    dedEntryDeleted ## New database does not contain entry that can be
    ## considered 'similar enough' to provide more concrete change
    ## detection. In other words - signature changed too much, and it is
    ## not possible to infer original ID of the entry

    dedEntryAdded ## New database contains entry that cannot be tracked
    ## back to the old database version (similarly to `deleted` it might be
    ## caused by insufficiently sophisticated change detection algorithm
    ## rather than actually new entry)

  DedChangeKind = enum
    ## Direction/category of a single entry diff part.
    dckNewAdded
    dckOldRemoved
    dckUsedEntryChanged
    dckPresentViaChanged

  DocEntryDiffPart = object
    ## One atomic detected difference for an entry.
    changeKind*: DedChangeKind
    entry*: Option[DocId]
    case kind*: DocEntryDiffKind
      of dedSideEffectsChanged, dedRaisesChanged:
        presentVia: Option[DocIdSet]

      else:
        discard

  DocEntryDiff* = object
    ## All detected differences for a single documentable entry.
    diffParts*: seq[DocEntryDiffPart]

  DocDbDiff* = object
    oldDb*: DocDb
    newDb*: DocDb

    sameSignature: DocIdSet ## Set of documentable entries with the same
    ## signature

    oldNewMap: Table[DocId, DocId] ## Mapping between old and new
    ## versions of documentable entries. If old documentable entry is
    ## deleted it is mapped to an invalid id.

    newOldMap: Table[DocId, DocId] ## Reversed map

    entryChange: Table[DocId, DocEntryDiff] ## Mapping between documentable
    ## entry ID and diff associated with that entry.
    ##
    ## Due to possibility of deleting old entries and inserting new ones,
    ## IDs in the table are mixed - most of them are from old database (and
    ## thus also contained in `changedSignature` or `sameSignature`). The
    ## only time when ID belongs to new documentable entry is when it is
    ## completely new.

proc isAdded*(db: DocDbDiff, id: DocId): bool =
  ## Entry exists only in the new database version (has a recorded diff
  ## but is in neither the old->new map nor the same-signature set).
  id notin db.oldNewMap and
  id notin db.sameSignature and
  id in db.entryChange

proc isDeleted*(db: DocDbDiff, id: DocId): bool =
  ## Entry existed in the old version and is mapped to an invalid id.
  id in db.oldNewMap and
  not db.oldNewMap[id].isValid()

proc isChanged*(db: DocDbDiff, id: DocId): bool =
  ## Entry has a recorded diff.
  id in db.entryChange

iterator items*(diff: DocEntryDiff): DocEntryDiffPart =
  for part in items(diff.diffParts):
    yield part

iterator pairs*(diff: DocEntryDiff): (int, DocEntryDiffPart) =
  for part in pairs(diff.diffParts):
    yield part

func len*(diff: DocEntryDiff): int = diff.diffParts.len()

proc add*(
    diff: var DocEntryDiff, part: DocEntryDiffPart | seq[DocEntryDiffPart]) =
  ## Append one or several diff parts.
  diff.diffParts.add part



proc getNew*(db: DocDbDiff, id: DocId): DocId =
  ## Return 'new' documentable entry version. If it is already in new
  ## documentable database return it without changing.
172 | if id in db.sameSignature or db.isAdded(id): 173 | assert id in db.newDb 174 | return id 175 | 176 | elif db.isDeleted(id): 177 | raise newGetterError( 178 | "Cannot get 'new' for documentable entry id", 179 | id, db.oldDb[id], " - it has been deleted") 180 | 181 | else: 182 | raise newLogicError( 183 | "Entry must be ether unchanged, new or deleted") 184 | 185 | 186 | proc getOld*(db: DocDbDiff, id: DocId): DocId = 187 | ## Return 'old' documentable entry version. It it is already in old 188 | ## documentable database return it without changing. 189 | if id in db.sameSignature or db.isDeleted(id): 190 | assert id in db.oldDb 191 | return id 192 | 193 | elif db.isAdded(id): 194 | raise newGetterError( 195 | "Cannot get 'old' for documentable entry id", 196 | id, db.newDb[id], " - it is completely new") 197 | 198 | else: 199 | raise newLogicError( 200 | "Entry must be ether unchanged, new or deleted") 201 | 202 | proc getDiff*(db: DocDbDiff, id: DocId): DocEntryDiff = 203 | if id in db.entryChange: 204 | result = db.entryChange[id] 205 | 206 | 207 | proc initDiffPart(kind: DocEntryDiffKind): DocEntryDiffPart = 208 | DocEntryDiffPart(kind: kind) 209 | 210 | proc diffType(db: DocDbDiff, old, new: DocType): seq[DocEntryDiffPart] = 211 | assert old.kind == new.kind, 212 | &"Cannot diff types of different kind - old is {old.kind}, new is {new.kind}" 213 | 214 | case old.kind: 215 | of dtkProc: 216 | if old.returnType != new.returnType: 217 | result.add initDiffPart(dedChangedReturnType) 218 | 219 | else: 220 | discard 221 | 222 | proc diffProc(db: DocDbDiff, old, new: DocEntry): DocEntryDiff = 223 | result.add db.diffType(old.procType, new.procType) 224 | 225 | for effect in new.effects - old.effects: 226 | result.add initDiffPart(dedSideEffectsChanged).withIt do: 227 | it.changeKind = dckNewAdded 228 | it.entry = some effect 229 | if effect in new.effectsVia: 230 | it.presentVia = some new.effectsVia[effect] 231 | 232 | for effect in old.effects - new.effects: 233 
| result.add initDiffPart(dedSideEffectsChanged).withIt do: 234 | it.changeKind = dckOldRemoved 235 | it.entry = some effect 236 | if effect in old.effectsVia: 237 | it.presentVia = some old.effectsVia[effect] 238 | 239 | for raised in new.raises - old.raises: 240 | result.add initDiffPart(dedRaisesChanged).withIt do: 241 | it.changeKind = dckNewAdded 242 | it.entry = some raised 243 | if raised in old.raisesVia: 244 | it.presentVia = some old.raisesVia[raised] 245 | 246 | for raised in old.raises - new.raises: 247 | result.add initDiffPart(dedRaisesChanged).withIt do: 248 | it.changeKind = dckOldRemoved 249 | it.entry = some raised 250 | if raised in old.raisesVia: 251 | it.presentVia = some old.raisesVia[raised] 252 | 253 | proc updateDiff(db: var DocDbDiff, id: DocId) = 254 | if db.isDeleted(id): 255 | raise newImplementError("Deleted entry") 256 | 257 | elif db.isAdded(id): 258 | raise newImplementError("Added entry") 259 | 260 | else: 261 | let 262 | old = db.oldDb[db.getOld(id)] 263 | new = db.newDb[db.getNew(id)] 264 | 265 | if old.kind == new.kind: 266 | case old.kind: 267 | of dekProcKinds: 268 | let diff = db.diffProc(old, new) 269 | if diff.len > 0: 270 | db.entryChange[id] = diff 271 | 272 | else: 273 | discard 274 | 275 | 276 | proc diffDb*(oldDb, newDb: DocDb): DocDbDiff = 277 | result = DocDbDiff(oldDb: oldDb, newDb: newDb) 278 | var oldIds, newIds: DocIdSet 279 | 280 | for entry in allItems(oldDb): 281 | oldIds.incl entry.id 282 | 283 | for entry in allItems(newDb): 284 | newIds.incl entry.id 285 | 286 | result.sameSignature = oldIds * newIds 287 | 288 | for id in result.sameSignature: 289 | result.updateDiff(id) 290 | 291 | let 292 | deleted = oldIds - newIds 293 | added = newIds - oldIds 294 | 295 | for entry in deleted: 296 | result.oldNewMap[entry] = DocId() 297 | 298 | 299 | 300 | 301 | 302 | proc diffEntry*(db: DocDbDiff, oldEntry, newEntry: DocEntry): DocEntryDiff = 303 | discard 304 | 305 | proc diffFile*(db: DocDbDiff, oldFile, newFile: 
    DocFile): DocFileDiff =
  ## Produce a line-by-line diff between two versions of an annotated
  ## file, attaching per-symbol entry diffs to lines that kept their text
  ## but reference changed symbols (`ldkSemChanged`).
  proc lineCmp(oldLine, newLine: DocCodeLine): bool =
    # Lines compare equal on raw text only; annotations are ignored.
    oldLine.text == newLine.text

  let diff = myersDiff(
    oldFile.body.codeLines,
    newFile.body.codeLines,
    lineCmp
  )

  let shifted = shiftDiffed(
    oldFile.body.codeLines,
    newFile.body.codeLines,
    diff,
    DocCodeLine()
  )

  for (oldLine, newLine) in zip(shifted.oldShifted, shifted.newShifted):
    let
      (oldKind, oldCode) = oldLine
      (newKind, newCode) = newLine


    var kind: DocDiffKind
    var changed: seq[DocLineDiffPart]
    if oldKind == dskKeep and newKind == dskKeep:
      # Text unchanged - check whether any referenced symbol changed
      # semantically between database versions.
      kind = ldkNoChanges
      for (old, new) in zip(oldCode.parts, newCode.parts):
        var part = DocLineDiffPart(oldPart: old, newPart: new)
        if old.hasRefid() and
           new.hasRefid() and
           db.isChanged(old.getRefid()):
          part.diff = some db.getDiff(old.getRefid())
          kind = ldkSemChanged
        changed.add part

    elif newKind == dskInsert:
      kind = ldkNewText

    elif oldKind == dskDelete:
      kind = ldkDeletedText

    result.diffLines.add DocLineDiff(
      kind: kind,
      oldLine: oldCode,
      newLine: newCode,
      diffParts: changed
    )


proc formatDiff*(db: DocDbDiff, diff: DocEntryDiff): string =
  ## Render an entry diff as human-readable text, one change per line.
  var text: seq[string]
  for part in diff:
    case part.kind:
      of dedSideEffectsChanged:
        text.add &"Changed side effect - added {db.newDb[part.entry.get()].name}"

      of dedRaisesChanged:
        text.add &"New possible exception - added {db.newDb[part.entry.get()].name}"

      else:
        raise newImplementKindError(part)

  return text.join("\n")


proc formatDiff*(db: DocDbDiff, diff: DocFileDiff): string =
  ## Render a file diff as a side-by-side colored text dump: old lines in
  ## the left column, new lines in the right, with `^`-markers and entry
  ## diff descriptions under semantically changed parts.
  let maxLine = maxIt(diff.diffLines, it.oldLine.lineHigh) + 2

  for line in diff.diffLines:
    var buf: ColoredRuneGrid
    let (oldStyle, newStyle) =
      case line.kind:
        of ldkSemChanged: (bgDefault + fgDefault, bgDefault + fgCyan)
        of ldkDeletedText: (bgDefault + fgRed, bgDefault + fgDefault)
        of ldkNewText: (bgDefault + fgDefault, bgDefault + fgGreen)
        else: (bgDefault + fgDefault, bgDefault + fgDefault)

    buf[0, 0] = toStyled(line.oldLine.text, oldStyle)
    buf[0, maxLine] = toStyled(line.newLine.text, newStyle)

    if line.diffParts.len > 0:
      for part in line.diffParts:
        if part.diff.isSome():
          # Point at the changed part in the new column and print the
          # formatted entry diff beneath it.
          let col = maxLine + part.newPart.slice.column.a
          buf[1, col] = '^'
          buf[2, col] = toStyled(
            db.formatDiff(part.diff.get()), fgMagenta + bgDefault)






    with result:
      add $buf
      add "\n"
--------------------------------------------------------------------------------
/src/haxdoc/docentry_io.nim:
--------------------------------------------------------------------------------
## Serialization and deserialization for documentation entries

import
  ./docentry,
  hnimast/nimtraits/trait_xml

export trait_xml

import hmisc/other/[hshell]

import haxorg/defs/defs_all
import hnimast/nimtraits/nimtraits
export trait_xml

storeTraits(OrgMetaTag)
storeTraits(ShellCmd)

using
  w: var XmlWriter
  r: var HXmlParser
  tag: string

# proc loadXml*(r; it: var DocEntryKind, tag) =
#   loadEnumWithPrefix[DocEntryKind](r, it, tag, "dek")

# proc loadXml*(r; it: var DocOccurKind, tag) =
#   r.loadEnumWithPrefix(it, tag, "dok")


# proc writeXml*(w; cmd: ShellCmd, tag: string) = raiseImplementError("")
# proc writeXml*(w; cmd: OrgMetaTag, tag: string) = raiseImplementError("")

# proc xmlAttribute*(w; key: string, use: seq[DocTypeUseKind]) =
#   if use.len > 0:
#     w.xmlAttribute(key, mapIt(use, $it).join(":"))

# # proc xml


# proc xmlAttribute*(w; key:
string, id: DocId) = 41 | # xmlAttribute(w, key, $id.id) 42 | 43 | # proc xmlAttribute*(w; key: string, file: AnyPath) = 44 | # xmlAttribute(w, key, file.getStr()) 45 | 46 | # proc xmlAttribute*(w; key: string, pos: DocPos) = 47 | # xmlAttribute(w, key, &"{pos.line}:{pos.column}") 48 | 49 | # proc writeXml*(w; it: DocAdmonition, tag) 50 | # proc loadXml*(r; it: var DocAdmonition, tag) 51 | 52 | # proc writeXml*(w; it: DocMetatag, tag) 53 | # proc loadXml*(r; it: var DocMetatag, tag) 54 | 55 | # proc writeXml*(w; it: DocOccur, tag) 56 | # proc loadXml*(r; it: var DocOccur, tag) 57 | 58 | # proc writeXml*(w; it: DocId, tag) 59 | # proc loadXml*(r; it: var DocId, tag) 60 | 61 | # proc writeXml*(w; it: DocText, tag) 62 | # proc loadXml*(r; it: var DocText, tag) 63 | 64 | # proc writeXml*(w; it: DocLinkPart, tag) 65 | # proc loadXml*(r; it: var DocLinkPart, tag) 66 | 67 | # proc writeXml*(w; it: DocLink, tag) 68 | # proc loadXml*(r; it: var DocLink, tag) 69 | 70 | # proc writeXml*(w; it: DocType, tag) 71 | # proc loadXml*(r; it: var DocType, tag) 72 | 73 | # proc writeXml*(w; it: DocEntry, tag) 74 | # # proc loadXml*(r; it: var DocEntry, tag) 75 | 76 | # proc writeXml*(w; it: DocFile, tag) 77 | # proc loadXml*(r; it: var DocFile, tag) 78 | 79 | # proc writeXml*(w; it: DocDb, tag) 80 | # proc loadXml*(r; it: var DocDb, tag) 81 | 82 | # proc writeXml*(w; it: DocIdent, tag) 83 | # proc loadXml*(r; it: var DocIdent, tag) 84 | 85 | # proc writeXml*(w; it: DocPragma, tag) 86 | # proc loadXml*(r; it: var DocPragma, tag) 87 | 88 | # proc writeXml*(w; it: DocCode, tag) 89 | # proc loadXml*(r; it: var DocCode, tag) 90 | 91 | # proc writeXml*(w; it: DocCodePart, tag) 92 | # proc loadXml*(r; it: var DocCodePart, tag) 93 | 94 | # proc writeXml*(w; it: DocCodeSlice, tag) 95 | # proc loadXml*(r; it: var DocCodeSlice, tag) 96 | 97 | # proc writeXml*(w; it: DocLocation, tag) 98 | # proc loadXml*(r; it: var DocLocation, tag) 99 | 100 | # proc writeXml*(w; it: DocCodeLine, tag) 101 | # 
proc loadXml*(r; it: var DocCodeLine, tag) 102 | 103 | # proc writeXml*(w; it: DocExtent, tag) 104 | # proc loadXml*(r; it: var DocExtent, tag) 105 | 106 | # proc writeXml*(w; it: DocPos, tag) 107 | # proc loadXml*(r; it: var DocPos, tag) 108 | 109 | # # ~~~~ DocLocation ~~~~ # 110 | 111 | # proc loadXml*(r; it: var DocLocation, tag) = 112 | # genXmlLoader(DocLocation, it, r, tag, newObjExpr = DocLocation()) 113 | 114 | # proc writeXml*(w; it: DocLocation, tag) = genXmlWriter(DocLocation, it, w, tag) 115 | 116 | # # ~~~~ DocCode ~~~~ # 117 | 118 | # proc loadXml*(r; it: var DocCode, tag) = 119 | # genXmlLoader(DocCode, it, r, tag, newObjExpr = DocCode()) 120 | 121 | # proc writeXml*(w; it: DocCode, tag) = genXmlWriter(DocCode, it, w, tag) 122 | 123 | # # ~~~~ DocCodePart ~~~~ # 124 | 125 | # proc loadXml*(r; it: var DocCodePart, tag) = 126 | # r.skipOpen(tag) 127 | # r.loadXml(it.slice.line, "line") 128 | # r.loadXml(it.slice.column, "column") 129 | # if r["kind"]: 130 | # var kind: DocOccurKind 131 | # r.loadXml(kind, "kind") 132 | # it.occur = some DocOccur(kind: kind) 133 | 134 | # if r["user"]: 135 | # r.loadXml(it.occur.get().user, "user") 136 | 137 | # case kind: 138 | # of dokLocalKinds: 139 | # r.loadXml(it.occur.get().localId, "localId") 140 | 141 | # else: 142 | # r.loadXml(it.occur.get().refid, "refid") 143 | 144 | # r.skipCloseEnd() 145 | 146 | # proc writeXml*(w; it: DocCodePart, tag) = 147 | # w.xmlOpen(tag) 148 | # w.xmlAttribute("line", it.slice.line) 149 | # w.xmlAttribute("column", it.slice.column) 150 | # if it.occur.getSome(occur): 151 | # w.xmlAttribute("kind", occur.kind) 152 | # w.xmlAttribute("user", occur.user) 153 | # case occur.kind: 154 | # of dokLocalKinds: w.xmlAttribute("localId", occur.localId) 155 | # else: w.xmlAttribute("refid", occur.refId) 156 | 157 | 158 | # w.xmlCloseEnd() 159 | 160 | # # ~~~~ DocCodeSlice ~~~~ # 161 | 162 | # proc loadXml*(r; it: var DocCodeSlice, tag) = 163 | # genXmlLoader(DocCodeSlice, it, r, tag, 
newObjExpr = DocCodeSlice()) 164 | 165 | # proc writeXml*(w; it: DocCodeSlice, tag) = genXmlWriter(DocCodeSlice, it, w, tag) 166 | 167 | # # ~~~~ DocCodeLine ~~~~ # 168 | 169 | # proc loadXml*(r; it: var DocCodeLine, tag) = 170 | # r.skipStart(tag) 171 | # r.loadXml(it.text, "text") 172 | # r.loadXml(it.parts, "parts") 173 | # r.loadXml(it.overlaps, "overlaps") 174 | # r.skipEnd(tag) 175 | 176 | # proc writeXml*(w; it: DocCodeLine, tag) = 177 | # w.xmlStart(tag) 178 | # w.indent() 179 | # w.xmlWrappedCdata(it.text, "text") 180 | # for part in it.parts: w.writeXml(part, "parts") 181 | # for part in it.overlaps: w.writeXml(part, "overlaps") 182 | # w.dedent() 183 | # w.xmlEnd(tag) 184 | 185 | # # ~~~~ DocPragma ~~~~ # 186 | 187 | # proc loadXml*(r; it: var DocPragma, tag) = 188 | # genXmlLoader(DocPragma, it, r, tag, newObjExpr = DocPragma()) 189 | 190 | # proc writeXml*(w; it: DocPragma, tag) = 191 | # genXmlWriter(DocPRagma, it, w, tag) 192 | 193 | # # ~~~~ DocPos ~~~~ # 194 | 195 | # proc loadXml*(r; it: var DocPos, tag) = 196 | # if r.atAttr(): 197 | # var sl: Slice[int] 198 | # r.loadXml(sl, tag) 199 | # it.line = sl.a 200 | # it.column = sl.b 201 | 202 | # else: 203 | # genXmlLoader(DocPos, it, r, tag, newObjExpr = DocPos()) 204 | 205 | # proc writeXml*(w; it: DocPos, tag) = genXmlWriter(DocPos, it, w, tag) 206 | 207 | # # ~~~~ DocExtent ~~~~ # 208 | 209 | # proc loadXml*(r; it: var DocExtent, tag) = 210 | # genXmlLoader(DocExtent, it, r, tag, newObjExpr = DocExtent()) 211 | 212 | # proc writeXml*(w; it: DocExtent, tag) = 213 | # genXmlWriter(DocExtent, it, w, tag) 214 | 215 | # # ~~~~ DocIdent ~~~~ # 216 | 217 | # proc loadXml*(r; it: var DocIdent, tag) = 218 | # genXmlLoader(DocIdent, it, r, tag, newObjExpr = DocIdent()) 219 | 220 | # proc writeXml*(w; it: DocIdent, tag) = 221 | # genXmlWriter(DocIdent, it, w, tag) 222 | 223 | # # ~~~~ DocText ~~~~ # 224 | 225 | # proc loadXml*(r; it: var DocText, tag) = 226 | # genXmlLoader(DocText, it, r, tag, newObjExpr = 
DocText()) 227 | 228 | # proc writeXml*(w; it: DocText, tag) = 229 | # genXmlWriter(DocText, it, w, tag, 230 | # skipFieldWrite = ["rawDoc"], addClose = false) 231 | 232 | # for d in it.rawDoc: 233 | # w.xmlWrappedCData(d, "rawDoc") 234 | 235 | # w.dedent() 236 | # w.xmlEnd(tag) 237 | 238 | # # ~~~~ DocAdmonition ~~~~ # 239 | 240 | # proc loadXml*(r; it: var DocAdmonition, tag) = 241 | # genXmlLoader(DocAdmonition, it, r, tag, newObjExpr = DocAdmonition()) 242 | 243 | # proc writeXml*(w; it: DocAdmonition, tag) = 244 | # genXmlWriter(DocAdmonition, it, w, tag) 245 | 246 | # # ~~~~ DocMetaTag ~~~~ # 247 | 248 | # proc loadXml*(r; it: var DocMetatag, tag) = 249 | # genXmlLoader(DocMetatag, it, r, tag, newObjExpr = DocMetaTag()) 250 | 251 | # proc writeXml*(w; it: DocMetatag, tag) = 252 | # genXmlWriter(DocMetatag, it, w, tag) 253 | 254 | 255 | # proc loadXml*(r; it: var DocOccur, tag) = 256 | # genXmlLoader(DocOccur, it, r, tag, newObjExpr = DocOccur()) 257 | 258 | # proc writeXml*(w; it: DocOccur, tag) = 259 | # genXmlWriter(DocOccur, it, w, tag) 260 | 261 | 262 | # proc loadXml*(r; it: var DocId, tag) = 263 | # if r.atAttr(): 264 | # r.loadXml(it.id, "id") 265 | 266 | # else: 267 | # r.skipOpen(tag) 268 | # r.loadXml(it.id, "id") 269 | # r.skipCloseEnd() 270 | 271 | # proc writeXml*(w; it: DocId, tag) = 272 | # w.xmlOpen(tag) 273 | # w.xmlAttribute("id", $it.id) 274 | # w.xmlCloseEnd() 275 | 276 | 277 | # proc writeXml*(w; it: DocIdSet, tag) = 278 | # if it.len == 0: return 279 | # w.xmlStart(tag) 280 | # w.indent() 281 | # for id in it: 282 | # w.writeXml(id, "id") 283 | # w.dedent() 284 | # w.xmlEnd(tag) 285 | 286 | # proc loadXml*(r; it: var DocIdSet, tag) = 287 | # if not r[tag]: return 288 | # r.skipStart(tag) 289 | # while r["id"]: 290 | # var id: DocId 291 | # r.loadXml(id, "id") 292 | # it.incl id 293 | 294 | # r.skipEnd(tag) 295 | 296 | # proc loadXml*(r; it: var DocLinkPart, tag) = 297 | # genXmlLoader(DocLinkPart, it, r, tag, 298 | # newObjExpr = 
DocLinkPart(kind: kind)) 299 | 300 | 301 | 302 | 303 | # proc writeXml*(w; it: DocLinkPart, tag) = 304 | # genXmlWriter( 305 | # DocLinkPart, it, w, tag, 306 | # hasFieldsExpr = (it.kind in dekProcKinds)) 307 | 308 | # proc loadXml*(r; it: var DocLink, tag) = 309 | # genXmlLoader(DocLink, it, r, tag, newObjExpr = DocLink()) 310 | 311 | # proc writeXml*(w; it: DocLink, tag) = 312 | # genXmlWriter(DocLink, it, w, tag) 313 | 314 | # proc loadXml*(r; it: var DocType, tag) = 315 | # genXmlLoader(DocType, it, r, tag, newObjExpr = DocType(kind: kind)) 316 | 317 | # proc writeXml*(w; it: DocType, tag) = 318 | # genXmlWriter(DocType, it, w, tag) 319 | 320 | # proc loadXml*(r; it: var DocFile, tag) = 321 | # genXmlLoader(DocFile, it, r, tag, newObjExpr = DocFile()) 322 | 323 | # proc writeXml*(w; it: DocFile, tag) = 324 | # genXmlWriter(DocFile, it, w, tag) 325 | 326 | # proc writeXml*(w; it: DocDb, tag) = 327 | # w.xmlStart(tag) 328 | # for _, entry in it.top: 329 | # w.writeXml(entry, "test") 330 | 331 | # w.xmlEnd(tag) 332 | 333 | 334 | # proc loadNested*(r; db: var DocDb, tag; top: var DocEntry) = 335 | # var entry: DocEntry 336 | 337 | # genXmlLoader( 338 | # DocEntry, entry, r, tag, 339 | # skipFieldLoad = ["nested"], 340 | # extraAttrLoad = { "decl": r.next() }, 341 | # newObjExpr = DocEntry(kind: kind), 342 | # extraFieldLoad = { 343 | # "nested": ( 344 | # while r.atOpenStart() and r["nested"]: 345 | # loadNested(r, db, "nested", entry))}) 346 | 347 | # db.registerNested(top, entry) 348 | 349 | 350 | # proc loadXml*(r; it: var DocDb, tag) = 351 | # if isNil(it): 352 | # it = DocDb() 353 | 354 | # r.skipStart(tag) 355 | # while r["test"]: 356 | # r.loadNested(it, "test", (var top: DocEntry; top)) 357 | 358 | # r.skipEnd(tag) 359 | 360 | 361 | # proc writeXml*(w; it: DocEntry, tag) = 362 | # genXmlWriter( 363 | # DocEntry, it, w, tag, ["nested", "rawDoc"], false, 364 | # extraAttrWrite = (w.xmlAttribute("decl", true))) 365 | 366 | # w.indent() 367 | # for item in 
it.nested: 368 | # w.writeXml(it.db[item], "nested") 369 | 370 | # w.dedent() 371 | 372 | # w.xmlEnd(tag) 373 | 374 | # proc loadDbXml*( 375 | # dir: AbsDir, dbName: string, loadFiles: bool = false): DocDb = 376 | # var reader = newHXmlParser(dir /. dbName &. "hxde") 377 | # reader.loadXml(result, "dbmain") 378 | 379 | # if loadFiles: 380 | # for file in walkDir(dir, AbsFile, exts = @["hxda"]): 381 | # var inFile: DocFile 382 | # var reader = newHXmlParser(file) 383 | # reader.loadXml(inFile, "file") 384 | 385 | # proc writeDbXml*(db: DocDb, dir: AbsDir, dbName: string) = 386 | # block: 387 | # var writer = withExt(dir /. dbName, "hxde").newXmlWriter() 388 | # writer.writeXml(db, "dbmain") 389 | # writer.close() 390 | 391 | # for file in db.files: 392 | # let outFile = dir /. file.path.withExt("hxda").splitFile2().file 393 | # var writer = newXmlWriter(outFile) 394 | # writer.writeXml(file, "file") 395 | # writer.close() 396 | 397 | # when isMainModule: 398 | # let doc = DocEntry() 399 | 400 | # var w = newXmlWriter(newFileStream(stdout)) 401 | # w.writeXml(doc, "test") 402 | 403 | 404 | # import flatty 405 | # import hnimast 406 | 407 | # macro genFlattyWriter( 408 | # obj: typedesc, input, stream: untyped): untyped = 409 | 410 | # let 411 | # input = input.copyNimNode() 412 | # stream = stream.copyNimNode() 413 | # impl = getObjectStructure(obj) 414 | 415 | # let kindWrite = impl.mapItKindFields(input.newDot(path)): 416 | # newCall("toFlatty", stream, input.newDot(field)) 417 | 418 | # let reqInit = impl.mapItPlainFields(input.newDot(path)): 419 | # if field.isReqInit(): 420 | # newCall("toFlatty", stream, input.newDot(field)) 421 | 422 | # else: 423 | # newEmptyNode() 424 | 425 | # let fieldWrite = impl.mapItPlainFields(input.newDot(path)): 426 | # if field.isSkipField("IO") or field.isReqInit(): 427 | # newEmptyNode() 428 | 429 | # else: 430 | # newCall("toFlatty", stream, input.newDot(field)) 431 | 432 | # result = newStmtList(kindWrite, reqInit, fieldWrite) 
433 | 434 | 435 | # macro genFlattyReader(obj: typedesc, target, stream, index: untyped): untyped = 436 | # let 437 | # impl = getObjectStructure(obj) 438 | # target = target.copyNimNode() 439 | # stream = stream.copyNimNode() 440 | # index = index.copyNimNode() 441 | 442 | # var 443 | # declareKind = impl.getKindFields.mapIt(newVar(it.name, it.fldType)) 444 | # loadKind = newCaseStmt(stream.newCall("attrKey")) 445 | # newObject = impl.newCall() 446 | 447 | 448 | # result = newStmtList(declareKind) 449 | 450 | # echo result.repr 451 | 452 | 453 | 454 | # template castRead(s: string, i: var int, x: typed) = 455 | # x = cast[ptr typeof(x)](unsafeAddr s[i])[] 456 | # i += sizeof(x) 457 | 458 | # import std/times 459 | 460 | 461 | # proc fromFlatty[E: enum](s: string, i: var int, en: var set[E]) = 462 | # castRead(s, i, en) 463 | 464 | # proc fromFlatty(str: string, i: var int, cstr: var cstring) = 465 | # var buf: string 466 | # fromFlatty(str, i, buf) 467 | # cstr = buf.cstring 468 | 469 | # proc fromFlatty(str: string, i: var int, id: var DocId) = 470 | # castRead(str, i, id) 471 | 472 | # proc fromFlatty(str: string, i: var int, ns: var int) = 473 | # castRead(str, i, ns) 474 | 475 | # template castWrite(s: var string, obj: typed) = 476 | # s.setLen(s.len + sizeof(obj)) 477 | # cast[ptr typeof(obj)](addr s[s.len - sizeof(obj)])[] = obj 478 | 479 | # proc toFlatty[E: enum](s: var string, en: set[E]) = castWrite(s, en) 480 | # proc toFlatty(str: var string, cstr: cstring) = toFlatty(str, $cstr) 481 | # proc toFlatty(str: var string, id: DocId) = castWrite(str, id) 482 | # proc toFlatty(str: var string, ns: int) = castWrite(str, ns) 483 | 484 | # proc toFlatty(str: var string, ns: NanosecondRange) = 485 | # castWrite(str, ns) 486 | 487 | # template flattyRefWrite( 488 | # s: var string, entry: typed, writer: untyped): untyped = 489 | # if isNil(entry): 490 | # s.toFlatty(true) 491 | 492 | # else: 493 | # s.toFlatty(false) 494 | # writer 495 | 496 | # template 
flattyRefRead(s, pos, reader: untyped): untyped = 497 | # var isNil: bool 498 | # fromFlatty(s, pos, isNil) 499 | # if not isNil: 500 | # reader 501 | 502 | # import haxorg/[semorg, ast] 503 | 504 | # proc fromFlatty(s: string, i: var int, sem: var SemOrg) = 505 | # flattyRefRead(s, i, genFlattyReader(SemOrg, sem, s, i)) 506 | 507 | # proc toFlatty(s: var string, entry: DocEntry) = 508 | # flattyRefWrite(s, entry, genFlattyWriter(DocEntry, entry, s)) 509 | 510 | 511 | # import supersnappy 512 | 513 | # proc writeDbFlatty*(db: DocDb, dir: AbsDir, dbName: string) = 514 | # mkDir dir 515 | # writeFile(dir /. dbName, toFlatty(db).compress()) 516 | 517 | # proc loadDbFlatty*(dir: AbsDir, dbName: string): DocDb = 518 | # when false: 519 | # result = fromFlatty(readFile(dir /. dbName).uncompress(), DocDb) 520 | -------------------------------------------------------------------------------- /src/haxdoc/docentry_types.nim: -------------------------------------------------------------------------------- 1 | import 2 | haxorg/defs/defs_all, 3 | 4 | std/[tables, options, intsets, hashes], 5 | 6 | hmisc/other/oswrap, 7 | hmisc/types/hmap, 8 | 9 | hnimast/nimtraits/nimtraits 10 | 11 | type 12 | DocNode* = ref object of OrgUserNode 13 | 14 | DocEntryKind* = enum 15 | ## - NOTE :: Different procedure kinds are also used to describe 16 | ## operator implementations. 17 | # procedure kinds start 18 | dekProc ## \ 19 | ## Procedure definition 20 | 21 | dekFunc ## \ 22 | ## Function definition 23 | 24 | dekMacro ## Macro 25 | dekMethod ## Method 26 | dekTemplate ## \ 27 | ## Template - simple code substitution. 
28 | ## 29 | ## - NOTE :: C++ templates are mapped to `dekProc`, and macros are mapped 30 | ## to `dekMacros` 31 | 32 | dekIterator ## \ 33 | ## Iterator 34 | ## 35 | ## - NOTE :: C++ iterator classes are mapped to objects 36 | 37 | dekConverter ## User-defined implicit conversion 38 | dekSignal 39 | dekSlot 40 | # procedure kinds end 41 | 42 | dekParam ## Generic parameters 43 | dekArg ## Entry (function, procedure, macro, template) arguments 44 | 45 | dekInject ## Variable injected into the scope by template/macro 46 | ## instantiation. 47 | 48 | dekPragma ## Compiler-specific directives `{.pragma.}` in nim, 49 | ## `#pragma` in C++ and `#[(things)]` from rust. 50 | 51 | dekTrait # Added both traits and concepts because in nim you can have 52 | # `concept` and automatic trait-based. 53 | 54 | # new type kinds start 55 | dekBuiltin ## Builtin type, not defined using any other types 56 | dekObject 57 | dekClass 58 | dekStruct 59 | dekException ## Exception object 60 | dekDefect ## Nim defect object 61 | dekConcept ## General concept 62 | dekInterface 63 | dekTypeclass 64 | dekUnion 65 | dekEnum ## Enumeration 66 | dekEffect ## Side effect tag 67 | dekAlias ## Typedef 68 | dekRefAlias 69 | dekDistinctAlias ## strong typedef 70 | # new type kinds end 71 | 72 | 73 | # variable-like entries 74 | dekCompileDefine ## Compile-time `define` that might affect compilation 75 | ## of the program. 76 | 77 | dekGlobalConst ## Global immutable compile-time constant 78 | dekGlobalVar ## Global mutable variable 79 | dekGlobalLet ## Global immutable variable 80 | dekField ## object/struct field 81 | dekEnumField ## Enum field/constant 82 | # end 83 | 84 | dekNamespace ## Namespace 85 | dekGroup ## Documentable group of entries that is not otherwise grouped 86 | ## together by language constructs. 87 | 88 | dekTag ## Documentation tag 89 | 90 | 91 | dekModule ## Module (C header file, nim/python/etc. 
module) 92 | dekFile ## Global or local file 93 | dekDir ## System directory 94 | dekPackage ## System or programming language package (library). If 95 | ## present used as toplevel grouping element. 96 | 97 | dekImport ## 'modern' import semantics 98 | dekInclude ## C-style text-based include 99 | dekDepend ## Interpackage dependency relations 100 | 101 | 102 | dekEnv ## Environment variable 103 | dekShellCmd ## Shell command 104 | dekShellOption ## Shell command option (has value) or flag (no value) 105 | dekShellArg ## Positional shell argument 106 | dekShellSubCmd ## Shell subcommand 107 | 108 | 109 | dekSchema ## Serialization schema 110 | 111 | dekKeyword ## Language or macro DSL keyword 112 | 113 | # REVIEW 114 | dekErrorMsg 115 | dekWarningMsg 116 | dekHintMsg 117 | 118 | dekLibSym 119 | 120 | DocProcKind* = enum 121 | dpkRegular 122 | dpkOperator 123 | dpkConstructor 124 | dpkDestructor 125 | dpkMoveOverride 126 | dpkCopyOverride 127 | dpkAsgnOverride 128 | dpkPropertyGet 129 | dpkPropertySet 130 | dpkPredicate 131 | 132 | const 133 | dekProcKinds* = { dekProc .. dekSlot } 134 | dekNewtypeKinds* = { dekObject .. dekDistinctAlias } 135 | dekAliasKinds* = { dekTypeclass, dekAlias, dekDistinctAlias, 136 | dekRefAlias } 137 | dekStructKinds* = { 138 | dekObject, dekDefect, dekException, dekEffect, dekClass, dekStruct 139 | } 140 | dekAllKinds* = { low(DocEntryKind) .. 
high(DocEntryKind) } 141 | 142 | 143 | type 144 | DocTypeKind* = enum 145 | dtkNone ## Default type kind 146 | 147 | dtkIdent ## Single, non-generic (or non-specialized generic) identifier 148 | dtkGenericParam ## Generic parameter used in type, not resolved to any 149 | ## particular type 150 | dtkGenericSpec ## Generic object with one or more generic parameters 151 | dtkAnonTuple ## Anonymous (no named fields) tuple 152 | dtkProc ## Procedure 153 | dtkNamedTuple ## Tuple with named fields 154 | dtkRange ## Value range 155 | dtkVarargs ## Variadic arguments 156 | dtkValue ## Value (for generic parameters and typeof exressions) 157 | dtkTypeofExpr ## `typeof` for some value 158 | 159 | dtkString ## String input parameter (for CLI applications) 160 | 161 | dtkFile 162 | dtkDir 163 | 164 | DocOccurKind* = enum 165 | dokTypeDirectUse ## Direct use of non-generic type 166 | dokTypeAsParameterUse ## Use as a parameter in generic specialization 167 | dokTypeSpecializationUse ## Specialization of generic type using other 168 | ## types 169 | 170 | dokTypeAsArgUse 171 | dokTypeAsReturnUse 172 | dokTypeAsFieldUse 173 | dokTypeConversionUse 174 | 175 | dokUsage 176 | dokCall 177 | 178 | dokInheritFrom 179 | dokOverride 180 | dokInclude 181 | dokImport 182 | dokMacroUsage 183 | dokAnnotationUsage 184 | 185 | # local section start 186 | dokLocalUse ## Generic 'use' of local entry 187 | dokLocalWrite 188 | dokLocalRead 189 | 190 | 191 | # local declaration section start 192 | dokLocalArgDecl 193 | dokLocalVarDecl 194 | # local declarations section end 195 | # local section end 196 | 197 | dokGlobalWrite ## Asign value to global variable 198 | dokGlobalRead ## Non-asign form of global variable usage. Taking 199 | ## address and mutating, passing to function that accepts `var` 200 | ## parameter etc. would count as 'read' action. 
201 | dokGlobalDeclare 202 | 203 | dokFieldUse 204 | dokFieldSet 205 | dokEnumFieldUse 206 | 207 | dokFieldDeclare 208 | dokCallDeclare 209 | dokAliasDeclare 210 | dokObjectDeclare 211 | dokEnumDeclare 212 | dokEnumFieldDeclare 213 | 214 | dokDefineCheck 215 | 216 | const 217 | dokLocalKinds* = {dokLocalUse .. dokLocalArgDecl } 218 | dokLocalDeclKinds* = { dokLocalArgDecl .. dokLocalVarDecl } 219 | 220 | type 221 | DocOccur* = object 222 | ## Single occurence of documentable entry 223 | user* {.Attr.}: Option[DocId] ## For occurence of global documentable 224 | ## entry - lexically scoped parent (for function call - callee, for 225 | ## type - parent composition). For local occurence - type of the 226 | ## identifier (for local variables, arguments etc). 227 | case kind*: DocOccurKind ## Type of entry occurence 228 | of dokLocalKinds: 229 | localId* {.Attr.}: string 230 | withInit* {.Attr.}: bool ## For 'local decl' - whether identifier 231 | ## was default-constructed or explicitly initialized. 232 | 233 | else: 234 | refid* {.Attr.}: DocId ## Documentable entry id 235 | 236 | DocCodeSlice* = object 237 | line* {.Attr.}: int ## Code slice line /index/ 238 | endLine* {.Attr.}: Option[int] 239 | column* {.Attr.}: Slice[int] 240 | 241 | DocCodePart* = object 242 | ## Single code part with optional occurence link. 243 | slice*: DocCodeSlice ## Single-line slice of the code 244 | occur*: Option[DocOccur] ## 'link' to documentable entry 245 | 246 | DocCodeLine* = object 247 | lineHigh* {.Attr.}: int ## /max index/ (not just length) for target 248 | ## code line 249 | text*: string 250 | parts*: seq[DocCodePart] 251 | overlaps*: seq[DocCodePart] ## \ 252 | ## 253 | ## - WHY :: sometimes it is not possible to reliably determine extent 254 | ## of the identifier, which leads to potentially overlapping code 255 | ## segments. Determining 'the correct' one is hardly possible, so 256 | ## they are just dumped in the overlapping section. 
257 | covPasses*: Option[int] ## Merge code coverage reports with 258 | ## documentable database. 259 | 260 | DocCode* = object 261 | ## Block of source code with embedded occurence links. 262 | codeLines*: seq[DocCodeLine] 263 | 264 | DocTypeHeadKind* = enum 265 | dthGenericParam ## Unresolved generic parameter 266 | dthTypeclass ## Typeclass 267 | dthConcreteType ## Concrete resolved class 268 | 269 | DocIdentKind* = enum 270 | diValue ## Pass-by value function argument or regular identifier 271 | diPointer ## Identifier passed by pointer 272 | diMutReference ## Mutable reference 273 | diConstReference ## Immutable reference 274 | diSink ## rvalue/sink parameters 275 | 276 | DocIdent* = object 277 | ## Identifier. 278 | ## 279 | ## - WHY :: Callback itself is represented as a type, but it is also 280 | ## possible to have named arguments for callback arguments (though 281 | ## this is not mandatory). @field{entry} should only point to 282 | ## documentable entry of kind [[code:dekField]]. 283 | 284 | ident*: string ## Identifier name 285 | kind*: DocIdentKind ## 286 | identType*: DocType ## Identifier type 287 | value*: Option[string] ## Optional expression for initialization value 288 | entry*: DocId 289 | 290 | DocIdSet* = object 291 | ids*: IntSet 292 | 293 | DocId* = object 294 | id* {.Attr.}: Hash 295 | 296 | DocIdTableN* = object 297 | table*: Table[DocId, DocIdSet] 298 | 299 | DocEntryGroup* = ref object 300 | entries*: seq[DocEntry] 301 | nested*: seq[DocEntryGroup] 302 | 303 | DocLinkPart* = object 304 | ## Part of fully scoped document identifier. 305 | ## 306 | ## - DESIGN :: Format closely maps to 307 | ## [[code:haxorg//semorg.CodeLinkPart]] but represents *concrete 308 | ## path* to a particular documentable entry. Code link is a pattern, 309 | ## FullIdent is a path. 
310 | name* {.Attr.}: string 311 | id* {.Attr.}: DocId 312 | case kind*: DocEntryKind 313 | of dekProcKinds: 314 | procType*: DocType 315 | 316 | else: 317 | discard 318 | 319 | DocLink* = object 320 | ## Full scoped identifier for an entry 321 | docId*: DocId ## Cached identifier value 322 | parts*: seq[DocLinkPart] 323 | 324 | DocSelectorPart* = object 325 | name*: string 326 | expected*: set[DocEntryKind] 327 | procType*: DocType 328 | 329 | DocSelector* = object 330 | parts*: seq[DocSelectorPart] 331 | 332 | DocPragma* = object 333 | name* {.Attr.}: string 334 | entry* {.Attr.}: DocId 335 | args*: seq[DocCode] 336 | 337 | DocTypeUseKind* = enum 338 | ## Different kinds of type usage. 339 | dtukDefault ## Direct type use 340 | dtukPointerTo ## Untraced pointer to type 341 | dtukGcRefTo ## Traced pointer to type 342 | dtukByrefTo ## Reference to lvalue 343 | dtukRvalueTo ## Temporary (rvalue/sink) reference 344 | dtukConst 345 | dtukVolatile 346 | 347 | DocType* = ref object 348 | ## Single **use** of a type in any documentable context (procedure 349 | ## arguments, return types etc.). Plays structural role in 350 | ## documentation context - does not contain any additional information 351 | ## itself. 352 | name* {.Attr.}: string 353 | useKind* {.Attr.}: seq[DocTypeUseKind] ## Unwrap one or more layers of 354 | ## indirection in type usage, blurring distinction between `ptr ptr 355 | ## char` and `char` if needed. 356 | # REVIEW this makes queries like 'all procedures that accept 357 | # pointer-to-pointer-to-char' harder, but at the same time eliminate 358 | # the need to manually unwrap and process functions that work 359 | # with`*MyStruct` and `MyStruct`. Extra flexibility at the cost of 360 | # extra complexity. 
361 | case kind*: DocTypeKind 362 | of dtkIdent, dtkGenericSpec, dtkAnonTuple, dtkVarargs: 363 | head* {.Attr.}: DocId ## Documentation entry 364 | identKind* {.Attr.}: DocTypeHeadKind ## `head` ident kind 365 | genParams*: seq[DocType] 366 | 367 | of dtkGenericParam: 368 | paramName* {.Attr.}: string 369 | 370 | of dtkProc, dtkNamedTuple: 371 | returnType*: Option[DocType] 372 | arguments*: seq[DocIdent] 373 | pragmas*: seq[DocPragma] 374 | effects*: seq[DocId] 375 | raises*: seq[DocId] 376 | 377 | of dtkRange: 378 | rngStart*, rngEnd*: string 379 | 380 | of dtkValue, dtkTypeofExpr: 381 | value*: string 382 | 383 | of dtkNone: 384 | discard 385 | 386 | of dtkFile, dtkDir, dtkString: 387 | strVal*: string 388 | 389 | DocAdmonition* = ref object 390 | kind*: OrgBigIdentKind 391 | body*: SemOrg 392 | 393 | DocMetatag* = ref object 394 | kind*: OrgMetaTag 395 | body*: SemOrg 396 | 397 | DocPos* = object 398 | line* {.Attr.}: int 399 | column* {.Attr.}: int 400 | 401 | 402 | DocLocation* = object 403 | # FIXME extermely expensive structure to work with - each documentable 404 | # entry has at least *two* copies of almost the same string, and it is 405 | # also duplicated thousands of times for each file (due to number of 406 | # documentable entries). 
Must be replaced with FileId abd AbsFileId 407 | file* {.Attr.}: string 408 | absFile* {.Skip(IO).}: AbsFile 409 | pos* {.Attr.}: DocPos 410 | 411 | DocLocationMap* = object 412 | files*: Table[AbsFile, Map[int, 413 | seq[tuple[location: DocPos, link: DocLink]]]] 414 | 415 | DocExtent* = object 416 | start* {.Attr.}: DocPos 417 | finish* {.Attr.}: DocPos 418 | 419 | DocText* = object 420 | category* {.Attr.}: Option[string] 421 | docTags*: seq[string] 422 | docBrief*: SemOrg 423 | docBody*: SemOrg 424 | admonitions*: seq[DocAdmonition] 425 | metatags*: seq[DocMetatag] 426 | rawDoc*: seq[string] 427 | 428 | DocVisibilityKind* = enum 429 | dvkPrivate ## Not exported 430 | dvkInternal ## Exported, but only for internal use 431 | dvkPublic ## Exported, available for public use 432 | 433 | DocRequires* = object 434 | name* {.Attr.}: string 435 | version* {.Attr.}: string # TODO expand 436 | resolved* {.Attr.}: Option[DocID] 437 | 438 | DocEntry* = ref object 439 | location*: Option[DocLocation] 440 | extent*: Option[DocExtent] 441 | declHeadExtent*: Option[DocExtent] ## Source code extent for 442 | ## documentable entry 'head'. Points to single identifier - entry name 443 | ## in declaration. 444 | ## - WHY :: Used in sourcetrail 445 | nested*: seq[DocId] ## Nested documentable entries. Not all 446 | ## `DocEntryKind` is guaranteed to have one. 
447 | 448 | db* {.Skip(IO).}: DocDb ## Parent documentable entry database 449 | 450 | name* {.Attr.}: string 451 | visibility* {.Attr.}: DocVisibilityKind 452 | deprecatedMsg* {.Attr.}: Option[string] 453 | fullIdent*: DocLink ## Fully scoped identifier for a name 454 | 455 | docText*: DocText 456 | 457 | case kind*: DocEntryKind 458 | of dekPackage: 459 | version* {.Attr.}: string 460 | author* {.Attr.}: string 461 | license* {.Attr.}: string 462 | requires*: seq[DocRequires] 463 | 464 | of dekModule: 465 | imports*: DocIdSet 466 | exports*: DocIdSet 467 | 468 | of dekStructKinds: 469 | superTypes*: seq[DocId] 470 | 471 | of dekShellOption: 472 | isRequired*: bool 473 | optType*: Option[DocType] 474 | optRepeatRange*: Slice[int] 475 | 476 | of dekArg, dekField: 477 | identTypeStr* {.Attr.}: Option[string] 478 | identType*: Option[DocType] ## Argument type description 479 | identDefault*: Option[DocCode] ## Expression for argument default 480 | ## value. 481 | 482 | of dekAliasKinds: 483 | baseType*: DocType 484 | 485 | of dekProcKinds: 486 | procKind* {.Attr.}: DocProcKind 487 | wrapOf*: Option[string] 488 | dynlibOf*: Option[string] 489 | calls*: DocIdSet ## Procedures called by entry 490 | raises*: DocIdSet ## Full list of potential raises of a procedure 491 | effects*: DocIdSet ## All effects for procedure body 492 | raisesVia*: Table[DocId, DocIdSet] ## Mapping between particular 493 | ## raise and called procedure. Direct raises via `raise` statement 494 | ## are not listed here. 495 | raisesDirect*: DocIdSet 496 | effectsVia*: Table[DocId, DocIdSet] ## Effect -> called procMapping 497 | globalIO*: DocIdSet ## Global variables that procedure reads from 498 | ## or writes into. 
499 | 500 | else: 501 | discard 502 | 503 | DocFile* = object 504 | ## Processed code file 505 | path* {.Attr.}: AbsFile ## Absolute path to the original file 506 | body*: DocCode ## Full text with [[code:DocOccur][occurrence]] 507 | ## annotations 508 | moduleId* {.Attr.}: Option[DocId] 509 | 510 | DocLib* = object 511 | name*: string 512 | dir*: AbsDir 513 | 514 | 515 | DocDb* = ref object 516 | ## - DESIGN :: Two-layer mapping between full entry identifiers, their 517 | ## hashes and documentable entries. Hashes are also mapped to full 518 | ## identifiers using [[code:DocEntry.fullIdent]] field. 519 | 520 | # In order to resolve code links I must store all 'full identifiers' 521 | # somewhere, and it makes it easier to serialize if all elements are 522 | # resolved through iteger identifiers. This is slower but does not 523 | # require expensive `ref` graph reconstruction during serialization. 524 | entries*: Table[DocId, DocEntry] 525 | fullIdents*: OrderedTable[DocLink, DocId] 526 | top*: OrderedTable[DocLinkPart, DocEntry] 527 | files*: seq[DocFile] 528 | knownLibs*: seq[DocLib] 529 | currentTop*: DocEntry 530 | -------------------------------------------------------------------------------- /src/haxdoc/wrappers/mandoc/nimmandoc.nim: -------------------------------------------------------------------------------- 1 | import make_wrap 2 | import hmisc/other/oswrap 3 | import hmisc/algo/hseq_distance 4 | import hpprint 5 | import hmisc/algo/clformat 6 | 7 | const dir = sourceDir / "mandoc-1.14.5" 8 | const included = "-I" & $dir 9 | 10 | {.passc: included.} 11 | {.passc: "-lz".} 12 | {.passl: "-lz".} 13 | 14 | const compileFlags = "" 15 | 16 | {.compile(dir /. "compat_err.c", compileFlags).} 17 | {.compile(dir /. "mdoc_markdown.c", compileFlags).} 18 | {.compile(dir /. "tbl_term.c", compileFlags).} 19 | {.compile(dir /. "tbl_html.c", compileFlags).} 20 | {.compile(dir /. "compat_strlcat.c", compileFlags).} 21 | {.compile(dir /. 
"mandoc_aux.c", compileFlags).} 22 | {.compile(dir /. "term_ps.c", compileFlags).} 23 | {.compile(dir /. "man_html.c", compileFlags).} 24 | {.compile(dir /. "roff_term.c", compileFlags).} 25 | {.compile(dir /. "compat_getsubopt.c", compileFlags).} 26 | {.compile(dir /. "mdoc_validate.c", compileFlags).} 27 | {.compile(dir /. "msec.c", compileFlags).} 28 | {.compile(dir /. "compat_isblank.c", compileFlags).} 29 | {.compile(dir /. "man_macro.c", compileFlags).} 30 | {.compile(dir /. "compat_reallocarray.c", compileFlags).} 31 | {.compile(dir /. "att.c", compileFlags).} 32 | {.compile(dir /. "mandoc.c", compileFlags).} 33 | {.compile(dir /. "compat_strtonum.c", compileFlags).} 34 | {.compile(dir /. "dbm.c", compileFlags).} 35 | {.compile(dir /. "tbl_data.c", compileFlags).} 36 | {.compile(dir /. "mdoc_macro.c", compileFlags).} 37 | {.compile(dir /. "compat_strsep.c", compileFlags).} 38 | {.compile(dir /. "dbm_map.c", compileFlags).} 39 | {.compile(dir /. "tbl_layout.c", compileFlags).} 40 | {.compile(dir /. "man_validate.c", compileFlags).} 41 | {.compile(dir /. "mdoc.c", compileFlags).} 42 | {.compile(dir /. "mdoc_argv.c", compileFlags).} 43 | {.compile(dir /. "compat_strlcpy.c", compileFlags).} 44 | {.compile(dir /. "read.c", compileFlags).} 45 | {.compile(dir /. "mdoc_man.c", compileFlags).} 46 | {.compile(dir /. "mdoc_state.c", compileFlags).} 47 | {.compile(dir /. "eqn_term.c", compileFlags).} 48 | {.compile(dir /. "dba_write.c", compileFlags).} 49 | {.compile(dir /. "eqn.c", compileFlags).} 50 | {.compile(dir /. "arch.c", compileFlags).} 51 | {.compile(dir /. "mandoc_xr.c", compileFlags).} 52 | {.compile(dir /. "dba_read.c", compileFlags).} 53 | {.compile(dir /. "dba_array.c", compileFlags).} 54 | {.compile(dir /. "out.c", compileFlags).} 55 | {.compile(dir /. "tag.c", compileFlags).} 56 | {.compile(dir /. "st.c", compileFlags).} 57 | {.compile(dir /. "compat_recallocarray.c", compileFlags).} 58 | {.compile(dir /. "dba.c", compileFlags).} 59 | {.compile(dir /. 
"preconv.c", compileFlags).} 60 | {.compile(dir /. "mandoc_ohash.c", compileFlags).} 61 | {.compile(dir /. "compat_mkdtemp.c", compileFlags).} 62 | {.compile(dir /. "compat_vasprintf.c", compileFlags).} 63 | {.compile(dir /. "eqn_html.c", compileFlags).} 64 | {.compile(dir /. "tbl_opts.c", compileFlags).} 65 | {.compile(dir /. "compat_strcasestr.c", compileFlags).} 66 | {.compile(dir /. "compat_fts.c", compileFlags).} 67 | {.compile(dir /. "mdoc_html.c", compileFlags).} 68 | {.compile(dir /. "mandoc_msg.c", compileFlags).} 69 | {.compile(dir /. "term_ascii.c", compileFlags).} 70 | {.compile(dir /. "man.c", compileFlags).} 71 | {.compile(dir /. "term_tab.c", compileFlags).} 72 | {.compile(dir /. "term.c", compileFlags).} 73 | {.compile(dir /. "tree.c", compileFlags).} 74 | {.compile(dir /. "tbl.c", compileFlags).} 75 | {.compile(dir /. "chars.c", compileFlags).} 76 | {.compile(dir /. "compat_stringlist.c", compileFlags).} 77 | {.compile(dir /. "man_term.c", compileFlags).} 78 | {.compile(dir /. "roff.c", compileFlags).} 79 | {.compile(dir /. "roff_validate.c", compileFlags).} 80 | {.compile(dir /. "compat_ohash.c", compileFlags).} 81 | {.compile(dir /. "compat_progname.c", compileFlags).} 82 | {.compile(dir /. "mansearch.c", compileFlags).} 83 | {.compile(dir /. "lib.c", compileFlags).} 84 | {.compile(dir /. "roff_html.c", compileFlags).} 85 | {.compile(dir /. "mdoc_term.c", compileFlags).} 86 | {.compile(dir /. "compat_getline.c", compileFlags).} 87 | {.compile(dir /. "manpath.c", compileFlags).} 88 | {.compile(dir /. "html.c", compileFlags).} 89 | {.compile(dir /. 
"compat_strndup.c", compileFlags).} 90 | 91 | {.push warning[UnusedImport]: off.} 92 | 93 | import 94 | ./mansearch, 95 | ./mandoc_parse, 96 | ./mandoc_aux, 97 | ./mandoc, 98 | ./roff, 99 | ./mdoc, 100 | ./manconf, 101 | ./main, 102 | ./tbl 103 | 104 | import 105 | std/[strformat, strutils, options, sequtils], 106 | hmisc/types/colorstring, 107 | hmisc/[hdebug_misc, helpers] 108 | 109 | export 110 | mansearch, mandoc_parse, mandoc_aux, mandoc, 111 | roff, mdoc, manconf, main, tbl 112 | 113 | 114 | type 115 | PRoffNode* = ptr RoffNode 116 | PTblSpan* = ptr TblSpan 117 | PTblDat* = ptr TblDat 118 | 119 | func kind*(node: PRoffNode): RoffType = node.cxType.toRoffType() 120 | func tokKind*(node: PRoffNode): RoffTok = node.tok.toRoffTok() 121 | 122 | iterator items*(dat: PTblDat): PTblDat = 123 | var dat = dat 124 | while not isNil(dat): 125 | yield dat 126 | dat = dat.next 127 | 128 | iterator pairs*(dat: PTblDat): (int, PTblDat) = 129 | var cnt = 0 130 | for dat in items(dat): 131 | yield (cnt, dat) 132 | inc cnt 133 | 134 | template lenEqImpl(inNode: typed, val: typed): untyped = 135 | var result = false 136 | var found = false 137 | var over = false 138 | for idx, node in pairs(inNode): 139 | if idx == val: 140 | found = true 141 | 142 | if idx > val: 143 | # Overshot 144 | over = true 145 | break 146 | 147 | if over: 148 | result = false 149 | 150 | elif not found and val == 0: 151 | result = true 152 | 153 | else: 154 | result = found 155 | 156 | result 157 | 158 | func len*(dat: PTblDat): int = 159 | for _ in items(dat): 160 | inc result 161 | 162 | 163 | func lenEq*(dat: PTblDat, val: int): bool = lenEqImpl(dat, val) 164 | 165 | iterator items*(node: PRoffNode): PRoffNode = 166 | if not isNil(node.child): 167 | var next = node.child 168 | while not isNil(next): 169 | yield next 170 | next = next.next 171 | 172 | iterator pairs*(node: PRoffNode): (int, PRoffNode) = 173 | var cnt = 0 174 | for sub in items(node): 175 | yield (cnt, sub) 176 | inc cnt 177 | 178 | 
func len*(node: PRoffNode): int = 179 | ## Get number of subnodes for `node`. 180 | ## 181 | ## - NOTE :: Due to how underlying AST subnodes are stored this 182 | ## operation takes `O(n)` 183 | for sub in items(node): 184 | inc result 185 | 186 | 187 | func lenEq*(node: PRoffNode, val: int): bool = 188 | ## Faster check for node len. Using `len(node) == 0` will make it compute 189 | ## full length of the node, but `node.lenEq(0)` will only traverse as 190 | ## much nodes as needed 191 | lenEqImpl(node, val) 192 | 193 | 194 | func `[]`*(node: PRoffNode, idx: int): PRoffNode = 195 | for cnt, sub in pairs(node): 196 | if cnt == idx: 197 | return sub 198 | 199 | func `[]`*(node: PRoffNode, slice: Slice[int]): seq[PRoffNode] = 200 | for cnt, sub in pairs(node): 201 | if cnt in slice: 202 | result.add sub 203 | 204 | func `[]`*(node: PRoffNode, slice: HSlice[int, BackwardsIndex]): seq[PRoffNode] = 205 | for cnt, sub in pairs(node): 206 | if slice.a <= cnt: 207 | result.add sub 208 | 209 | for _ in 0 ..< slice.b.int: 210 | discard result.pop 211 | 212 | func strVal*(node: PRoffNode): string = $node.cxString 213 | 214 | func mandocEscape*(str: string): Option[tuple[kind: MandocEsc, escLen: int]] = 215 | ## Parse optional leading roff escape character and return it's class 216 | ## together with total number of characters for escape, including 217 | ## backslash and identifier. 218 | ## 219 | ## - @ex{("\fItext") -> some((mscFontBi, 3))} - italic font, three characters 220 | ## for the whole escape sequence. 221 | if str.len < 2 or str[0] != '\\': 222 | return 223 | 224 | var 225 | str = str 226 | pStart: cstring = cast[cstring](addr str[1]) 227 | 228 | pFinish: ptr[cstring] 229 | lenVal: cint 230 | 231 | let esc = mandocEscape( 232 | cast[cstringarray](addr pStart), 233 | cast[cstringarray](addr pFinish), 234 | addr lenVal 235 | ) 236 | 237 | return some((toMandocEsc( 238 | # FIXME font bold has number 7. 
Wrapped enum has the same order, but 239 | # for some reason all values are off-by-one, and this hack is needed to 240 | # differentiate between enum kinds. 241 | cast[MandocEscC](cast[int](esc))), int(lenVal) + 2)) 242 | 243 | type 244 | RoffText* = object 245 | str*: string 246 | escape*: Option[MandocEsc] 247 | escapeText*: string 248 | pos*: int 249 | 250 | proc unescapeRoff*(str: string): string = 251 | var idx = 0 252 | while idx < str.len: 253 | if idx == str.high: 254 | result.add str[idx] 255 | 256 | elif str[idx] == '\\': 257 | case str[idx + 1]: 258 | of {'|', '&'}: inc idx 259 | else: 260 | inc idx 261 | result.add str[idx] 262 | 263 | else: 264 | result.add str[idx] 265 | 266 | inc idx 267 | 268 | 269 | proc splitRoffText*(str: string): seq[RoffText] = 270 | var 271 | pos = 0 272 | start = 0 273 | while pos < str.len: 274 | if str[pos] == '\\': 275 | if pos < str.high and str[pos + 1] in {'-', '(', ',', '|', '&'}: 276 | inc pos 277 | 278 | else: 279 | if start != pos: 280 | result.add RoffText( 281 | str: str[start ..< pos].unescapeRoff(), pos: start) 282 | 283 | let escapeStart = pos 284 | let (kind, escLen) = mandocEscape(str[pos .. 
^1]).get() 285 | 286 | let escapeText = str[ 287 | (escapeStart + 1) ..< min(str.len, escapeStart + escLen)] 288 | 289 | result.add RoffText( 290 | escape: some(kind), 291 | escapeText: escapeText, 292 | pos: escapeStart 293 | ) 294 | 295 | pos += escLen 296 | start = pos 297 | 298 | else: 299 | inc pos 300 | 301 | if start != pos: 302 | result.add RoffText( 303 | str: str[start ..< pos].unescapeRoff(), pos: start) 304 | 305 | 306 | proc roffTextFormat*(str: string, colored: bool = true): string = 307 | let split = splitRoffText(str) 308 | for idx, elem in split: 309 | if result.len > 0 and result[^1] != ' ': 310 | result &= " " 311 | 312 | if elem.escape.isSome(): 313 | result &= &"({toGreen($elem.escape.get())}" 314 | if elem.escapeText.len > 0: 315 | result &= " " & elem.escapeText 316 | 317 | result &= "))" 318 | 319 | if elem.str.len > 0: 320 | if result.len > 0 and result[^1] != ' ': 321 | result &= " " 322 | 323 | result &= &"\"{elem.str.toYellow(colored)}\"" 324 | 325 | 326 | proc treeRepr*(node: PRoffNode, colored: bool = true, level: int = 0): string 327 | proc treeRepr*(tbl: PTblSpan, colored: bool = true, level: int = 0): string = 328 | let cols = tbl.opts.cols 329 | let pref = " ".repeat(level) 330 | result = pref & "\n" 331 | 332 | for idx, dat in tbl.first: 333 | if idx > 0: 334 | result &= "\n" 335 | result &= &"{pref}[{idx}] {roffTextFormat($dat.cxString, colored)}" 336 | 337 | # var row = tbl.layout 338 | # while not isNil(row): 339 | # result &= pref & "ROW\n" & pref & " " 340 | # var col = row.first 341 | # while not isNil(col): 342 | # result &= &"[{col.wstr}]" 343 | # # result &= treeRepr() 344 | # col = col.next 345 | 346 | # result &= "\n" 347 | 348 | # row = row.next 349 | 350 | proc treeRepr*(node: PRoffNode, colored: bool = true, level: int = 0): string = 351 | proc aux(node: PRoffNode, level: int): string = 352 | let pref = " ".repeat(level) 353 | result = &"{pref}{node.kind} [{node.line}:{node.pos}]" 354 | # if node. 
355 | case node.kind: 356 | of rtText: 357 | let str = node.strVal() 358 | if '\n' in str: 359 | result &= "\n" 360 | result &= str.indent(level * 2 + 2).toYellow(colored) 361 | 362 | else: 363 | result &= " " & roffTextFormat(str, colored) 364 | 365 | of rtElem, rtBlock, rtHead, rtBody: 366 | result &= " " & toCyan($node.tokKind()) 367 | 368 | of rtTbl: 369 | if node.span.first.lenEq(0): 370 | return "" 371 | 372 | result &= treeRepr(node.span, colored, level + 1) 373 | 374 | else: 375 | discard 376 | 377 | result &= "\n" 378 | for sub in items(node): 379 | result &= aux(sub, level + 1) 380 | 381 | return aux(node, level) 382 | 383 | type 384 | NRoffNodeKind* = enum 385 | rnkText 386 | rnkSection 387 | rnkStmtList 388 | rnkBlock 389 | rnkComment 390 | rnkParagraph 391 | rnkEmptyNode 392 | 393 | rnkFontBold 394 | rnkFontItalic 395 | rnkFontRoman 396 | rnkFontCw 397 | rnkFontMono 398 | rnkFontNf 399 | 400 | rnkIndented 401 | rnkRelativeInset 402 | rnkTaggedParagraph 403 | 404 | 405 | NRoffNode* = ref object 406 | line*: int 407 | column*: int 408 | case kind*: NRoffNodeKind 409 | of rnkText: 410 | textVal*: string 411 | 412 | else: 413 | subnodes*: seq[NRoffNode] 414 | 415 | import std/sets 416 | 417 | 418 | func treeRepr*( 419 | nr: NRoffNode, opts: HDisplayOpts = defaultHDisplay): string = 420 | 421 | var visited: HashSet[int] 422 | func aux(n: NRoffNode, level: int): string = 423 | if isNil(n): 424 | return " ".repeat(level) & toRed("", opts.colored) 425 | 426 | if cast[int](n) notin visited: 427 | visited.incl cast[int](n) 428 | 429 | else: 430 | return " ".repeat(level) & toBlue("", opts.colored) 431 | 432 | result = " ".repeat(level) & hShow(n.kind, opts) & " " & 433 | hShow(n.line .. 
# NOTE(review): tail of a proc whose head lies outside this chunk; kept
# verbatim — do not restyle without seeing the full definition.
n.column) & " "
    case n.kind:
      of rnkText:
        result &= hShow(n.textVal, opts.withIt((
          it.indent = level * 2 + 2)))

      else:
        for sub in n.subnodes:
          result &= "\n" & aux(sub, level + 1)

  return aux(nr, 0)

func newTree*(kind: NRoffNodeKind, subnodes: varargs[NRoffNode]):
    NRoffNode =
  ## Construct an `NRoffNode` of `kind`, adopting `subnodes` as children.
  result = NRoffNode(kind: kind)
  for sub in subnodes:
    result.subnodes.add sub

func newTree*(kind: NRoffNodeKind, str: string):
    NRoffNode =
  ## Construct a leaf node of `kind` carrying `str` as its text payload.
  result = NRoffNode(kind: kind)
  result.textVal = str

func add*(r: var NRoffNode, other: NRoffNode | seq[NRoffNode]) =
  ## Append `other` (one node, or every node of a sequence) to `r`'s children.
  r.subnodes.add other

func len*(n: NRoffNode): int =
  ## Number of direct children.
  n.subnodes.len

func `[]`*(n: NRoffNode, idx: int | BackwardsIndex): NRoffNode =
  ## Direct child at `idx`; `^i` backwards indexing is supported.
  n.subnodes[idx]

iterator items*(n: NRoffNode): NRoffNode =
  ## Yield each direct child in order.
  for sub in n.subnodes:
    yield sub

iterator pairs*(n: NRoffNode): (int, NRoffNode) =
  ## Yield `(index, child)` for each direct child.
  for idx, sub in pairs(n.subnodes):
    yield (idx, sub)

func mergeNode*(roff: var NRoffNode, other: NRoffNode, sep: char = '\n') =
  ## Add `other` as a child of `roff`. When both the last child and `other`
  ## are text nodes they are fused into a single text node joined by `sep`
  ## instead of growing the child list.
  let fuse =
    roff.subnodes.len > 0 and
    other.kind == rnkText and
    roff.subnodes[^1].kind == rnkText

  if fuse:
    roff.subnodes[^1].textVal &= $sep & other.textVal

  else:
    roff.add other

func toNRoffText*(text: string, line, column: int): NRoffNode =
  ## Convert an escaped roff text fragment into an `NRoffNode` tree, turning
  ## font-switch escapes into nested formatting nodes. `line`/`column` are
  ## propagated to the produced nodes for diagnostics.
  let parts = text.splitRoffText()
  if parts.len == 1:
    # Fast path: no escapes to structure — a single unescaped text leaf.
    return NRoffNode(
      kind: rnkText, textVal: text.unescapeRoff(), line: line, column: column)

  # Stack of layers of `(pending, node)` pairs; a `pending` formatting node
  # is still waiting for its content to be closed by a font-roman escape.
  var stack: seq[seq[tuple[
    pending: bool, node: NRoffNode]]] = @[@[(true, rnkParagraph.newTree())]]

  var column = column

  proc pushNode(node: NRoffNode) =
    # Non-text nodes open a new layer; text attaches to the current one.
    let pending = node.kind != rnkText
    node.line = line
    node.column = column
    if stack.len > 0 and stack.last2().pending:
      stack.add @[@[(pending, node)]]

    else:
      stack.last().add (pending, node)

  proc closeLayer() =
    # Fold the topmost layer into the node below it (merging adjacent text),
    # or re-attach it when there is no pending parent.
    let layer = stack.pop()
    if (
      stack.len > 0 and
      stack.last().len > 0 and
      stack.last2.pending
    ):
      for it in layer:
        stack.last2.node.mergeNode(it.node, ' ')

      stack.last2.pending = false

    elif stack.len == 0:
      stack.add @[layer]

    else:
      stack.last.add layer

  for part in parts:
    column = part.pos
    if part.escape.isSome():
      case part.escape.get():
        of mscFontBold: pushNode rnkFontBold.newTree()
        of mscFontItalic: pushNode rnkFontItalic.newTree()
        of mscFontCw: pushNode rnkFontCw.newTree()
        of mscSpecial: pushNode rnkText.newTree("\\")
        of mscIgnore: discard
        of mscFontRoman:
          # Return to roman font closes the current formatting layer.
          closeLayer()

        else:
          raise newImplementKindError(
            part.escape.get(), line, column)

    else:
      pushNode rnkText.newTree(part.str)

  # Flush any layers left open by unbalanced font escapes.
  while stack.len > 1:
    closeLayer()

  result = stack[0][0].node
  if result.len == 1:
    # Unwrap a paragraph that ended up holding a single node.
    return result[0]

func toNRoffNode*(roff: PRoffNode): NRoffNode =
  ## Convert a raw mandoc parse node (`PRoffNode`) into the normalized
  ## `NRoffNode` representation. A `nil` input yields an empty node; source
  ## line/column are copied onto the result.
  if isNil(roff):
    return rnkEmptyNode.newTree()

  case roff.kind:
    of rtRoot, rtBody:
      result = rnkStmtList.newTree()
      for sub in roff:
        let node = toNRoffNode(sub)
        if not isNil(node):
          result.mergeNode node

    of rtBlock:
      if roff[0].kind == rtHead and
         roff[1].kind == rtBody:

        case roff.tokKind:
          of rtManSh, rtManSs, rtManTp:
            # `.SH`/`.SS`/`.TP` — heading plus body become a section.
            result = rnkSection.newTree(
              roff[0][0].toNRoffNode(),
              roff[1].toNRoffNode()
            )

          of rtManPp:
            # `.PP` — plain paragraph; only the body matters.
            result = roff[1].toNRoffNode()

          of rtManIp:
            # `.IP` "Set an indented paragraph with an optional tag."
            result = rnkIndented.newTree(roff[1].toNRoffNode)

          of rtManRs:
            # `.RS` Start a new relative inset level, moving the left
            # margin right
            result = rnkRelativeInset.newTree(roff[1].toNRoffNode())

          else:
            raise newImplementKindError(
              roff.tokKind(), roff.line, roff.pos)

      else:
        result = rnkBlock.newTree()
        for sub in roff:
          let node = toNRoffNode(sub)
          if not isNil(node):
            result.mergeNode node

    of rtHead:
      result = rnkSection.newTree(roff[0].toNRoffNode())

    of rtText:
      result = toNRoffText(roff.strVal(), roff.line, roff.pos)

    of rtComment:
      result = rnkComment.newTree()

    of rtElem:
      case roff.tokKind():
        of rtManB: result = rnkFontBold.newTree(roff[0].toNRoffNode())
        of rtRoffBr, rtManPd: discard
        of rtRoffSp: result = rnkText.newTree(" ")
        of rtRoffFt: result = rnkFontMono.newTree(roff[0].toNRoffNode())
        of rtRoffNf: result = rnkFontNf.newTree(roff[0].toNRoffNode())
        of rtRoffFi: result = rnkFontItalic.newTree(roff[0].toNRoffNode())
        else:
          raise newImplementKindError(roff.tokKind, roff.treeRepr())

    else:
      raise newImplementKindError(roff, roff.treeRepr())

  if isNil(result):
    result = rnkEmptyNode.newTree()

  result.line = roff.line
  result.column = roff.pos

proc parseNRoff*(file: AbsFile): NRoffNode =
  ## Parse the manpage `file` with the wrapped mandoc parser and convert the
  ## first parse result into an `NRoffNode` tree.
  mcharsAlloc()
  var mp = mparseAlloc(
    toCInt({mpSO, mpUTF8, mpLatiN1, mpValidate}),
    mdosOther,
    "linux".cstring
  )

  var fd = mp.mparseopen(file.string.cstring)
  mparsereadfd(mp, fd, file.string.cstring)
  var meta = mparseresult(mp)
  # NOTE(review): neither `mp` nor `fd` is released here — confirm whether
  # the mandoc wrapper exposes a free/close call that should be used.
  return meta.first.tonroffnode()

proc getManpageDirs*(): seq[AbsDir] =
  ## Standard system manpage locations: sections 0–8 plus `n`, in order.
  const sections = ["0", "1", "2", "3", "4", "5", "6", "7", "8", "n"]
  for s in sections:
    result.add AbsDir("/usr/share/man/man" & s)

proc findManpage*(
    name: string, extensions: seq[GitGlob] = @[**"?.gz", **"?"]): AbsFile =
  ## Locate the manpage for `name` under the standard directories, matching
  ## plain or gzip-compressed single-character section extensions.
  findFile(getManpageDirs(), name, extensions)
--------------------------------------------------------------------------------