├── .github
└── workflows
│ ├── publish.yml
│ └── test.yml
├── .gitignore
├── LICENSE
├── Makefile
├── README.md
├── base.html
├── doclog.css
├── doclog.js
├── doclog.sh
├── flake.lock
├── flake.nix
├── footer.html
├── index.html
├── learn.html
├── main.pl
├── nav.html
├── page.html
├── scryer-test.config.pl
├── scryer.config.pl
└── watch.sh
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 | on:
3 | schedule:
4 | - cron: '33 3 * * *'
5 | workflow_dispatch:
6 |
7 | jobs:
8 | test:
9 | runs-on: ubuntu-22.04
10 | steps:
11 | - name: Install Netlify CLI
12 | run: npm install -g netlify-cli@16.0.2
13 | - name: Checkout
14 | uses: actions/checkout@v4
15 | - name: Checkout Scryer Prolog
16 | uses: actions/checkout@v4
17 | with:
18 | repository: mthom/scryer-prolog
19 | path: scryer-prolog
20 | - name: Compile Scryer Prolog
21 | run: cargo build --release
22 | working-directory: scryer-prolog
23 | - name: Install Scryer Prolog
24 | run: sudo cp scryer-prolog/target/release/scryer-prolog /usr/bin/scryer-prolog
25 | - name: Install Dependencies
26 | run: make setup
27 | - name: Generate docs for https://www.scryer.pl
28 | run: make
29 | - name: Upload site
30 | run: netlify deploy --prod --dir=output
31 | env:
32 | NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
33 | NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
34 |
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 | on: [push]
3 |
4 | jobs:
5 | test:
6 | runs-on: ubuntu-22.04
7 | steps:
8 | - name: Checkout
9 | uses: actions/checkout@v4
10 | - name: Checkout Scryer Prolog
11 | uses: actions/checkout@v4
12 | with:
13 | repository: mthom/scryer-prolog
14 | path: scryer-prolog
15 | - name: Compile Scryer Prolog
16 | run: cargo build --release
17 | working-directory: scryer-prolog
18 | - name: Install Scryer Prolog
19 | run: sudo cp scryer-prolog/target/release/scryer-prolog /usr/bin/scryer-prolog
20 | - name: Install Dependencies
21 | run: make setup
22 | - name: Generate docs for Scryer Prolog
23 | run: make
24 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Local Netlify folder
2 | .netlify
3 | output
4 | marquete
5 | teruel
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2022, Adrián Arroyo Calle
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: gen-docs-scryer
2 | gen-docs-scryer:
3 | bash doclog.sh ./scryer-prolog ./output
4 |
5 | .PHONY: clean
6 | clean:
7 | rm -rf output
8 |
9 | .PHONY: upload
10 | upload:
11 | netlify deploy --prod
12 |
13 | .PHONY: setup
14 | setup:
15 | rm -rf teruel
16 | git clone --depth 1 --branch v1.0.1 https://github.com/aarroyoc/teruel
17 | rm -rf djota
18 | git clone --depth 1 --branch v0.3.3 https://github.com/aarroyoc/djota
19 | rm -rf scryer-prolog
20 | git clone --depth 1 https://github.com/mthom/scryer-prolog
21 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DocLog
2 |
3 | Create documentation from your Prolog code
4 |
5 | Example, Scryer Prolog documentation: https://www.scryer.pl
6 |
7 | ## How to document your code?
8 |
9 | There are two kind of comments in DocLog: module and predicate.
10 |
11 | Each file/module has the option to write a module comment. It will be displayed at the beginning of the page. You can use [Djot](https://djot.net/) inside the comment. The syntax is:
12 | ```
13 | /**
14 | COMMENT
15 | MORE COMMENT
16 | */
17 | ```
18 |
19 | Predicate comments start with %% and they're followed by N % comments. Of those lines, the first line comment should be empty. You should indicate in the first line, the name of the predicate, var names and modes. Then you can use [Djot](https://djot.net/) to explain the rest of the predicate.
20 |
21 | ```
22 | %% append(L0, L1, L)
23 | %
24 | % L is L0 and L1 appended together
25 | ```
26 |
27 | ## Using Doclog
28 |
29 | First, clone the repo:
30 |
31 | ```
32 | $ git clone https://github.com/aarroyoc/doclog
33 | $ cd doclog
34 | ```
35 |
36 | Then, install the dependencies:
37 |
38 | ```
39 | $ make setup
40 | ```
41 |
42 | After that, you must create a configuration file called `doclog.config.pl`. This file will contain several options required to document your project, and must be in your source directory. Take a look at `scryer.config.pl` for an example file.
43 |
44 | With your config file, you can execute Doclog:
45 |
46 | ```
47 | ./doclog.sh SOURCE_FOLDER OUTPUT_FOLDER
48 | ```
49 |
50 | And wait for the docs to be generated!
51 |
52 | While developing, it might be usefull to rebuild everytime something in the SOURCE\_FOLDER changed. You can do so, by starting this command:
53 |
54 | ```
55 | ./watch.sh SOURCE_FOLDER OUTPUT_FOLDER
56 | ```
57 |
--------------------------------------------------------------------------------
/base.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | {% block title %}{% endblock %}
5 |
6 |
7 |
8 |
9 |
14 |
15 |
16 | Learn
17 | {{ nav_learn }}
18 | Libraries
19 | {{ nav_lib }}
20 |
21 |
22 | {% block main %}{% endblock %}
23 |
24 |
25 | {{ footer }}
26 |
27 |
28 |
29 |
--------------------------------------------------------------------------------
/doclog.css:
--------------------------------------------------------------------------------
1 | body {
2 | font-family: sans-serif;
3 | margin: 1rem;
4 | margin-top: auto;
5 | }
6 |
7 | .mainContent {
8 | display: grid;
9 | grid-template-columns: 2fr 6fr;
10 | grid-column-gap: 20px;
11 | }
12 |
13 | nav {
14 | max-width: calc(80rem / 6 * 1.5);
15 | word-wrap: break-word;
16 | }
17 |
18 | main {
19 | max-width: calc(80rem / 6 * 5);
20 | }
21 |
22 | .moduleDescription {
23 | margin: 2rem;
24 | }
25 |
26 | .predicates{
27 | margin: 1rem;
28 | }
29 |
30 | .predicate > h4 {
31 | background-color: #00007f;
32 | color: white;
33 | padding-left: 10px;
34 | }
35 | .predicate > div {
36 | margin: 0.5rem;
37 | }
38 |
39 | .navlist {
40 | list-style-type: none;
41 | }
42 |
43 | .mainContent > nav > h4 {
44 | padding-left: 1.5rem;
45 | }
46 |
47 | footer {
48 | text-align: center;
49 | }
50 |
51 | .topbar {
52 | display: flex;
53 | justify-content: space-between;
54 | align-items: center;
55 | border-bottom: solid #00007f 10px;
56 | min-height: 4rem;
57 | }
58 |
59 | .topbarLink {
60 | text-decoration: none;
61 | color: black;
62 | font-size: 2rem;
63 | font-weight: bold;
64 | }
65 |
66 | table {
67 | border: 2px solid #00007F;
68 | border-radius: 0.25rem;
69 | border-collapse: collapse;
70 | }
71 |
72 | tbody tr:nth-child(odd) {
73 | background-color: #DFDFFF;
74 | }
75 |
76 | tbody tr:nth-child(even) {
77 | background-color: #EFEFFF;
78 | }
79 |
80 | td {
81 | padding-left: 10px;
82 | padding-right: 10px;
83 | }
84 |
--------------------------------------------------------------------------------
/doclog.js:
--------------------------------------------------------------------------------
1 | window.addEventListener("load", () => {
2 | const searchBox = document.getElementById("search");
3 | const predicates = document.getElementById("predicates");
4 |
5 | fetch("/search-index.json")
6 | .then((res) => res.json())
7 | .then((data) => {
8 | for(let predicate of data) {
9 | let node = document.createElement("option");
10 | node.value = predicate.predicate;
11 | predicates.appendChild(node);
12 | }
13 |
14 | searchBox.oninput = () => {
15 | for(let predicate of data) {
16 | if(searchBox.value === predicate.predicate) {
17 | window.location.href = predicate.link;
18 | }
19 | }
20 | };
21 | });
22 | });
23 |
--------------------------------------------------------------------------------
/doclog.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | DOCLOG=$(realpath $(dirname $0))
4 |
5 | case "$(uname -s)" in
6 | MINGW*)
7 | SOURCE=$(cygpath -w $(realpath $1) | sed -e 's/\\/\\\\/g');
8 | OUTPUT=$(cygpath -w $(realpath $2) | sed -e 's/\\/\\\\/g');;
9 | *)
10 | SOURCE=$(realpath $1);
11 | OUTPUT=$(realpath $2);;
12 | esac
13 |
14 | cd $DOCLOG
15 | scryer-prolog -g "run(\"$SOURCE\", \"$OUTPUT\")." -g 'halt' main.pl
16 | cd -
17 |
--------------------------------------------------------------------------------
/flake.lock:
--------------------------------------------------------------------------------
1 | {
2 | "nodes": {
3 | "flake-utils": {
4 | "inputs": {
5 | "systems": "systems"
6 | },
7 | "locked": {
8 | "lastModified": 1726560853,
9 | "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
10 | "owner": "numtide",
11 | "repo": "flake-utils",
12 | "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
13 | "type": "github"
14 | },
15 | "original": {
16 | "owner": "numtide",
17 | "repo": "flake-utils",
18 | "type": "github"
19 | }
20 | },
21 | "nixpkgs": {
22 | "locked": {
23 | "lastModified": 1729850857,
24 | "narHash": "sha256-WvLXzNNnnw+qpFOmgaM3JUlNEH+T4s22b5i2oyyCpXE=",
25 | "owner": "NixOS",
26 | "repo": "nixpkgs",
27 | "rev": "41dea55321e5a999b17033296ac05fe8a8b5a257",
28 | "type": "github"
29 | },
30 | "original": {
31 | "owner": "NixOS",
32 | "ref": "nixpkgs-unstable",
33 | "repo": "nixpkgs",
34 | "type": "github"
35 | }
36 | },
37 | "root": {
38 | "inputs": {
39 | "flake-utils": "flake-utils",
40 | "nixpkgs": "nixpkgs"
41 | }
42 | },
43 | "systems": {
44 | "locked": {
45 | "lastModified": 1681028828,
46 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
47 | "owner": "nix-systems",
48 | "repo": "default",
49 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
50 | "type": "github"
51 | },
52 | "original": {
53 | "owner": "nix-systems",
54 | "repo": "default",
55 | "type": "github"
56 | }
57 | }
58 | },
59 | "root": "root",
60 | "version": 7
61 | }
62 |
--------------------------------------------------------------------------------
/flake.nix:
--------------------------------------------------------------------------------
1 | {
2 | description = "DocLog builds documentation from source code in Prolog";
3 |
4 | inputs = {
5 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
6 | flake-utils.url = "github:numtide/flake-utils";
7 | };
8 |
9 | outputs = { nixpkgs, flake-utils, ... }:
10 | flake-utils.lib.eachDefaultSystem(system:
11 | let
12 | pkgs = import nixpkgs { inherit system; };
13 | in
14 | with pkgs; rec {
15 | packages = rec {
16 | default = doclog;
17 |
18 | teruel.src = fetchFromGitHub {
19 | owner = "aarroyoc";
20 | repo = "teruel";
21 | rev = "v1.0.1";
22 | hash = "sha256-Wi8Z3H53xsegMByW69IskLxJhMmLkRVcLnQAPdFmL5Q=";
23 | };
24 |
25 | djota.src = fetchFromGitHub {
26 | owner = "aarroyoc";
27 | repo = "djota";
28 | rev = "v0.1.3";
29 | hash = "sha256-GX4R+eI2AfgnVwh2iNR9Hs5UGoG9pKLnD0bybqb8MQk=";
30 | };
31 |
32 | doclog = stdenv.mkDerivation {
33 | pname = "doclog";
34 | version = "0.0.1";
35 | src = ./.;
36 | dontBuild = true;
37 | buildDependencies = [ inotify-tools ];
38 |
39 | installPhase = ''
40 | mkdir -p $out; cd $out
41 | cp -r ${teruel.src} $out/teruel
42 | cp -r ${djota.src} $out/djota
43 | cp -r ${scryer-prolog.src} $out/scryer-prolog
44 | cp $src/* $out
45 | sed -i 's@scryer-prolog@${scryer-prolog}/bin/scryer-prolog@' doclog.sh
46 | sed -i 's@inotifywait@${inotify-tools}/bin/inotifywait@' watch.sh
47 | ln doclog.sh doclog
48 | ln watch.sh doclog_watch
49 | '';
50 |
51 | homepage = "https://github.com/aarroyoc/doclog";
52 | license = lib.licenses.bsd3.fullName;
53 | };
54 | };
55 |
56 | defaultApp = apps.doclog;
57 | apps = rec {
58 | doclog = {
59 | type = "app";
60 | program = "${packages.doclog}/doclog";
61 | };
62 |
63 | watch = doclog_watch;
64 | doclog_watch = {
65 | type = "app";
66 | program = "${packages.doclog}/doclog_watch";
67 | };
68 | };
69 | }
70 | );
71 | }
72 |
--------------------------------------------------------------------------------
/footer.html:
--------------------------------------------------------------------------------
1 |
2 | Site created with DocLog at {{ time }}
3 |
4 |
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block title %}{{ project_name }}{% endblock %}
3 | {% block main %}{{ readme }}{% endblock %}
4 |
--------------------------------------------------------------------------------
/learn.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block title %}{{ name }} - Learning docs of {{ project_name }}{% endblock %}
3 | {% block main %}{{ content }}{% endblock %}
4 |
--------------------------------------------------------------------------------
/main.pl:
--------------------------------------------------------------------------------
1 | :- use_module(library(charsio)).
2 | :- use_module(library(dcgs)).
3 | :- use_module(library(files)).
4 | :- use_module(library(format)).
5 | :- use_module(library(iso_ext)).
6 | :- use_module(library(lists)).
7 | :- use_module(library(pio)).
8 | :- use_module(library(ordsets)).
9 | :- use_module(library(time)).
10 | :- use_module(library(clpz)).
11 | :- use_module(library(dif)).
12 | :- use_module(library(debug)).
13 |
14 | :- use_module(teruel/teruel).
15 | :- use_module(djota/djota).
16 |
17 | :- dynamic(output_folder/1).
18 | :- dynamic(source_folder/1).
19 | :- dynamic(base_url/1).
20 |
21 | run(SourceFolder, OutputFolder) :-
22 | catch((
23 | portray_color(blue, doclog(2, 0, 0)),
24 | assertz(output_folder(OutputFolder)),
25 | assertz(source_folder(SourceFolder)),
26 | path_segments(SourceFolder, S1),
27 | append(S1, ["doclog.config.pl"], C1),
28 | path_segments(ConfigFile, C1),
29 | atom_chars(ConfigFileA, ConfigFile),
30 | consult(ConfigFileA),
31 | generate_nav_lib(NavLib),
32 | generate_nav_learn(NavLearn),
33 | generate_footer(Footer),
34 | Sections = ["nav_lib"-NavLib, "nav_learn"-NavLearn, "footer"-Footer],
35 | generate_page_learn(Sections),
36 | do_copy_files,
37 | generate_page_docs(Sections),
38 | generate_readme(Sections),
39 | portray_color(green, done),
40 | halt), Error, (write(Error), nl, halt(1))).
41 |
42 | docs_base_url(BaseURL) :-
43 | ( base_url(X) ->
44 | BaseURL = X
45 | ; BaseURL = "/"
46 | ).
47 |
48 | do_copy_files :-
49 | source_folder(S1),
50 | output_folder(O1),
51 | path_segments(S1, S2),
52 | path_segments(O1, O2),
53 | findall(A-B, copy_file(A,B), Files),
54 | maplist(do_copy_files_(S2, O2), Files).
55 |
56 | do_copy_files_(S2, O2, A1-B1) :-
57 | path_segments(A1, A2),
58 | path_segments(B1, B2),
59 | append(S2, A2, A3),
60 | append(O2, B2, B3),
61 | path_segments(A, A3),
62 | path_segments(B, B3),
63 | portray_clause(copy_file(A, B)),
64 | file_copy(A, B).
65 |
66 | % We do some path detection which will fail unless all paths are equally
67 | % separated. This relates a base path, a tail path and the segments of the
68 | % union.
69 | %
70 | % canonicalize("C:\\mypath", "my/otherpath", ["C:", "mypath", "my", "otherpath"]).
71 | canonicalize(Base, Path, ResSG) :-
72 | path_segments(Base, BaseSG),
73 | path_segments(Sep, ["", ""]),
74 | Sep = [Separator],
75 | replace_char('\\\\', Separator, Path, Path1),
76 | replace_char('/', Separator, Path1, Path2),
77 | path_segments(Path2, PathSG),
78 | append(BaseSG, PathSG, ResSG).
79 |
80 | generate_nav_lib(NavHtml) :-
81 | source_folder(S1),
82 | source_lib_folder(S20),
83 | canonicalize(S1, S20, SFSG),
84 | subnav(SFSG, ".", Nav),
85 | member("nav"-NavHtml, Nav).
86 |
87 | subnav(Base, Dir, ["name"-Dir, "nav"-Nav, "type"-"dir"]) :-
88 | append(Base, [Dir], DirSg),
89 | path_segments(RealDir, DirSg),
90 | directory_exists(RealDir),
91 | directory_files(RealDir, Files),
92 | files_not_omitted_files(RealDir, Files, FilesReal),
93 | sort(FilesReal, FilesSorted),
94 | maplist(subnav(DirSg), FilesSorted, Items),
95 | render("nav.html", ["items"-Items], Nav).
96 |
97 | subnav(Base, File, ["name"-Name, "link"-['/'|Link], "type"-"file"]) :-
98 | append(Base, [File], FileSg),
99 | append(Name, ".pl", File),
100 | path_segments(FilePath, FileSg),
101 | file_exists(FilePath),
102 | append(_, ["."|LinkSg], FileSg),
103 | \+ member(".", LinkSg),
104 | path_segments(Link0, LinkSg),
105 | append(Link1, ".pl", Link0),
106 | append(Link1, ".html", Link).
107 |
108 | files_not_omitted_files(_, [], []).
109 | files_not_omitted_files(Base, [X|Xs], Ys) :-
110 | source_folder(S1),
111 | source_lib_folder(S2),
112 | canonicalize(S1, S2, SFSG),
113 | path_segments(SF, SFSG),
114 | path_segments(Separator, ["", ""]),
115 | findall(FullOmitFile,(
116 | omit(Omit),
117 | member(OmitFile, Omit),
118 | append([SF, Separator, ".", Separator, OmitFile], FullOmitFile)
119 | ), OmitFiles),
120 | append([Base, Separator, X], File),
121 | (
122 | member(File, OmitFiles) ->
123 | Ys = Ys0
124 | ; Ys = [X|Ys0]
125 | ),
126 | files_not_omitted_files(Base, Xs, Ys0).
127 |
128 | generate_nav_learn(NavLearn) :-
129 | learn_pages_categories(Categories),
130 | maplist(generate_nav_learn_cat, Categories, Items),
131 | render("nav.html", ["items"-Items], NavLearn).
132 |
133 | generate_nav_learn_cat(Category, SubNav) :-
134 | learn_pages(Pages),
135 | findall(Item, (
136 | member(Page, Pages),
137 | Page = page(Name, Category, Source),
138 | append(BaseFile, ".dj", Source),
139 | append(BaseFile, ".html", File),
140 | append("/learn/", File, Link),
141 | Item = ["name"-Name, "link"-Link, "type"-"file"]
142 | ), Items),
143 | render("nav.html", ["items"-Items], Text),
144 | SubNav = ["name"-Category, "nav"-Text, "type"-"dir"].
145 |
146 | generate_footer(Footer) :-
147 | current_time(T),
148 | phrase(format_time("%b %d %Y", T), Time),
149 | render("footer.html", ["time"-Time], Footer).
150 |
151 | generate_page_learn(Sections) :-
152 | learn_pages(Pages),
153 | output_folder(OutputFolder),
154 | path_segments(OutputFolder, O1),
155 | append(O1, ["learn"], LearnFolderSg),
156 | path_segments(LearnFolder, LearnFolderSg),
157 | make_directory_path(LearnFolder),
158 | maplist(generate_page_learn_(Sections, LearnFolderSg), Pages).
159 |
160 | generate_page_learn_(Sections, LearnFolderSg, page(Name, Category, Source)) :-
161 | portray_clause(rendering_learn_page(Name, Category)),
162 | source_folder(SF),
163 | learn_pages_source_folder(SourceFolder),
164 | project_name(ProjectName),
165 | docs_base_url(BaseURL),
166 | path_segments(SF, S0),
167 | path_segments(SourceFolder, S1),
168 | append(S0, S1, S2),
169 | append(S2, [Source], S3),
170 | path_segments(SourceFile, S3),
171 | phrase_from_file(seq(Text), SourceFile),
172 | djot(Text, Html),
173 | Vars0 = ["project_name"-ProjectName, "base_url"-BaseURL, "name"-Name, "category"-Category, "content"-Html],
174 | append(Vars0, Sections, Vars),
175 | render("learn.html", Vars, LearnHtml),
176 | append(F1, ".dj", Source),
177 | append(F1, ".html", F2),
178 | append(LearnFolderSg, [F2], O1),
179 | path_segments(OutputFile, O1),
180 | phrase_to_file(seq(LearnHtml), OutputFile).
181 |
182 | generate_readme(Sections) :-
183 | source_folder(S1),
184 | path_segments(S1, S2),
185 | readme_file(R1),
186 | append(S2, [R1], R2),
187 | path_segments(ReadmeFile, R2),
188 | project_name(ProjectName),
189 | docs_base_url(BaseURL),
190 | output_folder(OutputFolder),
191 | path_segments(OutputFolder, OutputFolderSg),
192 | append(OutputFolderSg, ["index.html"], OutputFileSg),
193 | path_segments(OutputFile, OutputFileSg),
194 | phrase_from_file(seq(ReadmeMd), ReadmeFile),
195 | djot(ReadmeMd, ReadmeHtml),
196 | Vars0 = ["project_name"-ProjectName, "base_url"-BaseURL, "readme"-ReadmeHtml],
197 | append(Vars0, Sections, Vars),
198 | render("index.html", Vars, IndexHtml),
199 | phrase_to_file(seq(IndexHtml), OutputFile).
200 |
201 | generate_page_docs(Sections) :-
202 | source_folder(S1),
203 | source_lib_folder(S2),
204 | canonicalize(S1, S2, Base),
205 | path_segments(DocsFolder, Base),
206 | output_folder(OutputFolder),
207 | make_directory_path(OutputFolder),
208 | directory_files(DocsFolder, Files),
209 | path_segments(OutputFolder, Output),
210 | append(Output, ["search-index.json"], SearchIndexSg),
211 | path_segments(SearchIndex, SearchIndexSg),
212 | setup_call_cleanup(open(SearchIndex, write, SearchWriteStream),(
213 | format(SearchWriteStream, "[", []),
214 | maplist(process_file(Base, Output, Sections, SearchWriteStream), Files),
215 | format(SearchWriteStream, "{}]", [])
216 | ), close(SearchWriteStream)),
217 | append(Output, ["doclog.css"], F1),
218 | append(Output, ["doclog.js"], F2),
219 | path_segments(F3, F1),
220 | path_segments(F4, F2),
221 | file_copy("doclog.css", F3),
222 | file_copy("doclog.js", F4).
223 |
224 | process_file(Base, Output0, Sections, SearchWriteStream, File0) :-
225 | append(Base, [File0], FileSg),
226 | append(File1, ".pl", File0),
227 | append(File1, ".html", Output1),
228 | append(Output0, [Output1], OutputSg),
229 | path_segments(Output, OutputSg),
230 | path_segments(File, FileSg),
231 | file_exists(File),
232 | portray_color(green, process_file(File)),
233 | open(File, read, FileStream),
234 | read_term(FileStream, Term, []),
235 | (
236 | Term = (:- module(ModuleName, PublicPredicates)) ->
237 | (
238 | predicates_clean(PublicPredicates, PublicPredicates1, Ops),
239 | document_file(File, Output, ModuleName, PublicPredicates1, Ops, Sections),
240 | append_predicates_search_index(Output, PublicPredicates1, Ops, SearchWriteStream)
241 | )
242 | ; true
243 | ),
244 | close(FileStream).
245 |
246 | process_file(Base0, Output0, Sections, SearchWriteStream, Dir0) :-
247 | append(Base0, [Dir0], DirSg),
248 | append(Output0, [Dir0], Output),
249 | path_segments(Dir, DirSg),
250 | directory_exists(Dir),
251 | path_segments(OutputDir, Output),
252 | make_directory_path(OutputDir),
253 | directory_files(Dir, Files),
254 | maplist(process_file(DirSg, Output, Sections, SearchWriteStream), Files).
255 |
256 | predicates_clean([], [], []).
257 | predicates_clean([X|Xs], [X|Ys], Ops) :-
258 | X \= op(_,_,_),
259 | predicates_clean(Xs, Ys, Ops).
260 | predicates_clean([X|Xs], Ys, [X|Ops]) :-
261 | X = op(_,_,_),
262 | predicates_clean(Xs, Ys, Ops).
263 |
264 | append_predicates_search_index(Output, PublicPredicates, Ops, SearchWriteStream) :-
265 | output_folder(OF),
266 | append(OF, Relative, Output),
267 | maplist(append_search_index(Relative, SearchWriteStream, Ops), PublicPredicates).
268 |
269 | append_search_index(Output, SearchWriteStream, Ops, Predicate) :-
270 | predicate_string(Predicate, Ops, PredicateString),
271 | phrase(escape_js(PredicateStringSafe), PredicateString),
272 | format(SearchWriteStream, "{\"link\": \"~s#~s\", \"predicate\": \"~s\"},", [Output, PredicateStringSafe, PredicateStringSafe]).
273 |
274 | append_search_index(Output, SearchWriteStream, op(_,_,Operator)) :-
275 | atom_chars(Operator, NameUnsafe),
276 | phrase(escape_js(Name), NameUnsafe),
277 | format(SearchWriteStream, "{\"link\": \"~s\", \"predicate\": \"~s\"},", [Output, Name]).
278 |
279 | escape_js([]) --> [].
280 | escape_js([X|Xs]) -->
281 | [X],
282 | {
283 | X \= (\)
284 | },
285 | escape_js(Xs).
286 | escape_js(Xs) -->
287 | "\\",
288 | escape_js(Xs0),
289 | { append("\\\\", Xs0, Xs) }.
290 |
291 | replace_char(_, _, [], []).
292 | replace_char(X, Y, [X|Xs], [Y|Ys]) :-
293 | replace_char(X, Y, Xs, Ys).
294 | replace_char(X, Y, [Z|Xs], [Z|Ys]) :-
295 | X \= Z,
296 | replace_char(X, Y, Xs, Ys).
297 |
298 | % let's try to document every text comment we see
299 | % Later, we'll add public predicates that have no documentation
300 | document_file(InputFile, OutputFile, ModuleName, PublicPredicates, Ops, Sections) :-
301 | phrase_from_file(seq(FileText), InputFile),
302 | phrase(documented_predicates(Predicates0, Ops), FileText),
303 | public_undocumented_predicates(Predicates0, Ops, PublicPredicates, PublicUndocumented),
304 | ( PublicUndocumented \= [],
305 | % Only portray if there are undocumented items
306 | portray_color(yellow, undocumented_public_predicates(PublicUndocumented))
307 | ;
308 | green(Green),
309 | reset(Reset),
310 | format("~sFile fully documented.~s~n", [Green, Reset])
311 | ),
312 | maplist(document_predicate(Ops), PublicUndocumented, Predicates1),
313 | append(Predicates0, Predicates1, Predicates),
314 | phrase(module_description(ModuleDescriptionMd), FileText),
315 | djot(ModuleDescriptionMd, ModuleDescriptionHtml),
316 | atom_chars(ModuleName, ModuleNameStr),
317 | project_name(ProjectName),
318 | docs_base_url(BaseURL),
319 | source_folder(S1),
320 | source_lib_folder(S2),
321 | canonicalize(S1, S2, S5),
322 | path_segments(SF, S5),
323 | websource(WebSourceBase),
324 | append(SF, ExtraFile, InputFile),
325 | path_segments(Separator, ["", ""]),
326 | append([Separator, LibraryUse, ".pl"], ExtraFile),
327 | % The use_module directive always has to use forward slashes!
328 | replace_char('\\', '/', LibraryUse, LibraryUse1),
329 | append(WebSourceBase, ExtraFile, WebSource),
330 | Vars0 = [
331 | "project_name"-ProjectName,
332 | "base_url"-BaseURL,
333 | "module_name"-ModuleNameStr,
334 | "module_description"-ModuleDescriptionHtml,
335 | "predicates"-Predicates,
336 | "websource"-WebSource,
337 | "library"-LibraryUse1
338 | ],
339 | append(Vars0, Sections, Vars),
340 | render("page.html", Vars, HtmlOut),
341 | phrase_to_file(seq(HtmlOut), OutputFile).
342 |
343 | documented_predicates([], _) --> "".
344 | documented_predicates([PredicateVars|Ps], Ops) -->
345 | predicate_documentation(Predicate, Name, DescriptionDjot),!,
346 | {
347 | predicate_string(Predicate, Ops, PredicateString),
348 | portray_clause(documenting(PredicateString)),
349 | djot(DescriptionDjot, Description),
350 | PredicateVars = ["predicate"-PredicateString, "name"-Name, "description"-Description]
351 | },
352 | documented_predicates(Ps, Ops).
353 |
354 | documented_predicates(Ps, Ops) -->
355 | ... , "\n",
356 | documented_predicates(Ps, Ops).
357 |
358 | module_description(X) -->
359 | ... ,
360 | "/**",
361 | seq(X),
362 | "*/",
363 | ... .
364 |
365 | module_description("No description") -->
366 | ... .
367 |
368 | predicate_string(Predicate, Ops, PredicateString) :-
369 | Predicate = PN/PA,
370 | member(op(_, _, PN), Ops),
371 | phrase(format_("(~a)/~d", [PN, PA]), PredicateString).
372 |
373 | predicate_string(Predicate, Ops, PredicateString) :-
374 | Predicate = PN/_PA,
375 | \+ member(op(_, _, PN), Ops),
376 | phrase(format_("~q", [Predicate]), PredicateString).
377 |
378 | predicate_string(Predicate, _Ops, PredicateString) :-
379 | Predicate = _PN//_PA,
380 | phrase(format_("~q", [Predicate]), PredicateString).
381 |
382 | document_predicate(Ops, Predicate, ["predicate"-Name, "name"-Name, "description"-Description]) :-
383 | predicate_string(Predicate, Ops, Name),
384 | Description = "".
385 |
386 | public_undocumented_predicates(_, _, [], []).
387 | public_undocumented_predicates(Documented, Ops, [Predicate|Public], Undocumented) :-
388 | predicate_string(Predicate, Ops, PredicateString),
389 | member(["predicate"-PredicateString|_], Documented),
390 | public_undocumented_predicates(Documented, Ops, Public, Undocumented).
391 | public_undocumented_predicates(Documented, Ops, [Predicate|Public], [Predicate|Undocumented]) :-
392 | predicate_string(Predicate, Ops, PredicateString),
393 | \+ member(["predicate"-PredicateString|_], Documented),
394 | public_undocumented_predicates(Documented, Ops, Public, Undocumented).
395 |
396 | predicate_documentation(Predicate, Name, Description) -->
397 | "%% ", seq(Name), "\n%", { \+ member('\n', Name) },
398 | whites, "\n",
399 | predicate_description(Description),
400 | { phrase(predicate_name(Predicate), Name) }.
401 |
402 | predicate_name_seq([X|Xs]) -->
403 | [X],
404 | { maplist(dif(X), " ()") },
405 | predicate_name_seq(Xs).
406 | predicate_name_seq([]) --> "".
407 |
408 | predicate_name(PredicateName//Arity) -->
409 | predicate_name_seq(PredicateNameCs),
410 | "(",
411 | seq(Args),
412 | ")//",
413 | {
414 | Commas #= Arity - 1,
415 | phrase(commas(Commas), Args),
416 | atom_chars(PredicateName, PredicateNameCs)
417 | }.
418 |
419 | predicate_name(PredicateName//0) -->
420 | predicate_name_seq(PredicateNameCs),
421 | "//",
422 | {
423 | atom_chars(PredicateName, PredicateNameCs)
424 | }.
425 |
426 | predicate_name(PredicateName/Arity) -->
427 | predicate_name_seq(PredicateNameCs),
428 | "(",
429 | seq(Args),
430 | ")",
431 | ... ,
432 | {
433 | Commas #= Arity - 1,
434 | phrase(commas(Commas), Args),
435 | atom_chars(PredicateName, PredicateNameCs)
436 | }.
437 |
438 | predicate_name(PredicateName/0) -->
439 | predicate_name_seq(PredicateNameCs),
440 | ".",
441 | {
442 | atom_chars(PredicateName, PredicateNameCs)
443 | }.
444 |
445 | predicate_name(PredicateName/0) -->
446 | predicate_name_seq(PredicateNameCs),
447 | ... ,
448 | {
449 | atom_chars(PredicateName, PredicateNameCs)
450 | }.
451 |
452 | predicate_description(Description) -->
453 | "% ", seq(Line), "\n",
454 | predicate_description(Description0),
455 | {
456 | append(Line, ['\n'|Description0], Description)
457 | }.
458 | predicate_description(Description) -->
459 | "%", whites, "\n",
460 | predicate_description(Description0),
461 | {
462 | Description = ['\n'|Description0]
463 | }.
464 | predicate_description("") --> [].
465 |
466 | whites --> [].
467 | whites --> " ", whites.
468 | whites --> "\t", whites.
469 |
470 | commas(0) -->
471 | seq(X),
472 | {
473 | \+ member(',', X)
474 | }.
475 | commas(N) -->
476 | seq(X),
477 | {
478 | \+ member(',', X)
479 | },
480 | ",",
481 | commas(N0),
482 | {
483 | N is N0 + 1
484 | }.
485 |
486 | dirs_only([F|Fs], Output, [F|FOs]) :-
487 | append(Output, [F], OutputFile),
488 | path_segments(File, OutputFile),
489 | directory_exists(File),
490 | dirs_only(Fs, Output, FOs).
491 |
492 | dirs_only([F|Fs], Output, FOs) :-
493 | append(Output, [F], OutputFile),
494 | path_segments(File, OutputFile),
495 | \+ directory_exists(File),
496 | dirs_only(Fs, Output, FOs).
497 |
498 | dirs_only([], _, []).
499 |
500 | string_without([X|Xs], Block) -->
501 | [X],
502 | {
503 | X \= Block
504 | },
505 | string_without(Xs, Block).
506 |
507 | string_without([], Block) -->
508 | [X],
509 | {
510 | X = Block
511 | }.
512 |
513 | string_without([], _) -->
514 | [].
515 |
516 | red(X) :-
517 | chars_utf8bytes(A, [27]),
518 | append(A, "[0;31m", X).
519 | green(X) :-
520 | chars_utf8bytes(A, [27]),
521 | append(A, "[0;32m", X).
522 | yellow(X) :-
523 | chars_utf8bytes(A, [27]),
524 | append(A, "[0;33m", X).
525 | blue(X) :-
526 | chars_utf8bytes(A, [27]),
527 | append(A, "[0;34m", X).
528 | reset(X) :-
529 | chars_utf8bytes(A, [27]),
530 | append(A, "[0m", X).
531 |
532 | portray_color(Color, X) :-
533 | call(Color, A), reset(B),
534 | phrase(portray_clause_(X), S),
535 | append(S1, "\n", S),
536 | format("~s~s~s~n", [A,S1,B]).
537 |
--------------------------------------------------------------------------------
/nav.html:
--------------------------------------------------------------------------------
1 |
2 | {% for item in items %}
3 | {% if item.type == "dir" %}
4 | {{ item.name }}/ {{ item.nav }}
5 | {% else %}
6 | {{ item.name }}
7 | {% endif %}
8 | {% endfor %}
9 |
10 |
--------------------------------------------------------------------------------
/page.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block title %}Module {{ module_name }} - {{ project_name }} documentation{% endblock %}
3 | {% block main %}
4 | Module {{ module_name }}
5 | :- use_module(library({{ library }})).
6 | {{ module_description }}
7 |
10 |
11 |
12 | {% for predicate in predicates %}
13 |
14 |
{{ predicate.name }}
15 |
{{ predicate.description }}
16 |
17 | {% endfor %}
18 |
19 | {% endblock %}
20 |
--------------------------------------------------------------------------------
/scryer-test.config.pl:
--------------------------------------------------------------------------------
1 | project_name("Scryer Prolog").
2 | readme_file("scryer-prolog/INDEX.dj").
3 | source_folder("scryer-prolog/src/lib").
4 | output_folder("output").
5 | websource("https://github.com/mthom/scryer-prolog/tree/master/src/lib").
6 | omit(["ops_and_meta_predicates.pl", "tabling"]).
7 |
--------------------------------------------------------------------------------
/scryer.config.pl:
--------------------------------------------------------------------------------
1 | project_name("Scryer Prolog").
2 | readme_file("/home/aarroyoc/dev/scryer-prolog/INDEX.dj").
3 | source_folder("/home/aarroyoc/dev/scryer-prolog/src/lib").
4 | output_folder("/home/aarroyoc/dev/doclog/output").
5 | websource("https://github.com/mthom/scryer-prolog/tree/master/src/lib").
6 | omit(["ops_and_meta_predicates.pl", "tabling"]).
7 | learn_pages_source_folder("/home/aarroyoc/dev/scryer-prolog/learn").
8 | learn_pages_categories(["First steps", "Advanced topics"]).
9 | learn_pages([
10 | page("Test page", "First steps", "test-page.dj"),
11 | page("Second test page", "Advanced topics", "second-test-page.dj"),
12 | page("Third test page", "First steps", "test-page-3.dj"),
13 | ]).
14 | base_url("/").
15 |
--------------------------------------------------------------------------------
/watch.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | ## Use inotifywait (from inotify-tools) to rebuild everytime the SOURCE changed.
3 | ## It expects the same 2 arguments as doclog.sh (SOURCE and OUTPUT).
4 |
5 | DOCLOG=$(realpath $(dirname $0))
6 | SOURCE=$1
7 | OUTPUT=$2
8 |
9 | doclog_rebuild() {
10 | rm -rf $OUTPUT
11 | $DOCLOG/doclog.sh $SOURCE $OUTPUT
12 | }
13 |
14 | doclog_rebuild
15 |
16 | while true; do
17 | echo
18 | inotifywait -e modify,create,delete --exclude $OUTPUT -r $SOURCE
19 | doclog_rebuild
20 | done
21 |
--------------------------------------------------------------------------------