├── .gitignore
├── index.html
├── footer.html
├── learn.html
├── scryer-test.config.pl
├── nav.html
├── doclog.sh
├── watch.sh
├── Makefile
├── doclog.js
├── page.html
├── scryer.config.pl
├── .github
└── workflows
│ ├── test.yml
│ └── publish.yml
├── LICENSE
├── flake.lock
├── base.html
├── README.md
├── flake.nix
├── doclog.css
└── main.pl
/.gitignore:
--------------------------------------------------------------------------------
1 | # Local Netlify folder
2 | .netlify
3 | output
4 | marquete
5 | teruel
6 | djota
7 | scryer-prolog
8 | .vscode
--------------------------------------------------------------------------------
/index.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block title %}{{ project_name }}{% endblock %}
3 | {% block main %}{{ readme }}{% endblock %}
4 |
--------------------------------------------------------------------------------
/footer.html:
--------------------------------------------------------------------------------
1 |
4 |
--------------------------------------------------------------------------------
/learn.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block title %}{{ name }} - Learning docs of {{ project_name }}{% endblock %}
3 | {% block main %}{{ content }}{% endblock %}
4 |
--------------------------------------------------------------------------------
/scryer-test.config.pl:
--------------------------------------------------------------------------------
% DocLog configuration used by the CI test build (paths relative to the
% DocLog checkout, unlike scryer.config.pl which uses absolute paths).
1 | project_name("Scryer Prolog").
2 | readme_file("scryer-prolog/INDEX.dj").
% NOTE(review): main.pl reads source_lib_folder/1, but this file defines
% source_folder/1 (which main.pl also asserts from the CLI) — confirm
% which key DocLog actually expects.
3 | source_folder("scryer-prolog/src/lib").
4 | output_folder("output").
5 | websource("https://github.com/mthom/scryer-prolog/tree/master/src/lib").
% Files/directories excluded from the generated documentation.
6 | omit(["ops_and_meta_predicates.pl", "tabling"]).
7 | 
--------------------------------------------------------------------------------
/nav.html:
--------------------------------------------------------------------------------
1 |
2 | {% for item in items %}
3 | {% if item.type == "dir" %}
4 | {{ item.name }}/
{{ item.nav }}
5 | {% else %}
6 | - {{ item.name }}
7 | {% endif %}
8 | {% endfor %}
9 |
10 |
--------------------------------------------------------------------------------
/doclog.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | DOCLOG=$(realpath $(dirname $0))
4 |
5 | case "$(uname -s)" in
6 | MINGW*)
7 | SOURCE=$(cygpath -w $(realpath $1) | sed -e 's/\\/\\\\/g');
8 | OUTPUT=$(cygpath -w $(realpath $2) | sed -e 's/\\/\\\\/g');;
9 | *)
10 | SOURCE=$(realpath $1);
11 | OUTPUT=$(realpath $2);;
12 | esac
13 |
14 | cd $DOCLOG
15 | scryer-prolog -g "run(\"$SOURCE\", \"$OUTPUT\")." -g 'halt' main.pl
16 | cd -
17 |
--------------------------------------------------------------------------------
/watch.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | ## Use inotifywait (from inotify-tools) to rebuild everytime the SOURCE changed.
3 | ## It expects the same 2 arguments as doclog.sh (SOURCE and OUTPUT).
4 |
5 | DOCLOG=$(realpath $(dirname $0))
6 | SOURCE=$1
7 | OUTPUT=$2
8 |
9 | doclog_rebuild() {
10 | rm -rf $OUTPUT
11 | $DOCLOG/doclog.sh $SOURCE $OUTPUT
12 | }
13 |
14 | doclog_rebuild
15 |
16 | while true; do
17 | echo
18 | inotifywait -e modify,create,delete --exclude $OUTPUT -r $SOURCE
19 | doclog_rebuild
20 | done
21 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
# Default target: generate the Scryer Prolog documentation into ./output.
1 | .PHONY: gen-docs-scryer
2 | gen-docs-scryer:
3 | bash doclog.sh ./scryer-prolog ./output
4 | 
# Remove the generated site.
5 | .PHONY: clean
6 | clean:
7 | rm -rf output
8 | 
# Publish the generated site (expects netlify CLI and credentials).
9 | .PHONY: upload
10 | upload:
11 | netlify deploy --prod
12 | 
# Fetch build dependencies: teruel (templating), djota (Djot rendering)
# and the Scryer Prolog sources to document. Re-runnable: each clone is
# removed first.
13 | .PHONY: setup
14 | setup:
15 | rm -rf teruel
16 | git clone --depth 1 --branch v1.0.1 https://github.com/aarroyoc/teruel
17 | rm -rf djota
18 | git clone --depth 1 --branch v0.3.3 https://github.com/aarroyoc/djota
19 | rm -rf scryer-prolog
20 | git clone --depth 1 https://github.com/mthom/scryer-prolog
21 | 
--------------------------------------------------------------------------------
/doclog.js:
--------------------------------------------------------------------------------
1 | window.addEventListener("load", () => {
2 | const searchBox = document.getElementById("search");
3 | const predicates = document.getElementById("predicates");
4 |
5 | fetch("/search-index.json")
6 | .then((res) => res.json())
7 | .then((data) => {
8 | for(let predicate of data) {
9 | let node = document.createElement("option");
10 | node.value = predicate.predicate;
11 | predicates.appendChild(node);
12 | }
13 |
14 | searchBox.oninput = () => {
15 | for(let predicate of data) {
16 | if(searchBox.value === predicate.predicate) {
17 | window.location.href = predicate.link;
18 | }
19 | }
20 | };
21 | });
22 | });
23 |
--------------------------------------------------------------------------------
/page.html:
--------------------------------------------------------------------------------
1 | {% extends "base.html" %}
2 | {% block title %}Module {{ module_name }} - {{ project_name }} documentation{% endblock %}
3 | {% block main %}
4 | Module {{ module_name }}
5 | :- use_module(library({{ library }})).
6 | {{ module_description }}
7 |
10 |
11 |
12 | {% for predicate in predicates %}
13 |
14 |
{{ predicate.name }}
15 |
{{ predicate.description }}
16 |
17 | {% endfor %}
18 |
19 | {% endblock %}
20 |
--------------------------------------------------------------------------------
/scryer.config.pl:
--------------------------------------------------------------------------------
% Example DocLog configuration for building the Scryer Prolog docs
% locally (absolute paths specific to the author's machine).
1 | project_name("Scryer Prolog").
2 | readme_file("/home/aarroyoc/dev/scryer-prolog/INDEX.dj").
% NOTE(review): main.pl reads source_lib_folder/1, but this file defines
% source_folder/1 — confirm which key DocLog actually expects.
3 | source_folder("/home/aarroyoc/dev/scryer-prolog/src/lib").
4 | output_folder("/home/aarroyoc/dev/doclog/output").
5 | websource("https://github.com/mthom/scryer-prolog/tree/master/src/lib").
6 | omit(["ops_and_meta_predicates.pl", "tabling"]).
7 | learn_pages_source_folder("/home/aarroyoc/dev/scryer-prolog/learn").
8 | learn_pages_categories(["First steps", "Advanced topics"]).
% Fixed: the list previously ended with a trailing comma after the last
% page/3 entry, which is a syntax error in Prolog list notation.
9 | learn_pages([
10 | page("Test page", "First steps", "test-page.dj"),
11 | page("Second test page", "Advanced topics", "second-test-page.dj"),
12 | page("Third test page", "First steps", "test-page-3.dj")
13 | ]).
14 | base_url("/").
15 | 
--------------------------------------------------------------------------------
/.github/workflows/test.yml:
--------------------------------------------------------------------------------
1 | name: Test
2 | on: [push]
3 |
4 | jobs:
5 | test:
6 | runs-on: ubuntu-22.04
7 | steps:
8 | - name: Checkout
9 | uses: actions/checkout@v4
10 | - name: Checkout Scryer Prolog
11 | uses: actions/checkout@v4
12 | with:
13 | repository: mthom/scryer-prolog
14 | path: scryer-prolog
15 | - name: Compile Scryer Prolog
16 | run: cargo build --release
17 | working-directory: scryer-prolog
18 | - name: Install Scryer Prolog
19 | run: sudo cp scryer-prolog/target/release/scryer-prolog /usr/bin/scryer-prolog
20 | - name: Install Dependencies
21 | run: make setup
22 | - name: Generate docs for Scryer Prolog
23 | run: make
24 |
--------------------------------------------------------------------------------
/.github/workflows/publish.yml:
--------------------------------------------------------------------------------
1 | name: Publish
2 | on:
3 | schedule:
4 | - cron: '33 3 * * *'
5 | workflow_dispatch:
6 |
7 | jobs:
8 | test:
9 | runs-on: ubuntu-22.04
10 | steps:
11 | - name: Install Netlify CLI
12 | run: npm install -g netlify-cli@16.0.2
13 | - name: Checkout
14 | uses: actions/checkout@v4
15 | - name: Checkout Scryer Prolog
16 | uses: actions/checkout@v4
17 | with:
18 | repository: mthom/scryer-prolog
19 | path: scryer-prolog
20 | - name: Compile Scryer Prolog
21 | run: cargo build --release
22 | working-directory: scryer-prolog
23 | - name: Install Scryer Prolog
24 | run: sudo cp scryer-prolog/target/release/scryer-prolog /usr/bin/scryer-prolog
25 | - name: Install Dependencies
26 | run: make setup
27 | - name: Generate docs for https://www.scryer.pl
28 | run: make
29 | - name: Upload site
30 | run: netlify deploy --prod --dir=output
31 | env:
32 | NETLIFY_AUTH_TOKEN: ${{ secrets.NETLIFY_AUTH_TOKEN }}
33 | NETLIFY_SITE_ID: ${{ secrets.NETLIFY_SITE_ID }}
34 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2022, Adrián Arroyo Calle
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | * Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | * Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | * Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/flake.lock:
--------------------------------------------------------------------------------
1 | {
2 | "nodes": {
3 | "flake-utils": {
4 | "inputs": {
5 | "systems": "systems"
6 | },
7 | "locked": {
8 | "lastModified": 1726560853,
9 | "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
10 | "owner": "numtide",
11 | "repo": "flake-utils",
12 | "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
13 | "type": "github"
14 | },
15 | "original": {
16 | "owner": "numtide",
17 | "repo": "flake-utils",
18 | "type": "github"
19 | }
20 | },
21 | "nixpkgs": {
22 | "locked": {
23 | "lastModified": 1729850857,
24 | "narHash": "sha256-WvLXzNNnnw+qpFOmgaM3JUlNEH+T4s22b5i2oyyCpXE=",
25 | "owner": "NixOS",
26 | "repo": "nixpkgs",
27 | "rev": "41dea55321e5a999b17033296ac05fe8a8b5a257",
28 | "type": "github"
29 | },
30 | "original": {
31 | "owner": "NixOS",
32 | "ref": "nixpkgs-unstable",
33 | "repo": "nixpkgs",
34 | "type": "github"
35 | }
36 | },
37 | "root": {
38 | "inputs": {
39 | "flake-utils": "flake-utils",
40 | "nixpkgs": "nixpkgs"
41 | }
42 | },
43 | "systems": {
44 | "locked": {
45 | "lastModified": 1681028828,
46 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
47 | "owner": "nix-systems",
48 | "repo": "default",
49 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
50 | "type": "github"
51 | },
52 | "original": {
53 | "owner": "nix-systems",
54 | "repo": "default",
55 | "type": "github"
56 | }
57 | }
58 | },
59 | "root": "root",
60 | "version": 7
61 | }
62 |
--------------------------------------------------------------------------------
/base.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | {% block title %}{% endblock %}
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 | Skip to main content
14 | Skip to navigation
15 |
16 |
24 |
25 |
26 |
27 |
28 | {% block main %}{% endblock %}
29 |
30 |
31 |
32 |
33 |
39 |
40 | {{ footer }}
41 |
42 |
43 |
44 |
45 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # DocLog
2 |
3 | Create documentation from your Prolog code
4 |
5 | Example, Scryer Prolog documentation: https://www.scryer.pl
6 |
7 | ## How to document your code?
8 |
9 | There are two kinds of comments in DocLog: module and predicate.
10 |
11 | Each file/module has the option to write a module comment. It will be displayed at the beginning of the page. You can use [Djot](https://djot.net/) inside the comment. The syntax is:
12 | ```
13 | /**
14 | COMMENT
15 | MORE COMMENT
16 | */
17 | ```
18 |
19 | Predicate comments start with %% and are followed by N lines of % comments, of which the first should be empty. The first line should state the name of the predicate, its variable names and modes. Then you can use [Djot](https://djot.net/) to explain the rest of the predicate.
20 |
21 | ```
22 | %% append(L0, L1, L)
23 | %
24 | % L is L0 and L1 appended together
25 | ```
26 |
27 | ## Using Doclog
28 |
29 | First, clone the repo:
30 |
31 | ```
32 | $ git clone https://github.com/aarroyoc/doclog
33 | $ cd doclog
34 | ```
35 |
36 | Then, install the dependencies:
37 |
38 | ```
39 | $ make setup
40 | ```
41 |
42 | After that, you must create a configuration file called `doclog.config.pl`. This file will contain several options required to document your project, and must be in your source directory. Take a look at `scryer.config.pl` for an example file.
43 |
44 | With your config file, you can execute Doclog:
45 |
46 | ```
47 | ./doclog.sh SOURCE_FOLDER OUTPUT_FOLDER
48 | ```
49 |
50 | And wait for the docs to be generated!
51 |
52 | While developing, it might be useful to rebuild every time something in the SOURCE\_FOLDER changes. You can do so by starting this command:
53 |
54 | ```
55 | ./watch.sh SOURCE_FOLDER OUTPUT_FOLDER
56 | ```
57 |
58 | There's support for incremental compilation, however, this incremental compilation won't update the sidebars.
59 |
--------------------------------------------------------------------------------
/flake.nix:
--------------------------------------------------------------------------------
1 | {
2 | description = "DocLog builds documentation from source code in Prolog";
3 | 
4 | inputs = {
5 | nixpkgs.url = "github:NixOS/nixpkgs/nixpkgs-unstable";
6 | flake-utils.url = "github:numtide/flake-utils";
7 | };
8 | 
9 | outputs = { nixpkgs, flake-utils, ... }:
10 | flake-utils.lib.eachDefaultSystem(system:
11 | let
12 | pkgs = import nixpkgs { inherit system; };
13 | in
14 | with pkgs; rec {
15 | packages = rec {
16 | default = doclog;
17 | 
# Pinned sources for the two Prolog dependencies copied into the output.
18 | teruel.src = fetchFromGitHub {
19 | owner = "aarroyoc";
20 | repo = "teruel";
21 | rev = "v1.0.1";
22 | hash = "sha256-Wi8Z3H53xsegMByW69IskLxJhMmLkRVcLnQAPdFmL5Q=";
23 | };
24 | 
# NOTE(review): pins djota v0.1.3 while the Makefile clones v0.3.3 —
# confirm which version is intended.
25 | djota.src = fetchFromGitHub {
26 | owner = "aarroyoc";
27 | repo = "djota";
28 | rev = "v0.1.3";
29 | hash = "sha256-GX4R+eI2AfgnVwh2iNR9Hs5UGoG9pKLnD0bybqb8MQk=";
30 | };
31 | 
32 | doclog = stdenv.mkDerivation {
33 | pname = "doclog";
34 | version = "0.0.1";
35 | src = ./.;
36 | dontBuild = true;
# NOTE(review): "buildDependencies" is not a standard mkDerivation
# attribute — inotify-tools presumably belongs in nativeBuildInputs or
# buildInputs; as written it is likely ignored.
37 | buildDependencies = [ inotify-tools ];
38 | 
# Copies templates/deps into $out and rewrites the scripts so they call
# the store paths of scryer-prolog and inotifywait directly.
39 | installPhase = ''
40 | mkdir -p $out; cd $out
41 | cp -r ${teruel.src} $out/teruel
42 | cp -r ${djota.src} $out/djota
43 | cp -r ${scryer-prolog.src} $out/scryer-prolog
44 | cp $src/* $out
45 | sed -i 's@scryer-prolog@${scryer-prolog}/bin/scryer-prolog@' doclog.sh
46 | sed -i 's@inotifywait@${inotify-tools}/bin/inotifywait@' watch.sh
47 | ln doclog.sh doclog
48 | ln watch.sh doclog_watch
49 | '';
50 | 
# NOTE(review): homepage/license conventionally live under meta = { ... };
# placed directly on the derivation they are inert.
51 | homepage = "https://github.com/aarroyoc/doclog";
52 | license = lib.licenses.bsd3.fullName;
53 | };
54 | };
55 | 
56 | defaultApp = apps.doclog;
57 | apps = rec {
58 | doclog = {
59 | type = "app";
60 | program = "${packages.doclog}/doclog";
61 | };
62 | 
63 | watch = doclog_watch;
64 | doclog_watch = {
65 | type = "app";
66 | program = "${packages.doclog}/doclog_watch";
67 | };
68 | };
69 | }
70 | );
71 | }
72 | 
--------------------------------------------------------------------------------
/doclog.css:
--------------------------------------------------------------------------------
1 | /* ---------------------------------------------------------
2 | RAW TOKENS (physical colors)
3 | --------------------------------------------------------- */
4 | :root {
5 | /* Core palette */
6 | --blue-strong: #00007f;
7 | --blue-light: #dfdfff;
8 | --blue-lighter: #efefff;
9 |
10 | --purple-visited: #5a3fd9;
11 |
12 | --light-bg: #fff;
13 | --light-text: #000;
14 |
15 | --dark-bg: #121212;
16 | --dark-text: #e0e0e0;
17 |
18 | --dark-accent-strong: #1e3a8a;
19 | --dark-accent: #80cbc4;
20 |
21 | --dark-surface-odd: #1e1e2f;
22 | --dark-surface-even: #2a2a3f;
23 |
24 | --code-bg-light: #f6f8fa;
25 | --code-border-light: #e0e0e0;
26 |
27 | /* Radii */
28 | --radius-small: 0.25rem;
29 | --radius-medium: 0.5rem;
30 | }
31 |
32 | /* ---------------------------------------------------------
33 | SEMANTIC TOKENS (meaning-based)
34 | --------------------------------------------------------- */
35 | :root {
36 | /* Surfaces */
37 | --surface-1: var(--light-bg); /* main background */
38 | --surface-2: var(--code-bg-light); /* code blocks, elevated surfaces */
39 | --surface-3: var(--blue-light); /* subtle highlight */
40 |
41 | /* Table surfaces */
42 | --surface-table-odd: var(--surface-3);
43 | --surface-table-even: var(--surface-2);
44 |
45 | /* Text */
46 | --text-primary: var(--light-text);
47 | --text-secondary: #555;
48 | --text-inverse: #fff;
49 |
50 | /* Borders */
51 | --border-strong: var(--blue-strong);
52 | --border-subtle: var(--code-border-light);
53 |
54 | /* Links */
55 | --link: var(--blue-strong);
56 | --link-hover: var(--blue-strong);
57 | --link-visited: var(--purple-visited);
58 |
59 | /* Accents */
60 | --accent-primary: var(--blue-strong);
61 | --accent-strong: var(--blue-strong);
62 | }
63 | /* ---------------------------------------------------------
64 | SEMANTIC TOKENS — SYNTAX HIGHLIGHTING (future-proof)
65 | --------------------------------------------------------- */
66 | :root {
67 | /* Prolog syntax roles */
68 | --syntax-keyword: #00007f; /* e.g., :-, ->, is, not, etc. */
69 | --syntax-atom: #7f0055; /* atoms, functors without args */
70 | --syntax-functor: #005f7f; /* predicate names */
71 | --syntax-string: #007f00; /* "hello", 'world' */
72 | --syntax-number: #7f3f00; /* 123, 3.14 */
73 | --syntax-variable: #5f00af; /* X, Y, VarName */
74 | --syntax-comment: #777; /* % comment */
75 | --syntax-operator: #333; /* +, -, *, /, =.., etc. */
76 | --syntax-punctuation: #555; /* commas, parentheses */
77 | }
78 |
79 | /* ---------------------------------------------------------
80 | DARK MODE SEMANTIC OVERRIDES
81 | --------------------------------------------------------- */
82 | @media (prefers-color-scheme: dark) {
83 | :root {
84 | --surface-1: var(--dark-bg);
85 | --surface-2: var(--dark-surface-odd);
86 | --surface-3: var(--dark-surface-even);
87 | --surface-table-odd: var(--surface-3);
88 | --surface-table-even: var(--surface-2);
89 |
90 |
91 | --text-primary: var(--dark-text);
92 | --text-secondary: #aaa;
93 | --text-inverse: #000;
94 |
95 | --border-strong: var(--dark-accent-strong);
96 | --border-subtle: var(--dark-surface-even);
97 |
98 | --link: var(--dark-accent);
99 | --link-hover: var(--dark-accent);
100 | --link-visited: var(--dark-accent);
101 |
102 | --accent-primary: var(--dark-accent);
103 | --accent-strong: var(--dark-accent-strong);
104 | }
105 | :root {
106 | --syntax-keyword: #80cbc4;
107 | --syntax-atom: #ff79c6;
108 | --syntax-functor: #82aaff;
109 | --syntax-string: #c3e88d;
110 | --syntax-number: #f78c6c;
111 | --syntax-variable: #c792ea;
112 | --syntax-comment: #888;
113 | --syntax-operator: #ccc;
114 | --syntax-punctuation: #aaa;
115 | }
116 |
117 | }
118 |
119 | /* ---------------------------------------------------------
120 | BASE TYPOGRAPHY & LAYOUT
121 | --------------------------------------------------------- */
122 | body {
123 | font-family: sans-serif;
124 | margin: 1rem;
125 | line-height: 1.5;
126 | color: var(--text-primary);
127 | background: var(--surface-1);
128 | }
129 |
130 | code, pre {
131 | font-family: "JetBrains Mono", Consolas, monospace;
132 | }
133 |
134 | pre {
135 | background: var(--surface-2);
136 | border: 1px solid var(--border-subtle);
137 | padding: 1rem;
138 | overflow-x: auto;
139 | border-radius: var(--radius-small);
140 | }
141 |
142 | code {
143 | background: var(--surface-2);
144 | padding: 0.1rem 0.25rem;
145 | border-radius: var(--radius-medium);
146 | }
147 |
148 | table {
149 | border: 2px solid var(--border-strong);
150 | border-radius: var(--radius-medium);
151 | border-collapse: collapse;
152 | }
153 |
154 | tbody tr:nth-child(odd) {
155 | background-color: var(--surface-table-odd);
156 | }
157 |
158 | tbody tr:nth-child(even) {
159 | background-color: var(--surface-table-even);
160 | }
161 |
162 |
163 | th, td {
164 | padding: 0.25rem 0.75rem;
165 | }
166 |
167 | /* ---------------------------------------------------------
168 | LAYOUT STRUCTURE
169 | --------------------------------------------------------- */
170 | .layout {
171 | display: flex;
172 | flex-direction: row;
173 | align-items: flex-start;
174 | }
175 |
176 | .mainContent {
177 | flex: 1;
178 | padding: 1rem;
179 | order: 2;
180 | }
181 |
182 | #navigation {
183 | flex: 0 1 auto;
184 | margin-left: 1rem;
185 | order: 1;
186 | max-width: 20rem;
187 | min-width: 12rem;
188 | width: 18vw;
189 | word-wrap: break-word;
190 | }
191 |
192 | /* Responsive */
193 | @media (max-width: 768px) {
194 | .layout {
195 | flex-direction: column;
196 | }
197 | .mainContent {
198 | order: 1;
199 | }
200 | #navigation {
201 | order: 2;
202 | margin-left: 0;
203 | margin-top: 1rem;
204 | max-width: 100%;
205 | }
206 | }
207 |
208 | /* ---------------------------------------------------------
209 | ACCESSIBILITY
210 | --------------------------------------------------------- */
211 | .sr-only {
212 | position: absolute;
213 | width: 1px;
214 | height: 1px;
215 | padding: 0;
216 | margin: -1px;
217 | overflow: hidden;
218 | clip: rect(0, 0, 0, 0);
219 | white-space: nowrap;
220 | border: 0;
221 | }
222 |
223 | a:focus, button:focus, input:focus {
224 | outline: 2px solid var(--accent-primary);
225 | outline-offset: 2px;
226 | }
227 |
228 | a:hover {
229 | text-decoration: underline;
230 | }
231 |
232 | a:visited {
233 | color: var(--link-visited);
234 | }
235 |
236 | #navigation a:visited {
237 | color: var(--link);
238 | }
239 |
240 | /* ---------------------------------------------------------
241 | COMPONENTS
242 | --------------------------------------------------------- */
243 | .moduleDescription {
244 | margin: 2rem;
245 | }
246 |
247 | .predicates {
248 | margin: 1rem;
249 | }
250 |
251 | .predicate > h4 {
252 | background-color: var(--accent-primary);
253 | color: var(--text-inverse);
254 | padding: 0.16rem;
255 | border-radius: var(--radius-small);
256 | }
257 |
258 | .predicate > div {
259 | margin: 0.5rem;
260 | }
261 |
262 | .predicate > p {
263 | margin: 0.5rem;
264 | }
265 |
266 | .navlist {
267 | list-style-type: none;
268 | padding-left: 1.5rem;
269 | margin: 0;
270 | }
271 |
272 | .navlist li {
273 | margin: 0.25rem 0;
274 | }
275 |
276 | details summary {
277 | cursor: pointer;
278 | font-weight: bold;
279 | }
280 |
281 | /* Topbar */
282 | .topbar {
283 | display: flex;
284 | justify-content: space-between;
285 | align-items: center;
286 | border-bottom: solid var(--accent-primary) 10px;
287 | min-height: 4rem;
288 | }
289 |
290 | .topbarLink {
291 | text-decoration: none;
292 | color: var(--text-primary);
293 | font-size: 2rem;
294 | font-weight: bold;
295 | }
296 |
297 | /* Skip link */
298 | .skip-link {
299 | position: absolute;
300 | left: -999px;
301 | top: auto;
302 | width: 1px;
303 | height: 1px;
304 | overflow: hidden;
305 | }
306 |
307 | .skip-link:focus {
308 | position: static;
309 | width: auto;
310 | height: auto;
311 | margin: 0.5rem;
312 | padding: 0.5rem 1rem;
313 | background: var(--accent-primary);
314 | color: var(--text-inverse);
315 | z-index: 1000;
316 | }
317 |
318 | /* Footer */
319 | footer {
320 | text-align: center;
321 | margin-top: 2rem;
322 | }
323 |
--------------------------------------------------------------------------------
/main.pl:
--------------------------------------------------------------------------------
1 | :- use_module(library(charsio)).
2 | :- use_module(library(dcgs)).
3 | :- use_module(library(files)).
4 | :- use_module(library(format)).
5 | :- use_module(library(iso_ext)).
6 | :- use_module(library(lists)).
7 | :- use_module(library(pio)).
8 | :- use_module(library(ordsets)).
9 | :- use_module(library(time)).
10 | :- use_module(library(clpz)).
11 | :- use_module(library(dif)).
12 | :- use_module(library(debug)).
13 |
14 | :- use_module(teruel/teruel).
15 | :- use_module(djota/djota).
16 |
17 | :- dynamic(output_folder/1).
18 | :- dynamic(source_folder/1).
19 | :- dynamic(base_url/1).
20 |
% run(+SourceFolder, +OutputFolder)
%
% Top-level entry point (invoked from doclog.sh). Asserts the source and
% output folders, consults "doclog.config.pl" found inside SourceFolder,
% renders the navigation sidebars and footer once, then generates learn
% pages, copies configured files, generates module docs and the readme.
% Halts with 0 on success; on any error the error term is printed and
% the process halts with status 1.
21 | run(SourceFolder, OutputFolder) :-
22 | catch((
23 | portray_color(blue, doclog(2, 2, 0)),
24 | assertz(output_folder(OutputFolder)),
25 | assertz(source_folder(SourceFolder)),
26 | path_segments(SourceFolder, S1),
27 | append(S1, ["doclog.config.pl"], C1),
28 | path_segments(ConfigFile, C1),
29 | atom_chars(ConfigFileA, ConfigFile),
30 | consult(ConfigFileA),
31 | generate_nav_lib(NavLib),
32 | generate_nav_learn(NavLearn),
33 | generate_footer(Footer),
% Sections is threaded into every render/3 call so all templates see the
% shared sidebar and footer fragments.
34 | Sections = ["nav_lib"-NavLib, "nav_learn"-NavLearn, "footer"-Footer],
35 | generate_page_learn(Sections),
36 | do_copy_files,
37 | generate_page_docs(Sections),
38 | generate_readme(Sections),
39 | portray_color(green, done),
40 | halt), Error, (write(Error), nl, halt(1))).
41 | 
% docs_base_url(-BaseURL)
%
% BaseURL is the configured base_url/1 fact, defaulting to "/" when the
% config file does not define one.
42 | docs_base_url(BaseURL) :-
43 | ( base_url(X) ->
44 | BaseURL = X
45 | ; BaseURL = "/"
46 | ).
47 |
% do_copy_files
%
% Copies every copy_file(Src, Dest) pair declared in the config from the
% source folder to the output folder. If the config declares no
% copy_file/2 facts, findall/3 yields [] and this is a no-op.
48 | do_copy_files :-
49 | source_folder(S1),
50 | output_folder(O1),
51 | path_segments(S1, S2),
52 | path_segments(O1, O2),
53 | findall(A-B, copy_file(A,B), Files),
54 | maplist(do_copy_files_(S2, O2), Files).
55 | 
% do_copy_files_(+SourceSegments, +OutputSegments, +Src-Dest)
%
% Resolves the relative Src/Dest pair against the source and output
% folders and copies the file.
% NOTE(review): file_newer/2 is defined outside this view; from its use
% here and in generate_readme/1 it appears to succeed when the target is
% already up to date (copy skipped) — confirm its argument order.
56 | do_copy_files_(S2, O2, A1-B1) :-
57 | path_segments(A1, A2),
58 | path_segments(B1, B2),
59 | append(S2, A2, A3),
60 | append(O2, B2, B3),
61 | path_segments(A, A3),
62 | path_segments(B, B3),
63 | ( file_newer(A, B) ->
64 | portray_color(blue, skip_copy_file(A, B))
65 | ; portray_color(green, copy_file(A, B)),
66 | file_copy(A, B)
67 | ).
68 |
69 | % We do some path detection which will fail unless all paths are equally
70 | % separated. This relates a base path, a tail path and the segments of the
71 | % union.
72 | %
73 | % canonicalize("C:\\mypath", "my/otherpath", ["C:", "mypath", "my", "otherpath"]).
74 | canonicalize(Base, Path, ResSG) :-
75 | path_segments(Base, BaseSG),
% Joining two empty segments yields a one-char string consisting only of
% the platform path separator; extract that char as Separator.
76 | path_segments(Sep, ["", ""]),
77 | Sep = [Separator],
% Normalize both Windows backslashes and forward slashes in Path to the
% platform separator before splitting it into segments.
78 | replace_char('\\\\', Separator, Path, Path1),
79 | replace_char('/', Separator, Path1, Path2),
80 | path_segments(Path2, PathSG),
81 | append(BaseSG, PathSG, ResSG).
82 |
% generate_nav_lib(-NavHtml)
%
% Renders the library sidebar for the whole source_lib_folder tree by
% treating it as the "." directory and taking the rendered "nav" value
% from the root subnav/3 item.
83 | generate_nav_lib(NavHtml) :-
84 | source_folder(S1),
85 | source_lib_folder(S20),
86 | canonicalize(S1, S20, SFSG),
87 | subnav(SFSG, ".", Nav),
88 | member("nav"-NavHtml, Nav).
89 | 
% subnav(+BaseSegments, +Entry, -Item)
%
% Directory clause: recurses into the directory (omitting configured
% entries, sorted) and renders nav.html for its children.
90 | subnav(Base, Dir, ["name"-Dir, "nav"-Nav, "type"-"dir"]) :-
91 | append(Base, [Dir], DirSg),
92 | path_segments(RealDir, DirSg),
93 | directory_exists(RealDir),
94 | directory_files(RealDir, Files),
95 | files_not_omitted_files(RealDir, Files, FilesReal),
96 | sort(FilesReal, FilesSorted),
97 | maplist(subnav(DirSg), FilesSorted, Items),
98 | render("nav.html", ["items"-Items], Nav).
99 | 
% File clause: only matches "*.pl" files. The link is the path after the
% single "." segment (the nav root), with the ".pl" suffix replaced by
% ".html" and a leading '/' prepended.
100 | subnav(Base, File, ["name"-Name, "link"-['/'|Link], "type"-"file"]) :-
101 | append(Base, [File], FileSg),
102 | append(Name, ".pl", File),
103 | path_segments(FilePath, FileSg),
104 | file_exists(FilePath),
105 | append(_, ["."|LinkSg], FileSg),
106 | \+ member(".", LinkSg),
107 | path_segments(Link0, LinkSg),
108 | append(Link1, ".pl", Link0),
109 | append(Link1, ".html", Link).
110 | 
% files_not_omitted_files(+Base, +Files, -Kept)
%
% Kept is Files minus the entries whose full path matches an omit/1
% entry (resolved as <source_lib_folder>/./<OmitFile>).
% NOTE(review): OmitFiles is recomputed on every recursive step — it is
% loop-invariant and could be computed once by a caller.
111 | files_not_omitted_files(_, [], []).
112 | files_not_omitted_files(Base, [X|Xs], Ys) :-
113 | source_folder(S1),
114 | source_lib_folder(S2),
115 | canonicalize(S1, S2, SFSG),
116 | path_segments(SF, SFSG),
117 | path_segments(Separator, ["", ""]),
118 | findall(FullOmitFile,(
119 | omit(Omit),
120 | member(OmitFile, Omit),
121 | append([SF, Separator, ".", Separator, OmitFile], FullOmitFile)
122 | ), OmitFiles),
123 | append([Base, Separator, X], File),
124 | (
125 | member(File, OmitFiles) ->
126 | Ys = Ys0
127 | ; Ys = [X|Ys0]
128 | ),
129 | files_not_omitted_files(Base, Xs, Ys0).
130 |
% generate_nav_learn(-NavLearn)
%
% Renders the "learn" sidebar: one directory-style item per configured
% category, each containing its pages.
131 | generate_nav_learn(NavLearn) :-
132 | learn_pages_categories(Categories),
133 | maplist(generate_nav_learn_cat, Categories, Items),
134 | render("nav.html", ["items"-Items], NavLearn).
135 | 
% generate_nav_learn_cat(+Category, -SubNav)
%
% Collects every learn page belonging to Category and renders it as a
% nested nav. Page links swap the ".dj" source suffix for ".html" under
% the "/learn/" prefix.
136 | generate_nav_learn_cat(Category, SubNav) :-
137 | learn_pages(Pages),
138 | findall(Item, (
139 | member(Page, Pages),
140 | Page = page(Name, Category, Source),
141 | append(BaseFile, ".dj", Source),
142 | append(BaseFile, ".html", File),
143 | append("/learn/", File, Link),
144 | Item = ["name"-Name, "link"-Link, "type"-"file"]
145 | ), Items),
146 | render("nav.html", ["items"-Items], Text),
147 | SubNav = ["name"-Category, "nav"-Text, "type"-"dir"].
148 | 
% generate_footer(-Footer)
%
% Renders footer.html with the current build date (e.g. "Jan 01 2025").
149 | generate_footer(Footer) :-
150 | current_time(T),
151 | phrase(format_time("%b %d %Y", T), Time),
152 | render("footer.html", ["time"-Time], Footer).
153 |
% generate_page_learn(+Sections)
%
% Creates <output>/learn and renders every configured learn page into it.
154 | generate_page_learn(Sections) :-
155 | learn_pages(Pages),
156 | output_folder(OutputFolder),
157 | path_segments(OutputFolder, O1),
158 | append(O1, ["learn"], LearnFolderSg),
159 | path_segments(LearnFolder, LearnFolderSg),
160 | make_directory_path(LearnFolder),
161 | maplist(generate_page_learn_(Sections, LearnFolderSg), Pages).
162 | 
% generate_page_learn_(+Sections, +LearnFolderSg, +page(Name, Category, Source))
%
% Renders one Djot learn page (<source>/<learn folder>/<Source>) through
% learn.html into <output>/learn/<Source with .dj -> .html>.
% NOTE(review): as in do_copy_files_/3, file_newer/2 appears to succeed
% when the output is already up to date (page skipped) — confirm.
163 | generate_page_learn_(Sections, LearnFolderSg, page(Name, Category, Source)) :-
164 | source_folder(SF),
165 | learn_pages_source_folder(SourceFolder),
166 | project_name(ProjectName),
167 | docs_base_url(BaseURL),
168 | path_segments(SF, S0),
169 | path_segments(SourceFolder, S1),
170 | append(S0, S1, S2),
171 | append(S2, [Source], S3),
172 | path_segments(SourceFile, S3),
173 | append(F1, ".dj", Source),
174 | append(F1, ".html", F2),
175 | append(LearnFolderSg, [F2], O1),
176 | path_segments(OutputFile, O1),
177 | ( file_newer(SourceFile, OutputFile) ->
178 | portray_color(blue, skip_rendering_learn_page(Name, Category))
179 | ; portray_color(green, rendering_learn_page(Name, Category)),
180 | phrase_from_file(seq(Text), SourceFile),
181 | djot(Text, Html),
182 | Vars0 = ["project_name"-ProjectName, "base_url"-BaseURL, "name"-Name, "category"-Category, "content"-Html],
183 | append(Vars0, Sections, Vars),
184 | render("learn.html", Vars, LearnHtml),
185 | phrase_to_file(seq(LearnHtml), OutputFile)
186 | ).
187 |
% generate_readme(+Sections)
%
% Renders the project README (a Djot file) to <output_folder>/index.html
% through the "index.html" template. Skipped with a blue notice when the
% generated file is already newer than the README.
% NOTE(review): readme_file/1 is appended verbatim as a single extra
% segment under source_folder/1 — confirm configs specify it relative to
% the source folder.
generate_readme(Sections) :-
    source_folder(S1),
    path_segments(S1, S2),
    readme_file(R1),
    append(S2, [R1], R2),
    path_segments(ReadmeFile, R2),
    project_name(ProjectName),
    docs_base_url(BaseURL),
    output_folder(OutputFolder),
    path_segments(OutputFolder, OutputFolderSg),
    append(OutputFolderSg, ["index.html"], OutputFileSg),
    path_segments(OutputFile, OutputFileSg),
    ( file_newer(ReadmeFile, OutputFile) ->
      portray_color(blue, skip_readme)
    ; portray_color(green, readme),
      phrase_from_file(seq(ReadmeMd), ReadmeFile),
      djot(ReadmeMd, ReadmeHtml),
      Vars0 = ["project_name"-ProjectName, "base_url"-BaseURL, "readme"-ReadmeHtml],
      append(Vars0, Sections, Vars),
      render("index.html", Vars, IndexHtml),
      phrase_to_file(seq(IndexHtml), OutputFile)
    ).
210 |
% generate_page_docs(+Sections)
%
% Walks the (canonicalized) library source tree and generates one HTML page
% per .pl module, plus search-index.json and the static doclog.css/doclog.js
% assets, all under the output folder.
generate_page_docs(Sections) :-
    source_folder(S1),
    source_lib_folder(S2),
    canonicalize(S1, S2, Base),
    path_segments(DocsFolder, Base),
    output_folder(OutputFolder),
    make_directory_path(OutputFolder),
    directory_files(DocsFolder, Files0),
    sort(Files0, Files),
    path_segments(OutputFolder, Output),
    append(Output, ["search-index.json"], SearchIndexSg),
    path_segments(SearchIndex, SearchIndexSg),
    % Each processed file appends "{...}," entries to the JSON array; the
    % final "{}" dummy object absorbs the trailing comma so the array stays
    % valid JSON. setup_call_cleanup guarantees the stream is closed even
    % if processing throws.
    setup_call_cleanup(open(SearchIndex, write, SearchWriteStream),(
        format(SearchWriteStream, "[", []),
        maplist(process_file(Base, Output, Sections, SearchWriteStream), Files),
        format(SearchWriteStream, "{}]", [])
    ), close(SearchWriteStream)),
    append(Output, ["doclog.css"], F1),
    append(Output, ["doclog.js"], F2),
    path_segments(F3, F1),
    path_segments(F4, F2),
    file_copy("doclog.css", F3),
    file_copy("doclog.js", F4).
234 |
% process_file(+Base, +Output0, +Sections, +SearchWriteStream, +File0)
%
% Clause for regular .pl source files (the name must end in ".pl", which
% also makes this clause fail over to the directory clause for anything
% else). Reads the leading term and, when it is a module declaration,
% documents the module and appends its public predicates to the search
% index. Files whose generated page is already newer are skipped.
%
% Fix: the input stream is now opened/closed via setup_call_cleanup/3, so
% it can no longer leak when read_term/3 or document_file/6 throws or
% fails (the original open/close pair left the stream dangling on error).
process_file(Base, Output0, Sections, SearchWriteStream, File0) :-
    append(Base, [File0], FileSg),
    append(File1, ".pl", File0),
    append(File1, ".html", Output1),
    append(Output0, [Output1], OutputSg),
    path_segments(Output, OutputSg),
    path_segments(File, FileSg),
    file_exists(File),
    ( file_newer(File, Output) ->
      portray_color(blue, skip_file(File))
    ; portray_color(green, process_file(File)),
      setup_call_cleanup(
          open(File, read, FileStream),
          ( read_term(FileStream, Term, []),
            (
              Term = (:- module(ModuleName, PublicPredicates)) ->
              (
                  predicates_clean(PublicPredicates, PublicPredicates1, Ops),
                  document_file(File, Output, ModuleName, PublicPredicates1, Ops, Sections),
                  append_predicates_search_index(Output, PublicPredicates1, Ops, SearchWriteStream)
              )
            ; true
            )
          ),
          close(FileStream))
    ).
259 |
% process_file(+Base0, +Output0, +Sections, +SearchWriteStream, +Dir0)
%
% Clause for subdirectories: mirrors the directory under the output tree
% and recurses over its entries with the extended segment lists.
process_file(Base0, Output0, Sections, SearchWriteStream, Dir0) :-
    append(Base0, [Dir0], DirSegments),
    path_segments(Dir, DirSegments),
    directory_exists(Dir),
    append(Output0, [Dir0], OutSegments),
    path_segments(OutputDir, OutSegments),
    make_directory_path(OutputDir),
    directory_files(Dir, Entries),
    maplist(process_file(DirSegments, OutSegments, Sections, SearchWriteStream), Entries).
269 |
% predicates_clean(+Exports, -Predicates, -Ops)
%
% Partitions a module export list into plain predicate indicators and
% op/3 operator declarations.
predicates_clean([], [], []).
predicates_clean([Export|Exports], Predicates, Ops) :-
    (   Export = op(_,_,_) ->
        Ops = [Export|Ops0],
        Predicates = Predicates0
    ;   Predicates = [Export|Predicates0],
        Ops = Ops0
    ),
    predicates_clean(Exports, Predicates0, Ops0).
277 |
% append_predicates_search_index(+Output, +PublicPredicates, +Ops, +Stream)
%
% Emits one search-index entry per public predicate of a module page.
% The page link is the output path made relative to the output folder.
append_predicates_search_index(Output, PublicPredicates, Ops, SearchWriteStream) :-
    output_folder(OutputPrefix),
    append(OutputPrefix, RelativeLink, Output),
    maplist(append_search_index(RelativeLink, SearchWriteStream, Ops),
            PublicPredicates).
282 |
% append_search_index(+Output, +Stream, +Ops, +Predicate)
%
% Writes one search-index JSON object for a documented predicate, linking
% to its anchor on the module page.
%
% Fix: the link path is now escaped too — a relative path containing
% backslashes (e.g. built from Windows path segments, cf. the forward-slash
% note in document_file/6) would otherwise produce invalid JSON.
append_search_index(Output, SearchWriteStream, Ops, Predicate) :-
    predicate_string(Predicate, Ops, PredicateString),
    phrase(escape_js(PredicateStringSafe), PredicateString),
    phrase(escape_js(OutputSafe), Output),
    format(SearchWriteStream, "{\"link\": \"~s#~s\", \"predicate\": \"~s\"},", [OutputSafe, PredicateStringSafe, PredicateStringSafe]).
287 |
% append_search_index(+Output, +Stream, +Op)
%
% Writes one search-index JSON object for an exported operator; it links
% to the module page itself (operators have no per-predicate anchor).
%
% Fix: the link path is now escaped too — backslashes in a relative path
% would otherwise produce invalid JSON.
append_search_index(Output, SearchWriteStream, op(_,_,Operator)) :-
    atom_chars(Operator, NameUnsafe),
    phrase(escape_js(Name), NameUnsafe),
    phrase(escape_js(OutputSafe), Output),
    format(SearchWriteStream, "{\"link\": \"~s\", \"predicate\": \"~s\"},", [OutputSafe, Name]).
292 |
% escape_js(-Escaped)//
%
% Escapes a char list for embedding inside a JSON/JavaScript string
% literal written between double quotes.
%
% Fix: the original escaped only backslashes; an unescaped '"' in a
% predicate or operator name would terminate the JSON string early and
% corrupt search-index.json. Double quotes are now escaped as well.
escape_js([]) --> [].
escape_js([X|Xs]) -->
    [X],
    {
        X \= (\),
        X \= '"'
    },
    escape_js(Xs).
escape_js(Xs) -->
    "\\",
    escape_js(Xs0),
    { append("\\\\", Xs0, Xs) }.
escape_js(Xs) -->
    "\"",
    escape_js(Xs0),
    { append("\\\"", Xs0, Xs) }.
304 |
% replace_char(+From, +To, +Chars0, -Chars)
%
% Copies Chars0 to Chars, substituting every occurrence of From with To.
replace_char(_, _, [], []).
replace_char(From, To, [C|Cs], [D|Ds]) :-
    (   C = From ->
        D = To
    ;   D = C
    ),
    replace_char(From, To, Cs, Ds).
311 |
% Document every documentation comment found in the file;
% afterwards, public predicates that lack documentation are added with empty descriptions.
% document_file(+InputFile, +OutputFile, +ModuleName, +PublicPredicates,
%               +Ops, +Sections)
%
% Generates the HTML documentation page for one module: parses all
% documentation comments, warns about undocumented public predicates (and
% adds them with empty descriptions), extracts the module description, and
% renders everything through the "page.html" template.
document_file(InputFile, OutputFile, ModuleName, PublicPredicates, Ops, Sections) :-
    phrase_from_file(seq(FileText), InputFile),
    phrase(documented_predicates(Predicates0, Ops), FileText),
    public_undocumented_predicates(Predicates0, Ops, PublicPredicates, PublicUndocumented),
    % NOTE(review): this disjunction leaves a choicepoint when the first
    % branch succeeds; harmless as long as callers never backtrack here.
    ( PublicUndocumented \= [],
      % Only portray if there are undocumented items
      portray_color(yellow, undocumented_public_predicates(PublicUndocumented))
    ;
      green(Green),
      reset(Reset),
      format("~sFile fully documented.~s~n", [Green, Reset])
    ),
    % Undocumented public predicates still get an entry (empty description).
    maplist(document_predicate(Ops), PublicUndocumented, Predicates1),
    append(Predicates0, Predicates1, Predicates),
    phrase(module_description(ModuleDescriptionMd), FileText),
    djot(ModuleDescriptionMd, ModuleDescriptionHtml),
    atom_chars(ModuleName, ModuleNameStr),
    project_name(ProjectName),
    docs_base_url(BaseURL),
    source_folder(S1),
    source_lib_folder(S2),
    canonicalize(S1, S2, S5),
    path_segments(SF, S5),
    websource(WebSourceBase),
    % ExtraFile is InputFile with the library root prefix stripped.
    append(SF, ExtraFile, InputFile),
    % Joining two empty segments yields the platform path separator.
    path_segments(Separator, ["", ""]),
    append([Separator, LibraryUse, ".pl"], ExtraFile),
    % The use_module directive always has to use forward slashes!
    replace_char('\\', '/', LibraryUse, LibraryUse1),
    append(WebSourceBase, ExtraFile, WebSource),
    Vars0 = [
        "project_name"-ProjectName,
        "base_url"-BaseURL,
        "module_name"-ModuleNameStr,
        "module_description"-ModuleDescriptionHtml,
        "predicates"-Predicates,
        "websource"-WebSource,
        "library"-LibraryUse1
    ],
    append(Vars0, Sections, Vars),
    render("page.html", Vars, HtmlOut),
    phrase_to_file(seq(HtmlOut), OutputFile).
356 |
% documented_predicates(-Predicates, +Ops)//
%
% Scans the whole file text and collects one template-variable list
% (predicate string, display name, rendered HTML description) for every
% documentation block found. The cut commits to a successfully parsed
% block so the skipping clause below cannot re-consume it.
documented_predicates([], _) --> "".
documented_predicates([PredicateVars|Ps], Ops) -->
    predicate_documentation(Predicate, Name, DescriptionDjot),!,
    {
        predicate_string(Predicate, Ops, PredicateString),
        portray_clause(documenting(PredicateString)),
        djot(DescriptionDjot, Description),
        PredicateVars = ["predicate"-PredicateString, "name"-Name, "description"-Description]
    },
    documented_predicates(Ps, Ops).

% No documentation block parses here: skip ahead past a newline and retry.
documented_predicates(Ps, Ops) -->
    ... , "\n",
    documented_predicates(Ps, Ops).
371 |
% module_description(-Description)//
%
% Extracts the text of a /** ... */ comment anywhere in the file; the
% fallback clause yields "No description" when no such comment exists
% (clause order makes the fallback apply only after the first fails).
module_description(X) -->
    ... ,
    "/**",
    seq(X),
    "*/",
    ... .

module_description("No description") -->
    ... .
381 |
% predicate_string(+PredicateIndicator, +Ops, -String)
%
% Renders a predicate indicator for display. Names that are declared
% operators are parenthesised, "(Op)/N"; everything else — including DCG
% non-terminals written as Name//Arity — is printed with ~q.
predicate_string(PN/PA, Ops, PredicateString) :-
    (   member(op(_, _, PN), Ops) ->
        phrase(format_("(~a)/~d", [PN, PA]), PredicateString)
    ;   phrase(format_("~q", [PN/PA]), PredicateString)
    ).

predicate_string(PN//PA, _Ops, PredicateString) :-
    phrase(format_("~q", [PN//PA]), PredicateString).
395 |
% document_predicate(+Ops, +Predicate, -Vars)
%
% Template variables for a public predicate that carries no documentation:
% its rendered name, with an empty description.
document_predicate(Ops, Predicate, ["predicate"-Name, "name"-Name, "description"-""]) :-
    predicate_string(Predicate, Ops, Name).
399 |
% public_undocumented_predicates(+Documented, +Ops, +Public, -Undocumented)
%
% Filters the public predicate list down to those with no matching entry
% (keyed by rendered predicate string) in the documented list.
public_undocumented_predicates(_, _, [], []).
public_undocumented_predicates(Documented, Ops, [P|Ps], Undocumented) :-
    predicate_string(P, Ops, PString),
    (   member(["predicate"-PString|_], Documented) ->
        Undocumented = Undocumented0
    ;   Undocumented = [P|Undocumented0]
    ),
    public_undocumented_predicates(Documented, Ops, Ps, Undocumented0).
409 |
% predicate_documentation(-Predicate, -Name, -Description)//
%
% Parses one documentation block: a "%% Name(Args)" header line (Name must
% not span lines), a "%" line with only trailing whitespace, then the
% "%"-prefixed Djot description lines. The header text is re-parsed into a
% predicate indicator by predicate_name//1.
predicate_documentation(Predicate, Name, Description) -->
    "%% ", seq(Name), "\n%", { \+ member('\n', Name) },
    whites, "\n",
    predicate_description(Description),
    { phrase(predicate_name(Predicate), Name) }.
415 |
% predicate_name_seq(-Chars)//
%
% A run of characters allowed in a predicate name: anything except space,
% '(' and ')'. The recursive clause comes first, so the longest match is
% produced before shorter ones on backtracking.
predicate_name_seq([X|Xs]) -->
    [X],
    { maplist(dif(X), " ()") },
    predicate_name_seq(Xs).
predicate_name_seq([]) --> "".
421 |
% predicate_name(-PredicateIndicator)//
%
% Parses a rendered predicate header such as "foo(A, B)//" or "bar(X).",
% producing Name/Arity or Name//Arity. The arity is recovered by counting
% the commas between the parentheses (clpz #=). Clause order matters:
% DCG forms ("//") are tried before plain ones, and the argument-list
% forms before the zero-arity fallbacks.

% Name(Args)//  ->  Name//Arity
predicate_name(PredicateName//Arity) -->
    predicate_name_seq(PredicateNameCs),
    "(",
    seq(Args),
    ")//",
    {
        Commas #= Arity - 1,
        phrase(commas(Commas), Args),
        atom_chars(PredicateName, PredicateNameCs)
    }.

% Name//  ->  Name//0
predicate_name(PredicateName//0) -->
    predicate_name_seq(PredicateNameCs),
    "//",
    {
        atom_chars(PredicateName, PredicateNameCs)
    }.

% Name(Args) [trailing text]  ->  Name/Arity
predicate_name(PredicateName/Arity) -->
    predicate_name_seq(PredicateNameCs),
    "(",
    seq(Args),
    ")",
    ... ,
    {
        Commas #= Arity - 1,
        phrase(commas(Commas), Args),
        atom_chars(PredicateName, PredicateNameCs)
    }.

% Name.  ->  Name/0
predicate_name(PredicateName/0) -->
    predicate_name_seq(PredicateNameCs),
    ".",
    {
        atom_chars(PredicateName, PredicateNameCs)
    }.

% Fallback: bare Name with anything after it  ->  Name/0
predicate_name(PredicateName/0) -->
    predicate_name_seq(PredicateNameCs),
    ... ,
    {
        atom_chars(PredicateName, PredicateNameCs)
    }.
465 |
% predicate_description(-Description)//
%
% Collects the "%"-prefixed lines following a documentation header into a
% single char list, preserving line breaks. A "%" line containing only
% whitespace contributes an empty line; the block ends at the first line
% that is not a comment line.
predicate_description(Description) -->
    "% ", seq(Line), "\n",
    predicate_description(Description0),
    {
        append(Line, ['\n'|Description0], Description)
    }.
predicate_description(Description) -->
    "%", whites, "\n",
    predicate_description(Description0),
    {
        Description = ['\n'|Description0]
    }.
predicate_description("") --> [].
479 |
% whites//0: zero or more horizontal whitespace characters (space or tab).
whites --> [].
whites --> ( " " ; "\t" ), whites.
483 |
% commas(?N)//
%
% Parses a comma-separated argument list containing exactly N top-level
% commas (i.e. N+1 arguments); each segment between commas must itself be
% comma-free. Used by predicate_name//1 to recover a predicate's arity.
commas(0) -->
    seq(X),
    {
        \+ member(',', X)
    }.
commas(N) -->
    seq(X),
    {
        \+ member(',', X)
    },
    ",",
    commas(N0),
    {
        % N0 is always bound here: recursion bottoms out in the commas(0)
        % clause. NOTE(review): callers constrain N with clpz (#=); plain
        % is/2 works because N0 is ground, but #= would be more uniform.
        N is N0 + 1
    }.
499 |
% dirs_only(+Entries, +Output, -Dirs)
%
% Keeps only those entries that name existing directories when appended
% (as a segment) to the Output segment list.
dirs_only([], _, []).
dirs_only([Entry|Entries], Output, Dirs) :-
    append(Output, [Entry], PathSegments),
    path_segments(Path, PathSegments),
    (   directory_exists(Path) ->
        Dirs = [Entry|Dirs0]
    ;   Dirs = Dirs0
    ),
    dirs_only(Entries, Output, Dirs0).
513 |
% string_without(-Chars, +Block)//
%
% Consumes characters up to the first occurrence of the element Block
% (which is consumed but NOT included in Chars), or up to the end of the
% input if Block never occurs.
string_without([X|Xs], Block) -->
    [X],
    {
        X \= Block
    },
    string_without(Xs, Block).

% Terminator found: consume it and stop.
string_without([], Block) -->
    [X],
    {
        X = Block
    }.

% End of input reached without finding the terminator.
string_without([], _) -->
    [].
529 |
% ANSI SGR escape sequences (as char lists) used to color terminal output.
% Fix: the ESC-prefix construction was duplicated five times; it is now
% factored into sgr_seq/2 (ESC is code point 27, built via
% chars_utf8bytes/2 exactly as the originals did).
sgr_seq(Code, Seq) :-
    chars_utf8bytes(Esc, [27]),
    append(Esc, Code, Seq).

red(X)    :- sgr_seq("[0;31m", X).
green(X)  :- sgr_seq("[0;32m", X).
yellow(X) :- sgr_seq("[0;33m", X).
blue(X)   :- sgr_seq("[0;34m", X).
reset(X)  :- sgr_seq("[0m", X).
545 |
% portray_color(+Color, +X)
%
% Prints term X as portray_clause/1 would, wrapped in the ANSI color named
% by Color (red/green/yellow/blue). The trailing newline emitted by
% portray_clause_//1 is stripped (unique suffix split) so the reset code
% is written before the final newline.
portray_color(Color, X) :-
    call(Color, A), reset(B),
    phrase(portray_clause_(X), S),
    append(S1, "\n", S),
    format("~s~s~s~n", [A,S1,B]).
551 |
% file_newer(+A, +B)
%
% Succeeds when B exists and A's modification time is strictly earlier
% than B's — callers use it as "generated file B is up to date w.r.t.
% source A", so a true result means rendering can be skipped.
% NOTE(review): the name reads as "A is newer than B" but the check is
% the opposite. Also @</2 is standard-order comparison, which matches
% numeric order only if modification times are integers — confirm
% file_modification_time/2's result type.
file_newer(A, B) :-
    file_exists(B),
    file_modification_time(A, TA),
    file_modification_time(B, TB),
    TA @< TB.
557 |
--------------------------------------------------------------------------------