├── .prettierignore ├── .mailmap ├── .vscode ├── extensions.json └── settings.json ├── default-data ├── status │ ├── favicon.png │ └── recycler.png └── pages │ ├── smallest-federated-wiki │ ├── welcome-visitors │ ├── federated-wiki │ └── how-to-wiki ├── .npmignore ├── .prettierrc.json ├── .gitignore ├── test ├── package.json ├── asdf-test-page ├── random.js ├── defaultargs.js ├── page.js ├── sitemap.js └── server.js ├── .zed └── settings.json ├── index.js ├── lib ├── random_id.js ├── forward.js ├── plugins.js ├── security.js ├── defaultargs.js ├── sitemap.js ├── search.js ├── page.js └── server.js ├── eslint.config.js ├── .github └── workflows │ ├── test.yml │ └── check.yml ├── AUTHORS.txt ├── scripts └── update-authors.js ├── LICENSE.txt ├── ReadMe.md └── package.json /.prettierignore: -------------------------------------------------------------------------------- 1 | coverage 2 | -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | Nick Niemeir 2 | -------------------------------------------------------------------------------- /.vscode/extensions.json: -------------------------------------------------------------------------------- 1 | { 2 | "recommendations": ["esbenp.prettier-vscode", "dbaeumer.vscode-eslint"] 3 | } 4 | -------------------------------------------------------------------------------- /default-data/status/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fedwiki/wiki-server/HEAD/default-data/status/favicon.png -------------------------------------------------------------------------------- /default-data/status/recycler.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fedwiki/wiki-server/HEAD/default-data/status/recycler.png 
-------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | /data 2 | config.json 3 | .github/ 4 | .mailmap 5 | .prettier* 6 | .vscode/ 7 | .zed/ 8 | eslint.config.js 9 | scripts/ 10 | test/ 11 | coverage 12 | -------------------------------------------------------------------------------- /.prettierrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "semi": false, 3 | "singleQuote": true, 4 | "bracketSpacing": true, 5 | "bracketSameLine": true, 6 | "arrowParens": "avoid", 7 | "printWidth": 120 8 | } 9 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "editor.codeActionsOnSave": { 3 | "source.fixAll.eslint": "explicit" 4 | }, 5 | "editor.defaultFormatter": "esbenp.prettier-vscode", 6 | "editor.formatOnSave": true 7 | } 8 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | *.swp 3 | *.iml 4 | .idea/ 5 | .sass-cache 6 | .rvmrc 7 | config.json 8 | /data 9 | /spec/data 10 | /default-data/status/sitemap.json 11 | node_modules 12 | npm-debug.log 13 | temp 14 | coverage 15 | -------------------------------------------------------------------------------- /test/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "comment": "This exists so that the tests have a package to read", 3 | "name": "wiki-server", 4 | "dependencies": { 5 | "wiki-plugin-activity": "0.1", 6 | "wiki-plugin-video": "0.1" 7 | } 8 | } 9 | -------------------------------------------------------------------------------- /.zed/settings.json: 
-------------------------------------------------------------------------------- 1 | // Folder-specific settings 2 | // 3 | // For a full list of overridable settings, and general information on folder-specific settings, 4 | // see the documentation: https://zed.dev/docs/configuring-zed#settings-files 5 | { 6 | "format_on_save": "on" 7 | } 8 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | // **index.js** 2 | // Simple file so that if you require this directory 3 | // in node it instead requires ./lib/server.coffee 4 | // with coffee-script already loaded. 5 | require('coffeescript') 6 | require('coffeescript/register') 7 | 8 | module.exports = require('./lib/server') 9 | -------------------------------------------------------------------------------- /test/asdf-test-page: -------------------------------------------------------------------------------- 1 | { "title": "Asdf Test Page", "story": [ 2 | {"id": "a1", "type": "paragraph", "text": "this is the first paragraph"}, 3 | {"id": "a2", "type": "paragraph", "text": "this is the second paragraph"}, 4 | {"id": "a3", "type": "paragraph", "text": "this is the third paragraph"}, 5 | {"id": "a4", "type": "paragraph", "text": "this is the fourth paragraph"} 6 | ], 7 | journal: [] 8 | } 9 | -------------------------------------------------------------------------------- /test/random.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('node:test') 2 | const assert = require('node:assert/strict') 3 | 4 | const random = require('../lib/random_id') 5 | 6 | describe('random', () => { 7 | describe('#random_id', () => { 8 | it('should not be the same twice', () => { 9 | assert.notEqual(random(), random()) 10 | }) 11 | it('should be 16 digits', () => { 12 | assert.equal(random().length, 16) 13 | }) 14 | }) 15 | }) 16 | 
-------------------------------------------------------------------------------- /lib/random_id.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Federated Wiki : Node Server 3 | * 4 | * Copyright Ward Cunningham and other contributors 5 | * Licensed under the MIT license. 6 | * https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt 7 | */ 8 | 9 | // **random_id.coffee** 10 | // Simple random hex generator, takes an optional number of 11 | // chars that defaults to 16 and returns a random id. 12 | 13 | const random_id = (chars = 16) => [...Array(chars)].map(() => Math.floor(Math.random() * 16).toString(16)).join('') 14 | 15 | module.exports = random_id.random_id = random_id 16 | -------------------------------------------------------------------------------- /eslint.config.js: -------------------------------------------------------------------------------- 1 | import globals from 'globals' 2 | import pluginJs from '@eslint/js' 3 | 4 | /** @type {import('eslint').Linter.Config[]} */ 5 | export default [ 6 | pluginJs.configs.recommended, 7 | { 8 | rules: { 9 | 'no-unused-vars': 'warn', 10 | }, 11 | }, 12 | { ignores: ['client/*'] }, 13 | { 14 | languageOptions: { 15 | sourceType: 'commonjs', 16 | globals: { 17 | wiki: 'readonly', 18 | ...globals.node, 19 | ...globals.nodeBuiltin, 20 | ...globals.mocha, 21 | }, 22 | }, 23 | }, 24 | ] 25 | -------------------------------------------------------------------------------- /test/defaultargs.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('node:test') 2 | const assert = require('node:assert/strict') 3 | 4 | const defaultargs = require('../lib/defaultargs') 5 | 6 | describe('defaultargs', () => { 7 | describe('#defaultargs()', () => { 8 | it('should not write over give args', () => { 9 | assert.equal(defaultargs({ port: 1234 }).port, 1234) 10 | }) 11 | it('should write non give args', () => { 12 | 
assert.equal(defaultargs().port, 3000) 13 | }) 14 | it('should modify dependant args', () => { 15 | assert.equal(defaultargs({ data: '/tmp/asdf/' }).db, '/tmp/asdf/pages') 16 | }) 17 | }) 18 | }) 19 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | branches: [main] 8 | 9 | jobs: 10 | build: 11 | runs-on: ubuntu-latest 12 | 13 | strategy: 14 | matrix: 15 | # Releases https://github.com/nodejs/release#release-schedule 16 | node-version: 17 | - 20.x # Maintenance 18 | - 22.x # LTS 19 | - 24.x # Current 20 | 21 | steps: 22 | - uses: actions/checkout@v4 23 | - name: Use Node.js ${{ matrix.node-version }} 24 | uses: actions/setup-node@v4 25 | with: 26 | node-version: ${{ matrix.node-version }} 27 | - run: npm ci 28 | - run: npm test 29 | -------------------------------------------------------------------------------- /AUTHORS.txt: -------------------------------------------------------------------------------- 1 | Authors ordered by first contribution 2 | 3 | Ward Cunningham 4 | Nick Niemeir 5 | Patrick Mueller 6 | Erkan Yilmaz 7 | Tom Lee 8 | Nicholas Hallahan 9 | Paul Rodwell 10 | Austin King 11 | Christian Smith 12 | Gui13 13 | Merlyn Albery-Speyer 14 | Marcin Cieslak 15 | enyst 16 | Peter deHaan 17 | winckell benjamin 18 | Eric Dobbs 19 | Joshua Benuck 20 | Tom Lieber 21 | Andrew Shell 22 | jon r 23 | -------------------------------------------------------------------------------- /scripts/update-authors.js: -------------------------------------------------------------------------------- 1 | const gitAuthors = require('grunt-git-authors') 2 | 3 | // list of contributers from prior the split out of Smallest Federated Wiki repo. 
4 | const priorAuthors = [ 5 | 'Ward Cunningham ', 6 | 'Nick Niemeir ', 7 | 'Patrick Mueller ', 8 | 'Erkan Yilmaz ', 9 | 'Tom Lee ', 10 | 'Nicholas Hallahan ', 11 | 'Paul Rodwell ', 12 | 'Austin King ', 13 | ] 14 | 15 | gitAuthors.updatePackageJson({ priorAuthors: priorAuthors, order: 'date' }, error => { 16 | if (error) { 17 | console.log('Error: ', error) 18 | } 19 | }) 20 | 21 | gitAuthors.updateAuthors( 22 | { 23 | priorAuthors: priorAuthors.reverse(), 24 | }, 25 | (error, filename) => { 26 | if (error) { 27 | console.log('Error: ', error) 28 | } else { 29 | console.log(filename, 'updated') 30 | } 31 | }, 32 | ) 33 | -------------------------------------------------------------------------------- /.github/workflows/check.yml: -------------------------------------------------------------------------------- 1 | name: Prettier Check 2 | 3 | on: [pull_request] 4 | 5 | jobs: 6 | prettier: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - name: Checkout code 11 | uses: actions/checkout@v4 12 | 13 | - name: Set up Node.js 14 | uses: actions/setup-node@v4 15 | with: 16 | node-version: '20' 17 | 18 | - name: Install dependencies 19 | run: npm ci 20 | 21 | - name: Run Prettier check 22 | run: npm run prettier:check 23 | 24 | # - name: Annotate Pull Request with Results 25 | # if: failure() 26 | # uses: actions/github-script@v7 27 | # with: 28 | # script: | 29 | # github.rest.issues.createComment({ 30 | # issue_number: context.issue.number, 31 | # owner: context.repo.owner, 32 | # repo: context.repo.repo, 33 | # body: '⚠️ Prettier found formatting issues. Please fix them.' 
34 | # }) 35 | -------------------------------------------------------------------------------- /default-data/pages/smallest-federated-wiki: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Smallest Federated Wiki", 3 | "story": [ 4 | { 5 | "type": "paragraph", 6 | "id": "4b6f52ddeb6ebb39", 7 | "text": "The Smallest Federated Wiki was the founding project that has grown into many sites running substantially evolved client and server software. See [[Federated Wiki]]" 8 | } 9 | ], 10 | "journal": [ 11 | { 12 | "type": "create", 13 | "item": { 14 | "title": "Smallest Federated Wiki", 15 | "story": [] 16 | }, 17 | "date": 1418658255368 18 | }, 19 | { 20 | "item": { 21 | "type": "factory", 22 | "id": "4b6f52ddeb6ebb39" 23 | }, 24 | "id": "4b6f52ddeb6ebb39", 25 | "type": "add", 26 | "date": 1418658275922 27 | }, 28 | { 29 | "type": "edit", 30 | "id": "4b6f52ddeb6ebb39", 31 | "item": { 32 | "type": "paragraph", 33 | "id": "4b6f52ddeb6ebb39", 34 | "text": "The Smallest Federated Wiki was the founding project that has grown into many sites running substantially evolved client and server software. 
See [[Federated Wiki]]" 35 | }, 36 | "date": 1418658294473 37 | } 38 | ] 39 | } -------------------------------------------------------------------------------- /lib/forward.js: -------------------------------------------------------------------------------- 1 | const init = (app, emitter) => { 2 | let sockets = [] 3 | app.io.on('connection', socket => { 4 | let listeners = [] 5 | sockets.push(socket) 6 | console.log('client connected:', socket.handshake.address) 7 | socket.on('disconnect', reason => { 8 | console.log('client disconnected:', socket.handshake.address, reason) 9 | for (let { sProducer, listener } of listeners) { 10 | console.log('removing listener:', sProducer) 11 | emitter.removeListener(sProducer, listener) 12 | } 13 | listeners = [] 14 | let i = sockets.indexOf(socket) 15 | sockets.splice(i, 1) 16 | }) 17 | socket.on('unsubscribe', sProducer => { 18 | console.log('unsubscribing listener:', socket.handshake.address, sProducer) 19 | for (let [i, { slugItem, listener }] of listeners.entries()) { 20 | if (slugItem == sProducer) { 21 | console.log('removing listener:', sProducer) 22 | emitter.removeListener(sProducer, listener) 23 | listeners.splice(i, 1) 24 | } 25 | } 26 | }) 27 | socket.on('subscribe', sProducer => { 28 | let listener = result => { 29 | console.log('forwarding:', socket.handshake.address, result) 30 | socket.emit(sProducer, { slugItem: sProducer, result }) 31 | } 32 | console.log(`registering listener:`, socket.handshake.address, sProducer) 33 | emitter.on(sProducer, listener) 34 | listeners.push({ sProducer, listener }) 35 | }) 36 | }) 37 | } 38 | module.exports = { init } 39 | -------------------------------------------------------------------------------- /lib/plugins.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Federated Wiki : Node Server 3 | * 4 | * Copyright Ward Cunningham and other contributors 5 | * Licensed under the MIT license. 
6 | * https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt 7 | */ 8 | 9 | // support server-side plugins 10 | 11 | const fs = require('node:fs') 12 | const { pathToFileURL } = require('node:url') 13 | // forward = require './forward' 14 | 15 | module.exports = exports = argv => { 16 | // NOTE: plugins are now in their own package directories alongside this one... 17 | // Plugins are in directories of the form wiki-package-* 18 | // those with a server component will have a server directory 19 | 20 | const plugins = {} 21 | 22 | // http://stackoverflow.com/questions/10914751/loading-node-js-modules-dynamically-based-on-route 23 | 24 | const startServer = (params, plugin) => { 25 | const server = `${argv.packageDir}/${plugin}/server/server.js` 26 | fs.access(server, fs.constants.F_OK, err => { 27 | if (!err) { 28 | console.log('starting plugin', plugin) 29 | import(pathToFileURL(server)) 30 | .then(exported => { 31 | plugins[plugin] = exported 32 | plugins[plugin].startServer?.(params) 33 | }) 34 | .catch(e => { 35 | console.log('failed to start plugin', plugin, e?.stack || e) 36 | }) 37 | } 38 | }) 39 | } 40 | 41 | const startServers = params => { 42 | // emitter = new events.EventEmitter() 43 | // forward.init params.app, emitter 44 | // params.emitter = emitter 45 | 46 | Object.keys(require.main.require('./package').dependencies) 47 | .filter(depend => depend.startsWith('wiki-plugin')) 48 | .forEach(plugin => { 49 | startServer(params, plugin) 50 | }) 51 | } 52 | 53 | return { startServers } 54 | } 55 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2011-2015 Ward Cunningham and other contributors 2 | 3 | This software consists of voluntary contributions made by many 4 | individuals. 
For exact contribution history, see the revision history 5 | available at https://github.com/fedwiki/wiki-server 6 | 7 | The following license applies to all parts of this software except as 8 | documented below: 9 | 10 | ==== 11 | 12 | Permission is hereby granted, free of charge, to any person obtaining a copy 13 | of this software and associated documentation files (the "Software"), to deal 14 | in the Software without restriction, including without limitation the rights 15 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 16 | copies of the Software, and to permit persons to whom the Software is 17 | furnished to do so, subject to the following conditions: 18 | 19 | The above copyright notice and this permission notice shall be included in 20 | all copies or substantial portions of the Software. 21 | 22 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 23 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 24 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 25 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 26 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 27 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 28 | THE SOFTWARE. 29 | 30 | ==== 31 | 32 | All files located in the default-data directory are licensed under a 33 | Creative Commons Attribution-ShareAlike 4.0 International License. 34 | 35 | CC BY-SA 4.0 : http://creativecommons.org/licenses/by-sa/4.0/ 36 | 37 | ==== 38 | 39 | All files located in the node_modules are externally maintained libraries 40 | used by this software which have their own licenses; we recommend you read 41 | them, as their terms may differ from the terms above. 
42 | -------------------------------------------------------------------------------- /ReadMe.md: -------------------------------------------------------------------------------- 1 | # Wiki-Server 2 | 3 | Federated wiki node.js server as a npm module. 4 | 5 | **N.B.** Following a code re-organization over the New Year 2013/4 this 6 | repository now only contains the code for the node.js server implementation. 7 | You will also notice that the GitHub reposistory name and location has 8 | changed, it is now fedwiki/wiki-server. It you have previously forked, 9 | and cloned, this repository you will want to update your clone's upstream 10 | remote to reflect this change. 11 | 12 | This package is now published as ```wiki-server```. The ```wiki``` 13 | package which depends on this package, to provide the federated wiki server, 14 | can be found as [fedwiki/wiki](https://github.com/fedwiki/wiki). 15 | 16 | * * * 17 | 18 | ## Goals 19 | 20 | Over its first two years the Smallest Federated Wiki (SFW) project explored 21 | many ways that a wiki could embrace HTML5 and related technologies. Here 22 | we will cautiously reorganize this work as small independent modules that 23 | favor ongoing innovation. 24 | 25 | We proceed by dividing SFW first into large pieces and then these into 26 | smaller pieces as we simplify and regularize the communications between them. 27 | We now favor the node.js module and event conventions, dependency injection, 28 | and increased separation between the DOM and the logic that manages it. 29 | 30 | Federated wiki's single-page application reads page content from many sources 31 | and writes updates to a few. Read-write server backends are maintained in 32 | ruby (sinatra) and node (express). Read-only servers have been realized 33 | with static files and cgi scripts. Encouraging experiments have exploited 34 | exotic service architectures such as CCNx content-addressable networks. 
35 | 36 | ## Participation 37 | 38 | We're happy to take issues or pull requests regarding the goals and 39 | their implementation within this code. 40 | 41 | A wider-ranging conversation is documented in the GitHub ReadMe of the 42 | founding project, [SFW](https://github.com/WardCunningham/Smallest-Federated-Wiki/blob/master/ReadMe.md). 43 | 44 | ## License 45 | 46 | You may use the Wiki under either the 47 | [MIT License](https://github.com/WardCunningham/wiki/blob/master/LICENSE.txt) 48 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "wiki-server", 3 | "description": "A Federated Wiki Server", 4 | "version": "0.26.0-rc.4", 5 | "author": { 6 | "name": "Ward Cunningham", 7 | "email": "ward@c2.com", 8 | "url": "http://ward.fed.wiki.org" 9 | }, 10 | "contributors": [ 11 | "Austin King ", 12 | "Paul Rodwell ", 13 | "Nicholas Hallahan ", 14 | "Tom Lee ", 15 | "Erkan Yilmaz ", 16 | "Patrick Mueller ", 17 | "Nick Niemeir ", 18 | "Ward Cunningham ", 19 | "Christian Smith ", 20 | "Gui13 ", 21 | "Merlyn Albery-Speyer ", 22 | "Marcin Cieslak ", 23 | "enyst ", 24 | "Peter deHaan ", 25 | "winckell benjamin ", 26 | "Eric Dobbs ", 27 | "Joshua Benuck ", 28 | "Tom Lieber ", 29 | "Andrew Shell ", 30 | "jon r " 31 | ], 32 | "dependencies": { 33 | "body-parser": "^2.2.0", 34 | "client-sessions": "^0.8.0", 35 | "coffeescript": "^2.5.0", 36 | "cookie-parser": "^1.4.4", 37 | "dompurify": "^3.1.0", 38 | "errorhandler": "^1.5.1", 39 | "express": "^5.1.0", 40 | "express-hbs": "^2.5.0", 41 | "flates": "0.0.5", 42 | "jsdom": "^26.0.0", 43 | "method-override": "^3.0.0", 44 | "minisearch": "^7.1.0", 45 | "morgan": "^1.10.1", 46 | "write-file-atomic": "^6.0.0", 47 | "xml2js": "^0.6.2" 48 | }, 49 | "scripts": { 50 | "prettier:format": "prettier --write './**/*.js'", 51 | "prettier:check": "prettier --check ./**/*.js", 52 | "test": "node --test", 
53 | "update-authors": "node scripts/update-authors.js" 54 | }, 55 | "devDependencies": { 56 | "@eslint/js": "^9.32.0", 57 | "eslint": "^9.32.0", 58 | "globals": "^16.1.0", 59 | "grunt-git-authors": "^3.2.0", 60 | "prettier": "^3.5.3", 61 | "supertest": "^7.1.4", 62 | "wiki-client": "^0.31.3", 63 | "wiki-plugin-activity": "0.7", 64 | "wiki-plugin-video": "^0.4" 65 | }, 66 | "engines": { 67 | "node": ">=20.x" 68 | }, 69 | "license": "MIT", 70 | "repository": { 71 | "type": "git", 72 | "url": "https://github.com/fedwiki/wiki-server.git" 73 | }, 74 | "bugs": { 75 | "url": "https://github.com/fedwiki/wiki-server/issues" 76 | } 77 | } 78 | -------------------------------------------------------------------------------- /lib/security.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Federated Wiki : Node Server 3 | * 4 | * Copyright Ward Cunningham and other contributors 5 | * Licensed under the MIT license. 6 | * https://github.com/fedwiki/wiki-node-server/blob/master/LICENSE.txt 7 | */ 8 | // **security.js** 9 | // Module for default site security. 10 | // 11 | // This module is not intented for use, but is here to catch a problem with 12 | // configuration of security. It does not provide any authentication, but will 13 | // allow the server to run read-only. 14 | 15 | // #### Requires #### 16 | const fs = require('node:fs') 17 | 18 | // Export a function that generates security handler 19 | // when called with options object. 20 | module.exports = exports = (log, loga, argv) => { 21 | const security = {} 22 | 23 | // #### Private utility methods. 
#### 24 | 25 | const user = '' 26 | 27 | let owner = '' 28 | 29 | // save the admin user, and location of the identity file 30 | const { admin, id: idFile } = argv 31 | 32 | // #### Public stuff #### 33 | 34 | security.authenticate_session = () => { 35 | ;(req, res, next) => { 36 | // not possible to login, so always false 37 | req.isAuthenticated = () => false 38 | return next() 39 | } 40 | } 41 | 42 | // Retrieve owner infomation from identity file in status directory 43 | security.retrieveOwner = cb => { 44 | fs.access(idFile, fs.constants.F_OK, err => { 45 | if (!err) { 46 | fs.readFile(idFile, (err, data) => { 47 | if (err) return cb(err) 48 | owner += data 49 | cb() 50 | }) 51 | } else { 52 | owner = '' 53 | cb() 54 | } 55 | }) 56 | } 57 | 58 | // Return the owners name 59 | security.getOwner = () => { 60 | let ownerName 61 | if (!owner.name) { 62 | ownerName = '' 63 | } else { 64 | ownerName = owner.name 65 | } 66 | return ownerName 67 | } 68 | security.getUser = req => { 69 | return '' 70 | } 71 | 72 | security.isAuthorized = req => { 73 | // nobody is authorized - everything is read-only 74 | // unless legacy support, when unclaimed sites can be editted. 
75 | if (owner == '') { 76 | if (argv.security_legacy) { 77 | return true 78 | } else { 79 | return false 80 | } 81 | } else { 82 | return false 83 | } 84 | } 85 | // Wiki server admin 86 | security.isAdmin = () => { 87 | return false 88 | } 89 | security.defineRoutes = (app, cors, updateOwner) => { 90 | // default security does not have any routes 91 | } 92 | 93 | return security 94 | } 95 | -------------------------------------------------------------------------------- /test/page.js: -------------------------------------------------------------------------------- 1 | const { describe, it } = require('node:test') 2 | const assert = require('node:assert/strict') 3 | 4 | const path = require('node:path') 5 | const random = require('../lib/random_id') 6 | const testid = random() 7 | const argv = require('../lib/defaultargs')({ 8 | data: path.join('/tmp', 'sfwtests', testid), 9 | root: path.join(__dirname, '..'), 10 | packageDir: path.join(__dirname, '..', 'node_modules'), 11 | security_legacy: true, 12 | }) 13 | const page = require('../lib/page')(argv) 14 | const fs = require('node:fs') 15 | 16 | const testpage = { title: 'Asdf' } 17 | 18 | console.log('testid', testid) 19 | 20 | describe('page', () => { 21 | describe('#page.put()', () => { 22 | it('should save a page', async () => { 23 | return new Promise(resolve => { 24 | page.put('asdf', testpage, e => { 25 | if (e) throw e 26 | resolve() 27 | }) 28 | }) 29 | }) 30 | }) 31 | describe('#page.get()', () => { 32 | it('should get a page if it exists', async () => { 33 | return new Promise(resolve => { 34 | page.get('asdf', (e, got) => { 35 | if (e) throw e 36 | assert.equal(got.title, 'Asdf') 37 | resolve() 38 | }) 39 | }) 40 | }) 41 | it('should copy a page from default if nonexistant in db', async () => { 42 | return new Promise(resolve => { 43 | page.get('welcome-visitors', (e, got) => { 44 | if (e) throw e 45 | assert.equal(got.title, 'Welcome Visitors') 46 | resolve() 47 | }) 48 | }) 49 | }) 50 | // note: here 
we assume the wiki-plugin-activity repo has been cloned into an adjacent directory 51 | it('should copy a page from plugins if nonexistant in db', async () => { 52 | return new Promise(resolve => { 53 | page.get('recent-changes', (e, got) => { 54 | if (e) throw e 55 | assert.equal(got.title, 'Recent Changes') 56 | resolve() 57 | }) 58 | }) 59 | }) 60 | // note: here we assume the wiki-plugin-activity repo has been cloned into an adjacent directory 61 | it('should mark a page from plugins with the plugin name', async () => { 62 | return new Promise(resolve => { 63 | page.get('recent-changes', (e, got) => { 64 | if (e) throw e 65 | assert.equal(got.plugin, 'activity') 66 | resolve() 67 | }) 68 | }) 69 | }) 70 | it('should create a page if it exists nowhere', async () => { 71 | return new Promise(resolve => { 72 | page.get(random(), (e, got) => { 73 | if (e) throw e 74 | assert.equal(got, 'Page not found') 75 | resolve() 76 | }) 77 | }) 78 | }) 79 | it('should eventually write the page to disk', async () => { 80 | return new Promise(resolve => { 81 | page.get('asdf', (e, got) => { 82 | if (e) throw e 83 | const page = JSON.parse(fs.readFileSync(path.join(path.sep, 'tmp', 'sfwtests', testid, 'pages', 'asdf'))) 84 | assert.equal(got.title, page.title) 85 | resolve() 86 | }) 87 | }) 88 | }) 89 | }) 90 | }) 91 | -------------------------------------------------------------------------------- /lib/defaultargs.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Federated Wiki : Node Server 3 | * 4 | * Copyright Ward Cunningham and other contributors 5 | * Licensed under the MIT license. 6 | * https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt 7 | */ 8 | 9 | // **defaultargs.coffee** when called on the argv object this 10 | // module will create reasonable defaults for options not supplied, 11 | // based on what information is provided. 
12 | const path = require('node:path') 13 | 14 | const getUserHome = () => { 15 | return process.env.HOME || process.env.HOMEPATH || process.env.USERPROFILE 16 | } 17 | 18 | module.exports = argv => { 19 | argv = argv || {} 20 | argv.root ||= __dirname 21 | // the directory that contains all the packages that makeup the wiki 22 | argv.packageDir ||= path.join(argv.root, '..') 23 | argv.port ||= 3000 24 | argv.home ||= 'welcome-visitors' 25 | argv.data ||= path.join(getUserHome(), '.wiki') // see also cli 26 | argv.client ||= path.join(argv.packageDir, 'wiki-client', 'client') 27 | argv.db ||= path.join(argv.data, 'pages') 28 | argv.status ||= path.join(argv.data, 'status') 29 | argv.assets ||= path.join(argv.data, 'assets') 30 | argv.recycler ||= path.join(argv.data, 'recycle') 31 | argv.commons ||= path.join(argv.data, 'commons') 32 | argv.url ||= `http://localhost${argv.port === 80 ? '' : ':' + argv.port}` 33 | argv.id ||= path.join(argv.status, 'owner.json') 34 | argv.uploadLimit ||= '5mb' 35 | argv.cookieSecret ||= require('crypto').randomBytes(64).toString('hex') 36 | argv.secure_cookie ||= false 37 | argv.session_duration ||= 7 38 | argv.neighbors ||= '' 39 | argv.debug ||= false 40 | argv.test ||= false 41 | 42 | if (typeof argv.database === 'string') { 43 | argv.database = JSON.parse(argv.database) 44 | } 45 | argv.database ||= {} 46 | argv.database.type ||= './page' 47 | if (argv.database.type.charAt(0) === '.') { 48 | if (argv.database.type != './page') { 49 | console.log('\n\nWARNING: This storage option is depeciated.') 50 | console.log(' See ReadMe for details of the changes required.\n\n') 51 | } 52 | } else { 53 | argv.database.type = 'wiki-storage-' + argv.database.type 54 | } 55 | 56 | argv.security_type ||= './security' 57 | if (argv.security_type === './security') { 58 | console.log('\n\nINFORMATION: Using default security module.') 59 | } else { 60 | argv.security_type = 'wiki-security-' + argv.security_type 61 | } 62 | argv.security_legacy ||= 
false 63 | 64 | // resolve all relative paths 65 | argv.root = path.resolve(argv.root) 66 | argv.packageDir = path.resolve(argv.packageDir) 67 | argv.data = path.resolve(argv.data) 68 | argv.client = path.resolve(argv.client) 69 | argv.db = path.resolve(argv.db) 70 | argv.status = path.resolve(argv.status) 71 | argv.assets = path.resolve(argv.assets) 72 | argv.recycler = path.resolve(argv.recycler) 73 | argv.commons = path.resolve(argv.commons) 74 | argv.id = path.resolve(argv.id) 75 | 76 | if (/node_modules/.test(argv.data)) { 77 | console.log('\n\nWARNING : The dafault data path is not a safe place.') 78 | console.log(' : by using ', argv.data, ' your pages will be lost when packages are updated.') 79 | console.log(' : You are strongly advised to use an alternative directory.') 80 | console.log(' : See the wiki package ReadMe for how to do this.\n\n') 81 | } 82 | return argv 83 | } 84 | -------------------------------------------------------------------------------- /test/sitemap.js: -------------------------------------------------------------------------------- 1 | const { describe, it, before, after } = require('node:test') 2 | const assert = require('node:assert/strict') 3 | 4 | const supertest = require('supertest') 5 | const fs = require('node:fs') 6 | const server = require('..') 7 | const path = require('node:path') 8 | const random = require('../lib/random_id') 9 | const testid = random() 10 | const argv = require('../lib/defaultargs')({ 11 | data: path.join('/tmp', 'sfwtests', testid), 12 | port: 55556, 13 | security_legacy: true, 14 | test: true, 15 | }) 16 | 17 | describe('sitemap', () => { 18 | let app = {} 19 | let runningServer = null 20 | 21 | before(done => { 22 | app = server(argv) 23 | app.once('owner-set', () => { 24 | runningServer = app.listen(app.startOpts.port, app.startOpts.host, done) 25 | }) 26 | }) 27 | 28 | after(() => { 29 | runningServer.close() 30 | }) 31 | 32 | const request = supertest('http://localhost:55556') 33 | 
fs.mkdirSync(path.join('/tmp', 'sfwtests', testid, 'pages'), { recursive: true }) 34 | 35 | // location of the sitemap 36 | const sitemapLoc = path.join('/tmp', 'sfwtests', testid, 'status', 'sitemap.json') 37 | 38 | it('new site should have an empty sitemap', async () => { 39 | await request 40 | .get('/system/sitemap.json') 41 | .expect(200) 42 | .expect('Content-Type', /json/) 43 | .then(res => { 44 | assert.equal(res.body.length, 0) 45 | }) 46 | }) 47 | 48 | it('creating a page should add it to the sitemap', async () => { 49 | const body = JSON.stringify({ 50 | type: 'create', 51 | item: { 52 | title: 'Asdf Test Page', 53 | story: [ 54 | { id: 'a1', type: 'paragraph', text: 'this is the first paragraph' }, 55 | { id: 'a2', type: 'paragraph', text: 'this is the second paragraph' }, 56 | { id: 'a3', type: 'paragraph', text: 'this is the [[third]] paragraph' }, 57 | { id: 'a4', type: 'paragraph', text: 'this is the fourth paragraph' }, 58 | ], 59 | }, 60 | date: 1234567890123, 61 | }) 62 | 63 | await request 64 | .put('/page/adsf-test-page/action') 65 | .send('action=' + body) 66 | .expect(200) 67 | // sitemap update does not happen until after the put has returned, so wait for it to finish 68 | .then(() => new Promise(resolve => app.sitemaphandler.once('finished', () => resolve()))) 69 | .then( 70 | () => { 71 | const sitemap = JSON.parse(fs.readFileSync(sitemapLoc)) 72 | assert.equal(sitemap[0].slug, 'adsf-test-page') 73 | assert.equal(sitemap[0].synopsis, 'this is the first paragraph') 74 | assert.deepEqual(sitemap[0].links, { third: 'a3' }) 75 | }, 76 | err => { 77 | throw err 78 | }, 79 | ) 80 | }) 81 | 82 | it('synopsis should reflect edit to first paragraph', async () => { 83 | const body = JSON.stringify({ 84 | type: 'edit', 85 | item: { id: 'a1', type: 'paragraph', text: 'edited' }, 86 | id: 'a1', 87 | }) 88 | 89 | await request 90 | .put('/page/adsf-test-page/action') 91 | .send('action=' + body) 92 | .expect(200) 93 | .then(() => new Promise(resolve => 
app.sitemaphandler.once('finished', () => resolve()))) 94 | .then(() => { 95 | const sitemap = JSON.parse(fs.readFileSync(sitemapLoc)) 96 | assert.equal(sitemap[0].slug, 'adsf-test-page') 97 | assert.equal(sitemap[0].synopsis, 'edited') 98 | }) 99 | }) 100 | 101 | it('deleting a page should remove it from the sitemap', async () => { 102 | await request 103 | .delete('/adsf-test-page.json') 104 | .send() 105 | .expect(200) 106 | .then(() => new Promise(resolve => app.sitemaphandler.once('finished', () => resolve()))) 107 | .then(() => { 108 | const sitemap = JSON.parse(fs.readFileSync(sitemapLoc)) 109 | assert.deepEqual(sitemap, []) 110 | }) 111 | }) 112 | }) 113 | -------------------------------------------------------------------------------- /default-data/pages/welcome-visitors: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Welcome Visitors", 3 | "story": [ 4 | { 5 | "text": "Welcome to this [[Federated Wiki]] site. From this page you can find who we are and what we do. New sites provide this information and then claim the site as their own. You will need your own site to participate.", 6 | "id": "7b56f22a4b9ee974", 7 | "type": "paragraph" 8 | }, 9 | { 10 | "type": "paragraph", 11 | "id": "821827c99b90cfd1", 12 | "text": "Pages about us." 13 | }, 14 | { 15 | "type": "factory", 16 | "id": "63ad2e58eecdd9e5", 17 | "prompt": "Link to a page about yourself here. Type your name enclosed in double square brackets. Then press Command/ALT-S to save.\n\nMake all pages here yours alone with the login below." 18 | }, 19 | { 20 | "type": "paragraph", 21 | "id": "2bbd646ff3f44b51", 22 | "text": "Pages where we do and share." 23 | }, 24 | { 25 | "type": "factory", 26 | "id": "05e2fa92643677ca", 27 | "prompt": "Create pages about things you do on this wiki. Type a descriptive name of something you will be writing about. Enclose it in square brackets. Then press Command/ALT-S to save." 
28 | }, 29 | { 30 | "type": "paragraph", 31 | "id": "ee416d431ebf4fb4", 32 | "text": "You can edit your copy of these pages. Press [+] to add more writing spaces. Read [[How to Wiki]] for more ideas. Follow [[Recent Changes]] here and nearby." 33 | } 34 | ], 35 | "journal": [ 36 | { 37 | "type": "create", 38 | "item": { 39 | "title": "Welcome Visitors", 40 | "story": [] 41 | }, 42 | "date": 1420938191608 43 | }, 44 | { 45 | "type": "add", 46 | "item": { 47 | "text": "Welcome to this [[Federated Wiki]] site. From this page you can find who we are and what we do. New sites provide this information and then claim the site as their own. You will need your own site to participate.", 48 | "id": "7b56f22a4b9ee974", 49 | "type": "paragraph" 50 | }, 51 | "id": "7b56f22a4b9ee974", 52 | "date": 1420938199166 53 | }, 54 | { 55 | "type": "add", 56 | "item": { 57 | "type": "paragraph", 58 | "id": "821827c99b90cfd1", 59 | "text": "Pages about us." 60 | }, 61 | "after": "7b56f22a4b9ee974", 62 | "id": "821827c99b90cfd1", 63 | "date": 1420938202921 64 | }, 65 | { 66 | "type": "add", 67 | "item": { 68 | "type": "factory", 69 | "id": "63ad2e58eecdd9e5", 70 | "prompt": "Link to a page about yourself here. Type your name enclosed in double square brackets. Then press Command/ALT-S to save.\n\nMake all pages here yours alone with the login below." 71 | }, 72 | "after": "821827c99b90cfd1", 73 | "id": "63ad2e58eecdd9e5", 74 | "date": 1420938208737 75 | }, 76 | { 77 | "type": "add", 78 | "item": { 79 | "type": "paragraph", 80 | "id": "2bbd646ff3f44b51", 81 | "text": "Pages where we do and share." 82 | }, 83 | "after": "63ad2e58eecdd9e5", 84 | "id": "2bbd646ff3f44b51", 85 | "date": 1420938212624 86 | }, 87 | { 88 | "type": "add", 89 | "item": { 90 | "type": "factory", 91 | "id": "05e2fa92643677ca", 92 | "prompt": "Create pages about things you do on this wiki. Type a descriptive name of something you will be writing about. Enclose it in square brackets. Then press Command/ALT-S to save." 
93 | }, 94 | "after": "2bbd646ff3f44b51", 95 | "id": "05e2fa92643677ca", 96 | "date": 1420938216782 97 | }, 98 | { 99 | "type": "add", 100 | "item": { 101 | "type": "paragraph", 102 | "id": "ee416d431ebf4fb4", 103 | "text": "You can edit your copy of these pages. Press [+] to add more writing spaces. Read [[How to Wiki]] for more ideas. Follow [[Recent Changes]] here and nearby." 104 | }, 105 | "after": "05e2fa92643677ca", 106 | "id": "ee416d431ebf4fb4", 107 | "date": 1420938220851 108 | } 109 | ] 110 | } -------------------------------------------------------------------------------- /test/server.js: -------------------------------------------------------------------------------- 1 | const { describe, it, before, after } = require('node:test') 2 | const assert = require('node:assert/strict') 3 | 4 | const supertest = require('supertest') 5 | const fs = require('node:fs') 6 | const server = require('..') 7 | const path = require('node:path') 8 | const random = require('../lib/random_id') 9 | const testid = random() 10 | const argv = require('../lib/defaultargs')({ 11 | data: path.join('/tmp', 'sfwtests', testid), 12 | packageDir: path.join(__dirname, '..', 'node_modules'), 13 | port: 55557, 14 | security_legacy: true, 15 | test: true, 16 | }) 17 | 18 | describe('server', () => { 19 | var app = {} 20 | let runningServer = null 21 | before(done => { 22 | // as starting the server this was does not create a sitemap file, create an empty one 23 | const sitemapLoc = path.join('/tmp', 'sfwtests', testid, 'status', 'sitemap.json') 24 | fs.mkdirSync(path.join('/tmp', 'sfwtests', testid)) 25 | fs.mkdirSync(path.join('/tmp', 'sfwtests', testid, 'status')) 26 | fs.writeFileSync(sitemapLoc, JSON.stringify([])) 27 | 28 | app = server(argv) 29 | app.once('owner-set', () => { 30 | runningServer = app.listen(app.startOpts.port, app.startOpts.host, done) 31 | }) 32 | }) 33 | 34 | after(() => { 35 | runningServer.close() 36 | }) 37 | 38 | const request = 
supertest('http://localhost:55557') 39 | 40 | // location of the test page 41 | const loc = path.join('/tmp', 'sfwtests', testid, 'pages', 'adsf-test-page') 42 | 43 | it('factories should return a list of plugin', async () => { 44 | await request 45 | .get('/system/factories.json') 46 | .expect(200) 47 | .expect('Content-Type', /json/) 48 | .then(res => { 49 | assert.equal(res.body[1].name, 'Video') 50 | assert.equal(res.body[1].category, 'format') 51 | }) 52 | }) 53 | 54 | it('new site should have an empty list of pages', async () => { 55 | await request 56 | .get('/system/slugs.json') 57 | .expect(200) 58 | .expect('Content-Type', /json/) 59 | .then(res => assert.deepEqual(res.body, [])) 60 | }) 61 | 62 | it('should create a page', async () => { 63 | const body = JSON.stringify({ 64 | type: 'create', 65 | item: { 66 | title: 'Asdf Test Page', 67 | story: [ 68 | { id: 'a1', type: 'paragraph', text: 'this is the first paragraph' }, 69 | { id: 'a2', type: 'paragraph', text: 'this is the second paragraph' }, 70 | { id: 'a3', type: 'paragraph', text: 'this is the third paragraph' }, 71 | { id: 'a4', type: 'paragraph', text: 'this is the fourth paragraph' }, 72 | ], 73 | }, 74 | date: 1234567890123, 75 | }) 76 | 77 | await request 78 | .put('/page/adsf-test-page/action') 79 | .send('action=' + body) 80 | .expect(200) 81 | }) 82 | 83 | it('should move the paragraphs to the order given ', async () => { 84 | const body = '{ "type": "move", "order": [ "a1", "a3", "a2", "a4"] }' 85 | 86 | await request 87 | .put('/page/adsf-test-page/action') 88 | .send('action=' + body) 89 | .expect(200) 90 | .then( 91 | () => { 92 | const page = JSON.parse(fs.readFileSync(loc)) 93 | assert.equal(page.story[1].id, 'a3') 94 | assert.equal(page.story[2].id, 'a2') 95 | assert.equal(page.journal[1].type, 'move') 96 | }, 97 | err => { 98 | throw err 99 | }, 100 | ) 101 | .catch(err => { 102 | throw err 103 | }) 104 | }) 105 | 106 | it('should add a paragraph', async () => { 107 | const body = 
JSON.stringify({ 108 | type: 'add', 109 | after: 'a2', 110 | item: { id: 'a5', type: 'paragraph', text: 'this is the NEW paragrpah' }, 111 | }) 112 | 113 | await request 114 | .put('/page/adsf-test-page/action') 115 | .send('action=' + body) 116 | .expect(200) 117 | .then(() => { 118 | const page = JSON.parse(fs.readFileSync(loc)) 119 | assert.equal(page.story.length, 5) 120 | assert.equal(page.story[3].id, 'a5') 121 | assert.equal(page.journal[2].type, 'add') 122 | }) 123 | .catch(err => { 124 | throw err 125 | }) 126 | }) 127 | 128 | it('should remove a paragraph with given id', async () => { 129 | const body = JSON.stringify({ 130 | type: 'remove', 131 | id: 'a2', 132 | }) 133 | 134 | await request 135 | .put('/page/adsf-test-page/action') 136 | .send('action=' + body) 137 | .expect(200) 138 | .then(() => { 139 | const page = JSON.parse(fs.readFileSync(loc)) 140 | assert.equal(page.story.length, 4) 141 | assert.equal(page.story[1].id, 'a3') 142 | assert.notEqual(page.story[2].id, 'a2') 143 | assert.equal(page.story[2].id, 'a5') 144 | assert.equal(page.journal[3].type, 'remove') 145 | }) 146 | .catch(err => { 147 | throw err 148 | }) 149 | }) 150 | 151 | it('should edit a paragraph in place', async () => { 152 | const body = JSON.stringify({ 153 | type: 'edit', 154 | item: { id: 'a3', type: 'paragraph', text: 'edited' }, 155 | id: 'a3', 156 | }) 157 | 158 | await request 159 | .put('/page/adsf-test-page/action') 160 | .send('action=' + body) 161 | .expect(200) 162 | .then(() => { 163 | const page = JSON.parse(fs.readFileSync(loc)) 164 | assert.equal(page.story[1].text, 'edited') 165 | assert.equal(page.journal[4].type, 'edit') 166 | }) 167 | .catch(err => { 168 | throw err 169 | }) 170 | }) 171 | 172 | it('should default to no change', async () => { 173 | const body = JSON.stringify({ 174 | type: 'asdf', 175 | }) 176 | 177 | await request 178 | .put('/page/adsf-test-page/action') 179 | .send('action=' + body) 180 | .expect(500) 181 | .then(() => { 182 | const 
page = JSON.parse(fs.readFileSync(loc)) 183 | assert.equal(page.story.length, 4) 184 | assert.equal(page.journal.length, 5) 185 | assert.equal(page.story[0].id, 'a1') 186 | assert.equal(page.story[3].text, 'this is the fourth paragraph') 187 | assert.equal(page.journal[4].type, 'edit') 188 | }) 189 | .catch(err => { 190 | throw err 191 | }) 192 | }) 193 | 194 | it('should refuse to create over a page', async () => { 195 | const body = JSON.stringify({ 196 | type: 'create', 197 | item: { title: 'Doh' }, 198 | id: 'c1', 199 | }) 200 | 201 | await request 202 | .put('/page/adsf-test-page/action') 203 | .send('action=' + body) 204 | .expect(409) 205 | .then(() => { 206 | const page = JSON.parse(fs.readFileSync(loc)) 207 | assert.notEqual(page.title, 'Doh') 208 | }) 209 | .catch(err => { 210 | throw err 211 | }) 212 | }) 213 | 214 | it('site should now have one page', async () => { 215 | await request 216 | .get('/system/slugs.json') 217 | .expect(200) 218 | .expect('Content-Type', /json/) 219 | .then(res => { 220 | assert.equal(res.body.length, 1) 221 | assert.equal(res.body[0], 'adsf-test-page') 222 | }) 223 | .catch(err => { 224 | throw err 225 | }) 226 | }) 227 | }) 228 | -------------------------------------------------------------------------------- /lib/sitemap.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Federated Wiki : Node Server 3 | * 4 | * Copyright Ward Cunningham and other contributors 5 | * Licensed under the MIT license. 
6 | * https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt 7 | */ 8 | 9 | // **sitemap.coffee** 10 | 11 | const fs = require('fs') 12 | const path = require('path') 13 | const events = require('events') 14 | const writeFileAtomic = require('write-file-atomic') 15 | const xml2js = require('xml2js') 16 | 17 | const synopsis = require('wiki-client/lib/synopsis') 18 | 19 | const asSlug = name => 20 | name 21 | .replace(/\s/g, '-') 22 | .replace(/[^A-Za-z0-9-]/g, '') 23 | .toLowerCase() 24 | 25 | module.exports = exports = argv => { 26 | const wikiName = new URL(argv.url).hostname 27 | 28 | let sitemap = [] 29 | 30 | const queue = [] 31 | 32 | let sitemapPageHandler = null 33 | 34 | // ms since last update we will remove sitemap from memory 35 | const sitemapTimeoutMs = 120000 36 | let sitemapTimeoutHandler = null 37 | 38 | const sitemapLoc = path.join(argv.status, 'sitemap.json') 39 | const xmlSitemapLoc = path.join(argv.status, 'sitemap.xml') 40 | 41 | let working = false 42 | 43 | const lastEdit = journal => { 44 | if (!journal) return undefined 45 | // find the last journal entry, that is not a fork, with a date. 46 | const last = journal.findLast(action => { 47 | return action.date && action.type != 'fork' 48 | }) 49 | return last ? 
last.date : undefined 50 | } 51 | 52 | const sitemapUpdate = (file, page, cb) => { 53 | let pageLinks, pageLinksMap 54 | const extractPageLinks = (collaborativeLinks, currentItem, currentIndex, array) => { 55 | // extract collaborative links 56 | // - this will need extending if we also extract the id of the item containing the link 57 | try { 58 | const linkRe = /\[\[([^\]]+)\]\]/g 59 | let match = undefined 60 | while ((match = linkRe.exec(currentItem.text)) != null) { 61 | if (!collaborativeLinks.has(asSlug(match[1]))) { 62 | collaborativeLinks.set(asSlug(match[1]), currentItem.id) 63 | } 64 | } 65 | if ('reference' == currentItem.type) { 66 | if (!collaborativeLinks.has(currentItem.slug)) { 67 | collaborativeLinks.set(currentItem.slug, currentItem.id) 68 | } 69 | } 70 | } catch (err) { 71 | console.log( 72 | `METADATA *** ${wikiName} Error extracting links from ${currentIndex} of ${JSON.stringify(array)}`, 73 | err.message, 74 | ) 75 | } 76 | return collaborativeLinks 77 | } 78 | try { 79 | pageLinksMap = page.story.reduce(extractPageLinks, new Map()) 80 | } catch (err) { 81 | console.log(`METADATA *** ${wikiName} reduce to extract links on ${file} failed`, err.message) 82 | pageLinksMap = [] 83 | } 84 | // 85 | if (pageLinksMap.size > 0) { 86 | pageLinks = Object.fromEntries(pageLinksMap) 87 | } else { 88 | pageLinks = undefined 89 | } 90 | 91 | const entry = { 92 | slug: file, 93 | title: page.title, 94 | date: lastEdit(page.journal), 95 | synopsis: synopsis(page), 96 | links: pageLinks, 97 | } 98 | 99 | const slugs = sitemap.map(page => page.slug) 100 | 101 | const idx = slugs.indexOf(file) 102 | 103 | if (~idx) { 104 | sitemap[idx] = entry 105 | } else { 106 | sitemap.push(entry) 107 | } 108 | cb() 109 | } 110 | 111 | const sitemapRemovePage = (file, cb) => { 112 | const slugs = sitemap.map(page => page.slug) 113 | const idx = slugs.indexOf(file) 114 | 115 | if (~idx) { 116 | sitemap.splice(idx, 1) 117 | } 118 | cb() 119 | } 120 | 121 | const sitemapSave = 
(sitemap, cb) => { 122 | fs.access(argv.status, fs.constants.F_OK, err => { 123 | if (!err) { 124 | writeFileAtomic(sitemapLoc, JSON.stringify(sitemap), e => { 125 | if (e) return cb(e) 126 | cb() 127 | }) 128 | } else 129 | fs.mkdir(argv.status, { recursive: true }, () => { 130 | writeFileAtomic(sitemapLoc, JSON.stringify(sitemap), e => { 131 | if (e) return cb(e) 132 | cb() 133 | }) 134 | }) 135 | }) 136 | } 137 | 138 | const sitemapRestore = cb => { 139 | fs.access(sitemapLoc, fs.constants.F_OK, err => { 140 | if (!err) { 141 | fs.readFile(sitemapLoc, (err, data) => { 142 | if (err) return cb(err) 143 | try { 144 | sitemap = JSON.parse(data) 145 | } catch (e) { 146 | return cb(e) 147 | } 148 | process.nextTick(() => { 149 | serial(queue.shift()) 150 | }) 151 | }) 152 | } else { 153 | // sitemap file does not exist, so needs creating 154 | itself.createSitemap(sitemapPageHandler) 155 | } 156 | }) 157 | } 158 | 159 | const xmlSitemapSave = (sitemap, cb) => { 160 | const xmlmapPages = [] 161 | sitemap.forEach(page => { 162 | const result = {} 163 | result['loc'] = argv.url + '/' + page.slug + '.html' 164 | if (page.date) { 165 | const date = new Date(page.date) 166 | if (!isNaN(date.valueOf())) { 167 | result['lastmod'] = date.toISOString().substring(0, 10) 168 | } 169 | } 170 | xmlmapPages.push(result) 171 | }) 172 | const xmlmap = { urlset: { $: { xmlns: 'http://www.sitemaps.org/schemas/sitemap/0.9' }, url: xmlmapPages } } 173 | const builder = new xml2js.Builder() 174 | const xml = builder.buildObject(xmlmap) 175 | fs.access(argv.status, fs.constants.F_OK, err => { 176 | if (!err) { 177 | writeFileAtomic(xmlSitemapLoc, xml, e => { 178 | if (e) return cb(e) 179 | cb() 180 | }) 181 | } else { 182 | fs.mkdir(argv.status, { recursive: true }, () => { 183 | writeFileAtomic(xmlSitemapLoc, xml, e => { 184 | if (e) return cb(e) 185 | cb() 186 | }) 187 | }) 188 | } 189 | }) 190 | } 191 | 192 | const serial = item => { 193 | if (item) { 194 | switch (item.action) { 195 | 
case 'update': 196 | itself.start() 197 | sitemapUpdate(item.file, item.page, e => process.nextTick(() => serial(queue.shift()))) 198 | break 199 | case 'remove': 200 | itself.start() 201 | sitemapRemovePage(item.file, e => process.nextTick(() => serial(queue.shift()))) 202 | break 203 | default: 204 | console.log(`Sitemap unexpected action ${item.action} for ${item.page} in ${wikiName}`) 205 | process.nextTick(() => serial(queue.shift)) 206 | } 207 | } else 208 | sitemapSave(sitemap, e => { 209 | if (e) console.log(`Problems saving sitemap ${wikiName}: ` + e) 210 | itself.stop() 211 | }) 212 | xmlSitemapSave(sitemap, e => { 213 | if (e) console.log(`Problems saving sitemap(xml) ${wikiName}`) + e 214 | }) 215 | } 216 | 217 | // #### Public stuff #### 218 | 219 | const itself = new events.EventEmitter() 220 | itself.start = () => { 221 | clearTimeout(sitemapTimeoutHandler) 222 | working = true 223 | itself.emit('working') 224 | } 225 | itself.stop = () => { 226 | const clearsitemap = () => { 227 | console.log(`removing sitemap ${wikiName} from memory`) 228 | sitemap.length = 0 229 | clearTimeout(sitemapTimeoutHandler) 230 | } 231 | // don't clear sitemap when in test environment. It just delays the tests completing. 
232 | if (!argv.test) sitemapTimeoutHandler = setTimeout(clearsitemap, sitemapTimeoutMs) 233 | working = false 234 | itself.emit('finished') 235 | } 236 | itself.isWorking = () => { 237 | working 238 | } 239 | 240 | itself.createSitemap = pagehandler => { 241 | itself.start() 242 | // we save the pagehandler, so we can recreate the sitemap if it is removed 243 | if (!sitemapPageHandler) sitemapPageHandler = pagehandler 244 | 245 | pagehandler.pages((e, newsitemap) => { 246 | if (e) { 247 | console.log(`createSitemap ${wikiName} : error ` + e) 248 | itself.stop() 249 | return e 250 | } 251 | sitemap = newsitemap 252 | 253 | process.nextTick(() => { 254 | serial(queue.shift()) 255 | }) 256 | }) 257 | } 258 | 259 | itself.removePage = file => { 260 | const action = 'remove' 261 | queue.push({ action, file }) 262 | if (sitemap.length === 0 && !working) { 263 | itself.start() 264 | sitemapRestore(e => { 265 | if (e) console.log(`Problems restoring sitemap ${wikiName} : ` + e) 266 | itself.createSitemap(sitemapPageHandler) 267 | }) 268 | } else { 269 | if (!working) serial(queue.shift()) 270 | } 271 | } 272 | 273 | itself.update = (file, page) => { 274 | const action = 'update' 275 | queue.push({ action, file, page }) 276 | if (sitemap.length === 0 && !working) { 277 | itself.start() 278 | sitemapRestore(e => { 279 | if (e) console.log(`Problems restoring sitemap ${wikiName} : ` + e) 280 | itself.createSitemap(sitemapPageHandler) 281 | }) 282 | } else { 283 | if (!working) serial(queue.shift()) 284 | } 285 | } 286 | 287 | return itself 288 | } 289 | -------------------------------------------------------------------------------- /default-data/pages/federated-wiki: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Federated Wiki", 3 | "story": [ 4 | { 5 | "type": "paragraph", 6 | "id": "530fa0c6d830da7d", 7 | "text": "Federated Wiki sites share pages circulating within a creative commons. 
A single-page browser application can read from many sites at once and save changes in that browser. Users who host their own sites can login there to have their edits shared back to the federation as they edit." 8 | }, 9 | { 10 | "type": "reference", 11 | "id": "efb2a20c1ef9eacf", 12 | "site": "sites.fed.wiki.org", 13 | "title": "sites.fed.wiki.org", 14 | "text": "Catalog of federated wiki sites with domain names for page titles and brief descriptions tuned for search." 15 | }, 16 | { 17 | "type": "paragraph", 18 | "id": "d72fef48db142972", 19 | "text": "We publish federated wiki software as a Node.js package ready to run on a variety of platforms. This is usefully run on a personal laptop or an industrial server in the cloud. Most people get started by joining a community and launching sites and/or servers with their help. The server software supports a multi-tenant \"farm\" option useful for small groups or heavy users. [https://www.npmjs.com/package/wiki npm]" 20 | }, 21 | { 22 | "type": "paragraph", 23 | "id": "74269607fa946000", 24 | "text": "A community of open-source developers maintain both the client-side and server-side applications most frequently used to browse and edit pages. Pages themselves are composed of paragraph-sized items of various kinds. This same community provides a core set of plugins for rich content pages and a variety of experimental plugins that push boundaries of web computing. [https://github.com/fedwiki github]" 25 | }, 26 | { 27 | "type": "paragraph", 28 | "id": "be5beb04ec02236f", 29 | "text": "Ward Cunningham started the federation in 2011 with a workshop project called Smallest Federated Wiki or simply SFW. The data visualization and sharing mechanisms were supported by Nike's Sustainable Business and Innovation group. Early history has been documented in a series of video screencasts. Search for \"video\"." 
30 | } 31 | ], 32 | "journal": [ 33 | { 34 | "type": "create", 35 | "item": { 36 | "title": "Federated Wiki", 37 | "story": [] 38 | }, 39 | "date": 1418521457737 40 | }, 41 | { 42 | "item": { 43 | "type": "factory", 44 | "id": "530fa0c6d830da7d" 45 | }, 46 | "id": "530fa0c6d830da7d", 47 | "type": "add", 48 | "date": 1418521468150 49 | }, 50 | { 51 | "type": "edit", 52 | "id": "530fa0c6d830da7d", 53 | "item": { 54 | "type": "paragraph", 55 | "id": "530fa0c6d830da7d", 56 | "text": "Federated Wiki sites share pages circulating within a creative commons. A single-page browser application can read from many sites at once and save changes in that browser. Users who host their own sites can login there to have their edits shared back to the federation as they edit." 57 | }, 58 | "date": 1418521991680 59 | }, 60 | { 61 | "item": { 62 | "type": "factory", 63 | "id": "74269607fa946000" 64 | }, 65 | "id": "74269607fa946000", 66 | "type": "add", 67 | "after": "530fa0c6d830da7d", 68 | "date": 1418522205755 69 | }, 70 | { 71 | "type": "edit", 72 | "id": "74269607fa946000", 73 | "item": { 74 | "type": "paragraph", 75 | "id": "74269607fa946000", 76 | "text": "A community of open-source developers maintain both the client-side and server-side applications most frequently used to browse and edit pages. Pages themselves are composed of paragraph-sized items of various kinds. This same community provides a core set of plugins for rich content pages and a variety of experimental plugins that push boundaries of web computing." 77 | }, 78 | "date": 1418522793324 79 | }, 80 | { 81 | "type": "edit", 82 | "id": "74269607fa946000", 83 | "item": { 84 | "type": "paragraph", 85 | "id": "74269607fa946000", 86 | "text": "A community of open-source developers maintain both the client-side and server-side applications most frequently used to browse and edit pages. Pages themselves are composed of paragraph-sized items of various kinds. 
This same community provides a core set of plugins for rich content pages and a variety of experimental plugins that push boundaries of web computing. [https://github.com/fedwiki github]" 87 | }, 88 | "date": 1418522877756 89 | }, 90 | { 91 | "item": { 92 | "type": "factory", 93 | "id": "be5beb04ec02236f" 94 | }, 95 | "id": "be5beb04ec02236f", 96 | "type": "add", 97 | "after": "74269607fa946000", 98 | "date": 1418522935324 99 | }, 100 | { 101 | "type": "edit", 102 | "id": "be5beb04ec02236f", 103 | "item": { 104 | "type": "paragraph", 105 | "id": "be5beb04ec02236f", 106 | "text": "Ward Cunningham started the federation in 2011 with a workshop project called Smallest Federated Wiki or simply SFW. The data visualization and sharing mechanisms were supported by Nike's Sustainable Business and Innovation group. Early history has been documented in a series of video screencasts." 107 | }, 108 | "date": 1418523559948 109 | }, 110 | { 111 | "item": { 112 | "type": "factory", 113 | "id": "efb2a20c1ef9eacf" 114 | }, 115 | "id": "efb2a20c1ef9eacf", 116 | "type": "add", 117 | "after": "be5beb04ec02236f", 118 | "date": 1418523670124 119 | }, 120 | { 121 | "type": "edit", 122 | "id": "efb2a20c1ef9eacf", 123 | "item": { 124 | "type": "reference", 125 | "id": "efb2a20c1ef9eacf", 126 | "site": "sites.fed.wiki.org", 127 | "title": "sites.fed.wiki.org", 128 | "text": "Catalog of federated wiki sites with domain names for page titles and brief descriptions tuned for search." 
129 | }, 130 | "date": 1418523717019 131 | }, 132 | { 133 | "type": "move", 134 | "order": [ 135 | "530fa0c6d830da7d", 136 | "efb2a20c1ef9eacf", 137 | "74269607fa946000", 138 | "be5beb04ec02236f" 139 | ], 140 | "id": "efb2a20c1ef9eacf", 141 | "date": 1418523723158 142 | }, 143 | { 144 | "type": "edit", 145 | "id": "be5beb04ec02236f", 146 | "item": { 147 | "type": "paragraph", 148 | "id": "be5beb04ec02236f", 149 | "text": "Ward Cunningham started the federation in 2011 with a workshop project called Smallest Federated Wiki or simply SFW. The data visualization and sharing mechanisms were supported by Nike's Sustainable Business and Innovation group. Early history has been documented in a series of video screencasts. Search for \"video\"." 150 | }, 151 | "date": 1418523815810 152 | }, 153 | { 154 | "item": { 155 | "type": "factory", 156 | "id": "d72fef48db142972" 157 | }, 158 | "id": "d72fef48db142972", 159 | "type": "add", 160 | "after": "be5beb04ec02236f", 161 | "date": 1418523843204 162 | }, 163 | { 164 | "type": "edit", 165 | "id": "d72fef48db142972", 166 | "item": { 167 | "type": "paragraph", 168 | "id": "d72fef48db142972", 169 | "text": "We publish federated wiki software as a Node.js package ready to run on a variety of platforms. This is usefully run on a personal laptop or an industrial server in the cloud. Most people get started by joining a community and launching servers in support of that community's purpose." 170 | }, 171 | "date": 1418524089665 172 | }, 173 | { 174 | "type": "edit", 175 | "id": "d72fef48db142972", 176 | "item": { 177 | "type": "paragraph", 178 | "id": "d72fef48db142972", 179 | "text": "We publish federated wiki software as a Node.js package ready to run on a variety of platforms. This is usefully run on a personal laptop or an industrial server in the cloud. Most people get started by joining a community and launching sites and/or servers with their help. 
[https://www.npmjs.com/package/wiki npm]" 180 | }, 181 | "date": 1418524277519 182 | }, 183 | { 184 | "type": "edit", 185 | "id": "d72fef48db142972", 186 | "item": { 187 | "type": "paragraph", 188 | "id": "d72fef48db142972", 189 | "text": "We publish federated wiki software as a Node.js package ready to run on a variety of platforms. This is usefully run on a personal laptop or an industrial server in the cloud. Most people get started by joining a community and launching sites and/or servers with their help. The server software supports a multi-tenant \"farm\" option useful for small groups or heavy users. [https://www.npmjs.com/package/wiki npm]" 190 | }, 191 | "date": 1418524419231 192 | }, 193 | { 194 | "type": "move", 195 | "order": [ 196 | "530fa0c6d830da7d", 197 | "efb2a20c1ef9eacf", 198 | "74269607fa946000", 199 | "d72fef48db142972", 200 | "be5beb04ec02236f" 201 | ], 202 | "id": "d72fef48db142972", 203 | "date": 1418524447194 204 | }, 205 | { 206 | "type": "move", 207 | "order": [ 208 | "530fa0c6d830da7d", 209 | "efb2a20c1ef9eacf", 210 | "d72fef48db142972", 211 | "74269607fa946000", 212 | "be5beb04ec02236f" 213 | ], 214 | "id": "74269607fa946000", 215 | "date": 1418524455903 216 | } 217 | ] 218 | } -------------------------------------------------------------------------------- /lib/search.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Federated Wiki : Node Server 3 | * 4 | * Copyright Ward Cunningham and other contributors 5 | * Licensed under the MIT license. 
6 | * https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt 7 | */ 8 | 9 | // **search.js** 10 | 11 | const fs = require('node:fs') 12 | const path = require('node:path') 13 | const events = require('node:events') 14 | const url = require('node:url') 15 | const writeFileAtomic = require('write-file-atomic') 16 | 17 | const miniSearch = require('minisearch') 18 | 19 | module.exports = exports = argv => { 20 | const wikiName = new URL(argv.url).hostname 21 | let siteIndex = [] 22 | const queue = [] 23 | 24 | let searchPageHandler = null 25 | 26 | // ms since last update we will remove index from memory 27 | // orig - searchTimeoutMs = 1200000 28 | const searchTimeoutMs = 120000 // temp reduce to 2 minutes 29 | let searchTimeoutHandler = null 30 | 31 | const siteIndexLoc = path.join(argv.status, 'site-index.json') 32 | const indexUpdateFlag = path.join(argv.status, 'index-updated') 33 | 34 | let working = false 35 | 36 | const touch = (file, cb) => { 37 | fs.stat(file, (err, stats) => { 38 | if (err === null) return cb() 39 | fs.open(file, 'w', (err, fd) => { 40 | if (err) cb(err) 41 | fs.close(fd, err => { 42 | cb(err) 43 | }) 44 | }) 45 | }) 46 | } 47 | 48 | const searchPageUpdate = (slug, page, cb) => { 49 | // to update we have to remove the page first, and then readd it 50 | let pageText 51 | try { 52 | pageText = page.story.reduce(extractPageText, '') 53 | } catch (err) { 54 | console.log(`SITE INDEX *** ${wikiName} reduce to extract the text on ${slug} failed`, err.message) 55 | pageText = '' 56 | } 57 | if (siteIndex.has(slug)) { 58 | siteIndex.replace({ 59 | id: slug, 60 | title: page.title, 61 | content: pageText, 62 | }) 63 | } else { 64 | siteIndex.add({ 65 | id: slug, 66 | title: page.title, 67 | content: pageText, 68 | }) 69 | } 70 | cb() 71 | } 72 | 73 | const searchPageRemove = (slug, cb) => { 74 | // remove page from index 75 | try { 76 | siteIndex.discard(slug) 77 | } catch (err) { 78 | // swallow error, if the page was not in index 79 | if 
(!err.message.includes('not in the index')) { 80 | console.log(`removing ${slug} from index ${wikiName} failed`, err) 81 | } 82 | } 83 | cb() 84 | } 85 | 86 | const searchSave = (siteIndex, cb) => { 87 | // save index to file 88 | fs.access(argv.status, fs.constants.F_OK, err => { 89 | if (!err) { 90 | writeFileAtomic(siteIndexLoc, JSON.stringify(siteIndex), e => { 91 | if (e) return cb(e) 92 | touch(indexUpdateFlag, () => { 93 | cb() 94 | }) 95 | }) 96 | } else { 97 | fs.mkdir(argv.status, { recursive: true }, () => { 98 | writeFileAtomic(siteIndexLoc, JSON.stringify(siteIndex), e => { 99 | if (e) return cb(e) 100 | touch(indexUpdateFlag, () => { 101 | cb() 102 | }) 103 | }) 104 | }) 105 | } 106 | }) 107 | } 108 | 109 | const searchRestore = cb => { 110 | // restore index, or create if it doesn't already exist 111 | fs.access(siteIndexLoc, fs.constants.F_OK, err => { 112 | if (!err) { 113 | fs.readFile(siteIndexLoc, (err, data) => { 114 | if (err) return cb(err) 115 | try { 116 | siteIndex = miniSearch.loadJSON(data, { 117 | fields: ['title', 'content'], 118 | }) 119 | } catch (e) { 120 | return cb(e) 121 | } 122 | process.nextTick(() => { 123 | serial(queue.shift()) 124 | }) 125 | }) 126 | } 127 | }) 128 | } 129 | 130 | const serial = item => { 131 | if (item) { 132 | switch (item.action) { 133 | case 'update': 134 | itself.start() 135 | searchPageUpdate(item.slug, item.page, () => { 136 | process.nextTick(() => { 137 | serial(queue.shift()) 138 | }) 139 | }) 140 | break 141 | case 'remove': 142 | itself.start() 143 | searchPageRemove(item.slug, () => { 144 | process.nextTick(() => { 145 | serial(queue.shift()) 146 | }) 147 | }) 148 | break 149 | default: 150 | console.log(`SITE INDEX *** unexpected action ${item.action} for ${item.page}`) 151 | process.nextTick(() => { 152 | serial(queue.shift) 153 | }) 154 | } 155 | } else { 156 | searchSave(siteIndex, e => { 157 | if (e) console.log('SITE INDEX *** save failed: ' + e) 158 | itself.stop() 159 | }) 160 | } 161 | 
} 162 | 163 | const extractItemText = text => { 164 | return text 165 | .replace(/\[([^\]]*?)\][[(].*?[\])]/g, ' $1 ') 166 | .replace(/\[{2}|\[(?:[\S]+)|\]{1,2}/g, ' ') 167 | .replace(/\n/g, ' ') 168 | .replace(//g, ' ') 169 | .replace(/<(?:"[^"]*"['"]*|'[^']*'['"]*|[^'">])+>/g, ' ') 170 | .replace(/<(?:[^>])+>/g, ' ') 171 | .replace(/(https?:.*?)(?=\p{White_Space}|\p{Quotation_Mark}|$)/gu, match => { 172 | try { 173 | const myUrl = new URL(match) 174 | return myUrl.hostname 175 | } catch { 176 | return ' ' 177 | } 178 | }) 179 | .replace(/[\p{P}\p{Emoji}\p{Symbol}}]+/gu, ' ') 180 | .replace(/[\p{White_Space}\n\t]+/gu, ' ') 181 | } 182 | 183 | const extractPageText = (pageText, currentItem, currentIndex, array) => { 184 | // console.log('extractPageText', pageText, currentItem, currentIndex, array) 185 | try { 186 | if (currentItem.text) { 187 | switch (currentItem.type) { 188 | case 'paragraph': 189 | case 'markdown': 190 | case 'html': 191 | case 'reference': 192 | case 'image': 193 | case 'pagefold': 194 | case 'math': 195 | case 'mathjax': 196 | case 'code': 197 | pageText += ' ' + extractItemText(currentItem.text) 198 | break 199 | case 'audio': 200 | case 'video': 201 | case 'frame': 202 | pageText += 203 | ' ' + 204 | extractItemText( 205 | currentItem.text 206 | .split(/\r\n?|\n/) 207 | .map(line => { 208 | const firstWord = line.split(/\p{White_Space}/u)[0] 209 | if ( 210 | firstWord.startsWith('http') || 211 | firstWord.toUpperCase() === firstWord || 212 | firstWord.startsWith('//') 213 | ) { 214 | // line is markup 215 | return '' 216 | } else { 217 | return line 218 | } 219 | }) 220 | .join(' '), 221 | ) 222 | } 223 | } 224 | } catch (err) { 225 | throw new Error(`Error extracting text from ${currentIndex}, ${JSON.stringify(currentItem)} ${err}, ${err.stack}`) 226 | } 227 | return pageText 228 | } 229 | 230 | // #### Public stuff #### 231 | 232 | var itself = new events.EventEmitter() 233 | itself.start = () => { 234 | clearTimeout(searchTimeoutHandler) 
235 | working = true 236 | return itself.emit('indexing') 237 | } 238 | itself.stop = () => { 239 | const clearsearch = () => { 240 | console.log(`SITE INDEX ${wikiName} : removed from memory`) 241 | siteIndex = [] 242 | clearTimeout(searchTimeoutHandler) 243 | } 244 | searchTimeoutHandler = setTimeout(clearsearch, searchTimeoutMs) 245 | working = false 246 | return itself.emit('indexed') 247 | } 248 | itself.isWorking = () => { 249 | return working 250 | } 251 | itself.createIndex = pagehandler => { 252 | itself.start() 253 | 254 | // we save the pagehandler, so we can recreate the site index if it is removed 255 | searchPageHandler = searchPageHandler ?? pagehandler 256 | 257 | //timeLabel = `SITE INDEX ${wikiName} : Created` 258 | //console.time timeLabel 259 | 260 | pagehandler.slugs((e, slugs) => { 261 | if (e) { 262 | console.log(`SITE INDEX *** createIndex ${wikiName} error:`, e) 263 | itself.stop() 264 | return e 265 | } 266 | siteIndex = new miniSearch({ 267 | fields: ['title', 'content'], 268 | }) 269 | 270 | const indexPromises = slugs.map(slug => { 271 | return new Promise(resolve => { 272 | pagehandler.get(slug, (err, page) => { 273 | if (err) { 274 | console.log(`SITE INDEX *** ${wikiName}: error reading page`, slug) 275 | return 276 | } 277 | // page 278 | let pageText 279 | try { 280 | pageText = page.story.reduce(extractPageText, '') 281 | } catch (err) { 282 | console.log(`SITE INDEX *** ${wikiName} reduce to extract text on ${slug} failed`, err.message) 283 | // console.log "page", page 284 | pageText = '' 285 | } 286 | siteIndex.add({ 287 | id: slug, 288 | title: page.title, 289 | content: pageText, 290 | }) 291 | resolve() 292 | }) 293 | }) 294 | }) 295 | Promise.all(indexPromises).then(() => { 296 | // console.timeEnd timeLabel 297 | process.nextTick(() => { 298 | serial(queue.shift()) 299 | }) 300 | }) 301 | }) 302 | } 303 | 304 | itself.removePage = slug => { 305 | const action = 'remove' 306 | queue.push({ action, slug }) 307 | if 
(Array.isArray(siteIndex) && !working) { 308 | itself.start() 309 | searchRestore(e => { 310 | if (e) console.log(`SITE INDEX *** Problems restoring search index ${wikiName}:` + e) 311 | itself.createIndex(searchPageHandler) 312 | }) 313 | } else { 314 | if (!working) serial(queue.shift()) 315 | } 316 | } 317 | 318 | itself.update = (slug, page) => { 319 | const action = 'update' 320 | queue.push({ action, slug, page }) 321 | if (Array.isArray(siteIndex) && !working) { 322 | itself.start() 323 | searchRestore(e => { 324 | if (e) console.log(`SITE INDEX *** Problems restoring search index ${wikiName}:` + e) 325 | itself.createIndex(searchPageHandler) 326 | }) 327 | } else { 328 | if (!working) serial(queue.shift()) 329 | } 330 | } 331 | itself.startUp = pagehandler => { 332 | // called on server startup, here we check if wiki already is index 333 | // we only create an index if there is either no index or there have been updates since last startup 334 | console.log(`SITE INDEX ${wikiName} : StartUp`) 335 | fs.stat(siteIndexLoc, (err, stats) => { 336 | if (err === null) { 337 | // site index exists, but has it been updated? 338 | fs.stat(indexUpdateFlag, (err, stats) => { 339 | if (!err) { 340 | // index has been updated, so recreate it. 341 | itself.createIndex(pagehandler) 342 | // remove the update flag once the index has been created 343 | itself.once('indexed', () => { 344 | fs.unlink(indexUpdateFlag, err => { 345 | if (err) console.log(`+++ SITE INDEX ${wikiName} : unable to delete update flag`) 346 | }) 347 | }) 348 | } else { 349 | // not been updated, but is it the correct version? 
350 | fs.readFile(siteIndexLoc, (err, data) => { 351 | if (!err) { 352 | let testIndex 353 | try { 354 | testIndex = JSON.parse(data) 355 | } catch (err) { 356 | testIndex = {} 357 | } 358 | if (testIndex.serializationVersion != 2) 359 | console.log(`+++ SITE INDEX ${wikiName} : updating to latest version.`) 360 | itself.createIndex(pagehandler) 361 | // remove the update flag once the index has been created 362 | itself.once('indexed', () => { 363 | fs.unlink(indexUpdateFlag, err => { 364 | if (err) console.log(`+++ SITE INDEX ${wikiName} : unable to delete update flag`) 365 | }) 366 | }) 367 | } else { 368 | console.log(`+++ SITE INDEX ${wikiName} : error reading index - attempting creating`) 369 | itself.createIndex(pagehandler) 370 | // remove the update flag once the index has been created 371 | itself.once('indexed', () => { 372 | fs.unlink(indexUpdateFlag, err => { 373 | if (err) console.log(`+++ SITE INDEX ${wikiName} : unable to delete update flag`) 374 | }) 375 | }) 376 | } 377 | }) 378 | } 379 | }) 380 | } else { 381 | // index does not exist, so create it 382 | itself.createIndex(pagehandler) 383 | // remove the update flag once the index has been created 384 | itself.once('indexed', () => { 385 | fs.unlink(indexUpdateFlag, err => { 386 | if (err) console.log(`+++ SITE INDEX ${wikiName} : unable to delete update flag`) 387 | }) 388 | }) 389 | } 390 | }) 391 | } 392 | 393 | return itself 394 | } 395 | -------------------------------------------------------------------------------- /lib/page.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Federated Wiki : Node Server 3 | * 4 | * Copyright Ward Cunningham and other contributors 5 | * Licensed under the MIT license. 6 | * https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt 7 | */ 8 | // **page.coffee** 9 | // Module for interacting with pages persisted on the server. 10 | // Everything is stored using json flat files. 
11 | 12 | // #### Requires #### 13 | const fs = require('fs') 14 | const path = require('path') 15 | const events = require('events') 16 | 17 | const random_id = require('./random_id') 18 | const synopsis = require('wiki-client/lib/synopsis') 19 | 20 | const asSlug = name => 21 | name 22 | .replace(/\s/g, '-') 23 | .replace(/[^A-Za-z0-9-]/g, '') 24 | .toLowerCase() 25 | 26 | // Export a function that generates a page handler 27 | // when called with options object. 28 | module.exports = exports = argv => { 29 | const wikiName = new URL(argv.url).hostname 30 | 31 | fs.mkdir(argv.db, { recursive: true }, e => { 32 | if (e) throw e 33 | }) 34 | 35 | // create a list of plugin pages. 36 | const pluginPages = new Map() 37 | Object.keys(require.main.require('./package').dependencies) 38 | .filter(depend => depend.startsWith('wiki-plugin')) 39 | .forEach(plugin => { 40 | const pagesPath = path.join( 41 | path.dirname(require.resolve(`${plugin}/package`, { paths: require.main.paths })), 42 | 'pages', 43 | ) 44 | fs.readdir(pagesPath, { withFileTypes: true }, (err, entries) => { 45 | if (err) return 46 | entries.forEach(entry => { 47 | if (entry.isFile() && !pluginPages.has(entry.name)) { 48 | pluginPages.set(entry.name, { pluginName: plugin, pluginPath: entry.parentPath }) 49 | } 50 | }) 51 | }) 52 | }) 53 | 54 | // #### Private utility methods. 
#### 55 | const load_parse = (loc, cb, annotations = {}) => { 56 | let page 57 | fs.readFile(loc, (err, data) => { 58 | if (err) return cb(err) 59 | try { 60 | page = JSON.parse(data) 61 | } catch { 62 | const errorPage = path.basename(loc) 63 | const errorPagePath = path.dirname(loc) 64 | const recyclePage = path.resolve(errorPagePath, '..', 'recycle', errorPage) 65 | fs.access(path.dirname(recyclePage), fs.constants.F_OK, err => { 66 | if (!err) { 67 | fs.rename(loc, recyclePage, err => { 68 | if (err) { 69 | console.log(`ERROR: moving problem page ${loc} to recycler`, err) 70 | } else { 71 | console.log(`ERROR: problem page ${loc} moved to recycler`) 72 | } 73 | }) 74 | } else { 75 | fs.mkdir(path.dirname(recyclePage), { recursive: true }, err => { 76 | if (err) { 77 | console.log('ERROR: creating recycler', err) 78 | } else { 79 | fs.rename(loc, recyclePage, err => { 80 | if (err) { 81 | console.log(`ERROR: moving problem page ${loc} to recycler`, err) 82 | } else { 83 | console.log(`ERROR: problem page ${loc} moved to recycler`) 84 | } 85 | }) 86 | } 87 | }) 88 | } 89 | }) 90 | 91 | return cb(null, 'Error Parsing Page', 404) 92 | } 93 | for (const [key, val] of Object.entries(annotations)) { 94 | page[key] = val 95 | } 96 | cb(null, page) 97 | }) 98 | } 99 | 100 | const load_parse_copy = (defloc, file, cb) => { 101 | fs.readFile(defloc, (err, data) => { 102 | if (err) cb(err) 103 | let page 104 | try { 105 | page = JSON.parse(data) 106 | } catch (e) { 107 | return cb(e) 108 | } 109 | cb(null, page) 110 | // TODO: what is happening here?! put will never be reached??? 111 | itself.put(file, page, err => { 112 | if (err) cb(err) 113 | }) 114 | }) 115 | } 116 | // Reads and writes are async, but serially queued to avoid race conditions. 
117 | const queue = [] 118 | 119 | const tryDefaults = (file, cb) => { 120 | const lastDefault = cb => { 121 | const defloc = path.join(argv.root, 'default-data', 'pages', file) 122 | fs.access(defloc, fs.constants.F_OK, err => { 123 | if (!err) { 124 | cb(defloc) 125 | } else { 126 | cb(null) 127 | } 128 | }) 129 | } 130 | if (argv.defaults) { 131 | const defloc = path.join(argv.data, '..', argv.defaults, 'pages', file) 132 | fs.access(defloc, fs.constants.F_OK, err => { 133 | if (!err) { 134 | cb(defloc) 135 | } else { 136 | lastDefault(cb) 137 | } 138 | }) 139 | } else { 140 | lastDefault(cb) 141 | } 142 | } 143 | 144 | // Main file io function, when called without page it reads, 145 | // when called with page it writes. 146 | const fileio = (action, file, page, cb) => { 147 | const loc = file.startsWith('recycler/') ? path.join(argv.recycler, file.split('/')[1]) : path.join(argv.db, file) 148 | 149 | switch (action) { 150 | case 'delete': 151 | if (file.startsWith('recycler/')) { 152 | // delete from recycler 153 | fs.access(loc, fs.constants.F_OK, err => { 154 | if (!err) 155 | fs.unlink(loc, err => { 156 | cb(err) 157 | }) 158 | }) 159 | } else { 160 | // move page to recycler 161 | fs.access(loc, fs.constants.F_OK, err => { 162 | if (!err) { 163 | const recycleLoc = path.join(argv.recycler, file) 164 | fs.access(path.dirname(recycleLoc), fs.constants.F_OK, err => { 165 | if (!err) { 166 | fs.rename(loc, recycleLoc, err => { 167 | cb(err) 168 | }) 169 | } else { 170 | fs.mkdir(path.dirname(recycleLoc), { recursive: true }, err => { 171 | if (err) cb(err) 172 | fs.rename(loc, recycleLoc, err => { 173 | cb(err) 174 | }) 175 | }) 176 | } 177 | }) 178 | } else { 179 | cb('page does not exist') 180 | } 181 | }) 182 | } 183 | break 184 | case 'recycle': { 185 | const copyFile = (source, target, cb) => { 186 | const done = err => { 187 | if (!cbCalled) { 188 | cb(err) 189 | cbCalled = true 190 | } 191 | return 192 | } 193 | 194 | let cbCalled = false 195 | 196 | 
const rd = fs.createReadStream(source) 197 | rd.on('error', err => { 198 | done(err) 199 | return 200 | }) 201 | 202 | const wr = fs.createWriteStream(target) 203 | wr.on('error', err => { 204 | done(err) 205 | return 206 | }) 207 | wr.on('close', () => { 208 | done() 209 | return 210 | }) 211 | rd.pipe(wr) 212 | return 213 | } 214 | 215 | fs.access(loc, fs.constants.F_OK, err => { 216 | if (!err) { 217 | const recycleLoc = path.join(argv.recycler, file) 218 | fs.access(path.dirname(recycleLoc), fs.constants.F_OK, err => { 219 | if (!err) { 220 | copyFile(loc, recycleLoc, err => { 221 | cb(err) 222 | }) 223 | } else { 224 | fs.mkdir(path.dirname(recycleLoc), { recursive: true }, err => { 225 | if (err) cb(err) 226 | copyFile(loc, recycleLoc, err => { 227 | cb(err) 228 | }) 229 | }) 230 | } 231 | }) 232 | } else { 233 | cb('page does not exist') 234 | } 235 | }) 236 | break 237 | } 238 | case 'get': 239 | fs.access(loc, fs.constants.F_OK, err => { 240 | if (!err) { 241 | load_parse(loc, cb, { plugin: undefined }) 242 | } else { 243 | tryDefaults(file, defloc => { 244 | if (defloc) { 245 | load_parse(defloc, cb) 246 | } else { 247 | if (pluginPages.has(file)) { 248 | const { pluginName, pluginPath } = pluginPages.get(file) 249 | load_parse(path.join(pluginPath, file), cb, { plugin: pluginName.slice(12) }) 250 | } else { 251 | cb(null, 'Page not found', 404) 252 | } 253 | } 254 | }) 255 | } 256 | }) 257 | break 258 | case 'put': 259 | page = JSON.stringify(page, null, 2) 260 | fs.access(path.dirname(loc), fs.constants.F_OK, err => { 261 | if (!err) { 262 | fs.writeFile(loc, page, err => { 263 | if (err) { 264 | console.log(`ERROR: write file ${loc} `, err) 265 | } 266 | cb(err) 267 | }) 268 | } else { 269 | fs.mkdir(path.dirname(loc), { recursive: true }, err => { 270 | if (err) cb(err) 271 | fs.writeFile(loc, page, err => { 272 | if (err) { 273 | console.log(`ERROR: write file ${loc} `, err) 274 | } 275 | cb(err) 276 | }) 277 | }) 278 | } 279 | }) 280 | break 281 | 
default: 282 | console.log(`pagehandler: unrecognized action ${action}`) 283 | } 284 | } 285 | 286 | // Control variable that tells if the serial queue is currently working. 287 | // Set back to false when all jobs are complete. 288 | let working = false 289 | 290 | // Keep file io working on queued jobs, but don't block the main thread. 291 | const serial = item => { 292 | if (item) { 293 | itself.start() 294 | fileio(item.action, item.file, item.page, (err, data, status) => { 295 | process.nextTick(() => { 296 | serial(queue.shift()) 297 | }) 298 | item.cb(err, data, status) 299 | }) 300 | } else { 301 | itself.stop() 302 | } 303 | } 304 | 305 | // #### Public stuff #### 306 | // Make the exported object an instance of EventEmitter 307 | // so other modules can tell if it is working or not. 308 | const itself = new events.EventEmitter() 309 | 310 | itself.start = () => { 311 | working = true 312 | itself.emit('working') 313 | } 314 | 315 | itself.stop = () => { 316 | working = false 317 | itself.emit('finished') 318 | } 319 | 320 | itself.isWorking = () => working 321 | 322 | // get method takes a slug and a callback, adding them to the queue, 323 | // starting serial if it isn't already working. 324 | itself.get = (file, cb) => { 325 | queue.push({ action: 'get', file, page: null, cb }) 326 | if (!working) serial(queue.shift()) 327 | } 328 | 329 | // put takes a slugged name, the page as a json object, and a callback. 330 | // adds them to the queue, and starts it unless it is working. 
331 | itself.put = (file, page, cb) => { 332 | queue.push({ action: 'put', file, page, cb }) 333 | if (!working) serial(queue.shift()) 334 | } 335 | 336 | itself.delete = (file, cb) => { 337 | queue.push({ action: 'delete', file, page: null, cb }) 338 | if (!working) serial(queue.shift()) 339 | } 340 | 341 | itself.saveToRecycler = (file, cb) => { 342 | queue.push({ action: 'recycle', file, page: null, cb }) 343 | if (!working) serial(queue.shift()) 344 | } 345 | 346 | const editDate = journal => { 347 | if (!journal) return undefined 348 | // find the last journal entry, that is not a fork, with a date. 349 | const last = journal.findLast(action => { 350 | return action.date && action.type != 'fork' 351 | }) 352 | return last ? last.date : undefined 353 | } 354 | 355 | itself.pages = cb => { 356 | const extractPageLinks = (collaborativeLinks, currentItem, currentIndex, array) => { 357 | // extract collaborative links 358 | // - this will need extending if we also extract the id of the item containing the link 359 | try { 360 | const linkRe = /\[\[([^\]]+)\]\]/g 361 | let match = undefined 362 | while ((match = linkRe.exec(currentItem.text)) != null) { 363 | if (!collaborativeLinks.has(asSlug(match[1]))) { 364 | collaborativeLinks.set(asSlug(match[1]), currentItem.id) 365 | } 366 | } 367 | if ('reference' == currentItem.type) { 368 | if (!collaborativeLinks.has(currentItem.slug)) { 369 | collaborativeLinks.set(currentItem.slug, currentItem.id) 370 | } 371 | } 372 | } catch (err) { 373 | console.log( 374 | `METADATA *** ${wikiName} Error extracting links from ${currentIndex} of ${JSON.stringify(array)}`, 375 | err.message, 376 | ) 377 | } 378 | return collaborativeLinks 379 | } 380 | 381 | fs.readdir(argv.db, (e, files) => { 382 | if (e) return cb(e) 383 | const doSitemap = async file => { 384 | return new Promise(resolve => { 385 | itself.get(file, (e, page, status) => { 386 | if (file.match(/^\./)) return resolve(null) 387 | if (e || status === 404) { 388 | 
console.log('Problem building sitemap:', file, 'e: ', e, 'status:', status) 389 | return resolve(null) // Ignore errors in the pagehandler get. 390 | } 391 | let pageLinksMap 392 | try { 393 | pageLinksMap = page.story.reduce(extractPageLinks, new Map()) 394 | } catch (err) { 395 | console.log(`METADATA *** ${wikiName} reduce to extract links on ${file} failed`, err.message) 396 | pageLinksMap = [] 397 | } 398 | // 399 | const pageLinks = pageLinksMap.size > 0 ? Object.fromEntries(pageLinksMap) : undefined 400 | 401 | resolve({ 402 | slug: file, 403 | title: page.title, 404 | date: editDate(page.journal), 405 | synopsis: synopsis(page), 406 | links: pageLinks, 407 | }) 408 | }) 409 | }) 410 | } 411 | 412 | Promise.all(files.map(doSitemap)) 413 | .then(sitemap => { 414 | cb( 415 | null, 416 | sitemap.filter(item => item != null), 417 | ) 418 | }) 419 | .catch(e => cb(e)) 420 | }) 421 | } 422 | 423 | itself.slugs = cb => { 424 | fs.readdir(argv.db, { withFileTypes: true }, (e, files) => { 425 | if (e) { 426 | console.log('Problem reading pages directory', e) 427 | return cb(e) 428 | } 429 | 430 | const onlyFiles = files.map(i => (i.isFile() ? i.name : null)).filter(i => i != null && !i?.startsWith('.')) 431 | cb(null, onlyFiles) 432 | }) 433 | } 434 | 435 | return itself 436 | } 437 | -------------------------------------------------------------------------------- /default-data/pages/how-to-wiki: -------------------------------------------------------------------------------- 1 | { 2 | "title": "How To Wiki", 3 | "story": [ 4 | { 5 | "type": "paragraph", 6 | "id": "07ef6b0e53a77dd2", 7 | "text": "Here we describe how to use the web interface to the federated wiki. Read [[About Federated Wiki]] to learn about the project. If you are just starting to write in your first wiki then read the [[Field Guide to the Federation]]." 8 | }, 9 | { 10 | "type": "paragraph", 11 | "id": "6e236e59a759df32", 12 | "text": "Tip: Use arrow keys to scroll left and right." 
13 | }, 14 | { 15 | "type": "html", 16 | "id": "d88b9a104822256b", 17 | "text": "

How To Read Content" 18 | }, 19 | { 20 | "type": "paragraph", 21 | "id": "9906193876ea1213", 22 | "text": "How to [[Find The Beginning]] of any site." 23 | }, 24 | { 25 | "type": "paragraph", 26 | "id": "ddb43bb5cfbd9189", 27 | "text": "How to [[Follow Links]] within and between sites." 28 | }, 29 | { 30 | "type": "paragraph", 31 | "id": "f1df1f71c9ee8a17", 32 | "text": "How to [[Search Sites]] nearby the ones you've visited." 33 | }, 34 | { 35 | "type": "paragraph", 36 | "id": "f88127fbb75f20c6", 37 | "text": "How to [[Find Sites]] that share with you." 38 | }, 39 | { 40 | "type": "paragraph", 41 | "id": "55a0377f63c23d93", 42 | "text": "How to [[Read History]] of any page." 43 | }, 44 | { 45 | "type": "html", 46 | "id": "be22840c9f21b356", 47 | "text": "

How To Add Content" 48 | }, 49 | { 50 | "type": "paragraph", 51 | "id": "1012dd3528d4c788", 52 | "text": "How to [[Add Text]] to an existing paragraph." 53 | }, 54 | { 55 | "type": "paragraph", 56 | "id": "69fd0eba8fefe2aa", 57 | "text": "How to [[Add Links]] to a paragraph." 58 | }, 59 | { 60 | "type": "paragraph", 61 | "id": "1db26c2e0c7ed126", 62 | "text": "How to [[Add Formatting]] to a paragraph." 63 | }, 64 | { 65 | "type": "paragraph", 66 | "id": "208e32f178e91c99", 67 | "text": "How to [[Add Weird Characters]] to a paragraph." 68 | }, 69 | { 70 | "type": "paragraph", 71 | "id": "fa3e22f363cd93d5", 72 | "text": "How to [[Add Paragraphs]] or paragraph-like items to a story." 73 | }, 74 | { 75 | "type": "paragraph", 76 | "id": "d03bcc1fd0278522", 77 | "text": "How to [[Move Paragraphs]] within and between wiki pages." 78 | }, 79 | { 80 | "type": "paragraph", 81 | "id": "4cb87e0352907dfe", 82 | "text": "How to [[Add Pages]] by adding links to nonexistant pages." 83 | }, 84 | { 85 | "type": "paragraph", 86 | "id": "6f729deb0e1c75af", 87 | "text": "How to [[Copy Pages]] by forking them from remote sites." 88 | }, 89 | { 90 | "type": "paragraph", 91 | "id": "8bb136a087a9710f", 92 | "text": "How to [[Import Documents]] from various sources." 93 | }, 94 | { 95 | "type": "paragraph", 96 | "id": "9b96aa8af66cc688", 97 | "text": "How to [[Add Videos]] from YouTube or Vimeo." 98 | }, 99 | { 100 | "type": "html", 101 | "id": "243033dc6528eab1", 102 | "text": "

How to Remove Content" 103 | }, 104 | { 105 | "type": "paragraph", 106 | "id": "73e9a16ca95be40e", 107 | "text": "How to [[Remove Paragraphs]] and other editable items." 108 | }, 109 | { 110 | "type": "paragraph", 111 | "id": "c63dc8ff04ecdc31", 112 | "text": "How to [[Remove Pages]] or direct readers to other sites." 113 | }, 114 | { 115 | "type": "html", 116 | "id": "d3a1bd6b0c947378", 117 | "text": "

How To Improve Wiki" 118 | }, 119 | { 120 | "type": "paragraph", 121 | "id": "a2bb10949e81f2c6", 122 | "text": "How to [[Review Changes]] to any page or site." 123 | }, 124 | { 125 | "type": "paragraph", 126 | "id": "d594101067de9a5c", 127 | "text": "How to [[Track Changes]] to many sites at once. " 128 | }, 129 | { 130 | "type": "paragraph", 131 | "id": "c8fda36855c0da59", 132 | "text": "How to [[Host a Site]] to share your work." 133 | }, 134 | { 135 | "type": "paragraph", 136 | "id": "f2a7762b712791e5", 137 | "text": "How to [[Add Plugins]] to interpret new kinds of data." 138 | }, 139 | { 140 | "type": "paragraph", 141 | "id": "a4eeb3d307944e60", 142 | "text": "How to [[Track Updates]] to software on this site." 143 | }, 144 | { 145 | "type": "paragraph", 146 | "id": "4ecd1aedbb454789", 147 | "text": "How to [[Contribute Code]] to the federated wiki application." 148 | } 149 | ], 150 | "journal": [ 151 | { 152 | "type": "create", 153 | "item": { 154 | "title": "How To Wiki", 155 | "story": [ 156 | { 157 | "type": "paragraph", 158 | "id": "07ef6b0e53a77dd2", 159 | "text": "Read a little bit. Then move on to our [http://sandbox.fed.wiki.org Sandbox] and give your new knowledge a workout. Still confused? Look through our our [[Frequently Asked Questions]]." 160 | }, 161 | { 162 | "type": "html", 163 | "id": "d88b9a104822256b", 164 | "text": "

How To Add Content" 165 | }, 166 | { 167 | "type": "paragraph", 168 | "id": "1012dd3528d4c788", 169 | "text": "How to [[Add Text]] to an existing paragraph." 170 | }, 171 | { 172 | "type": "paragraph", 173 | "id": "69fd0eba8fefe2aa", 174 | "text": "How to [[Add Links]] to a paragraph." 175 | }, 176 | { 177 | "type": "paragraph", 178 | "id": "1db26c2e0c7ed126", 179 | "text": "How to [[Add Formatting]] to a paragraph." 180 | }, 181 | { 182 | "type": "paragraph", 183 | "id": "208e32f178e91c99", 184 | "text": "How to [[Add Weird Characters]] to a paragraph." 185 | }, 186 | { 187 | "type": "paragraph", 188 | "id": "fa3e22f363cd93d5", 189 | "text": "How to [[Add Paragraphs]] or paragraph-like items to a story." 190 | }, 191 | { 192 | "type": "paragraph", 193 | "id": "d03bcc1fd0278522", 194 | "text": "How to [[Move Paragraphs]] within and between wiki pages." 195 | }, 196 | { 197 | "type": "paragraph", 198 | "id": "4cb87e0352907dfe", 199 | "text": "How to [[Add Pages]] by adding links to existing pages." 200 | }, 201 | { 202 | "type": "paragraph", 203 | "id": "8bb136a087a9710f", 204 | "text": "How to [[Import Documents]] from various sources." 205 | }, 206 | { 207 | "type": "html", 208 | "id": "243033dc6528eab1", 209 | "text": "

How to Remove Content" 210 | }, 211 | { 212 | "type": "paragraph", 213 | "id": "73e9a16ca95be40e", 214 | "text": "How to [[Remove Paragraphs]] and some other editable items." 215 | }, 216 | { 217 | "type": "paragraph", 218 | "id": "c63dc8ff04ecdc31", 219 | "text": "How to [[Remove Pages]] or direct readers to other sites." 220 | }, 221 | { 222 | "type": "html", 223 | "id": "d3a1bd6b0c947378", 224 | "text": "

How To Improve Wiki" 225 | }, 226 | { 227 | "type": "paragraph", 228 | "id": "a2bb10949e81f2c6", 229 | "text": "How to [[Review Changes]] to any page or site." 230 | }, 231 | { 232 | "type": "paragraph", 233 | "id": "c8fda36855c0da59", 234 | "text": "How to [[Host a Site]] to share your work." 235 | }, 236 | { 237 | "type": "paragraph", 238 | "id": "f2a7762b712791e5", 239 | "text": "How to [[Add Plugins]] to interpret new kinds of data." 240 | } 241 | ] 242 | }, 243 | "date": 1344124800000 244 | }, 245 | { 246 | "type": "edit", 247 | "id": "d88b9a104822256b", 248 | "item": { 249 | "type": "paragraph", 250 | "id": "d88b9a104822256b", 251 | "text": "

How To Read Content" 252 | }, 253 | "date": 1344612795460 254 | }, 255 | { 256 | "item": { 257 | "type": "paragraph", 258 | "id": "9906193876ea1213", 259 | "text": "

How To Add Content" 260 | }, 261 | "id": "9906193876ea1213", 262 | "type": "add", 263 | "after": "d88b9a104822256b", 264 | "date": 1344612795965 265 | }, 266 | { 267 | "type": "edit", 268 | "id": "9906193876ea1213", 269 | "item": { 270 | "type": "paragraph", 271 | "id": "9906193876ea1213", 272 | "text": "How to [[Find The Beginning]] of any site." 273 | }, 274 | "date": 1344612850492 275 | }, 276 | { 277 | "item": { 278 | "type": "paragraph", 279 | "id": "ddb43bb5cfbd9189", 280 | "text": "

How To Add Content" 281 | }, 282 | "id": "ddb43bb5cfbd9189", 283 | "type": "add", 284 | "after": "9906193876ea1213", 285 | "date": 1344612850998 286 | }, 287 | { 288 | "type": "edit", 289 | "id": "ddb43bb5cfbd9189", 290 | "item": { 291 | "type": "paragraph", 292 | "id": "ddb43bb5cfbd9189", 293 | "text": "How to [[Follow Links]] within and between sites." 294 | }, 295 | "date": 1344612938627 296 | }, 297 | { 298 | "item": { 299 | "type": "paragraph", 300 | "id": "55a0377f63c23d93", 301 | "text": "

How To Add Content" 302 | }, 303 | "id": "55a0377f63c23d93", 304 | "type": "add", 305 | "after": "ddb43bb5cfbd9189", 306 | "date": 1344612939134 307 | }, 308 | { 309 | "type": "edit", 310 | "id": "55a0377f63c23d93", 311 | "item": { 312 | "type": "paragraph", 313 | "id": "55a0377f63c23d93", 314 | "text": "How to [[Read History]] of pages." 315 | }, 316 | "date": 1344613093693 317 | }, 318 | { 319 | "item": { 320 | "type": "paragraph", 321 | "id": "be22840c9f21b356", 322 | "text": "

How To Add Content" 323 | }, 324 | "id": "be22840c9f21b356", 325 | "type": "add", 326 | "after": "55a0377f63c23d93", 327 | "date": 1344613094199 328 | }, 329 | { 330 | "type": "edit", 331 | "id": "55a0377f63c23d93", 332 | "item": { 333 | "type": "paragraph", 334 | "id": "55a0377f63c23d93", 335 | "text": "How to [[Read History]] of any page." 336 | }, 337 | "date": 1344613112202 338 | }, 339 | { 340 | "type": "move", 341 | "order": [ 342 | "07ef6b0e53a77dd2", 343 | "d88b9a104822256b", 344 | "9906193876ea1213", 345 | "ddb43bb5cfbd9189", 346 | "55a0377f63c23d93", 347 | "69fd0eba8fefe2aa", 348 | "be22840c9f21b356", 349 | "1012dd3528d4c788", 350 | "1db26c2e0c7ed126", 351 | "208e32f178e91c99", 352 | "fa3e22f363cd93d5", 353 | "d03bcc1fd0278522", 354 | "4cb87e0352907dfe", 355 | "8bb136a087a9710f", 356 | "243033dc6528eab1", 357 | "73e9a16ca95be40e", 358 | "c63dc8ff04ecdc31", 359 | "d3a1bd6b0c947378", 360 | "a2bb10949e81f2c6", 361 | "c8fda36855c0da59", 362 | "f2a7762b712791e5" 363 | ], 364 | "id": "69fd0eba8fefe2aa", 365 | "date": 1344994979920 366 | }, 367 | { 368 | "type": "move", 369 | "order": [ 370 | "07ef6b0e53a77dd2", 371 | "d88b9a104822256b", 372 | "9906193876ea1213", 373 | "ddb43bb5cfbd9189", 374 | "55a0377f63c23d93", 375 | "be22840c9f21b356", 376 | "1012dd3528d4c788", 377 | "69fd0eba8fefe2aa", 378 | "1db26c2e0c7ed126", 379 | "208e32f178e91c99", 380 | "fa3e22f363cd93d5", 381 | "d03bcc1fd0278522", 382 | "4cb87e0352907dfe", 383 | "8bb136a087a9710f", 384 | "243033dc6528eab1", 385 | "73e9a16ca95be40e", 386 | "c63dc8ff04ecdc31", 387 | "d3a1bd6b0c947378", 388 | "a2bb10949e81f2c6", 389 | "c8fda36855c0da59", 390 | "f2a7762b712791e5" 391 | ], 392 | "id": "69fd0eba8fefe2aa", 393 | "date": 1344995001433 394 | }, 395 | { 396 | "item": { 397 | "type": "paragraph", 398 | "id": "f1df1f71c9ee8a17", 399 | "text": "" 400 | }, 401 | "id": "f1df1f71c9ee8a17", 402 | "type": "add", 403 | "after": "ddb43bb5cfbd9189", 404 | "date": 1347261684093 405 | }, 406 | { 407 | "type": "edit", 408 | 
"id": "f1df1f71c9ee8a17", 409 | "item": { 410 | "type": "paragraph", 411 | "id": "f1df1f71c9ee8a17", 412 | "text": "How to [[Search Sites]] nearby." 413 | }, 414 | "date": 1347261743098 415 | }, 416 | { 417 | "type": "edit", 418 | "id": "f1df1f71c9ee8a17", 419 | "item": { 420 | "type": "paragraph", 421 | "id": "f1df1f71c9ee8a17", 422 | "text": "How to [[Search Sites]] nearby the ones you've seen." 423 | }, 424 | "date": 1347261760209 425 | }, 426 | { 427 | "type": "edit", 428 | "id": "f1df1f71c9ee8a17", 429 | "item": { 430 | "type": "paragraph", 431 | "id": "f1df1f71c9ee8a17", 432 | "text": "How to [[Search Sites]] nearby the ones you've visited." 433 | }, 434 | "date": 1347266523144 435 | }, 436 | { 437 | "type": "edit", 438 | "id": "07ef6b0e53a77dd2", 439 | "item": { 440 | "type": "paragraph", 441 | "id": "07ef6b0e53a77dd2", 442 | "text": "This is our help documentation. Read a little bit. Then move on to our [http://sandbox.fed.wiki.org Sandbox] and give your new knowledge a workout. Still confused? Look through our our [[Frequently Asked Questions]]." 443 | }, 444 | "date": 1350863772732 445 | }, 446 | { 447 | "item": { 448 | "type": "paragraph", 449 | "id": "d594101067de9a5c", 450 | "text": "" 451 | }, 452 | "id": "d594101067de9a5c", 453 | "type": "add", 454 | "after": "55a0377f63c23d93", 455 | "date": 1354085168265 456 | }, 457 | { 458 | "type": "edit", 459 | "id": "d594101067de9a5c", 460 | "item": { 461 | "type": "paragraph", 462 | "id": "d594101067de9a5c", 463 | "text": "How to [[Track Changes]] you and others have made." 464 | }, 465 | "date": 1354085188989 466 | }, 467 | { 468 | "type": "fork", 469 | "site": "fed.wiki.org", 470 | "date": 1358732351905 471 | }, 472 | { 473 | "type": "edit", 474 | "id": "4cb87e0352907dfe", 475 | "item": { 476 | "type": "paragraph", 477 | "id": "4cb87e0352907dfe", 478 | "text": "How to [[Add Pages]] by adding links to nonexistant pages." 
479 | }, 480 | "site": "fed.wiki.org", 481 | "date": 1358732351906 482 | }, 483 | { 484 | "item": { 485 | "type": "paragraph", 486 | "id": "6f729deb0e1c75af", 487 | "text": "" 488 | }, 489 | "id": "6f729deb0e1c75af", 490 | "type": "add", 491 | "after": "4cb87e0352907dfe", 492 | "date": 1358732398504 493 | }, 494 | { 495 | "type": "edit", 496 | "id": "6f729deb0e1c75af", 497 | "item": { 498 | "type": "paragraph", 499 | "id": "6f729deb0e1c75af", 500 | "text": "How to [[Fork Pages]] to copy them from remote sites." 501 | }, 502 | "date": 1358732431158 503 | }, 504 | { 505 | "type": "edit", 506 | "id": "6f729deb0e1c75af", 507 | "item": { 508 | "type": "paragraph", 509 | "id": "6f729deb0e1c75af", 510 | "text": "How to [[Copy Pages]] by forking them from remote sites." 511 | }, 512 | "date": 1358732453148 513 | }, 514 | { 515 | "type": "edit", 516 | "id": "d594101067de9a5c", 517 | "item": { 518 | "type": "paragraph", 519 | "id": "d594101067de9a5c", 520 | "text": "How to [[Track Changes]] track changes to many sites at once. " 521 | }, 522 | "date": 1359351001148 523 | }, 524 | { 525 | "type": "edit", 526 | "id": "d594101067de9a5c", 527 | "item": { 528 | "type": "paragraph", 529 | "id": "d594101067de9a5c", 530 | "text": "How to [[Track Changes]] to many sites at once. 
" 531 | }, 532 | "date": 1359351018106 533 | }, 534 | { 535 | "type": "move", 536 | "order": [ 537 | "07ef6b0e53a77dd2", 538 | "d88b9a104822256b", 539 | "9906193876ea1213", 540 | "ddb43bb5cfbd9189", 541 | "f1df1f71c9ee8a17", 542 | "55a0377f63c23d93", 543 | "be22840c9f21b356", 544 | "1012dd3528d4c788", 545 | "69fd0eba8fefe2aa", 546 | "1db26c2e0c7ed126", 547 | "208e32f178e91c99", 548 | "fa3e22f363cd93d5", 549 | "d03bcc1fd0278522", 550 | "4cb87e0352907dfe", 551 | "6f729deb0e1c75af", 552 | "8bb136a087a9710f", 553 | "243033dc6528eab1", 554 | "73e9a16ca95be40e", 555 | "c63dc8ff04ecdc31", 556 | "d3a1bd6b0c947378", 557 | "a2bb10949e81f2c6", 558 | "d594101067de9a5c", 559 | "c8fda36855c0da59", 560 | "f2a7762b712791e5" 561 | ], 562 | "id": "d594101067de9a5c", 563 | "date": 1359351026703 564 | }, 565 | { 566 | "item": { 567 | "type": "paragraph", 568 | "id": "a4eeb3d307944e60", 569 | "text": "" 570 | }, 571 | "id": "a4eeb3d307944e60", 572 | "type": "add", 573 | "after": "f2a7762b712791e5", 574 | "date": 1385946646514 575 | }, 576 | { 577 | "type": "edit", 578 | "id": "a4eeb3d307944e60", 579 | "item": { 580 | "type": "paragraph", 581 | "id": "a4eeb3d307944e60", 582 | "text": "How to [[Track Updates]] deployed on this site." 583 | }, 584 | "date": 1385946764769 585 | }, 586 | { 587 | "type": "edit", 588 | "id": "a4eeb3d307944e60", 589 | "item": { 590 | "type": "paragraph", 591 | "id": "a4eeb3d307944e60", 592 | "text": "How to [[Track Updates]] to software on this site." 593 | }, 594 | "date": 1385948784056 595 | }, 596 | { 597 | "item": { 598 | "type": "paragraph", 599 | "id": "f88127fbb75f20c6", 600 | "text": "" 601 | }, 602 | "id": "f88127fbb75f20c6", 603 | "type": "add", 604 | "after": "c8fda36855c0da59", 605 | "date": 1386645286746 606 | }, 607 | { 608 | "type": "edit", 609 | "id": "f88127fbb75f20c6", 610 | "item": { 611 | "type": "paragraph", 612 | "id": "f88127fbb75f20c6", 613 | "text": "How to [[Find Sites]] that share with you." 
614 | }, 615 | "date": 1386645321701 616 | }, 617 | { 618 | "item": { 619 | "type": "paragraph", 620 | "id": "9b96aa8af66cc688", 621 | "text": "" 622 | }, 623 | "id": "9b96aa8af66cc688", 624 | "type": "add", 625 | "after": "8bb136a087a9710f", 626 | "date": 1388893424438 627 | }, 628 | { 629 | "type": "edit", 630 | "id": "9b96aa8af66cc688", 631 | "item": { 632 | "type": "paragraph", 633 | "id": "9b96aa8af66cc688", 634 | "text": "How to Add Videos]] from YouTube or Vimeo." 635 | }, 636 | "date": 1388893446392 637 | }, 638 | { 639 | "type": "edit", 640 | "id": "9b96aa8af66cc688", 641 | "item": { 642 | "type": "paragraph", 643 | "id": "9b96aa8af66cc688", 644 | "text": "How to [[Add Videos]] from YouTube or Vimeo." 645 | }, 646 | "date": 1388893453024 647 | }, 648 | { 649 | "item": { 650 | "type": "paragraph", 651 | "id": "4ecd1aedbb454789", 652 | "text": "" 653 | }, 654 | "id": "4ecd1aedbb454789", 655 | "type": "add", 656 | "after": "a4eeb3d307944e60", 657 | "date": 1402629683922 658 | }, 659 | { 660 | "type": "edit", 661 | "id": "4ecd1aedbb454789", 662 | "item": { 663 | "type": "paragraph", 664 | "id": "4ecd1aedbb454789", 665 | "text": "How to [[Contribute Code]] to the federated wiki application." 
666 | }, 667 | "date": 1402629730967 668 | }, 669 | { 670 | "type": "move", 671 | "order": [ 672 | "07ef6b0e53a77dd2", 673 | "d88b9a104822256b", 674 | "9906193876ea1213", 675 | "ddb43bb5cfbd9189", 676 | "f1df1f71c9ee8a17", 677 | "f88127fbb75f20c6", 678 | "55a0377f63c23d93", 679 | "be22840c9f21b356", 680 | "1012dd3528d4c788", 681 | "69fd0eba8fefe2aa", 682 | "1db26c2e0c7ed126", 683 | "208e32f178e91c99", 684 | "fa3e22f363cd93d5", 685 | "d03bcc1fd0278522", 686 | "4cb87e0352907dfe", 687 | "6f729deb0e1c75af", 688 | "8bb136a087a9710f", 689 | "9b96aa8af66cc688", 690 | "243033dc6528eab1", 691 | "73e9a16ca95be40e", 692 | "c63dc8ff04ecdc31", 693 | "d3a1bd6b0c947378", 694 | "a2bb10949e81f2c6", 695 | "d594101067de9a5c", 696 | "c8fda36855c0da59", 697 | "f2a7762b712791e5", 698 | "a4eeb3d307944e60", 699 | "4ecd1aedbb454789" 700 | ], 701 | "id": "f88127fbb75f20c6", 702 | "date": 1417029105072 703 | }, 704 | { 705 | "type": "edit", 706 | "id": "d88b9a104822256b", 707 | "item": { 708 | "type": "html", 709 | "id": "d88b9a104822256b", 710 | "text": "

How To Read Content" 711 | }, 712 | "date": 1418273097889 713 | }, 714 | { 715 | "type": "edit", 716 | "id": "be22840c9f21b356", 717 | "item": { 718 | "type": "html", 719 | "id": "be22840c9f21b356", 720 | "text": "

How To Add Content" 721 | }, 722 | "date": 1418273104594 723 | }, 724 | { 725 | "type": "edit", 726 | "id": "243033dc6528eab1", 727 | "item": { 728 | "type": "html", 729 | "id": "243033dc6528eab1", 730 | "text": "

How to Remove Content" 731 | }, 732 | "date": 1418273109275 733 | }, 734 | { 735 | "type": "edit", 736 | "id": "d3a1bd6b0c947378", 737 | "item": { 738 | "type": "html", 739 | "id": "d3a1bd6b0c947378", 740 | "text": "

How To Improve Wiki" 741 | }, 742 | "date": 1418273113292 743 | }, 744 | { 745 | "item": { 746 | "type": "factory", 747 | "id": "6abaa5c6559057c0" 748 | }, 749 | "id": "6abaa5c6559057c0", 750 | "type": "add", 751 | "after": "4ecd1aedbb454789", 752 | "date": 1418519427218 753 | }, 754 | { 755 | "type": "edit", 756 | "id": "6abaa5c6559057c0", 757 | "item": { 758 | "type": "pagefold", 759 | "id": "6abaa5c6559057c0", 760 | "text": "html" 761 | }, 762 | "date": 1418519432043 763 | }, 764 | { 765 | "type": "move", 766 | "order": [ 767 | "07ef6b0e53a77dd2", 768 | "d88b9a104822256b", 769 | "6abaa5c6559057c0", 770 | "9906193876ea1213", 771 | "ddb43bb5cfbd9189", 772 | "f1df1f71c9ee8a17", 773 | "f88127fbb75f20c6", 774 | "55a0377f63c23d93", 775 | "be22840c9f21b356", 776 | "1012dd3528d4c788", 777 | "69fd0eba8fefe2aa", 778 | "1db26c2e0c7ed126", 779 | "208e32f178e91c99", 780 | "fa3e22f363cd93d5", 781 | "d03bcc1fd0278522", 782 | "4cb87e0352907dfe", 783 | "6f729deb0e1c75af", 784 | "8bb136a087a9710f", 785 | "9b96aa8af66cc688", 786 | "243033dc6528eab1", 787 | "73e9a16ca95be40e", 788 | "c63dc8ff04ecdc31", 789 | "d3a1bd6b0c947378", 790 | "a2bb10949e81f2c6", 791 | "d594101067de9a5c", 792 | "c8fda36855c0da59", 793 | "f2a7762b712791e5", 794 | "a4eeb3d307944e60", 795 | "4ecd1aedbb454789" 796 | ], 797 | "id": "6abaa5c6559057c0", 798 | "date": 1418519437778 799 | }, 800 | { 801 | "type": "move", 802 | "order": [ 803 | "07ef6b0e53a77dd2", 804 | "d88b9a104822256b", 805 | "9906193876ea1213", 806 | "ddb43bb5cfbd9189", 807 | "6abaa5c6559057c0", 808 | "f1df1f71c9ee8a17", 809 | "f88127fbb75f20c6", 810 | "55a0377f63c23d93", 811 | "be22840c9f21b356", 812 | "1012dd3528d4c788", 813 | "69fd0eba8fefe2aa", 814 | "1db26c2e0c7ed126", 815 | "208e32f178e91c99", 816 | "fa3e22f363cd93d5", 817 | "d03bcc1fd0278522", 818 | "4cb87e0352907dfe", 819 | "6f729deb0e1c75af", 820 | "8bb136a087a9710f", 821 | "9b96aa8af66cc688", 822 | "243033dc6528eab1", 823 | "73e9a16ca95be40e", 824 | "c63dc8ff04ecdc31", 825 | 
"d3a1bd6b0c947378", 826 | "a2bb10949e81f2c6", 827 | "d594101067de9a5c", 828 | "c8fda36855c0da59", 829 | "f2a7762b712791e5", 830 | "a4eeb3d307944e60", 831 | "4ecd1aedbb454789" 832 | ], 833 | "id": "6abaa5c6559057c0", 834 | "date": 1418519515869 835 | }, 836 | { 837 | "type": "edit", 838 | "id": "07ef6b0e53a77dd2", 839 | "item": { 840 | "type": "paragraph", 841 | "id": "07ef6b0e53a77dd2", 842 | "text": "Here we describe how to use the web interface to the federated wiki. Read [[About Federated Wiki]] to learn about the project. If you are just starting to write in your first wiki then read the [[Field Guide to Wiki]]." 843 | }, 844 | "date": 1488039158808 845 | }, 846 | { 847 | "type": "edit", 848 | "id": "07ef6b0e53a77dd2", 849 | "item": { 850 | "type": "paragraph", 851 | "id": "07ef6b0e53a77dd2", 852 | "text": "Here we describe how to use the web interface to the federated wiki. Read [[About Federated Wiki]] to learn about the project. If you are just starting to write in your first wiki then read the [[Field Guide to the Federation]]." 853 | }, 854 | "date": 1488039205290 855 | }, 856 | { 857 | "type": "add", 858 | "id": "6e236e59a759df32", 859 | "item": { 860 | "type": "paragraph", 861 | "id": "6e236e59a759df32", 862 | "text": "Tip: Use arrow keys to scroll left and right between pages." 863 | }, 864 | "after": "07ef6b0e53a77dd2", 865 | "date": 1488039288800 866 | }, 867 | { 868 | "type": "edit", 869 | "id": "6e236e59a759df32", 870 | "item": { 871 | "type": "paragraph", 872 | "id": "6e236e59a759df32", 873 | "text": "Tip: Use arrow keys to scroll left and right." 874 | }, 875 | "date": 1488039303256 876 | }, 877 | { 878 | "type": "remove", 879 | "id": "6abaa5c6559057c0", 880 | "date": 1488039420420 881 | }, 882 | { 883 | "type": "edit", 884 | "id": "73e9a16ca95be40e", 885 | "item": { 886 | "type": "paragraph", 887 | "id": "73e9a16ca95be40e", 888 | "text": "How to [[Remove Paragraphs]] and other editable items." 
      },
      "date": 1488041103204
    },
    {
      "type": "fork",
      "site": "fed.wiki.org",
      "date": 1488473254245
    }
  ]
}
--------------------------------------------------------------------------------
/lib/server.js:
--------------------------------------------------------------------------------
/*
 * Federated Wiki : Node Server
 *
 * Copyright Ward Cunningham and other contributors
 * Licensed under the MIT license.
 * https://github.com/fedwiki/wiki-server/blob/master/LICENSE.txt
 */

// **server.js** is the main guts of the express version
// of [Smallest Federated Wiki](https://github.com/WardCunningham/Smallest-Federated-Wiki).
// The CLI and Farm are just front ends
// for setting arguments, and spawning servers. In a complex system
// you would probably want to replace the CLI/Farm with your own code,
// and use server.js directly.
//
// #### Dependencies ####
// anything not in the standard library is included in the repo, or
// can be installed with an:
// npm install

// Standard lib
const fs = require('fs')
const path = require('path')
const http = require('http')
const url = require('url')
const { pipeline } = require('node:stream/promises')

// From npm
const express = require('express')
const hbs = require('express-hbs')
const f = require('flates')

const createDOMPurify = require('dompurify')
const { JSDOM } = require('jsdom')

const window = new JSDOM('').window
const DOMPurify = createDOMPurify(window)

// Using native fetch API (available in Node.js 18+)

// Express 4 middleware
const logger = require('morgan')
const cookieParser = require('cookie-parser')
const methodOverride = require('method-override')
// session = require 'express-session'
const sessions = require('client-sessions')
const bodyParser = require('body-parser')
const errorHandler = require('errorhandler')

// Local files
const random = require('./random_id')
const defargs = require('./defaultargs')
const resolveClient = require('wiki-client/lib/resolve')
const pluginsFactory = require('./plugins')
const sitemapFactory = require('./sitemap')
const searchFactory = require('./search')

// Render a page object as static HTML (twins div, header, then the story).
// Used by the `.html` route to serve a server-generated view of a page.
//
// Bug fix: the original story.map callback only returned a value in the
// empty-item branch; every other branch evaluated its f.div(...) and
// discarded it, so map produced `undefined` per item and the rendered
// story came out empty. Each branch now explicitly returns its markup.
const render = page => {
  // Render a single story item to an HTML div, dispatching on item type.
  // Unknown item types fall through to a plain text rendering of `text`.
  const renderItem = story => {
    if (!story) return ''
    switch (story.type) {
      case 'paragraph':
        return f.div({ class: 'item paragraph' }, f.p(resolveClient.resolveLinks(story.text)))
      case 'image':
        return f.div(
          { class: 'item image' },
          f.img({ class: 'thumbnail', src: story.url }),
          f.p(resolveClient.resolveLinks(story.text || story.caption || 'uploaded image')),
        )
      case 'html':
        // Raw html items are sanitized with DOMPurify before being emitted.
        return f.div({ class: 'item html' }, f.p(resolveClient.resolveLinks(story.text || '', DOMPurify.sanitize)))
      default:
        return f.div({ class: 'item' }, f.p(resolveClient.resolveLinks(story.text || '')))
    }
  }

  return (
    f.div({ class: 'twins' }, f.p('')) +
    '\n' +
    f.div(
      { class: 'header' },
      f.h1(
        f.a({ href: '/', style: 'text-decoration: none' }, f.img({ height: '32px', src: '/favicon.png' })) +
          ' ' +
          page.title,
      ),
    ) +
    '\n' +
    f.div({ class: 'story' }, page.story.map(story => renderItem(story)).join('\n'))
  )
}
// Set the module export to a function that generates a sfw server.
module.exports = exports = argv => {
  // Create the main application object, app.
  const app = express()

  // remove x-powered-by header
  app.disable('x-powered-by')

  // defaultargs.js exports a function that takes the argv object
  // that is passed in and then does its
  // best to supply sane defaults for any arguments that are missing.
103 | argv = defargs(argv) 104 | 105 | app.startOpts = argv 106 | 107 | const log = (...stuff) => { 108 | if (argv.debug) console.log(stuff) 109 | } 110 | const loga = (...stuff) => { 111 | console.log(stuff) 112 | } 113 | 114 | const ourErrorHandler = (req, res, next) => { 115 | let fired = false 116 | res.e = (error, status) => { 117 | if (!fired) { 118 | fired = true 119 | res.statusCode = status || 500 120 | res.end('Server ' + error) 121 | log('Res sent:', res.statusCode, error) 122 | } else { 123 | log('Already fired', error) 124 | } 125 | } 126 | next() 127 | } 128 | let pagehandler, sitemaphandler, searchhandler, securityhandler 129 | // Require the database adapter and initialize it with options. 130 | app.pagehandler = pagehandler = require(argv.database.type)(argv) 131 | 132 | // Require the sitemap adapter and initialize it with options. 133 | app.sitemaphandler = sitemaphandler = sitemapFactory(argv) 134 | 135 | // Require the site indexer and initialize it with options 136 | app.searchhandler = searchhandler = searchFactory(argv) 137 | 138 | // Require the security adapter and initialize it with options. 139 | app.securityhandler = securityhandler = require(argv.security_type)(log, loga, argv) 140 | 141 | // If the site is owned, owner will contain the name of the owner 142 | let owner = '' 143 | 144 | // If the user is logged in, user will contain their identity 145 | let user = '' 146 | 147 | // Called from authentication when the site is claimed, 148 | // to update the name of the owner held here. 149 | const updateOwner = id => { 150 | owner = id 151 | } 152 | 153 | // #### Middleware #### 154 | // 155 | // Allow json to be got cross origin. 156 | const cors = (req, res, next) => { 157 | res.header('Access-Control-Allow-Origin', req.get('origin') || '*') 158 | next() 159 | } 160 | 161 | const remoteGet = (remote, slug, cb) => { 162 | // assume http, as we know no better at this point and we need to specify a protocol. 
163 | const remoteURL = new URL(`http://${remote}/${slug}.json`).toString() 164 | // set a two second timeout 165 | fetch(remoteURL, { signal: AbortSignal.timeout(2000) }) 166 | .then(res => { 167 | if (res.ok) { 168 | return res 169 | } 170 | throw new Error(res.statusText) 171 | }) 172 | .then(res => { 173 | return res.json() 174 | }) 175 | .then(json => { 176 | cb(null, json, 200) 177 | }) 178 | .catch(err => { 179 | console.error('Unable to fetch remote resource', remote, slug, err) 180 | cb(err, 'Page not found', 404) 181 | }) 182 | } 183 | 184 | // #### Express configuration #### 185 | // Set up all the standard express server options, 186 | // including hbs to use handlebars/mustache templates 187 | // saved with a .html extension, and no layout. 188 | 189 | // 190 | const staticPathOptions = { 191 | dotfiles: 'ignore', 192 | etag: true, 193 | immutable: false, 194 | lastModified: false, 195 | maxAge: '1h', 196 | } 197 | 198 | app.set('views', path.join(require.resolve('wiki-client/package.json'), '..', 'views')) 199 | app.set('view engine', 'html') 200 | app.engine('html', hbs.express4()) 201 | app.set('view options', { layout: false }) 202 | 203 | // return deterministically colored strings 204 | const colorString = str => { 205 | const colorReset = '\x1b[0m' 206 | let hash = 0 207 | str.split('').forEach(char => { 208 | hash = char.charCodeAt(0) + ((hash << 5) - hash) 209 | }) 210 | let color = '\x1b[38;2' 211 | ;[...Array(3).keys()].forEach(i => { 212 | const value = (hash >> (i * 8)) & 0xff 213 | color += ':' + value.toString() 214 | }) 215 | color += 'm' 216 | return color + str + colorReset 217 | } 218 | 219 | // use logger, at least in development, probably needs a param to configure (or turn off). 220 | // use stream to direct to somewhere other than stdout. 
221 | logger.token('vhost', (req, res) => { 222 | return colorString(req.hostname) 223 | }) 224 | app.use(logger(':vhost :method :url :status :res[content-length] - :response-time ms')) 225 | app.use(cookieParser()) 226 | app.use(bodyParser.json({ limit: argv.uploadLimit })) 227 | app.use(bodyParser.urlencoded({ extended: true, limit: argv.uploadLimit })) 228 | app.use(methodOverride()) 229 | const cookieValue = { 230 | httpOnly: true, 231 | sameSite: 'lax', 232 | } 233 | if (argv.wiki_domain) { 234 | if (!argv.wiki_domain.endsWith('localhost')) { 235 | cookieValue['domain'] = argv.wiki_domain 236 | } 237 | } 238 | // use secureProxy as TLS is terminated in outside the node process 239 | let cookieName 240 | if (argv.secure_cookie) { 241 | cookieName = 'wikiTlsSession' 242 | cookieValue['secureProxy'] = true 243 | } else { 244 | cookieName = 'wikiSession' 245 | } 246 | app.use( 247 | sessions({ 248 | cookieName: cookieName, 249 | requestKey: 'session', 250 | secret: argv.cookieSecret, 251 | // make the session session_duration days long 252 | duration: argv.session_duration * 24 * 60 * 60 * 1000, 253 | // add 12 hours to session if less than 12 hours to expiry 254 | activeDuration: 24 * 60 * 60 * 1000, 255 | cookie: cookieValue, 256 | }), 257 | ) 258 | 259 | app.use(ourErrorHandler) 260 | 261 | // Add static route to the client 262 | app.use(express.static(argv.client, staticPathOptions)) 263 | 264 | // ##### Define security routes ##### 265 | securityhandler.defineRoutes(app, cors, updateOwner) 266 | 267 | // Add static route to assets 268 | app.use('/assets', cors, express.static(argv.assets)) 269 | 270 | // Add static routes to the plugins client. 
271 | Object.keys(require.main.require('./package').dependencies) 272 | .filter(depend => depend.startsWith('wiki-plugin')) 273 | .forEach(plugin => { 274 | const clientPath = path.join( 275 | path.dirname(require.resolve(`${plugin}/package`, { paths: require.main.paths })), 276 | 'client', 277 | ) 278 | const pluginPath = '/plugins/' + plugin.slice(12) 279 | app.use(pluginPath, cors, express.static(clientPath, staticPathOptions)) 280 | }) 281 | 282 | // Add static routes to the security client. 283 | if (argv.security != './security') { 284 | app.use('/security', express.static(path.join(argv.packageDir, argv.security_type, 'client'), staticPathOptions)) 285 | } 286 | 287 | // ##### Set up standard environments. ##### 288 | // In dev mode turn on console.log debugging as well as showing the stack on err. 289 | if ('development' == app.get('env')) { 290 | app.use(errorHandler()) 291 | argv.debug = true 292 | } 293 | 294 | // Show all of the options a server is using. 295 | log(argv) 296 | 297 | // #### Routes #### 298 | // Routes currently make up the bulk of the Express port of 299 | // Smallest Federated Wiki. Most routes use literal names, 300 | // or regexes to match, and then access req.params directly. 301 | 302 | // ##### Redirects ##### 303 | // Common redirects that may get used throughout the routes. 304 | const index = argv.home + '.html' 305 | const oops = '/oops' 306 | 307 | // ##### Get routes ##### 308 | // Routes have mostly been kept together by http verb, with the exception 309 | // of the openID related routes which are at the end together. 310 | 311 | // Main route for initial contact. Allows us to 312 | // link into a specific set of pages, local and remote. 313 | // Can also be handled by the client, but it also sets up 314 | // the login status, and related footer html, which the client 315 | // relies on to know if it is logged in or not. 
316 | app.get(/^((\/[a-zA-Z0-9:.-]+\/[a-z0-9-]+(_rev\d+)?)+)\/?$/, cors, (req, res, next) => { 317 | const urlPages = req.params[0] 318 | .split('/') 319 | .filter((_, index) => index % 2 === 0) 320 | .slice(1) 321 | const urlLocs = req.params[0] 322 | .split('/') 323 | .slice(1) 324 | .filter((_, index) => index % 2 === 0) 325 | if (['plugin', 'auth'].indexOf(urlLocs[0]) > -1) { 326 | return next() 327 | } 328 | const title = urlPages.slice().pop().replace(/-+/g, ' ') 329 | user = securityhandler.getUser(req) 330 | const info = { 331 | title, 332 | pages: [], 333 | authenticated: user ? true : false, 334 | user: user, 335 | seedNeighbors: argv.neighbors, 336 | owned: owner ? true : false, 337 | isOwner: securityhandler.isAuthorized(req) ? true : false, 338 | ownedBy: owner ? owner : '', 339 | } 340 | for (const [idx, page] of urlPages.entries()) { 341 | let pageDiv 342 | if (urlLocs[idx] === 'view') { 343 | pageDiv = { page } 344 | } else { 345 | pageDiv = { page, origin: `data-site=${urlLocs[idx]}` } 346 | } 347 | info.pages.push(pageDiv) 348 | } 349 | res.render('static.html', info) 350 | }) 351 | 352 | app.get(/^\/([a-z0-9-]+)\.html$/, cors, (req, res, next) => { 353 | const slug = req.params[0] 354 | log(slug) 355 | if (slug === 'runtests') return next() 356 | pagehandler.get(slug, (e, page, status) => { 357 | if (e) { 358 | return res.e(e) 359 | } 360 | if (status === 404) { 361 | return res.status(status).send(page) 362 | } 363 | page.title ||= slug.replace(/-+/g, ' ') 364 | page.story ||= [] 365 | user = securityhandler.getUser(req) 366 | 367 | const info = { 368 | title: page.title, 369 | pages: [ 370 | { 371 | page: slug, 372 | generated: 'data-server-generated=true', 373 | story: render(page), 374 | }, 375 | ], 376 | authenticated: user ? true : false, 377 | user: user, 378 | seedNeighbors: argv.neighbors, 379 | owned: owner ? true : false, 380 | isOwner: securityhandler.isAuthorized(req) ? true : false, 381 | ownedBy: owner ? 
owner : '', 382 | } 383 | res.render('static.html', info) 384 | }) 385 | }) 386 | 387 | app.get('/system/factories.json', (req, res) => { 388 | res.status(200) 389 | res.header('Content-Type', 'application/json') 390 | const factories = [] 391 | Object.keys(require.main.require('./package').dependencies) 392 | .filter(depend => depend.startsWith('wiki-plugin')) 393 | .forEach(plugin => { 394 | try { 395 | factories.push(require.main.require(`${plugin}/factory`)) 396 | } catch { 397 | // do nothing if plugin doesn't have a factory category. 398 | } 399 | }) 400 | res.end(JSON.stringify(factories)) 401 | }) 402 | 403 | // ###### Json Routes ###### 404 | // Handle fetching local and remote json pages. 405 | // Local pages are handled by the pagehandler module. 406 | app.get(/^\/([a-z0-9-]+)\.json$/, cors, (req, res) => { 407 | const file = req.params[0] 408 | pagehandler.get(file, (e, page, status) => { 409 | if (e) { 410 | return res.e(e) 411 | } 412 | res.status(status || 200).send(page) 413 | }) 414 | }) 415 | 416 | // Remote pages use the http client to retrieve the page 417 | // and sends it to the client. TODO: consider caching remote pages locally. 418 | app.get(/^\/remote\/([a-zA-Z0-9:.-]+)\/([a-z0-9-]+)\.json$/, (req, res) => { 419 | remoteGet(req.params[0], req.params[1], (e, page, status) => { 420 | if (e) { 421 | log('remoteGet error:', e) 422 | return res.e(e) 423 | } 424 | res.status(status || 200).send(page) 425 | }) 426 | }) 427 | 428 | // ###### Theme Routes ###### 429 | // If themes doesn't exist send 404 and let the client 430 | // deal with it. 431 | app.get(/^\/theme\/(\w+\.\w+)$/, cors, (req, res) => { 432 | res.sendFile(path.join(argv.status, 'theme', req.params[0]), { dotfiles: 'allow' }, e => { 433 | if (e) { 434 | // swallow the error if the theme does not exist... 
435 | if (req.path === '/theme/style.css') { 436 | res.set('Content-Type', 'text/css') 437 | res.send('') 438 | } else { 439 | res.sendStatus(404) 440 | } 441 | } 442 | }) 443 | }) 444 | 445 | // ###### Favicon Routes ###### 446 | // If favLoc doesn't exist send the default favicon. 447 | const favLoc = path.join(argv.status, 'favicon.png') 448 | const defaultFavLoc = path.join(argv.root, 'default-data', 'status', 'favicon.png') 449 | app.get('/favicon.png', cors, (req, res) => { 450 | fs.access(favLoc, fs.constants.F_OK, err => { 451 | if (!err) { 452 | res.sendFile(favLoc, { dotfiles: 'allow' }) 453 | } else { 454 | res.sendFile(defaultFavLoc) 455 | } 456 | }) 457 | }) 458 | 459 | const authorized = (req, res, next) => { 460 | if (securityhandler.isAuthorized(req)) { 461 | next() 462 | } else { 463 | console.log('rejecting', req.path) 464 | res.sendStatus(403) 465 | } 466 | } 467 | 468 | // Accept favicon image posted to the server, and if it does not already exist 469 | // save it. 470 | app.post('/favicon.png', authorized, (req, res) => { 471 | const favicon = req.body.image.replace(/^data:image\/png;base64,/, '') 472 | const buf = Buffer.from(favicon, 'base64') 473 | fs.access(argv.status, fs.constants.F_OK, err => { 474 | if (!err) { 475 | fs.writeFile(favLoc, buf, e => { 476 | if (e) { 477 | return res.e(e) 478 | } 479 | res.send('Favicon Saved') 480 | }) 481 | } else { 482 | fs.mkdir(argv.status, { recursive: true }, () => { 483 | fs.writeFile(favLoc, buf, e => { 484 | if (e) { 485 | return res.e(e) 486 | } 487 | res.send('Favicon Saved') 488 | }) 489 | }) 490 | } 491 | }) 492 | }) 493 | 494 | // Redirect remote favicons to the server they are needed from. 
495 | app.get(/^\/remote\/([a-zA-Z0-9:.-]+\/favicon.png)$/, (req, res) => { 496 | const remotefav = `http://${req.params[0]}` 497 | res.redirect(remotefav) 498 | }) 499 | 500 | // ###### Recycler Routes ###### 501 | // These routes are only available to the site's owner 502 | 503 | // Give the recycler a standard flag - use the Taiwan symbol as the use of 504 | // negative space outward pointing arrows nicely indicates that items can be removed 505 | const recyclerFavLoc = path.join(argv.root, 'default-data', 'status', 'recycler.png') 506 | app.get('/recycler/favicon.png', authorized, (req, res) => { 507 | res.sendFile(recyclerFavLoc, { dotfiles: 'allow' }) 508 | }) 509 | 510 | // Send an array of pages currently in the recycler via json 511 | app.get('/recycler/system/slugs.json', authorized, (req, res) => { 512 | fs.readdir(argv.recycler, (e, files) => { 513 | if (e) { 514 | return res.e(e) 515 | } 516 | const doRecyclermap = async file => { 517 | return new Promise(resolve => { 518 | const recycleFile = 'recycler/' + file 519 | pagehandler.get(recycleFile, (e, page, status) => { 520 | if (e || status === 404) { 521 | console.log('Problem building recycler map:', file, 'e: ', e) 522 | // this will leave an undefined/empty item in the array, which we will filter out later 523 | return resolve(null) 524 | } 525 | resolve({ 526 | slug: file, 527 | title: page.title, 528 | }) 529 | }) 530 | }) 531 | } 532 | 533 | Promise.all(files.map(doRecyclermap)) 534 | .then(recyclermap => { 535 | recyclermap = recyclermap.filter(el => !!el) 536 | res.send(recyclermap) 537 | }) 538 | .catch(error => { 539 | res.e(error) 540 | }) 541 | }) 542 | }) 543 | 544 | // Fetching page from the recycler 545 | /////^/([a-z0-9-]+)\.json$/// 546 | app.get(/^\/recycler\/([a-z0-9-]+)\.json$/, authorized, (req, res) => { 547 | const file = 'recycler/' + req.params[0] 548 | pagehandler.get(file, (e, page, status) => { 549 | if (e) { 550 | return res.e(e) 551 | } 552 | res.status(status || 
200).send(page) 553 | }) 554 | }) 555 | 556 | // Delete page from the recycler 557 | app.delete(/^\/recycler\/([a-z0-9-]+)\.json$/, authorized, (req, res) => { 558 | const file = 'recycler/' + req.params[0] 559 | pagehandler.delete(file, err => { 560 | if (err) { 561 | res.status(500).send(err) 562 | } 563 | res.status(200).send('') 564 | }) 565 | }) 566 | 567 | // ###### Meta Routes ###### 568 | // Send an array of pages in the database via json 569 | app.get('/system/slugs.json', cors, (req, res) => { 570 | pagehandler.slugs((err, files) => { 571 | if (err) { 572 | res.status(500).send(err) 573 | } 574 | res.send(files) 575 | }) 576 | }) 577 | 578 | // Returns a list of installed plugins. (does this get called anymore!) 579 | app.get('/system/plugins.json', cors, (req, res) => { 580 | try { 581 | const pluginNames = Object.keys(require.main.require('./package').dependencies) 582 | .filter(depend => depend.startsWith('wiki-plugin')) 583 | .map(name => name.slice(12)) 584 | res.send(pluginNames) 585 | } catch (e) { 586 | return res.e(e) 587 | } 588 | }) 589 | //{ 590 | const sitemapLoc = path.join(argv.status, 'sitemap.json') 591 | app.get('/system/sitemap.json', cors, (req, res) => { 592 | fs.access(sitemapLoc, fs.constants.F_OK, err => { 593 | if (!err) { 594 | res.sendFile(sitemapLoc, { dotfiles: 'allow' }) 595 | } else { 596 | // only createSitemap if we are not already creating one 597 | if (!sitemaphandler.isWorking()) { 598 | sitemaphandler.createSitemap(pagehandler) 599 | } 600 | // wait for the sitemap file to be written, before sending 601 | sitemaphandler.once('finished', () => { 602 | res.sendFile(sitemapLoc, { dotfiles: 'allow' }) 603 | }) 604 | } 605 | }) 606 | }) 607 | 608 | const xmlSitemapLoc = path.join(argv.status, 'sitemap.xml') 609 | app.get('/sitemap.xml', cors, (req, res) => { 610 | fs.access(sitemapLoc, fs.constants.F_OK, err => { 611 | if (!err) { 612 | res.sendFile(xmlSitemapLoc, { dotfiles: 'allow' }) 613 | } else { 614 | if 
(!sitemaphandler.isWorking()) { 615 | sitemaphandler.createSitemap(pagehandler) 616 | } 617 | sitemaphandler.once('finished', () => { 618 | res.sendFile(xmlSitemapLoc, { dotfiles: 'allow' }) 619 | }) 620 | } 621 | }) 622 | }) 623 | 624 | const searchIndexLoc = path.join(argv.status, 'site-index.json') 625 | app.get('/system/site-index.json', cors, (req, res) => { 626 | fs.access(searchIndexLoc, fs.constants.F_OK, err => { 627 | if (!err) { 628 | res.sendFile(searchIndexLoc, { dotfiles: 'allow' }) 629 | } else { 630 | // only create index if we are not already creating one 631 | if (!searchhandler.isWorking()) { 632 | searchhandler.createIndex(pagehandler) 633 | } 634 | searchhandler.once('indexed', () => { 635 | res.sendFile(searchIndexLoc, { dotfiles: 'allow' }) 636 | }) 637 | } 638 | }) 639 | }) 640 | 641 | app.get('/system/export.json', cors, (req, res) => { 642 | pagehandler.pages((e, sitemap) => { 643 | if (e) { 644 | return res.e(e) 645 | } 646 | const pagePromises = sitemap.map(stub => { 647 | return new Promise((resolve, reject) => { 648 | pagehandler.get(stub.slug, (error, page) => { 649 | if (error) { 650 | return reject(error) 651 | } 652 | resolve({ slug: stub.slug, page }) 653 | }) 654 | }) 655 | }) 656 | 657 | Promise.all(pagePromises) 658 | .then(pages => { 659 | const pageExport = pages.reduce((dict, combined) => { 660 | dict[combined.slug] = combined.page 661 | return dict 662 | }, {}) 663 | // TODO: this fails for a very large site 664 | res.json(pageExport) 665 | }) 666 | .catch(error => { 667 | res.e(error) 668 | }) 669 | }) 670 | }) 671 | 672 | const admin = (req, res, next) => { 673 | if (securityhandler.isAdmin(req)) { 674 | next() 675 | } else { 676 | console.log('rejecting', req.path) 677 | res.sendStatus(403) 678 | } 679 | } 680 | 681 | app.get('/system/version.json', admin, (req, res) => { 682 | const versions = {} 683 | const wikiModule = require.main 684 | versions[wikiModule.require('./package').name] = 
wikiModule.require('./package').version 685 | versions[wikiModule.require('wiki-server/package').name] = wikiModule.require('wiki-server/package').version 686 | versions[wikiModule.require('wiki-client/package').name] = wikiModule.require('wiki-client/package').version 687 | 688 | versions['security'] = {} 689 | Object.keys(require.main.require('./package').dependencies) 690 | .filter(depend => depend.startsWith('wiki-security')) 691 | .forEach(key => { 692 | versions.security[key] = wikiModule.require(`${key}/package`).version 693 | }) 694 | 695 | versions['plugins'] = {} 696 | Object.keys(require.main.require('./package').dependencies) 697 | .filter(depend => depend.startsWith('wiki-plugin')) 698 | .forEach(key => { 699 | versions.plugins[key] = wikiModule.require(`${key}/package`).version 700 | }) 701 | 702 | res.json(versions) 703 | }) 704 | 705 | // ##### Proxy routes ##### 706 | 707 | app.get('/proxy/*splat', authorized, (req, res) => { 708 | const pathParts = req.originalUrl.split('/') 709 | const remoteHost = pathParts[2] 710 | pathParts.splice(0, 3) 711 | const remoteResource = pathParts.join('/') 712 | // this will fail if remote is TLS only! 
713 | const requestURL = 'http://' + remoteHost + '/' + remoteResource 714 | console.log('PROXY Request: ', requestURL) 715 | if ( 716 | requestURL.endsWith('.json') || 717 | requestURL.endsWith('.png') || 718 | requestURL.endsWith('.jpg') || 719 | pathParts[0] === 'plugin' 720 | ) { 721 | fetch(requestURL, { signal: AbortSignal.timeout(2000) }) 722 | .then(async fetchRes => { 723 | if (fetchRes.ok) { 724 | res.set('content-type', fetchRes.headers.get('content-type')) 725 | res.set('last-modified', fetchRes.headers.get('last-modified')) 726 | await pipeline(fetchRes.body, res) 727 | } else { 728 | res.status(fetchRes.status).end() 729 | } 730 | }) 731 | .catch(err => { 732 | console.log('ERROR: Proxy Request ', requestURL, err) 733 | res.status(500).end() 734 | }) 735 | } else { 736 | res.status(400).end() 737 | } 738 | }) 739 | 740 | // ##### Put routes ##### 741 | 742 | app.put(/^\/page\/([a-z0-9-]+)\/action$/i, authorized, (req, res) => { 743 | const action = JSON.parse(req.body.action) 744 | // Handle all of the possible actions to be taken on a page, 745 | const actionCB = (e, page, status) => { 746 | //if e then return res.e e 747 | if (status === 404) { 748 | // res.status(status).send(page) 749 | return res.e(page, status) 750 | } 751 | // Using Coffee-Scripts implicit returns we assign page.story to the 752 | // result of a list comprehension by way of a switch expression. 753 | try { 754 | page.story = (() => { 755 | switch (action.type) { 756 | case 'move': 757 | return action.order.map(id => { 758 | const match = page.story.filter(para => id === para.id)[0] 759 | if (!match) throw 'Ignoring move. Try reload.' 
760 | return match 761 | }) 762 | case 'add': { 763 | const idx = page.story.map(para => para.id).indexOf(action.after) + 1 764 | page.story.splice(idx, 0, action.item) 765 | return page.story 766 | } 767 | 768 | case 'remove': 769 | return page.story.filter(para => para?.id !== action.id) 770 | 771 | case 'edit': 772 | return page.story.map(para => { 773 | if (para.id === action.id) { 774 | return action.item 775 | } else { 776 | return para 777 | } 778 | }) 779 | 780 | case 'create': 781 | case 'fork': 782 | return page.story || [] 783 | 784 | default: 785 | log('Unfamiliar action:', action) 786 | //page.story 787 | throw 'Unfamiliar action ignored' 788 | } 789 | })() 790 | } catch (e) { 791 | return res.e(e) 792 | } 793 | // Add a blank journal if it does not exist. 794 | // And add what happened to the journal. 795 | if (!page.journal) { 796 | page.journal = [] 797 | } 798 | if (action.fork) { 799 | page.journal.push({ type: 'fork', site: action.fork, date: action.date - 1 }) 800 | delete action.fork 801 | } 802 | page.journal.push(action) 803 | pagehandler.put(req.params[0], page, e => { 804 | if (e) return res.e(e) 805 | res.send('ok') 806 | // log 'saved' 807 | }) 808 | // update sitemap 809 | sitemaphandler.update(req.params[0], page) 810 | 811 | // update site index 812 | searchhandler.update(req.params[0], page) 813 | } 814 | // log action 815 | 816 | // If the action is a fork, get the page from the remote server, 817 | // otherwise ask pagehandler for it. 818 | if (action.fork) { 819 | pagehandler.saveToRecycler(req.params[0], err => { 820 | if (err && err !== 'page does not exist') { 821 | console.log(`Error saving ${req.params[0]} before fork: ${err}`) 822 | } 823 | if (action.forkPage) { 824 | const forkPageCopy = JSON.parse(JSON.stringify(action.forkPage)) 825 | delete action.forkPage 826 | actionCB(null, forkPageCopy) 827 | } else { 828 | // Legacy path, new clients will provide forkPage on implicit forks. 
829 | remoteGet(action.fork, req.params[0], actionCB) 830 | } 831 | }) 832 | } else if (action.type === 'create') { 833 | // Prevent attempt to write circular structure 834 | const itemCopy = JSON.parse(JSON.stringify(action.item)) 835 | pagehandler.get(req.params[0], (e, page, status) => { 836 | if (e) return actionCB(e) 837 | if (status !== 404) { 838 | res.status(409).send('Page already exists.') 839 | } else { 840 | actionCB(null, itemCopy) 841 | } 842 | }) 843 | } else if (action.type === 'fork') { 844 | pagehandler.saveToRecycler(req.params[0], err => { 845 | if (err) console.log(`Error saving ${req.params[0]} before fork: ${err}`) 846 | if (action.forkPage) { 847 | // push 848 | const forkPageCopy = JSON.parse(JSON.stringify(action.forkPage)) 849 | delete action.forkPage 850 | actionCB(null, forkPageCopy) 851 | } else { 852 | // pull 853 | remoteGet(action.site, req.params[0], actionCB) 854 | } 855 | }) 856 | } else { 857 | pagehandler.get(req.params[0], actionCB) 858 | } 859 | }) 860 | 861 | // Return the oops page when login fails. 862 | app.get('/oops', (req, res) => { 863 | res.statusCode = 403 864 | res.render('oops.html', { msg: 'This is not your wiki!' 
}) 865 | }) 866 | 867 | // Traditional request to / redirects to index :) 868 | app.get('/', cors, (req, res) => { 869 | const home = path.join(argv.assets, 'home', 'index.html') 870 | fs.stat(home, (err, stats) => { 871 | if (err || !stats.isFile()) { 872 | res.redirect(index) 873 | } else { 874 | res.redirect('/assets/home/index.html') 875 | } 876 | }) 877 | }) 878 | 879 | // ##### Delete Routes ##### 880 | 881 | app.delete(/^\/([a-z0-9-]+)\.json$/, authorized, (req, res) => { 882 | const pageFile = req.params[0] 883 | // we need the original page text to remove it from the index, so get the original text before deleting it 884 | pagehandler.get(pageFile, (e, page, status) => { 885 | const title = page.title 886 | pagehandler.delete(pageFile, err => { 887 | if (err) { 888 | res.status(500).send(err) 889 | } else { 890 | sitemaphandler.removePage(pageFile) 891 | res.status(200).send('') 892 | // update site index 893 | searchhandler.removePage(req.params[0]) 894 | } 895 | }) 896 | }) 897 | }) 898 | 899 | // #### Start the server #### 900 | // 901 | // set a default process exitCode, so we can diferentiate between exiting as part of a reload, 902 | // and an exit after an uncaught error. 903 | // except when test is set, so the tests don't report a fail when closing the server process. 904 | process.exitCode = argv.test ? 0 : 1 905 | 906 | // Wait to make sure owner is known before listening. 907 | securityhandler.retrieveOwner(e => { 908 | // Throw if you can't find the initial owner 909 | if (e) throw e 910 | owner = securityhandler.getOwner() 911 | console.log('owner: ' + owner) 912 | app.emit('owner-set') 913 | }) 914 | 915 | app.on('running-serv', server => { 916 | // ### Plugins ### 917 | // Should replace most WebSocketServers below. 
918 | const plugins = pluginsFactory(argv) 919 | plugins.startServers({ argv, app }) 920 | // ### Sitemap ### 921 | // create sitemap at start-up 922 | sitemaphandler.createSitemap(pagehandler) 923 | // create site index at start-up 924 | searchhandler.startUp(pagehandler) 925 | }) 926 | 927 | // Return app when called, so that it can be watched for events and shutdown with .close() externally. 928 | return app 929 | } 930 | --------------------------------------------------------------------------------