├── .datignore ├── .gitignore ├── .mailmap ├── .npmignore ├── .travis.yml ├── AUTHORS.txt ├── Gruntfile.js ├── LICENSE.txt ├── README.md ├── ReadMe-old.md ├── client.coffee ├── client ├── .datignore ├── ReadMe.md ├── client-bootstrap.js ├── client-loader.js ├── favicon.png ├── images │ ├── crosses.png │ ├── email_sign_in_blue.png │ ├── external-link-ltr-icon.png │ ├── linen2.jpg │ ├── linen4.jpg │ ├── noise.png │ └── oops.jpg ├── index.html ├── index.json ├── js │ ├── d3 │ │ └── d3.min.js │ ├── jquery-2.2.4.js │ ├── jquery-2.2.4.min.js │ ├── jquery-2.2.4.min.map │ ├── jquery-migrate-1.4.1.js │ ├── jquery-migrate-1.4.1.min.js │ ├── jquery-ui │ │ └── 1.11.4 │ │ │ ├── images │ │ │ ├── ui-bg_flat_0_aaaaaa_40x100.png │ │ │ ├── ui-bg_flat_75_ffffff_40x100.png │ │ │ ├── ui-bg_glass_55_fbf9ee_1x400.png │ │ │ ├── ui-bg_glass_65_ffffff_1x400.png │ │ │ ├── ui-bg_glass_75_dadada_1x400.png │ │ │ ├── ui-bg_glass_75_e6e6e6_1x400.png │ │ │ ├── ui-bg_glass_95_fef1ec_1x400.png │ │ │ ├── ui-bg_highlight-soft_75_cccccc_1x100.png │ │ │ ├── ui-icons_222222_256x240.png │ │ │ ├── ui-icons_2e83ff_256x240.png │ │ │ ├── ui-icons_454545_256x240.png │ │ │ ├── ui-icons_888888_256x240.png │ │ │ └── ui-icons_cd0a0a_256x240.png │ │ │ ├── jquery-ui.min.css │ │ │ └── jquery-ui.min.js │ ├── jquery.ui.touch-punch.min.js │ ├── underscore-min.js │ ├── underscore-min.js.map │ └── underscore-min.map ├── oops.html ├── pages │ ├── federated-wiki.json │ ├── how-to-wiki.json │ ├── smallest-federated-wiki.json │ ├── system │ │ └── sitemap.json │ └── welcome-visitors.json ├── plugins.json ├── runtests.html ├── style │ ├── print.css │ └── style.css ├── test │ ├── mocha.css │ └── mocha.js ├── twitter-maintainance.jpg ├── ui.html ├── wiki.json └── wiki │ ├── about-federated-wiki.json │ ├── federated-wiki.json │ ├── field-guide-to-the-federation.json │ ├── frequently-asked-questions.json │ ├── system │ └── sitemap.json │ ├── welcome-visitors.json │ ├── wiki-to-wiki.json │ └── youre-new-here.json ├── favicon.png ├── lib ├── actionSymbols.coffee ├── active.coffee ├── addToJournal.coffee ├── bind.coffee ├── datHandler.coffee ├── dialog.coffee ├── drop.coffee ├── editor.coffee ├── factory.coffee ├── forward.js ├── future.coffee ├── image.coffee ├── importer.coffee ├── itemz.coffee ├── legacy.coffee ├── license.coffee ├── lineup.coffee ├── link.coffee ├── neighborhood.coffee ├── neighbors.coffee ├── page.coffee ├── pageHandler.coffee ├── paragraph.coffee ├── plugin.coffee ├── plugins.coffee ├── random.coffee ├── reference.coffee ├── refresh.coffee ├── resolve.coffee ├── revision.coffee ├── search.coffee ├── searchbox.coffee ├── security.coffee ├── siteAdapter.coffee ├── siteindexHandler.coffee ├── sitemapHandler.coffee ├── state.coffee ├── synopsis.coffee ├── target.coffee ├── util.coffee └── wiki.coffee ├── package.json ├── scripts ├── call-graph.dot ├── call-sites.dot ├── call-sites.pl ├── requires-graph.dot ├── requires-graph.pl ├── squeeze-logic.coffee └── squeeze-test.js ├── test ├── active.coffee ├── drop.coffee ├── lineup.coffee ├── mockServer.coffee ├── neighborhood.coffee ├── page.coffee ├── pageHandler.coffee ├── plugin.coffee ├── random.coffee ├── refresh.coffee ├── resolve.coffee ├── revision.coffee ├── search.coffee ├── util.coffee └── wiki.coffee └── testclient.coffee /.datignore: -------------------------------------------------------------------------------- 1 | .git 2 | .dat 3 | .mailmap 4 | node_modules 5 | *.log 6 | **/.DS_Store 7 | Thumbs.db 8 | -------------------------------------------------------------------------------- 
/.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | *.swp 3 | *.iml 4 | .idea/ 5 | .sass-cache 6 | .rvmrc 7 | 8 | node_modules 9 | npm-debug.log 10 | 11 | # ignore build directory - used while building with grunt 12 | build 13 | 14 | # ignore generated javascript - recreated by running grunt 15 | test/*.js 16 | test/*.map 17 | client/client.js 18 | client/client.*.js 19 | client/client.map 20 | client/client.*.map 21 | client/test/testclient.js 22 | client/test/testclient.map 23 | 24 | # ignore dat files 25 | .dat 26 | dat.json 27 | 28 | # ignore hyperdrive files 29 | .hyperdrive-import-key 30 | 31 | # ignore plugins 32 | client/plugins/ 33 | client/wiki.json -------------------------------------------------------------------------------- /.mailmap: -------------------------------------------------------------------------------- 1 | Nick Niemeir 2 | Joshua Benuck 3 | Eric Dobbs 4 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # ignore the build directory 2 | build 3 | 4 | 5 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - "10" 4 | - "12" 5 | - "13" 6 | install: 7 | - npm install 8 | before_install: 9 | - npm install -g grunt-cli 10 | script: 11 | - grunt mochaTest 12 | sudo: false 13 | -------------------------------------------------------------------------------- /AUTHORS.txt: -------------------------------------------------------------------------------- 1 | Authors ordered by first contribution 2 | 3 | Ward Cunningham 4 | Stephen Judkins 5 | Sam Goldstein 6 | Steven Black 7 | Don Park 8 | Sven Dowideit 9 | Adam Solove 10 | Nick Niemeir 11 | Erkan Yilmaz 12 | Matt Niemeir 13 | Daan van Berkel 14 | Nicholas Hallahan 15 | Ola Bini 16 | Danilo Sato 17 | Henning Schumann 18 | Michael Deardeuff 19 | Pete Hodgson 20 | Marcin Cieslak 21 | M. Kelley Harris (http://www.kelleyharris.com) 22 | Ryan Bennett 23 | Paul Rodwell 24 | David Turnbull 25 | Austin King 26 | enyst 27 | Enrico Spinielli 28 | judell 29 | Santiago Ferreira 30 | i2p-lbt 31 | Andrew Ettinger 32 | Robert Smith 33 | Joshua Benuck 34 | Eric Dobbs 35 | -------------------------------------------------------------------------------- /Gruntfile.js: -------------------------------------------------------------------------------- 1 | module.exports = function (grunt) { 2 | grunt.loadNpmTasks('grunt-browserify'); 3 | grunt.loadNpmTasks('grunt-contrib-watch'); 4 | grunt.loadNpmTasks('grunt-mocha-test'); 5 | grunt.loadNpmTasks('grunt-contrib-clean'); 6 | // grunt.loadNpmTasks('grunt-contrib-uglify-es'); 7 | grunt.loadNpmTasks('grunt-git-authors'); 8 | grunt.loadNpmTasks('grunt-retire'); 9 | 10 | // N.B. The development build includes paths in the mapfile, at the browserify step, that are not accessable 11 | // from the browser. 
12 | 13 | 14 | 15 | grunt.initConfig({ 16 | 17 | pkg: grunt.file.readJSON('package.json'), 18 | 19 | authors: { 20 | prior: [ 21 | "Ward Cunningham ", 22 | "Stephen Judkins ", 23 | "Sam Goldstein ", 24 | "Steven Black ", 25 | "Don Park ", 26 | "Sven Dowideit ", 27 | "Adam Solove ", 28 | "Nick Niemeir ", 29 | "Erkan Yilmaz ", 30 | "Matt Niemeir ", 31 | "Daan van Berkel ", 32 | "Nicholas Hallahan ", 33 | "Ola Bini ", 34 | "Danilo Sato ", 35 | "Henning Schumann ", 36 | "Michael Deardeuff ", 37 | "Pete Hodgson ", 38 | "Marcin Cieslak ", 39 | "M. Kelley Harris (http://www.kelleyharris.com)", 40 | "Ryan Bennett ", 41 | "Paul Rodwell ", 42 | "David Turnbull ", 43 | "Austin King " 44 | ] 45 | }, 46 | 47 | retire: { 48 | js: ['client/js/*.js'], 49 | options: {} 50 | }, 51 | 52 | // tidy-up before we start the build 53 | clean: ['build/*', 'client/client.js', 'client/client.map', 'client/client.*.js', 'client/client.*.map', 'client/test/testclient.js'], 54 | 55 | browserify: { 56 | // build the client that we will include in the package 57 | packageClient: { 58 | src: ['./client.coffee'], 59 | dest: 'client/client.js', 60 | options: { 61 | transform: ['coffeeify', 'browserify-versionify'], 62 | // transform: [['coffeeify', {transpile: {presets: ['@babel/preset-env']}}]], 63 | browserifyOptions: { 64 | extensions: ".coffee" 65 | } 66 | } 67 | }, 68 | // build for local development version of the client will go here (once mapfile issues are resolved) 69 | 70 | // build the browser testclient 71 | testClient: { 72 | src: ['./testclient.coffee'], 73 | dest: 'client/test/testclient.js', 74 | options: { 75 | transform: [['coffeeify', {transpile: {presets: ['@babel/preset-env']}}]], 76 | browserifyOptions: { 77 | extensions: ".coffee" 78 | } 79 | } 80 | } 81 | }, 82 | 83 | mochaTest: { 84 | test: { 85 | options: { 86 | timeout: false, 87 | reporter: 'spec', 88 | require: 'coffeescript/register' 89 | }, 90 | src: [ 91 | 'test/util.coffee', 92 | 'test/random.coffee', 93 | 'test/page.coffee', 94 | 'test/lineup.coffee', 95 | 'test/drop.coffee', 96 | 'test/revision.coffee', 97 | 'test/resolve.coffee', 98 | 'test/wiki.coffee' 99 | ] 100 | } 101 | }, 102 | 103 | watch: { 104 | all: { 105 | files: ['test/*.coffee', 'lib/*.coffee', '*.coffee'], 106 | tasks: ['build'] 107 | } 108 | } 109 | }); 110 | 111 | // build without sourcemaps 112 | grunt.registerTask('build', ['clean', 'browserify:packageClient', 'browserify:testClient']); 113 | 114 | // check for out-of-date libraries and known vulnerabilities 115 | 116 | grunt.registerTask('check', ['retire']); 117 | 118 | 119 | // the default is to do the production build. 120 | grunt.registerTask('default', ['build']); 121 | 122 | }; 123 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2011-2018 Ward Cunningham and other contributors 4 | 5 | This software consists of voluntary contributions made by many 6 | individuals. 
For exact contribution history, see the revision history 7 | available at https://github.com/fedwiki/wiki-client 8 | 9 | The following license applies to all parts of this software except as 10 | documented below: 11 | 12 | ==== 13 | 14 | Permission is hereby granted, free of charge, to any person obtaining a copy 15 | of this software and associated documentation files (the "Software"), to deal 16 | in the Software without restriction, including without limitation the rights 17 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 18 | copies of the Software, and to permit persons to whom the Software is 19 | furnished to do so, subject to the following conditions: 20 | 21 | The above copyright notice and this permission notice shall be included in 22 | all copies or substantial portions of the Software. 23 | 24 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 25 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 26 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 27 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 28 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 29 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 30 | THE SOFTWARE. 31 | 32 | ==== 33 | 34 | All files located in the client pages directory are licensed under a 35 | Creative Commons Attribution-ShareAlike 4.0 International License. 36 | 37 | CC BY-SA 4.0 : http://creativecommons.org/licenses/by-sa/4.0/ 38 | 39 | ==== 40 | 41 | All files located in the node_modules and client/js are 42 | externally maintained libraries used by this software which have their 43 | own licenses; we recommend you read them, as their terms may differ from 44 | the terms above. 45 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # wiki-client (hyperdrive variant) 2 | 3 | A variant of the Federated Wiki client-side javascript for exploring using beaker browser, and Hyperdrive based federated wiki sites. 4 | 5 | This hyperdrive wiki client is currently level with the June 2020 build (v0.20.3) of the Node version. 6 | 7 | ## Creating a New Wiki: 8 | 9 | You will need a recent version of Beaker Browser, *1.0 beta 1 or later*. 10 | 11 | In Beaker Browser open the Federated Wiki Client drive, [hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6](hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6). Click on the "Creating a New Wiki" link, fill in the form, and click on "Create Wiki" button. This will create your new wiki, and open it in new browser tab. 12 | 13 | From there you can get started creating content and sharing it with the world. 14 | 15 | ## Sharing your wiki: 16 | 17 | The default configuration for Beaker will leave the hyperdrive daemon running in background to share your hyperdrives with others. If you are writing together with others, you may want to use the option to ["Host This Drive"](https://docs.beakerbrowser.com/beginner/hosting-hyperdrives) to help keep each others wiki available online. 18 | 19 | There currently is not a simple option to share a hyperdrive wiki with the wider http(s) based federation, as it was with the previous version of Beaker. It is hoped that an easy way of doing this is available soon. 20 | 21 |
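As an aside for readers who want to poke at a wiki programmatically: pages are stored as plain JSON files (this site keeps them under `/wiki/`), and they can be read with the same `beaker.hyperdrive` API the client uses. A minimal sketch, assuming the Beaker 1.0 beta APIs; the drive URL below is this client's drive and the page slug is just an example:

```js
// Minimal sketch: read one wiki page from a hyperdrive inside Beaker.
// Assumes the beaker.hyperdrive API (Beaker 1.0 beta 1 or later).
async function readWikiPage (driveUrl, slug) {
  const drive = beaker.hyperdrive.drive(driveUrl)
  const json = await drive.readFile(`/wiki/${slug}.json`)   // pages live under /wiki
  return JSON.parse(json)                                   // { title, story, journal }
}

readWikiPage('hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6', 'welcome-visitors')
  .then(page => console.log(page.title, page.story.length))
```
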
22 | 23 | For more information on the Hyperdrive Protocol, visit [their website](https://hypercore-protocol.org/). 24 | 25 | We have a chat group on Matrix, [#fedwiki:matrix.org](https://matrix.to/#/#fedwiki:matrix.org), and meet-up for a video chat on Wednesdays at 10am Pacific Time (PST/PDT) *location gets announced in chat*. 26 | 27 | 30 | --- 31 | 32 | The hyperdrive version of the Federated Wiki has been split into two parts: wiki storage, client (which includes the core plugins). The wiki client is mounted within each wiki's hyperdrive as a [mounted frontend](https://docs.beakerbrowser.com/developers/frontends-.ui-folder). 33 | 34 | This [git repository](https://github.com/paul90/wiki-client-dat-variant) contains the client logic needed to display the wiki, the client subdirectory is used to create the *wiki client's* hyperdrive [hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6](hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6). 35 | -------------------------------------------------------------------------------- /ReadMe-old.md: -------------------------------------------------------------------------------- 1 | Client Goals 2 | ============ 3 | 4 | A server offers direct restful read/write access to pages it owns and proxy access to pages held elsewhere in federated space. 5 | A page is owned if it was created with the server or has been cloned and edited such that it is believed to be the most authoritative copy of a page previously owned elsewhere. 6 | A server operates as a proxy to the rest of the federated wiki. 7 | In this role it reformats data and metadata providing a unified experience. 8 | It is welcome to collect behavioral statistics in order to improve this experience by anticipating permitted peer-to-peer server operations. 9 | 10 | In summary, the server's client side exists to: 11 | 12 | * Offer to a user a browsing experience that is independent of any specific server. 13 | * Support writing, editing and curating of one server in a way that offers suitable influence over others. 14 | 15 | Working with Browserify 16 | ======================= 17 | 18 | The client side is written in CoffeeScript, and built with Browserify. 19 | If you are not checking in changes you need not concern yourself with this. 20 | We've checked in the generated Javascript for the client application. 21 | 22 | If you do want to check in changes, install node v0.6.x 23 | 24 | * On Linux download the source from [GitHub](https://github.com/joyent/node) 25 | * On Windows get the installer from the [main node.js site](http://nodejs.org). 26 | * On Mac you should be able to choose either. 27 | 28 | Once node is installed come back to this directory and run: 29 | 30 | * `npm install` To install CoffeeScript, Browserify, and all their dependencies. 31 | 32 | You can now use: 33 | 34 | * `npm start` To build the main client. 35 | * `npm test` To build the test client. 36 | 37 | These commands build client.js and test/testclient.js from client.coffee and 38 | testclient.coffee respectively. They use their entry files to require the 39 | rest of the coffee script they need from the source CS files in /lib. 40 | 41 | We also have a cool automated talking (Mac only) Perl build script that uses 42 | a globally installed browserify via `npm install -g browserify`, it watches 43 | for changes, builds the clients automatically, and gives a verbal report 44 | when you have syntax errors. 
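For context, the `npm start` build described above amounts to roughly this Browserify call. This is an illustrative sketch only, not the project's actual build script; the output path follows the current Gruntfile and the options shown are assumptions:

```js
// Rough sketch of the client build: bundle client.coffee plus everything it
// requires from /lib, compiling CoffeeScript on the way in via coffeeify.
const browserify = require('browserify')
const fs = require('fs')

browserify('./client.coffee', { extensions: ['.coffee'] })
  .transform('coffeeify')
  .bundle()
  .on('error', err => console.error(err.message))
  .pipe(fs.createWriteStream('client/client.js'))
```

The test client build is the same shape, with `testclient.coffee` as the entry file and `client/test/testclient.js` as the output.
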
45 | 46 | Testing 47 | ======= 48 | 49 | All the client tests can be run by visiting /runtests.html on your server 50 | or by running the full ruby test suite. Information about the libraries we 51 | are using for testing can be found at: 52 | 53 | * http://visionmedia.github.com/mocha/ 54 | * https://github.com/LearnBoost/expect.js 55 | * http://sinonjs.org/ 56 | 57 | CoffeeScript hints 58 | ================== 59 | 60 | We recommend taking time to learn the CoffeeScript syntax and the rationale for the Javascript idioms it employs. Start here: 61 | 62 | http://jashkenas.github.com/coffee-script/ 63 | 64 | We used a Javascript to Coffeescript converter to create the first draft of client.coffee. You may find this converter useful for importing sample codes. 65 | 66 | http://ricostacruz.com/js2coffee/ 67 | 68 | -------------------------------------------------------------------------------- /client.coffee: -------------------------------------------------------------------------------- 1 | console.log "Window Name: " + window.name 2 | window.name = window.location.host 3 | 4 | window.wiki = require './lib/wiki' 5 | require './lib/legacy' 6 | require './lib/bind' 7 | require './lib/plugins' 8 | -------------------------------------------------------------------------------- /client/.datignore: -------------------------------------------------------------------------------- 1 | .git 2 | .dat 3 | node_modules 4 | *.log 5 | **/.DS_Store 6 | Thumbs.db 7 | -------------------------------------------------------------------------------- /client/ReadMe.md: -------------------------------------------------------------------------------- 1 | # wiki-client (hyperdrive variant) 2 | 3 | A variant of the Federated Wiki client-side javascript for exploring using beaker browser, and Hyperdrive based federated wiki sites. 4 | 5 | This hyperdrive wiki client is currently level with the June 2020 build (v0.20.3) of the Node version. 6 | 7 | ## Creating a New Wiki: 8 | 9 | You will need a recent version of Beaker Browser, *1.0 beta 1 or later*. 10 | 11 | In Beaker Browser open the Federated Wiki Client drive, [hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6](hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6). Click on the "Creating a New Wiki" link, fill in the form, and click on "Create Wiki" button. This will create your new wiki, and open it in new browser tab. 12 | 13 | From there you can get started creating content and sharing it with the world. 14 | 15 | ## Sharing your wiki: 16 | 17 | The default configuration for Beaker will leave the hyperdrive daemon running in background to share your hyperdrives with others. If you are writing together with others, you may want to use the option to ["Host This Drive"](https://docs.beakerbrowser.com/beginner/hosting-hyperdrives) to help keep each others wiki available online. 18 | 19 | There currently is not a simple option to share a hyperdrive wiki with the wider http(s) based federation, as it was with the previous version of Beaker. It is hoped that an easy way of doing this is available soon. 20 | 21 |
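One wiring detail worth calling out here: each wiki's `wiki.json` carries the key of the client drive, and the bootstrap script resolves the client loader from it. Below is a simplified sketch of that lookup for the non-frontend case; the real logic, including the mounted `/.ui` frontend path, is in `client-bootstrap.js` further down:

```js
// Simplified sketch of how client-bootstrap.js locates the client loader
// when the client is not mounted as a /.ui frontend.
async function findClientLoader () {
  const wiki = beaker.hyperdrive.drive(window.location.origin)
  const config = JSON.parse(await wiki.readFile('/wiki.json'))  // { "client": { "key": ... } }
  return new URL('/client-loader.js', 'hyper://' + config.client.key)
}
```
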
22 | 23 | For more information on the Hyperdrive Protocol, visit [their website](https://hypercore-protocol.org/). 24 | 25 | We have a chat group on Matrix, [#fedwiki:matrix.org](https://matrix.to/#/#fedwiki:matrix.org), and meet-up for a video chat on Wednesdays at 10am Pacific Time (PST/PDT) *location gets announced in chat*. 26 | 27 | 30 | --- 31 | 32 | The hyperdrive version of the Federated Wiki has been split into two parts: wiki storage, client (which includes the core plugins). The wiki client is mounted within each wiki's hyperdrive as a [mounted frontend](https://docs.beakerbrowser.com/developers/frontends-.ui-folder). 33 | 34 | This [git repository](https://github.com/paul90/wiki-client-dat-variant) contains the client logic needed to display the wiki, the client subdirectory is used to create the *wiki client's* hyperdrive [hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6](hyper://cbbc6003c42ba597635ef590e326b59512c06c56d61b100aa141ed51011a29e6). 35 | -------------------------------------------------------------------------------- /client/client-bootstrap.js: -------------------------------------------------------------------------------- 1 | // This is the first stage of the client loader. 2 | // 3 | // Here we check to see if we are running in a Hyperdrive 10 capable browser (one that 4 | // supports the beaker.hyperdrive API). 5 | // 6 | // If the browser being used is not dat capable, we create a cover page, which 7 | // includes a list of pages this wiki has, and a page flag that can be dragged 8 | // into the lineup of the server based wiki client. 9 | 10 | 'use strict' 11 | 12 | var myVersion = "20.05.19 (client wiki)" 13 | console.log('+++ Client Bootstrap Version: ', myVersion) 14 | 15 | // do we have the beaker API? 16 | if (!('beaker' in window)) { 17 | // lets just check we are not in a frame first... 18 | var inFrame = (window.self !== window.top) 19 | 20 | // some helper from pfrazee's ui kit - https://github.com/pfrazee/pauls-ui-kit 21 | function $ (el, sel = undefined) { 22 | if (typeof sel === 'string') { 23 | return el.querySelector(sel) 24 | } 25 | return document.querySelector(el) 26 | } 27 | function $$ (el, sel = undefined) { 28 | if (typeof sel === 'string') { 29 | return Array.from(el.querySelectorAll(sel)) 30 | } 31 | return Array.from(document.querySelectorAll(el)) 32 | } 33 | function render (html) { 34 | var template = document.createElement('template') 35 | template.innerHTML = html 36 | return template.content 37 | } 38 | // end of helpers 39 | // add cover page style 40 | $('head').append(render(` 41 | `)) 70 | 71 | // replace page with cover page content 72 | document.body.append(render(` 73 |
74 |
75 |

Welcome Visitors

76 |
77 |
78 |

Welcome to this dat-based Federated Wiki site.

79 |

For more information about Federated Wiki, a good place to start is About Federated Wiki.

80 |
81 |
`)) 82 | 83 | if (inFrame) { 84 | $('main').append(render(` 85 |

It looks as if you are seeing Federated Wiki in a browser frame. The Hyperdrive 10 API 86 | that we rely on is not currently available in this environment, so you are seeing a cover page.

87 |

Press HERE to open this Federated Wiki in a new tab.

`)) 88 | } 89 | 90 | // add list of pages 91 | 92 | fetch('/wiki/system/sitemap.json') 93 | .then(function(response) { 94 | return response.json() 95 | }) 96 | .then(function(sitemap) { 97 | if (Array.isArray(sitemap)) { 98 | if (sitemap.length > 0) { 99 | $('main').append(render(` 100 |

The contents of this Federated Wiki site can be accessed either:

101 |
    102 |
  • with a traditional web browser, by dragging the page flag above to another Federated Wiki site, or
  • 103 |
  • with a browser that has Hyperdrive 10 support, for example Beaker Browser, by opening this site.
  • 104 |
105 |

This wiki contains the following pages:

106 |
    107 |
`)) 108 | sitemap.forEach(function(page) { 109 | $('#pages').append(render(`
  • ${page.title}
  • `)) 110 | }) 111 | 112 | } else { 113 | $('main').append(render(` 114 |

    This Federated Wiki is empty.

    `)) 115 | } 116 | } else { 117 | $('main').append(render(` 118 |

    There were problems reading the list of pages for this Federated Wiki.

    `)) 119 | } 120 | }) 121 | } else { 122 | // we are using a dat capable browser, so lets get the client to load itself. 123 | async function launchWikiClient () { 124 | var wikiOrigin = window.location.origin 125 | var wikiArchive = beaker.hyperdrive.drive(wikiOrigin) 126 | var wikiClientLoaderURL = undefined 127 | 128 | // are we using a mounted frontend? 129 | var usingFrontend = new URL(document.currentScript.src).href.includes('.ui') 130 | 131 | if (usingFrontend) { 132 | wikiClientLoaderURL = '/.ui/client-loader.js' 133 | } else { 134 | // read wiki.json 135 | var data = await wikiArchive.readFile('/wiki.json') 136 | var wikiConfig = JSON.parse(data) 137 | // the wiki.json is expected to hold both the key to find the client. 138 | var rawClientURL = wikiConfig.client.key 139 | wikiClientLoaderURL = new URL('/client-loader.js', 'hyper://'+rawClientURL) 140 | } 141 | 142 | // import the client loader module 143 | var clientLoader = document.createElement('script') 144 | clientLoader.src = wikiClientLoaderURL 145 | clientLoader.type = 'text/javascript' 146 | document.head.appendChild(clientLoader) 147 | } 148 | 149 | launchWikiClient() 150 | } 151 | -------------------------------------------------------------------------------- /client/client-loader.js: -------------------------------------------------------------------------------- 1 | // this loads the wiki client 2 | 'use strict' 3 | 4 | var myVersion = "20.05.19" 5 | console.log('+++ Client Loader Version: ', myVersion) 6 | 7 | var clientOrigin = new URL(document.currentScript.src).origin 8 | console.log('+++ loading client from: ', clientOrigin) 9 | 10 | async function setupClient () { 11 | var clientOrigin = undefined 12 | // are we using a mounted frontend? 13 | var usingFrontend = new URL(document.currentScript.src).href.includes('.ui') 14 | 15 | if (usingFrontend) { 16 | clientOrigin = '/.ui' 17 | } else { 18 | clientOrigin = new URL(document.currentScript.src).origin 19 | var wikiOrigin = window.location.origin 20 | 21 | var clientRawKey = await beaker.hyperdrive.getInfo(clientOrigin).then(x => { return x.key }) 22 | var wikiRawKey = await beaker.hyperdrive.getInfo(wikiOrigin).then(x => { return x.key }) 23 | 24 | /* console.log("client origin", clientOrigin) 25 | console.log("wiki origin", wikiOrigin) 26 | console.log("client Key RAW", clientRawKey) 27 | console.log("wiki Key RAW", wikiRawKey) */ 28 | 29 | if (clientRawKey === wikiRawKey) { 30 | clientOrigin = '' 31 | } 32 | } 33 | 34 | var clientHTML = ` 35 | 36 | 37 | 38 | Federated Wiki 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 |
    58 |
    59 |
    60 |
    61 | 63 | 64 | 65 | 66 | 67 |   68 | 69 |   70 | 71 | 72 | 73 | 74 |
    75 | 76 | 85 | 86 | 87 | ` 88 | document.open() 89 | document.write(clientHTML) 90 | document.close() 91 | } 92 | 93 | setupClient() 94 | -------------------------------------------------------------------------------- /client/favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/favicon.png -------------------------------------------------------------------------------- /client/images/crosses.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/images/crosses.png -------------------------------------------------------------------------------- /client/images/email_sign_in_blue.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/images/email_sign_in_blue.png -------------------------------------------------------------------------------- /client/images/external-link-ltr-icon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/images/external-link-ltr-icon.png -------------------------------------------------------------------------------- /client/images/linen2.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/images/linen2.jpg -------------------------------------------------------------------------------- /client/images/linen4.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/images/linen4.jpg -------------------------------------------------------------------------------- /client/images/noise.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/images/noise.png -------------------------------------------------------------------------------- /client/images/oops.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/images/oops.jpg -------------------------------------------------------------------------------- /client/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Federated Wiki: Client Wiki 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /client/index.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Federated Wiki: Client (dev)", 3 | "description": "Development version of the Dat variant of Federated Wiki" 4 | } 5 | -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-bg_flat_0_aaaaaa_40x100.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-bg_flat_0_aaaaaa_40x100.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-bg_flat_75_ffffff_40x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-bg_flat_75_ffffff_40x100.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-bg_glass_55_fbf9ee_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-bg_glass_55_fbf9ee_1x400.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-bg_glass_65_ffffff_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-bg_glass_65_ffffff_1x400.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-bg_glass_75_dadada_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-bg_glass_75_dadada_1x400.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-bg_glass_75_e6e6e6_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-bg_glass_75_e6e6e6_1x400.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-bg_glass_95_fef1ec_1x400.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-bg_glass_95_fef1ec_1x400.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-bg_highlight-soft_75_cccccc_1x100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-bg_highlight-soft_75_cccccc_1x100.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-icons_222222_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-icons_222222_256x240.png 
-------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-icons_2e83ff_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-icons_2e83ff_256x240.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-icons_454545_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-icons_454545_256x240.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-icons_888888_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-icons_888888_256x240.png -------------------------------------------------------------------------------- /client/js/jquery-ui/1.11.4/images/ui-icons_cd0a0a_256x240.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/js/jquery-ui/1.11.4/images/ui-icons_cd0a0a_256x240.png -------------------------------------------------------------------------------- /client/js/jquery.ui.touch-punch.min.js: -------------------------------------------------------------------------------- 1 | /*! 2 | * jQuery UI Touch Punch 0.2.3 3 | * 4 | * Copyright 2011–2014, Dave Furfero 5 | * Dual licensed under the MIT or GPL Version 2 licenses. 6 | * 7 | * Depends: 8 | * jquery.ui.widget.js 9 | * jquery.ui.mouse.js 10 | */ 11 | !function(a){function f(a,b){if(!(a.originalEvent.touches.length>1)){a.preventDefault();var c=a.originalEvent.changedTouches[0],d=document.createEvent("MouseEvents");d.initMouseEvent(b,!0,!0,window,1,c.screenX,c.screenY,c.clientX,c.clientY,!1,!1,!1,!1,0,null),a.target.dispatchEvent(d)}}if(a.support.touch="ontouchend"in document,a.support.touch){var e,b=a.ui.mouse.prototype,c=b._mouseInit,d=b._mouseDestroy;b._touchStart=function(a){var b=this;!e&&b._mouseCapture(a.originalEvent.changedTouches[0])&&(e=!0,b._touchMoved=!1,f(a,"mouseover"),f(a,"mousemove"),f(a,"mousedown"))},b._touchMove=function(a){e&&(this._touchMoved=!0,f(a,"mousemove"))},b._touchEnd=function(a){e&&(f(a,"mouseup"),f(a,"mouseout"),this._touchMoved||f(a,"click"),e=!1)},b._mouseInit=function(){var b=this;b.element.bind({touchstart:a.proxy(b,"_touchStart"),touchmove:a.proxy(b,"_touchMove"),touchend:a.proxy(b,"_touchEnd")}),c.call(b)},b._mouseDestroy=function(){var b=this;b.element.unbind({touchstart:a.proxy(b,"_touchStart"),touchmove:a.proxy(b,"_touchMove"),touchend:a.proxy(b,"_touchEnd")}),d.call(b)}}}(jQuery); 12 | -------------------------------------------------------------------------------- /client/oops.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 |
    7 | 8 |

    9 | {{msg}} 10 |

    11 |
    12 | 13 | 14 | -------------------------------------------------------------------------------- /client/pages/smallest-federated-wiki.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Smallest Federated Wiki", 3 | "story": [ 4 | { 5 | "type": "paragraph", 6 | "id": "4b6f52ddeb6ebb39", 7 | "text": "The Smallest Federated Wiki was the founding project that has grown into many sites running substantially evolved client and server software. See [[Federated Wiki]]" 8 | } 9 | ], 10 | "journal": [ 11 | { 12 | "type": "create", 13 | "item": { 14 | "title": "Smallest Federated Wiki", 15 | "story": [] 16 | }, 17 | "date": 1418658255368 18 | }, 19 | { 20 | "item": { 21 | "type": "factory", 22 | "id": "4b6f52ddeb6ebb39" 23 | }, 24 | "id": "4b6f52ddeb6ebb39", 25 | "type": "add", 26 | "date": 1418658275922 27 | }, 28 | { 29 | "type": "edit", 30 | "id": "4b6f52ddeb6ebb39", 31 | "item": { 32 | "type": "paragraph", 33 | "id": "4b6f52ddeb6ebb39", 34 | "text": "The Smallest Federated Wiki was the founding project that has grown into many sites running substantially evolved client and server software. See [[Federated Wiki]]" 35 | }, 36 | "date": 1418658294473 37 | } 38 | ] 39 | } -------------------------------------------------------------------------------- /client/pages/system/sitemap.json: -------------------------------------------------------------------------------- 1 | [ ] 2 | -------------------------------------------------------------------------------- /client/pages/welcome-visitors.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Welcome Visitors", 3 | "story": [ 4 | { 5 | "text": "Welcome to this [[Federated Wiki]] site. From this page you can find who we are and what we do. New sites provide this information and then claim the site as their own. You will need your own site to participate.", 6 | "id": "7b56f22a4b9ee974", 7 | "type": "paragraph" 8 | }, 9 | { 10 | "type": "paragraph", 11 | "id": "821827c99b90cfd1", 12 | "text": "Pages about us." 13 | }, 14 | { 15 | "type": "factory", 16 | "id": "63ad2e58eecdd9e5", 17 | "prompt": "Link to a page about yourself here. Type your name enclosed in double square brackets. Then press Command/ALT-S to save.\n\nMake all pages here yours alone with the login below." 18 | }, 19 | { 20 | "type": "paragraph", 21 | "id": "2bbd646ff3f44b51", 22 | "text": "Pages where we do and share." 23 | }, 24 | { 25 | "type": "factory", 26 | "id": "05e2fa92643677ca", 27 | "prompt": "Create pages about things you do on this wiki. Type a descriptive name of something you will be writing about. Enclose it in square brackets. Then press Command/ALT-S to save." 28 | }, 29 | { 30 | "type": "paragraph", 31 | "id": "ee416d431ebf4fb4", 32 | "text": "You can edit your copy of these pages. Press [+] to add more writing spaces. Read [[How to Wiki]] for more ideas. Follow [[Recent Changes]] here and nearby." 33 | } 34 | ], 35 | "journal": [ 36 | { 37 | "type": "create", 38 | "item": { 39 | "title": "Welcome Visitors", 40 | "story": [] 41 | }, 42 | "date": 1420938191608 43 | }, 44 | { 45 | "type": "add", 46 | "item": { 47 | "text": "Welcome to this [[Federated Wiki]] site. From this page you can find who we are and what we do. New sites provide this information and then claim the site as their own. 
You will need your own site to participate.", 48 | "id": "7b56f22a4b9ee974", 49 | "type": "paragraph" 50 | }, 51 | "id": "7b56f22a4b9ee974", 52 | "date": 1420938199166 53 | }, 54 | { 55 | "type": "add", 56 | "item": { 57 | "type": "paragraph", 58 | "id": "821827c99b90cfd1", 59 | "text": "Pages about us." 60 | }, 61 | "after": "7b56f22a4b9ee974", 62 | "id": "821827c99b90cfd1", 63 | "date": 1420938202921 64 | }, 65 | { 66 | "type": "add", 67 | "item": { 68 | "type": "factory", 69 | "id": "63ad2e58eecdd9e5", 70 | "prompt": "Link to a page about yourself here. Type your name enclosed in double square brackets. Then press Command/ALT-S to save.\n\nMake all pages here yours alone with the login below." 71 | }, 72 | "after": "821827c99b90cfd1", 73 | "id": "63ad2e58eecdd9e5", 74 | "date": 1420938208737 75 | }, 76 | { 77 | "type": "add", 78 | "item": { 79 | "type": "paragraph", 80 | "id": "2bbd646ff3f44b51", 81 | "text": "Pages where we do and share." 82 | }, 83 | "after": "63ad2e58eecdd9e5", 84 | "id": "2bbd646ff3f44b51", 85 | "date": 1420938212624 86 | }, 87 | { 88 | "type": "add", 89 | "item": { 90 | "type": "factory", 91 | "id": "05e2fa92643677ca", 92 | "prompt": "Create pages about things you do on this wiki. Type a descriptive name of something you will be writing about. Enclose it in square brackets. Then press Command/ALT-S to save." 93 | }, 94 | "after": "2bbd646ff3f44b51", 95 | "id": "05e2fa92643677ca", 96 | "date": 1420938216782 97 | }, 98 | { 99 | "type": "add", 100 | "item": { 101 | "type": "paragraph", 102 | "id": "ee416d431ebf4fb4", 103 | "text": "You can edit your copy of these pages. Press [+] to add more writing spaces. Read [[How to Wiki]] for more ideas. Follow [[Recent Changes]] here and nearby." 104 | }, 105 | "after": "05e2fa92643677ca", 106 | "id": "ee416d431ebf4fb4", 107 | "date": 1420938220851 108 | } 109 | ] 110 | } -------------------------------------------------------------------------------- /client/plugins.json: -------------------------------------------------------------------------------- 1 | { 2 | "activity": "/plugins/activity", 3 | "assets": "/plugins/assets", 4 | "audio": "/plugins/audio", 5 | "bars": "/plugins/bars", 6 | "bytebeat": "/plugins/bytebeat", 7 | "calculator": "/plugins/calculator", 8 | "calendar": "/plugins/calendar", 9 | "changes": "/plugins/changes", 10 | "chart": "/plugins/chart", 11 | "code": "/plugins/code", 12 | "data": "/plugins/data", 13 | "factory": "/plugins/factory", 14 | "favicon": "/plugins/favicon", 15 | "flagmatic": "/plugins/flagmatic", 16 | "force": "/plugins/force", 17 | "future": "/plugins/future", 18 | "graphviz": "/plugins/graphviz", 19 | "grep": "/plugins/grep", 20 | "html": "/plugins/html", 21 | "image": "/plugins/image", 22 | "line": "/plugins/line", 23 | "map": "/plugins/map", 24 | "markdown": "/plugins/markdown", 25 | "mathjax": "/plugins/mathjax", 26 | "metabolism": "/plugins/metabolism", 27 | "method": "/plugins/method", 28 | "pagefold": "/plugins/pagefold", 29 | "paragraph": "/plugins/paragraph", 30 | "pushpin": "/plugins/pushpin", 31 | "radar": "/plugins/radar", 32 | "recycler": "/plugins/recycler", 33 | "reduce": "/plugins/reduce", 34 | "reference": "/plugins/reference", 35 | "rollup": "/plugins/rollup", 36 | "roster": "/plugins/roster", 37 | "scatter": "/plugins/scatter", 38 | "search": "/plugins/search", 39 | "transport": "/plugins/transport", 40 | "video": "/plugins/video", 41 | "wikigenesis": "/plugins/wikigenesis", 42 | "zones": "/plugins/zones" 43 | } 44 | 
-------------------------------------------------------------------------------- /client/runtests.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | SFW Mocha Tests 6 | 7 | 8 | 9 | 10 | 11 | 12 | 15 | 16 | 17 | 18 |
    19 | 20 | 21 | -------------------------------------------------------------------------------- /client/style/print.css: -------------------------------------------------------------------------------- 1 | body { 2 | font-family: ‘Times New Roman’, Georgia, serif; 3 | font-size: 12pt; 4 | line-height: 1.2; 5 | background: white; 6 | color: black; 7 | } 8 | 9 | .main { 10 | width: auto; 11 | border: 0; 12 | margin: 10% 20%; 13 | padding: 0; 14 | float: none !important; 15 | } 16 | 17 | .page { 18 | page-break-after: always; 19 | } 20 | 21 | .story { 22 | text-align: left; 23 | } 24 | 25 | p { 26 | orphans:3; 27 | widows:2; 28 | } 29 | 30 | 31 | img.remote { 32 | width: 16px; 33 | height: 16px; 34 | } 35 | 36 | A:link, A:visited { 37 | background: transparent; 38 | color: #111; 39 | text-decoration: none; 40 | font-weight: 600; 41 | } 42 | 43 | .story a:link:after, 44 | .story a:visited:after { 45 | content: " (" attr(href) ") "; 46 | font-size: 90%; 47 | font-weight: 500; 48 | } 49 | 50 | .journal { 51 | display: none; 52 | } 53 | 54 | .footer { 55 | font-size: 10pt; 56 | padding-top: 24pt; 57 | float: bottom; 58 | string-set: Footer self; 59 | } 60 | 61 | .footer a { 62 | text-decoration: none; 63 | } 64 | 65 | 66 | footer { 67 | display: none; 68 | } 69 | -------------------------------------------------------------------------------- /client/test/mocha.css: -------------------------------------------------------------------------------- 1 | @charset "utf-8"; 2 | 3 | body { 4 | margin:0; 5 | } 6 | 7 | #mocha { 8 | font: 20px/1.5 "Helvetica Neue", Helvetica, Arial, sans-serif; 9 | margin: 60px 50px; 10 | } 11 | 12 | #mocha ul, 13 | #mocha li { 14 | margin: 0; 15 | padding: 0; 16 | } 17 | 18 | #mocha ul { 19 | list-style: none; 20 | } 21 | 22 | #mocha h1, 23 | #mocha h2 { 24 | margin: 0; 25 | } 26 | 27 | #mocha h1 { 28 | margin-top: 15px; 29 | font-size: 1em; 30 | font-weight: 200; 31 | } 32 | 33 | #mocha h1 a { 34 | text-decoration: none; 35 | color: inherit; 36 | } 37 | 38 | #mocha h1 a:hover { 39 | text-decoration: underline; 40 | } 41 | 42 | #mocha .suite .suite h1 { 43 | margin-top: 0; 44 | font-size: .8em; 45 | } 46 | 47 | #mocha .hidden { 48 | display: none; 49 | } 50 | 51 | #mocha h2 { 52 | font-size: 12px; 53 | font-weight: normal; 54 | cursor: pointer; 55 | } 56 | 57 | #mocha .suite { 58 | margin-left: 15px; 59 | } 60 | 61 | #mocha .test { 62 | margin-left: 15px; 63 | overflow: hidden; 64 | } 65 | 66 | #mocha .test.pending:hover h2::after { 67 | content: '(pending)'; 68 | font-family: arial, sans-serif; 69 | } 70 | 71 | #mocha .test.pass.medium .duration { 72 | background: #c09853; 73 | } 74 | 75 | #mocha .test.pass.slow .duration { 76 | background: #b94a48; 77 | } 78 | 79 | #mocha .test.pass::before { 80 | content: '✓'; 81 | font-size: 12px; 82 | display: block; 83 | float: left; 84 | margin-right: 5px; 85 | color: #00d6b2; 86 | } 87 | 88 | #mocha .test.pass .duration { 89 | font-size: 9px; 90 | margin-left: 5px; 91 | padding: 2px 5px; 92 | color: #fff; 93 | -webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.2); 94 | -moz-box-shadow: inset 0 1px 1px rgba(0,0,0,.2); 95 | box-shadow: inset 0 1px 1px rgba(0,0,0,.2); 96 | -webkit-border-radius: 5px; 97 | -moz-border-radius: 5px; 98 | -ms-border-radius: 5px; 99 | -o-border-radius: 5px; 100 | border-radius: 5px; 101 | } 102 | 103 | #mocha .test.pass.fast .duration { 104 | display: none; 105 | } 106 | 107 | #mocha .test.pending { 108 | color: #0b97c4; 109 | } 110 | 111 | #mocha .test.pending::before { 112 | content: '◦'; 113 | color: 
#0b97c4; 114 | } 115 | 116 | #mocha .test.fail { 117 | color: #c00; 118 | } 119 | 120 | #mocha .test.fail pre { 121 | color: black; 122 | } 123 | 124 | #mocha .test.fail::before { 125 | content: '✖'; 126 | font-size: 12px; 127 | display: block; 128 | float: left; 129 | margin-right: 5px; 130 | color: #c00; 131 | } 132 | 133 | #mocha .test pre.error { 134 | color: #c00; 135 | max-height: 300px; 136 | overflow: auto; 137 | } 138 | 139 | /** 140 | * (1): approximate for browsers not supporting calc 141 | * (2): 42 = 2*15 + 2*10 + 2*1 (padding + margin + border) 142 | * ^^ seriously 143 | */ 144 | #mocha .test pre { 145 | display: block; 146 | float: left; 147 | clear: left; 148 | font: 12px/1.5 monaco, monospace; 149 | margin: 5px; 150 | padding: 15px; 151 | border: 1px solid #eee; 152 | max-width: 85%; /*(1)*/ 153 | max-width: calc(100% - 42px); /*(2)*/ 154 | word-wrap: break-word; 155 | border-bottom-color: #ddd; 156 | -webkit-border-radius: 3px; 157 | -webkit-box-shadow: 0 1px 3px #eee; 158 | -moz-border-radius: 3px; 159 | -moz-box-shadow: 0 1px 3px #eee; 160 | border-radius: 3px; 161 | } 162 | 163 | #mocha .test h2 { 164 | position: relative; 165 | } 166 | 167 | #mocha .test a.replay { 168 | position: absolute; 169 | top: 3px; 170 | right: 0; 171 | text-decoration: none; 172 | vertical-align: middle; 173 | display: block; 174 | width: 15px; 175 | height: 15px; 176 | line-height: 15px; 177 | text-align: center; 178 | background: #eee; 179 | font-size: 15px; 180 | -moz-border-radius: 15px; 181 | border-radius: 15px; 182 | -webkit-transition: opacity 200ms; 183 | -moz-transition: opacity 200ms; 184 | transition: opacity 200ms; 185 | opacity: 0.3; 186 | color: #888; 187 | } 188 | 189 | #mocha .test:hover a.replay { 190 | opacity: 1; 191 | } 192 | 193 | #mocha-report.pass .test.fail { 194 | display: none; 195 | } 196 | 197 | #mocha-report.fail .test.pass { 198 | display: none; 199 | } 200 | 201 | #mocha-report.pending .test.pass, 202 | #mocha-report.pending .test.fail { 203 | display: none; 204 | } 205 | #mocha-report.pending .test.pass.pending { 206 | display: block; 207 | } 208 | 209 | #mocha-error { 210 | color: #c00; 211 | font-size: 1.5em; 212 | font-weight: 100; 213 | letter-spacing: 1px; 214 | } 215 | 216 | #mocha-stats { 217 | position: fixed; 218 | top: 15px; 219 | right: 10px; 220 | font-size: 12px; 221 | margin: 0; 222 | color: #888; 223 | z-index: 1; 224 | } 225 | 226 | #mocha-stats .progress { 227 | float: right; 228 | padding-top: 0; 229 | } 230 | 231 | #mocha-stats em { 232 | color: black; 233 | } 234 | 235 | #mocha-stats a { 236 | text-decoration: none; 237 | color: inherit; 238 | } 239 | 240 | #mocha-stats a:hover { 241 | border-bottom: 1px solid #eee; 242 | } 243 | 244 | #mocha-stats li { 245 | display: inline-block; 246 | margin: 0 5px; 247 | list-style: none; 248 | padding-top: 11px; 249 | } 250 | 251 | #mocha-stats canvas { 252 | width: 40px; 253 | height: 40px; 254 | } 255 | 256 | #mocha code .comment { color: #ddd; } 257 | #mocha code .init { color: #2f6fad; } 258 | #mocha code .string { color: #5890ad; } 259 | #mocha code .keyword { color: #8a6343; } 260 | #mocha code .number { color: #2f6fad; } 261 | 262 | @media screen and (max-device-width: 480px) { 263 | #mocha { 264 | margin: 60px 0px; 265 | } 266 | 267 | #mocha #stats { 268 | position: absolute; 269 | } 270 | } 271 | -------------------------------------------------------------------------------- /client/twitter-maintainance.jpg: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/client/twitter-maintainance.jpg -------------------------------------------------------------------------------- /client/ui.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Federated Wiki: Empty Wiki 6 | 7 | 8 | 9 | 10 | 11 | 12 | 13 | 14 | -------------------------------------------------------------------------------- /client/wiki.json: -------------------------------------------------------------------------------- 1 | { 2 | "author": "Wiki Client", 3 | "version": "20.05.19", 4 | "client": { 5 | "key": "ad8cb16333ca09b5bdd05d4e7ef0b911ccb7151f7ae3e6515d6456927f6ba0ca" 6 | } 7 | } 8 | -------------------------------------------------------------------------------- /client/wiki/field-guide-to-the-federation.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Field Guide to the Federation", 3 | "story": [ 4 | { 5 | "type": "paragraph", 6 | "id": "1eb0999952e0c847", 7 | "text": "We show you what we find interesting in the federation and suggest how you can make yourself at home here." 8 | }, 9 | { 10 | "type": "paragraph", 11 | "id": "96fb4c9bb0463f56", 12 | "text": "Tip: Use the browser's back button to go back. Use the arrow keys to scroll left and right. Don't use touch until you understand how wiki works (it could work better)." 13 | }, 14 | { 15 | "type": "paragraph", 16 | "id": "6b95cf9425142c87", 17 | "text": "[[You're New Here]] so make yourself comfortable." 18 | }, 19 | { 20 | "type": "paragraph", 21 | "id": "9a5d18bcf23308b0", 22 | "text": "[[Featured Sites]] that show how we like to write." 23 | }, 24 | { 25 | "type": "paragraph", 26 | "id": "8b6536865792e0ed", 27 | "text": "[[Watch Everything]] that is new in the federation." 28 | }, 29 | { 30 | "type": "paragraph", 31 | "id": "9102503c5b1c9a53", 32 | "text": "[[Welcome Your Community]] to work this way." 33 | }, 34 | { 35 | "type": "paragraph", 36 | "id": "4298e056e51456a4", 37 | "text": "[[Keep Safe]] from yourself, others, and the march of time." 38 | } 39 | ], 40 | "journal": [ 41 | { 42 | "type": "create", 43 | "item": { 44 | "title": "Field Guide to the Federation", 45 | "story": [] 46 | }, 47 | "date": 1465750303628 48 | }, 49 | { 50 | "item": { 51 | "type": "factory", 52 | "id": "1eb0999952e0c847" 53 | }, 54 | "id": "1eb0999952e0c847", 55 | "type": "add", 56 | "date": 1465750327845 57 | }, 58 | { 59 | "type": "edit", 60 | "id": "1eb0999952e0c847", 61 | "item": { 62 | "type": "paragraph", 63 | "id": "1eb0999952e0c847", 64 | "text": "We show you what we find interesting in the federation and suggest how you can make yourself at home here." 65 | }, 66 | "date": 1465750393944 67 | }, 68 | { 69 | "type": "add", 70 | "id": "6b95cf9425142c87", 71 | "item": { 72 | "type": "paragraph", 73 | "id": "6b95cf9425142c87", 74 | "text": "[[You're New Here]] so make yourself comfortable." 75 | }, 76 | "after": "1eb0999952e0c847", 77 | "date": 1465750472348 78 | }, 79 | { 80 | "type": "add", 81 | "id": "96fb4c9bb0463f56", 82 | "item": { 83 | "type": "paragraph", 84 | "id": "96fb4c9bb0463f56", 85 | "text": "Use the browser's back button to go back." 86 | }, 87 | "after": "1eb0999952e0c847", 88 | "date": 1465750767712 89 | }, 90 | { 91 | "type": "add", 92 | "id": "6378d5eb44ea7f6e", 93 | "item": { 94 | "type": "paragraph", 95 | "id": "6378d5eb44ea7f6e", 96 | "text": "Use the arrow keys to scroll left and right." 
97 | }, 98 | "after": "96fb4c9bb0463f56", 99 | "date": 1465750796365 100 | }, 101 | { 102 | "type": "edit", 103 | "id": "96fb4c9bb0463f56", 104 | "item": { 105 | "type": "paragraph", 106 | "id": "96fb4c9bb0463f56", 107 | "text": "Tip: Use the browser's back button to go back." 108 | }, 109 | "date": 1465750843856 110 | }, 111 | { 112 | "type": "remove", 113 | "id": "6378d5eb44ea7f6e", 114 | "date": 1465750848604 115 | }, 116 | { 117 | "type": "edit", 118 | "id": "96fb4c9bb0463f56", 119 | "item": { 120 | "type": "paragraph", 121 | "id": "96fb4c9bb0463f56", 122 | "text": "Tip: Use the browser's back button to go back. Use the arrow keys to scroll left and right." 123 | }, 124 | "date": 1465750851121 125 | }, 126 | { 127 | "type": "edit", 128 | "id": "96fb4c9bb0463f56", 129 | "item": { 130 | "type": "paragraph", 131 | "id": "96fb4c9bb0463f56", 132 | "text": "Tip: Use the browser's back button to go back. Use the arrow keys to scroll left and right. Don't use touch until you understand how wiki works." 133 | }, 134 | "date": 1465750877971 135 | }, 136 | { 137 | "type": "edit", 138 | "id": "96fb4c9bb0463f56", 139 | "item": { 140 | "type": "paragraph", 141 | "id": "96fb4c9bb0463f56", 142 | "text": "Tip: Use the browser's back button to go back. Use the arrow keys to scroll left and right. Don't use touch until you understand how wiki works (it could work better)." 143 | }, 144 | "date": 1465750892428 145 | }, 146 | { 147 | "type": "add", 148 | "id": "9a5d18bcf23308b0", 149 | "item": { 150 | "type": "paragraph", 151 | "id": "9a5d18bcf23308b0", 152 | "text": "[[Featured Sites]] that show how we like to write." 153 | }, 154 | "after": "6b95cf9425142c87", 155 | "date": 1465752423709 156 | }, 157 | { 158 | "type": "add", 159 | "id": "8b6536865792e0ed", 160 | "item": { 161 | "type": "paragraph", 162 | "id": "8b6536865792e0ed", 163 | "text": "[[Watch Everything]] that is new in the federation." 164 | }, 165 | "after": "9a5d18bcf23308b0", 166 | "date": 1465757095163 167 | }, 168 | { 169 | "type": "add", 170 | "id": "9102503c5b1c9a53", 171 | "item": { 172 | "type": "paragraph", 173 | "id": "9102503c5b1c9a53", 174 | "text": "[[Welcome Your Community]] to work this way." 175 | }, 176 | "after": "8b6536865792e0ed", 177 | "date": 1465758088668 178 | }, 179 | { 180 | "type": "add", 181 | "id": "4298e056e51456a4", 182 | "item": { 183 | "type": "paragraph", 184 | "id": "4298e056e51456a4", 185 | "text": "[[Keep Safe]] from yourself, others and the march of time." 186 | }, 187 | "after": "9102503c5b1c9a53", 188 | "date": 1488335368485 189 | }, 190 | { 191 | "type": "edit", 192 | "id": "4298e056e51456a4", 193 | "item": { 194 | "type": "paragraph", 195 | "id": "4298e056e51456a4", 196 | "text": "[[Keep Safe]] from yourself, others, and the march of time." 197 | }, 198 | "date": 1498664660237 199 | }, 200 | { 201 | "type": "fork", 202 | "site": "hello.ward.bay.wiki.org", 203 | "date": 1537893052941 204 | } 205 | ] 206 | } 207 | -------------------------------------------------------------------------------- /client/wiki/system/sitemap.json: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "slug": "field-guide-to-the-federation", 4 | "title": "Field Guide to the Federation", 5 | "date": 1498664660237, 6 | "synopsis": "We show you what we find interesting in the federation and suggest how you can make yourself at home here." 
7 | }, 8 | { 9 | "slug": "frequently-asked-questions", 10 | "title": "Frequently Asked Questions", 11 | "date": 1536772100631, 12 | "synopsis": "As our end user documentation emerges we can expect there to be many questions. We will collect answers here, organize them and then use this to guide improvements." 13 | }, 14 | { 15 | "slug": "about-federated-wiki", 16 | "title": "About Federated Wiki", 17 | "date": 1469224975210, 18 | "synopsis": "We remain excited about this platform and have become increasingly confident that it embodies important new ideas. We will explain." 19 | }, 20 | { 21 | "slug": "welcome-visitors", 22 | "title": "Welcome Visitors", 23 | "date": 1537892074778, 24 | "synopsis": "Welcome to this [[Federated Wiki]] site. The pages on this particular site describe how to get things done with the dat variant of Federated Wiki." 25 | }, 26 | { 27 | "slug": "youre-new-here", 28 | "title": "You're New Here", 29 | "date": 1537865735964, 30 | "synopsis": "There are four steps to making a home here once you have your own site. In other systems this would be like filling out your profile page. Do these steps in any order." 31 | }, 32 | { 33 | "slug": "wiki-to-wiki", 34 | "title": "Wiki to Wiki", 35 | "date": 1536518340101, 36 | "synopsis": "Wiki empowers authors with control over their own publication. Our client-server software imposes server administration upon authors. We experiment to remove that burden." 37 | }, 38 | { 39 | "slug": "federated-wiki", 40 | "title": "Federated Wiki", 41 | "date": 1536518373540, 42 | "synopsis": "Federated Wiki sites share pages circulating within a creative commons. A single-page browser application can read from many sites at once and save changes in that browser. Authors who host their own sites can login there to have their edits shared back to the federation as they edit." 43 | } 44 | ] 45 | -------------------------------------------------------------------------------- /client/wiki/welcome-visitors.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Welcome Visitors", 3 | "story": [ 4 | { 5 | "text": "Welcome to this [[Federated Wiki]] site. The pages on this particular site describe how to get things done with the dat variant of Federated Wiki.", 6 | "id": "7b56f22a4b9ee974", 7 | "type": "paragraph" 8 | }, 9 | { 10 | "type": "paragraph", 11 | "id": "e78e134a621e2034", 12 | "text": "Curious? Read [[About Federated Wiki]]." 13 | }, 14 | { 15 | "type": "paragraph", 16 | "id": "a2bdc2ba750b13a6", 17 | "text": "Want to create a new wiki? See [[Creating a New Wiki]]." 18 | }, 19 | { 20 | "type": "paragraph", 21 | "id": "aecee88e47edbc29", 22 | "text": "Or share you dat wiki with others? See [[Sharing Your Wiki]]." 23 | }, 24 | { 25 | "type": "paragraph", 26 | "id": "89e45ed6d95700d4", 27 | "text": "Just starting to write? See [[Field Guide to the Federation]]." 28 | }, 29 | { 30 | "type": "paragraph", 31 | "id": "603d6f8bd4228fe1", 32 | "text": "Looking for a reference manual? See [[How to Wiki]]." 33 | }, 34 | { 35 | "type": "paragraph", 36 | "id": "1374fd83da1b360e", 37 | "text": "Still confused? See [[Frequently Asked Questions]]." 38 | }, 39 | { 40 | "type": "paragraph", 41 | "id": "71166a24a45fe78d", 42 | "text": "Find updates to this site in [[Recent Changes]]." 
43 | } 44 | ], 45 | "journal": [ 46 | { 47 | "type": "create", 48 | "item": { 49 | "title": "Welcome Visitors", 50 | "story": [] 51 | }, 52 | "date": 1420938191608 53 | }, 54 | { 55 | "type": "add", 56 | "item": { 57 | "text": "Welcome to this [[Federated Wiki]] site. From this page you can find who we are and what we do. New sites provide this information and then claim the site as their own. You will need your own site to participate.", 58 | "id": "7b56f22a4b9ee974", 59 | "type": "paragraph" 60 | }, 61 | "id": "7b56f22a4b9ee974", 62 | "date": 1420938199166 63 | }, 64 | { 65 | "type": "add", 66 | "item": { 67 | "type": "paragraph", 68 | "id": "821827c99b90cfd1", 69 | "text": "Pages about us." 70 | }, 71 | "after": "7b56f22a4b9ee974", 72 | "id": "821827c99b90cfd1", 73 | "date": 1420938202921 74 | }, 75 | { 76 | "type": "add", 77 | "item": { 78 | "type": "factory", 79 | "id": "63ad2e58eecdd9e5", 80 | "prompt": "Link to a page about yourself here. Type your name enclosed in double square brackets. Then press Command/ALT-S to save.\n\nMake all pages here yours alone with the login below." 81 | }, 82 | "after": "821827c99b90cfd1", 83 | "id": "63ad2e58eecdd9e5", 84 | "date": 1420938208737 85 | }, 86 | { 87 | "type": "add", 88 | "item": { 89 | "type": "paragraph", 90 | "id": "2bbd646ff3f44b51", 91 | "text": "Pages where we do and share." 92 | }, 93 | "after": "63ad2e58eecdd9e5", 94 | "id": "2bbd646ff3f44b51", 95 | "date": 1420938212624 96 | }, 97 | { 98 | "type": "add", 99 | "item": { 100 | "type": "factory", 101 | "id": "05e2fa92643677ca", 102 | "prompt": "Create pages about things you do on this wiki. Type a descriptive name of something you will be writing about. Enclose it in square brackets. Then press Command/ALT-S to save." 103 | }, 104 | "after": "2bbd646ff3f44b51", 105 | "id": "05e2fa92643677ca", 106 | "date": 1420938216782 107 | }, 108 | { 109 | "type": "add", 110 | "item": { 111 | "type": "paragraph", 112 | "id": "ee416d431ebf4fb4", 113 | "text": "You can edit your copy of these pages. Press [+] to add more writing spaces. Read [[How to Wiki]] for more ideas. Follow [[Recent Changes]] here and nearby." 114 | }, 115 | "after": "05e2fa92643677ca", 116 | "id": "ee416d431ebf4fb4", 117 | "date": 1420938220851 118 | }, 119 | { 120 | "type": "edit", 121 | "id": "7b56f22a4b9ee974", 122 | "item": { 123 | "text": "Welcome to this [[Federated Wiki]] site. The pages on this particular site describe how to get things done with the dat variant of Federated Wiki.", 124 | "id": "7b56f22a4b9ee974", 125 | "type": "paragraph" 126 | }, 127 | "date": 1535963081951 128 | }, 129 | { 130 | "type": "add", 131 | "item": { 132 | "type": "paragraph", 133 | "id": "e78e134a621e2034", 134 | "text": "Curious? Read [[About Federated Wiki]]." 135 | }, 136 | "after": "7b56f22a4b9ee974", 137 | "id": "e78e134a621e2034", 138 | "date": 1535963094935 139 | }, 140 | { 141 | "type": "remove", 142 | "id": "821827c99b90cfd1", 143 | "date": 1535963114829 144 | }, 145 | { 146 | "type": "remove", 147 | "id": "63ad2e58eecdd9e5", 148 | "date": 1535963118947 149 | }, 150 | { 151 | "type": "remove", 152 | "id": "2bbd646ff3f44b51", 153 | "date": 1535963127292 154 | }, 155 | { 156 | "type": "remove", 157 | "id": "05e2fa92643677ca", 158 | "date": 1535963129747 159 | }, 160 | { 161 | "type": "add", 162 | "item": { 163 | "type": "paragraph", 164 | "id": "89e45ed6d95700d4", 165 | "text": "Just starting to write? See [[Field Guide to the Federation]]." 
166 | }, 167 | "after": "e78e134a621e2034", 168 | "id": "89e45ed6d95700d4", 169 | "date": 1535963233153 170 | }, 171 | { 172 | "type": "add", 173 | "item": { 174 | "type": "paragraph", 175 | "id": "603d6f8bd4228fe1", 176 | "text": "Looking for a reference manual? See [[How to Wiki]]." 177 | }, 178 | "after": "89e45ed6d95700d4", 179 | "id": "603d6f8bd4228fe1", 180 | "date": 1535963237424 181 | }, 182 | { 183 | "type": "add", 184 | "item": { 185 | "type": "paragraph", 186 | "id": "1374fd83da1b360e", 187 | "text": "Still confused? See [[Frequently Asked Questions]]." 188 | }, 189 | "after": "603d6f8bd4228fe1", 190 | "id": "1374fd83da1b360e", 191 | "date": 1535963242136 192 | }, 193 | { 194 | "type": "add", 195 | "item": { 196 | "type": "paragraph", 197 | "id": "71166a24a45fe78d", 198 | "text": "Find updates to this site in [[Recent Changes]]." 199 | }, 200 | "after": "1374fd83da1b360e", 201 | "id": "71166a24a45fe78d", 202 | "date": 1535963253064 203 | }, 204 | { 205 | "type": "remove", 206 | "id": "ee416d431ebf4fb4", 207 | "date": 1535963259563 208 | }, 209 | { 210 | "type": "add", 211 | "id": "a2bdc2ba750b13a6", 212 | "item": { 213 | "type": "paragraph", 214 | "id": "a2bdc2ba750b13a6", 215 | "text": "Want to create a new wiki? See [[Creating a New Wiki]]." 216 | }, 217 | "after": "e78e134a621e2034", 218 | "date": 1537891986260 219 | }, 220 | { 221 | "type": "add", 222 | "id": "aecee88e47edbc29", 223 | "item": { 224 | "type": "paragraph", 225 | "id": "aecee88e47edbc29", 226 | "text": "Or share you dat wiki with others? See [[Sharing Your Wiki]]." 227 | }, 228 | "after": "a2bdc2ba750b13a6", 229 | "date": 1537892074778 230 | } 231 | ] 232 | } 233 | -------------------------------------------------------------------------------- /client/wiki/wiki-to-wiki.json: -------------------------------------------------------------------------------- 1 | { 2 | "title": "Wiki to Wiki", 3 | "story": [ 4 | { 5 | "type": "markdown", 6 | "id": "674b3d2eba5a4bdc", 7 | "text": "Wiki empowers authors with control over their own publication. Our client-server software imposes server administration upon authors. We experiment to remove that burden." 8 | } 9 | ], 10 | "journal": [ 11 | { 12 | "type": "create", 13 | "item": { 14 | "title": "Wiki to Wiki", 15 | "story": [] 16 | }, 17 | "date": 1536517297624 18 | }, 19 | { 20 | "item": { 21 | "type": "markdown", 22 | "id": "674b3d2eba5a4bdc", 23 | "text": "Wiki empowers authors with control over their own publication. Our client-server software imposes server administration upon authors. We experiment to remove that burden." 24 | }, 25 | "id": "674b3d2eba5a4bdc", 26 | "type": "add", 27 | "date": 1536517299777 28 | }, 29 | { 30 | "type": "edit", 31 | "id": "674b3d2eba5a4bdc", 32 | "item": { 33 | "type": "markdown", 34 | "id": "674b3d2eba5a4bdc", 35 | "text": "Wiki empowers authors with control over their own publication. Our client-server software imposes server administration upon authors. We experiment to remove that burden." 
36 | }, 37 | "date": 1536518340101 38 | } 39 | ] 40 | } 41 | -------------------------------------------------------------------------------- /favicon.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/paul90/wiki-client-dat-variant/0a9b918b27c8da86b4d45b372cbd12caaeecc8e4/favicon.png -------------------------------------------------------------------------------- /lib/actionSymbols.coffee: -------------------------------------------------------------------------------- 1 | # We use unicode characters as icons for actions 2 | # in the journal. Fork and add are also button 3 | # labels used for user actions leading to forks 4 | # and adds. How poetic. 5 | 6 | # Page keeps its own list of symbols used as journal 7 | # action separators. 8 | 9 | symbols = 10 | create: '☼' 11 | add: '+' 12 | edit: '✎' 13 | fork: '⚑' 14 | move: '↕' 15 | remove: '✕' 16 | 17 | fork = symbols['fork'] 18 | add = symbols['add'] 19 | 20 | module.exports = {symbols, fork, add} -------------------------------------------------------------------------------- /lib/active.coffee: -------------------------------------------------------------------------------- 1 | # Wiki considers one page to be active. Use active.set to change which 2 | # page this is. A page need not be active to be edited. 3 | 4 | module.exports = active = {} 5 | 6 | active.scrollContainer = undefined 7 | 8 | findScrollContainer = -> 9 | scrolled = $("body, html").filter -> $(this).scrollLeft() > 0 10 | if scrolled.length > 0 11 | scrolled 12 | else 13 | $("body, html").scrollLeft(12).filter(-> $(this).scrollLeft() > 0).scrollTop(0) 14 | 15 | scrollTo = ($page) -> 16 | return unless $page.position()? 17 | active.scrollContainer ?= findScrollContainer() 18 | bodyWidth = $("body").width() 19 | minX = active.scrollContainer.scrollLeft() 20 | maxX = minX + bodyWidth 21 | target = $page.position().left 22 | width = $page.outerWidth(true) 23 | contentWidth = $(".page").outerWidth(true) * $(".page").length 24 | 25 | # determine target position to scroll to... 26 | if target < minX 27 | scrollTarget = target 28 | else if target + width > maxX 29 | scrollTarget = target - (bodyWidth - width) 30 | else if maxX > $(".pages").outerWidth() 31 | scrollTarget = Math.min(target, contentWidth - bodyWidth) 32 | # scroll to target and set focus once animation is complete 33 | active.scrollContainer.animate({ 34 | scrollLeft: scrollTarget 35 | }, () -> 36 | # only set focus if focus is not already within the page to get focus 37 | $page.focus() unless $.contains $page[0], document.activeElement ) 38 | 39 | 40 | active.set = ($page, noScroll) -> 41 | $('.incremental-search').remove() 42 | $page = $($page) 43 | $(".active").removeClass("active") 44 | $page.addClass("active") 45 | scrollTo $page unless noScroll 46 | -------------------------------------------------------------------------------- /lib/addToJournal.coffee: -------------------------------------------------------------------------------- 1 | # A wiki page has a journal of actions that have been completed. 2 | # The addToJournal function is called when the origin server 3 | # response that the network operation is complete. 4 | 5 | util = require './util' 6 | actionSymbols = require './actionSymbols' 7 | 8 | module.exports = ($journal, action) -> 9 | $page = $journal.parents('.page:first') 10 | title = '' 11 | title += "#{action.site}\n" if action.site? 
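# the next two lines append the action type and, when present, the elapsed time,
# so a fork from a remote site ends up with a tooltip roughly like this
# (site name made up, exact wording depends on util.formatElapsedTime):
#   fancy.fed.wiki
#   fork 2 hours ago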
12 | title += action.type || 'separator' 13 | title += " #{util.formatElapsedTime(action.date)}" if action.date? 14 | $action = $(""" """).addClass("action").addClass(action.type || 'separator') 15 | .text(action.symbol || actionSymbols.symbols[action.type]) 16 | .attr('title',title) 17 | .attr('data-id', action.id || "0") 18 | .data('action', action) 19 | controls = $journal.children('.control-buttons') 20 | if controls.length > 0 21 | $action.insertBefore(controls) 22 | else 23 | $action.appendTo($journal) 24 | if action.type == 'fork' and action.site? 25 | $action 26 | .css("background-image", "url(#{wiki.site(action.site).flag()}") 27 | .attr("href", "#{wiki.site(action.site).getDirectURL($page.attr('id'))}.html") 28 | .attr("target", "#{action.site}") 29 | .data("site", action.site) 30 | .data("slug", $page.attr('id')) 31 | -------------------------------------------------------------------------------- /lib/bind.coffee: -------------------------------------------------------------------------------- 1 | # Bind connects the searchbox and the neighbors, both views, 2 | # to the neighborhood, the model that they use. This breaks 3 | # a dependency loop that will probably dissapear when views 4 | # are more event oriented. 5 | 6 | # Similarly state depends on injection rather than requiring 7 | # link and thereby breaks another dependency loop. 8 | 9 | neighborhood = require './neighborhood' 10 | neighbors = require './neighbors' 11 | searchbox = require './searchbox' 12 | 13 | state = require './state' 14 | link = require './link' 15 | 16 | $ -> 17 | 18 | searchbox.inject neighborhood 19 | searchbox.bind() 20 | 21 | neighbors.inject neighborhood 22 | neighbors.bind() 23 | 24 | if window.seedNeighbors 25 | seedNeighbors.split(',').forEach (site) -> 26 | neighborhood.registerNeighbor(site.trim()) 27 | 28 | state.inject link -------------------------------------------------------------------------------- /lib/datHandler.coffee: -------------------------------------------------------------------------------- 1 | # datHandler contains... 
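# roughly what follows: hold jQuery's ready event, work out the client and wiki
# origins, merge the client's default plugins.json routes with any site-local
# overrides, collect plugin pages and factory.json entries, pre-load plugins
# that declare editors, then release the hold; condensed to a hedged sketch:
#
#   $.holdReady true          # defer $ -> handlers until startup data is loaded
#   await loadPluginData()    # client defaults first, site overrides second
#   $.holdReady false         # let the rest of the client boot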
2 | 3 | $.holdReady true 4 | 5 | _ = require 'underscore' 6 | 7 | module.exports = datHandler = {} 8 | 9 | # we save details of each plugin 10 | pluginRoutes = {} 11 | pluginPages = {} 12 | factories = [] 13 | 14 | # a list of default wiki pages that the client holds 15 | defaultPages = [] 16 | 17 | datHandler.usingFrontend = usingFrontend = new URL(document.currentScript.src).href.includes('.ui') 18 | clientOrigin = '' 19 | wikiOrigin = '' 20 | 21 | 22 | datHandler.archive = wikiArchive = beaker.hyperdrive.drive(window.location.origin) 23 | 24 | datHandler.pluginPages = pluginPages 25 | datHandler.pluginRoutes = pluginRoutes 26 | datHandler.factories = factories 27 | datHandler.defaultPages = defaultPages 28 | datHandler.clientOrigin = clientOrigin 29 | 30 | datHandler.init = init = () -> 31 | 32 | # load configuration for plugins 33 | loadPluginData = () -> 34 | # fetch plugin defaults 35 | fetchDefaultPlugins = () -> 36 | url = clientOrigin + "/plugins.json" 37 | fetch(url) 38 | .then (response) -> 39 | return response.json() 40 | 41 | fetchLocalPlugins = () -> 42 | try 43 | data = await datHandler.archive.readFile('/plugins.json') 44 | parsedData = JSON.parse(data) 45 | catch error 46 | console.log "Fetch Local Plugins:", error 47 | parsedData = {} 48 | return parsedData 49 | 50 | defaultPlugins = await fetchDefaultPlugins() 51 | console.log '**** clientOrigin', clientOrigin 52 | _.each defaultPlugins, (pluginURL, plugin) -> 53 | # the default plugins are in the client's plugin directory, 54 | # the plugin URL from the clients `plugin.json` is relative, 55 | # so we prefix the pluginURL with the client origin. 56 | pluginRoutes[plugin] = clientOrigin + pluginURL 57 | # allow wiki site to load/override plugins 58 | if usingFrontend or !clientOrigin.startsWith wikiOrigin 59 | localPlugins = await fetchLocalPlugins() 60 | _.each localPlugins, (pluginURL, plugin) -> 61 | pluginRoutes[plugin] = pluginURL 62 | 63 | 64 | # build a list of plugin pages 65 | buildPluginPageList = () -> 66 | _.each pluginRoutes, (pluginURL, plugin) -> 67 | url = new URL(pluginURL) 68 | datOrigin = url.origin 69 | pluginPath = url.pathname 70 | if datOrigin is wikiOrigin 71 | pluginArchive = wikiArchive 72 | else 73 | pluginArchive = beaker.hyperdrive.drive(datOrigin) 74 | try 75 | pages = await pluginArchive.readdir(pluginPath + "/pages") 76 | catch error 77 | pages = [] 78 | _.each pages, (page) -> 79 | # we are only interested in page files 80 | pluginPages[page] = {url: pluginURL, plugin: plugin} if page.endsWith('.json') 81 | 82 | buildFactoriesList = () -> 83 | 84 | _.each pluginRoutes, (pluginURL, plugin) -> 85 | url = pluginURL + "/factory.json" 86 | fetch(url) 87 | .then (response) -> 88 | return response.json() 89 | .then (factoryJson) -> 90 | factories.push factoryJson 91 | .catch (err) -> 92 | console.log "No factory details for #{plugin}" 93 | 94 | buildDefaultPageList = () -> 95 | if usingFrontend 96 | try 97 | pages = await wikiArchive.readdir("/.ui/pages", {includeStats: true}) 98 | catch error 99 | pages = [] 100 | else 101 | clientArchive = beaker.hyperdrive.drive(clientOrigin) 102 | try 103 | pages = await clientArchive.readdir("/pages", {includeStats: true}) 104 | catch error 105 | pages = [] 106 | pages = pages.filter (page) -> page.stat.isFile() and page.name.endsWith('.json') 107 | _.each pages, (page) -> 108 | defaultPages.push page.name 109 | 110 | preLoadEditors = (catalog) -> 111 | catalog 112 | .filter((entry) -> entry.editor) 113 | .forEach((entry) -> 114 | console.log("#{entry.name} 
Plugin declares an editor, so pre-loading the plugin") 115 | wiki.getPlugin(entry.name.toLowerCase(), (plugin) -> 116 | if ! plugin.editor or typeof plugin.editor != 'function' 117 | console.log("""#{entry.name} Plugin ERROR. 118 | Cannot find `editor` function in plugin. Set `"editor": false` in factory.json or 119 | Correct the plugin to include all three of `{emit, bind, editor}` 120 | """) 121 | ) 122 | ) 123 | 124 | # are we using a mounted frontend? 125 | if usingFrontend 126 | clientOrigin = '/.ui' 127 | else 128 | clientOrigin = new URL(document.currentScript.src).origin 129 | wikiOrigin = window.location.origin 130 | # 131 | if clientOrigin.startsWith '/' 132 | clientOrigin = wikiOrigin + clientOrigin 133 | 134 | 135 | await loadPluginData() 136 | 137 | await buildPluginPageList() 138 | 139 | await buildFactoriesList() 140 | 141 | await buildDefaultPageList() 142 | 143 | preLoadEditors(factories) 144 | 145 | $.holdReady false 146 | 147 | init() 148 | -------------------------------------------------------------------------------- /lib/dialog.coffee: -------------------------------------------------------------------------------- 1 | # Dialog manages a single
    that is used to present a 2 | # jQuery UI dialog used for detail display, usually on 3 | # double click. 4 | 5 | resolve = require './resolve' 6 | 7 | $dialog = null 8 | 9 | emit = -> 10 | $dialog = $('
    ') 11 | .html('This dialog will show every time!') 12 | .dialog { autoOpen: false, title: 'Basic Dialog', height: 600, width: 800 } 13 | 14 | open = (title, html) -> 15 | $dialog.html html 16 | $dialog.dialog "option", "title", resolve.resolveLinks(title) 17 | $dialog.dialog 'open' 18 | 19 | module.exports = {emit, open} -------------------------------------------------------------------------------- /lib/drop.coffee: -------------------------------------------------------------------------------- 1 | # handle drops of wiki pages or thing that go on wiki pages 2 | # (we'll move decoding logic out of factory) 3 | 4 | nurl = require 'url' 5 | 6 | isFile = (event) -> 7 | if (dt = event.originalEvent.dataTransfer)? 8 | if 'Files' in dt.types 9 | return dt.files[0] 10 | null 11 | 12 | isUrl = (event) -> 13 | if (dt = event.originalEvent.dataTransfer)? 14 | if dt.types? and ('text/uri-list' in dt.types or 'text/x-moz-url' in dt.types) 15 | url = dt.getData 'URL' 16 | return url if url?.length 17 | null 18 | 19 | isPage = (url) -> 20 | if found = url.match /^(?:https?|hyper):\/\/([a-zA-Z0-9:.-]+)(\/#?([a-zA-Z0-9:.-]+)\/([a-z0-9-]+(_rev\d+)?))+$/ 21 | item = {} 22 | [ignore, origin, ignore, item.site, item.slug, ignore] = found 23 | item.site = origin if item.site in ['view','local','origin'] 24 | return item 25 | null 26 | 27 | isImage = (url) -> 28 | parsedURL = nurl.parse(url, true, true) 29 | if parsedURL.pathname.match(/\.(jpg|jpeg|png|svg)$/i) 30 | return url 31 | null 32 | 33 | isVideo = (url) -> 34 | parsedURL = nurl.parse(url, true, true) 35 | # check if video dragged from search (Google) 36 | try 37 | if parsedURL.query.source is 'video' 38 | parsedURL = nurl.parse(parsedURL.query.url, true, true) 39 | catch error 40 | 41 | 42 | switch parsedURL.hostname 43 | when "www.youtube.com" 44 | if parsedURL.query.list? 45 | return {text: "YOUTUBE PLAYLIST #{parsedURL.query.list}"} 46 | else 47 | return {text: "YOUTUBE #{parsedURL.query.v}"} 48 | when "youtu.be" # should redirect to www.youtube.com, but... 49 | if parsedURL.query.list? 50 | return {text: "YOUTUBE PLAYLIST #{parsedURL.query.list}"} 51 | else 52 | return {text: "YOUTUBE #{parsedURL.pathname.substr(1)}"} 53 | when "vimeo.com" 54 | return {text: "VIMEO #{parsedURL.pathname.substr(1)}"} 55 | when "archive.org" 56 | return {text: "ARCHIVE #{parsedURL.pathname.substr(parsedURL.pathname.lastIndexOf('/') + 1)}"} 57 | when "tedxtalks.ted.com" 58 | return {text: "TEDX #{parsedURL.pathname.substr(parsedURL.pathname.lastIndexOf('/') + 1)}"} 59 | when "www.ted.com" 60 | return {text: "TED #{parsedURL.pathname.substr(parsedURL.pathname.lastIndexOf('/') + 1)}"} 61 | else 62 | null 63 | 64 | 65 | dispatch = (handlers) -> 66 | (event) -> 67 | stop = (ignored) -> 68 | event.preventDefault() 69 | event.stopPropagation() 70 | if url = isUrl event 71 | if page = isPage url 72 | if (handle = handlers.page)? 73 | return stop handle page 74 | if video = isVideo url 75 | if (handle = handlers.video)? 76 | return stop handle video 77 | if image = isImage url 78 | if (handle = handlers.image)? 79 | return stop handle image 80 | punt = {url} 81 | if file = isFile event 82 | if (handle = handlers.file)? 83 | return stop handle file 84 | punt = {file} 85 | if (handle = handlers.punt)? 
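# punt is the catch-all: a url or file that no specific handler claimed arrives
# as {url} or {file}, otherwise the raw dataTransfer and its types are passed on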
86 | punt ||= {dt:event.dataTransfer, types:event.dataTransfer?.types} 87 | stop handle punt 88 | 89 | 90 | module.exports = {dispatch} 91 | -------------------------------------------------------------------------------- /lib/editor.coffee: -------------------------------------------------------------------------------- 1 | # Editor provides a small textarea for editing wiki markup. 2 | # It can split and join paragraphs markup but leaves other 3 | # types alone assuming they will interpret multiple lines. 4 | 5 | plugin = require './plugin' 6 | itemz = require './itemz' 7 | pageHandler = require './pageHandler' 8 | link = require './link' 9 | random = require './random' 10 | 11 | 12 | # Editor takes a div and an item that goes in it. 13 | # Options manage state during splits and joins. 14 | # Options are available to plugins but rarely used. 15 | # 16 | # caret: position -- sets the cursor at the point of join 17 | # append: true -- sets the cursor to end and scrolls there 18 | # after: id -- new item to be added after id 19 | # sufix: text -- editor opens with unsaved suffix appended 20 | # field: 'text' -- editor operates on this field of the item 21 | 22 | escape = (string) -> 23 | string 24 | .replace(/&/g, '&') 25 | .replace(//g, '>') 27 | 28 | textEditor = ($item, item, option={}) -> 29 | console.log 'textEditor', item.id, option 30 | enterCount = 0 if item.type is 'markdown' 31 | return unless $('.editEnable').is(':visible') 32 | 33 | keydownHandler = (e) -> 34 | 35 | if e.which == 27 #esc for save 36 | e.preventDefault() 37 | $textarea.focusout() 38 | return false 39 | 40 | if (e.ctrlKey || e.metaKey) and e.which == 83 #ctrl-s for save 41 | e.preventDefault() 42 | $textarea.focusout() 43 | return false 44 | 45 | if (e.ctrlKey || e.metaKey) and e.which == 73 #ctrl-i for information 46 | e.preventDefault() 47 | page = $(e.target).parents('.page') unless e.shiftKey 48 | link.doInternalLink "about #{item.type} plugin", page 49 | return false 50 | 51 | if (e.ctrlKey || e.metaKey) and e.which == 77 #ctrl-m for menu 52 | e.preventDefault() 53 | $item.removeClass(item.type).addClass(item.type = 'factory') 54 | $textarea.focusout() 55 | return false 56 | 57 | # provides automatic new paragraphs on enter and concatenation on backspace 58 | if item.type is 'paragraph' or item.type is 'markdown' 59 | sel = getSelectionPos($textarea) # position of caret or selected text coords 60 | 61 | if e.which is $.ui.keyCode.BACKSPACE and sel.start is 0 and sel.start is sel.end 62 | $previous = $item.prev() 63 | previous = itemz.getItem $previous 64 | return false unless previous.type is item.type 65 | caret = previous[option.field||'text'].length 66 | suffix = $textarea.val() 67 | $textarea.val('') # Need current text area to be empty. Item then gets deleted. 
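# joining works by re-opening the previous item with this item's text as an
# unsaved suffix and the caret at the old end of the previous text; focusout on
# the emptied textarea then removes the current item from the page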
68 | textEditor $previous, previous, {caret, suffix} 69 | return false 70 | 71 | if e.which is $.ui.keyCode.ENTER 72 | # console.log "Type: #{item.type}, enterCount: #{enterCount}" 73 | return false unless sel 74 | if item.type is 'markdown' 75 | enterCount++ 76 | # console.log "Type: #{item.type}, enterCount: #{enterCount}" 77 | if item.type is 'paragraph' or (item.type is 'markdown' and enterCount is 2) 78 | $page = $item.parents('.page') 79 | text = $textarea.val() 80 | prefix = text.substring 0, sel.start 81 | suffix = text.substring(sel.end) 82 | if prefix is '' 83 | $textarea.val(suffix) 84 | $textarea.focusout() 85 | spawnEditor($page, $item.prev(), item.type, prefix) 86 | else 87 | $textarea.val(prefix) 88 | $textarea.focusout() 89 | spawnEditor($page, $item, item.type, suffix) 90 | return false 91 | else 92 | enterCount = 0 if item.type is 'markdown' 93 | 94 | focusoutHandler = -> 95 | $item.removeClass 'textEditing' 96 | $textarea.unbind() 97 | $page = $item.parents('.page:first') 98 | if item[option.field||'text'] = $textarea.val() 99 | # Remove output and source styling as type may have changed. 100 | $item.removeClass("output-item") 101 | $item.removeClass (_index, className) -> 102 | return (className.match(/\S+-source/) || []).join " " 103 | plugin.do $item.empty(), item 104 | if option.after 105 | return if item[option.field||'text'] == '' 106 | pageHandler.put $page, {type: 'add', id: item.id, item: item, after: option.after} 107 | else 108 | return if item[option.field||'text'] == original 109 | pageHandler.put $page, {type: 'edit', id: item.id, item: item} 110 | else 111 | unless option.after 112 | pageHandler.put $page, {type: 'remove', id: item.id} 113 | index = $(".item").index($item) 114 | $item.remove() 115 | plugin.renderFrom index 116 | null 117 | 118 | return if $item.hasClass 'textEditing' 119 | $item.addClass 'textEditing' 120 | $item.unbind() 121 | original = item[option.field||'text'] ? '' 122 | $textarea = $("") 123 | .focusout focusoutHandler 124 | .bind 'keydown', keydownHandler 125 | $item.html $textarea 126 | if option.caret 127 | setCaretPosition $textarea, option.caret 128 | else if option.append # we want the caret to be at the end 129 | setCaretPosition $textarea, $textarea.val().length 130 | #scrolls to bottom of text area 131 | $textarea.scrollTop($textarea[0].scrollHeight - $textarea.height()) 132 | else 133 | $textarea.focus() 134 | 135 | spawnEditor = ($page, $before, type, text) -> 136 | item = 137 | type: type 138 | id: random.itemId() 139 | text: text 140 | $item = $ """
    """ 141 | $item 142 | .data('item', item) 143 | .data('pageElement', $page) 144 | $before.after $item 145 | before = itemz.getItem $before 146 | textEditor $item, item, {after: before?.id} 147 | 148 | 149 | # If the selection start and selection end are both the same, 150 | # then you have the caret position. If there is selected text, 151 | # the browser will not tell you where the caret is, but it will 152 | # either be at the beginning or the end of the selection 153 | # (depending on the direction of the selection). 154 | 155 | getSelectionPos = ($textarea) -> 156 | el = $textarea.get(0) # gets DOM Node from from jQuery wrapper 157 | if document.selection # IE 158 | el.focus() 159 | sel = document.selection.createRange() 160 | sel.moveStart 'character', -el.value.length 161 | iePos = sel.text.length 162 | {start: iePos, end: iePos} 163 | else 164 | {start: el.selectionStart, end: el.selectionEnd} 165 | 166 | setCaretPosition = ($textarea, caretPos) -> 167 | el = $textarea.get(0) 168 | if el? 169 | if el.createTextRange # IE 170 | range = el.createTextRange() 171 | range.move "character", caretPos 172 | range.select() 173 | else # rest of the world 174 | el.setSelectionRange caretPos, caretPos 175 | el.focus() 176 | 177 | # # may want special processing on paste eventually 178 | # textarea.bind 'paste', (e) -> 179 | # console.log 'textedit paste', e 180 | # console.log e.originalEvent.clipboardData.getData('text') 181 | 182 | module.exports = {textEditor} 183 | -------------------------------------------------------------------------------- /lib/forward.js: -------------------------------------------------------------------------------- 1 | // This is not needed, as is, in hyperdrive as there is no server... 2 | // rather than using socket.io, there may be some interesting 3 | // uses using peerSockets or listening for changes in the hyperdrive. 4 | // So, we won't just remove this, but retain it as a reminder of 5 | // some future avenue of discovery. 
6 | 7 | function pageFor(pageKey) { 8 | let $page = $('.page').filter((_i, page) => $(page).data('key') == pageKey) 9 | if ($page.length == 0) return null 10 | if ($page.length > 1) console.log('warning: more than one page found for', key, $page) 11 | return $page[0] 12 | } 13 | 14 | function itemElemFor(pageItem) { 15 | let [pageKey, item] = pageItem.split('/') 16 | let page = pageFor(pageKey) 17 | if(!page) return null 18 | let $item = $(page).find(`.item[data-id=${item}]`) 19 | if ($item.length == 0) return null 20 | if ($item.length > 1) console.log('warning: more than one item found for', pageItem, $item) 21 | return $item[0] 22 | } 23 | 24 | function slugItemFor(itemElem) { 25 | let slug = $(itemElem).parents('.page:first').attr('id').split('_')[0] 26 | let id = $(itemElem).attr('data-id') 27 | let slugItem = `${slug}/${id}` 28 | return slugItem 29 | } 30 | 31 | // map of client producers keyed by server consumers 32 | // client producers are identified by their pageItem 33 | // server consumers are identified by their slugItem 34 | const cProducers = {} 35 | const sConsumers = [] 36 | var withSocket = new Promise((resolve, reject) => { 37 | $.getScript('/socket.io/socket.io.js').done(() => { 38 | console.log('socket.io loaded successfully!') 39 | var socket = io() 40 | socket.on('reconnect', () => { 41 | console.log('reconnected: reregistering client side listeners', sConsumers) 42 | sConsumers.forEach(sConsumer => { 43 | // only need to inform the server since client side listeners survive a disconnect 44 | socket.emit('subscribe', sConsumer) 45 | }) 46 | }) 47 | window.socket = socket 48 | resolve(socket) 49 | }).fail(() => { 50 | console.log('unable to load socket.io') 51 | reject(Error('unable to load socket.io')) 52 | }) 53 | }) 54 | 55 | function listener({slugItem, result}) { 56 | let sConsumer = slugItem 57 | let missing = [] 58 | cProducers[sConsumer].forEach(cProducer => { 59 | let [pageKey, item] = cProducer.split('/') 60 | let itemElem = itemElemFor(cProducer) 61 | if (!itemElem) { 62 | missing.push(cProducer) 63 | return 64 | } 65 | eventProcessors[cProducer]($(itemElem), cProducer, result) 66 | }) 67 | missing.forEach(cProducer => { 68 | // The item for the producer has been moved or removed, unregister the listener. 
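// once an item disappears its producer is dropped; when a consumer has no
// producers left, the socket listener is removed and the server is asked to
// unsubscribe, so nothing keeps streaming for items no longer on screen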
69 | console.log("Removing client side listener for", cProducer) 70 | cProducers[sConsumer].splice(cProducers[sConsumer].indexOf(cProducer), 1) 71 | eventProcessors[cProducer] = null 72 | if (cProducers[sConsumer].length == 0) { 73 | delete cProducers[sConsumer] 74 | console.log('Removing server side listener for', sConsumer) 75 | sConsumers.splice(sConsumers.indexOf(sConsumer), 1) 76 | withSocket.then(socket => { 77 | // stop listening and tell the server to stop sending 78 | socket.off(sConsumer, listener) 79 | socket.emit('unsubscribe', sConsumer) 80 | }) 81 | } 82 | }) 83 | } 84 | 85 | function registerHandler({sConsumer, cProducer, socket}) { 86 | if (!cProducers[sConsumer]) cProducers[sConsumer] = [] 87 | if (sConsumers.indexOf(sConsumer) == -1) { 88 | sConsumers.push(sConsumer) 89 | console.log(`subscribing to ${sConsumer}`, sConsumers) 90 | socket.on(sConsumer, listener) 91 | socket.emit('subscribe', sConsumer) 92 | } 93 | if (cProducers[sConsumer].indexOf(cProducer) == -1) { 94 | cProducers[sConsumer].push(cProducer) 95 | console.log('adding producer', cProducer, cProducers) 96 | } 97 | } 98 | 99 | let eventProcessors = {} 100 | function init($item, item, processEvent) { 101 | let bound = withSocket.then((socket) => { 102 | let page = $item.parents('.page').data('key') 103 | let slug = $page.attr('id').split('_')[0] 104 | let id = item.id 105 | 106 | let cProducer = `${page}/${id}` 107 | let sConsumer = `${slug}/${id}` 108 | eventProcessors[cProducer] = processEvent 109 | return {sConsumer, cProducer, socket} 110 | }) 111 | bound.then(registerHandler) 112 | } 113 | 114 | module.exports = {init} 115 | 116 | -------------------------------------------------------------------------------- /lib/future.coffee: -------------------------------------------------------------------------------- 1 | # A Future plugin represents a page that hasn't been written 2 | # or wasn't found where expected. It recognizes template pages 3 | # and offers to clone them or make a blank page. 4 | 5 | resolve = require './resolve' 6 | neighborhood = require './neighborhood' 7 | 8 | lineup = require './lineup' 9 | refresh = require './refresh' 10 | 11 | emit = ($item, item) -> 12 | $item.append """#{item.text}

    new blank page""" 13 | if transport = item.create?.source?.transport 14 | $item.append """
    transport from #{transport}""" 15 | $item.append "

    unavailable

    " 16 | $.get '//localhost:4020', -> 17 | $item.find('.caption').text 'ready' 18 | if (info = neighborhood.sites[location.host])? and info.sitemap? 19 | for item in info.sitemap 20 | if item.slug.match /-template$/ 21 | $item.append """
    from #{resolve.resolveLinks "[[#{item.title}]]"}""" 22 | 23 | bind = ($item, item) -> 24 | $item.find('button.transport').click (e) -> 25 | $item.find('.caption').text 'waiting' 26 | 27 | # duplicatingTransport and Templage logic 28 | 29 | params = 30 | title: $item.parents('.page').data('data').title 31 | create: item.create 32 | 33 | req = 34 | type: "POST", 35 | url: item.create.source.transport 36 | dataType: 'json', 37 | contentType: "application/json", 38 | data: JSON.stringify(params) 39 | 40 | $.ajax(req).done (page) -> 41 | $item.find('.caption').text 'ready' 42 | resultPage = wiki.newPage(page) 43 | $page = $item.parents('.page') 44 | pageObject = lineup.atKey $page.data('key') 45 | pageObject.become(resultPage,resultPage) 46 | page = pageObject.getRawPage() 47 | refresh.rebuildPage pageObject, $page.empty() 48 | 49 | 50 | module.exports = {emit, bind} 51 | -------------------------------------------------------------------------------- /lib/image.coffee: -------------------------------------------------------------------------------- 1 | # An Image plugin presents a picture with a caption. The image source 2 | # can be any URL but we have been using "data urls" so as to get the 3 | # proper sharing semantics if not storage efficency. 4 | 5 | dialog = require './dialog' 6 | editor = require './editor' 7 | resolve = require './resolve' 8 | 9 | ipfs = false 10 | ipfs_probed = false 11 | 12 | probe_ipfs = () -> 13 | ipfs_probed = true 14 | gateway = "http://127.0.0.1:8080" 15 | $.ajax "#{gateway}/ipfs/Qmb1oS3TaS8vekxXqogoYsixe47sXcVxQ22kPWH8VSd7yQ", 16 | timeout: 30000 17 | success: (data) -> ipfs = data == "wiki\n" 18 | complete: (xhr, status) -> console.log "ipfs gateway #{status}" 19 | 20 | emit = ($item, item) -> 21 | item.text ||= item.caption 22 | $item.append "

    #{resolve.resolveLinks(item.text)}

    " 23 | 24 | bind = ($item, item) -> 25 | $item.dblclick -> 26 | editor.textEditor $item, item 27 | 28 | if (item.ipfs and not ipfs_probed) 29 | probe_ipfs() 30 | 31 | $item.find('img').dblclick (event) -> 32 | event.stopPropagation() 33 | url = if ipfs and item.ipfs? 34 | "#{gateway}/ipfs/#{item.ipfs}" 35 | else if item.source? 36 | # somehow test for continued existnace? Maybe register an error handler? 37 | item.source 38 | else 39 | item.url 40 | dialog.open item.text, """""" 41 | 42 | module.exports = {emit, bind} 43 | -------------------------------------------------------------------------------- /lib/importer.coffee: -------------------------------------------------------------------------------- 1 | # An Importer plugin completes the ghost page created upon drop of a site export file. 2 | 3 | util = require './util' 4 | link = require './link' 5 | newPage = require('./page').newPage 6 | 7 | escape = (text)-> 8 | text 9 | .replace /&/g, '&' 10 | .replace //g, '>' 12 | 13 | emit = ($item, item) -> 14 | 15 | render = (pages) -> 16 | result = [] 17 | for slug, page of pages 18 | line = "
    #{ escape(page.title) || slug }" 19 | if page.journal 20 | if (date = page.journal[page.journal.length - 1].date) 21 | line += "   from #{util.formatElapsedTime date}" 22 | else 23 | line += "   from revision #{page.journal.length - 1}" 24 | result.push line 25 | result.join '
    ' 26 | 27 | $item.append """ 28 |

    29 | #{render item.pages} 30 |

    31 | """ 32 | 33 | bind = ($item, item) -> 34 | $item.find('a').click (e) -> 35 | slug = $(e.target).attr('href') 36 | $page = $(e.target).parents('.page') unless e.shiftKey 37 | pageObject = newPage(item.pages[slug]) 38 | link.showResult pageObject, {$page, rev:pageObject.getRevision()} 39 | false 40 | 41 | module.exports = {emit, bind} 42 | -------------------------------------------------------------------------------- /lib/itemz.coffee: -------------------------------------------------------------------------------- 1 | # The itemz module understands how we have been keeping track of 2 | # story items and their corresponding divs. It offers utility 3 | # functions used elsewere. We anticipate a more proper model eventually. 4 | 5 | pageHandler = require './pageHandler' 6 | plugin = require './plugin' 7 | random = require './random' 8 | 9 | 10 | sleep = (time, done) -> setTimeout done, time 11 | 12 | getItem = ($item) -> 13 | $($item).data("item") or $($item).data('staticItem') if $($item).length > 0 14 | 15 | removeItem = ($item, item) -> 16 | pageHandler.put $item.parents('.page:first'), {type: 'remove', id: item.id} 17 | $item.remove() 18 | 19 | createItem = ($page, $before, item) -> 20 | $page = $before.parents('.page') unless $page? 21 | item.id = random.itemId() 22 | $item = $ """ 23 |
    24 | """ 25 | $item 26 | .data('item', item) 27 | .data('pageElement', $page) 28 | if $before? 29 | $before.after $item 30 | else 31 | $page.find('.story').append $item 32 | plugin.do $item, item 33 | before = getItem $before 34 | sleep 500, -> 35 | pageHandler.put $page, {item, id: item.id, type: 'add', after: before?.id} 36 | $item 37 | 38 | replaceItem = ($item, type, item) -> 39 | newItem = $.extend({}, item) 40 | $item.empty().unbind() 41 | $item.removeClass(type).addClass(newItem.type) 42 | $page = $item.parents('.page:first') 43 | try 44 | $item.data 'pageElement', $page 45 | $item.data 'item', newItem 46 | plugin.getPlugin item.type, (plugin) -> 47 | plugin.emit $item, newItem 48 | plugin.bind $item, newItem 49 | catch err 50 | $item.append "

    #{err}

    " 51 | pageHandler.put $page, {type: 'edit', id: newItem.id, item: newItem} 52 | 53 | module.exports = {createItem, removeItem, getItem, replaceItem} -------------------------------------------------------------------------------- /lib/license.coffee: -------------------------------------------------------------------------------- 1 | # The license module explains federated wiki license terms 2 | # including the proper attribution of collaborators. 3 | resolve = require './resolve' 4 | lineup = require './lineup' 5 | cc = -> 6 | """ 7 |

    8 | 9 | Creative Commons License 10 |

    11 | This work is licensed under a 12 | 13 | Creative Commons Attribution-ShareAlike 4.0 International License 14 | . 15 |

    16 | This license applies uniformly to all contributions 17 | by all authors. Where authors quote other sources 18 | they do so within the terms of fair use or other 19 | compatible terms. 20 |

    21 | """ 22 | authors = (page, site) -> 23 | return "" unless page.journal? 24 | done = {} 25 | list = [] 26 | for action in page.journal.slice(0).reverse() 27 | site = action.site if action.site? 28 | unless action.type is 'fork' or done[site]? 29 | siteURL = wiki.site(site).getDirectURL("") 30 | siteFlag = wiki.site(site).flag() 31 | list.push """ #{site}""" 32 | done[site] = true 33 | return "" unless list.length > 0 34 | """ 35 |

    36 | Author's Sites: 37 |

    38 | #{list.join "
    "} 39 |

    40 | """ 41 | provenance = (action) -> 42 | return "" unless action?.provenance? 43 | """ 44 |

    45 | Created From: 46 |

    47 | #{resolve.resolveLinks action.provenance} 48 |

    49 | """ 50 | info = ($page) -> 51 | pageObject = lineup.atKey($page.data('key')) 52 | page = pageObject.getRawPage() 53 | site = pageObject.getRemoteSite location.hostname 54 | cc() + 55 | authors(page, site) + 56 | provenance(page.journal[0]) 57 | module.exports = {info} 58 | -------------------------------------------------------------------------------- /lib/lineup.coffee: -------------------------------------------------------------------------------- 1 | # The lineup represents a sequence of pages with possible 2 | # duplication. We maintain the lineup in parallel with 3 | # the DOM list of .page elements. Eventually lineup will 4 | # play a more central role managing calculations and 5 | # display updates. 6 | 7 | random = require './random' 8 | 9 | pageByKey = {} 10 | keyByIndex = [] 11 | 12 | 13 | # Basic manipulations that correspond to typical user activity 14 | 15 | addPage = (pageObject) -> 16 | key = random.randomBytes 4 17 | pageByKey[key] = pageObject 18 | keyByIndex.push key 19 | return key 20 | 21 | changePageIndex = (key, newIndex) -> 22 | oldIndex = keyByIndex.indexOf key 23 | keyByIndex.splice(oldIndex, 1) 24 | keyByIndex.splice(newIndex, 0, key) 25 | 26 | removeKey = (key) -> 27 | return null unless key in keyByIndex 28 | keyByIndex = keyByIndex.filter (each) -> key != each 29 | delete pageByKey[key] 30 | key 31 | 32 | removeAllAfterKey = (key) -> 33 | result = [] 34 | return result unless key in keyByIndex 35 | while keyByIndex[keyByIndex.length-1] != key 36 | unwanted = keyByIndex.pop() 37 | result.unshift unwanted 38 | delete pageByKey[unwanted] 39 | result 40 | 41 | atKey = (key) -> 42 | pageByKey[key] 43 | 44 | titleAtKey = (key) -> 45 | atKey(key).getTitle() 46 | 47 | bestTitle = -> 48 | return "Wiki" unless keyByIndex.length 49 | titleAtKey keyByIndex[keyByIndex.length-1] 50 | 51 | 52 | # Debug access to internal state used by unit tests. 53 | 54 | debugKeys = -> 55 | keyByIndex 56 | 57 | debugReset = -> 58 | pageByKey = {} 59 | keyByIndex = [] 60 | 61 | 62 | # Debug self-check which corrects misalignments until we get it right 63 | 64 | debugSelfCheck = (keys) -> 65 | return if (have = "#{keyByIndex}") is (want = "#{keys}") 66 | console.log 'The lineup is out of sync with the dom.' 67 | console.log ".pages:", keys 68 | console.log "lineup:", keyByIndex 69 | return unless "#{Object.keys(keyByIndex).sort()}" is "#{Object.keys(keys).sort()}" 70 | console.log 'It looks like an ordering problem we can fix.' 71 | keysByIndex = keys 72 | 73 | 74 | # Select a few crumbs from the lineup that will take us 75 | # close to welcome-visitors on a (possibly) remote site. 76 | 77 | leftKey = (key) -> 78 | pos = keyByIndex.indexOf key 79 | return null if pos < 1 80 | keyByIndex[pos-1] 81 | 82 | crumbs = (key, location) -> 83 | page = pageByKey[key] 84 | host = page.getRemoteSite(location) 85 | result = ['view', slug = page.getSlug()] 86 | result.unshift('view', 'welcome-visitors') unless slug == 'welcome-visitors' 87 | if host != location and (left = leftKey key)? 
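# the finished crumbs array reads like a route, e.g. (site names made up):
#   ['fancy.fed.wiki', 'view', 'welcome-visitors', 'view', 'some-page', 'our.host', 'left-page']
# i.e. the remote host first, then a path back through welcome-visitors, with
# the local page to the left appended when it can anchor the trail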
88 | unless (adjacent = pageByKey[left]).isRemote() 89 | result.push(location, adjacent.getSlug()) 90 | result.unshift(host) 91 | result 92 | 93 | 94 | module.exports = {addPage, changePageIndex, removeKey, removeAllAfterKey, atKey, titleAtKey, bestTitle, debugKeys, debugReset, crumbs, debugSelfCheck} 95 | -------------------------------------------------------------------------------- /lib/link.coffee: -------------------------------------------------------------------------------- 1 | # Here is where we attach federated semantics to internal 2 | # links. Call doInternalLink to add a new page to the display 3 | # given a page name, a place to put it and an optional site 4 | # to retrieve it from. 5 | 6 | lineup = require './lineup' 7 | active = require './active' 8 | refresh = require './refresh' 9 | {asTitle, asSlug, pageEmitter} = require './page' 10 | 11 | createPage = (name, loc) -> 12 | site = loc if loc and loc isnt 'view' 13 | title = asTitle(name) 14 | $page = $ """ 15 |
    16 |
    17 |

    18 |
    19 |

    #{title}

    20 |
    21 |
    22 |
    23 | """ 24 | $page.data('site', site) if site 25 | $page 26 | 27 | showPage = (name, loc) -> 28 | createPage(name, loc).appendTo('.main').each((_i, e) -> refresh.cycle($(e))) 29 | 30 | doInternalLink = (name, $page, site=null) -> 31 | name = asSlug(name) 32 | $($page).nextAll().remove() if $page? 33 | lineup.removeAllAfterKey $($page).data('key') if $page? 34 | showPage(name,site) 35 | active.set($('.page').last()) 36 | 37 | showResult = (pageObject, options={}) -> 38 | $(options.$page).nextAll().remove() if options.$page? 39 | lineup.removeAllAfterKey $(options.$page).data('key') if options.$page? 40 | slug = pageObject.getSlug() 41 | slug += "_rev#{options.rev}" if options.rev? 42 | $page = createPage(slug).addClass('ghost') 43 | $page.appendTo($('.main')) 44 | refresh.buildPage( pageObject, $page ) 45 | active.set($('.page').last()) 46 | 47 | pageEmitter.on 'show', (page) -> 48 | console.log 'pageEmitter handling', page 49 | showResult page 50 | 51 | module.exports = {createPage, doInternalLink, showPage, showResult} 52 | -------------------------------------------------------------------------------- /lib/neighborhood.coffee: -------------------------------------------------------------------------------- 1 | # The neighborhood provides a cache of site maps read from 2 | # various federated wiki sites. It is careful to fetch maps 3 | # slowly and keeps track of get requests in flight. 4 | 5 | _ = require 'underscore' 6 | sitemapHandler = require "./sitemapHandler" 7 | siteindexHandler = require "./siteindexHandler" 8 | 9 | miniSearch = require 'minisearch' 10 | 11 | module.exports = neighborhood = {} 12 | 13 | neighborhood.sites = {} 14 | nextAvailableFetch = 0 15 | nextFetchInterval = 500 16 | 17 | populateSiteInfoFor = (site,neighborInfo)-> 18 | return if neighborInfo.sitemapRequestInflight 19 | neighborInfo.sitemapRequestInflight = true 20 | 21 | transition = (site, from, to) -> 22 | $(""".neighbor[data-site="#{site}"]""") 23 | .find('div') 24 | .removeClass(from) 25 | .addClass(to) 26 | 27 | fetchMap = -> 28 | transition site, 'wait', 'fetch' 29 | wiki.site(site).get 'system/sitemap.json', (err, data) -> 30 | neighborInfo.sitemapRequestInflight = false 31 | if !err 32 | neighborInfo.sitemap = data 33 | transition site, 'fetch', 'done' 34 | $('body').trigger 'new-neighbor-done', site 35 | else 36 | transition site, 'fetch', 'fail' 37 | try 38 | wiki.site(site).refresh () -> 39 | # empty function 40 | 41 | 42 | 43 | # we use `wiki.site(site).getIndex` as we want the serialized index as a string. 44 | wiki.site(site).getIndex 'system/site-index.json', (err, data) -> 45 | if !err 46 | neighborInfo.siteIndex = miniSearch.loadJSON(data, { 47 | fields: ['title', 'content'] 48 | }) 49 | console.log site, 'index loaded' 50 | else 51 | console.log 'error loading index', site, err 52 | 53 | now = Date.now() 54 | if now > nextAvailableFetch 55 | nextAvailableFetch = now + nextFetchInterval 56 | setTimeout fetchMap, 100 57 | else 58 | setTimeout fetchMap, nextAvailableFetch - now 59 | nextAvailableFetch += nextFetchInterval 60 | 61 | neighborhood.retryNeighbor = (site)-> 62 | console.log 'retrying neighbor' 63 | neighborInfo = {} 64 | neighborhood.sites[site] = neighborInfo 65 | populateSiteInfoFor(site, neighborInfo) 66 | 67 | neighborhood.registerNeighbor = (site)-> 68 | return if neighborhood.sites[site]? 
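# first-time registration only: an empty record is stored, a throttled sitemap
# fetch is scheduled, and 'new-neighbor' is announced on body so the flag bar
# can react; a hedged usage sketch (site name made up):
#   neighborhood.registerNeighbor 'fancy.fed.wiki'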
69 | neighborInfo = {} 70 | neighborhood.sites[site] = neighborInfo 71 | populateSiteInfoFor( site, neighborInfo ) 72 | $('body').trigger 'new-neighbor', site 73 | 74 | neighborhood.updateSitemap = (pageObject)-> 75 | site = location.host 76 | return unless neighborInfo = neighborhood.sites[site] 77 | return if neighborInfo.sitemapRequestInflight 78 | slug = pageObject.getSlug() 79 | date = pageObject.getDate() 80 | title = pageObject.getTitle() 81 | synopsis = pageObject.getSynopsis() 82 | entry = {slug, date, title, synopsis} 83 | sitemap = neighborInfo.sitemap 84 | index = sitemap.findIndex (slot) -> slot.slug == slug 85 | sitemapHandler.update(sitemap) 86 | if index >= 0 87 | sitemap[index] = entry 88 | else 89 | sitemap.push entry 90 | $('body').trigger 'new-neighbor-done', site 91 | 92 | neighborhood.deleteFromSitemap = (pageObject)-> 93 | site = location.host 94 | return unless neighborInfo = neighborhood.sites[site] 95 | return if neighborInfo.sitemapRequestInflight 96 | slug = pageObject.getSlug() 97 | sitemap = neighborInfo.sitemap 98 | index = sitemap.findIndex (slot) -> slot.slug == slug 99 | return unless index >= 0 100 | sitemap.splice(index) 101 | sitemapHandler.update(sitemap) 102 | $('body').trigger 'delete-neighbor-done', site 103 | 104 | neighborhood.listNeighbors = ()-> 105 | _.keys( neighborhood.sites ) 106 | 107 | # Page Search 108 | 109 | extractPageText = (pageText, currentItem) -> 110 | switch currentItem.type 111 | when 'paragraph' 112 | pageText += ' ' + currentItem.text.replace /\[{1,2}|\]{1,2}/g, '' 113 | when 'markdown' 114 | # really need to extract text from the markdown, but for now just remove link brackets... 115 | pageText += ' ' + currentItem.text.replace /\[{1,2}|\]{1,2}/g, '' 116 | when 'html' 117 | pageText += ' ' + currentItem.text.replace /<[^>]*>/g, '' 118 | else 119 | if currentItem.text? 
120 | for line in currentItem.text.split /\r\n?|\n/ 121 | pageText += ' ' + line.replace /\[{1,2}|\]{1,2}/g, '' unless line.match /^[A-Z]+[ ].*/ 122 | pageText 123 | 124 | 125 | neighborhood.updateIndex = (pageObject, originalStory) -> 126 | console.log "updating #{pageObject.getSlug()} in index" 127 | site = location.host 128 | return unless neighborInfo = neighborhood.sites[site] 129 | 130 | originalText = originalStory.reduce( extractPageText, '') 131 | 132 | slug = pageObject.getSlug() 133 | title = pageObject.getTitle() 134 | rawStory = pageObject.getRawPage().story 135 | newText = rawStory.reduce( extractPageText, '') 136 | 137 | # try remove original page from index 138 | try 139 | neighborInfo.siteIndex.remove { 140 | 'id': slug 141 | 'title': title 142 | 'content': originalText 143 | } 144 | catch err 145 | # swallow error, if the page was not in index 146 | console.log "removing #{slug} from index failed", err unless err.message.includes('not in the index') 147 | 148 | neighborInfo.siteIndex.add { 149 | 'id': slug 150 | 'title': title 151 | 'content': newText 152 | } 153 | # save the updated index 154 | siteindexHandler.update() 155 | 156 | neighborhood.deleteFromIndex = (pageObject) -> 157 | site = location.host 158 | return unless neighborInfo = neighborhood.sites[site] 159 | 160 | slug = pageObject.getSlug() 161 | title = pageObject.getTitle() 162 | rawStory = pageObject.getRawPage().story 163 | pageText = rawStory.reduce(extractPageText, '') 164 | try 165 | neighborInfo.siteIndex.remove { 166 | 'id': slug 167 | 'title': title 168 | 'content': pageText 169 | } 170 | catch err 171 | # swallow error, if the page was not in index 172 | console.log "removing #{slug} from index failed", err unless err.message.includes('not in the index') 173 | 174 | # save the updated index 175 | siteindexHandler.update() 176 | 177 | 178 | neighborhood.search = (searchQuery)-> 179 | finds = [] 180 | tally = {} 181 | 182 | tick = (key) -> 183 | if tally[key]? then tally[key]++ else tally[key] = 1 184 | 185 | 186 | 187 | indexSite = (site, siteInfo) -> 188 | timeLabel = "indexing sitemap ( #{site} )" 189 | console.time timeLabel 190 | console.log 'indexing sitemap:', site 191 | siteIndex = new miniSearch({ 192 | fields: ['title', 'content'] 193 | }) 194 | neighborInfo.sitemap.forEach ((page) -> 195 | siteIndex.add { 196 | 'id': page.slug 197 | 'title': page.title 198 | 'content': page.synopsis 199 | } 200 | return 201 | ) 202 | console.timeEnd timeLabel 203 | return siteIndex 204 | 205 | start = Date.now() 206 | # load, or create (from sitemap), site index 207 | for own neighborSite,neighborInfo of neighborhood.sites 208 | if neighborInfo.sitemap 209 | # do we already have an index? 210 | unless neighborInfo.siteIndex? 211 | # create an index using sitemap 212 | neighborInfo.siteIndex = indexSite(neighborSite, neighborInfo) 213 | 214 | origin = location.host 215 | for own neighborSite,neighborInfo of neighborhood.sites 216 | if neighborInfo.siteIndex 217 | tick 'sites' 218 | if tally['pages']? 
219 | tally['pages'] += neighborInfo.sitemap.length 220 | else 221 | tally['pages'] = neighborInfo.sitemap.length 222 | if neighborSite is origin 223 | titleBoost = 20 224 | contentBoost = 2 225 | else 226 | titleBoost = 10 227 | contentBoost = 1 228 | searchResult = neighborInfo.siteIndex.search searchQuery, 229 | boost: 230 | title: titleBoost 231 | content: contentBoost 232 | prefix: true 233 | combineWith: 'AND' 234 | searchResult.forEach (result) -> 235 | tick 'finds' 236 | finds.push 237 | page: neighborInfo.sitemap.find ({slug}) => slug is result.id 238 | site: neighborSite 239 | rank: result.score 240 | 241 | # sort the finds by rank 242 | finds.sort (a,b) -> 243 | return b.rank - a.rank 244 | 245 | tally['msec'] = Date.now() - start 246 | { finds, tally } 247 | -------------------------------------------------------------------------------- /lib/neighbors.coffee: -------------------------------------------------------------------------------- 1 | # This module manages the display of site flags representing 2 | # fetched sitemaps stored in the neighborhood. It progresses 3 | # through a series of states which, when attached to the flags, 4 | # cause them to animate as an indication of work in progress. 5 | 6 | link = require './link' 7 | wiki = require './wiki' 8 | neighborhood = require './neighborhood' 9 | 10 | sites = null 11 | totalPages = 0 12 | 13 | 14 | flag = (site) -> 15 | # status class progression: .wait, .fetch, .fail or .done 16 | console.log 'neighbor - flag' 17 | """ 18 | 19 |
    20 | 21 |
    22 |
    23 | """ 24 | 25 | inject = (neighborhood) -> 26 | sites = neighborhood.sites 27 | 28 | bind = -> 29 | $neighborhood = $('.neighborhood') 30 | $('body') 31 | .on 'new-neighbor', (e, site) -> 32 | $neighborhood.append flag site 33 | .on 'new-neighbor-done', (e, site) -> 34 | pageCount = sites[site].sitemap.length 35 | img = $(""".neighborhood .neighbor[data-site="#{site}"]""").find('img') 36 | img.attr('title', "#{site}\n #{pageCount} pages") 37 | totalPages += pageCount 38 | $('.searchbox .pages').text "#{totalPages} pages" 39 | .delegate '.neighbor img', 'click', (e) -> 40 | # add handling refreshing neighbor that has failed 41 | if $(e.target).parent().hasClass('fail') 42 | $(e.target).parent().removeClass('fail').addClass('wait') 43 | site = $(e.target).attr('title') 44 | wiki.site(site).refresh () -> 45 | console.log 'about to retry neighbor' 46 | neighborhood.retryNeighbor(site) 47 | else 48 | link.doInternalLink 'welcome-visitors', null, @.title.split("\n")[0] 49 | 50 | module.exports = {inject, bind} 51 | -------------------------------------------------------------------------------- /lib/page.coffee: -------------------------------------------------------------------------------- 1 | # Page provides a factory for pageObjects, a model that combines 2 | # the json derrived object and the site from which it came. 3 | 4 | 5 | formatDate = require('./util').formatDate 6 | random = require './random' 7 | revision = require './revision' 8 | synopsis = require './synopsis' 9 | _ = require 'underscore' 10 | 11 | # http://pragprog.com/magazines/2011-08/decouple-your-apps-with-eventdriven-coffeescript 12 | {EventEmitter} = require 'events' 13 | pageEmitter = new EventEmitter 14 | 15 | 16 | # TODO: better home for asSlug 17 | asSlug = (name) -> 18 | name.replace(/\s/g, '-').replace(/[^A-Za-z0-9-]/g, '').toLowerCase() 19 | 20 | asTitle = (slug) -> 21 | slug.replace(/-/g, ' ') 22 | 23 | nowSections = (now) -> 24 | [ 25 | {symbol: '❄', date: now-1000*60*60*24*366, period: 'a Year'} 26 | {symbol: '⚘', date: now-1000*60*60*24*31*3, period: 'a Season'} 27 | {symbol: '⚪', date: now-1000*60*60*24*31, period: 'a Month'} 28 | {symbol: '☽', date: now-1000*60*60*24*7, period: 'a Week'} 29 | {symbol: '☀', date: now-1000*60*60*24, period: 'a Day'} 30 | {symbol: '⌚', date: now-1000*60*60, period: 'an Hour'} 31 | ] 32 | 33 | newPage = (json, site) -> 34 | page = json || {} 35 | page.title ||= 'empty' 36 | page.story ||= [] 37 | page.journal ||= [] 38 | 39 | getRawPage = -> 40 | page 41 | 42 | getContext = -> 43 | context = ['view'] 44 | context.push site if isRemote() 45 | addContext = (site) -> context.push site if site? and not _.include context, site 46 | addContext action?.site for action in page.journal.slice(0).reverse() 47 | context 48 | 49 | isPlugin = -> 50 | page.plugin? 51 | 52 | isRemote = -> 53 | ! (site in [undefined, null, 'view', 'origin', 'local', 'recycler']) 54 | 55 | isLocal = -> 56 | site == 'local' 57 | 58 | isRecycler = -> 59 | site == 'recycler' 60 | 61 | getRemoteSite = (host = null) -> 62 | if isRemote() then site else host 63 | 64 | getRemoteSiteDetails = (host = null) -> 65 | result = [] 66 | result.push(getRemoteSite host) if host or isRemote() 67 | result.push("#{page.plugin} plugin") if isPlugin() 68 | result.join "\n" 69 | 70 | getSlug = -> 71 | asSlug page.title 72 | 73 | getNeighbors = (host) -> 74 | neighbors = [] 75 | if isRemote() 76 | neighbors.push site 77 | else 78 | neighbors.push host if host? 
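# sites mentioned by story items and journal actions are gathered next and the
# combined list is de-duplicated with _.uniq before being returned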
79 | for item in page.story 80 | neighbors.push item.site if item?.site? 81 | for action in page.journal 82 | neighbors.push action.site if action?.site? 83 | _.uniq neighbors 84 | 85 | getTitle = -> 86 | page.title 87 | 88 | setTitle = (title) -> 89 | page.title = title 90 | 91 | getRevision = -> 92 | page.journal.length-1 93 | 94 | getDate = -> 95 | action = page.journal[getRevision()] 96 | if action? 97 | if action.date? 98 | return action.date 99 | return undefined 100 | 101 | getTimestamp = -> 102 | action = page.journal[getRevision()] 103 | if action? 104 | if action.date? 105 | formatDate(action.date) 106 | else 107 | "Revision #{getRevision()}" 108 | else 109 | "Unrecorded Date" 110 | 111 | getSynopsis = -> 112 | synopsis page 113 | 114 | addItem = (item) -> 115 | item = _.extend {}, {id: random.itemId()}, item 116 | page.story.push item 117 | 118 | getItem = (id) -> 119 | for item in page.story 120 | return item if item.id is id 121 | return null 122 | 123 | seqItems = (each) -> 124 | promise = new Promise (resolve, _reject) -> 125 | emitItem = (i) -> 126 | return resolve() if i >= page.story.length 127 | each page.story[i]||{text:'null'}, -> emitItem i+1 128 | emitItem 0 129 | return promise 130 | 131 | addParagraph = (text) -> 132 | type = "paragraph" 133 | addItem {type, text} 134 | 135 | # page.journal.push {type: 'add'} 136 | 137 | seqActions = (each) -> 138 | smaller = 0 139 | sections = nowSections (new Date).getTime() 140 | emitAction = (i) -> 141 | return if i >= page.journal.length 142 | action = page.journal[i]||{} 143 | bigger = action.date || 0 144 | separator = null 145 | for section in sections 146 | if section.date > smaller and section.date < bigger 147 | separator = section 148 | smaller = bigger 149 | each {action, separator}, -> emitAction i+1 150 | emitAction 0 151 | 152 | become = (story, journal) -> 153 | page.story = story?.getRawPage().story || [] 154 | page.journal = journal?.getRawPage().journal if journal? 
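# A minimal usage sketch, not part of page.coffee itself: hypothetical json
# and site values showing how the pageObject accessors above compose.
# Assumes the module is required from a sibling lib file as './page'.

{newPage, asSlug} = require './page'

json =
  title: 'Example Page'
  story: [ {type: 'paragraph', id: 'a1b2c3d4e5f60708', text: 'Hello [[Welcome Visitors]]'} ]
  journal: []

pageObject = newPage json, 'example.wiki.org'
console.log pageObject.getSlug()      # 'example-page'
console.log pageObject.isRemote()     # true, the site is not a local alias like 'view' or 'local'
pageObject.addParagraph 'A second paragraph.'
console.log asSlug 'Federated Wiki'   # 'federated-wiki'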
155 | 156 | siteLineup = -> 157 | slug = getSlug() 158 | path = if slug == 'welcome-visitors' 159 | "view/welcome-visitors" 160 | else 161 | "view/welcome-visitors/view/#{slug}" 162 | if isRemote() 163 | link = wiki.site(site).getDirectURL(path) 164 | if link.protocol is "hyper:" 165 | "hyper://#{site}/##{path}" 166 | else 167 | link 168 | else 169 | if window.location.protocol is "hyper:" 170 | "/##{path}" 171 | else 172 | "/#{path}" 173 | 174 | notDuplicate = (journal, action) -> 175 | for each in journal 176 | if each.id == action.id and each.date == action.date 177 | return false 178 | true 179 | 180 | merge = (update) -> 181 | merged = (action for action in page.journal) 182 | for action in update.getRawPage().journal 183 | merged.push action if notDuplicate(page.journal, action) 184 | merged.push 185 | type: 'fork' 186 | site: update.getRemoteSite() 187 | date: (new Date()).getTime() 188 | newPage revision.create(999, {title: page.title, journal: merged}), site 189 | 190 | apply = (action) -> 191 | revision.apply page, action 192 | site = null if action.site 193 | 194 | getCreate = () -> 195 | isCreate = (action) -> action.type == 'create' 196 | page.journal.reverse().find(isCreate) 197 | 198 | {getRawPage, getContext, isPlugin, isRemote, isLocal, isRecycler, getRemoteSite, getRemoteSiteDetails, getSlug, getNeighbors, getTitle, setTitle, getRevision, getDate, getTimestamp, getSynopsis, addItem, getItem, addParagraph, seqItems, seqActions, become, siteLineup, merge, apply, getCreate} 199 | 200 | module.exports = {newPage, asSlug, asTitle, pageEmitter} 201 | -------------------------------------------------------------------------------- /lib/paragraph.coffee: -------------------------------------------------------------------------------- 1 | # The Paragraph plugin holds text that can be edited and rendered 2 | # with hyperlinks. It will eventually escape html tags but for the 3 | # moment we live dangerously. 4 | 5 | editor = require './editor' 6 | resolve = require './resolve' 7 | itemz = require './itemz' 8 | 9 | type = (text) -> 10 | if text.match /<(i|b|p|a|h\d|hr|br|li|img|div|span|table|blockquote)\b.*?>/i 11 | 'html' 12 | else 13 | 'markdown' 14 | 15 | emit = ($item, item) -> 16 | for text in item.text.split /\n\n+/ 17 | $item.append "

    <p>#{resolve.resolveLinks(text)}</p>
    " if text.match /\S/ 18 | 19 | bind = ($item, item) -> 20 | $item.dblclick (e) -> 21 | if e.shiftKey 22 | item.type = type(item.text) 23 | itemz.replaceItem $item, 'paragraph', item 24 | else 25 | editor.textEditor $item, item, {'append': true} 26 | 27 | module.exports = {emit, bind} 28 | -------------------------------------------------------------------------------- /lib/plugin.coffee: -------------------------------------------------------------------------------- 1 | # The plugin module manages the dynamic retrieval of plugin 2 | # javascript including additional scripts that may be requested. 3 | ### 4 | forward = require './forward' 5 | ### 6 | 7 | module.exports = plugin = {} 8 | 9 | pluginDats = {} 10 | 11 | escape = (s) -> 12 | (''+s) 13 | .replace(/&/g, '&') 14 | .replace(//g, '>') 16 | .replace(/"/g, '"') 17 | .replace(/'/g, ''') 18 | .replace(/\//g,'/') 19 | 20 | # define loadScript that allows fetching a script. 21 | # see example in http://api.jquery.com/jQuery.getScript/ 22 | 23 | loadScript = (url, options) -> 24 | console.log("loading url:", url) 25 | options = $.extend(options or {}, 26 | dataType: "script" 27 | cache: true 28 | url: url 29 | ) 30 | $.ajax options 31 | 32 | scripts = [] 33 | loadingScripts = {} 34 | getScript = plugin.getScript = (url, callback = () ->) -> 35 | if url in scripts 36 | callback() 37 | else 38 | loadScript url 39 | .done -> 40 | scripts.push url 41 | callback() 42 | .fail (_jqXHR, _textStatus, err) -> 43 | console.log('getScript: Failed to load:', url, err) 44 | callback() 45 | 46 | plugin.renderFrom = (notifIndex) -> 47 | $items = $(".item").slice(notifIndex) 48 | 49 | console.log "notifIndex", notifIndex, "about to render", $items.toArray() 50 | promise = Promise.resolve() 51 | emitNextItem = (itemElems) -> 52 | return promise if itemElems.length == 0 53 | itemElem = itemElems.shift() 54 | $item = $(itemElem) 55 | unless $item.hasClass('textEditing') 56 | item = $item.data('item') 57 | promise = promise.then -> 58 | return new Promise (resolve, reject) -> 59 | $item.off() 60 | plugin.emit $item.empty(), item, () -> 61 | resolve() 62 | emitNextItem(itemElems) 63 | # The concat here makes a copy since we need to loop through the same 64 | # items to do a bind. 65 | promise = emitNextItem $items.toArray() 66 | # Binds must be called sequentially in order to store the promises used to order bind operations. 67 | # Note: The bind promises used here are for ordering "bind creation". 68 | # The ordering of "bind results" is done within the plugin.bind wrapper. 69 | promise = promise.then -> 70 | promise = Promise.resolve() 71 | bindNextItem = (itemElems) -> 72 | return promise if itemElems.length == 0 73 | itemElem = itemElems.shift() 74 | $item = $(itemElem) 75 | item = $item.data('item') 76 | promise = promise.then -> 77 | return new Promise (resolve, reject) -> 78 | plugin.getPlugin item.type, (plugin) -> 79 | plugin.bind $item, item 80 | resolve() 81 | bindNextItem(itemElems) 82 | bindNextItem($items.toArray()) 83 | return promise 84 | 85 | emit = (pluginEmit) -> 86 | fn = ($item, item) -> 87 | $item.addClass('emit') 88 | pluginEmit($item, item) 89 | fn 90 | 91 | bind = (name, pluginBind) -> 92 | fn = ($item, item, oldIndex) -> 93 | # Clear out any list of consumed items. 94 | $item[0].consuming = [] 95 | index = $('.item').index($item) 96 | consumes = window.plugins[name].consumes 97 | waitFor = Promise.resolve() 98 | # Wait for all items in the lineup that produce what we consume 99 | # before calling our bind method. 
100 | if consumes 101 | deps = [] 102 | consumes.forEach (consuming) -> 103 | producers = $(".item:lt(#{index})").filter(consuming) 104 | console.log(name, "consumes", consuming) 105 | console.log(producers, "produce", consuming) 106 | if not producers or producers.length == 0 107 | console.log 'warn: no items in lineup that produces', consuming 108 | console.log("there are #{producers.length} instances of #{consuming}") 109 | producers.each (_i, el) -> 110 | page_key = $(el).parents('.page').data('key') 111 | item_id = $(el).attr('data-id') 112 | $item[0].consuming.push("#{page_key}/#{item_id}") 113 | deps.push(el.promise) 114 | waitFor = Promise.all(deps) 115 | waitFor 116 | .then -> 117 | $item.removeClass('emit') 118 | bindPromise = pluginBind($item, item) 119 | if not bindPromise or typeof(bindPromise.then) == 'function' 120 | bindPromise = Promise.resolve(bindPromise) 121 | # This is where the "bind results" promise for the current item is stored 122 | $item[0].promise = bindPromise 123 | ### this is definally not needed... no server, but... 124 | .then -> 125 | # If the plugin has the needed callback, subscribe to server side events 126 | # for the current page 127 | if window.plugins[name].processServerEvent 128 | console.log 'listening for server events', $item, item 129 | forward.init $item, item, window.plugins[name].processServerEvent 130 | ### 131 | .catch (e) -> 132 | console.log 'plugin emit: unexpected error', e 133 | return fn 134 | 135 | plugin.wrap = (name, p) -> 136 | p.emit = emit(p.emit) 137 | p.bind = bind(name, p.bind) 138 | return p 139 | 140 | plugin.get = plugin.getPlugin = (name, callback) -> 141 | return loadingScripts[name].then(callback) if loadingScripts[name] 142 | loadingScripts[name] = new Promise (resolve, _reject) -> 143 | return resolve(window.plugins[name]) if window.plugins[name] 144 | # create plugin url using pluginRoutes 145 | pluginScriptUrl = "#{wiki.pluginRoutes[name]}/client/#{name}.js" 146 | getScript pluginScriptUrl, () -> 147 | p = window.plugins[name] 148 | if p 149 | plugin.wrap(name, p) 150 | return resolve(p) 151 | loadingScripts[name].then (plugin) -> 152 | delete loadingScripts[name] 153 | return callback(plugin) 154 | return loadingScripts[name] 155 | 156 | 157 | plugin.do = plugin.doPlugin = ($item, item, done=->) -> 158 | $item.data('item', item) 159 | promise = plugin.renderFrom $('.item').index($item) 160 | promise.then -> 161 | done() 162 | 163 | plugin.emit = (div, item, done=->) -> 164 | error = (ex, script) -> 165 | div.append """ 166 |
    167 | #{escape item.text || ""} 168 |
    169 |
    170 | """ 171 | if item.text? 172 | div.find('.error').dblclick (e) -> 173 | wiki.textEditor div, item 174 | div.find('button').on 'click', -> 175 | wiki.dialog ex.toString(), """ 176 |

    This "#{item.type}" plugin won't show.

    177 |
  • Is it available on this server? 178 |
  • Is its markup correct? 179 |
  • Can it find necessary data? 180 |
  • Has network access been interrupted? 181 |
  • Has its code been tested? 182 |

    Developers may open debugging tools and retry the plugin.

    183 | 184 |

    Learn more 185 | 188 | About Plugins 189 | 190 | 191 |

    192 | """ 193 | $('.retry').on 'click', -> 194 | if script.emit.length > 2 195 | script.emit div, item, -> 196 | done() 197 | else 198 | script.emit div, item 199 | done() 200 | 201 | div.data 'pageElement', div.parents(".page") 202 | div.data 'item', item 203 | plugin.get item.type, (script) -> 204 | try 205 | throw TypeError("Can't find plugin for '#{item.type}'") unless script? 206 | if script.emit.length > 2 207 | script.emit div, item, -> 208 | script.bind div, item if bind 209 | done() 210 | else 211 | script.emit div, item 212 | done() 213 | catch err 214 | console.log 'plugin error', err 215 | error(err, script) 216 | done() 217 | 218 | plugin.registerPlugin = (pluginName,pluginFn)-> 219 | window.plugins[pluginName] = pluginFn($) 220 | -------------------------------------------------------------------------------- /lib/plugins.coffee: -------------------------------------------------------------------------------- 1 | # This module preloads the plugins directory with a few 2 | # plugins that we can't live without. They will be 3 | # browserified along with the rest of the core javascript. 4 | plugin = require './plugin' 5 | 6 | window.plugins = 7 | reference: plugin.wrap('reference', require './reference') 8 | factory: plugin.wrap('factory', require './factory') 9 | paragraph: plugin.wrap('paragraph', require './paragraph') 10 | image: plugin.wrap('image', require './image') 11 | future: plugin.wrap('future', require './future') 12 | importer: plugin.wrap('importer', require './importer') 13 | -------------------------------------------------------------------------------- /lib/random.coffee: -------------------------------------------------------------------------------- 1 | # We create strings of hexidecimal digits representing a 2 | # given number of random bytes. We use short strings for 3 | # cache busting, medium strings for keys linking dom to 4 | # model, and, longer still strings for lifetime identity 5 | # of story elements. 6 | 7 | randomByte = -> 8 | (((1+Math.random())*0x100)|0).toString(16).substring(1) 9 | 10 | randomBytes = (n) -> 11 | (randomByte() for [1..n]).join('') 12 | 13 | itemId = -> 14 | randomBytes 8 15 | 16 | module.exports = {randomByte, randomBytes, itemId} -------------------------------------------------------------------------------- /lib/reference.coffee: -------------------------------------------------------------------------------- 1 | # The Reference plugin holds a site and page name to be 2 | # found on that site. Search, for example, produces a page of 3 | # references. Double click will edit the body of a reference 4 | # but not the name and site. 5 | 6 | editor = require './editor' 7 | resolve = require './resolve' 8 | page = require './page' 9 | 10 | # see http://fed.wiki.org/about-reference-plugin.html 11 | 12 | emit = ($item, item) -> 13 | slug = item.slug 14 | slug ||= page.asSlug item.title if item.title? 15 | slug ||= 'welcome-visitors' 16 | site = item.site 17 | resolve.resolveFrom site, -> 18 | $item.append """ 19 |

    20 | 26 | #{resolve.resolveLinks "[[#{item.title or slug}]]"} 27 | — 28 | #{resolve.resolveLinks(item.text)} 29 |

    30 | """ 31 | bind = ($item, item) -> 32 | $item.dblclick -> editor.textEditor $item, item 33 | 34 | module.exports = {emit, bind} 35 | -------------------------------------------------------------------------------- /lib/resolve.coffee: -------------------------------------------------------------------------------- 1 | # The function resolveLinks converts link markup to html syntax. 2 | # The html will have a search path (the resolutionContext) encoded 3 | # into the title of tags it generates. 4 | 5 | asSlug = require('./page').asSlug 6 | 7 | module.exports = resolve = {} 8 | 9 | resolve.resolutionContext = [] 10 | 11 | resolve.escape = escape = (string) -> 12 | (string||'') 13 | .replace(/&/g, '&') 14 | .replace(//g, '>') 16 | 17 | resolve.resolveFrom = (addition, callback) -> 18 | resolve.resolutionContext.push addition 19 | try 20 | callback() 21 | finally 22 | resolve.resolutionContext.pop() 23 | 24 | # resolveLinks takes a second argument which is a substitute text sanitizer. 25 | # Plugins that do their own markup should insert themselves here but they 26 | # must escape html as part of their processing. Sanitizers must pass markers〖12〗. 27 | 28 | resolve.resolveLinks = (string, sanitize=escape) -> 29 | stashed = [] 30 | 31 | stash = (text) -> 32 | here = stashed.length 33 | stashed.push text 34 | "〖#{here}〗" 35 | 36 | unstash = (match, digits) -> 37 | stashed[+digits] 38 | 39 | internal = (match, name) -> 40 | slug = asSlug name 41 | if slug.length 42 | stash """#{escape name}""" 43 | else 44 | match 45 | 46 | external = (match, href, protocol, rest) -> 47 | stash """#{escape rest}""" 48 | 49 | # markup conversion happens in four phases: 50 | # - unexpected markers are adulterated 51 | # - links are found, converted, and stashed away properly escaped 52 | # - remaining text is sanitized and/or escaped 53 | # - unique markers are replaced with unstashed links 54 | 55 | string = (string||'') 56 | .replace /〖(\d+)〗/g, "〖 $1 〗" 57 | .replace /\[\[([^\]]+)\]\]/gi, internal 58 | .replace /\[((http|https|ftp|dat|hyper):.*?) (.*?)\]/gi, external 59 | sanitize string 60 | .replace /〖(\d+)〗/g, unstash 61 | -------------------------------------------------------------------------------- /lib/revision.coffee: -------------------------------------------------------------------------------- 1 | # This module interprets journal actions in order to update 2 | # a story or even regenerate a complete story from some or 3 | # all of a journal. 4 | 5 | apply = (page, action) -> 6 | 7 | order = -> 8 | (item?.id for item in page.story||[]) 9 | 10 | add = (after, item) -> 11 | index = order().indexOf(after) + 1 12 | page.story.splice(index, 0, item) 13 | 14 | remove = -> 15 | if (index = order().indexOf action.id) != -1 16 | page.story.splice(index,1) 17 | 18 | page.story ||= [] 19 | 20 | switch action.type 21 | when 'create' 22 | if action.item? 23 | page.title = action.item.title if action.item.title? 24 | page.story = action.item.story.slice() if action.item.story? 
25 | when 'add' 26 | add action.after, action.item 27 | when 'edit' 28 | if (index = order().indexOf action.id) != -1 29 | page.story.splice(index,1,action.item) 30 | else 31 | page.story.push action.item 32 | when 'move' 33 | # construct relative addresses from absolute order 34 | index = action.order.indexOf action.id 35 | after = action.order[index-1] 36 | item = page.story[order().indexOf action.id] 37 | remove() 38 | add after, item 39 | when 'remove' 40 | remove() 41 | 42 | page.journal ||= [] 43 | page.journal.push action 44 | 45 | create = (revIndex, data) -> 46 | revIndex = +revIndex 47 | revJournal = data.journal[0..revIndex] 48 | revPage = {title: data.title, story: []} 49 | for action in revJournal 50 | apply revPage, action||{} 51 | return revPage 52 | 53 | module.exports = {create, apply} -------------------------------------------------------------------------------- /lib/search.coffee: -------------------------------------------------------------------------------- 1 | # The search module invokes neighborhood's query function, 2 | # formats the results as story items, and then opens a 3 | # page to present them. 4 | 5 | pageHandler = require './pageHandler' 6 | random = require './random' 7 | link = require './link' 8 | active = require './active' 9 | newPage = require('./page').newPage 10 | resolve = require './resolve' 11 | page = require './page' 12 | 13 | # from: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Regular_Expressions 14 | escapeRegExp = (string) -> 15 | string.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') 16 | 17 | # From reference.coffee 18 | emit = ($item, item) -> 19 | slug = item.slug 20 | slug ||= page.asSlug item.title if item.title? 21 | slug ||= 'welcome-visitors' 22 | site = item.site 23 | resolve.resolveFrom site, -> 24 | $item.append """ 25 |

    26 | 32 | #{resolve.resolveLinks "[[#{item.title or slug}]]"} 33 | — 34 | #{resolve.resolveLinks(item.text)} 35 |

    36 | """ 37 | finishClick = (e, name) -> 38 | e.preventDefault() 39 | page = $(e.target).parents('.page') unless e.shiftKey 40 | link.doInternalLink name, page, $(e.target).data('site') 41 | return false 42 | 43 | createSearch = ({neighborhood})-> 44 | incrementalSearch = (searchQuery)-> 45 | if searchQuery.length < 2 46 | $('.incremental-search').remove() 47 | return 48 | if $('.incremental-search').length == 0 49 | offset = $('.searchbox').position() 50 | $('
    ') 51 | .css('left', "#{offset.left}px") 52 | .css('bottom', "#{offset.top + $('.searchbox').height()}px") 53 | .addClass('incremental-search') 54 | .delegate '.internal', 'click', (e) -> 55 | e.target = $(e.target).parent()[0] if e.target.nodeName == 'SPAN' 56 | name = $(e.target).data 'pageName' 57 | # ensure that name is a string (using string interpolation) 58 | name = "#{name}" 59 | pageHandler.context = $(e.target).attr('title').split(' => ') 60 | finishClick e, name 61 | 62 | .delegate 'img.remote', 'click', (e) -> 63 | # expand to handle click on temporary flag 64 | if $(e.target).attr('src').startsWith('data:image/png') 65 | e.preventDefault() 66 | site = $(e.target).data('site') 67 | wiki.site(site).refresh () -> 68 | # empty function... 69 | else 70 | name = $(e.target).data('slug') 71 | pageHandler.context = [$(e.target).data('site')] 72 | finishClick e, name 73 | .appendTo($('.searchbox')) 74 | 75 | searchResults = neighborhood.search(searchQuery) 76 | searchTerms = searchQuery.split(' ') 77 | .map (t) -> 78 | return t.toLowerCase() 79 | .filter(String) 80 | searchHighlightRegExp = new RegExp("\\b(" + searchQuery.split(' ') 81 | .map (t) -> 82 | return t.replace(/[.*+?^${}()|[\]\\]/g, '\\$&') 83 | .filter(String) 84 | .join('|') + ")", 'i') 85 | highlightText = (text) -> 86 | text.split(searchHighlightRegExp) 87 | .map (p) -> 88 | if searchTerms.includes p.toLowerCase() 89 | return "{{#{p}}}" 90 | else return p 91 | .join('') 92 | $search = $('.incremental-search').empty() 93 | if !searchResults.finds || searchResults.finds.length == 0 94 | $('
    ').text('No results found').addClass('no-results').appendTo($search) 95 | count = 0 96 | max_results = 100 97 | for result in searchResults.finds 98 | count += 1 99 | if count == max_results + 1 100 | $('
    ').text("#{searchResults.finds.length - max_results} results omitted").addClass('omitted-results').appendTo($search) 101 | if count > max_results 102 | continue 103 | $item = $('
    ').appendTo($search) 104 | item = 105 | id: random.itemId(), 106 | type: "reference" 107 | site: result.site, 108 | slug: result.page.slug, 109 | title: highlightText(result.page.title) 110 | text: highlightText(result.page.synopsis) 111 | emit($item, item) 112 | $item.html($item.html() 113 | .split new RegExp("(\{\{.*?\}\})", 'i') 114 | .map (p) -> 115 | if (p.indexOf '{{') == 0 116 | return "#{p.substring(2, p.length - 2)}" 117 | else return p 118 | .join '' 119 | ) 120 | 121 | performSearch = (searchQuery)-> 122 | searchResults = neighborhood.search(searchQuery) 123 | if searchResults.finds && searchResults.finds.length == 1 124 | $('.incremental-search').find('.internal').click() 125 | $('.incremental-search').remove() 126 | return 127 | $('.incremental-search').remove() 128 | tally = searchResults.tally 129 | resultPage = newPage() 130 | resultPage.setTitle "Search for '#{searchQuery}'" 131 | resultPage.addParagraph """ 132 | String '#{searchQuery}' found on #{tally.finds||'none'} of #{tally.pages||'no'} pages from #{tally.sites||'no'} sites. 133 | Text matched on #{tally.title||'no'} titles, #{tally.text||'no'} paragraphs, and #{tally.slug||'no'} slugs. 134 | Elapsed time #{tally.msec} milliseconds. 135 | """ 136 | for result in searchResults.finds 137 | resultPage.addItem 138 | "type": "reference" 139 | "site": result.site 140 | "slug": result.page.slug 141 | "title": result.page.title 142 | "text": result.page.synopsis || '' 143 | 144 | link.showResult resultPage 145 | 146 | 147 | { 148 | incrementalSearch 149 | performSearch 150 | } 151 | module.exports = createSearch 152 | -------------------------------------------------------------------------------- /lib/searchbox.coffee: -------------------------------------------------------------------------------- 1 | # Handle input events from the search box. There is machinery 2 | # here that anticipates incremental search that is yet to be coded. 3 | # We use dependency injection to break dependency loops. 4 | 5 | createSearch = require './search' 6 | 7 | search = null 8 | 9 | inject = (neighborhood) -> 10 | search = createSearch({neighborhood}) 11 | 12 | bind = -> 13 | $('input.search').attr('autocomplete', 'off') 14 | $('input.search').on 'keydown', (e)-> 15 | if e.keyCode == 27 16 | $('.incremental-search').remove() 17 | $('input.search').on 'keypress', (e)-> 18 | return if e.keyCode != 13 # 13 == return 19 | searchQuery = $(this).val() 20 | search.performSearch( searchQuery ) 21 | $(this).val("") 22 | 23 | $('input.search').on 'focus', (e)-> 24 | searchQuery = $(this).val() 25 | search.incrementalSearch( searchQuery ) 26 | 27 | $('input.search').on 'input', (e)-> 28 | searchQuery = $(this).val() 29 | search.incrementalSearch( searchQuery ) 30 | 31 | module.exports = {inject, bind} 32 | -------------------------------------------------------------------------------- /lib/security.coffee: -------------------------------------------------------------------------------- 1 | 2 | module.exports = security = {} 3 | 4 | # make use of plugin getScript to load the security plugin's client code 5 | plugin = require './plugin' 6 | state = require './state' 7 | lineup = require './lineup' 8 | refresh = require './refresh' 9 | 10 | module.exports = (user) -> 11 | 12 | # plugin.getScript "/security/security.js", () -> 13 | # window.plugins.security.setup(user) 14 | 15 | # not the right place for this, atleast if this wasn't just an experiment... 
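# The startup code below rebuilds the lineup from the location hash, which
# alternates site and page segments. A small sketch of that convention with a
# hypothetical hash; state.urlPages and state.urlLocs in lib/state.coffee do
# the real parsing with exactly this stepping.

hash = 'view/welcome-visitors/example.wiki.org/some-page'
segments = ('/' + hash).split('/')
pages = (i for i in segments by 2)[1..]   # ['welcome-visitors', 'some-page']
sites = (j for j in segments[1..] by 2)   # ['view', 'example.wiki.org']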
16 | 17 | startupHash = $(location).attr('hash') 18 | 19 | if startupHash is '' 20 | $("section.main").html("
    ") 21 | else 22 | hashPages = state.urlPages() 23 | hashSites = state.urlLocs() 24 | 25 | mainContent = "" 26 | 27 | for hashPage, idx in hashPages 28 | if hashSites[idx] is "view" 29 | mainContent += "
    " 30 | else 31 | mainContent += "
    " 32 | 33 | $("section.main").html(mainContent) 34 | 35 | wikiOrigin = window.location.origin 36 | archive = beaker.hyperdrive.drive(wikiOrigin) 37 | 38 | archiveInfo = await archive.getInfo() 39 | 40 | if archiveInfo.writable 41 | window.isAuthenticated = true 42 | window.isOwner = true 43 | $('.editEnable').toggle() 44 | 45 | data = await archive.readFile('/wiki.json') 46 | wikiDetails = JSON.parse(data) 47 | $("#site-owner").html("Site Owned by: #{wikiDetails['author']}") 48 | -------------------------------------------------------------------------------- /lib/siteindexHandler.coffee: -------------------------------------------------------------------------------- 1 | # The site-index holds a text index of the pages in a wiki site. 2 | # Here we handle the creation of the site index, if it is missing, and 3 | # update it as wiki pages are added, editted, and removed. 4 | 5 | miniSearch = require 'minisearch' 6 | 7 | module.exports = siteindexHandler = {} 8 | 9 | extractPageText = (pageText, currentItem, currentIndex, array) -> 10 | try 11 | switch currentItem.type 12 | when 'paragraph' 13 | pageText += ' ' + currentItem.text.replace /\[{1,2}|\]{1,2}/g, '' 14 | when 'markdown' 15 | # really need to extract text from the markdown, but for now just remove link brackets... 16 | pageText += ' ' + currentItem.text.replace /\[{1,2}|\]{1,2}/g, '' 17 | when 'html' 18 | pageText += ' ' + currentItem.text.replace /<[^>]*>/g, '' 19 | else 20 | if currentItem.text? 21 | for line in currentItem.text.split /\r\n?|\n/ 22 | pageText += ' ' + line.replace /\[{1,2}|\]{1,2}/g, '' unless line.match /^[A-Z]+[ ].*/ 23 | catch err 24 | console.log "SITE INDEX *** #{wikiName} Error extracting text from '#{currentIndex}' of #{JSON.stringify(array)}", err.message 25 | pageText 26 | 27 | buildSiteIndex = () -> 28 | # here we build the site index, if it is missing 29 | siteIndex = new miniSearch({ 30 | fields: ['title', 'content'] 31 | }) 32 | 33 | pages = await wiki.archive.readdir("/wiki", {includeStats: true}) 34 | .catch (err) -> 35 | console.log '--- Site Index - error reading wiki directory', err 36 | pages = [] 37 | 38 | pages = pages.filter (page) -> page.stat.isFile() and page.name.endsWith('.json') 39 | 40 | indexPromises = pages.map (page) -> 41 | return new Promise (resolve) -> 42 | pageJSON = await wiki.archive.readFile "/wiki/" + page.name, 'json' 43 | 44 | try 45 | pageText = pageJSON.story.reduce extractPageText, '' 46 | catch err 47 | console.log "SITE INDEX *** reduce to extract text on #{page.name} failed", err.message 48 | pageText = "" 49 | 50 | siteIndex.add { 51 | 'id': page.name.replace '.json', '' 52 | 'title': page.title 53 | 'content': pageText 54 | } 55 | resolve() 56 | Promise.all(indexPromises) 57 | .then () -> 58 | return siteIndex 59 | 60 | 61 | 62 | 63 | 64 | 65 | siteindexHandler.update = () -> 66 | # write site index to hyperdrive 67 | siteIndex = wiki.neighborhood[location.host].siteIndex 68 | 69 | await wiki.archive.writeFile("/wiki/system/site-index.json", JSON.stringify(siteIndex, null, '\t')) 70 | .then (err) -> 71 | if err 72 | console.log "site-index update failed:", reason 73 | 74 | 75 | init = () -> 76 | 77 | checkSiteIndex = () -> 78 | siteIndexUrl = "/wiki/system/site-index.json" 79 | fetch(siteIndexUrl) 80 | .then (response) -> 81 | if !response.ok 82 | throw Error(response.statusText) 83 | return response 84 | .then (response) -> 85 | return response.text() 86 | .catch (error) -> 87 | # problem with site-index, lets rebuild it, if we can. 
88 | info = await wiki.archive.getInfo() 89 | if info.writable 90 | console.log "+++ Rebuilding Missing Site Index" 91 | await buildSiteIndex() 92 | .then (newSiteIndex) -> 93 | await wiki.archive.writeFile( siteIndexUrl, JSON.stringify(newSiteIndex, null, '\t')) 94 | .catch (error) -> 95 | console.log "---- Error writing recreated site index", error 96 | return newSiteIndex 97 | else 98 | console.log "---- Site index is missing, not rebuilt as we are not the owner" 99 | return [] 100 | 101 | # check wiki has a site index, and recreate if it is missing, and we are the wiki owner 102 | await checkSiteIndex() 103 | 104 | init() -------------------------------------------------------------------------------- /lib/sitemapHandler.coffee: -------------------------------------------------------------------------------- 1 | # The sitemap holds details of the pages in a wiki site. 2 | # Here we handle the creation of the sitemap, if it is missing, and 3 | # update it as wiki pages are added, editted, and removed. 4 | 5 | synopsis = require './synopsis' 6 | 7 | module.exports = sitemapHandler = {} 8 | 9 | originSitemap = [] 10 | 11 | buildSitemap = () -> 12 | 13 | editDate = (journal) -> 14 | for action in (journal || []) by -1 15 | return action.date if action.date and action.type != 'fork' 16 | undefined 17 | 18 | extractPageInfo = (page) -> 19 | rawPageData = await wiki.archive.readFile("/wiki/" + page.name) 20 | pageJSON = JSON.parse(rawPageData) 21 | return 22 | slug: page.name.split('.')[0] 23 | title: pageJSON.title 24 | date: editDate(pageJSON.journal) 25 | synopsis: synopsis(pageJSON) 26 | 27 | try 28 | pages = await wiki.archive.readdir("/wiki", {includeStats: true}) 29 | catch error 30 | pages = [] 31 | 32 | pages = pages.filter (page) -> page.stat.isFile() and page.name.endsWith('.json') 33 | pages = pages.map (page) -> 34 | pageEntry = await extractPageInfo(page) 35 | .then (pageEntry) -> 36 | return pageEntry 37 | Promise.all(pages) 38 | .then (pages) -> 39 | return pages 40 | 41 | sitemapHandler.update = (sitemap) -> 42 | # write sitemap to dat 43 | await wiki.archive.writeFile("/wiki/system/sitemap.json", JSON.stringify(sitemap, null, '\t')) 44 | .then (err) -> 45 | if err 46 | console.log "sitemap update failed:", reason 47 | 48 | 49 | 50 | init = () -> 51 | 52 | clientOrigin = new URL(document.currentScript.src).origin 53 | wikiOrigin = window.location.origin 54 | 55 | # fetch sitemap 56 | 57 | checkSitemap = () -> 58 | sitemapUrl = '/wiki/system/sitemap.json' 59 | fetch(sitemapUrl) 60 | .then (response) -> 61 | if !response.ok 62 | throw Error(response.statusText) 63 | return response 64 | .then (response) -> 65 | return response.json() 66 | .catch (error) -> 67 | # problem with sitemap, lets rebuild it, if we can. 
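# For reference, a sketch of one sitemap entry as assembled by extractPageInfo
# above, with hypothetical values. Note that editDate walks the journal
# backwards and skips 'fork' actions, so a freshly forked page keeps the date
# of its last genuine edit.

journal = [
  {type: 'create', date: 1594000000000}
  {type: 'edit',   date: 1594425600000}
  {type: 'fork',   date: 1600000000000}
]

entry =
  slug: 'welcome-visitors'
  title: 'Welcome Visitors'
  date: 1594425600000          # what editDate(journal) would return, not the fork date
  synopsis: 'Welcome to this Federated Wiki site.'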
68 | info = await wiki.archive.getInfo() 69 | if info.writable 70 | console.log "+++ Rebuilding Missing Sitemap" 71 | await buildSitemap() 72 | .then (newsitemap) -> 73 | await wiki.archive.writeFile("/wiki/system/sitemap.json", JSON.stringify(newsitemap, null, '\t')) 74 | .catch (error) -> 75 | console.log "---- Error writing recreated sitemap", error 76 | return newsitemap 77 | else 78 | console.log "---- Sitemap is missing, not rebuilt as we are not the owner" 79 | return [] 80 | 81 | # check wiki has a sitemap, and recreate if it is missing and we are the wiki owner 82 | await checkSitemap() 83 | 84 | 85 | init() 86 | -------------------------------------------------------------------------------- /lib/state.coffee: -------------------------------------------------------------------------------- 1 | # The state module saves the .page lineup in the browser's location 2 | # bar and history. It also reconstructs that state when the browser 3 | # notifies us that the user has changed this sequence. 4 | 5 | active = require './active' 6 | lineup = require './lineup' 7 | link = null 8 | 9 | module.exports = state = {} 10 | 11 | # FUNCTIONS and HANDLERS to manage location bar and back button 12 | 13 | state.inject = (link_) -> 14 | link = link_ 15 | 16 | state.pagesInDom = -> 17 | $.makeArray $(".page").map (_, el) -> el.id 18 | 19 | state.urlPages = -> 20 | hash = "/" + $(location).attr('hash').substring(1) 21 | (i for i in hash.split('/') by 2)[1..] 22 | 23 | state.locsInDom = -> 24 | $.makeArray $(".page").map (_, el) -> 25 | $(el).data('site') or 'view' 26 | 27 | state.urlLocs = -> 28 | hash = "/" + $(location).attr('hash').substring(1) 29 | (j for j in hash.split('/')[1..] by 2) 30 | 31 | state.setUrl = -> 32 | document.title = lineup.bestTitle() 33 | if history and history.pushState 34 | locs = state.locsInDom() 35 | pages = state.pagesInDom() 36 | hash = ("#{locs?[idx] or 'view'}/#{page}" for page, idx in pages).join('/') 37 | url = "/#" + hash 38 | console.log "setUrl", url 39 | unless url is $(location).attr('pathname') + $(location).attr('hash') 40 | history.pushState(null, null, url) 41 | 42 | state.debugStates = () -> 43 | console.log 'a .page keys ', ($(each).data('key') for each in $('.page')) 44 | console.log 'a lineup keys', lineup.debugKeys() 45 | 46 | state.show = (e) -> 47 | oldPages = state.pagesInDom() 48 | newPages = state.urlPages() 49 | oldLocs = state.locsInDom() 50 | newLocs = state.urlLocs() 51 | 52 | return if (!location.pathname or location.pathname is '/') 53 | 54 | matching = true 55 | for name, idx in oldPages 56 | continue if matching and= name is newPages[idx] 57 | old = $('.page:last') 58 | lineup.removeKey old.data('key') 59 | old.remove() 60 | 61 | matching = true 62 | for name, idx in newPages 63 | continue if matching and= name is oldPages[idx] 64 | console.log 'push', idx, name 65 | link.showPage(name, newLocs[idx]) 66 | 67 | state.debugStates() 68 | 69 | active.set($('.page').last()) 70 | document.title = lineup.bestTitle() 71 | 72 | state.first = -> 73 | state.setUrl() 74 | firstUrlPages = state.urlPages() 75 | firstUrlLocs = state.urlLocs() 76 | oldPages = state.pagesInDom() 77 | for urlPage, idx in firstUrlPages when urlPage not in oldPages 78 | link.createPage(urlPage, firstUrlLocs[idx]) unless urlPage is '' 79 | -------------------------------------------------------------------------------- /lib/synopsis.coffee: -------------------------------------------------------------------------------- 1 | # The synopsis module extracts a summary from the 
json derrived 2 | # representation of a page. This might be from a "synopsys:" field, 3 | # but more likely it comes from text found in the first or second item. 4 | 5 | module.exports = (page) -> 6 | synopsis = page.synopsis 7 | if page? && page.story? 8 | p1 = page.story[0] 9 | p2 = page.story[1] 10 | synopsis ||= p1.text if p1 && p1.type == 'paragraph' 11 | synopsis ||= p2.text if p2 && p2.type == 'paragraph' 12 | synopsis ||= p1.text if p1 && p1.text? 13 | synopsis ||= p2.text if p2 && p2.text? 14 | synopsis ||= page.story? && "A page with #{page.story.length} items." 15 | else 16 | synopsis = 'A page with no story.' 17 | # discard anything after the first line break, after trimming any at beginning 18 | synopsis = synopsis.trim().split(/\r|\n/, 1)[0] 19 | return synopsis.substring(0, 560) 20 | -------------------------------------------------------------------------------- /lib/target.coffee: -------------------------------------------------------------------------------- 1 | # Target handles hovers over items and actions. Other visible 2 | # items and actions with the same id will highlight. In some cases 3 | # an event is generated inviting other pages to scroll the item 4 | # into view. Target tracks hovering even when not requested so 5 | # that highlighting can be immediate when requested. 6 | 7 | targeting = false 8 | item = null 9 | itemElem = null 10 | action = null 11 | consumed = null 12 | 13 | 14 | 15 | bind = -> 16 | $(document) 17 | .keydown (e) -> startTargeting e if e.keyCode == 16 18 | .keyup (e) -> stopTargeting e if e.keyCode == 16 19 | $('.main') 20 | .delegate '.item', 'mouseenter', enterItem 21 | .delegate '.item', 'mouseleave', leaveItem 22 | .delegate '.action', 'mouseenter', enterAction 23 | .delegate '.action', 'mouseleave', leaveAction 24 | .delegate '.page', 'align-item', alignItem 25 | 26 | 27 | startTargeting = (e) -> 28 | targeting = e.shiftKey 29 | if targeting 30 | $('.emit').addClass('highlight') 31 | if id = item || action 32 | $("[data-id=#{id}]").addClass('target') 33 | if itemElem 34 | consumed = itemElem.consuming 35 | if consumed 36 | consumed.forEach (i) -> itemFor(i).addClass('consumed') 37 | 38 | 39 | 40 | stopTargeting = (e) -> 41 | targeting = e.shiftKey 42 | unless targeting 43 | $('.emit').removeClass('highlight') 44 | $('.item, .action').removeClass 'target' 45 | $('.item').removeClass 'consumed' 46 | 47 | pageFor = (pageKey) -> 48 | $page = $('.page').filter((_i, page) => $(page).data('key') == pageKey) 49 | return null if $page.length == 0 50 | console.log('warning: more than one page found for', key, $page) if $page.length > 1 51 | return $page 52 | 53 | itemFor = (pageItem) -> 54 | [pageKey, _item] = pageItem.split('/') 55 | $page = pageFor(pageKey) 56 | return null if !$page 57 | $item = $page.find(".item[data-id=#{_item}]") 58 | return null if $item.length == 0 59 | console.log('warning: more than one item found for', pageItem, $item) if $item.length > 1 60 | return $item 61 | 62 | enterItem = (e) -> 63 | item = ($item = $(this)).attr('data-id') 64 | itemElem = $item[0] 65 | if targeting 66 | $("[data-id=#{item}]").addClass('target') 67 | key = ($page = $(this).parents('.page:first')).data('key') 68 | place = $item.offset().top 69 | $('.page').trigger('align-item', {key, id:item, place}) 70 | consumed = itemElem.consuming 71 | if consumed 72 | consumed.forEach (i) -> itemFor(i).addClass('consumed') 73 | 74 | 75 | leaveItem = (e) -> 76 | if targeting 77 | $('.item, .action').removeClass('target') 78 | 
$('.item').removeClass('consumed') 79 | item = null 80 | itemElem = null 81 | 82 | 83 | 84 | enterAction = (e) -> 85 | action = $(this).data('id') 86 | if targeting 87 | $("[data-id=#{action}]").addClass('target') 88 | key = $(this).parents('.page:first').data('key') 89 | $('.page').trigger('align-item', {key, id:action}) 90 | 91 | leaveAction = (e) -> 92 | if targeting 93 | $("[data-id=#{action}]").removeClass('target') 94 | action = null 95 | 96 | 97 | 98 | alignItem = (e, align) -> 99 | $page = $(this) 100 | return if $page.data('key') == align.key 101 | $item = $page.find(".item[data-id=#{align.id}]") 102 | return unless $item.length 103 | place = align.place || $page.height()/2 104 | offset = $item.offset().top + $page.scrollTop() - place 105 | $page.stop().animate {scrollTop: offset}, 'slow' 106 | 107 | 108 | 109 | module.exports = {bind} 110 | -------------------------------------------------------------------------------- /lib/util.coffee: -------------------------------------------------------------------------------- 1 | # This module collects various functions that might belong 2 | # better elsewhere. At one point we thought of uniformity 3 | # of representations but that hasn't been a strong influency. 4 | 5 | module.exports = util = {} 6 | 7 | 8 | # for chart plug-in 9 | util.formatTime = (time) -> 10 | d = new Date (if time > 10000000000 then time else time*1000) 11 | mo = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][d.getMonth()] 12 | h = d.getHours() 13 | am = if h < 12 then 'AM' else 'PM' 14 | h = if h == 0 then 12 else if h > 12 then h - 12 else h 15 | mi = (if d.getMinutes() < 10 then "0" else "") + d.getMinutes() 16 | "#{h}:#{mi} #{am}
    #{d.getDate()} #{mo} #{d.getFullYear()}" 17 | 18 | # for journal mouse-overs and possibly for date header 19 | util.formatDate = (msSinceEpoch) -> 20 | d = new Date(msSinceEpoch) 21 | wk = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'][d.getDay()] 22 | mo = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][d.getMonth()] 23 | day = d.getDate(); 24 | yr = d.getFullYear(); 25 | h = d.getHours() 26 | am = if h < 12 then 'AM' else 'PM' 27 | h = if h == 0 then 12 else if h > 12 then h - 12 else h 28 | mi = (if d.getMinutes() < 10 then "0" else "") + d.getMinutes() 29 | sec = (if d.getSeconds() < 10 then "0" else "") + d.getSeconds() 30 | "#{wk} #{mo} #{day}, #{yr}
    #{h}:#{mi}:#{sec} #{am}" 31 | 32 | util.formatElapsedTime = (msSinceEpoch) -> 33 | msecs = (new Date().getTime() - msSinceEpoch) 34 | return "#{Math.floor msecs} milliseconds ago" if (secs = msecs/1000) < 2 35 | return "#{Math.floor secs} seconds ago" if (mins = secs/60) < 2 36 | return "#{Math.floor mins} minutes ago" if (hrs = mins/60) < 2 37 | return "#{Math.floor hrs} hours ago" if (days = hrs/24) < 2 38 | return "#{Math.floor days} days ago" if (weeks = days/7) < 2 39 | return "#{Math.floor weeks} weeks ago" if (months = days/31) < 2 40 | return "#{Math.floor months} months ago" if (years = days/365) < 2 41 | return "#{Math.floor years} years ago" 42 | 43 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "wiki-client-dat", 3 | "version": "20.07.11", 4 | "description": "Federated Wiki - hyperdrive protocol variant of the Client-side Javascript", 5 | "keywords": [ 6 | "wiki", 7 | "federated wiki", 8 | "wiki client", 9 | "dat" 10 | ], 11 | "author": { 12 | "name": "Ward Cunningham", 13 | "email": "ward@c2.com", 14 | "url": "http://ward.fed.wiki.org" 15 | }, 16 | "contributors": [ 17 | { 18 | "name": "Nick Niemeir", 19 | "email": "nick.niemeir@gmail.com", 20 | "url": "http://nrn.io" 21 | }, 22 | { 23 | "name": "Paul Rodwell", 24 | "email": "paul.rodwell@btinternet.com", 25 | "url": "https://rodwell.me" 26 | } 27 | ], 28 | "dependencies": { 29 | "async": "^2.6.3", 30 | "localforage": "^1.7.2", 31 | "underscore": "^1.9.1" 32 | }, 33 | "scripts": { 34 | "test": "grunt mochaTest" 35 | }, 36 | "devDependencies": { 37 | "@babel/core": "^7.8.7", 38 | "@babel/preset-env": "^7.8.7", 39 | "@mapbox/sanitize-caja": "^0.1.4", 40 | "browserify-versionify": "^1.0.6", 41 | "coffeeify": "^3.0.1", 42 | "coffeescript": "^2.5.0", 43 | "expect.js": "^0.3.1", 44 | "grunt": "^1.3.0", 45 | "grunt-babel": "^8.0.0", 46 | "grunt-browserify": "^5.2.0", 47 | "grunt-contrib-clean": "^2.0.0", 48 | "grunt-contrib-uglify-es": "^3.3.0", 49 | "grunt-contrib-watch": "^1.1.0", 50 | "grunt-git-authors": "^3.2.0", 51 | "grunt-mocha-test": "^0.13.2", 52 | "grunt-retire": "^1.0.3", 53 | "marked": "^0.7.0", 54 | "minisearch": "^2.1.3", 55 | "mocha": "^7.1.1", 56 | "sinon": "^9.0.1" 57 | }, 58 | "license": "MIT", 59 | "repository": { 60 | "type": "git", 61 | "url": "https://github.com/paul90/wiki-client-dat" 62 | }, 63 | "bugs": { 64 | "url": "https://github.com/paul90/wiki-client-dat/issues" 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /scripts/call-graph.dot: -------------------------------------------------------------------------------- 1 | digraph call_graph { 2 | node [style=filled; fillcolor=white; color=white]; 3 | subgraph [style=filled; fillcolor=lightgray]; 4 | 5 | ready [shape=box]; 6 | ready -> begin; 7 | 8 | subgraph cluster_page { 9 | fillcolor=palegreen; 10 | label = "page"; 11 | newPage; 12 | become; 13 | } 14 | 15 | subgraph cluster_lineup { 16 | fillcolor=palegreen; 17 | label = "lineup"; 18 | addPage [label="add\npage"]; 19 | removeKey [label="remove\nkey"]; 20 | removeAllAfter [label="remove\nall\nafter"]; 21 | } 22 | 23 | subgraph cluster_link { 24 | label = "link"; 25 | fillcolor=palegreen; 26 | doInternalLink [label="internal\nlink"]; 27 | createPage [label="create\npage"]; 28 | doInternalLink -> createPage; 29 | 30 | } 31 | 32 | subgraph cluster_legacy { 33 | label = "legacy"; 34 | click_create_button 
[label="create\nbutton"]; 35 | begin; 36 | } 37 | begin -> first; 38 | begin -> cycle; 39 | doInternalLink -> cycle; 40 | doInternalLink -> removeAllAfter; 41 | click_create_button -> get; 42 | click_create_button -> become; 43 | click_create_button -> put; 44 | click_create_button -> rebuildPage; 45 | 46 | 47 | createPage -> page; 48 | page [shape=box] 49 | 50 | subgraph cluster_pageHandler { 51 | label = "pageHandler"; 52 | get -> trouble; put; 53 | } 54 | get -> newPage; 55 | trouble -> newPage; 56 | 57 | 58 | subgraph cluster_refresh { 59 | label = "refresh"; 60 | buildPage [label="build\npage"] 61 | rebuildPage [label="rebuild\npage"] 62 | cycle -> missing -> buildPage -> rebuildPage; 63 | } 64 | missing -> newPage; 65 | cycle -> get; 66 | cycle -> buildPage; 67 | buildPage -> addPage; 68 | rebuildPage -> story; story[shape=box]; 69 | rebuildPage -> journal; journal[shape=box]; 70 | 71 | 72 | subgraph cluster_search { 73 | label = "search"; 74 | performSearch [label="perform\nsearch"]; 75 | } 76 | performSearch -> newPage; 77 | performSearch -> createPage; 78 | performSearch -> buildPage; 79 | 80 | 81 | subgraph cluster_state { 82 | label = "state"; 83 | show; first; 84 | } 85 | show -> cycle; 86 | show -> createPage; 87 | show -> removeKey; 88 | first -> createPage; 89 | 90 | 91 | } -------------------------------------------------------------------------------- /scripts/call-sites.dot: -------------------------------------------------------------------------------- 1 | digraph { 2 | rankdir=LR; 3 | node [style=filled; fillcolor=lightBlue]; 4 | equals -> pageHandler; 5 | createFactory -> random; 6 | createFactory -> plugin; 7 | createFactory -> pageHandler; 8 | handleHeaderClick -> lineup; 9 | handleHeaderClick -> lineup; 10 | emitControls -> actionSymbols; 11 | emitControls -> actionSymbols; 12 | emitFooter -> random; 13 | emitFooter -> random; 14 | emitTwins -> asSlug; 15 | emitTwins -> neighborhood; 16 | renderPageIntoPageElement -> lineup; 17 | renderPageIntoPageElement -> lineup; 18 | renderPageIntoPageElement -> resolve; 19 | renderPageIntoPageElement -> plugin; 20 | renderPageIntoPageElement -> addToJournal; 21 | renderPageIntoPageElement -> addToJournal; 22 | createMissingFlag -> plugin; 23 | rebuildPage -> plugin; 24 | rebuildPage -> state; 25 | buildPage -> lineup; 26 | createGhostPage -> pageHandler; 27 | createGhostPage -> newPage; 28 | createGhostPage -> neighborhood; 29 | createGhostPage -> neighborhood; 30 | whenGotten -> neighborhood; 31 | whenGotten -> pageHandler; 32 | } 33 | -------------------------------------------------------------------------------- /scripts/call-sites.pl: -------------------------------------------------------------------------------- 1 | $dep = "pageHandler|plugin|state|neighborhood|addToJournal|actionSymbols|lineup|resolve|random|pageModule|newPage|asSlug"; 2 | 3 | 4 | @lines = `cat lib/refresh.coffee`; 5 | print "digraph {\nrankdir=LR;\nnode [style=filled; fillcolor=lightBlue];\n"; 6 | for (@lines) { 7 | next if /require/; 8 | next if /^#/; 9 | $from = $1 if /^\s*(\w+)\s*=\s*(\(|->)/; 10 | while (/\b($dep)\b/g) { 11 | print "$from -> $1;\n" if $from; 12 | } 13 | } 14 | print "}\n"; -------------------------------------------------------------------------------- /scripts/requires-graph.dot: -------------------------------------------------------------------------------- 1 | digraph { node [style=filled]; 2 | 3 | actionSymbols [fillcolor=paleGreen]; 4 | 5 | active [fillcolor=gold]; 6 | active [shape=box]; 7 | 8 | addToJournal 
[fillcolor=gold]; 9 | util -> addToJournal [dir=back]; 10 | actionSymbols -> addToJournal [dir=back]; 11 | addToJournal [shape=box]; 12 | 13 | bind [fillcolor=paleGreen]; 14 | neighborhood -> bind [dir=back]; 15 | neighbors -> bind [dir=back]; 16 | searchbox -> bind [dir=back]; 17 | state -> bind [dir=back]; 18 | link -> bind [dir=back]; 19 | bind [shape=box]; 20 | 21 | dialog [fillcolor=paleGreen]; 22 | resolve -> dialog [dir=back]; 23 | dialog [shape=box]; 24 | 25 | drop [fillcolor=paleGreen]; 26 | 27 | editor [fillcolor=paleGreen]; 28 | plugin -> editor [dir=back]; 29 | itemz -> editor [dir=back]; 30 | pageHandler -> editor [dir=back]; 31 | link -> editor [dir=back]; 32 | random -> editor [dir=back]; 33 | editor [shape=box]; 34 | 35 | factory [fillcolor=gold]; 36 | neighborhood -> factory [dir=back]; 37 | plugin -> factory [dir=back]; 38 | resolve -> factory [dir=back]; 39 | pageHandler -> factory [dir=back]; 40 | editor -> factory [dir=back]; 41 | synopsis -> factory [dir=back]; 42 | drop -> factory [dir=back]; 43 | factory [shape=box]; 44 | 45 | future [fillcolor=paleGreen]; 46 | resolve -> future [dir=back]; 47 | neighborhood -> future [dir=back]; 48 | future [shape=box]; 49 | 50 | image [fillcolor=paleGreen]; 51 | dialog -> image [dir=back]; 52 | editor -> image [dir=back]; 53 | resolve -> image [dir=back]; 54 | image [shape=box]; 55 | 56 | itemz [fillcolor=paleGreen]; 57 | pageHandler -> itemz [dir=back]; 58 | plugin -> itemz [dir=back]; 59 | random -> itemz [dir=back]; 60 | itemz [shape=box]; 61 | 62 | legacy [fillcolor=gold]; 63 | pageHandler -> legacy [dir=back]; 64 | state -> legacy [dir=back]; 65 | active -> legacy [dir=back]; 66 | refresh -> legacy [dir=back]; 67 | lineup -> legacy [dir=back]; 68 | drop -> legacy [dir=back]; 69 | dialog -> legacy [dir=back]; 70 | link -> legacy [dir=back]; 71 | page -> legacy [dir=back]; 72 | legacy [shape=box]; 73 | 74 | lineup [fillcolor=paleGreen]; 75 | random -> lineup [dir=back]; 76 | 77 | link [fillcolor=paleGreen]; 78 | lineup -> link [dir=back]; 79 | active -> link [dir=back]; 80 | refresh -> link [dir=back]; 81 | page -> link [dir=back]; 82 | link [shape=box]; 83 | 84 | neighborhood [fillcolor=gold]; 85 | neighborhood [shape=box]; 86 | 87 | neighbors [fillcolor=paleGreen]; 88 | link -> neighbors [dir=back]; 89 | neighbors [shape=box]; 90 | 91 | page [fillcolor=paleGreen]; 92 | util -> page [dir=back]; 93 | random -> page [dir=back]; 94 | revision -> page [dir=back]; 95 | 96 | pageHandler [fillcolor=gold]; 97 | state -> pageHandler [dir=back]; 98 | revision -> pageHandler [dir=back]; 99 | addToJournal -> pageHandler [dir=back]; 100 | page -> pageHandler [dir=back]; 101 | random -> pageHandler [dir=back]; 102 | lineup -> pageHandler [dir=back]; 103 | pageHandler [shape=box]; 104 | 105 | paragraph [fillcolor=paleGreen]; 106 | editor -> paragraph [dir=back]; 107 | resolve -> paragraph [dir=back]; 108 | paragraph [shape=box]; 109 | 110 | persona [fillcolor=gold]; 111 | persona [shape=box]; 112 | 113 | plugin [fillcolor=gold]; 114 | plugin [shape=box]; 115 | 116 | plugins [fillcolor=paleGreen]; 117 | reference -> plugins [dir=back]; 118 | factory -> plugins [dir=back]; 119 | paragraph -> plugins [dir=back]; 120 | image -> plugins [dir=back]; 121 | future -> plugins [dir=back]; 122 | 123 | random [fillcolor=paleGreen]; 124 | 125 | reference [fillcolor=gold]; 126 | editor -> reference [dir=back]; 127 | resolve -> reference [dir=back]; 128 | reference [shape=box]; 129 | 130 | refresh [fillcolor=gold]; 131 | pageHandler -> refresh [dir=back]; 
132 | plugin -> refresh [dir=back]; 133 | state -> refresh [dir=back]; 134 | neighborhood -> refresh [dir=back]; 135 | addToJournal -> refresh [dir=back]; 136 | actionSymbols -> refresh [dir=back]; 137 | lineup -> refresh [dir=back]; 138 | resolve -> refresh [dir=back]; 139 | random -> refresh [dir=back]; 140 | page -> refresh [dir=back]; 141 | refresh [shape=box]; 142 | 143 | resolve [fillcolor=paleGreen]; 144 | page -> resolve [dir=back]; 145 | 146 | revision [fillcolor=gold]; 147 | 148 | search [fillcolor=gold]; 149 | link -> search [dir=back]; 150 | active -> search [dir=back]; 151 | page -> search [dir=back]; 152 | 153 | searchbox [fillcolor=paleGreen]; 154 | search -> searchbox [dir=back]; 155 | searchbox [shape=box]; 156 | 157 | state [fillcolor=gold]; 158 | active -> state [dir=back]; 159 | lineup -> state [dir=back]; 160 | state [shape=box]; 161 | 162 | synopsis [fillcolor=gold]; 163 | 164 | util [fillcolor=gold]; 165 | } 166 | -------------------------------------------------------------------------------- /scripts/requires-graph.pl: -------------------------------------------------------------------------------- 1 | # read all source files in lib, generate graph of require dependencies 2 | # usage: perl require-graph.pl 3 | 4 | @new = qw" page lineup drop dialog link tempwiki neighbors searchbox bind plugins future image paragraph resolve itemz editor actionSymbols random "; 5 | 6 | for (<../lib/*.coffee>) { 7 | next if /wiki/; 8 | $from = $1 if /(\w+)\.coffee/; 9 | $color = $from ~~ @new ? 'paleGreen' : 'gold'; 10 | $dot .= "\n$from [fillcolor=$color];\n"; 11 | open F, $_; 12 | 13 | $jquery = 0; 14 | for () { 15 | if (/\brequire\b.+\.\/(\w+)\b/) { 16 | $dot .= "$1 -> $from [dir=back];\n"; 17 | 18 | } 19 | if (/^\s*\$/) { 20 | $jquery = 1; 21 | } 22 | } 23 | if ($jquery) { 24 | $dot .= "$from [shape=box];\n" 25 | } 26 | } 27 | 28 | # for (<../test/*.coffee>) { 29 | # $from = $1 if /(\w+)\.coffee/; 30 | # $color = 'lightBlue'; 31 | # $dot .= "\n\"test\\n$from\" [fillcolor=$color];\n"; 32 | # open F, $_; 33 | 34 | # for () { 35 | # if (/\brequire\b.+\.\.\/lib\/(\w+)\b/) { 36 | # $dot .= "$1 -> \"test\\n$from\" [dir=back];\n"; 37 | 38 | # } 39 | # } 40 | # } 41 | 42 | open D, '>requires-graph.dot'; 43 | print D "digraph { node [style=filled];\n$dot}\n"; -------------------------------------------------------------------------------- /scripts/squeeze-logic.coffee: -------------------------------------------------------------------------------- 1 | 2 | squeeze = (source) -> 3 | target = {x:500, y:300} 4 | oversize = Math.max 1, Math.min( source.x/target.x, source.y/target.y) 5 | iterations = Math.floor Math.log2 oversize 6 | prescale = oversize / 2 ** iterations 7 | console.log source, oversize, '=', prescale, '* 2 ^', iterations 8 | 9 | tests = [210, 510, 1100, 2100, 5100, 11000] 10 | for x in tests 11 | console.log '' 12 | for y in tests 13 | squeeze {x, y} -------------------------------------------------------------------------------- /scripts/squeeze-test.js: -------------------------------------------------------------------------------- 1 | // Using testcheck to validate the algorithm used for squeezing an image. 
2 | // usage: npm i testcheck; node script/squeeze-test.js 3 | 4 | const { check, gen, property } = require('testcheck') 5 | 6 | function resizeImage(startWidth, startHeight) { 7 | var cW = startWidth 8 | var cH = startHeight 9 | var tW = 500 10 | var tH = 300 11 | 12 | const smallEnough = function smallEnough(width, height) { 13 | return width <= tW || height <= tH 14 | } 15 | 16 | // determine size for first squeeze 17 | const f = 18 | cW / cH > tW / tH 19 | ? cH / tH 20 | : cW / tW 21 | 22 | // final size (we need to round as target number, as target size is not 2^n) 23 | var fW = Math.round(cW / f) 24 | var fH = Math.round(cH / f) 25 | 26 | //console.log('target', fW, fH) 27 | 28 | var squeezes = 0 29 | var x 30 | var y 31 | if (fW = tW) { 32 | x = tW 33 | y = cW 34 | } else { 35 | x = tH 36 | y = cH 37 | } 38 | do { 39 | x *= 2 40 | squeezes += 1 41 | } while (x < y) 42 | 43 | // first squeeze will be to 44 | cW = fW 45 | cH = fH 46 | for (let x = 1; x < squeezes; x++) { 47 | cW *= 2 48 | cH *= 2 49 | } 50 | //console.log('*first squeeze', startWidth, startHeight, cW, cH ) 51 | 52 | // and then squeeze until smallEnough, if not already smallEnough 53 | if (!smallEnough(cW, cH)) { 54 | do { 55 | cH /= 2 56 | cW /= 2 57 | } while (!smallEnough(cW, cH)) 58 | } 59 | 60 | //console.log(startWidth, startHeight, cW, cH) 61 | 62 | return ((cW = tW && cH >= tH) || (cH = tH && cW >= tW)) 63 | } 64 | 65 | const result = check(property([gen.intWithin(450,4096), gen.intWithin(250,4096)], (x,y) => { 66 | return resizeImage(x,y) 67 | }), {seed: 50, numTests: 1001}) 68 | 69 | console.log(result) -------------------------------------------------------------------------------- /test/active.coffee: -------------------------------------------------------------------------------- 1 | active = require '../lib/active' 2 | 3 | describe 'active', -> 4 | 5 | before -> 6 | $('
<div id="active1"></div>').appendTo('body') 7 | $('<div id="active2"></div>
    ').appendTo('body') 8 | active.set($('#active1')) 9 | 10 | it 'should detect the scroll container', -> 11 | expect(active.scrollContainer).to.be.a($) 12 | 13 | it 'should set the active div', -> 14 | active.set($('#active2')) 15 | expect($('#active2').hasClass('active')).to.be.true 16 | 17 | it 'should remove previous active class', -> 18 | expect($('#active1').hasClass('active')).to.be.false 19 | 20 | -------------------------------------------------------------------------------- /test/drop.coffee: -------------------------------------------------------------------------------- 1 | drop = require '../lib/drop' 2 | expect = require 'expect.js' 3 | 4 | # construct mock event objects 5 | 6 | signal = (mock, handler) -> 7 | handler mock 8 | 9 | mockDrop = (dataTransfer) -> 10 | preventDefault: -> 11 | stopPropagation: -> 12 | 13 | originalEvent: 14 | dataTransfer: dataTransfer 15 | 16 | mockUrl = (type, url) -> 17 | mockDrop 18 | types: [type] 19 | getData: (spec) -> url 20 | 21 | mockFile = (spec) -> 22 | mockDrop 23 | types: ['File'] 24 | files: [spec] 25 | 26 | # test the handling of mock events 27 | 28 | describe 'drop', -> 29 | 30 | it 'should handle remote pages', -> 31 | event = mockUrl 'text/uri-list', 'http://localhost:3000/fed.wiki.org/welcome-visitors' 32 | signal event, drop.dispatch 33 | page: (page) -> expect(page).to.eql({slug: 'welcome-visitors', site: 'fed.wiki.org'}) 34 | 35 | it 'should handle local pages', -> 36 | event = mockUrl 'text/uri-list', 'http://fed.wiki.org/view/welcome-visitors' 37 | signal event, drop.dispatch 38 | page: (page) -> expect(page).to.eql({slug: 'welcome-visitors', site: 'fed.wiki.org'}) 39 | 40 | it 'should handle list of pages', -> 41 | event = mockUrl 'text/uri-list', 'http://sfw.c2.com/view/welcome-visitors/view/pattern-language' 42 | signal event, drop.dispatch 43 | page: (page) -> expect(page).to.eql({slug: 'pattern-language', site: 'sfw.c2.com'}) 44 | 45 | it 'should handle a YouTube video', -> 46 | event = mockUrl 'text/uri-list', 'https://www.youtube.com/watch?v=rFpDK2KhAgw' 47 | signal event, drop.dispatch 48 | video: (video) -> expect(video).to.eql({text: 'YOUTUBE rFpDK2KhAgw'}) 49 | 50 | it 'should handle a YouTube playlist', -> 51 | event = mockUrl 'text/uri-list', 'https://www.youtube.com/watch?v=ksoe4Un7bLo&list=PLze65Ckn-WXZpRzLeUPxqsEkFY6vt2hF7' 52 | signal event, drop.dispatch 53 | video: (video) -> expect(video).to.eql({text: 'YOUTUBE PLAYLIST PLze65Ckn-WXZpRzLeUPxqsEkFY6vt2hF7'}) 54 | 55 | it 'should handle a YouTu.be video', -> 56 | event = mockUrl 'text/uri-list', 'https://youtu.be/z2p4VRKgQYU' 57 | signal event, drop.dispatch 58 | video: (video) -> expect(video).to.eql({text: 'YOUTUBE z2p4VRKgQYU'}) 59 | 60 | it 'should handle a YouTu.be playlist', -> 61 | event = mockUrl 'text/uri-list', 'https://youtu.be/pBu6cixcaxI?list=PL0LQM0SAx601_99m2E2NPsm62pKoSCnV5' 62 | signal event, drop.dispatch 63 | video: (video) -> expect(video).to.eql({text: 'YOUTUBE PLAYLIST PL0LQM0SAx601_99m2E2NPsm62pKoSCnV5'}) 64 | 65 | it 'should handle a vimeo video', -> 66 | event = mockUrl 'text/uri-list', 'https://vimeo.com/90834988' 67 | signal event, drop.dispatch 68 | video: (video) -> expect(video).to.eql({text: 'VIMEO 90834988'}) 69 | 70 | it 'should handle a archive.org video', -> 71 | event = mockUrl 'text/uri-list', 'https://archive.org/details/IcelandJazz' 72 | signal event, drop.dispatch 73 | video: (video) -> expect(video).to.eql({text: 'ARCHIVE IcelandJazz'}) 74 | 75 | it 'should handle a TEDX video', -> 76 | event = mockUrl 
'text/uri-list', 'http://tedxtalks.ted.com/video/Be-a-Daydream-Believer-Anne-Zac' 77 | signal event, drop.dispatch 78 | video: (video) -> expect(video).to.eql({text: 'TEDX Be-a-Daydream-Believer-Anne-Zac'}) 79 | 80 | it 'should handle a TED video', -> 81 | event = mockUrl 'text/uri-list', 'http://www.ted.com/talks/david_camarillo_why_helmets_don_t_prevent_concussions_and_what_might' 82 | signal event, drop.dispatch 83 | video: (video) -> expect(video).to.eql({text: 'TED david_camarillo_why_helmets_don_t_prevent_concussions_and_what_might'}) 84 | 85 | it 'should handle text file', -> 86 | file = {name: "foo.txt", type: "text/plain"} 87 | event = mockFile file 88 | signal event, drop.dispatch 89 | file: (data) -> expect(data).to.eql(file) 90 | -------------------------------------------------------------------------------- /test/lineup.coffee: -------------------------------------------------------------------------------- 1 | lineup = require('../lib/lineup') 2 | newPage = require('../lib/page').newPage 3 | expect = require 'expect.js' 4 | 5 | describe 'lineup', -> 6 | 7 | it 'should assign unique keys', -> 8 | pageObject = newPage() 9 | lineup.debugReset() 10 | key1 = lineup.addPage pageObject 11 | key2 = lineup.addPage pageObject 12 | expect(key1).to.not.equal key2 13 | 14 | it 'should preserve identity', -> 15 | pageObject = newPage() 16 | lineup.debugReset() 17 | key1 = lineup.addPage pageObject 18 | key2 = lineup.addPage pageObject 19 | expect(key1).to.not.eql null 20 | expect(lineup.atKey(key1)).to.be lineup.atKey(key2) 21 | 22 | it 'should remove a page', -> 23 | pageObject = newPage() 24 | lineup.debugReset() 25 | key1 = lineup.addPage pageObject 26 | key2 = lineup.addPage pageObject 27 | key3 = lineup.addPage pageObject 28 | result = lineup.removeKey key2 29 | expect([lineup.debugKeys(), result]).to.eql [[key1, key3], key2] 30 | 31 | it 'should remove downstream pages', -> 32 | pageObject = newPage() 33 | lineup.debugReset() 34 | key1 = lineup.addPage pageObject 35 | key2 = lineup.addPage pageObject 36 | key3 = lineup.addPage pageObject 37 | result = lineup.removeAllAfterKey key1 38 | expect([lineup.debugKeys(), result]).to.eql [[key1], [key2, key3]] 39 | 40 | describe 'crumbs', -> 41 | 42 | fromUri = (uri) -> 43 | lineup.debugReset() 44 | fields = uri.split /\// 45 | result = [] 46 | while fields.length 47 | host = fields.shift() 48 | result.push lineup.addPage newPage {title: fields.shift()}, host 49 | result 50 | 51 | it 'should reload welcome', -> 52 | keys = fromUri 'view/welcome-visitors' 53 | crumbs = lineup.crumbs keys[0], 'foo.com' 54 | expect(crumbs).to.eql ['foo.com', 'view', 'welcome-visitors'] 55 | 56 | it 'should load remote welcome', -> 57 | keys = fromUri 'bar.com/welcome-visitors' 58 | crumbs = lineup.crumbs keys[0], 'foo.com' 59 | expect(crumbs).to.eql ['bar.com', 'view', 'welcome-visitors'] 60 | 61 | it 'should reload welcome before some-page', -> 62 | keys = fromUri 'view/some-page' 63 | crumbs = lineup.crumbs keys[0], 'foo.com' 64 | expect(crumbs).to.eql ['foo.com', 'view', 'welcome-visitors', 'view', 'some-page'] 65 | 66 | it 'should load remote welcome and some-page', -> 67 | keys = fromUri 'bar.com/some-page' 68 | crumbs = lineup.crumbs keys[0], 'foo.com' 69 | expect(crumbs).to.eql ['bar.com', 'view', 'welcome-visitors', 'view', 'some-page'] 70 | 71 | it 'should remote the adjacent local page when changing origin', -> 72 | keys = fromUri 'view/once-local/bar.com/some-page' 73 | crumbs = lineup.crumbs keys[1], 'foo.com' 74 | expect(crumbs).to.eql ['bar.com', 
'view', 'welcome-visitors', 'view', 'some-page', 'foo.com', 'once-local'] 75 | 76 | it 'should remote the stacked adjacent local page when changing origin', -> 77 | keys = fromUri 'view/stack1/view/stack2/view/once-local/bar.com/some-page' 78 | crumbs = lineup.crumbs keys[3], 'foo.com' 79 | expect(crumbs).to.eql ['bar.com', 'view', 'welcome-visitors', 'view', 'some-page', 'foo.com', 'once-local'] 80 | 81 | it 'should remote the welcome rooted stacked adjacent local page when changing origin', -> 82 | keys = fromUri 'view/welcome-visitors/view/stack2/view/once-local/bar.com/some-page' 83 | crumbs = lineup.crumbs keys[3], 'foo.com' 84 | expect(crumbs).to.eql ['bar.com', 'view', 'welcome-visitors', 'view', 'some-page', 'foo.com', 'once-local'] 85 | 86 | -------------------------------------------------------------------------------- /test/mockServer.coffee: -------------------------------------------------------------------------------- 1 | sinon = require 'sinon' 2 | 3 | simulatePageNotFound = -> 4 | xhrFor404 = { 5 | status: 404 6 | } 7 | sinon.stub(jQuery, "ajax").yieldsTo('error',xhrFor404) 8 | 9 | simulatePageFound = (pageToReturn = {})-> 10 | sinon.stub(jQuery, "ajax").yieldsTo('success', pageToReturn) 11 | 12 | 13 | module.exports = { 14 | simulatePageNotFound, 15 | simulatePageFound 16 | } 17 | -------------------------------------------------------------------------------- /test/neighborhood.coffee: -------------------------------------------------------------------------------- 1 | expect = require 'expect.js' 2 | _ = require 'underscore' 3 | 4 | neighborhood = require '../lib/neighborhood' 5 | 6 | describe 'neighborhood', -> 7 | 8 | describe 'no neighbors', -> 9 | it 'should return an empty array for our search', -> 10 | searchResult = neighborhood.search( "query string" ) 11 | expect(searchResult.finds).to.eql( [] ) 12 | 13 | 14 | describe 'a single neighbor with a few pages', -> 15 | before -> 16 | fakeSitemap = [ 17 | { title: 'Page One', slug: 'page-one', date: 'date1' }, 18 | { title: 'Page Two', slug: 'page-two', date: 'date2' }, 19 | { title: 'Page Three' } 20 | ] 21 | 22 | neighbor = { 23 | sitemap: fakeSitemap 24 | } 25 | 26 | neighborhood.sites = {} 27 | neighborhood.sites['my-site'] = neighbor 28 | 29 | it 'returns all pages that match the query', -> 30 | searchResult = neighborhood.search( "Page" ) 31 | expect( searchResult.finds ).to.have.length(3) 32 | 33 | it 'returns only pages that match the query', -> 34 | searchResult = neighborhood.search( "Page T" ) 35 | expect( searchResult.finds ).to.have.length(2) 36 | 37 | it 'should package the results in the correct format', -> 38 | expectedResult = [ 39 | { 40 | site: 'my-site', 41 | page: { title: 'Page Two', slug: 'page-two', date: 'date2' }, 42 | rank: 1 43 | } 44 | ] 45 | searchResult = neighborhood.search( "Page Two" ) 46 | expect( searchResult.finds ).to.eql( expectedResult ) 47 | 48 | 49 | it.skip 'searches both the slug and the title' 50 | 51 | describe 'more than one neighbor', -> 52 | before -> 53 | neighborhood.sites = {} 54 | neighborhood.sites['site-one'] = { 55 | sitemap: [ 56 | { title: 'Page One from Site 1' }, 57 | { title: 'Page Two from Site 1' }, 58 | { title: 'Page Three from Site 1' } 59 | ] 60 | } 61 | 62 | neighborhood.sites['site-two'] = { 63 | sitemap: [ 64 | { title: 'Page One from Site 2' }, 65 | { title: 'Page Two from Site 2' }, 66 | { title: 'Page Three from Site 2' } 67 | ] 68 | } 69 | 70 | it 'returns matching pages from every neighbor', -> 71 | searchResult = neighborhood.search( "Page 
Two" ) 72 | expect( searchResult.finds ).to.have.length(2) 73 | sites = _.pluck( searchResult.finds, 'site' ) 74 | expect( sites.sort() ).to.eql( ['site-one','site-two'].sort() ) 75 | 76 | 77 | describe 'an unpopulated neighbor', -> 78 | before -> 79 | neighborhood.sites = {} 80 | neighborhood.sites['unpopulated-site'] = {} 81 | 82 | it 'gracefully ignores unpopulated neighbors', -> 83 | searchResult = neighborhood.search( "some search query" ) 84 | expect( searchResult.finds ).to.be.empty() 85 | 86 | it.skip 'should re-populate the neighbor' 87 | -------------------------------------------------------------------------------- /test/page.coffee: -------------------------------------------------------------------------------- 1 | newPage = require('../lib/page').newPage 2 | expect = require 'expect.js' 3 | 4 | 5 | describe 'page', -> 6 | 7 | 8 | before () -> 9 | wiki = {} 10 | wiki.site = (site) -> { 11 | getURL: (route) -> 12 | "//#{site}/#{route}" 13 | getDirectURL: (route) -> 14 | "//#{site}/#{route}" 15 | } 16 | global.wiki = wiki 17 | 18 | describe 'newly created', -> 19 | 20 | it 'should start empty', -> 21 | pageObject = newPage() 22 | expect(pageObject.getSlug()).to.eql('empty') 23 | 24 | it 'should not be remote', -> 25 | pageObject = newPage() 26 | expect(pageObject.isRemote()).to.be.false 27 | 28 | it 'should have default contex', -> 29 | pageObject = newPage() 30 | expect(pageObject.getContext()).to.eql(['view']) 31 | 32 | describe 'from json', -> 33 | 34 | it 'should have a title', -> 35 | pageObject = newPage 36 | title: "New Page" 37 | expect(pageObject.getSlug()).to.eql('new-page') 38 | 39 | it 'should have a default context', -> 40 | pageObject = newPage 41 | title: "New Page" 42 | expect(pageObject.getContext()).to.eql(['view']) 43 | 44 | it 'should have context from site and (reversed) journal', -> 45 | pageObject = newPage 46 | journal: [ 47 | { type: 'fork', site: 'one.org'}, 48 | { type: 'fork', site: 'two.org'} 49 | ], 'example.com' 50 | expect(pageObject.getContext()).to.eql(['view','example.com','two.org','one.org']) 51 | 52 | it 'should have context without duplicates', -> 53 | pageObject = newPage 54 | journal: [ 55 | { type: 'fork', site: 'one.org'}, 56 | { type: 'fork', site: 'one.org'} 57 | ], 'example.com' 58 | expect(pageObject.getContext()).to.eql(['view','example.com','one.org']) 59 | 60 | it 'should have neighbors from site, reference and journal (in order, without duplicates)', -> 61 | pageObject = newPage 62 | story: [ 63 | { type: 'reference', site: 'one.org' }, 64 | { type: 'reference', site: 'two.org' }, 65 | { type: 'reference', site: 'one.org' } 66 | ] 67 | journal: [ 68 | { type: 'fork', site: 'three.org'}, 69 | { type: 'fork', site: 'four.org'}, 70 | { type: 'fork', site: 'three.org'} 71 | ], 'example.com' 72 | expect(pageObject.getNeighbors()).to.eql(['example.com','one.org','two.org','three.org','four.org']) 73 | 74 | describe 'site info', -> 75 | 76 | it 'should report null if local', -> 77 | pageObject = newPage() 78 | expect(pageObject.getRemoteSite()).to.be null 79 | 80 | it 'should report local host if provided', -> 81 | pageObject = newPage() 82 | expect(pageObject.getRemoteSite('fed.wiki.org')).to.be 'fed.wiki.org' 83 | 84 | it 'should report remote host if remote', -> 85 | pageObject = newPage {}, 'sfw.c2.com' 86 | expect(pageObject.getRemoteSite('fed.wiki.org')).to.be 'sfw.c2.com' 87 | 88 | describe 'site lineup', -> 89 | 90 | it 'should start with welcome-visitors', -> 91 | pageObject = newPage {title: "Welcome Visitors"} 92 | 
expect(pageObject.siteLineup()).to.be '/view/welcome-visitors' 93 | 94 | it 'should end on this page', -> 95 | pageObject = newPage {title: "Some Page"} 96 | expect(pageObject.siteLineup()).to.be '/view/welcome-visitors/view/some-page' 97 | 98 | it 'should use absolute address for remote pages', -> 99 | pageObject = newPage {title: "Some Page"}, 'fed.wiki.org' 100 | expect(pageObject.siteLineup()).to.be '//fed.wiki.org/view/welcome-visitors/view/some-page' 101 | 102 | describe 'site details', -> 103 | 104 | it 'should report residence only if local', -> 105 | pageObject = newPage {plugin: 'method'} 106 | expect(pageObject.getRemoteSiteDetails()).to.be 'method plugin' 107 | 108 | it 'should report residence and local host if provided', -> 109 | pageObject = newPage {plugin: 'method'} 110 | expect(pageObject.getRemoteSiteDetails('fed.wiki.org')).to.be 'fed.wiki.org\nmethod plugin' 111 | 112 | it 'should report residence and remote host if remote', -> 113 | pageObject = newPage {plugin: 'method'}, 'sfw.c2.com' 114 | expect(pageObject.getRemoteSiteDetails('fed.wiki.org')).to.be 'sfw.c2.com\nmethod plugin' 115 | -------------------------------------------------------------------------------- /test/pageHandler.coffee: -------------------------------------------------------------------------------- 1 | _ = require 'underscore' 2 | expect = require 'expect.js' 3 | sinon = require 'sinon' 4 | 5 | pageHandler = require '../lib/pageHandler' 6 | mockServer = require './mockServer' 7 | 8 | # disable reference to dom 9 | pageHandler.useLocalStorage = -> false 10 | 11 | describe 'pageHandler.get', -> 12 | 13 | it 'should have an empty context', -> 14 | expect(pageHandler.context).to.eql([]) 15 | 16 | pageInformationWithoutSite = { 17 | slug: 'slugName' 18 | rev: 'revName' 19 | } 20 | 21 | genericPageInformation = _.extend( {}, pageInformationWithoutSite, {site: 'siteName'} ) 22 | 23 | genericPageData = { 24 | journal: [] 25 | } 26 | 27 | beforeEach () -> 28 | wiki = {} 29 | wiki.local = { 30 | get: (route, done) -> 31 | done {msg: "no page named '#{route}' in browser local storage"} 32 | } 33 | wiki.origin = { 34 | get: (route, done) -> 35 | $.ajax 36 | type: 'GET' 37 | dataType: 'json' 38 | url: "/#{route}" 39 | success: (page) -> done null, page 40 | error: (xhr, type, msg) -> done {msg, xhr}, null 41 | put: (route, data, done) -> 42 | $.ajax 43 | type: 'PUT' 44 | url: "/page/#{route}/action" 45 | data: 46 | 'action': JSON.stringify(data) 47 | success: () -> done null 48 | error: (xhr, type, msg) -> done {xhr, type, msg} 49 | } 50 | wiki.site = (site) -> { 51 | get: (route, done) -> 52 | url = "//#{site}/#{route}" 53 | $.ajax 54 | type: 'GET' 55 | dataType: 'json' 56 | url: url 57 | success: (data) -> done null, data 58 | error: (xhr, type, msg) -> 59 | done {msg, xhr}, null 60 | } 61 | global.wiki = wiki 62 | 63 | describe 'ajax fails', -> 64 | 65 | before -> 66 | mockServer.simulatePageNotFound() 67 | 68 | after -> 69 | jQuery.ajax.restore() 70 | 71 | it "should tell us when it can't find a page (server specified)", -> 72 | whenGotten = sinon.spy() 73 | whenNotGotten = sinon.spy() 74 | 75 | pageHandler.get 76 | pageInformation: _.clone( genericPageInformation ) 77 | whenGotten: whenGotten 78 | whenNotGotten: whenNotGotten 79 | 80 | expect( whenGotten.called ).to.be.false 81 | expect( whenNotGotten.called ).to.be.true 82 | 83 | it "should tell us when it can't find a page (server unspecified)", -> 84 | whenGotten = sinon.spy() 85 | whenNotGotten = sinon.spy() 86 | 87 | pageHandler.get 88 | 
pageInformation: _.clone( pageInformationWithoutSite ) 89 | whenGotten: whenGotten 90 | whenNotGotten: whenNotGotten 91 | 92 | expect( whenGotten.called ).to.be.false 93 | expect( whenNotGotten.called ).to.be.true 94 | 95 | describe 'ajax, success', -> 96 | before -> 97 | sinon.stub(jQuery, "ajax").yieldsTo('success', genericPageData) 98 | $('
    ').appendTo('body') 99 | 100 | it 'should get a page from specific site', -> 101 | whenGotten = sinon.spy() 102 | pageHandler.get 103 | pageInformation: _.clone( genericPageInformation ) 104 | whenGotten: whenGotten 105 | 106 | expect(whenGotten.calledOnce).to.be.true 107 | expect(jQuery.ajax.calledOnce).to.be.true 108 | expect(jQuery.ajax.args[0][0]).to.have.property('type', 'GET') 109 | expect(jQuery.ajax.args[0][0].url).to.match(///^//siteName/slugName\.json///) 110 | 111 | after -> 112 | jQuery.ajax.restore() 113 | 114 | describe 'ajax, search', -> 115 | before -> 116 | mockServer.simulatePageNotFound() 117 | pageHandler.context = ['view', 'example.com', 'asdf.test', 'foo.bar'] 118 | 119 | it 'should search through the context for a page', -> 120 | pageHandler.get 121 | pageInformation: _.clone( pageInformationWithoutSite ) 122 | whenGotten: sinon.stub() 123 | whenNotGotten: sinon.stub() 124 | 125 | expect(jQuery.ajax.args[0][0].url).to.match(///^/slugName\.json///) 126 | expect(jQuery.ajax.args[1][0].url).to.match(///^//example.com/slugName\.json///) 127 | expect(jQuery.ajax.args[2][0].url).to.match(///^//asdf.test/slugName\.json///) 128 | expect(jQuery.ajax.args[3][0].url).to.match(///^//foo.bar/slugName\.json///) 129 | 130 | after -> 131 | jQuery.ajax.restore() 132 | 133 | describe 'pageHandler.put', -> 134 | before -> 135 | $('
<div id="pageHandler3"></div>').appendTo('body') 136 | sinon.stub(jQuery, "ajax").yieldsTo('success') 137 | 138 | it 'should save an action', (done) -> 139 | action = {type: 'edit', id: 1, item: {id:1}} 140 | pageHandler.put $('#pageHandler3'), action 141 | expect(jQuery.ajax.args[0][0].data).to.eql({action: JSON.stringify(action)}) 142 | done() 143 | 144 | after -> 145 | jQuery.ajax.restore() 146 | -------------------------------------------------------------------------------- /test/plugin.coffee: -------------------------------------------------------------------------------- 1 | plugin = require '../lib/plugin' 2 | sinon = require 'sinon' 3 | expect = require 'expect.js' 4 | 5 | describe 'plugin', -> 6 | fakeDeferred = undefined 7 | $page = null 8 | 9 | before -> 10 | $page = $('
<div id="plugin"></div>') 11 | $page.appendTo('body') 12 | sinon.spy(jQuery, 'ajax') 13 | 14 | after -> 15 | jQuery.ajax.restore() 16 | $page.empty() 17 | 18 | it 'should have default image type', -> 19 | expect(window.plugins).to.have.property('image') 20 | 21 | it 'should fetch a plugin script from the right location', -> 22 | plugin.get 'activity' 23 | expect(jQuery.ajax.calledOnce).to.be(true) 24 | expect(jQuery.ajax.args[0][0].url).to.be('/plugins/activity/activity.js') 25 | 26 | it 'should render a plugin', -> 27 | item = 28 | type: 'paragraph' 29 | text: 'blah [[Link]] asdf' 30 | plugin.do $('#plugin'), item 31 | expect($('#plugin').html()).to 32 | .be('

    blah Link asdf

    ') 33 | -------------------------------------------------------------------------------- /test/random.coffee: -------------------------------------------------------------------------------- 1 | random = require '../lib/random' 2 | expect = require 'expect.js' 3 | 4 | describe 'random', -> 5 | 6 | it 'should make random bytes', -> 7 | a = random.randomByte() 8 | expect(a).to.be.a 'string' 9 | expect(a.length).to.be 2 10 | 11 | it 'should make random byte strings', -> 12 | s = random.randomBytes(4) 13 | expect(s).to.be.a 'string' 14 | expect(s.length).to.be 8 15 | 16 | it 'should make random item ids', -> 17 | s = random.itemId() 18 | expect(s).to.be.a 'string' 19 | expect(s.length).to.be 16 20 | -------------------------------------------------------------------------------- /test/refresh.coffee: -------------------------------------------------------------------------------- 1 | refresh = require('../lib/refresh') 2 | lineup = require('../lib/lineup') 3 | mockServer = require('./mockServer') 4 | 5 | describe 'refresh', -> 6 | 7 | $page = undefined 8 | 9 | beforeEach -> 10 | wiki = {} 11 | wiki.local = { 12 | get: (route, done) -> 13 | done {msg: "no page named '#{route}' in browser local storage"} 14 | } 15 | wiki.origin = { 16 | get: (route, done) -> 17 | $.ajax 18 | type: 'GET' 19 | dataType: 'json' 20 | url: "/#{route}" 21 | success: (page) -> done null, page 22 | error: (xhr, type, msg) -> done {msg, xhr}, null 23 | } 24 | wiki.site = (site) -> { 25 | flag: () -> 26 | "//#{site}/favicon.png" 27 | getDirectURL: (route) -> 28 | "//#{site}/#{route}" 29 | get: (route, done) -> 30 | url = "//#{site}/#{route}" 31 | $.ajax 32 | type: 'GET' 33 | dataType: 'json' 34 | url: url 35 | success: (data) -> done null, data 36 | error: (xhr, type, msg) -> 37 | done {msg, xhr}, null 38 | } 39 | global.wiki = wiki 40 | 41 | describe 'when page not found', -> 42 | 43 | before -> 44 | $page = $('
    ') 45 | $page.appendTo('body') 46 | mockServer.simulatePageNotFound() 47 | after -> 48 | jQuery.ajax.restore() 49 | 50 | it "creates a ghost page", -> 51 | $page.each refresh.cycle 52 | expect( $page.hasClass('ghost') ).to.be(true) 53 | expect( key = $page.data('key') ).to.be.a('string') 54 | expect( pageObject = lineup.atKey(key) ).to.be.an('object') 55 | expect( pageObject.getRawPage().story[0].type ).to.be('future') 56 | 57 | describe 'when page found', -> 58 | 59 | before -> 60 | $page = $('
    ') 61 | $page.appendTo('body') 62 | mockServer.simulatePageFound({title: 'asdf'}) 63 | after -> 64 | jQuery.ajax.restore() 65 | 66 | it 'should refresh a page', (done) -> 67 | $page.each refresh.cycle 68 | expect($('#refresh h1').text().trim()).to.be('asdf') 69 | done() 70 | -------------------------------------------------------------------------------- /test/resolve.coffee: -------------------------------------------------------------------------------- 1 | resolve = require('../lib/resolve') 2 | expect = require 'expect.js' 3 | 4 | # Here we test new features retlated to escaping/sanitizing text while resolving. 5 | # See other related tests at /tests/wiki.coffee 6 | 7 | r = (text) -> resolve.resolveLinks text 8 | 9 | f = (text) -> 10 | found = [] 11 | text 12 | .replace /\s+/, '' 13 | .replace />(.*?) found.push each 14 | found 15 | 16 | describe 'resolve', -> 17 | 18 | describe 'plain text', -> 19 | it 'should pass unchanged', -> 20 | expect(r 'The quick brown fox.').to.eql 'The quick brown fox.' 21 | 22 | describe 'escaping', -> 23 | it 'should encode <, >, & in plain text', -> 24 | expect(r '5 < 10 && 5 > 3').to.eql '5 < 10 && 5 > 3' 25 | 26 | it 'should encode <, >, & in link text', -> 27 | expect(r '[[5 < 10 && 5 > 3]]').to.contain '>5 < 10 && 5 > 3' 28 | 29 | it 'should not encode before making slugs for hrefs', -> 30 | expect(r '[[5 < 10 && 5 > 3]]').to.contain 'href="/5--10--5--3.html"' 31 | 32 | it 'should not encode before making slugs for data-page-names', -> 33 | expect(r '[[5 < 10 && 5 > 3]]').to.contain 'data-page-name="5--10--5--3"' 34 | 35 | describe 'multiple links', -> 36 | it 'should be kept ordered', -> 37 | expect(f r '[[alpha]],[[beta]]&[[gamma]]').to.eql ['alpha', ',', 'beta', '&', 'gamma'] 38 | 39 | it 'should preserve internal before external', -> 40 | expect(f r '[[alpha]],[http:c2.com beta]').to.eql ['alpha', ',', 'beta'] 41 | 42 | it 'should preserve external before internal', -> 43 | expect(f r '[http:c2.com beta],[[alpha]]').to.eql ['beta', ',', 'alpha'] 44 | 45 | describe 'markers', -> 46 | it 'should be adulterated where unexpected', -> 47 | expect(r 'foo 〖12〗 bar').to.eql "foo 〖 12 〗 bar" 48 | 49 | 50 | 51 | -------------------------------------------------------------------------------- /test/revision.coffee: -------------------------------------------------------------------------------- 1 | {newPage} = require('../lib/page') 2 | revision = require '../lib/revision' 3 | expect = require 'expect.js' 4 | 5 | # fixture -- create proper pages from model pages 6 | 7 | id = (i) -> 8 | "#{i}0" 9 | 10 | item = (i,n='') -> 11 | {type:'paragraph', text:"t#{i}#{n}", id:id(i)} 12 | 13 | action = (a) -> 14 | return a if typeof a != 'string' 15 | [t, i, v...] = a.split '' 16 | switch t 17 | when 'c' then {type:'create', id:id(i), item:{title:"Create #{v}", story:(item i for i in v)}} 18 | when 'a' 19 | if v[0]? 
20 | {type:'add', id:id(i), item:item(i), after:id(v[0])} 21 | else 22 | {type:'add', id:id(i), item:item(i)} 23 | when 'r' then {type:'remove', id:id(i)} 24 | when 'e' then {type:'edit', id:id(i), item:item(i,'edited')} 25 | when 'm' then {type:'move', id:id(i), order:(id(j) for j in v)} 26 | else throw "can't model '#{t}' action" 27 | 28 | fixture = (model) -> 29 | model.title = model.title || "About #{model.story || model.journal}" 30 | model.story = (item i for i in model.story || []) 31 | model.journal = (action a for a in model.journal || []) 32 | model 33 | 34 | expectText = (version) -> 35 | expect((each.text for each in version.story)) 36 | 37 | describe 'revision', -> 38 | 39 | describe 'testing helpers', -> 40 | 41 | describe 'action', -> 42 | 43 | it 'should make create actions', -> 44 | expect(action('c312')).to.eql {type: 'create', id: '30', item: {title: "Create 1,2", story:[{type: 'paragraph', text: 't1', id: '10'},{type: 'paragraph', text: 't2', id: '20'}]}} 45 | 46 | it 'should make empty create actions', -> 47 | expect(action('c0')).to.eql {type: 'create', id: '00', item: {title: "Create ", story:[]}} 48 | 49 | it 'should make add actions', -> 50 | expect(action('a3')).to.eql {type: 'add', id: '30', item: {type: 'paragraph', text: 't3', id: '30'}} 51 | 52 | it 'should make add after actions', -> 53 | expect(action('a31')).to.eql {type: 'add', id: '30', item: {type: 'paragraph', text: 't3', id: '30'}, after: '10'} 54 | 55 | it 'should make remove actions', -> 56 | expect(action('r3')).to.eql {type: 'remove', id: '30'} 57 | 58 | it 'should make edit actions', -> 59 | expect(action('e3')).to.eql {type: 'edit', id: '30', item: {type: 'paragraph', text: 't3edited', id: '30'}} 60 | 61 | it 'should make move actions', -> 62 | expect(action('m1321')).to.eql {type: 'move', id: '10', order:['30','20','10']} 63 | 64 | describe 'fixture', -> 65 | 66 | data = fixture 67 | story: [1, 2, 3] 68 | journal: ['c12', 'a3', {type: 'foo'}] 69 | 70 | it 'should make stories with text', -> 71 | expect((e.text for e in data.story)).to.eql ['t1', 't2', 't3'] 72 | 73 | it 'should make stories with ids', -> 74 | expect((e.id for e in data.story)).to.eql ['10', '20', '30'] 75 | 76 | it 'should make journals with actions', -> 77 | expect((a.type for a in data.journal)).to.eql ['create', 'add', 'foo'] 78 | 79 | it 'should make titles from the model', -> 80 | expect(data.title).to.be 'About 1,2,3' 81 | 82 | describe 'applying actions', -> 83 | 84 | it 'should create a story', -> 85 | revision.apply (page = {}), {type: 'create', item: {story: [{type: 'foo'}]}} 86 | expect(page.story).to.eql [{type: 'foo'}] 87 | 88 | it 'should add an item', -> 89 | revision.apply (page = {}), {type: 'add', item: {type: 'foo'}} 90 | expect(page.story).to.eql [{type: 'foo'}] 91 | 92 | it 'should edit an item', -> 93 | revision.apply (page = {story:[{type:'foo',id:'3456'}]}), {type:'edit', id:'3456',item: {type:'bar',id:'3456'}} 94 | expect(page.story).to.eql [{type: 'bar', id:'3456'}] 95 | 96 | it 'should move first item to the bottom', -> 97 | page = 98 | story: [ 99 | {type:'foo', id:'1234'} 100 | {type:'bar', id:'3456'} 101 | ] 102 | revision.apply page, {type: 'move', id:'1234', order:['3456','1234']} 103 | expect(page.story).to.eql [{type:'bar', id:'3456'},{type:'foo', id:'1234'}] 104 | 105 | it 'should move last item to the top', -> 106 | page = 107 | story: [ 108 | {type:'foo', id:'1234'} 109 | {type:'bar', id:'3456'} 110 | ] 111 | revision.apply page, {type: 'move', id:'3456', order:['3456','1234']} 112 | 
expect(page.story).to.eql [{type:'bar', id:'3456'},{type:'foo', id:'1234'}] 113 | 114 | it 'should remove an item', -> 115 | page = 116 | story: [ 117 | {type:'foo', id:'1234'} 118 | {type:'bar', id:'3456'} 119 | ] 120 | revision.apply page, {type:'remove', id:'1234'} 121 | expect(page.story).to.eql [{type:'bar', id:'3456'}] 122 | 123 | 124 | describe 'creating revisions', -> 125 | 126 | describe 'titling', -> 127 | 128 | it 'should use create title if present', -> 129 | data = fixture {journal: ['c0123']} 130 | version = revision.create 0, data 131 | expect(version.title).to.eql('Create 1,2,3') 132 | 133 | it 'should use existing title if create title absent', -> 134 | data = fixture {title: 'Foo', journal: [{type: 'create', item: {story: []}}]} 135 | version = revision.create 0, data 136 | expect(version.title).to.eql('Foo') 137 | 138 | describe 'sequencing', -> 139 | data = fixture 140 | story:[1,2,3] 141 | journal:['a1','a21','a32'] 142 | 143 | it 'should do little to an empty page', -> 144 | emptyPage = newPage({}).getRawPage() 145 | version = revision.create -1, emptyPage 146 | expect(newPage(version).getRawPage()).to.eql(emptyPage) 147 | 148 | it 'should shorten the journal to given revision', -> 149 | version = revision.create 1, data 150 | expect(version.journal.length).to.be(2) 151 | 152 | it 'should recreate story on given revision', -> 153 | version = revision.create 1, data 154 | expectText(version).to.eql ['t1', 't2'] 155 | 156 | it 'should accept revision as string', -> 157 | version = revision.create '1', data 158 | expect(version.journal.length).to.be(2) 159 | 160 | describe 'workflows', -> 161 | 162 | describe 'dragging item from another page', -> 163 | it 'should place story item on dropped position', -> 164 | data = fixture 165 | journal:['c0135','a21','a43'] 166 | version = revision.create 3, data 167 | expectText(version).to.eql ['t1','t2','t3','t4','t5'] 168 | 169 | it 'should place story items at the beginning when dropped position is not defined', -> 170 | data = fixture 171 | journal:['c0135','a2','a4'] 172 | version = revision.create 3, data 173 | expectText(version).to.eql ['t4','t2','t1','t3','t5'] 174 | 175 | describe 'editing items', -> 176 | 177 | it 'should replace edited stories item', -> 178 | data = fixture 179 | journal:['c012345','e3','e1'] 180 | version = revision.create 3, data 181 | expectText(version).to.eql ['t1edited','t2','t3edited','t4','t5'] 182 | 183 | it 'should place item at end if edited item is not found', -> 184 | data = fixture 185 | journal:['c012345','e9'] 186 | version = revision.create 2, data 187 | expectText(version).to.eql ['t1','t2','t3','t4','t5','t9edited',] 188 | 189 | describe 'reordering items', -> 190 | 191 | it 'should move item up', -> 192 | data = fixture 193 | journal:['c012345','m414235'] 194 | version = revision.create 2, data 195 | expectText(version).to.eql ['t1','t4','t2','t3','t5'] 196 | 197 | it 'should move item to top', -> 198 | data = fixture 199 | journal:['c012345','m441235'] 200 | version = revision.create 2, data 201 | expectText(version).to.eql ['t4','t1','t2','t3','t5'] 202 | 203 | it 'should move item down', -> 204 | data = fixture 205 | journal:['c012345','m213425'] 206 | version = revision.create 2, data 207 | expectText(version).to.eql ['t1','t3','t4','t2','t5'] 208 | 209 | describe 'deleting items', -> 210 | it 'should remove the story items', -> 211 | data = fixture 212 | journal:['c012345','r4', 'r2'] 213 | version = revision.create 3, data 214 | expectText(version).to.eql ['t1','t3','t5'] 215 | 
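# Note on the journal shorthand used in the fixtures above: the action() helper near the top of
# this file expands one-letter codes into journal actions, with item ids of the form "<i>0":
#   'c312'  -> create page 3 whose story holds items 1 and 2
#   'a31'   -> add item 3 after item 1 ('a3' adds with no anchor)
#   'r3'    -> remove item 3
#   'e3'    -> edit item 3 (its text becomes 't3edited')
#   'm1321' -> move item 1 so the story order becomes 3, 2, 1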
-------------------------------------------------------------------------------- /test/search.coffee: -------------------------------------------------------------------------------- 1 | createSearch = require '../lib/search' 2 | 3 | describe 'search', -> 4 | # Can't test for right now, because performing a search 5 | # does DOM manipulation to build a page, which fails in the test runner. We'd like to isolate that DOM manipulation, but can't right now. 6 | it.skip 'performs a search on the neighborhood', -> 7 | spyNeighborhood = { 8 | search: sinon.stub().returns([]) 9 | } 10 | search = createSearch( neighborhood: spyNeighborhood ) 11 | search.performSearch( 'some search query' ) 12 | 13 | expect( spyNeighborhood.search.called ).to.be(true) 14 | expect( spyNeighborhood.search.args[0][0] ).to.be('some search query') 15 | -------------------------------------------------------------------------------- /test/util.coffee: -------------------------------------------------------------------------------- 1 | util = require '../lib/util' 2 | expect = require 'expect.js' 3 | 4 | timezoneOffset = -> 5 | ((new Date(1333843344000)).getTimezoneOffset() * 60) 6 | 7 | describe 'util', -> 8 | 9 | it 'should format unix time', -> 10 | s = util.formatTime(1333843344 + timezoneOffset()) 11 | expect(s).to.be '12:02 AM
<br>8 Apr 2012' 12 | it 'should format javascript time', -> 13 | s = util.formatTime(1333843344000 + timezoneOffset() * 1000) 14 | expect(s).to.be '12:02 AM
<br>8 Apr 2012' 15 | it 'should format revision date', -> 16 | s = util.formatDate(1333843344000 + timezoneOffset() * 1000) 17 | expect(s).to.be 'Sun Apr 8, 2012<br>
    12:02:24 AM' 18 | 19 | -------------------------------------------------------------------------------- /test/wiki.coffee: -------------------------------------------------------------------------------- 1 | wiki = require '../lib/wiki' 2 | expect = require 'expect.js' 3 | 4 | describe 'wiki', -> 5 | 6 | describe 'link resolution', -> 7 | 8 | it 'should pass free text as is', -> 9 | s = wiki.resolveLinks "hello world" 10 | expect(s).to.be 'hello world' 11 | 12 | describe 'internal links', -> 13 | s = wiki.resolveLinks "hello [[world]]" 14 | it 'should be class internal', -> 15 | expect(s).to.contain 'class="internal"' 16 | it 'should relative reference html', -> 17 | expect(s).to.contain 'href="/world.html"' 18 | it 'should have data-page-name', -> 19 | expect(s).to.contain 'data-page-name="world"' 20 | 21 | describe 'external links', -> 22 | s = wiki.resolveLinks "hello [http://world.com?foo=1&bar=2 world]" 23 | it 'should be class external', -> 24 | expect(s).to.contain 'class="external"' 25 | it 'should absolute reference html', -> 26 | expect(s).to.contain 'href="http://world.com?foo=1&bar=2"' 27 | it 'should not have data-page-name', -> 28 | expect(s).to.not.contain 'data-page-name' 29 | 30 | describe 'slug formation', -> 31 | 32 | it 'should convert capitals to lowercase', -> 33 | s = wiki.asSlug 'WelcomeVisitors' 34 | expect(s).to.be 'welcomevisitors' 35 | 36 | it 'should convert spaces to dashes', -> 37 | s = wiki.asSlug ' now is the time ' 38 | expect(s).to.be '-now-is--the-time-' 39 | 40 | it 'should pass letters, numbers and dash', -> 41 | s = wiki.asSlug 'THX-1138' 42 | expect(s).to.be 'thx-1138' 43 | 44 | it 'should discard other puctuation', -> 45 | s = wiki.asSlug '(The) World, Finally.' 46 | expect(s).to.be 'the-world-finally' 47 | -------------------------------------------------------------------------------- /testclient.coffee: -------------------------------------------------------------------------------- 1 | mocha.setup('bdd') 2 | 3 | window.wiki = require('./lib/wiki') 4 | require './lib/bind' 5 | require './lib/plugins' 6 | 7 | 8 | require './test/util' 9 | require './test/active' 10 | require './test/pageHandler' 11 | require './test/page' 12 | require './test/refresh' 13 | require './test/plugin' 14 | require './test/revision' 15 | require './test/neighborhood' 16 | require './test/search' 17 | require './test/drop' 18 | require './test/lineup' 19 | require './test/wiki' 20 | require './test/random' 21 | 22 | 23 | $ -> 24 | $('

    Testing artifacts:

    ').appendTo('body') 25 | mocha.run() 26 | 27 | --------------------------------------------------------------------------------