├── .gitignore ├── src ├── dns.js ├── browser.js ├── migrate.js ├── gateway.js ├── api.js └── library.js ├── dathelper.json ├── package.json ├── .travis.yml ├── LICENSE.txt ├── installer.sh ├── datfox-helper.js └── Readme.md /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules/ 2 | library/ 3 | dist/ -------------------------------------------------------------------------------- /src/dns.js: -------------------------------------------------------------------------------- 1 | const datDns = require('dat-dns'); 2 | module.exports = datDns(); 3 | -------------------------------------------------------------------------------- /dathelper.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dathelper", 3 | "description": "Dat helper daemon", 4 | "path": "/path/to/dat-fox-helper/datfox-helper.js", 5 | "type": "stdio", 6 | "allowed_extensions": [ 7 | "{e992d888-6346-4e09-98b5-8c61307970e6}", 8 | "{acc91f3f-2194-4f88-b25a-84ec4ea65683}" 9 | ] 10 | } 11 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dat-fox-helper", 3 | "version": "0.2.1", 4 | "description": "Bridge to Dat for a browser extension", 5 | "main": "index.js", 6 | "scripts": { 7 | "package": "pkg datfox-helper.js -c package.json --out-path dist/" 8 | }, 9 | "author": "Sam Macbeth", 10 | "license": "MIT", 11 | "dependencies": { 12 | "@sammacbeth/dat-api-v1": "^0.3.0", 13 | "@sammacbeth/dat-archive": "0.0.12", 14 | "@sammacbeth/dat-protocol-handler": "0.0.11", 15 | "dat-dns": "^4.1.2", 16 | "fs-extra": "^8.1.0", 17 | "mime": "2.3.1", 18 | "node-dat-archive": "git+https://git@github.com/sammacbeth/node-dat-archive", 19 | "node-persist": "^3.0.0", 20 | "pauls-dat-api": "8.0.1", 21 | "pump": "3.0.0", 22 | "rimraf": "^3.0.0" 23 | }, 24 | "devDependencies": { 25 | "pkg": "^4.4.2" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | node_js: 3 | - '12' 4 | sudo: false 5 | install: 6 | - npm install --silent 7 | script: 8 | - npm run package 9 | deploy: 10 | provider: releases 11 | skip_cleanup: true 12 | api_key: 13 | secure: dU7/67MitABHtr6hNKs/YjiC1Om0yZiPsKoIFvIZI8+8uOapJHiFAtprso62OZRN13mQiBuE9JqbfHQ3PxjBLaMoN2q7ww8O7cdzwchzZFqs2WCHZXPttnnjS1AS6LWNFNYeSRBaFp60TjJvmERxb0aGz6ZB5EE7OQ7K18CDaw29bre3SNgVd9RYw3iANVQ96eQttU3586xzfptDtITDnL12pYztdfrn0ssMccH45gYdYgL+fjKB+n4yvySAEJdTpTdnLykKM683olxs1DXXxu7lh0zmTiaoSRIUlFsMLpIgHTZ7BcEg0FCuTf+5I5FTJDghVlsnQOTeQ/9chKJxjO4EE2M7v1PYPykoA4s11p5ksCxjPfhpRx95+J4Y7jRgx7et4KkQVrIq2K3yxRAUY8s2OcByOCU8bJXPv7AduU9KUWSt4acQSg+O4dvbHMDO4/0abP9cLThQD+55sgR+7Q4/W2Q7YBsQDJhH9VkC7pGjlGDPUMnAudlGp1AZcnntoxGnHPppNlqGL8Y8xgWiTkETVTcvt7YSTkdlEmE/Q58lvHt1i6IyW4ig6K2o64sLTbeDgEhmeeOT3LWBRixlhNwwWdHh+/I9x8Lt/zd0LFbnHa7TEL/YrZkWvqFgePxwlWtpzv4TpBFckwSh+heboZRgqMKNJzVpGv4e82pMjMk= 14 | file_glob: true 15 | file: dist/dat-fox-helper-* 16 | on: 17 | repo: sammacbeth/dat-fox-helper 18 | tags: true 19 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 Sam Macbeth 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a 
copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. -------------------------------------------------------------------------------- /src/browser.js: -------------------------------------------------------------------------------- 1 | const process = require('process'); 2 | 3 | // Implementation of the extension native messaging protocol over stdio 4 | let expected = null; 5 | const empty = Buffer.alloc(0) 6 | let buffer = empty; 7 | 8 | const listeners = new Set(); 9 | 10 | function processData(data) { 11 | buffer = Buffer.concat([ buffer, data ]); 12 | 13 | if (expected == null) { 14 | // this is the start of a message, the length is in the first 4 bytes 15 | expected = buffer.readInt32LE(0); 16 | buffer = buffer.length > 4 ? buffer.slice(4) : empty; 17 | } 18 | if (buffer.length < expected) { 19 | // we didn't get all of the message yet 20 | return; 21 | } 22 | const message = JSON.parse(buffer.toString('utf8', 0, expected)); 23 | listeners.forEach((fn) => { 24 | try { 25 | fn(message); 26 | } catch(e) { 27 | postMessage({ type: 'native_exception', error: e.toString() }); 28 | } 29 | }); 30 | buffer = buffer.length === expected ? 
empty : Buffer.from(buffer.slice(expected)); 31 | expected = null; 32 | // check if there are more messages to process 33 | if (buffer.length > 0) { 34 | processData(Buffer.alloc(0)); 35 | } 36 | } 37 | process.stdin.on('data', processData); 38 | 39 | function postMessage(message) { 40 | const string = JSON.stringify(message); 41 | const length = Buffer.byteLength(string, 'utf8'); 42 | const buffer = Buffer.allocUnsafe(4 + length); 43 | buffer.writeInt32LE(length); 44 | buffer.write(string, 4, length, 'utf8'); 45 | process.stdout.write(buffer); 46 | }; 47 | 48 | module.exports = { 49 | onMessage: { 50 | addListener(fn) { 51 | listeners.add(fn); 52 | } 53 | }, 54 | postMessage, 55 | }; -------------------------------------------------------------------------------- /installer.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | { # this ensures the entire script is downloaded # 4 | 5 | REPO_URL="https://github.com/sammacbeth/dat-fox-helper/releases/download" 6 | TAG="v0.2.1" 7 | MANIFEST_URL="https://raw.githubusercontent.com/sammacbeth/dat-fox-helper/$TAG/dathelper.json" 8 | 9 | if [[ "$OSTYPE" == "linux-gnu" ]]; then 10 | BINDIR_DEFAULT=$HOME/.local/datfox 11 | BIN_NAME="dat-fox-helper-linux" 12 | MANIFEST_PATH="$HOME/.mozilla/native-messaging-hosts/" 13 | elif [[ `uname` == "Darwin" ]]; then 14 | BINDIR_DEFAULT="$HOME/Library/Application Support/datfox" 15 | BIN_NAME="dat-fox-helper-macos" 16 | MANIFEST_PATH="$HOME/Library/Application Support/Mozilla/NativeMessagingHosts/" 17 | fi 18 | 19 | if [[ -z "$BIN_NAME" ]]; then 20 | echo "Unable to detect platform" 21 | exit 1 22 | fi 23 | 24 | echo "Installing dat-fox-helper" 25 | read -p "dat-fox-helper install directory: [$BINDIR_DEFAULT] " BINDIR 26 | if [[ -z "$BINDIR" ]]; then 27 | BINDIR=$BINDIR_DEFAULT 28 | fi 29 | 30 | # prepare bin dir 31 | mkdir -p "$BINDIR/" 32 | cd "$BINDIR" 33 | echo "Downloading binary" 34 | curl -L -o dat-fox-helper $REPO_URL/$TAG/$BIN_NAME 35 | chmod +x "$BINDIR/dat-fox-helper" 36 | 37 | # prepare native manifest 38 | echo "Installing Firefox manifest to $MANIFEST_PATH" 39 | mkdir -p "$MANIFEST_PATH" 40 | curl -L -o "$MANIFEST_PATH/dathelper.json" $MANIFEST_URL 41 | # set path in manifest 42 | path1esc=$(echo "/path/to/dat-fox-helper/datfox-helper.js" | sed 's_/_\\/_g') 43 | path2esc=$(echo "$BINDIR/dat-fox-helper" | sed 's_/_\\/_g') 44 | if [[ `uname` == "Darwin" ]]; then 45 | sed -i "" -e "s/$path1esc/$path2esc/" "$MANIFEST_PATH/dathelper.json" 46 | else 47 | sed -i "s/$path1esc/$path2esc/" "$MANIFEST_PATH/dathelper.json" 48 | fi 49 | 50 | echo "Done" 51 | 52 | } 53 | -------------------------------------------------------------------------------- /datfox-helper.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | const path = require('path'); 3 | const browser = require('./src/browser'); 4 | const datApi = require('./src/api'); 5 | const DatGateway = require('./src/gateway'); 6 | const Library = require('./src/library'); 7 | const version = require('./package.json').version; 8 | 9 | const libraryDir = path.join(process.cwd(), 'library'); 10 | const library = new Library(libraryDir); 11 | library.init(); 12 | const gateway = new DatGateway(library); 13 | 14 | const handlers = { 15 | supportedActions: () => Promise.resolve(Object.keys(handlers)), 16 | getVersion: () => Promise.resolve(version), 17 | listLibrary: () => library.listLibrary(), 18 | getOpenArchives: () => 
Promise.resolve(library.getOpenArchives()), 19 | removeFromLibrary: ({ url }) => library.remove(url), 20 | closeArchive: ({ url }) => library.close(url), 21 | }; 22 | // collect available APIs 23 | Object.assign(handlers, datApi({ 24 | getArchive: library.getArchive.bind(library), 25 | createArchive: library.createArchive.bind(library), 26 | forkArchive: library.forkArchive.bind(library), 27 | }), { 28 | startGateway: ({ port=3000 }) => { 29 | return gateway.listen(port); 30 | }, 31 | }); 32 | 33 | // make API available over native messaging via stdio 34 | browser.onMessage.addListener((message) => { 35 | const { id, action } = message; 36 | if (handlers[action]) { 37 | handlers[action](message).then((result) => { 38 | browser.postMessage({ 39 | id, 40 | action, 41 | result, 42 | }); 43 | }, (error) => { 44 | browser.postMessage({ 45 | id, 46 | action, 47 | error: error.toString(), 48 | }); 49 | }); 50 | } else { 51 | browser.postMessage({ 52 | id, 53 | action, 54 | error: 'unhandled_message', 55 | message, 56 | }); 57 | } 58 | }); 59 | -------------------------------------------------------------------------------- /src/migrate.js: -------------------------------------------------------------------------------- 1 | const fs = require('fs-extra'); 2 | const path = require('path'); 3 | const promisify = require('util').promisify; 4 | const DatArchive = require('node-dat-archive') 5 | 6 | async function extractDat(dir) { 7 | const datDir = path.join(dir, '.dat'); 8 | const keyFile = path.join(datDir, 'metadata.key') 9 | if (await fs.exists(datDir) && await fs.exists(keyFile)) { 10 | const key = await fs.readFile(keyFile); 11 | return { 12 | address: key.toString('hex'), 13 | dir: dir, 14 | } 15 | } 16 | return false; 17 | } 18 | 19 | module.exports = async function(libraryDir, node) { 20 | const newLibraryDir = path.join(libraryDir, 'dat1'); 21 | // check if new library folder already exists 22 | if (await fs.exists(newLibraryDir)) { 23 | return; 24 | } 25 | // collect potential archives 26 | let folders = (await fs.readdir(libraryDir)).map((f) => path.join(libraryDir, f)); 27 | const folderCandidates = await Promise.all(folders.map(d => extractDat(d))) 28 | // do migrations 29 | await fs.mkdir(newLibraryDir); 30 | await Promise.all(folderCandidates.filter((v) => v !== false).map(async ({ address, dir }) => { 31 | const archive = await DatArchive.load({ localPath: dir, datOptions: { latest: true }}); 32 | const newDat = await node.getDat(address, { persist: true, autoSwarm: false, sparse: false }); 33 | const stream = archive._archive.replicate({ live: true }); 34 | stream.pipe(newDat.drive.replicate({ live: true })).pipe(stream); 35 | setTimeout(async () => { 36 | if (archive._archive.writable) { 37 | console.error('import secret key', archive._archive.metadata.secretKey.toString('hex')); 38 | await new Promise((resolve) => { 39 | newDat.drive.metadata._storage.secretKey.write(0, archive._archive.metadata.secretKey, resolve) 40 | }); 41 | } 42 | archive._close(); 43 | newDat.close(); 44 | }, 10000); 45 | })); 46 | } -------------------------------------------------------------------------------- /Readme.md: -------------------------------------------------------------------------------- 1 | # Dat-fox Helper 2 | 3 | A bridge to the Dat network for use by browsers. 
Provides: 4 | 5 | * An HTTP proxy to load content from Dat (using [dat-gateway](https://github.com/sammacbeth/dat-gateway)) 6 | * [Native messaging](https://developer.mozilla.org/en-US/Add-ons/WebExtensions/Native_messaging) enabling compatible browser extensions to control this process, 7 | for example implementing the [DatArchive API](https://beakerbrowser.com/docs/apis/dat.html), or requesting certain Dats to be seeded 8 | locally. 9 | 10 | This bridge is intended to run with the [dat-fox](https://github.com/sammacbeth/dat-fox) 11 | prototype webextension for Firefox. 12 | 13 | 14 | ## Installing 15 | 16 | ### Installer (Linux and Mac only) 17 | 18 | The helper can be automatically configured for use in Firefox with the `installer.sh` script by running the following in a terminal: 19 | 20 | ```bash 21 | curl -o- https://raw.githubusercontent.com/sammacbeth/dat-fox-helper/master/installer.sh | bash 22 | ``` 23 | 24 | ### Manual install 25 | 26 | To install manually, first download the latest binary release for your OS from [here](https://github.com/sammacbeth/dat-fox-helper/releases). You should create a dedicated folder for the binary, as it will place dat files in a `library` folder relative to its location. Usual locations are: 27 | * Linux `~/.local/datfox/` 28 | * Mac `~/Library/Application Support/datfox/` 29 | * Windows `C:\Users\{Your User}\AppData\Roaming\datfox\` 30 | 31 | On Mac and Linux you will also need to make the binary executable: 32 | ```bash 33 | # Linux 34 | chmod +x ~/.local/datfox/dat-fox-helper-linux 35 | # Mac 36 | chmod +x ~/.local/datfox/dat-fox-helper-macos 37 | ``` 38 | 39 | Now we need to make Firefox aware of the binary, using a [Native Manifest](https://developer.mozilla.org/en-US/Add-ons/WebExtensions/Native_manifests#Native_messaging_manifests). Create a JSON file with the following contents: 40 | ```json 41 | { 42 | "name": "dathelper", 43 | "description": "Dat helper daemon", 44 | "path": "/home/user/.local/datfox/dat-fox-helper-linux", 45 | "type": "stdio", 46 | "allowed_extensions": [ 47 | "{e992d888-6346-4e09-98b5-8c61307970e6}", 48 | "{acc91f3f-2194-4f88-b25a-84ec4ea65683}" 49 | ] 50 | } 51 | ``` 52 | 53 | Update the `"path"` value to match the absolute path to the dat-fox-helper binary. Save this file to: 54 | * Linux: `~/.mozilla/native-messaging-hosts/dathelper.json` 55 | * Mac: `~/Library/Application Support/Mozilla/NativeMessagingHosts/dathelper.json` 56 | * Windows: The same directory as the binary. Take care to double the backslashes in the path to escape them. 57 | 58 | ### Windows only: Add registry key 59 | 60 | 1. Open the start menu and type `regedit` to open the registry editor. 61 | 2. In the tree on the left, navigate to `HKEY_CURRENT_USER\Software\Mozilla\NativeMessagingHosts`. 62 | 3. Right-click on `NativeMessagingHosts` and choose `New` -> `Key`. 63 | 4. Name the new key `dathelper`. 64 | 5. With `dathelper` selected, open the `(Default)` entry in the right pane and set its value to the path of your `dathelper.json`. 65 | 66 | ### Verifying the install 67 | 68 | The [dat-fox](https://github.com/sammacbeth/dat-fox) extension automatically checks whether the helper binary can be launched when it starts up. 69 | 70 | ## Running from source 71 | 72 | On Linux and Mac, Firefox can also launch the helper directly from source. 
Check out this repository and install dependencies: 73 | ```bash 74 | git clone git@github.com:sammacbeth/dat-fox-helper.git 75 | cd dat-fox-helper 76 | npm install 77 | ``` 78 | 79 | Make sure the first line of `datfox-helper.js` points to your `node` binary. You can check this with `which node` in a terminal: 80 | ```sh 81 | #!/path/to/node 82 | ``` 83 | 84 | Now complete the steps from the previous section, but instead of using the binary, set the `"path"` to point to `datfox-helper.js`. 85 | -------------------------------------------------------------------------------- /src/gateway.js: -------------------------------------------------------------------------------- 1 | const http = require('http'); 2 | const mime = require('mime'); 3 | const joinPaths = require('path').join; 4 | const pump = require('pump') 5 | const parseDatURL = require('parse-dat-url'); 6 | const pda = require('pauls-dat-api'); 7 | const parseRange = require('range-parser'); 8 | const datProtocol = require('@sammacbeth/dat-protocol-handler'); 9 | const dns = require('./dns'); 10 | 11 | class DatGateway { 12 | constructor(library) { 13 | this.library = library; 14 | this.server = http.createServer(async (req, res) => { 15 | try { 16 | await this.handleRequest(req, res); 17 | } catch(e) { 18 | res.statusCode = 500; 19 | res.end(e.toString()); 20 | } 21 | }); 22 | // this.handler = datProtocol.default(library.node, dns.resolveName, { persist: true, autoSwarm: true }); 23 | } 24 | 25 | listen (port) { 26 | return new Promise((resolve, reject) => { 27 | this.server.listen(port, (err) => { 28 | if (err) return reject(err) 29 | else return resolve() 30 | }) 31 | }) 32 | } 33 | 34 | async handleRequest(req, res) { 35 | // mimic beakerbrowser's dat protocol handling for the web. 36 | // Cribbed from https://github.com/beakerbrowser/beaker/blob/master/app/background-process/protocols/dat.js 37 | // with minor alterations 38 | const errorResponse = (code, message) => { 39 | res.statusCode = code; 40 | res.end(message); 41 | } 42 | 43 | if (['GET', 'HEAD'].indexOf(req.method) === -1) { 44 | errorResponse(405, 'Method Not Supported'); 45 | return; 46 | } 47 | 48 | const url = req.url; 49 | const { host, pathname, version } = parseDatURL(url); 50 | 51 | try { 52 | const address = await dns.resolveName(host); 53 | const dat = await this.library.node.getDat(address, { 54 | persist: true, 55 | autoSwarm: true, 56 | sparse: true 57 | }); 58 | await dat.ready; 59 | const result = await datProtocol.resolvePath(dat.drive, pathname, version); 60 | 61 | if (result.directory === true) { 62 | res.statusCode = 200; 63 | res.write(`Directory ${result.path}`); 64 | res.end(); 65 | return 66 | } 67 | const size = new Promise((resolve) => { 68 | result.drive.stat(result.path, (err, stat) => { 69 | resolve(stat.size); 70 | }); 71 | }) 72 | 73 | // handle range 74 | /* 75 | res.setHeader('Accept-Ranges', 'bytes'); 76 | let range = req.headers.Range || req.headers.range; 77 | let start = 0; 78 | let end = 0; 79 | if (range) range = parseRange(size, range); 80 | if (range && range.type === 'bytes') { 81 | const sendRange = range[0]; 82 | start = sendRange.start; 83 | end = sendRange.end; 84 | res.statusCode = 206; 85 | res.setHeader('Content-Range', `bytes ${sendRange.start}-${sendRange.end}/${size}`); 86 | res.setHeader('Content-Length', sendRange.end - sendRange.start + 1); 87 | } else { 88 | res.setHeader('Content-Length', size); 89 | res.statusCode = 200; 90 | } 91 | */ 92 | 93 | res.setHeader('Content-Type', mime.getType(result.path)); 94 | 
res.setHeader('Access-Control-Allow-Origin', '*'); 95 | res.setHeader('Cache-Control', 'public, max-age: 60'); 96 | if (req.method === 'HEAD') { 97 | res.end(); 98 | } 99 | pump(result.drive.createReadStream(result.path), res); 100 | } catch (e) { 101 | if (e instanceof datProtocol.NotFoundError) { 102 | errorResponse(404, 'Archive Not Found'); 103 | return; 104 | } 105 | if (e instanceof datProtocol.NetworkTimeoutError) { 106 | errorResponse(500, 'Timed out loading dat'); 107 | return; 108 | } 109 | errorResponse(501, e.toString()); 110 | } 111 | 112 | // TODO: CSP in manifest 113 | /* 114 | if (manifest && manifest.content_security_policy && typeof manifest.content_security_policy === 'string') { 115 | res.setHeader('Content-Security-Policy', manifest.content_security_policy); 116 | } 117 | */ 118 | 119 | 120 | } 121 | } 122 | 123 | module.exports = DatGateway; 124 | -------------------------------------------------------------------------------- /src/api.js: -------------------------------------------------------------------------------- 1 | const dns = require('./dns'); 2 | 3 | function seralisableStat(stat) { 4 | return { 5 | ...stat, 6 | _isDirectory: stat.isDirectory(), 7 | _isFile: stat.isFile(), 8 | } 9 | } 10 | 11 | class ActivityStream { 12 | constructor(stream) { 13 | this.stream = stream; 14 | this.listeners = new Map(); 15 | this.evntQueue = new Map(); 16 | this.waitingResolve = new Map(); 17 | this.lastPoll = Date.now(); 18 | } 19 | 20 | addEventListener(name) { 21 | if (this.listeners.has(name)) { 22 | return; 23 | } 24 | this.evntQueue.set(name, []); 25 | this.listeners.set(name, this.stream.addEventListener(name, (ev) => { 26 | if (this.waitingResolve.has(name)) { 27 | this.waitingResolve.get(name)([ev]); 28 | } else { 29 | const queue = this.evntQueue.get(name) || []; 30 | queue.push(ev); 31 | this.evntQueue.set(name, queue); 32 | } 33 | })); 34 | } 35 | 36 | take(name, timeout) { 37 | this.addEventListener(name); 38 | this.lastPoll = Date.now(); 39 | const queue = this.evntQueue.get(name); 40 | if (queue && queue.length > 0) { 41 | this.evntQueue.delete(name); 42 | return Promise.resolve(queue); 43 | } 44 | return new Promise((resolve) => { 45 | const timer = setTimeout(() => { 46 | this.waitingResolve.delete(name); 47 | resolve([]); 48 | }, timeout || 10000); 49 | this.waitingResolve.set(name, (evs) => { 50 | this.waitingResolve.delete(name); 51 | clearTimeout(timer); 52 | resolve(evs); 53 | }); 54 | }); 55 | } 56 | 57 | close() { 58 | // flush waiting polls 59 | this.waitingResolve.forEach((resolve, name) => { 60 | resolve(this.evntQueue.get(name) || []); 61 | }); 62 | this.stream.close(); 63 | } 64 | } 65 | 66 | let streamIdx = 1; 67 | const activeStreams = new Map(); 68 | 69 | // clean unclosed streams 70 | setInterval(() => { 71 | const now = Date.now(); 72 | activeStreams.forEach((stream, id) => { 73 | if (now - stream.lastPoll > 60000) { 74 | stream.close(); 75 | activeStreams.delete(id); 76 | } 77 | }); 78 | }, 60000); 79 | 80 | module.exports = ({ getArchive, createArchive, forkArchive }) => ({ 81 | apiVersion: () => Promise.resolve(1), 82 | // DatArchive static methods 83 | load: async ({ url }) => { 84 | await (await getArchive(url))._loadPromise; 85 | return url; 86 | }, 87 | resolveName: (message) => dns.resolveName(message.name), 88 | create: ({ opts }) => createArchive(opts), 89 | fork: ({ url, opts }) => forkArchive(url, opts), 90 | // DatArchive class methods 91 | getInfo: async (message) => (await getArchive(message.url)).getInfo(message.opts), 92 | 
configure: async ({ url, opts }) => (await getArchive(url)).configure(opts), 93 | copy: async ({ url, path, dstPath, opts }) => (await getArchive(url)).copy(path, dstPath, opts), 94 | stat: async (message) => (await getArchive(message.url)) 95 | .stat(message.path, message.opts) 96 | .then(s => seralisableStat(s)), 97 | readdir: async ({ url, path, opts }) => (await getArchive(url)) 98 | .readdir(path, opts) 99 | .then((dir) => { 100 | if (opts && opts.stat) { 101 | return dir.map(({ name, stat }) => ({ name, stat: seralisableStat(stat) })); 102 | } 103 | return dir; 104 | }), 105 | history: async (message) => (await getArchive(message.url)).history(message.opts), 106 | readFile: async ({ url, path, opts }) => (await getArchive(url)).readFile(path, opts), 107 | writeFile: async ({ url, path, data, opts }) => (await getArchive(url)).writeFile(path, data, opts), 108 | mkdir: async ({ url, path }) => (await getArchive(url)).mkdir(path), 109 | unlink: async ({ url, path }) => (await getArchive(url)).unlink(path), 110 | rmdir: async ({ url, path, opts }) => (await getArchive(url)).rmdir(path, opts), 111 | rename: async({ url, oldPath, newPath, opts }) => (await getArchive(url)).rename(oldPath, newPath, opts), 112 | diff: async ({ url, opts }) =>(await getArchive(url)).diff(opts), 113 | commit: async ({ url }) => (await getArchive(url)).commit(), 114 | revert: async ({ url }) => (await getArchive(url)).revert(), 115 | download: async ({ url, path, opts }) =>(await getArchive(url)).download(path, opts), 116 | createFileActivityStream: async ({ url, pattern }) => { 117 | const archive = await getArchive(url); 118 | await archive._loadPromise; 119 | const stream = new ActivityStream(archive.createFileActivityStream(pattern)); 120 | const id = ++streamIdx; 121 | activeStreams.set(id, stream); 122 | return { streamId: id }; 123 | }, 124 | createNetworkActivityStream: async ({ url }) => { 125 | const archive = await getArchive(url); 126 | await archive._loadPromise; 127 | const stream = new ActivityStream(archive.createNetworkActivityStream()); 128 | const id = ++streamIdx; 129 | activeStreams.set(id, stream); 130 | return { streamId: id }; 131 | }, 132 | pollActivityStream: ({ streamId, event }) => { 133 | return activeStreams.get(streamId).take(event, 30000); 134 | }, 135 | closeActivityStream: ({ streamId }) => { 136 | activeStreams.get(streamId).close(); 137 | activeStreams.delete(streamId); 138 | return Promise.resolve({ streamId }); 139 | }, 140 | }); 141 | -------------------------------------------------------------------------------- /src/library.js: -------------------------------------------------------------------------------- 1 | const apiFactory = require('@sammacbeth/dat-api-v1').default; 2 | const { create, fork, default: createDatArchive } = require('@sammacbeth/dat-archive'); 3 | const fs = require('fs-extra'); 4 | const storage = require('node-persist'); 5 | const rimraf = require('rimraf'); 6 | const raf = require('random-access-file'); 7 | const dns = require('./dns'); 8 | const migrate = require('./migrate'); 9 | 10 | const DAT_PRESERVED_FIELDS_ON_FORK = [ 11 | 'web_root', 12 | 'fallback_page', 13 | 'links' 14 | ]; 15 | 16 | function formatArchiveName(name) { 17 | return name.replace(' ', '-') 18 | .replace('/', '_') 19 | .replace('\\', '_') 20 | .replace(':', '_'); 21 | } 22 | 23 | const datOpts = { 24 | persist: true, 25 | autoSwarm: true, 26 | sparse: true 27 | }; 28 | 29 | class Library { 30 | constructor(libraryDir) { 31 | this.libraryDir = libraryDir; 32 | this.datDir = 
`${this.libraryDir}/dat1`; 33 | // open and active archives 34 | this.archives = new Map(); 35 | this.archiveUsage = new Map(); 36 | this.node = apiFactory({ 37 | persistantStorageFactory: (key) => Promise.resolve((f) => { 38 | return raf(`${this.datDir}/${key}/${f.replace('/', '.')}`); 39 | }), 40 | persistantStorageDeleter: (key) => new Promise((resolve) => { 41 | rimraf(`${this.datDir}/${key}`, resolve); 42 | }), 43 | }, datOpts) 44 | } 45 | 46 | async init() { 47 | // create library dir if it does not exist 48 | if (!fs.existsSync(this.libraryDir)) { 49 | fs.mkdirSync(this.libraryDir); 50 | } 51 | await migrate(this.libraryDir, this.node); 52 | this.ready = await storage.init({ dir: `${this.libraryDir}/.metadata` }); 53 | const library = await this.listLibrary(); 54 | if (library) { 55 | if (library.length === 0) { 56 | // check for owned 57 | const addresses = await fs.readdir(this.datDir); 58 | const checkOwned = addresses.map(async (address) => { 59 | try { 60 | const dat = await this.node.getDat(address, datOpts); 61 | await dat.ready; 62 | if (dat.drive.writable) { 63 | const archive = createDatArchive(dat.drive); 64 | this.updateLibraryEntry(archive); 65 | this.archives.set(address, archive); 66 | } else { 67 | dat.close(); 68 | } 69 | } catch (e) { 70 | } 71 | }); 72 | await Promise.all(checkOwned); 73 | } 74 | // library exists, open archives in it 75 | const loadLibrary = library.map(async ({ url }) => { 76 | try { 77 | const address = await dns.resolveName(url); 78 | const dat = await this.node.getDat(address, datOpts); 79 | await dat.ready; 80 | const archive = createDatArchive(dat.drive); 81 | this.archives.set(address, archive); 82 | } catch (e) { 83 | // failed to load archive, remove from library 84 | await storage.removeItem(url); 85 | } 86 | }); 87 | 88 | await Promise.all(loadLibrary); 89 | } 90 | // TODO: add other dats in folder 91 | } 92 | 93 | async listLibrary() { 94 | await this.ready; 95 | return storage.values(); 96 | } 97 | 98 | async remove(url) { 99 | // remove from library 100 | await this.ready; 101 | const archiveInfo = await storage.getItem(url); 102 | if (!archiveInfo) { 103 | throw new Error('Archive not in library'); 104 | } 105 | // close archive 106 | await this.close(url); 107 | await storage.removeItem(url); 108 | return archiveInfo; 109 | } 110 | 111 | async close(url) { 112 | const host = await dns.resolveName(url); 113 | if (this.node.dats.has(host)) { 114 | this.node.dats.get(host).close(); 115 | } 116 | this.archives.delete(host); 117 | this.archiveUsage.delete(host); 118 | } 119 | 120 | getOpenArchives() { 121 | return [...this.archives.entries()].map(([host, archive]) => ({ 122 | host, 123 | url: archive.url, 124 | lastUsed: this.archiveUsage.get(host), 125 | })); 126 | } 127 | 128 | async ensureCacheDir() { 129 | const exists = await new Promise(resolve => !fs.exists(this.datDir, resolve)); 130 | if (!exists) { 131 | await new Promise(resolve => !fs.mkdir(this.datDir, resolve)); 132 | } 133 | } 134 | 135 | async createTempArchive(address) { 136 | await this.ensureCacheDir(); 137 | const dat = await this.node.getDat(address, datOpts); 138 | await dat.ready; 139 | const archive = createDatArchive(dat.drive); 140 | return archive; 141 | } 142 | 143 | async getArchive(url) { 144 | const host = await dns.resolveName(url); 145 | if (!this.archives.has(host)) { 146 | this.archives.set(host, await this.createTempArchive(host)); 147 | } 148 | this.archiveUsage.set(host, Date.now()); 149 | return this.archives.get(host); 150 | } 151 | 152 | async 
createArchive(opts) { 153 | const archive = await create(this.node, datOpts, opts); 154 | const { host } = await dns.resolveName(archive.url); 155 | this.archives.set(host, archive); 156 | 157 | this.updateLibraryEntry(archive); 158 | return archive.url; 159 | } 160 | 161 | async forkArchive(srcArchiveUrl, opts) { 162 | const srcAddress = await dns.resolveName(srcArchiveUrl); 163 | const srcDat = await this.node.getDat(srcAddress, datOpts); 164 | const dstArchive = await fork(this.node, srcDat.drive, datOpts, opts); 165 | this.updateLibraryEntry(dstArchive); 166 | return dstArchive.url; 167 | } 168 | 169 | updateLibraryEntry(archive) { 170 | archive.getInfo().then((info) => { 171 | const { key, url, title, description, isOwner } = info; 172 | storage.setItem(archive.url, { 173 | dir: `${this.libraryDir}/dat1/${key}/`, 174 | url, 175 | owner: isOwner, 176 | title, 177 | description, 178 | }); 179 | }); 180 | } 181 | } 182 | 183 | module.exports = Library; 184 | --------------------------------------------------------------------------------
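
For reference, the native messaging transport implemented in `src/browser.js` frames every message as a 4-byte little-endian length prefix followed by a UTF-8 JSON payload, and `datfox-helper.js` answers each incoming `{ id, action }` request with a matching `{ id, action, result }` (or `{ id, action, error }`) message. The sketch below is a hypothetical test script, not part of the repository, showing how the helper could be driven directly over stdio under those assumptions:

```js
// Hypothetical test client (assumption: run from the repo root so that
// `node datfox-helper.js` starts the helper). It uses the same framing as
// src/browser.js: a 4-byte little-endian length prefix + UTF-8 JSON body.
const { spawn } = require('child_process');

const helper = spawn('node', ['datfox-helper.js'], { stdio: ['pipe', 'pipe', 'inherit'] });

function send(message) {
  const json = Buffer.from(JSON.stringify(message), 'utf8');
  const frame = Buffer.alloc(4 + json.length);
  frame.writeInt32LE(json.length, 0); // length prefix
  json.copy(frame, 4);                // JSON payload
  helper.stdin.write(frame);
}

let pending = Buffer.alloc(0);
helper.stdout.on('data', (chunk) => {
  pending = Buffer.concat([pending, chunk]);
  // drain every complete frame currently buffered
  while (pending.length >= 4) {
    const len = pending.readInt32LE(0);
    if (pending.length < 4 + len) break;
    const reply = JSON.parse(pending.toString('utf8', 4, 4 + len));
    console.log('reply:', reply); // e.g. { id: 1, action: 'getVersion', result: '0.2.1' }
    pending = pending.slice(4 + len);
  }
});

// Each request carries an id plus an action name matching a handler in datfox-helper.js.
send({ id: 1, action: 'getVersion' });
send({ id: 2, action: 'supportedActions' });
send({ id: 3, action: 'listLibrary' });
```

Responses arrive asynchronously and are correlated by `id`, which the helper echoes back alongside the action name.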