├── .eslintrc ├── .gitignore ├── DatArchive.js ├── LICENSE ├── README.md ├── browser.js ├── example.html ├── index.js └── package.json /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "standard" 3 | } -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | 8 | # Runtime data 9 | pids 10 | *.pid 11 | *.seed 12 | *.pid.lock 13 | 14 | # Directory for instrumented libs generated by jscoverage/JSCover 15 | lib-cov 16 | 17 | # Coverage directory used by tools like istanbul 18 | coverage 19 | 20 | # nyc test coverage 21 | .nyc_output 22 | 23 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 24 | .grunt 25 | 26 | # Bower dependency directory (https://bower.io/) 27 | bower_components 28 | 29 | # node-waf configuration 30 | .lock-wscript 31 | 32 | # Compiled binary addons (http://nodejs.org/api/addons.html) 33 | build/Release 34 | 35 | # Dependency directories 36 | node_modules/ 37 | jspm_packages/ 38 | 39 | # Typescript v1 declaration files 40 | typings/ 41 | 42 | # Optional npm cache directory 43 | .npm 44 | 45 | # Optional eslint cache 46 | .eslintcache 47 | 48 | # Optional REPL history 49 | .node_repl_history 50 | 51 | # Output of 'npm pack' 52 | *.tgz 53 | 54 | # Yarn Integrity file 55 | .yarn-integrity 56 | 57 | # dotenv environment variables file 58 | .env 59 | 60 | # Package locks 61 | package-lock.json 62 | yarn.lock 63 | 64 | # Bundle 65 | bundle.js -------------------------------------------------------------------------------- /DatArchive.js: -------------------------------------------------------------------------------- 1 | /* global localStorage, prompt */ 2 | 3 | // Ripped out of node-dat-archive 4 | 5 | const path = require('path') 6 | const pda = require('pauls-dat-api') 7 | const parseURL = require('url-parse') 8 | const concat = require('concat-stream') 9 | const EventTarget = require('dom-event-target') 10 | const { timer, toEventTarget } = require('node-dat-archive/lib/util') 11 | const { 12 | DAT_MANIFEST_FILENAME, 13 | DAT_VALID_PATH_REGEX 14 | } = require('node-dat-archive/lib/const') 15 | const { 16 | ArchiveNotWritableError, 17 | ProtectedFileNotWritableError, 18 | InvalidPathError 19 | } = require('beaker-error-constants') 20 | const hexTo32 = require('hex-to-32') 21 | const Dat = require('dat-js') 22 | 23 | // Gateways are hella slow so we'll have a crazy long timeout 24 | const API_TIMEOUT = 15 * 1000 25 | 26 | const BASE_32_KEY_LENGTH = 52 27 | 28 | const to = (opts) => 29 | (opts && typeof opts.timeout !== 'undefined') 30 | ? 
opts.timeout 31 | : API_TIMEOUT 32 | 33 | class DatArchive extends EventTarget { 34 | static _getDat () { 35 | if (this.dat) return this.dat 36 | const dat = new Dat() 37 | this.dat = dat 38 | 39 | return dat 40 | } 41 | 42 | static _isLocal (key) { 43 | try { 44 | const current = DatArchive._listLocal() 45 | return current.includes(key) 46 | } catch (e) { 47 | return false 48 | } 49 | } 50 | 51 | static _addLocal (key) { 52 | try { 53 | const current = DatArchive._listLocal() 54 | DatArchive._saveLocal(current.concat(key)) 55 | } catch (e) { 56 | DatArchive._saveLocal([key]) 57 | } 58 | } 59 | 60 | static _listLocal () { 61 | try { 62 | return JSON.parse(localStorage.getItem('dats')) 63 | } catch (e) { 64 | return [] 65 | } 66 | } 67 | 68 | static _saveLocal (list) { 69 | localStorage.setItem('dats', JSON.stringify(list)) 70 | } 71 | 72 | constructor (url) { 73 | super() 74 | this.url = url 75 | 76 | let { key, version } = getURLData(url) 77 | 78 | let archive = null 79 | 80 | const dat = DatArchive._getDat() 81 | if (key) { 82 | const options = {} 83 | if (DatArchive._isLocal(key)) { 84 | options.persist = true 85 | } 86 | archive = dat.get(key, options) 87 | } else { 88 | archive = dat.create({ 89 | persist: true 90 | }) 91 | DatArchive._addLocal(archive.metadata.key.toString('hex')) 92 | } 93 | 94 | this._archive = archive 95 | 96 | this._loadPromise = waitReady(archive).then(async () => { 97 | this._checkout = version ? archive.checkout(version) : archive 98 | this.url = this.url || `dat://${archive.key.toString('hex')}` 99 | this._loadPromise = null 100 | 101 | // if (!archive.writable && !archive.metadata.length) { 102 | // // wait to receive a first update 103 | // await new Promise((resolve, reject) => { 104 | // archive.metadata.update(err => { 105 | // if (err) reject(err) 106 | // else resolve() 107 | // }) 108 | // }) 109 | // } 110 | }) 111 | 112 | var s = toEventTarget(pda.createNetworkActivityStream(this._archive)) 113 | 114 | s.addEventListener('network-changed', detail => 115 | this.send('network-changed', { target: this, ...detail }) 116 | ) 117 | 118 | s.addEventListener('download', detail => 119 | this.send('download', { target: this, ...detail }) 120 | ) 121 | 122 | s.addEventListener('upload', detail => 123 | this.send('upload', { target: this, ...detail }) 124 | ) 125 | 126 | s.addEventListener('sync', detail => 127 | this.send('sync', { target: this, ...detail }) 128 | ) 129 | } 130 | 131 | async getInfo (opts = {}) { 132 | return timer(to(opts), async () => { 133 | await this._loadPromise 134 | 135 | // read manifest 136 | var manifest 137 | try { 138 | manifest = await pda.readManifest(this._checkout) 139 | } catch (e) { 140 | manifest = {} 141 | } 142 | 143 | // return 144 | return { 145 | key: this._archive.key.toString('hex'), 146 | url: this.url, 147 | isOwner: this._archive.writable, 148 | 149 | // state 150 | version: this._checkout.version, 151 | peers: this._archive.metadata.peers.length, 152 | mtime: 0, 153 | size: 0, 154 | 155 | // manifest 156 | title: manifest.title, 157 | description: manifest.description, 158 | type: manifest.type, 159 | author: manifest.author 160 | } 161 | }) 162 | } 163 | 164 | async configure (settings) { 165 | await this._loadPromise 166 | if (!settings || typeof settings !== 'object') throw new Error('Invalid argument') 167 | if ('title' in settings || 'description' in settings || 'type' in settings || 'author' in settings) { 168 | await pda.updateManifest(this._archive, settings) 169 | } 170 | } 171 | 172 | async diff () { 
173 | // noop 174 | return [] 175 | } 176 | 177 | async commit () { 178 | // noop 179 | return [] 180 | } 181 | 182 | async revert () { 183 | // noop 184 | return [] 185 | } 186 | 187 | async history (opts = {}) { 188 | return timer(to(opts), async () => { 189 | await this._loadPromise 190 | var reverse = opts.reverse === true 191 | var { start, end } = opts 192 | 193 | // if reversing the output, modify start/end 194 | start = start || 0 195 | end = end || this._checkout.metadata.length 196 | if (reverse) { 197 | // swap values 198 | let t = start 199 | start = end 200 | end = t 201 | // start from the end 202 | start = this._checkout.metadata.length - start 203 | end = this._checkout.metadata.length - end 204 | } 205 | 206 | return new Promise((resolve, reject) => { 207 | var stream = this._checkout.history({ live: false, start, end }) 208 | stream.pipe(concat({ encoding: 'object' }, values => { 209 | values = values.map(massageHistoryObj) 210 | if (reverse) values.reverse() 211 | resolve(values) 212 | })) 213 | stream.on('error', reject) 214 | }) 215 | }) 216 | } 217 | 218 | async stat (filepath, opts = {}) { 219 | filepath = massageFilepath(filepath) 220 | return timer(to(opts), async () => { 221 | await this._loadPromise 222 | return pda.stat(this._checkout, filepath) 223 | }) 224 | } 225 | 226 | async readFile (filepath, opts = {}) { 227 | filepath = massageFilepath(filepath) 228 | return timer(to(opts), async () => { 229 | await this._loadPromise 230 | return pda.readFile(this._checkout, filepath, opts) 231 | }) 232 | } 233 | 234 | watch (pathPattern, onInvalidated) { 235 | if (typeof pathPattern === 'function') { 236 | onInvalidated = pathPattern 237 | pathPattern = null 238 | } 239 | 240 | if (this._loadPromise) { 241 | var proxy = new EventTarget() 242 | this._loadPromise.then(() => { 243 | var evts = this.watch(pathPattern, onInvalidated) 244 | evts.addEventListener('invalidated', (e) => { 245 | proxy.send('invalidated', e) 246 | }) 247 | evts.addEventListener('changed', (e) => { 248 | proxy.send('changed', e) 249 | }) 250 | }) 251 | return proxy 252 | } 253 | 254 | var evts = toEventTarget(pda.watch(this._archive, pathPattern)) 255 | if (onInvalidated) evts.addEventListener('invalidated', onInvalidated) 256 | return evts 257 | } 258 | 259 | async writeFile (filepath, data, opts = {}) { 260 | filepath = massageFilepath(filepath) 261 | return timer(to(opts), async () => { 262 | await this._loadPromise 263 | if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version') 264 | await assertWritePermission(this._archive) 265 | await assertValidFilePath(filepath) 266 | await assertUnprotectedFilePath(filepath) 267 | return pda.writeFile(this._archive, filepath, data, opts) 268 | }) 269 | } 270 | 271 | async unlink (filepath) { 272 | filepath = massageFilepath(filepath) 273 | return timer(to(), async () => { 274 | await this._loadPromise 275 | if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version') 276 | await assertWritePermission(this._archive) 277 | await assertUnprotectedFilePath(filepath) 278 | return pda.unlink(this._archive, filepath) 279 | }) 280 | } 281 | 282 | async download (filepath, opts = {}) { 283 | filepath = massageFilepath(filepath) 284 | return timer(to(opts), async (checkin) => { 285 | await this._loadPromise 286 | if (this._version) throw new Error('Not yet supported: can\'t download() old versions yet. 
Sorry!') // TODO 287 | if (this._archive.writable) { 288 | return // no need to download 289 | } 290 | return pda.download(this._archive, filepath) 291 | }) 292 | } 293 | 294 | async readdir (filepath, opts = {}) { 295 | filepath = massageFilepath(filepath) 296 | return timer(to(opts), async () => { 297 | await this._loadPromise 298 | var names = await pda.readdir(this._checkout, filepath, opts) 299 | if (opts.stat) { 300 | for (let i = 0; i < names.length; i++) { 301 | names[i] = { 302 | name: names[i], 303 | stat: await pda.stat(this._checkout, path.join(filepath, names[i])) 304 | } 305 | } 306 | } 307 | return names 308 | }) 309 | } 310 | 311 | async mkdir (filepath) { 312 | filepath = massageFilepath(filepath) 313 | return timer(to(), async () => { 314 | await this._loadPromise 315 | if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version') 316 | await assertWritePermission(this._archive) 317 | await assertValidPath(filepath) 318 | await assertUnprotectedFilePath(filepath) 319 | return pda.mkdir(this._archive, filepath) 320 | }) 321 | } 322 | 323 | async rmdir (filepath, opts = {}) { 324 | return timer(to(opts), async () => { 325 | filepath = massageFilepath(filepath) 326 | await this._loadPromise 327 | if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version') 328 | await assertUnprotectedFilePath(filepath) 329 | return pda.rmdir(this._archive, filepath, opts) 330 | }) 331 | } 332 | 333 | async copy (path, dstPath, opts) { 334 | path = massageFilepath(path) 335 | dstPath = massageFilepath(dstPath) 336 | return timer(to(opts), async () => { 337 | await this._loadPromise 338 | await pda.copy(this._archive, path, dstPath) 339 | }) 340 | } 341 | 342 | async rename (filepath, dstpath, opts) { 343 | filepath = massageFilepath(filepath) 344 | dstpath = massageFilepath(dstpath) 345 | return timer(to(opts), async () => { 346 | await this._loadPromise 347 | await pda.rename(this._archive, filepath, dstpath) 348 | }) 349 | } 350 | 351 | createFileActivityStream (pathPattern) { 352 | return toEventTarget(pda.watch(this._archive, pathPattern)) 353 | } 354 | 355 | createNetworkActivityStream () { 356 | return toEventTarget(pda.createNetworkActivityStream(this._archive)) 357 | } 358 | 359 | static async resolveName (name) { 360 | throw new Error('Not Supported') 361 | } 362 | 363 | static async fork (url, opts) { 364 | const srcDat = new DatArchive(url) 365 | 366 | const destDat = await DatArchive.create(opts) 367 | 368 | await srcDat._loadPromise 369 | 370 | await pda.exportArchiveToArchive({ 371 | srcArchive: srcDat._archive, 372 | dstArchive: destDat._archive 373 | }) 374 | 375 | return destDat 376 | } 377 | 378 | static async selectArchive (options) { 379 | const urls = DatArchive._listLocal() 380 | const archives = urls.map((url) => new DatArchive(url)) 381 | 382 | const info = await Promise.all(archives.map((archive) => archive.getInfo())) 383 | 384 | const message = ` 385 | Please choose a Dat Archive: 386 | ${info.map(({ url, title }, index) => `${index}. 
${title || 'Untitled'}: ${url}`).join('\n')} 387 | ` 388 | 389 | const selection = prompt(message, 0) 390 | 391 | const archive = archives[selection] 392 | 393 | if (!archive) throw new Error('Archive Not Found', selection) 394 | 395 | return archive 396 | } 397 | 398 | static async create ({ title, description, type, author } = {}) { 399 | const archive = new DatArchive(null) 400 | 401 | await archive._loadPromise 402 | 403 | await pda.writeManifest(archive._archive, { url: archive.url, title, description, type, author }) 404 | 405 | return archive 406 | } 407 | 408 | static async load (url) { 409 | const archive = new DatArchive(url) 410 | 411 | await archive._loadPromise 412 | 413 | return archive 414 | } 415 | } 416 | 417 | module.exports = DatArchive 418 | 419 | // helper to check if filepath refers to a file that userland is not allowed to edit directly 420 | function assertUnprotectedFilePath (filepath) { 421 | if (filepath === '/' + DAT_MANIFEST_FILENAME) { 422 | throw new ProtectedFileNotWritableError() 423 | } 424 | } 425 | 426 | async function assertWritePermission (archive) { 427 | // ensure we have the archive's private key 428 | if (!archive.writable) { 429 | throw new ArchiveNotWritableError() 430 | } 431 | return true 432 | } 433 | 434 | async function assertValidFilePath (filepath) { 435 | if (filepath.slice(-1) === '/') { 436 | throw new InvalidPathError('Files can not have a trailing slash') 437 | } 438 | await assertValidPath(filepath) 439 | } 440 | 441 | async function assertValidPath (fileOrFolderPath) { 442 | if (!DAT_VALID_PATH_REGEX.test(fileOrFolderPath)) { 443 | throw new InvalidPathError('Path contains invalid characters') 444 | } 445 | } 446 | 447 | function massageHistoryObj ({ name, version, type }) { 448 | return { path: name, version, type } 449 | } 450 | 451 | function massageFilepath (filepath) { 452 | filepath = filepath || '' 453 | filepath = decodeURIComponent(filepath) 454 | if (!filepath.startsWith('/')) { 455 | filepath = '/' + filepath 456 | } 457 | return filepath 458 | } 459 | 460 | function waitReady (archive) { 461 | return new Promise((resolve, reject) => { 462 | archive.ready((err) => { 463 | if (err) reject(err) 464 | else resolve(archive) 465 | }) 466 | }) 467 | } 468 | 469 | function getURLData (url) { 470 | let key = null 471 | let version = null 472 | 473 | if (url) { 474 | if (!url.startsWith('dat://') && !url.startsWith('http://') && !url.startsWith('https://')) url = `dat://${url}` 475 | const parsed = parseURL(url) 476 | let hostname = null 477 | const isDat = parsed.protocol.indexOf('dat') === 0 478 | const isUndefined = parsed.protocol.indexOf('undefined') === 0 479 | if (isDat || isUndefined) { 480 | const hostnameParts = parsed.hostname.split('+') 481 | hostname = hostnameParts[0] 482 | version = hostnameParts[1] || null 483 | } else { 484 | const hostnameParts = parsed.hostname.split('.') 485 | const subdomain = hostnameParts[0] 486 | if (subdomain.length === BASE_32_KEY_LENGTH) { 487 | hostname = hexTo32.decode(subdomain) 488 | } else { 489 | hostname = parsed.hostname 490 | } 491 | } 492 | key = hostname 493 | } 494 | 495 | return { 496 | key, 497 | version 498 | } 499 | } 500 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2018 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation 
files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | **Note: This is being replaced by the [Dat SDK](https://github.com/datproject/sdk)** 2 | 3 | # dat-archive-web 4 | DatArchive implementation for browsers using [dat-js](https://github.com/datproject/dat-js#readme) 5 | 6 | ``` 7 | npm install --save dat-archive-web 8 | ``` 9 | 10 | Or 11 | 12 | ```html 13 | 14 | ``` 15 | 16 | ## Example 17 | 18 | ```javascript 19 | // After including the bundle in a script tag, or requiring it 20 | 21 | const archive = new DatArchive('dat://87ed2e3b160f261a032af03921a3bd09227d0a4cde73466c17114816cae43336') 22 | 23 | // Subscribe to network events 24 | archive.addEventListener("download", ({bytes}) => console.log(`Downloaded ${bytes} bytes`)) 25 | 26 | // Subscribe to file events 27 | const emitter = archive.watch("/path/to/watch") 28 | emitter.addEventListener("change", ({path}) => console.log(`file at path: ${path} changed!`)) 29 | 30 | archive.readFile('/index.html') 31 | .then((html) => console.log(html)) 32 | ``` 33 | 34 | ## API 35 | 36 | Implements the same interface as [DatArchive](https://beakerbrowser.com/docs/apis/dat.html) in Beaker with the following exceptions: 37 | 38 | - `archive.diff()`, `archive.commit()`, and `archive.revert()` are not supported 39 | - `DatArchive.selectArchive()` doesn't do filtering and looks crappy. Uses [window.prompt](https://developer.mozilla.org/en-US/docs/Web/API/Window/prompt) API 40 | - `DatArchive.resolveName()` doesn't work and DNS based urls aren't supported. Waiting for dat-js support 41 | 42 | # Features 43 | 44 | - [x] Support most DatArchive methods 45 | - [x] Public bridges used to replicate with non-browser network 46 | - [x] Detect HTTP/HTTPS in gateway URL 47 | - [x] Data stored in memory by default, unless it was created locally. 
48 | - [ ] Functional DatDNS support (via gateway) 49 | - [x] Full support for versions (needs testing, but the code is there) 50 | - [x] Forking (without preserving change feed) 51 | - [x] `DatArchive.selectArchive()` (really rudimentary) 52 | 53 | ## Development 54 | 55 | - This project uses the `standard` code style 56 | - Run the example in node with `npm install && npm run example` 57 | - Build the browserify bundle with `npm run build` 58 | -------------------------------------------------------------------------------- /browser.js: -------------------------------------------------------------------------------- 1 | const DatArchive = require('./') 2 | 3 | if (!window.DatArchive) window.DatArchive = DatArchive 4 | 5 | module.exports = DatArchive 6 | -------------------------------------------------------------------------------- /example.html: -------------------------------------------------------------------------------- 1 | Dat Archive Web Example 2 | 3 | 34 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const DatArchive = require('./DatArchive') 2 | 3 | module.exports = DatArchive 4 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dat-archive-web", 3 | "version": "2.0.5", 4 | "description": "DatArchive implementation for browsers that uses dat-js", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1", 8 | "example": "node example.js", 9 | "build": "browserify -r fs:graceful-fs -s DatArchiveWeb -e ./browser.js > bundle.js" 10 | }, 11 | "repository": { 12 | "type": "git", 13 | "url": "git+https://github.com/RangerMauve/dat-archive-web.git" 14 | }, 15 | "files": [ 16 | "DatArchive.js", 17 | "browser.js", 18 | "index.js", 19 | "README.md", 20 | "LICENSE", 21 | "bundle.js" 22 | ], 23 | "keywords": [ 24 | "dat", 25 | "archive", 26 | "web", 27 | "beaker" 28 | ], 29 | "author": "rangermauve", 30 | "license": "MIT", 31 | "bugs": { 32 | "url": "https://github.com/RangerMauve/dat-archive-web/issues" 33 | }, 34 | "homepage": "https://github.com/RangerMauve/dat-archive-web#readme", 35 | "devDependencies": { 36 | "browserify": "^16.2.0", 37 | "eslint": "^4.19.1", 38 | "eslint-config-standard": "^11.0.0", 39 | "eslint-plugin-import": "^2.10.0", 40 | "eslint-plugin-node": "^6.0.1", 41 | "eslint-plugin-promise": "^3.7.0", 42 | "eslint-plugin-standard": "^3.0.1" 43 | }, 44 | "dependencies": { 45 | "beaker-error-constants": "^1.4.0", 46 | "concat-stream": "^1.6.2", 47 | "dat-js": "^8.0.0", 48 | "dom-event-target": "^1.0.0", 49 | "hex-to-32": "^1.0.0", 50 | "node-dat-archive": "^2.0.1", 51 | "pauls-dat-api": "^7.0.1", 52 | "url-parse": "^1.4.0" 53 | } 54 | } 55 | --------------------------------------------------------------------------------
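Below is a minimal sketch of the kind of page script that could accompany the built bundle. It is an illustration, not part of the repository: it assumes `bundle.js` has been produced with `npm run build` and loaded via a `<script>` tag (so `browser.js` has attached `DatArchive` to `window`), the archive URL is the one from the README example, the title passed to `create()` is an arbitrary example value, and only methods implemented in `DatArchive.js` are called.

```javascript
/* global DatArchive */
// Sketch only — assumes bundle.js (from `npm run build`) is already loaded,
// so browser.js has set window.DatArchive.

async function main () {
  // Load the archive used in the README example
  const archive = await DatArchive.load('dat://87ed2e3b160f261a032af03921a3bd09227d0a4cde73466c17114816cae43336')

  // getInfo() combines manifest fields with archive state
  const info = await archive.getInfo()
  console.log(`Loaded "${info.title || info.key}" at version ${info.version}`)

  // Read a file out of the archive
  console.log(await archive.readFile('/index.html'))

  // Network and file events, as in the README example
  archive.addEventListener('download', ({ bytes }) => console.log(`Downloaded ${bytes} bytes`))
  archive.watch('/index.html', () => console.log('/index.html was invalidated'))

  // Create a new archive (created archives are persisted and remembered in localStorage) and write to it
  const mine = await DatArchive.create({ title: 'My Site' })
  await mine.writeFile('/hello.txt', 'Hello from dat-archive-web')
  console.log('New archive available at', mine.url)
}

main().catch(console.error)
```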