├── .gitignore
├── README.md
├── index.js
├── lib
├── const.js
└── util.js
├── package-lock.json
├── package.json
└── test
├── api-in-memory.js
├── api-latest.js
├── api.js
├── lib
└── dat-helpers.js
└── scaffold
├── test-runner-dat
├── dat.json
├── index.html
└── index.js
└── test-static-dat
├── beaker.png
├── hello.txt
└── subdir
├── hello.txt
└── space in the name.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 | [](https://dat-ecosystem.org/)
3 |
4 | More info on active projects and modules at [dat-ecosystem.org](https://dat-ecosystem.org/)
5 |
6 | ---
7 |
8 | ## Node DatArchive
9 |
10 | A nodejs API for Dat which is compatible with Beaker's DatArchive API. Useful for testing and for writing apps that work in the browser and in nodejs.
11 |
12 | ```js
13 | var DatArchive = require('node-dat-archive')
14 |
15 | // create a new archive
16 | var archive = await DatArchive.create({
17 | localPath: './my-archive-data',
18 | title: 'My Archive',
19 | description: 'A test of the node DatArchive API'
20 | })
21 |
22 | // load an existing archive from disk
23 | var archive = await DatArchive.load({
24 | localPath: './my-archive-data'
25 | })
26 |
27 | // load an existing archive from the URL:
28 | var archive = new DatArchive(datURL, {localPath: './my-archive-data'})
29 |
30 | // using the instance
31 | await archive.writeFile('hello.txt', 'world')
32 | var names = await archive.readdir('/')
33 | console.log(names) // => ['hello.txt']
34 | ```
35 |
36 | By default, `node-dat-archive` stores the Dat data in the `localPath` folder using the SLEEP format (dat's internal structure).
37 | If you want the folder to show the latest files (the dat cli behavior) pass `latest: true` in the `datOptions`.
38 |
39 | ```js
40 | var archive = await DatArchive.create({
41 | localPath: './my-archive-data',
42 | datOptions: {latest: true}
43 | })
44 | var archive = await DatArchive.load({
45 | localPath: './my-archive-data',
46 | datOptions: {latest: true}
47 | })
48 | var archive = new DatArchive(datURL, {
49 | localPath: './my-archive-data',
50 | datOptions: {latest: true}
51 | })
52 | ```
53 |
54 | You can also pass options through to [dat-node](https://github.com/datproject/dat-node) with `datOptions`, or pass options to its `.joinNetwork([opts])` method with `netOptions`:
55 |
56 | ```js
57 | var archive = new DatArchive(datURL, {
58 | localPath: './my-archive-data',
59 | datOptions: {
60 | live: true
61 | },
62 | netOptions: {
63 | upload: false
64 | }
65 | })
66 | ```
67 |
68 | This will extend node-dat-archive's defaults.
69 |
70 | ### Differences from Browser API
71 |
72 | - This module adds the `localPath` parameter. Use the `localPath` to specify where the data for the archive should be stored. If not provided, the archive will be stored in memory.
73 | - This module also adds `datOptions` and `netOptions` to configure the [dat-node](https://github.com/datproject/dat-node) usage.
74 | - This module also adds `DatArchive.load()` to read an archive from disk.
75 | - This module does not *yet* include `DatArchive.fork`.
76 | - This module does not *yet* include `DatArchive.unlink`.
77 | - This module will not include `DatArchive.selectArchive`.
78 | - `archive.getInfo()` does not give a valid `mtime` or `size`.
79 | - `networked:` opt is not yet supported.
80 |
81 | ### Quick API reference
82 |
83 | Refer to the [Beaker `DatArchive` docs](https://beakerbrowser.com/docs/apis/dat.html).
84 |
85 | ```js
86 | var archive = new DatArchive(url, {localPath:, datOptions:, netOptions:})
87 | var archive = await DatArchive.create({localPath:, datOptions:, netOptions:, title:, description:, type:, author:, networked:})
88 | var archive = await DatArchive.load({localPath:, datOptions:, netOptions:})
89 | var key = await DatArchive.resolveName(url)
90 | archive.url
91 | await archive.configure({title:, description:, type:, author:, networked:})
92 | var info = await archive.getInfo({timeout:})
93 | var stat = await archive.stat(path, {timeout:})
94 | var content = await archive.readFile(path, {encoding:, timeout:})
95 | var names = await archive.readdir(path, {recursive:, stat:, timeout:})
96 | await archive.writeFile(path, data, encoding)
97 | await archive.mkdir(path)
98 | await archive.unlink(path)
99 | await archive.rmdir(path, {recursive:})
100 | var history = await archive.history({start:, end:, reverse:, timeout:})
101 | await archive.download(path, {timeout:})
102 | var emitter = archive.createFileActivityStream(pattern)
103 | var emitter = archive.createNetworkActivityStream()
104 |
105 | // node-only:
106 | archive._loadPromise // promise for when the archive is ready to use
107 | archive._close() // exit swarm, close all files
108 | ```
109 |
--------------------------------------------------------------------------------
/index.js:
--------------------------------------------------------------------------------
1 | const path = require('path')
2 | const fs = require('fs')
3 | const parseDatURL = require('parse-dat-url')
4 | const pda = require('pauls-dat-api')
5 | const concat = require('concat-stream')
6 | const Dat = require('dat-node')
7 | const ram = require('random-access-memory')
8 | const {datDns, timer, toEventTarget} = require('./lib/util')
9 | const {
10 | DAT_MANIFEST_FILENAME,
11 | DAT_VALID_PATH_REGEX,
12 | DEFAULT_DAT_API_TIMEOUT
13 | } = require('./lib/const')
14 | const {
15 | ArchiveNotWritableError,
16 | ProtectedFileNotWritableError,
17 | InvalidPathError
18 | } = require('beaker-error-constants')
19 |
20 | // exported api
21 | // =
22 |
// Resolve the effective timeout for an API call: an explicit opts.timeout
// wins (including 0, meaning "no timeout"); otherwise fall back to the default.
const to = (opts) => {
  if (opts && typeof opts.timeout !== 'undefined') {
    return opts.timeout
  }
  return DEFAULT_DAT_API_TIMEOUT
}
27 |
/**
 * Node implementation of Beaker's DatArchive API, backed by dat-node.
 *
 * An instance is usable once `_loadPromise` resolves; every async method
 * awaits it internally, so callers normally don't need to.
 */
class DatArchive {
  /**
   * @param {string} [url] - dat:// URL to load (may include a `+version`); falsy creates a new archive.
   * @param {Object} [opts]
   * @param {string} [opts.localPath] - where archive data is stored; in-memory (RAM) if omitted.
   * @param {Object} [opts.datOptions] - options merged into the dat-node options.
   * @param {Object} [opts.netOptions] - options passed to dat-node's joinNetwork().
   */
  constructor (url, {localPath, datOptions, netOptions} = {}) {

    // parse URL
    const urlp = url ? parseDatURL(url) : null
    this.url = urlp ? `dat://${urlp.hostname}` : null

    // load the archive
    this._archive = null
    this._checkout = null
    this._version = urlp && urlp.version ? +urlp.version : null
    this._localPath = localPath
    this._loadPromise = new Promise((resolve, reject) => {
      // TODO resolve DNS
      // no localPath means the archive lives in memory only
      const temp = !localPath
      let options = urlp ? {key: urlp.hostname, sparse: true, temp} : {indexing: false, temp}
      if (datOptions) {
        Object.keys(datOptions).forEach((key) => {
          options[key] = datOptions[key]
        })
      }
      // default to SLEEP storage (dat's internal format) unless the caller
      // opted into latest-files behavior via datOptions.latest
      if (typeof options.latest === 'undefined') {
        options.latest = false
      }
      Dat(localPath || ram, options, async (err, dat) => {
        if (err) {
          return reject(err)
        }
        dat.joinNetwork(netOptions)
        this.url = this.url || `dat://${dat.archive.key.toString('hex')}`
        this._archive = dat.archive
        // reads go through the checkout when a version was present in the URL
        this._checkout = (this._version) ? dat.archive.checkout(this._version) : dat.archive
        // exit the swarm and close all files
        this._close = async () => {
          await new Promise((resolve, reject) => {
            dat.close(err => {
              if (err) reject(err)
              else resolve()
            })
          })
        }

        // await initial metadata sync if not the owner
        if (!dat.archive.writable && !dat.archive.metadata.length) {
          // wait to receive a first update
          await new Promise((resolve, reject) => {
            dat.archive.metadata.update(err => {
              if (err) reject(err)
              else resolve()
            })
          })
        }

        resolve()
      })
    })
  }

  /**
   * Creates a new archive and writes its dat.json manifest.
   * The localPath must not exist, or must be an empty directory.
   */
  static async create ({localPath, datOptions, netOptions, title, description, type, author}) {
    // make sure the directory DNE or is empty
    if (localPath) {
      let st = await new Promise(resolve => fs.stat(localPath, (err, st) => resolve(st)))
      if (st) {
        if (!st.isDirectory()) {
          throw new Error('Cannot create Dat archive. (A file exists at the target location.)')
        }
        let listing = await new Promise(resolve => fs.readdir(localPath, (err, listing) => resolve(listing)))
        if (listing && listing.length > 0) {
          throw new Error('Cannot create Dat archive. (The target folder is not empty.)')
        }
      }
    }

    // create the dat
    var archive = new DatArchive(null, {localPath, datOptions, netOptions})
    await archive._loadPromise
    await pda.writeManifest(archive._archive, {url: archive.url, title, description, type, author})
    return archive
  }

  /**
   * Loads an existing archive from disk. Requires {localPath}.
   */
  static async load ({localPath, datOptions, netOptions}) {
    if (!localPath) {
      throw new Error('Must provide {localPath}.')
    }

    // make sure the directory exists
    var st = await new Promise(resolve => fs.stat(localPath, (err, st) => resolve(st)))
    if (!st || !st.isDirectory()) {
      throw new Error('Cannot load Dat archive. (No folder exists at the given location.)')
    }

    // load the dat
    var archive = new DatArchive(null, {localPath, datOptions, netOptions})
    await archive._loadPromise
    return archive
  }

  /**
   * Updates the dat.json manifest with any known settings props.
   * The `networked` setting is accepted but not yet implemented.
   */
  async configure (settings) {
    await this._loadPromise
    if (!settings || typeof settings !== 'object') throw new Error('Invalid argument')
    const knownProps = [
      'author',
      'description',
      'fallback_page',
      'links',
      'title',
      'type',
      'web_root'
    ]
    // only touch the manifest if at least one manifest prop was given
    if (knownProps.filter(prop => prop in settings).length > 0) {
      await pda.updateManifest(this._archive, settings)
    }
    if ('networked' in settings) {
      // TODO
    }
  }

  /**
   * Reads key, manifest and state info for the archive.
   * Note: mtime and size are not computed and are always 0.
   * @param {Object} [opts] - {timeout:}
   *
   * Back-compat: Beaker's API is getInfo([opts]); this method historically
   * took an unused `url` first argument. Both call shapes are accepted —
   * getInfo(opts) and getInfo(url, opts) — so getInfo({timeout:}) now
   * honors the timeout instead of silently ignoring it.
   */
  async getInfo (url, opts = {}) {
    if (url && typeof url === 'object') {
      opts = url
    }
    return timer(to(opts), async () => {
      await this._loadPromise

      // read manifest
      var manifest
      try {
        manifest = await pda.readManifest(this._checkout)
      } catch (e) {
        manifest = {}
      }

      // return
      return {
        key: this._archive.key.toString('hex'),
        url: this.url,
        isOwner: this._archive.writable,

        // state
        version: this._checkout.version,
        peers: this._archive.metadata.peers.length,
        mtime: 0,
        size: 0,

        // manifest
        title: manifest.title,
        description: manifest.description,
        type: manifest.type,
        author: manifest.author,
        links: manifest.links
      }
    })
  }

  // diff/commit/revert are staging-area concepts from Beaker; this module has
  // no staging area, so they are intentionally no-ops that resolve to [].
  async diff () {
    // noop
    return []
  }

  async commit () {
    // noop
    return []
  }

  async revert () {
    // noop
    return []
  }

  /**
   * Reads the change history of the archive.
   * @param {Object} [opts] - {start:, end:, reverse:, timeout:}
   */
  async history (opts = {}) {
    return timer(to(opts), async () => {
      await this._loadPromise
      var reverse = opts.reverse === true
      var {start, end} = opts

      // if reversing the output, modify start/end
      start = start || 0
      end = end || this._checkout.metadata.length
      if (reverse) {
        // swap values
        let t = start
        start = end
        end = t
        // start from the end
        start = this._checkout.metadata.length - start
        end = this._checkout.metadata.length - end
      }

      return new Promise((resolve, reject) => {
        var stream = this._checkout.history({live: false, start, end})
        stream.pipe(concat({encoding: 'object'}, values => {
          values = values.map(massageHistoryObj)
          if (reverse) values.reverse()
          resolve(values)
        }))
        stream.on('error', reject)
      })
    })
  }

  /** Stats a file/folder at the archive's checked-out version. */
  async stat (filepath, opts = {}) {
    filepath = massageFilepath(filepath)
    return timer(to(opts), async () => {
      await this._loadPromise
      return pda.stat(this._checkout, filepath)
    })
  }

  /** Reads a file at the archive's checked-out version. */
  async readFile (filepath, opts = {}) {
    filepath = massageFilepath(filepath)
    return timer(to(opts), async () => {
      await this._loadPromise
      return pda.readFile(this._checkout, filepath, opts)
    })
  }

  /**
   * Writes a file. Requires a writable, non-historic archive; the path must
   * be valid, not a directory-style path, and not the protected manifest.
   */
  async writeFile (filepath, data, opts = {}) {
    filepath = massageFilepath(filepath)
    return timer(to(opts), async () => {
      await this._loadPromise
      if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version')
      await assertWritePermission(this._archive)
      await assertValidFilePath(filepath)
      await assertUnprotectedFilePath(filepath)
      return pda.writeFile(this._archive, filepath, data, opts)
    })
  }

  /** Deletes a file. Requires a writable, non-historic archive. */
  async unlink (filepath) {
    filepath = massageFilepath(filepath)
    return timer(to(), async () => {
      await this._loadPromise
      if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version')
      await assertWritePermission(this._archive)
      await assertUnprotectedFilePath(filepath)
      return pda.unlink(this._archive, filepath)
    })
  }

  /**
   * Downloads the file/folder contents from the network.
   * No-op when this node owns the archive (data is already local).
   */
  async download (filepath, opts = {}) {
    filepath = massageFilepath(filepath)
    return timer(to(opts), async (checkin) => {
      await this._loadPromise
      if (this._version) throw new Error('Not yet supported: can\'t download() old versions yet. Sorry!') // TODO
      if (this._archive.writable) {
        return // no need to download
      }
      return pda.download(this._archive, filepath)
    })
  }

  /**
   * Lists a directory. With {stat: true} each entry is {name, stat}.
   */
  async readdir (filepath, opts = {}) {
    filepath = massageFilepath(filepath)
    return timer(to(opts), async () => {
      await this._loadPromise
      var names = await pda.readdir(this._checkout, filepath, opts)
      if (opts.stat) {
        for (let i = 0; i < names.length; i++) {
          names[i] = {
            name: names[i],
            stat: await pda.stat(this._checkout, path.join(filepath, names[i]))
          }
        }
      }
      return names
    })
  }

  /** Creates a directory. Requires a writable, non-historic archive. */
  async mkdir (filepath) {
    filepath = massageFilepath(filepath)
    return timer(to(), async () => {
      await this._loadPromise
      if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version')
      await assertWritePermission(this._archive)
      await assertValidPath(filepath)
      await assertUnprotectedFilePath(filepath)
      return pda.mkdir(this._archive, filepath)
    })
  }

  /** Removes a directory (optionally {recursive: true}). */
  async rmdir (filepath, opts = {}) {
    return timer(to(opts), async () => {
      filepath = massageFilepath(filepath)
      await this._loadPromise
      if (this._version) throw new ArchiveNotWritableError('Cannot modify a historic version')
      await assertUnprotectedFilePath(filepath)
      return pda.rmdir(this._archive, filepath, opts)
    })
  }

  /**
   * Watches files for changes. Usage: watch(), watch(pathSpec),
   * watch(onInvalidated), or watch(pathSpec, onInvalidated).
   * NOTE(review): does not await _loadPromise, so calling before the archive
   * has loaded will see this._archive === null — confirm callers always wait.
   */
  watch (pathSpec = null, onInvalidated = null) {
    // usage: (onInvalidated)
    if (typeof pathSpec === 'function') {
      onInvalidated = pathSpec
      pathSpec = null
    }

    var evts = toEventTarget(pda.watch(this._archive, pathSpec))
    if (onInvalidated) {
      evts.addEventListener('invalidated', onInvalidated)
    }
    return evts
  }

  /** @deprecated use watch() instead. */
  createFileActivityStream (pathPattern) {
    console.warn('node-dat-archive: The DatArchive createFileActivityStream() API has been deprecated, use watch() instead.')
    return this.watch(pathPattern)
  }

  /** Emits download/upload/sync events for metadata and content feeds. */
  createNetworkActivityStream () {
    return toEventTarget(pda.createNetworkActivityStream(this._archive))
  }

  /** Resolves a dat name (DNS shortname) to its archive key. */
  static async resolveName (name) {
    return datDns.resolveName(name)
  }
}
341 |
342 | module.exports = DatArchive
343 |
344 | // internal helpers
345 | // =
346 |
// Guard: userland is not allowed to edit protected files directly.
// The only protected file is the dat.json manifest at the archive root.
function assertUnprotectedFilePath (filepath) {
  const manifestPath = '/' + DAT_MANIFEST_FILENAME
  if (filepath === manifestPath) {
    throw new ProtectedFileNotWritableError()
  }
}
353 |
// Guard: writes require the archive's private key to be present locally
// (hyperdrive exposes that as `archive.writable`).
async function assertWritePermission (archive) {
  if (archive.writable) {
    return true
  }
  throw new ArchiveNotWritableError()
}
361 |
// Guard: a file path must not look like a directory path, and must only
// contain characters accepted by assertValidPath.
async function assertValidFilePath (filepath) {
  const endsWithSlash = filepath.slice(-1) === '/'
  if (endsWithSlash) {
    throw new InvalidPathError('Files can not have a trailing slash')
  }
  await assertValidPath(filepath)
}
368 |
// Guard: the path must match the whitelist of allowed characters.
async function assertValidPath (fileOrFolderPath) {
  const ok = DAT_VALID_PATH_REGEX.test(fileOrFolderPath)
  if (!ok) {
    throw new InvalidPathError('Path contains invalid characters')
  }
}
374 |
// Rename hyperdrive's `name` field to `path` for the Beaker-compatible shape.
function massageHistoryObj (entry) {
  return {
    path: entry.name,
    version: entry.version,
    type: entry.type
  }
}
378 |
/**
 * Normalizes a user-supplied path: percent-decodes it (so URL-style paths
 * like '/a%20b' work) and guarantees a leading slash.
 * Fix: decodeURIComponent throws URIError on malformed escapes (e.g. a
 * filename containing a literal '%' not followed by two hex digits); fall
 * back to the raw path instead of crashing the whole API call.
 * @param {string} [filepath] - may be undefined/empty; normalizes to '/'.
 * @returns {string} absolute, decoded path.
 */
function massageFilepath (filepath) {
  filepath = filepath || ''
  try {
    filepath = decodeURIComponent(filepath)
  } catch (e) {
    // not valid percent-encoding; use the path as given
  }
  if (!filepath.startsWith('/')) {
    filepath = '/' + filepath
  }
  return filepath
}
387 |
--------------------------------------------------------------------------------
/lib/const.js:
--------------------------------------------------------------------------------
// url file paths
// Whitelist of characters allowed in an archive file/folder path.
// Case-insensitive: letters, digits, RFC 3986 unreserved/sub-delim marks,
// ':' '@' '/', and whitespace (dat filenames may contain spaces).
exports.DAT_VALID_PATH_REGEX = /^[a-z0-9-._~!$&'()*+,;=:@/\s]+$/i

// dat settings
// name of the manifest file at the archive root (protected from userland writes)
exports.DAT_MANIFEST_FILENAME = 'dat.json'
// default timeout (ms) applied to API calls when no {timeout:} is given
exports.DEFAULT_DAT_API_TIMEOUT = 5e3
7 |
--------------------------------------------------------------------------------
/lib/util.js:
--------------------------------------------------------------------------------
1 | const {TimeoutError} = require('beaker-error-constants')
2 | const EventTarget = require('dom-event-target')
3 |
4 | exports.datDns = require('dat-dns')()
5 |
6 | exports.timer = function (ms, fn) {
7 | var currentAction
8 | var isTimedOut = false
9 |
10 | // no timeout?
11 | if (!ms) return fn(() => false)
12 |
13 | return new Promise((resolve, reject) => {
14 | // start the timer
15 | const timer = setTimeout(() => {
16 | isTimedOut = true
17 | reject(new TimeoutError(currentAction ? `Timed out while ${currentAction}` : undefined))
18 | }, ms)
19 |
20 | // call the fn to get the promise
21 | var promise = fn(action => {
22 | if (action) currentAction = action
23 | return isTimedOut
24 | })
25 |
26 | // wrap the promise
27 | promise.then(
28 | val => {
29 | clearTimeout(timer)
30 | resolve(val)
31 | },
32 | err => {
33 | clearTimeout(timer)
34 | reject(err)
35 | }
36 | )
37 | })
38 | }
39 |
40 | exports.toEventTarget = function (es) {
41 | var target = new EventTarget()
42 | es.on('data', ([event, args]) => target.send(event, args))
43 | target.close = es.destroy.bind(es)
44 | return target
45 | }
46 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "node-dat-archive",
3 | "version": "2.2.0",
4 | "description": "A nodejs API for Dat which is compatible with Beaker's DatArchive API",
5 | "main": "index.js",
6 | "scripts": {
7 | "test": "ava -s test/*.js"
8 | },
9 | "repository": {
10 | "type": "git",
11 | "url": "git+ssh://git@github.com/beakerbrowser/node-dat-archive.git"
12 | },
13 | "keywords": [
14 | "dat",
15 | "beaker",
16 | "p2p"
17 | ],
18 | "author": "Paul Frazee ",
19 | "license": "MIT",
20 | "bugs": {
21 | "url": "https://github.com/beakerbrowser/node-dat-archive/issues"
22 | },
23 | "homepage": "https://github.com/beakerbrowser/node-dat-archive#readme",
24 | "dependencies": {
25 | "beaker-error-constants": "^1.2.0",
26 | "concat-stream": "^1.6.0",
27 | "dat-dns": "^2.0.0",
28 | "dat-node": "^3.5.3",
29 | "dom-event-target": "^1.0.0",
30 | "parse-dat-url": "^3.0.1",
31 | "pauls-dat-api": "^8.0.3"
32 | },
33 | "devDependencies": {
34 | "ava": "^0.25.0",
35 | "tempy": "^0.1.0"
36 | }
37 | }
38 |
--------------------------------------------------------------------------------
/test/api-in-memory.js:
--------------------------------------------------------------------------------
1 | const test = require('ava')
2 | const os = require('os')
3 | const path = require('path')
4 | const fs = require('fs')
5 | const tempy = require('tempy')
6 | const {shareDat, createDat} = require('./lib/dat-helpers')
7 | const DatArchive = require('../')
8 |
9 | var testStaticDat, testStaticDatURL
10 | var createdArchive
11 | var fakeDatURL = 'dat://' + ('f'.repeat(64)) + '/'
12 | var beakerPng = fs.readFileSync(__dirname + '/scaffold/test-static-dat/beaker.png')
13 |
14 | test.before(async t => {
15 | // share the test static dat
16 | testStaticDat = await shareDat(__dirname + '/scaffold/test-static-dat')
17 | testStaticDatURL = 'dat://' + testStaticDat.archive.key.toString('hex') + '/'
18 | })
19 |
20 | // tests
21 | //
22 |
23 | test('archive.readdir', async t => {
24 | var archive = new DatArchive(testStaticDatURL)
25 |
26 | // root dir
27 | let listing1 = await archive.readdir('/')
28 | t.deepEqual(listing1.sort(), ['beaker.png', 'hello.txt', 'subdir'])
29 |
30 | // subdir
31 | let listing2 = await archive.readdir('/subdir')
32 | t.deepEqual(listing2.sort(), ['hello.txt', 'space in the name.txt'])
33 |
34 | // root dir stat=true
35 | let listing3 = await archive.readdir('/', {stat: true})
36 | listing3 = listing3.sort()
37 | t.is(listing3[0].name, 'beaker.png')
38 | t.truthy(listing3[0].stat)
39 | t.is(listing3[1].name, 'hello.txt')
40 | t.truthy(listing3[1].stat)
41 | t.is(listing3[2].name, 'subdir')
42 | t.truthy(listing3[2].stat)
43 |
44 | // subdir stat=true
45 | let listing4 = await archive.readdir('/subdir', {stat: true})
46 | listing4 = listing4.sort()
47 | t.is(listing4[0].name, 'hello.txt')
48 | t.truthy(listing4[0].stat)
49 | t.is(listing4[1].name, 'space in the name.txt')
50 | t.truthy(listing4[1].stat)
51 | })
52 |
53 | test('archive.readFile', async t => {
54 | var archive = new DatArchive(testStaticDatURL)
55 |
56 | // read utf8
57 | var helloTxt = await archive.readFile('hello.txt')
58 | t.deepEqual(helloTxt, 'hello')
59 |
60 | // read utf8 2
61 | var helloTxt2 = await archive.readFile('/subdir/hello.txt', 'utf8')
62 | t.deepEqual(helloTxt2, 'hi')
63 |
64 | // read utf8 when spaces are in the name
65 | var helloTxt2 = await archive.readFile('/subdir/space in the name.txt', 'utf8')
66 | t.deepEqual(helloTxt2, 'hi')
67 |
68 | // read hex
69 | var beakerPngHex = await archive.readFile('beaker.png', 'hex')
70 | t.deepEqual(beakerPngHex, beakerPng.toString('hex'))
71 |
72 | // read base64
73 | var beakerPngBase64 = await archive.readFile('beaker.png', 'base64')
74 | t.deepEqual(beakerPngBase64, beakerPng.toString('base64'))
75 |
76 | // read binary
77 | var beakerPngBinary = await archive.readFile('beaker.png', 'binary')
78 | t.truthy(beakerPng.equals(beakerPngBinary))
79 |
80 | // timeout: read an archive that does not exist
81 | var badArchive = new DatArchive(fakeDatURL)
82 | await t.throws(badArchive.readFile('hello.txt', { timeout: 500 }))
83 | })
84 |
85 | test('archive.stat', async t => {
86 | var archive = new DatArchive(testStaticDatURL)
87 |
88 | // stat root file
89 | var entry = await archive.stat('hello.txt')
90 | t.deepEqual(entry.isFile(), true, 'root file')
91 |
92 | // stat subdir file
93 | var entry = await archive.stat('subdir/hello.txt')
94 | t.deepEqual(entry.isFile(), true, 'subdir file')
95 |
96 | // stat subdir
97 | var entry = await archive.stat('subdir')
98 | t.deepEqual(entry.isDirectory(), true, 'subdir')
99 |
100 | // stat non-existent file
101 | await t.throws(archive.stat('notfound'))
102 |
103 | // stat alt-formed path
104 | var entry = await archive.stat('/hello.txt')
105 | t.deepEqual(entry.isFile(), true, 'alt-formed path')
106 |
107 | // stat path w/spaces in it
108 | var entry = await archive.stat('/subdir/space in the name.txt')
109 | t.deepEqual(entry.isFile(), true, 'path w/spaces in it')
110 |
111 | // stat path w/spaces in it
112 | var entry = await archive.stat('/subdir/space%20in%20the%20name.txt')
113 | t.deepEqual(entry.isFile(), true, 'path w/spaces in it')
114 |
115 | // timeout: stat an archive that does not exist
116 | var badArchive = new DatArchive(fakeDatURL)
117 | await t.throws(badArchive.stat('hello.txt', { timeout: 500 }))
118 | })
119 |
120 | test('DatArchive.create', async t => {
121 | // create it
122 | createdArchive = await DatArchive.create({
123 | title: 'The Title',
124 | description: 'The Description'
125 | })
126 |
127 | // check the dat.json
128 | var manifest = JSON.parse(await createdArchive.readFile('dat.json'))
129 | t.deepEqual(manifest.title, 'The Title')
130 | t.deepEqual(manifest.description, 'The Description')
131 | })
132 |
133 | test('archive.configure', async t => {
134 | // configure it
135 | await createdArchive.configure({
136 | title: 'The New Title',
137 | description: 'The New Description'
138 | })
139 |
140 | // check the dat.json
141 | var manifest = JSON.parse(await createdArchive.readFile('dat.json'))
142 | t.deepEqual(manifest.title, 'The New Title')
143 | t.deepEqual(manifest.description, 'The New Description')
144 | })
145 |
146 | test('archive.writeFile', async t => {
147 | async function dotest (filename, content, encoding) {
148 | // write to the top-level
149 | await createdArchive.writeFile(filename, content, encoding)
150 |
151 | // read it back
152 | var res = await createdArchive.readFile(filename, encoding)
153 | if (encoding === 'binary') {
154 | t.truthy(content.equals(res))
155 | } else {
156 | t.deepEqual(res, content)
157 | }
158 | }
159 |
160 | var beakerPng = fs.readFileSync(__dirname + '/scaffold/test-static-dat/beaker.png')
161 | await dotest('hello.txt', 'hello world', 'utf8')
162 | await dotest('beaker1.png', beakerPng, 'binary')
163 | await dotest('beaker2.png', beakerPng.toString('base64'), 'base64')
164 | await dotest('beaker3.png', beakerPng.toString('hex'), 'hex')
165 | })
166 |
167 | test('archive.writeFile gives an error for malformed names', async t => {
168 | await t.throws(createdArchive.writeFile('/', 'hello world'))
169 | await t.throws(createdArchive.writeFile('/subdir/hello.txt/', 'hello world'))
170 | await t.throws(createdArchive.writeFile('hello`.txt', 'hello world'))
171 | })
172 |
173 | test('archive.writeFile protects the manifest', async t => {
174 | await t.throws(createdArchive.writeFile('dat.json', 'hello world'))
175 | })
176 |
177 | test('archive.mkdir', async t => {
178 | await createdArchive.mkdir('subdir')
179 | var res = await createdArchive.stat('subdir')
180 | t.deepEqual(res.isDirectory(), true)
181 | })
182 |
183 | test('archive.writeFile writes to subdirectories', async t => {
184 | await createdArchive.writeFile('subdir/hello.txt', 'hello world', 'utf8')
185 | var res = await createdArchive.readFile('subdir/hello.txt', 'utf8')
186 | t.deepEqual(res, 'hello world')
187 | })
188 |
189 | test('versioned reads and writes', async t => {
190 | // create a fresh dat
191 | var archive = await DatArchive.create({title: 'Another Test Dat'})
192 |
193 | // do some writes
194 | await archive.writeFile('/one.txt', 'a', 'utf8')
195 | await archive.writeFile('/two.txt', 'b', 'utf8')
196 | await archive.writeFile('/one.txt', 'c', 'utf8')
197 |
198 | // check history
199 | var history = await archive.history()
200 | if (history.length !== 4) {
201 | console.log('Weird history', history)
202 | }
203 | t.deepEqual(history.length, 4)
204 |
205 | // helper
206 | function checkout (v) {
207 | return new DatArchive(archive.url + v)
208 | }
209 |
210 | // read back versions
211 | t.deepEqual((await checkout('+1').readdir('/')).length, 1)
212 | t.deepEqual((await checkout('+2').readdir('/')).length, 2)
213 | t.deepEqual((await checkout('+3').readdir('/')).length, 3)
214 | t.deepEqual((await checkout('+2').readFile('/one.txt')), 'a')
215 | t.deepEqual((await checkout('+4').readFile('/one.txt')), 'c')
216 | var statRev2 = await checkout('+2').stat('/one.txt')
217 | var statRev4 = await checkout('+4').stat('/one.txt')
218 | t.truthy(statRev2.offset < statRev4.offset)
219 | })
220 |
221 | test('Fail to write to unowned archives', async t => {
222 | var archive = new DatArchive(testStaticDatURL)
223 | await t.throws(archive.writeFile('/denythis.txt', 'hello world', 'utf8'))
224 | await t.throws(archive.mkdir('/denythis'))
225 | })
226 |
227 | test('archive.getInfo', async t => {
228 | var archive = new DatArchive(testStaticDatURL)
229 | var info = await archive.getInfo()
230 | t.deepEqual(info.isOwner, false)
231 | t.deepEqual(info.version, 4)
232 | })
233 |
234 | test('archive.download', async t => {
235 | var archive = new DatArchive(testStaticDatURL)
236 |
237 | // ensure not yet downloaded
238 | var res = await archive.stat('/hello.txt')
239 | t.deepEqual(res.downloaded, 0)
240 |
241 | // download
242 | await archive.download('/hello.txt')
243 |
244 | // ensure downloaded
245 | var res = await archive.stat('/hello.txt')
246 | t.deepEqual(res.downloaded, res.blocks)
247 |
248 | // ensure not yet downloaded
249 | var res = await archive.stat('/subdir/hello.txt')
250 | t.deepEqual(res.downloaded, 0)
251 |
252 | // download
253 | await archive.download('/')
254 |
255 | // ensure downloaded
256 | var res = await archive.stat('/subdir/hello.txt')
257 | t.deepEqual(res.downloaded, res.blocks)
258 | })
259 |
260 | test('archive.watch', async t => {
261 | // create a fresh dat
262 | var archive = await DatArchive.create({title: 'Another Test Dat'})
263 | await archive._loadPromise
264 |
265 | // start the stream
266 | var res = []
267 | var events = archive.watch()
268 | events.addEventListener('changed', function ({path}) {
269 | res.push(path)
270 | })
271 |
272 | // make changes
273 | await archive.writeFile('/a.txt', 'one', 'utf8')
274 | await archive.writeFile('/b.txt', 'one', 'utf8')
275 | await archive.writeFile('/a.txt', 'one', 'utf8')
276 | await archive.writeFile('/a.txt', 'two', 'utf8')
277 | await archive.writeFile('/b.txt', 'two', 'utf8')
278 | await archive.writeFile('/c.txt', 'one', 'utf8')
279 |
280 | var n = 0
281 | while (res.length !== 6 && ++n < 10) {
282 | await sleep(500)
283 | }
284 | t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
285 | })
286 |
// Passing a callback directly to watch() uses the onInvalidated shorthand:
// every write should surface the changed path.
test('archive.watch (onInvalidated)', async t => {
// create a fresh dat
var archive = await DatArchive.create({title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
archive.watch(function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

// poll up to ~5s (10 x 500ms) for all 6 change events to arrive
var n = 0
while (res.length !== 6 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
})

// watch() with an exact path only reports changes to that file.
test('archive.watch (match filename)', async t => {
// create a fresh dat
var archive = await DatArchive.create({title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
archive.watch('/a.txt', function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

// poll up to ~5s for the 3 /a.txt events
var n = 0
while (res.length !== 3 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/a.txt', '/a.txt'])
})

// watch() with a glob pattern matches every *.txt write.
test('archive.watch (glob)', async t => {
// create a fresh dat
var archive = await DatArchive.create({title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
archive.watch('/*.txt', function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

// poll up to ~5s for all 6 events
var n = 0
while (res.length !== 6 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
})

// watch() with an array of paths reports only those files (/b.txt is excluded).
test('archive.watch (array)', async t => {
// create a fresh dat
var archive = await DatArchive.create({title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
archive.watch(['/a.txt', '/c.txt'], function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

// poll up to ~5s for the 4 matching events (3x /a.txt + 1x /c.txt)
var n = 0
while (res.length !== 4 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/a.txt', '/a.txt', '/c.txt'])
})
390 |
// Exercises createNetworkActivityStream: counts 'download' events and waits
// for 'sync' (feed fully downloaded) on both the metadata and content feeds.
test('archive.createNetworkActivityStream', async t => {
// share the test static dat
var testStaticDat2 = await createDat()
var testStaticDat2URL = 'dat://' + testStaticDat2.archive.key.toString('hex')
var archive = new DatArchive(testStaticDat2URL)
await archive._loadPromise

// start the download & network stream
// res accumulates per-feed counters; res.gotPeer is set on first peer event
var res = {
metadata: {
down: 0,
all: false
},
content: {
down: 0,
all: false
}
}
var events = archive.createNetworkActivityStream()
events.addEventListener('network-changed', () => {
res.gotPeer = true
})
events.addEventListener('download', ({feed}) => {
res[feed].down++
})
events.addEventListener('sync', ({feed}) => {
res[feed].all = true
})

// do writes
// importFiles uses a completion callback, so wrap it in a Promise
await new Promise(resolve => {
testStaticDat2.importFiles(__dirname + '/scaffold/test-static-dat', resolve)
})

// download
await archive.download()

// poll up to ~5s for the content feed to finish syncing
var n = 0
while (!res.content.all && ++n < 10) {
await sleep(500)
}
t.truthy(res.metadata.down > 0)
t.truthy(res.content.down > 0)
t.deepEqual(res.metadata.all, true)
t.deepEqual(res.content.all, true)
})
437 |
// Resolve after `time` milliseconds; used by the tests above to poll
// for asynchronous archive events.
function sleep (time) {
  return new Promise((done) => {
    setTimeout(done, time)
  })
}
441 |
--------------------------------------------------------------------------------
/test/api-latest.js:
--------------------------------------------------------------------------------
1 | const test = require('ava')
2 | const os = require('os')
3 | const path = require('path')
4 | const fs = require('fs')
5 | const tempy = require('tempy')
6 | const {shareDat, createDat} = require('./lib/dat-helpers')
7 | const DatArchive = require('../')
8 |
// Module-level fixtures shared by every test in this file.
var testStaticDat, testStaticDatURL
var createdArchive
// well-formed but unreachable dat URL, used to exercise timeouts
var fakeDatURL = 'dat://' + ('f'.repeat(64)) + '/'
var beakerPng = fs.readFileSync(__dirname + '/scaffold/test-static-dat/beaker.png')

// Share the static fixture dat once before the suite runs.
test.before(async t => {
// share the test static dat
testStaticDat = await shareDat(__dirname + '/scaffold/test-static-dat')
testStaticDatURL = 'dat://' + testStaticDat.archive.key.toString('hex') + '/'
})
19 |
20 | // tests
21 | //
22 |
// readdir: plain name listings and {stat: true} listings, in latest mode.
test('archive.readdir', async t => {
var archive = new DatArchive(testStaticDatURL, {datOptions: {latest: true}, localPath: tempy.directory()})

// root dir
let listing1 = await archive.readdir('/')
t.deepEqual(listing1.sort(), ['beaker.png', 'hello.txt', 'subdir'])

// subdir
let listing2 = await archive.readdir('/subdir')
t.deepEqual(listing2.sort(), ['hello.txt', 'space in the name.txt'])

// root dir stat=true
// each entry is {name, stat}; sort() relies on stable insertion order here
let listing3 = await archive.readdir('/', {stat: true})
listing3 = listing3.sort()
t.is(listing3[0].name, 'beaker.png')
t.truthy(listing3[0].stat)
t.is(listing3[1].name, 'hello.txt')
t.truthy(listing3[1].stat)
t.is(listing3[2].name, 'subdir')
t.truthy(listing3[2].stat)

// subdir stat=true
let listing4 = await archive.readdir('/subdir', {stat: true})
listing4 = listing4.sort()
t.is(listing4[0].name, 'hello.txt')
t.truthy(listing4[0].stat)
t.is(listing4[1].name, 'space in the name.txt')
t.truthy(listing4[1].stat)
})
52 |
// readFile in every supported encoding, plus the timeout path on an
// unreachable archive.
test('archive.readFile', async t => {
  var archive = new DatArchive(testStaticDatURL, {datOptions: {latest: true}, localPath: tempy.directory()})

  // read utf8 (default encoding)
  var helloTxt = await archive.readFile('hello.txt')
  t.deepEqual(helloTxt, 'hello')

  // read utf8 from a subdirectory
  var helloTxt2 = await archive.readFile('/subdir/hello.txt', 'utf8')
  t.deepEqual(helloTxt2, 'hi')

  // read utf8 when spaces are in the name
  // (fixed: this was a duplicate `var helloTxt2` declaration shadowing the one above)
  var helloSpacesTxt = await archive.readFile('/subdir/space in the name.txt', 'utf8')
  t.deepEqual(helloSpacesTxt, 'hi')

  // read hex
  var beakerPngHex = await archive.readFile('beaker.png', 'hex')
  t.deepEqual(beakerPngHex, beakerPng.toString('hex'))

  // read base64
  var beakerPngBase64 = await archive.readFile('beaker.png', 'base64')
  t.deepEqual(beakerPngBase64, beakerPng.toString('base64'))

  // read binary (returns a Buffer)
  var beakerPngBinary = await archive.readFile('beaker.png', 'binary')
  t.truthy(beakerPng.equals(beakerPngBinary))

  // timeout: read an archive that does not exist
  var badArchive = new DatArchive(fakeDatURL, {datOptions: {latest: true}, localPath: tempy.directory()})
  await t.throws(badArchive.readFile('hello.txt', { timeout: 500 }))
})
84 |
// stat: files, directories, alternate path spellings, URL-encoded paths,
// missing entries, and the timeout path.
test('archive.stat', async t => {
var archive = new DatArchive(testStaticDatURL, {datOptions: {latest: true}, localPath: tempy.directory()})

// stat root file
var entry = await archive.stat('hello.txt')
t.deepEqual(entry.isFile(), true, 'root file')

// stat subdir file
var entry = await archive.stat('subdir/hello.txt')
t.deepEqual(entry.isFile(), true, 'subdir file')

// stat subdir
var entry = await archive.stat('subdir')
t.deepEqual(entry.isDirectory(), true, 'subdir')

// stat non-existent file
await t.throws(archive.stat('notfound'))

// stat alt-formed path
var entry = await archive.stat('/hello.txt')
t.deepEqual(entry.isFile(), true, 'alt-formed path')

// stat path w/spaces in it
var entry = await archive.stat('/subdir/space in the name.txt')
t.deepEqual(entry.isFile(), true, 'path w/spaces in it')

// stat path w/spaces in it (percent-encoded form must also resolve)
var entry = await archive.stat('/subdir/space%20in%20the%20name.txt')
t.deepEqual(entry.isFile(), true, 'path w/spaces in it')

// timeout: stat an archive that does not exist
var badArchive = new DatArchive(fakeDatURL, {datOptions: {latest: true}, localPath: tempy.directory()})
await t.throws(badArchive.stat('hello.txt', { timeout: 500 }))
})
119 |
// create: manifest fields land in dat.json; `type` is normalized to an array.
// The archive is kept in `createdArchive` for the later write tests.
test('DatArchive.create', async t => {
// create it
createdArchive = await DatArchive.create({
datOptions: {latest: true},
localPath: tempy.directory(),
title: 'The Title',
description: 'The Description',
type: 'dataset',
author: {name: 'Bob', url: 'dat://ffffffffffffffffffffffffffffffff'}
})

// check the dat.json
var manifest = JSON.parse(await createdArchive.readFile('dat.json'))
t.deepEqual(manifest.title, 'The Title')
t.deepEqual(manifest.description, 'The Description')
t.deepEqual(manifest.type, ['dataset'])
t.deepEqual(manifest.author, {name: 'Bob', url: 'dat://ffffffffffffffffffffffffffffffff'})
})

// load: re-opening the same localPath yields the same manifest.
test('DatArchive.load', async t => {
// create it
var loadedArchive = await DatArchive.load({
datOptions: {latest: true},
localPath: createdArchive._localPath
})

// check the dat.json
var manifest = JSON.parse(await loadedArchive.readFile('dat.json'))
t.deepEqual(manifest.title, 'The Title')
t.deepEqual(manifest.description, 'The Description')
t.deepEqual(manifest.type, ['dataset'])
t.deepEqual(manifest.author, {name: 'Bob', url: 'dat://ffffffffffffffffffffffffffffffff'})
})

// configure: rewrites the manifest fields in place.
test('archive.configure', async t => {
// configure it
await createdArchive.configure({
title: 'The New Title',
description: 'The New Description',
type: ['dataset', 'foo'],
author: {name: 'Robert', url: 'dat://ffffffffffffffffffffffffffffffff'}
})

// check the dat.json
var manifest = JSON.parse(await createdArchive.readFile('dat.json'))
t.deepEqual(manifest.title, 'The New Title')
t.deepEqual(manifest.description, 'The New Description')
t.deepEqual(manifest.type, ['dataset', 'foo'])
t.deepEqual(manifest.author, {name: 'Robert', url: 'dat://ffffffffffffffffffffffffffffffff'})
})
170 |
// writeFile round-trip in every supported encoding.
test('archive.writeFile', async t => {
  // Writes `content` with the given encoding, reads it back, and asserts
  // the value survives the round trip.
  async function dotest (filename, content, encoding) {
    // write to the top-level
    await createdArchive.writeFile(filename, content, encoding)

    // read it back
    var res = await createdArchive.readFile(filename, encoding)
    if (encoding === 'binary') {
      t.truthy(content.equals(res))
    } else {
      t.deepEqual(res, content)
    }
  }

  // reuse the module-level beakerPng fixture; the previous version re-read
  // the same file from disk into a shadowing local
  await dotest('hello.txt', 'hello world', 'utf8')
  await dotest('beaker1.png', beakerPng, 'binary')
  await dotest('beaker2.png', beakerPng.toString('base64'), 'base64')
  await dotest('beaker3.png', beakerPng.toString('hex'), 'hex')
})
191 |
// writeFile rejects directory paths, trailing slashes, and invalid characters.
test('archive.writeFile gives an error for malformed names', async t => {
await t.throws(createdArchive.writeFile('/', 'hello world'))
await t.throws(createdArchive.writeFile('/subdir/hello.txt/', 'hello world'))
await t.throws(createdArchive.writeFile('hello`.txt', 'hello world'))
})

// dat.json may only be changed via configure(), never via writeFile().
test('archive.writeFile protects the manifest', async t => {
await t.throws(createdArchive.writeFile('dat.json', 'hello world'))
})

// mkdir creates a directory entry visible to stat().
test('archive.mkdir', async t => {
await createdArchive.mkdir('subdir')
var res = await createdArchive.stat('subdir')
t.deepEqual(res.isDirectory(), true)
})

// writeFile into the directory created above.
test('archive.writeFile writes to subdirectories', async t => {
await createdArchive.writeFile('subdir/hello.txt', 'hello world', 'utf8')
var res = await createdArchive.readFile('subdir/hello.txt', 'utf8')
t.deepEqual(res, 'hello world')
})

// Archives we don't hold the secret key for must reject mutations.
test('Fail to write to unowned archives', async t => {
var archive = new DatArchive(testStaticDatURL, {datOptions: {latest: true}, localPath: tempy.directory()})
await t.throws(archive.writeFile('/denythis.txt', 'hello world', 'utf8'))
await t.throws(archive.mkdir('/denythis'))
})
219 |
// getInfo on a remote archive: not owned, and at the fixture's known version.
test('archive.getInfo', async t => {
var archive = new DatArchive(testStaticDatURL, {datOptions: {latest: true}, localPath: tempy.directory()})
var info = await archive.getInfo()
t.deepEqual(info.isOwner, false)
// the static fixture was written in 4 operations
t.deepEqual(info.version, 4)
})

// download('/file') fetches one file's blocks; download('/') fetches everything.
test('archive.download', async t => {
var archive = new DatArchive(testStaticDatURL, {datOptions: {latest: true}, localPath: tempy.directory()})

// ensure not yet downloaded
var res = await archive.stat('/hello.txt')
t.deepEqual(res.downloaded, 0)

// download
await archive.download('/hello.txt')

// ensure downloaded
var res = await archive.stat('/hello.txt')
t.deepEqual(res.downloaded, res.blocks)

// ensure not yet downloaded
var res = await archive.stat('/subdir/hello.txt')
t.deepEqual(res.downloaded, 0)

// download
await archive.download('/')

// ensure downloaded
var res = await archive.stat('/subdir/hello.txt')
t.deepEqual(res.downloaded, res.blocks)
})
252 |
// watch() with no arguments returns an event emitter firing 'changed'
// for every write. All watch tests below poll up to ~5s (10 x 500ms)
// for the expected number of events before asserting.
test('archive.watch', async t => {
// create a fresh dat
var archive = await DatArchive.create({datOptions: {latest: true}, localPath: tempy.directory(), title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
var events = archive.watch()
events.addEventListener('changed', function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

var n = 0
while (res.length !== 6 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
})

// Passing a callback directly uses the onInvalidated shorthand.
test('archive.watch (onInvalidated)', async t => {
// create a fresh dat
var archive = await DatArchive.create({datOptions: {latest: true}, localPath: tempy.directory(), title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
archive.watch(function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

var n = 0
while (res.length !== 6 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
})

// watch() with an exact filename only reports that file's changes.
test('archive.watch (match filename)', async t => {
// create a fresh dat
var archive = await DatArchive.create({datOptions: {latest: true}, localPath: tempy.directory(), title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
archive.watch('/a.txt', function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

var n = 0
while (res.length !== 3 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/a.txt', '/a.txt'])
})

// watch() with a glob pattern matches every *.txt write.
test('archive.watch (glob)', async t => {
// create a fresh dat
var archive = await DatArchive.create({datOptions: {latest: true}, localPath: tempy.directory(), title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
archive.watch('/*.txt', function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

var n = 0
while (res.length !== 6 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
})

// watch() with an array of paths reports only those files (/b.txt excluded).
test('archive.watch (array)', async t => {
// create a fresh dat
var archive = await DatArchive.create({datOptions: {latest: true}, localPath: tempy.directory(), title: 'Another Test Dat'})
await archive._loadPromise

// start the stream
var res = []
archive.watch(['/a.txt', '/c.txt'], function ({path}) {
res.push(path)
})

// make changes
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/b.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'one', 'utf8')
await archive.writeFile('/a.txt', 'two', 'utf8')
await archive.writeFile('/b.txt', 'two', 'utf8')
await archive.writeFile('/c.txt', 'one', 'utf8')

var n = 0
while (res.length !== 4 && ++n < 10) {
await sleep(500)
}
t.deepEqual(res, ['/a.txt', '/a.txt', '/a.txt', '/c.txt'])
})
383 |
// Exercises createNetworkActivityStream in latest mode: counts 'download'
// events and waits for 'sync' on the metadata and content feeds.
test('archive.createNetworkActivityStream', async t => {
// share the test static dat
var testStaticDat2 = await createDat()
var testStaticDat2URL = 'dat://' + testStaticDat2.archive.key.toString('hex')
var archive = new DatArchive(testStaticDat2URL, {datOptions: {latest: true}, localPath: tempy.directory()})
await archive._loadPromise

// start the download & network stream
// res accumulates per-feed counters; res.gotPeer is set on first peer event
var res = {
metadata: {
down: 0,
all: false
},
content: {
down: 0,
all: false
}
}
var events = archive.createNetworkActivityStream()
events.addEventListener('network-changed', () => {
res.gotPeer = true
})
events.addEventListener('download', ({feed}) => {
res[feed].down++
})
events.addEventListener('sync', ({feed}) => {
res[feed].all = true
})

// do writes
// importFiles uses a completion callback, so wrap it in a Promise
await new Promise(resolve => {
testStaticDat2.importFiles(__dirname + '/scaffold/test-static-dat', resolve)
})

// download
await archive.download()

// poll up to ~5s for the content feed to finish syncing
var n = 0
while (!res.content.all && ++n < 10) {
await sleep(500)
}
t.truthy(res.metadata.down > 0)
t.truthy(res.content.down > 0)
t.deepEqual(res.metadata.all, true)
t.deepEqual(res.content.all, true)
})
430 |
// Resolve after `time` milliseconds; used by the tests above to poll
// for asynchronous archive events.
function sleep (time) {
  return new Promise((done) => {
    setTimeout(done, time)
  })
}
434 |
--------------------------------------------------------------------------------
/test/api.js:
--------------------------------------------------------------------------------
1 | const test = require('ava')
2 | const os = require('os')
3 | const path = require('path')
4 | const fs = require('fs')
5 | const tempy = require('tempy')
6 | const {shareDat, createDat} = require('./lib/dat-helpers')
7 | const DatArchive = require('../')
8 |
// Module-level fixtures shared by every test in this file.
var testStaticDat, testStaticDatURL
var createdArchive
// well-formed but unreachable dat URL, used to exercise timeouts
var fakeDatURL = 'dat://' + ('f'.repeat(64)) + '/'
var beakerPng = fs.readFileSync(__dirname + '/scaffold/test-static-dat/beaker.png')

// Share the static fixture dat once before the suite runs.
test.before(async t => {
// share the test static dat
testStaticDat = await shareDat(__dirname + '/scaffold/test-static-dat')
testStaticDatURL = 'dat://' + testStaticDat.archive.key.toString('hex') + '/'
})
19 |
20 | // tests
21 | //
22 |
// readdir: plain name listings and {stat: true} listings.
test('archive.readdir', async t => {
var archive = new DatArchive(testStaticDatURL, {localPath: tempy.directory()})

// root dir
let listing1 = await archive.readdir('/')
t.deepEqual(listing1.sort(), ['beaker.png', 'hello.txt', 'subdir'])

// subdir
let listing2 = await archive.readdir('/subdir')
t.deepEqual(listing2.sort(), ['hello.txt', 'space in the name.txt'])

// root dir stat=true
// each entry is {name, stat}; sort() relies on stable insertion order here
let listing3 = await archive.readdir('/', {stat: true})
listing3 = listing3.sort()
t.is(listing3[0].name, 'beaker.png')
t.truthy(listing3[0].stat)
t.is(listing3[1].name, 'hello.txt')
t.truthy(listing3[1].stat)
t.is(listing3[2].name, 'subdir')
t.truthy(listing3[2].stat)

// subdir stat=true
let listing4 = await archive.readdir('/subdir', {stat: true})
listing4 = listing4.sort()
t.is(listing4[0].name, 'hello.txt')
t.truthy(listing4[0].stat)
t.is(listing4[1].name, 'space in the name.txt')
t.truthy(listing4[1].stat)
})
52 |
// readFile in every supported encoding, plus the timeout path on an
// unreachable archive.
test('archive.readFile', async t => {
  var archive = new DatArchive(testStaticDatURL, {localPath: tempy.directory()})

  // read utf8 (default encoding)
  var helloTxt = await archive.readFile('hello.txt')
  t.deepEqual(helloTxt, 'hello')

  // read utf8 from a subdirectory
  var helloTxt2 = await archive.readFile('/subdir/hello.txt', 'utf8')
  t.deepEqual(helloTxt2, 'hi')

  // read utf8 when spaces are in the name
  // (fixed: this was a duplicate `var helloTxt2` declaration shadowing the one above)
  var helloSpacesTxt = await archive.readFile('/subdir/space in the name.txt', 'utf8')
  t.deepEqual(helloSpacesTxt, 'hi')

  // read hex
  var beakerPngHex = await archive.readFile('beaker.png', 'hex')
  t.deepEqual(beakerPngHex, beakerPng.toString('hex'))

  // read base64
  var beakerPngBase64 = await archive.readFile('beaker.png', 'base64')
  t.deepEqual(beakerPngBase64, beakerPng.toString('base64'))

  // read binary (returns a Buffer)
  var beakerPngBinary = await archive.readFile('beaker.png', 'binary')
  t.truthy(beakerPng.equals(beakerPngBinary))

  // timeout: read an archive that does not exist
  var badArchive = new DatArchive(fakeDatURL, {localPath: tempy.directory()})
  await t.throws(badArchive.readFile('hello.txt', { timeout: 500 }))
})
84 |
// stat: files, directories, alternate path spellings, URL-encoded paths,
// missing entries, and the timeout path.
test('archive.stat', async t => {
var archive = new DatArchive(testStaticDatURL, {localPath: tempy.directory()})

// stat root file
var entry = await archive.stat('hello.txt')
t.deepEqual(entry.isFile(), true, 'root file')

// stat subdir file
var entry = await archive.stat('subdir/hello.txt')
t.deepEqual(entry.isFile(), true, 'subdir file')

// stat subdir
var entry = await archive.stat('subdir')
t.deepEqual(entry.isDirectory(), true, 'subdir')

// stat non-existent file
await t.throws(archive.stat('notfound'))

// stat alt-formed path
var entry = await archive.stat('/hello.txt')
t.deepEqual(entry.isFile(), true, 'alt-formed path')

// stat path w/spaces in it
var entry = await archive.stat('/subdir/space in the name.txt')
t.deepEqual(entry.isFile(), true, 'path w/spaces in it')

// stat path w/spaces in it (percent-encoded form must also resolve)
var entry = await archive.stat('/subdir/space%20in%20the%20name.txt')
t.deepEqual(entry.isFile(), true, 'path w/spaces in it')

// timeout: stat an archive that does not exist
var badArchive = new DatArchive(fakeDatURL, {localPath: tempy.directory()})
await t.throws(badArchive.stat('hello.txt', { timeout: 500 }))
})
119 |
// create: manifest fields land in dat.json; `type` is normalized to an array.
// The archive is kept in `createdArchive` for the later write tests.
test('DatArchive.create', async t => {
// create it
createdArchive = await DatArchive.create({
localPath: tempy.directory(),
title: 'The Title',
description: 'The Description',
type: 'dataset',
author: {name: 'Bob', url: 'dat://ffffffffffffffffffffffffffffffff'}
})

// check the dat.json
var manifest = JSON.parse(await createdArchive.readFile('dat.json'))
t.deepEqual(manifest.title, 'The Title')
t.deepEqual(manifest.description, 'The Description')
t.deepEqual(manifest.type, ['dataset'])
t.deepEqual(manifest.author, {name: 'Bob', url: 'dat://ffffffffffffffffffffffffffffffff'})
})

// load: re-opening the same localPath yields the same manifest.
test('DatArchive.load', async t => {
// create it
var loadedArchive = await DatArchive.load({
localPath: createdArchive._localPath
})

// check the dat.json
var manifest = JSON.parse(await loadedArchive.readFile('dat.json'))
t.deepEqual(manifest.title, 'The Title')
t.deepEqual(manifest.description, 'The Description')
t.deepEqual(manifest.type, ['dataset'])
t.deepEqual(manifest.author, {name: 'Bob', url: 'dat://ffffffffffffffffffffffffffffffff'})
})

// configure: rewrites the manifest fields, including `links`.
test('archive.configure', async t => {
// configure it
await createdArchive.configure({
title: 'The New Title',
description: 'The New Description',
type: ['dataset', 'foo'],
author: {name: 'Robert', url: 'dat://ffffffffffffffffffffffffffffffff'},
links: { prev: [{ href: 'dat://example.com' }] }
})

// check the dat.json
var manifest = JSON.parse(await createdArchive.readFile('dat.json'))
t.deepEqual(manifest.title, 'The New Title')
t.deepEqual(manifest.description, 'The New Description')
t.deepEqual(manifest.type, ['dataset', 'foo'])
t.deepEqual(manifest.author, {name: 'Robert', url: 'dat://ffffffffffffffffffffffffffffffff'})
t.deepEqual(manifest.links, { prev: [{ href: 'dat://example.com' }] })
})
170 |
// writeFile round-trip in every supported encoding.
test('archive.writeFile', async t => {
  // Writes `content` with the given encoding, reads it back, and asserts
  // the value survives the round trip.
  async function dotest (filename, content, encoding) {
    // write to the top-level
    await createdArchive.writeFile(filename, content, encoding)

    // read it back
    var res = await createdArchive.readFile(filename, encoding)
    if (encoding === 'binary') {
      t.truthy(content.equals(res))
    } else {
      t.deepEqual(res, content)
    }
  }

  // reuse the module-level beakerPng fixture; the previous version re-read
  // the same file from disk into a shadowing local
  await dotest('hello.txt', 'hello world', 'utf8')
  await dotest('beaker1.png', beakerPng, 'binary')
  await dotest('beaker2.png', beakerPng.toString('base64'), 'base64')
  await dotest('beaker3.png', beakerPng.toString('hex'), 'hex')
})
191 |
// writeFile rejects directory paths, trailing slashes, and invalid characters.
test('archive.writeFile gives an error for malformed names', async t => {
await t.throws(createdArchive.writeFile('/', 'hello world'))
await t.throws(createdArchive.writeFile('/subdir/hello.txt/', 'hello world'))
await t.throws(createdArchive.writeFile('hello`.txt', 'hello world'))
})

// dat.json may only be changed via configure(), never via writeFile().
test('archive.writeFile protects the manifest', async t => {
await t.throws(createdArchive.writeFile('dat.json', 'hello world'))
})

// mkdir creates a directory entry visible to stat().
test('archive.mkdir', async t => {
await createdArchive.mkdir('subdir')
var res = await createdArchive.stat('subdir')
t.deepEqual(res.isDirectory(), true)
})

// writeFile into the directory created above.
test('archive.writeFile writes to subdirectories', async t => {
await createdArchive.writeFile('subdir/hello.txt', 'hello world', 'utf8')
var res = await createdArchive.readFile('subdir/hello.txt', 'utf8')
t.deepEqual(res, 'hello world')
})
213 |
// History grows one entry per write, and version-pinned checkouts
// (archive.url + '+N') see the archive as of that version.
test('versioned reads and writes', async t => {
  // create a fresh dat
  var archive = await DatArchive.create({localPath: tempy.directory(), title: 'Another Test Dat'})

  // do some writes
  await archive.writeFile('/one.txt', 'a', 'utf8')
  await archive.writeFile('/two.txt', 'b', 'utf8')
  await archive.writeFile('/one.txt', 'c', 'utf8')

  // check history: 1 manifest entry + 3 writes = 4
  var history = await archive.history()
  if (history.length !== 4) {
    // route diagnostics through AVA so they attach to this test's report
    // (was a stray console.log left over from debugging)
    t.log('Weird history', history)
  }
  t.deepEqual(history.length, 4)

  // helper: open a checkout of this archive pinned at version v
  function checkout (v) {
    return new DatArchive(archive.url + v, {localPath: tempy.directory()})
  }

  // read back versions
  t.deepEqual((await checkout('+1').readdir('/')).length, 1)
  t.deepEqual((await checkout('+2').readdir('/')).length, 2)
  t.deepEqual((await checkout('+3').readdir('/')).length, 3)
  t.deepEqual((await checkout('+2').readFile('/one.txt')), 'a')
  t.deepEqual((await checkout('+4').readFile('/one.txt')), 'c')
  var statRev2 = await checkout('+2').stat('/one.txt')
  var statRev4 = await checkout('+4').stat('/one.txt')
  t.truthy(statRev2.offset < statRev4.offset)
})
245 |
// Archives we don't hold the secret key for must reject mutations.
test('Fail to write to unowned archives', async t => {
var archive = new DatArchive(testStaticDatURL, {localPath: tempy.directory()})
await t.throws(archive.writeFile('/denythis.txt', 'hello world', 'utf8'))
await t.throws(archive.mkdir('/denythis'))
})

// getInfo on a remote archive: not owned, and at the fixture's known version.
test('archive.getInfo', async t => {
var archive = new DatArchive(testStaticDatURL, {localPath: tempy.directory()})
var info = await archive.getInfo()
t.deepEqual(info.isOwner, false)
// the static fixture was written in 4 operations
t.deepEqual(info.version, 4)
})

// download('/file') fetches one file's blocks; download('/') fetches everything.
test('archive.download', async t => {
var archive = new DatArchive(testStaticDatURL, {localPath: tempy.directory()})

// ensure not yet downloaded
var res = await archive.stat('/hello.txt')
t.deepEqual(res.downloaded, 0)

// download
await archive.download('/hello.txt')

// ensure downloaded
var res = await archive.stat('/hello.txt')
t.deepEqual(res.downloaded, res.blocks)

// ensure not yet downloaded
var res = await archive.stat('/subdir/hello.txt')
t.deepEqual(res.downloaded, 0)

// download
await archive.download('/')

// ensure downloaded
var res = await archive.stat('/subdir/hello.txt')
t.deepEqual(res.downloaded, res.blocks)
})
284 |
285 | test('archive.watch', async t => {
286 | // create a fresh dat
287 | var archive = await DatArchive.create({localPath: tempy.directory(), title: 'Another Test Dat'})
288 | await archive._loadPromise
289 |
290 | // start the stream
291 | var res = []
292 | var events = archive.watch()
293 | events.addEventListener('changed', function ({path}) {
294 | res.push(path)
295 | })
296 |
297 | // make changes
298 | await archive.writeFile('/a.txt', 'one', 'utf8')
299 | await archive.writeFile('/b.txt', 'one', 'utf8')
300 | await archive.writeFile('/a.txt', 'one', 'utf8')
301 | await archive.writeFile('/a.txt', 'two', 'utf8')
302 | await archive.writeFile('/b.txt', 'two', 'utf8')
303 | await archive.writeFile('/c.txt', 'one', 'utf8')
304 |
305 | var n = 0
306 | while (res.length !== 6 && ++n < 10) {
307 | await sleep(500)
308 | }
309 | t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
310 | })
311 |
312 | test('archive.watch (onInvalidated)', async t => {
313 | // create a fresh dat
314 | var archive = await DatArchive.create({localPath: tempy.directory(), title: 'Another Test Dat'})
315 | await archive._loadPromise
316 |
317 | // start the stream
318 | var res = []
319 | archive.watch(function ({path}) {
320 | res.push(path)
321 | })
322 |
323 | // make changes
324 | await archive.writeFile('/a.txt', 'one', 'utf8')
325 | await archive.writeFile('/b.txt', 'one', 'utf8')
326 | await archive.writeFile('/a.txt', 'one', 'utf8')
327 | await archive.writeFile('/a.txt', 'two', 'utf8')
328 | await archive.writeFile('/b.txt', 'two', 'utf8')
329 | await archive.writeFile('/c.txt', 'one', 'utf8')
330 |
331 | var n = 0
332 | while (res.length !== 6 && ++n < 10) {
333 | await sleep(500)
334 | }
335 | t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
336 | })
337 |
test('archive.watch (match filename)', async t => {
  // create a fresh dat
  var archive = await DatArchive.create({localPath: tempy.directory(), title: 'Another Test Dat'})
  await archive._loadPromise

  // watch a single exact path; only events for that path should arrive
  var res = []
  archive.watch('/a.txt', ({path}) => {
    res.push(path)
  })

  // write a fixed sequence of files; three of the writes hit /a.txt
  for (const [name, value] of [
    ['/a.txt', 'one'],
    ['/b.txt', 'one'],
    ['/a.txt', 'one'],
    ['/a.txt', 'two'],
    ['/b.txt', 'two'],
    ['/c.txt', 'one']
  ]) {
    await archive.writeFile(name, value, 'utf8')
  }

  // poll (up to ~4.5s) until the three matching events have arrived
  for (var n = 0; res.length !== 3 && n < 9; n++) {
    await sleep(500)
  }
  t.deepEqual(res, ['/a.txt', '/a.txt', '/a.txt'])
})
363 |
test('archive.watch (glob)', async t => {
  // create a fresh dat
  var archive = await DatArchive.create({localPath: tempy.directory(), title: 'Another Test Dat'})
  await archive._loadPromise

  // watch with a glob pattern that matches every top-level .txt file
  var res = []
  archive.watch('/*.txt', ({path}) => {
    res.push(path)
  })

  // write a fixed sequence of files; all six writes match the glob
  for (const [name, value] of [
    ['/a.txt', 'one'],
    ['/b.txt', 'one'],
    ['/a.txt', 'one'],
    ['/a.txt', 'two'],
    ['/b.txt', 'two'],
    ['/c.txt', 'one']
  ]) {
    await archive.writeFile(name, value, 'utf8')
  }

  // poll (up to ~4.5s) until every event has arrived
  for (var n = 0; res.length !== 6 && n < 9; n++) {
    await sleep(500)
  }
  t.deepEqual(res, ['/a.txt', '/b.txt', '/a.txt', '/a.txt', '/b.txt', '/c.txt'])
})
389 |
test('archive.watch (array)', async t => {
  // create a fresh dat
  var archive = await DatArchive.create({localPath: tempy.directory(), title: 'Another Test Dat'})
  await archive._loadPromise

  // watch an explicit list of paths; /b.txt writes must be filtered out
  var res = []
  archive.watch(['/a.txt', '/c.txt'], ({path}) => {
    res.push(path)
  })

  // write a fixed sequence of files; four writes hit the watched paths
  for (const [name, value] of [
    ['/a.txt', 'one'],
    ['/b.txt', 'one'],
    ['/a.txt', 'one'],
    ['/a.txt', 'two'],
    ['/b.txt', 'two'],
    ['/c.txt', 'one']
  ]) {
    await archive.writeFile(name, value, 'utf8')
  }

  // poll (up to ~4.5s) until the four matching events have arrived
  for (var n = 0; res.length !== 4 && n < 9; n++) {
    await sleep(500)
  }
  t.deepEqual(res, ['/a.txt', '/a.txt', '/a.txt', '/c.txt'])
})
415 |
test('archive.createNetworkActivityStream', async t => {
  // share the test static dat
  var testStaticDat2 = await createDat()
  var testStaticDat2URL = 'dat://' + testStaticDat2.archive.key.toString('hex')
  var archive = new DatArchive(testStaticDat2URL, {localPath: tempy.directory()})
  await archive._loadPromise

  // track download counts and completion flags per feed (metadata/content)
  var res = {
    metadata: {down: 0, all: false},
    content: {down: 0, all: false}
  }
  var events = archive.createNetworkActivityStream()
  events.addEventListener('network-changed', () => {
    res.gotPeer = true
  })
  events.addEventListener('download', ({feed}) => {
    res[feed].down++
  })
  events.addEventListener('sync', ({feed}) => {
    res[feed].all = true
  })

  // do writes
  await new Promise(resolve => {
    testStaticDat2.importFiles(__dirname + '/scaffold/test-static-dat', resolve)
  })

  // download
  await archive.download()

  // poll (up to ~4.5s) until the content feed reports fully synced
  for (var n = 0; !res.content.all && n < 9; n++) {
    await sleep(500)
  }
  t.truthy(res.metadata.down > 0)
  t.truthy(res.content.down > 0)
  t.deepEqual(res.metadata.all, true)
  t.deepEqual(res.content.all, true)
})
462 |
/**
 * Resolve after the given delay.
 * @param {number} time - delay in milliseconds
 * @returns {Promise<void>} promise fulfilled once the timeout fires
 */
function sleep (time) {
  return new Promise((resolve) => {
    setTimeout(resolve, time)
  })
}
466 |
--------------------------------------------------------------------------------
/test/lib/dat-helpers.js:
--------------------------------------------------------------------------------
1 | const Dat = require('dat-node')
2 | const tempy = require('tempy')
3 |
4 | exports.shareDat = function (dir) {
5 | return new Promise((resolve, reject) => {
6 | Dat(dir, {temp: true}, function (err, dat) {
7 | if (err) return reject(err)
8 | dat.joinNetwork()
9 | dat.importFiles(dir, function (err) {
10 | if (err) return reject(err)
11 | resolve(dat)
12 | })
13 | })
14 | })
15 | }
16 |
17 | exports.createDat = function () {
18 | return new Promise((resolve, reject) => {
19 | Dat(tempy.directory(), {temp: true}, function (err, dat) {
20 | if (err) return reject(err)
21 | dat.joinNetwork()
22 | resolve(dat)
23 | })
24 | })
25 | }
--------------------------------------------------------------------------------
/test/scaffold/test-runner-dat/dat.json:
--------------------------------------------------------------------------------
1 | {
2 | "title": "Test Runner Dat"
3 | }
--------------------------------------------------------------------------------
/test/scaffold/test-runner-dat/index.html:
--------------------------------------------------------------------------------
1 | Dat loaded
2 |
--------------------------------------------------------------------------------
/test/scaffold/test-runner-dat/index.js:
--------------------------------------------------------------------------------
// Expose JSON.stringify on window so the test-runner page can call it.
window.stringify = JSON.stringify
--------------------------------------------------------------------------------
/test/scaffold/test-static-dat/beaker.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/dat-ecosystem-archive/node-dat-archive/d79d2d1f88d6a53e7048afa04a8d23152098daad/test/scaffold/test-static-dat/beaker.png
--------------------------------------------------------------------------------
/test/scaffold/test-static-dat/hello.txt:
--------------------------------------------------------------------------------
1 | hello
--------------------------------------------------------------------------------
/test/scaffold/test-static-dat/subdir/hello.txt:
--------------------------------------------------------------------------------
1 | hi
--------------------------------------------------------------------------------
/test/scaffold/test-static-dat/subdir/space in the name.txt:
--------------------------------------------------------------------------------
1 | hi
--------------------------------------------------------------------------------