'use strict'

const pMap = require('p-map')
const pDoWhilst = require('p-do-whilst')
const Entry = require('./entry')

const hasItems = arr => arr && arr.length > 0

class EntryIO {
  /**
   * Fetch log graphs in parallel
   *
   * @param {IPFS} ipfs An IPFS instance
   * @param {Array<string>} hashes Multihashes of the entries to start fetching from
   * @param {Object} options Fetch options, passed through to fetchAll (length, exclude, timeout, concurrency, onProgressCallback)
   * @returns {Promise<Array<Entry>>} All fetched entries, concatenated into a single array
   */
  static async fetchParallel (ipfs, hashes, { length, exclude = [], timeout, concurrency, onProgressCallback }) {
    const fetchOne = async (hash) => EntryIO.fetchAll(ipfs, hash, { length, exclude, timeout, onProgressCallback, concurrency })
    const concatArrays = (arr1, arr2) => arr1.concat(arr2)
    const flatten = (arr) => arr.reduce(concatArrays, [])
    const res = await pMap(hashes, fetchOne, { concurrency: Math.max(concurrency || hashes.length, 1) })
    return flatten(res)
  }

  /**
   * Fetch log entries
   *
   * @param {IPFS} ipfs An IPFS instance
   * @param {string|Array<string>} hashes Multihash(es) of the entry/entries to start fetching from
   * @param {Object} [options] Fetch options
   * @param {number} [options.length=-1] How many entries to fetch (-1 fetches all)
   * @param {Array<Entry>} [options.exclude=[]] Entries to skip (e.g. entries we already have)
   * @param {number} [options.timeout] Maximum time in ms to wait for a single entry before giving up
   * @param {function} [options.onProgressCallback] Called after an entry has been fetched and added to the results
   * @param {function} [options.onStartProgressCallback] Called right before an entry is fetched
   * @param {number} [options.concurrency=32] Maximum number of entries to fetch in parallel
   * @param {number} [options.delay=0] Artificial delay in ms after each fetch (for debugging)
   * @returns {Promise<Array<Entry>>}
   */
  static async fetchAll (ipfs, hashes, { length = -1, exclude = [], timeout, onProgressCallback, onStartProgressCallback, concurrency = 32, delay = 0 } = {}) {
    const result = []
    const cache = {}
    const loadingCache = {}
    const loadingQueue = Array.isArray(hashes)
      ? { 0: hashes.slice() }
      : { 0: [hashes] }
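    // Note: the loading queue is a map of priority buckets (index -> array of hashes).
    // getNextFromQueue drains the buckets in ascending key order, so a lower index
    // means a hash is fetched sooner; the given head hashes start in bucket 0.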
    let running = 0 // keep track of how many entries are being fetched at any time
    let maxClock = 0 // keep track of the latest clock time during load
    let minClock = 0 // keep track of the minimum clock time during load

    // Does the loading queue have more to process?
    const loadingQueueHasMore = () => Object.values(loadingQueue).find(hasItems) !== undefined

    // Add a multihash to the loading queue
    const addToLoadingQueue = (e, idx) => {
      if (!loadingCache[e]) {
        if (!loadingQueue[idx]) loadingQueue[idx] = []
        if (!loadingQueue[idx].includes(e)) {
          loadingQueue[idx].push(e)
        }
        loadingCache[e] = true
      }
    }

    // Get the next items to process from the loading queue
    const getNextFromQueue = (length = 1) => {
      const getNext = (res, key, idx) => {
        const nextItems = loadingQueue[key]
        while (nextItems.length > 0 && res.length < length) {
          const hash = nextItems.shift()
          res.push(hash)
        }
        if (nextItems.length === 0) {
          delete loadingQueue[key]
        }
        return res
      }
      return Object.keys(loadingQueue).reduce(getNext, [])
    }

    // Add entries that we don't need to fetch to the "cache"
    const addToExcludeCache = e => { cache[e.hash] = true }

    // Fetch one entry and add it to the results
    const fetchEntry = async (hash) => {
      if (!hash || cache[hash]) {
        return
      }

      return new Promise((resolve, reject) => {
        // Resolve the promise after a timeout (if given) in order to
        // not get stuck loading a block that is unreachable
        const timer = timeout && timeout > 0
          ? setTimeout(() => {
            console.warn(`Warning: Couldn't fetch entry '${hash}', request timed out (${timeout}ms)`)
            resolve()
          }, timeout)
          : null

        const addToResults = (entry) => {
          if (Entry.isEntry(entry)) {
            const ts = entry.clock.time

            // Update min/max clocks
            maxClock = Math.max(maxClock, ts)
            minClock = result.length > 0
              ? Math.min(result[result.length - 1].clock.time, minClock)
              : maxClock

            const isLater = (result.length >= length && ts >= minClock)
            // Queue priority for this entry's next/ref pointers: entries with later
            // clocks get a lower index (fetched sooner), refs get increasing offsets
            const calculateIndex = (idx) => maxClock - ts + ((idx + 1) * idx)

            // Add the entry to the results if
            // 1) we're fetching all entries, or
            // 2) the results are not filled yet, or
            // 3) the entry's clock is the same as or later than the current known minimum clock time
            if (length < 0 || result.length < length || isLater) {
              result.push(entry)
              cache[hash] = true

              if (onProgressCallback) {
                onProgressCallback(hash, entry, result.length, result.length)
              }
            }

            if (length < 0) {
              // If we're fetching all entries (length === -1), add nexts and refs to the queue
              entry.next.forEach(addToLoadingQueue)
              if (entry.refs) entry.refs.forEach(addToLoadingQueue)
            } else {
              // If we're fetching entries up to a certain length, keep fetching
              // the nexts even after the result is filled up, to make sure we
              // check entries whose clock is later than what we have in the result
              if (result.length < length || ts > minClock || (ts === minClock && !cache[entry.hash])) {
                entry.next.forEach(e => addToLoadingQueue(e, calculateIndex(0)))
              }
              if (entry.refs && (result.length + entry.refs.length <= length)) {
                entry.refs.forEach((e, i) => addToLoadingQueue(e, calculateIndex(i)))
              }
            }
          }
        }

        if (onStartProgressCallback) {
          onStartProgressCallback(hash, null, 0, result.length)
        }

        // Load the entry
        Entry.fromMultihash(ipfs, hash).then(async (entry) => {
          try {
            // Add it to the results
            addToResults(entry)

            // Simulate network latency (for debugging purposes)
            if (delay > 0) {
              const sleep = (ms = 0) => new Promise(resolve => setTimeout(resolve, ms))
              await sleep(delay)
            }
            resolve()
          } catch (e) {
            reject(e)
          } finally {
            clearTimeout(timer)
          }
        }).catch(reject)
      })
    }

    // One loop of processing the loading queue
    const _processQueue = async () => {
      if (running < concurrency) {
        const nexts = getNextFromQueue(concurrency)
        running += nexts.length
        await pMap(nexts, fetchEntry)
        running -= nexts.length
      }
    }

    // Add entries to exclude from processing to the cache before we start
    exclude.forEach(addToExcludeCache)

    // Fetch entries
    await pDoWhilst(_processQueue, loadingQueueHasMore)

    return result
  }
}

module.exports = EntryIO
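
/*
 * Usage sketch (illustrative only, not part of the module): fetch up to 20
 * entries of a log graph starting from a known head hash. Assumes this module
 * is saved as ./entry-io, `ipfs` is an initialized IPFS instance, `headHash`
 * is a hypothetical multihash of a log entry, and the call runs inside an
 * async function:
 *
 *   const EntryIO = require('./entry-io')
 *
 *   const entries = await EntryIO.fetchAll(ipfs, headHash, {
 *     length: 20,
 *     timeout: 3000,
 *     onProgressCallback: (hash, entry, count) => console.log(`Fetched ${count}: ${hash}`)
 *   })
 */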