├── .github
└── workflows
│ └── nodejs.yml
├── .gitignore
├── .npmignore
├── .travis.yml
├── LICENSE
├── Makefile
├── README.md
├── benchmark
└── load.js
├── diary.md
├── docco.css
├── edify.md
├── locket.js
├── package.json
├── redux.md
└── test
├── locket.t.js
└── readme.t.js
/.github/workflows/nodejs.yml:
--------------------------------------------------------------------------------
1 | name: Node CI
2 |
3 | on: [push]
4 |
5 | jobs:
6 | build:
7 |
8 | runs-on: ubuntu-latest
9 |
10 | strategy:
11 | matrix:
12 | node-version: [12.x,14.x,16.x]
13 |
14 | steps:
15 | - uses: actions/checkout@v1
16 | - name: Use Node.js ${{ matrix.node-version }}
17 | uses: actions/setup-node@v1
18 | with:
19 | node-version: ${{ matrix.node-version }}
20 | - name: npm install
21 | run: npm install --no-package-lock
22 | - name: npm ls
23 | run: npm ls
24 | - name: npm install nyc, prove, codecov
25 | run: npm install -g nyc prove codecov
26 | - name: npm test
27 | run: nyc npm test
28 | env:
29 | CI: true
30 | - name: generate codecov report
31 | run: nyc report --reporter=text-lcov > coverage.lcov
32 | - name: ship coverage to codecov
33 | run: codecov --branch=${GITHUB_REF##*/}
34 | env:
35 | CODECOV_TOKEN: ${{secrets.CODECOV_TOKEN}}
36 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | /.nyc_output
2 | /.wiseguy
3 | /coverage
4 | /docs
5 | /node_modules
6 | /test/tmp
7 | /benchmark/tmp
8 |
--------------------------------------------------------------------------------
/.npmignore:
--------------------------------------------------------------------------------
1 | /.*
2 | /*.md
3 | /coverage
4 | /docs
5 | /test
6 | /benchmark
7 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: false
2 |
3 | language: node_js
4 |
5 | node_js:
6 | - '12'
7 | - '14'
8 | - '16'
9 |
10 | branches:
11 | only:
12 | - master
13 | - travis-ci
14 |
15 | install:
16 | - npm install --no-package-lock --no-save
17 | - npm install -g nyc prove
18 |
19 | script:
20 | - nyc npm test
21 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | The MIT License
2 |
3 | Copyright (c) 2013-2021 Alan Gutierrez
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in
13 | all copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
21 | THE SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | all: test/readme.t.js README.md
2 |
3 | test/readme.t.js: edify.md
4 | edify --mode code $< > $@
5 | README.md: edify.md
6 | edify --mode text $< > $@
7 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Locket
2 |
3 | [](https://github.com/bigeasy/locket/actions)
4 | [](https://codecov.io/gh/bigeasy/locket)
5 | [](https://opensource.org/licenses/MIT)
6 |
7 | _Salvage Necklace 5: Inside Locket by [B Zedan](http://www.flickr.com/people/bzedan/)._
8 |
9 |
10 |
11 |
12 | A pure-JavaScript [leveldown](https://github.com/Level/leveldown) implementation
13 | backed by a persistent and durable evented I/0 b-tree for use with
14 | [levelup](https://github.com/Level/leveldown) — i.e. a database.
15 |
16 | | What | Where |
17 | | --- | --- |
18 | | Discussion | https://github.com/bigeasy/locket/issues/1 |
19 | | Documentation | https://bigeasy.github.io/locket |
20 | | Source | https://github.com/bigeasy/locket |
21 | | Issues | https://github.com/bigeasy/locket/issues |
22 | | CI | https://travis-ci.org/bigeasy/locket |
23 | | Coverage: | https://codecov.io/gh/bigeasy/locket |
24 | | License: | MIT |
25 |
26 | Locket installs from NPM.
27 |
28 | ```
29 | npm install locket
30 | ```
31 |
32 | ## Living `README.md`
33 |
34 | This `README.md` is also a unit test using the
35 | [Proof](https://github.com/bigeasy/proof) unit test framework. We'll use the
36 | Proof `okay` function to assert out statements in the readme. A Proof unit test
37 | generally looks like this.
38 |
39 | ```javascript
40 | require('proof')(4, okay => {
41 | okay('always okay')
42 | okay(true, 'okay if true')
43 | okay(1, 1, 'okay if equal')
44 | okay({ value: 1 }, { value: 1 }, 'okay if deep strict equal')
45 | })
46 | ```
47 |
48 | You can run this unit test yourself to see the output from the various
49 | code sections of the readme.
50 |
51 | ```text
52 | git clone git@github.com:bigeasy/locket.git
53 | cd locket
54 | npm install --no-package-lock --no-save
55 | node test/readme.t.js
56 | ```
57 |
58 | ## Usage
59 |
60 | ```javascript
61 | okay('okay')
62 | ```
63 |
--------------------------------------------------------------------------------
/benchmark/load.js:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env node
2 |
3 | Error.stackTraceLimit = 88
4 |
5 | /*
6 | ___ usage ___ en_US ___
7 | usage: node load.js
8 |
9 | All around tests for benchmarking Locket.
10 |
11 | options:
12 |
13 | -d, --leveldown
14 | use leveldown instead of Locket.
15 | ___ . ___
16 | */
17 |
18 | const Destructible = require('destructible')
19 | const Locket = require('../')
20 | const cadence = require('cadence')
21 | const path = require('path')
22 | const crypto = require('crypto')
23 | const seedrandom = require('seedrandom')
24 | const levelup = require('levelup')
25 | const leveldown = require('leveldown')
26 |
27 | const fs = require('fs').promises
28 |
29 | const { callback } = require('comeuppance')
30 |
// Deterministic pseudo-random integer generator in [0, max). Seeded with a
// constant so every benchmark run produces the identical workload.
const random = (function () {
    const generate = seedrandom(0)
    return max => Math.floor(generate() * max)
})()
37 |
// Benchmark driver. Builds 124 batches of 1024 put/del operations with
// deterministic pseudo-random types, then writes them through levelup into
// either leveldown or Locket and reports the elapsed insert time.
//
// NOTE(review): the usage stanza above says `-d, --leveldown`, but the code
// tests `arguable.ultimate.leveldb` — presumably one of the two is stale;
// confirm against the arguable usage blocks.
const runner = cadence(function (step, arguable) {
    // `level` and `o` appear to be unused; `gather` is only assigned in the
    // currently disabled read phase below.
    let start, insert, gather, level
    const tmp = path.join(__dirname, 'tmp')
    const o = { createIfMissing: true }
    const destructible = new Destructible('benchmark/load')
    // Generate the entire workload up front so the timed section excludes
    // batch construction. Keys are sequential 32-bit big-endian integers and
    // each entry is randomly a 'put' or a 'del'.
    const batches = []
    let key = 0
    while (batches.length != 124) {
        const entries = []
        for (let i = 0; i < 1024; i++) {
            // Disabled alternative workload: SHA-1 digests of random values
            // as keys instead of sequential keys. Toggled by hand.
            if (false) {
                const value = random(1024)
                const sha = crypto.createHash('sha1')
                const buffer = Buffer.alloc(4)
                buffer.writeUInt32BE(value, 0)
                sha.update(buffer)
                entries.push({
                    key: sha.digest(),
                    value: buffer,
                    type: !! random(2) ? 'put' : 'del'
                })
            } else {
                const buffer = Buffer.alloc(4)
                buffer.writeUInt32BE(key++, 0)
                entries.push({
                    key: buffer,
                    value: buffer,
                    type: !! random(2) ? 'put' : 'del'
                })
            }
        }
        batches.push(entries)
    }
    destructible.promise.catch(error => console.log(error.stack))
    destructible.destruct(() => 'destructing')
    step(function () {
        // Start from an empty temporary directory.
        return fs.rm(tmp, { recursive: true, force: true })
    }, function () {
        return fs.mkdir(tmp, { recursive: true })
    }, function () {
        // Open the store under test and start the insert clock.
        start = Date.now()
        if (arguable.ultimate.leveldb) {
            const file = path.join(tmp, 'put')
            return leveldown(file)
        } else {
            const file = path.join(tmp, 'put')
            step(function () {
                return fs.mkdir(file)
            }, function () {
                return new Locket(file)
            })
        }
    }, function (leveldown) {
        // Wrap the backing store in levelup and write the batches one at a
        // time, closing the database when done.
        const db = levelup(leveldown)
        step(function () {
            let batch = 0
            const loop = step.loop([ 0 ], function (i) {
                if (i == 124) {
                    return [ loop.break ]
                }
                step(function () {
                    console.log(i)
                    db.batch(batches[i], step())
                }, function () {
                    return i + 1
                })
            })
        }, function () {
            db.close(step())
        })
    }, function () {
        insert = Date.now() - start
        start = Date.now()
        // The bare `return` below disables the read (gather) phase; the rest
        // of this function body is intentionally unreachable dead code. Note
        // that `records` in the stream handler is never declared anywhere —
        // it would throw if this phase were re-enabled. TODO confirm intent.
        return
        if (arguable.ultimate.leveldb) {
            const file = path.join(tmp, 'put')
            return leveldown(file)
        } else {
            const file = path.join(tmp, 'put')
            return new Locket(file)
        }
    }, function (leveldown) {
        // Dead code: see the note above.
        return
        const db = levelup(leveldown)
        step(function () {
            step.ee(db.createReadStream())
                .on('data', function (data) { records.push(data) })
                .end('end')
                .error()
        }, function () {
            db.close(step())
        }, function () {
            gather = Date.now() - start
            console.log('insert: ' + insert + ', gather: ' + gather)
        })
    }, function () {
        // Report the insert timing and tear down the destructible tree.
        console.log('insert: ' + insert)
        return destructible.destroy().promise
    })
})
138 |
139 | /*
140 | ___ usage ___ en_US ___
141 | usage: prolific
142 |
143 | options:
144 |
145 | -l, --leveldb
146 | use leveldb
147 |
148 | ___ $ ___ en_US ___
149 |
150 | ___ . ___
151 | */
152 |
// Command-line entry point. `arguable` parses the usage stanzas above and
// invokes this function with the parsed arguments; the cadence-style runner
// is adapted to a promise via `callback`.
require('arguable')(module, async arguable => {
    await callback(resolve => runner(arguable, resolve))
})
156 |
--------------------------------------------------------------------------------
/diary.md:
--------------------------------------------------------------------------------
1 | # Locket Diary
2 |
3 | ## Design Discussion
4 |
5 | Going to need an iterator that goes to the left in Strata, on that goes to the
6 | key before the given key. `strata.iterator(key, { before: true }, callback)`
7 |
8 | ## Merge
9 |
10 | When merging one of the staging trees, it seems kind of silly to actually delete
11 | the records, which really means writing a delete. Better to simply obliterate
12 | the tree when you're done and recreate it? Yes, but that is a new state. One for
13 | which I don't have a recovery planned. I do have a recover strategy for a tree
14 | that fails in the middle of a delete or balance.
15 |
16 | How do I know which transactions succeeded? I'll only write the transaction if
17 | there are actual operations in the `batch`. Then, when I encounter a transaction
18 | id in the `batch`, I know to remove the transaction from the transaction
19 | collection. At the end of the merge I promote the least transaction id using the
20 | in memory transaction hash.
21 |
22 | A stage tree must be completely merged before we merge the next tree. We can't
23 | merge a little of one tree, then merge a little of the other. Why? Because we're
24 | counting on this transaction id to always increase. When the iterator performs
25 | the three way merge, it will compare the two stage trees against the primary
26 | tree. We can be certain that the final, committed action our stage tree is the
27 | one that needs to be merged into the primary tree.
28 |
29 | Let's say we've allowed the user to toggle back and forth between stage trees.
30 | The user writes transaction 1 to the secondary stage tree. The user swaps trees
31 | telling Locket to merge the secondary stage tree and to log to the tertiary
32 | stage tree. The user writes transaction 2 to the tertiary stage tree. The user
33 | then tells Locket to swap back to secondary stage tree and to merge the tertiary
34 | stage tree. But, Locket never got around to merging transaction 1, concurrency
35 | don't ya know, and transaction 1 is the secondary stage tree. Meanwhile,
36 | transaction 2 is in the tertiary stage tree and is merged into the primary tree.
37 | When transaction 2 is merged into the primary tree it becomes transaction 0.
38 | Any valid transaction in a log takes precedence over the primary tree. The
39 | transaction 1 that was superseded by transaction 2 now vetoes the values that
40 | transaction 2 has merged into the primary tree.
41 |
42 | We're always moving forward, merging one tree and then the next. With this
43 | system, we can take our time with a staging tree merging it into the
44 | primary tree. Then swap back. The user should not be calling merge faster than
45 | the merge operation can merge a stage.
46 |
47 | Also, we're going to have to merge when we open a Locket, because we don't have
48 | a way to know which stage was active, which was merging. We can make this less
49 | painful by taking a sip of each stage so we can launch with an empty stage if
50 | one exists, merging the full one.
51 |
52 | ## Revision Id Forever
53 |
54 | I'm not sure why I'm not keeping the revision id in the main table. It costs
55 | nothing and then I can use my able, baker stage names, which I won't do, but it
56 | won't matter what order things are merged, because I'll have a revision id and I
57 | won't overwrite any record.
58 |
59 | ## Replication
60 |
61 | Now that we have an archive, we can transport it to other servers as a log, and
62 | replay it, in pretty much any order. Latest version wins. Simply drop the files
63 | into the staging area, push them onto the end of the queue, and balance. Just as
64 | performant as any merge.
65 |
66 | ## Extract Iterators / Merge
67 |
68 | May as well extract our nice iterator and merge into `strata.merge`.
69 |
70 | ## In-Memory Merge
71 |
72 | One at a time, it would be simple to insert an entry into a tree. When it is a
73 | batch, if the batch is large, say a large append, we're going to block everyone
74 | if we are appending to a staging log that is already part of the collective
75 | tree. Anyone reading is going to wait on the write to the tail.
76 |
77 | However, we're only writing in the sense that we're merging the batch, not in
78 | the sense that we're waiting for the write to flush.
79 |
80 | I start to worry about when a version is committed, but then I recall that this
81 | is not a concern for LevelDB and is therefore not a concern for Locket.
82 |
83 | ## Multiple Writers
84 |
85 | They would all write to their own log, but the logs would be merged into a
86 | single page, so that loading the page reads in many logs. The records would get
87 | put in the right order and MVCC takes over. Still only ever two outstanding
88 | pages, but many logs for those pages.
89 |
--------------------------------------------------------------------------------
/docco.css:
--------------------------------------------------------------------------------
1 | /*--------------------- Typography ----------------------------*/
2 |
3 | @font-face {
4 | font-family: 'aller-light';
5 | src: url('public/fonts/aller-light.eot');
6 | src: url('public/fonts/aller-light.eot?#iefix') format('embedded-opentype'),
7 | url('public/fonts/aller-light.woff') format('woff'),
8 | url('public/fonts/aller-light.ttf') format('truetype');
9 | font-weight: normal;
10 | font-style: normal;
11 | }
12 |
13 | @font-face {
14 | font-family: 'aller-bold';
15 | src: url('public/fonts/aller-bold.eot');
16 | src: url('public/fonts/aller-bold.eot?#iefix') format('embedded-opentype'),
17 | url('public/fonts/aller-bold.woff') format('woff'),
18 | url('public/fonts/aller-bold.ttf') format('truetype');
19 | font-weight: normal;
20 | font-style: normal;
21 | }
22 |
23 | @font-face {
24 | font-family: 'roboto-black';
25 | src: url('public/fonts/roboto-black.eot');
26 | src: url('public/fonts/roboto-black.eot?#iefix') format('embedded-opentype'),
27 | url('public/fonts/roboto-black.woff') format('woff'),
28 | url('public/fonts/roboto-black.ttf') format('truetype');
29 | font-weight: normal;
30 | font-style: normal;
31 | }
32 |
33 | /*--------------------- Layout ----------------------------*/
34 | html { height: 100%; }
35 | body {
36 | font-family: "aller-light";
37 | font-size: 14px;
38 | line-height: 18px;
39 | color: #30404f;
40 | margin: 0; padding: 0;
41 | height:100%;
42 | }
43 | #container { min-height: 100%; }
44 |
45 | a {
46 | color: #000;
47 | }
48 |
49 | b, strong {
50 | font-weight: normal;
51 | font-family: "aller-bold";
52 | }
53 |
54 | p {
55 | margin: 15px 0 0px;
56 | }
57 | .annotation ul, .annotation ol {
58 | margin: 25px 0;
59 | }
60 | .annotation ul li, .annotation ol li {
61 | font-size: 14px;
62 | line-height: 18px;
63 | margin: 10px 0;
64 | }
65 |
66 | h1, h2, h3, h4, h5, h6 {
67 | color: #112233;
68 | line-height: 1em;
69 | font-weight: normal;
70 | font-family: "roboto-black";
71 | text-transform: uppercase;
72 | margin: 30px 0 15px 0;
73 | }
74 |
75 | h1 {
76 | margin-top: 40px;
77 | }
78 | h2 {
79 | font-size: 1.26em;
80 | }
81 |
/* Horizontal rules render as a thin gray line. The original declared
   `background: 1px #ddd`, which is an invalid background shorthand (a bare
   length is not a background value), so browsers dropped the declaration
   entirely and the rule had no color. */
hr {
    border: 0;
    background: #ddd;
    height: 1px;
    margin: 20px 0;
}
88 |
89 | pre, tt, code {
90 | font-size: 12px; line-height: 16px;
91 | font-family: Menlo, Monaco, Consolas, "Lucida Console", monospace;
92 | margin: 0; padding: 0;
93 | }
94 | .annotation pre {
95 | display: block;
96 | margin: 0;
97 | padding: 7px 10px;
98 | background: #fcfcfc;
99 | -moz-box-shadow: inset 0 0 10px rgba(0,0,0,0.1);
100 | -webkit-box-shadow: inset 0 0 10px rgba(0,0,0,0.1);
101 | box-shadow: inset 0 0 10px rgba(0,0,0,0.1);
102 | overflow-x: auto;
103 | }
104 | .annotation pre code {
105 | border: 0;
106 | padding: 0;
107 | background: transparent;
108 | }
109 |
110 |
111 | blockquote {
112 | border-left: 5px solid #ccc;
113 | margin: 0;
114 | padding: 1px 0 1px 1em;
115 | }
116 | .sections blockquote p {
117 | font-family: Menlo, Consolas, Monaco, monospace;
118 | font-size: 12px; line-height: 16px;
119 | color: #999;
120 | margin: 10px 0 0;
121 | white-space: pre-wrap;
122 | }
123 |
/* The section list carries no bullets and no outer margin. The padding
   declaration was terminated with a stray double semicolon. */
ul.sections {
    list-style: none;
    padding: 0 0 5px 0;
    margin: 0;
}
129 |
130 | /*
131 | Force border-box so that % widths fit the parent
132 | container without overlap because of margin/padding.
133 |
134 | More Info : http://www.quirksmode.org/css/box.html
135 | */
136 | ul.sections > li > div {
137 | -moz-box-sizing: border-box; /* firefox */
138 | -ms-box-sizing: border-box; /* ie */
139 | -webkit-box-sizing: border-box; /* webkit */
140 | -khtml-box-sizing: border-box; /* konqueror */
141 | box-sizing: border-box; /* css3 */
142 | }
143 |
144 |
145 | /*---------------------- Jump Page -----------------------------*/
146 | #jump_to, #jump_page {
147 | margin: 0;
148 | background: white;
149 | -webkit-box-shadow: 0 0 25px #777; -moz-box-shadow: 0 0 25px #777;
150 | -webkit-border-bottom-left-radius: 5px; -moz-border-radius-bottomleft: 5px;
151 | font: 16px Arial;
152 | cursor: pointer;
153 | text-align: right;
154 | list-style: none;
155 | }
156 |
157 | #jump_to a {
158 | text-decoration: none;
159 | }
160 |
161 | #jump_to a.large {
162 | display: none;
163 | }
164 | #jump_to a.small {
165 | font-size: 22px;
166 | font-weight: bold;
167 | color: #676767;
168 | }
169 |
170 | #jump_to, #jump_wrapper {
171 | position: fixed;
172 | right: 0; top: 0;
173 | padding: 10px 15px;
174 | margin:0;
175 | }
176 |
177 | #jump_wrapper {
178 | display: none;
179 | padding:0;
180 | }
181 |
182 | #jump_to:hover #jump_wrapper {
183 | display: block;
184 | }
185 |
186 | #jump_page_wrapper{
187 | position: fixed;
188 | right: 0;
189 | top: 0;
190 | bottom: 0;
191 | }
192 |
193 | #jump_page {
194 | padding: 5px 0 3px;
195 | margin: 0 0 25px 25px;
196 | max-height: 100%;
197 | overflow: auto;
198 | }
199 |
200 | #jump_page .source {
201 | display: block;
202 | padding: 15px;
203 | text-decoration: none;
204 | border-top: 1px solid #eee;
205 | }
206 |
207 | #jump_page .source:hover {
208 | background: #f5f5ff;
209 | }
210 |
211 | #jump_page .source:first-child {
212 | }
213 |
214 | /*---------------------- Low resolutions (> 320px) ---------------------*/
215 | @media only screen and (min-width: 320px) {
216 | .pilwrap { display: none; }
217 |
218 | ul.sections > li > div {
219 | display: block;
220 | padding:5px 10px 0 10px;
221 | }
222 |
223 | ul.sections > li > div.annotation ul, ul.sections > li > div.annotation ol {
224 | padding-left: 30px;
225 | }
226 |
227 | ul.sections > li > div.content {
228 | overflow-x:auto;
229 | -webkit-box-shadow: inset 0 0 5px #e5e5ee;
230 | box-shadow: inset 0 0 5px #e5e5ee;
231 | border: 1px solid #dedede;
232 | margin:0px 10px 0px 10px;
233 | padding-bottom: 0px;
234 | padding-top: 0px;
235 | }
236 |
237 | ul.sections > li > div.annotation pre {
238 | margin: 7px 0 7px;
239 | padding-left: 15px;
240 | }
241 |
242 | ul.sections > li > div.annotation p tt, .annotation code {
243 | background: #f8f8ff;
244 | border: 1px solid #dedede;
245 | font-size: 12px;
246 | padding: 0 0.2em;
247 | }
248 | }
249 |
250 | /*---------------------- (> 481px) ---------------------*/
251 | @media only screen and (min-width: 481px) {
252 | #container {
253 | position: relative;
254 | }
255 | body {
256 | background-color: #F5F5FF;
257 | font-size: 15px;
258 | line-height: 21px;
259 | }
260 | pre, tt, code {
261 | line-height: 18px;
262 | }
263 | p, ul, ol {
264 | margin: 0 0 15px;
265 | }
266 |
267 |
268 | #jump_to {
269 | padding: 5px 10px;
270 | }
271 | #jump_wrapper {
272 | padding: 0;
273 | }
274 | #jump_to, #jump_page {
275 | font: 10px Arial;
276 | text-transform: uppercase;
277 | }
278 | #jump_page .source {
279 | padding: 5px 10px;
280 | }
281 | #jump_to a.large {
282 | display: inline-block;
283 | }
284 | #jump_to a.small {
285 | display: none;
286 | }
287 |
288 |
289 |
290 | #background {
291 | position: absolute;
292 | top: 0; bottom: 0;
293 | width: 350px;
294 | background: #fff;
295 | border-right: 1px solid #e5e5ee;
296 | z-index: -1;
297 | }
298 |
299 | ul.sections > li > div.annotation ul, ul.sections > li > div.annotation ol {
300 | padding-left: 40px;
301 | }
302 |
303 | ul.sections > li {
304 | white-space: nowrap;
305 | }
306 |
307 | ul.sections > li > div {
308 | display: inline-block;
309 | }
310 |
311 | ul.sections > li > div.annotation {
312 | max-width: 350px;
313 | min-width: 350px;
314 | min-height: 5px;
315 | padding: 0px 13px 0px 13px;
316 | overflow-x: hidden;
317 | white-space: normal;
318 | vertical-align: top;
319 | text-align: left;
320 | }
321 | ul.sections > li > div.annotation pre {
322 | margin: 15px 0 15px;
323 | padding-left: 15px;
324 | }
325 |
326 | ul.sections > li > div.content {
327 | padding: 13px;
328 | vertical-align: top;
329 | border: none;
330 | -webkit-box-shadow: none;
331 | box-shadow: none;
332 | }
333 |
334 | .pilwrap {
335 | position: relative;
336 | display: inline;
337 | }
338 |
339 | .pilcrow {
340 | font: 12px Arial;
341 | text-decoration: none;
342 | color: #454545;
343 | position: absolute;
344 | top: 3px; left: -20px;
345 | padding: 1px 2px;
346 | opacity: 0;
347 | -webkit-transition: opacity 0.2s linear;
348 | }
349 | .for-h1 .pilcrow {
350 | top: 47px;
351 | }
352 | .for-h2 .pilcrow, .for-h3 .pilcrow, .for-h4 .pilcrow {
353 | top: 35px;
354 | }
355 |
356 | ul.sections > li > div.annotation:hover .pilcrow {
357 | opacity: 1;
358 | }
359 | }
360 |
361 | /*---------------------- (> 1025px) ---------------------*/
362 | @media only screen and (min-width: 1025px) {
363 |
364 | body {
365 | font-size: 16px;
366 | line-height: 24px;
367 | }
368 |
369 | #background {
370 | width: 525px;
371 | }
372 | ul.sections > li > div.annotation {
373 | max-width: 525px;
374 | min-width: 525px;
375 | padding: 0px 25px 0px 50px;
376 | }
377 | ul.sections > li > div.content {
378 | padding: 0px 15px 0px 25px;
379 | }
380 | }
381 |
382 | /*---------------------- Syntax Highlighting -----------------------------*/
383 |
384 | td.linenos { background-color: #f0f0f0; padding-right: 10px; }
385 | span.lineno { background-color: #f0f0f0; padding: 0 5px 0 5px; }
386 | /*
387 |
388 | github.com style (c) Vasily Polovnyov
389 |
390 | */
391 |
392 | pre code {
393 | display: block; padding: 0.5em;
394 | color: #000;
395 | background: #f8f8ff
396 | }
397 |
398 | pre .hljs-comment,
399 | pre .hljs-template_comment,
400 | pre .hljs-diff .hljs-header,
401 | pre .hljs-javadoc {
402 | color: #408080;
403 | font-style: italic
404 | }
405 |
406 | pre .hljs-keyword,
407 | pre .hljs-assignment,
408 | pre .hljs-literal,
409 | pre .hljs-css .hljs-rule .hljs-keyword,
410 | pre .hljs-winutils,
411 | pre .hljs-javascript .hljs-title,
412 | pre .hljs-lisp .hljs-title,
413 | pre .hljs-subst {
414 | color: #954121;
415 | /*font-weight: bold*/
416 | }
417 |
418 | pre .hljs-number,
419 | pre .hljs-hexcolor {
420 | color: #40a070
421 | }
422 |
423 | pre .hljs-string,
424 | pre .hljs-tag .hljs-value,
425 | pre .hljs-phpdoc,
426 | pre .hljs-tex .hljs-formula {
427 | color: #219161;
428 | }
429 |
430 | pre .hljs-title,
431 | pre .hljs-id {
432 | color: #19469D;
433 | }
434 | pre .hljs-params {
435 | color: #00F;
436 | }
437 |
438 | pre .hljs-javascript .hljs-title,
439 | pre .hljs-lisp .hljs-title,
440 | pre .hljs-subst {
441 | font-weight: normal
442 | }
443 |
444 | pre .hljs-class .hljs-title,
445 | pre .hljs-haskell .hljs-label,
446 | pre .hljs-tex .hljs-command {
447 | color: #458;
448 | font-weight: bold
449 | }
450 |
451 | pre .hljs-tag,
452 | pre .hljs-tag .hljs-title,
453 | pre .hljs-rules .hljs-property,
454 | pre .hljs-django .hljs-tag .hljs-keyword {
455 | color: #000080;
456 | font-weight: normal
457 | }
458 |
459 | pre .hljs-attribute,
460 | pre .hljs-variable,
461 | pre .hljs-instancevar,
462 | pre .hljs-lisp .hljs-body {
463 | color: #008080
464 | }
465 |
466 | pre .hljs-regexp {
467 | color: #B68
468 | }
469 |
470 | pre .hljs-class {
471 | color: #458;
472 | font-weight: bold
473 | }
474 |
475 | pre .hljs-symbol,
476 | pre .hljs-ruby .hljs-symbol .hljs-string,
477 | pre .hljs-ruby .hljs-symbol .hljs-keyword,
478 | pre .hljs-ruby .hljs-symbol .hljs-keymethods,
479 | pre .hljs-lisp .hljs-keyword,
480 | pre .hljs-tex .hljs-special,
481 | pre .hljs-input_number {
482 | color: #990073
483 | }
484 |
485 | pre .hljs-builtin,
486 | pre .hljs-constructor,
487 | pre .hljs-built_in,
488 | pre .hljs-lisp .hljs-title {
489 | color: #0086b3
490 | }
491 |
492 | pre .hljs-preprocessor,
493 | pre .hljs-pi,
494 | pre .hljs-doctype,
495 | pre .hljs-shebang,
496 | pre .hljs-cdata {
497 | color: #999;
498 | font-weight: bold
499 | }
500 |
501 | pre .hljs-deletion {
502 | background: #fdd
503 | }
504 |
505 | pre .hljs-addition {
506 | background: #dfd
507 | }
508 |
509 | pre .hljs-diff .hljs-change {
510 | background: #0086b3
511 | }
512 |
513 | pre .hljs-chunk {
514 | color: #aaa
515 | }
516 |
517 | pre .hljs-tex .hljs-formula {
518 | opacity: 0.5;
519 | }
520 |
--------------------------------------------------------------------------------
/edify.md:
--------------------------------------------------------------------------------
1 | # Locket
2 |
3 | [](https://github.com/bigeasy/locket/actions)
4 | [](https://codecov.io/gh/bigeasy/locket)
5 | [](https://opensource.org/licenses/MIT)
6 |
7 | _Salvage Necklace 5: Inside Locket by [B Zedan](http://www.flickr.com/people/bzedan/)._
8 |
9 |
10 |
11 |
12 | A pure-JavaScript [leveldown](https://github.com/Level/leveldown) implementation
13 | backed by a persistent and durable evented I/O b-tree for use with
14 | [levelup](https://github.com/Level/levelup) — i.e. a database.
15 |
16 | | What | Where |
17 | | --- | --- |
18 | | Discussion | https://github.com/bigeasy/locket/issues/1 |
19 | | Documentation | https://bigeasy.github.io/locket |
20 | | Source | https://github.com/bigeasy/locket |
21 | | Issues | https://github.com/bigeasy/locket/issues |
22 | | CI | https://travis-ci.org/bigeasy/locket |
23 | | Coverage: | https://codecov.io/gh/bigeasy/locket |
24 | | License: | MIT |
25 |
26 | Locket installs from NPM.
27 |
28 | ```
29 | //{ "mode": "text" }
30 | npm install locket
31 | ```
32 |
33 | ## Living `README.md`
34 |
35 | This `README.md` is also a unit test using the
36 | [Proof](https://github.com/bigeasy/proof) unit test framework. We'll use the
37 | Proof `okay` function to assert our statements in the readme. A Proof unit test
38 | generally looks like this.
39 |
40 | ```javascript
41 | //{ "code": { "tests": 1 }, "text": { "tests": 4 } }
42 | require('proof')(%(tests)d, okay => {
43 | //{ "include": "test", "mode": "code" }
44 | //{ "include": "proof" }
45 | })
46 | ```
47 |
48 | ```javascript
49 | //{ "name": "proof", "mode": "text" }
50 | okay('always okay')
51 | okay(true, 'okay if true')
52 | okay(1, 1, 'okay if equal')
53 | okay({ value: 1 }, { value: 1 }, 'okay if deep strict equal')
54 | ```
55 |
56 | You can run this unit test yourself to see the output from the various
57 | code sections of the readme.
58 |
59 | ```text
60 | //{ "mode": "text" }
61 | git clone git@github.com:bigeasy/locket.git
62 | cd locket
63 | npm install --no-package-lock --no-save
64 | node test/readme.t.js
65 | ```
66 |
67 | ## Usage
68 |
69 | ```javascript
70 | //{ "name": "test" }
71 | okay('okay')
72 | ```
73 |
--------------------------------------------------------------------------------
/locket.js:
--------------------------------------------------------------------------------
1 | // In case you forgot, Alan. You've finished a rewrite. You no longer have many
2 | // different staging trees, you only have one staging tree and it is not a
3 | // tree, it is simply a log, a b-tree with only one leaf. When it is time to
4 | // merge, it is renamed and then it is merged. Thus there is the primary tree,
5 | // the staging log and possibly a merging log. There are, therefore, only two
6 | // extra cursors in addition to the primary tree, and they can always be read
7 | // pretty much directly using an Advance iterator, instead of having to traverse
8 | // them as if they were actual trees.
9 | //
10 | // We have no way of vacuuming the primary tree at this point.
11 | //
12 | // *Note:*
13 | //
14 | // Please curb your compulsion to refactor the upstream libraries any further.
15 | //
16 | // You thought long and hard about this. You are not getting smarter.
17 |
18 | // Node.js API.
19 | const util = require('util')
20 | const assert = require('assert')
21 |
22 | // Return the first value that is not `null` nor `undefined`.
23 | const { coalesce } = require('extant')
24 |
25 | // Modules for storage and concurrency.
26 | const Strata = require('b-tree')
27 |
28 | // A fiber-constrained `async`/`await` work queue.
29 | const Turnstile = require('turnstile')
30 |
31 | // Write-ahead log.
32 | const WriteAhead = require('writeahead')
33 |
34 | // LevelUp adaptors.
35 | const AbstractLevelDOWN = require('abstract-leveldown').AbstractLevelDOWN
36 | const AbstractIterator = require('abstract-leveldown').AbstractIterator
37 |
38 | // An `async`/`await` trampoline.
39 | const { Trampoline } = require('reciprocate')
40 |
41 | // Structured concurrency.
42 | const Destructible = require('destructible')
43 |
44 | // A Swiss Army asynchronous control-flow function generator for JavaScript.
45 | const cadence = require('cadence')
46 |
47 | // A LRU cache for memory paging and content caching.
48 | const Magazine = require('magazine')
49 |
50 | // Handle-based `async`/`await` file operations.
51 | const Operation = require('operation')
52 |
53 | // WAL merge tree.
54 | const Amalgamator = require('amalgamate')
55 | const Rotator = require('amalgamate/rotator')
56 |
57 | const Fracture = require('fracture')
58 |
59 |
60 | // Modules for file operations. We use `strftime` to create date stamped file
61 | // names.
62 | const fs = require('fs').promises
63 | const path = require('path')
64 |
65 | const constrain = require('constrain')
66 |
67 | // Conditionally catch JavaScript exceptions based on type and properties.
68 | const rescue = require('rescue')
69 |
70 | // A comparator function builder.
71 | const ascension = require('ascension')
72 |
73 | const mvcc = {
74 | satiate: require('satiate'),
75 | constrain: require('constrain/iterator')
76 | }
77 |
// TODO Let's see if we can get through this without having to worry about
// encodings.

// Coerce a key or value to a `Buffer`, passing through values that are
// already `Buffer`s untouched.
function encode (value) {
    if (Buffer.isBuffer(value)) {
        return value
    }
    return Buffer.from(value)
}
83 |
// Pages through a merged MVCC iterator on behalf of a LevelDOWN iterator,
// converting internal items into `[ key, value ]` results and honoring the
// LevelDOWN key/value formatting options.
class Paginator {
    // `iterator` is the amalgamated MVCC iterator, `constraints` is the
    // optional far-range constraint produced by `createConstraint` (`null`
    // for an unbounded range), and `options` carries the LevelDOWN iterator
    // options (`keys`, `values`, `keyAsBuffer`, `valueAsBuffer`).
    constructor (iterator, constraints, options) {
        const constrained = constraints == null
            ? iterator
            : mvcc.constrain(iterator, constraints)
        // NOTE(review): the meaning of the `1` argument is defined by the
        // `satiate` module — confirm there before changing it.
        this._iterator = mvcc.satiate(constrained, 1)
        this._constraints = constraints
        this._keyAsBuffer = options.keyAsBuffer
        this._valueAsBuffer = options.valueAsBuffer
        this._keys = options.keys
        this._values = options.values
        // Buffer of items from the last advance, and our read position in it.
        this._items = []
        this._index = 0
    }

    // Produce the next `[ key, value ]` result through the cadence callback,
    // or no result at all once the underlying iterator is exhausted. Items
    // are consumed from the buffered array; when the buffer runs out the
    // iterator is advanced on a trampoline and `next` recurses.
    next = cadence(function (step) {
        if (this._items.length != this._index) {
            // Presumably `parts[0]` is a record header from the amalgamated
            // tree — the key lives in `parts[1]` and the value in `parts[2]`.
            // TODO confirm against the amalgamate record format.
            const item = this._items[this._index++], result = new Array(2)
            if (this._keys) {
                result[0] = this._keyAsBuffer ? item.parts[1] : item.parts[1].toString()
            }
            if (this._values) {
                result[1] = this._valueAsBuffer ? item.parts[2] : item.parts[2].toString()
            }
            return result
        }
        let items = null
        step(function () {
            const trampoline = new Trampoline
            this._iterator.next(trampoline, $items => items = $items)
            trampoline.callback(step())
        }, function () {
            if (this._iterator.done) {
                // Exhausted: return no values so the caller receives an
                // empty result.
                return []
            } else {
                // Refill the buffer and recurse to serve from it.
                this._items = items
                this._index = 0
                this.next(step())
            }
        })
    })
}
126 |
// Composite comparator: compare by Buffer key first, then by two version
// numbers in descending order (`-1`) so newer versions of a key sort first.
const duplicated = ascension([ Buffer.compare, Number, -1, Number, -1 ])

// Convert LevelUP range options (`gt`/`gte`/`start`, `lt`/`lte`/`end`,
// `limit`, `reverse`) into the constraint pair used by the internal
// paginator. After construction, element `[0]` seeds the iterator with its
// starting key, inclusivity and direction, while element `[1]` (possibly
// `null` when unbounded and unlimited) constrains the far end of the range.
function createConstraint (options) {
    const start = coalesce(options.gt, options.start, options.gte, null)
    const end = coalesce(options.lt, options.end, options.lte, null)
    const limit = coalesce(options.limit, -1)
    const reverse = coalesce(options.reverse, false)
    const direction = reverse ? 'reverse' : 'forward'
    const keys = [{
        comparator: duplicated,
        key: [ start, Number.MAX_SAFE_INTEGER, Number.MAX_SAFE_INTEGER ],
        // The start of the range is exclusive only when `gt` was given.
        // Fixed: this previously tested `options.lt`, swapping the
        // inclusivity of the two bounds.
        inclusive: options.gt == null,
        limit: limit,
        direction: direction,
        reverse: reverse
    }, {
        comparator: duplicated,
        key: [ end, 0, 0 ],
        // The end of the range is exclusive only when `lt` was given.
        inclusive: options.lt == null,
        limit: limit,
        direction: direction,
        reverse: reverse
    }]
    if (reverse) {
        keys.reverse()
    }
    // With no limit and no far bound there is nothing to constrain.
    if (keys[1].limit == -1 && keys[1].key[0] == null) {
        keys[1] = null
    }
    // The iterator seed wants the bare key, not the versioned composite.
    keys[0].key = keys[0].key[0]
    return keys
}
159 |
// An implementation of the LevelDOWN `Iterator` object.
//
// The LevelUP interface allows you to specify encodings both when you create
// the database and when you invoke one of its functions, so we need to pass
// two different sets of options to all the functions that consider the
// encoding.

//
class Iterator extends AbstractIterator {
    // Take an MVCC snapshot from the database's rotator and build a paginator
    // that merges the primary tree with the staging logs over the range
    // described by `options`.
    constructor (db, options) {
        super(db)
        this._constraint = createConstraint(options)
        this._options = options
        this._db = db
        this._transaction = this._db._rotator.locker.snapshot()
        this._paginator = this._db._paginator(this._constraint, this._transaction, this._options)
    }

    // Our LevelUP `Iterator` implementation is a wrapper around the internal
    // iterator that merges our Strata b-tree with one or two logs where we are
    // gathering new batch operations. See the `Locket._internalIterator` method
    // for a description of how we compose an amalgamated MVCC iterator from the
    // MVCC iterator modules.

    //
    _next (callback) {
        this._paginator.next(callback)
    }

    // Reposition the iterator so that the next `_next` returns the first key
    // at or after `target` going forward, at or before it in reverse, while
    // preserving the far bound and limit of the original range.
    //
    // Fixed: this previously read the undefined `this.db`, passed the raw
    // target where `_paginator` expects a `(constraint, transaction, options)`
    // triple, and called a non-existent `release` method on the old paginator.
    _seek (target) {
        const options = { ...this._options }
        if (options.reverse) {
            delete options.lt
            delete options.end
            options.lte = target
        } else {
            delete options.gt
            delete options.start
            options.gte = target
        }
        this._constraint = createConstraint(options)
        this._paginator = this._db._paginator(this._constraint, this._transaction, options)
    }

    // Release the MVCC snapshot held by this iterator.
    _end (callback) {
        this._db._rotator.locker.release(this._transaction)
        callback()
    }
}
200 |
// The LevelDOWN implementation proper. Stores records in an Amalgamator merge
// tree — a primary Strata b-tree amalgamated with write-ahead-log staging
// trees — behind the `abstract-leveldown` interface.
class Locket extends AbstractLevelDOWN {
    // Create a database rooted at the directory `location`. `options.cache`
    // may supply a shared LRU page cache; `options.primary` and
    // `options.stage` may override the b-tree split/merge sizes.
    constructor (location, options = {}) {
        super()
        this.location = location
        // Fixed: this assignment previously ended with a comma, chaining it
        // to the `_versions` assignment below with the comma operator.
        this._cache = coalesce(options.cache, new Magazine)
        // TODO Allow common operation handle cache.
        this._versions = {}
        this._options = options
        this._version = 1n
        this._amalgamator = null
    }

    // LevelDOWN hooks that coerce keys and values to `Buffer`s.
    _serializeKey = encode

    _serializeValue = encode

    // Open the database. The database directory contains a `wal` directory
    // for the write-ahead log and a `tree` directory for the primary b-tree;
    // finding exactly one of the two means the database is corrupt.
    _open = cadence(function (step, options) {
        const destructible = this._destructible = new Destructible('locket')
        // TODO Only use the old callback `fs`.
        const fs = require('fs')
        // TODO What is the behavior if you close while opening, or open while closing?
        step(function () {
            step(function () {
                fs.readdir(this.location, step())
            }, function (files) {
                // Either both sub-directories exist, neither does, or the
                // database was left in a partially-created state.
                const exists = ([ 'wal', 'tree' ]).filter(file => ~files.indexOf(file))
                if (exists.length == 0) {
                    return false
                }
                if (exists.length == 2) {
                    return true
                }
                // TODO Interrupt or LevelUp specific error.
                throw new Error('partial extant database')
            }, function (exists) {
                if (! exists) {
                    step(function () {
                        fs.mkdir(path.resolve(this.location, 'wal'), { recursive: true }, step())
                    }, function () {
                        fs.mkdir(path.resolve(this.location, 'tree'), { recursive: true }, step())
                    }, function () {
                        return false
                    })
                } else {
                    return exists
                }
            })
        }, async function (exists) {
            // The rotator amalgamates the write-ahead log into the primary
            // tree; file writes are funneled through a single turnstile.
            const turnstile = new Turnstile(destructible.durable($ => $(), { isolated: true }, 'turnstile'))
            const writeahead = new WriteAhead(destructible.durable($ => $(), 'writeahead'), turnstile, await WriteAhead.open({
                directory: path.resolve(this.location, 'wal')
            }))
            this._rotator = new Rotator(destructible.durable($ => $(), 'rotator'), await Rotator.open(writeahead), { size: 1024 * 1024 })
            // TODO Make strands user optional.
            return this._rotator.open(Fracture.stack(), 'locket', {
                handles: new Operation.Cache(new Magazine),
                directory: path.resolve(this.location, 'tree'),
                create: ! exists,
                cache: this._cache,
                key: 'tree',
                // TODO Use CRC32 or FNV.
                checksum: () => '0',
                extractor: parts => parts[0],
                serializer: {
                    key: {
                        serialize: key => [ key ],
                        deserialize: parts => parts[0]
                    },
                    parts: {
                        serialize: parts => parts,
                        deserialize: parts => parts
                    }
                },
            }, {
                pages: new Magazine,
                turnstile: turnstile,
                comparator: Buffer.compare,
                // Convert LevelDOWN batch operations into amalgamator merge
                // operations.
                transformer: function (operation) {
                    if (operation.type == 'put') {
                        return {
                            method: 'insert',
                            key: operation.key,
                            parts: [ operation.key, operation.value ]
                        }
                    }
                    return {
                        method: 'remove',
                        key: operation.key
                    }
                },
                // Fixed: tree tuning previously read only the `open` options,
                // silently ignoring `primary`/`stage` given to the
                // constructor and stored in `this._options`.
                primary: coalesce(options.primary, this._options.primary, {
                    leaf: { split: 1024 * 32, merge: 32 },
                    branch: { split: 1024 * 32, merge: 32 },
                }),
                stage: coalesce(options.stage, this._options.stage, {
                    leaf: { split: 1024 * 1024 * 1024, merge: 32 },
                    branch: { split: 1024 * 1024 * 1024, merge: 32 },
                })
            })
        }, function (amalgamator) {
            this._amalgamator = amalgamator
        }, function () {
            return []
        })
    })

    // Iteration of the database requires merging the results from the deep storage
    // b-tree and the one or two staging logs.
    //
    // We do this by creating a merged Homogonize iterator across the b-tree and the
    // logs. This is an iterator that takes one or more iterators and advances
    // through. It will advance each iterator and then when it is advanced, it
    // returns the least value of each of the three iterators (or greatest value if
    // iteration is reversed.)
    //
    // We then use the versioned iterator from the Designate module which will
    // select the key/value pair for a key that has the greatest committed version.
    //
    // Finally we need to respect the properties given a user when creating an
    // external iterator; start, stop, greater than, less than, greater than or
    // equal to, less than or equal to. We use a Dilute iterator to select out only
    // records that have not been deleted and that match the user's range criteria.

    //
    _paginator = function (constraint, transaction, options) {
        const [{ key, inclusive, direction }, constraints ] = constraint
        const iterator = this._amalgamator.iterator(transaction, direction, key, inclusive)
        return new Paginator(iterator, constraints, options)
    }

    _iterator = function (options) {
        return new Iterator(this, options)
    }

    // Get a single record by constraining a snapshot paginator to exactly the
    // requested key and taking its first result.
    // TODO Maybe just leave this?
    _get = cadence(function (step, key, options) {
        const constraint = createConstraint({ gte: key, lte: key })
        const snapshot = this._rotator.locker.snapshot()
        const paginator = this._paginator(constraint, snapshot, {
            keys: true, values: true, keyAsBuffer: true, valueAsBuffer: true
        })
        step(function () {
            paginator.next(step())
        }, [], function (next) {
            this._rotator.locker.release(snapshot)
            if (next.length != 0 && Buffer.compare(next[0], key) == 0) {
                return [ options.asBuffer ? next[1] : next[1].toString() ]
            }
            throw new Error('NotFoundError: not found')
        })
    })

    // Single-record writes are batches of one.
    _put = function (key, value, options, callback) {
        this._batch([{ type: 'put', key: key, value: value }], options, callback)
    }

    _del = function (key, options, callback) {
        this._batch([{ type: 'del', key: key }], options, callback)
    }

    // Could use a header record. It would sort out to be less than all the user
    // records, so it wouldn't get in the way of a search, and we wouldn't have to
    // filter it. It does however mean at least two writes for every `put` or `del`
    // and I suspect that common usage is a single `put` or `del`, so going to
    // include the count in every record, it is only 32-bits.
    _batch = cadence(function (step, batch, options) {
        const mutator = this._rotator.locker.mutator()
        step(function () {
            return this._amalgamator.merge(Fracture.stack(), mutator, batch)
        }, function () {
            return this._rotator.commit(Fracture.stack(), mutator.mutation.version)
        }, function () {
            this._rotator.locker.commit(mutator)
            return []
        })
    })

    // Approximate the size of the range `[from, to]` by paging through a
    // snapshot and summing the byte lengths of the keys and values in it.
    //
    // Fixed: previously omitted the leading `step` parameter that cadence
    // passes to the function and called a non-existent `this._whatever()`.
    _approximateSize = cadence(function (step, from, to) {
        const constraint = createConstraint({ gte: from, lte: to })
        const snapshot = this._rotator.locker.snapshot()
        const paginator = this._paginator(constraint, snapshot, {
            keys: true, values: true, keyAsBuffer: true, valueAsBuffer: true
        })
        let approximateSize = 0
        step(function () {
            step.loop([], function () {
                step(function () {
                    paginator.next(step())
                }, [], function (next) {
                    // An empty result means the paginator is exhausted.
                    if (next.length == 0) {
                        return [ step.break ]
                    }
                    approximateSize += next[0].length + next[1].length
                })
            })
        }, function () {
            this._rotator.locker.release(snapshot)
            return [ approximateSize ]
        })
    })

    // Drain outstanding writes, then tear down the structured-concurrency
    // tree built in `_open`.
    // TODO Countdown through the write queue.
    _close = cadence(function (step) {
        step(function () {
            return this._amalgamator.drain()
        }, function () {
            return this._destructible.destroy().promise
        }, function () {
            return []
        })
    })
}
400 |
// Export the constructor in the LevelDOWN fashion.
module.exports = Locket
402 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "locket",
3 | "version": "2.0.0-alpha.2",
4 | "description":
5 |
6 | "A pure-JavaScript LevelDB implementation backed by a durable and persistent evented I/O b-tree for use with LevelUP.",
7 |
8 | "keywords":
9 | [
10 | "btree", "leveldb", "levelup", "binary", "database",
11 | "json", "b-tree", "concurrent", "persistence"
12 | ],
13 | "author": "Alan Gutierrez ",
14 | "homepage": "https://github.com/bigeasy/locket",
15 | "bugs": "https://github.com/bigeasy/locket/issues",
16 | "license": "MIT",
17 | "repository":
18 | {
19 | "type": "git",
20 | "url": "bigeasy/locket"
21 | },
22 | "dependencies":
23 | {
24 | "abstract-leveldown": "7.2.0",
25 | "amalgamate": "0.1.75",
26 | "b-tree": "2.0.0-alpha.120",
27 | "cadence": "4.0.0-alpha.5",
28 | "constrain": "2.0.0-alpha.6",
29 | "destructible": "7.0.0-alpha.71",
30 | "extant": "2.0.0-alpha.4",
31 | "fracture": "0.3.0-alpha.73",
32 | "magazine": "6.0.0-alpha.9",
33 | "reciprocate": "0.0.9",
34 | "rescue": "7.0.0-alpha.18",
35 | "satiate": "0.0.5",
36 | "whittle": "0.0.3",
37 | "writeahead": "0.0.49"
38 | },
39 | "devDependencies":
40 | {
41 | "arguable": "13.0.0-alpha.6",
42 | "comeuppance": "0.0.3",
43 | "eject": "0.3.1",
44 | "leveldown": "6.1.0",
45 | "levelup": "5.1.1",
46 | "proof": "^9.0.2",
47 | "seedrandom": "3.0.5",
48 | "tape": "5.3.1"
49 | },
50 | "main": "locket",
51 | "scripts":
52 | {
53 | "test": "proof --timeout 7000 test/*.t.js"
54 | }
55 | }
56 |
--------------------------------------------------------------------------------
/redux.md:
--------------------------------------------------------------------------------
1 | ## Thu Aug 20 15:56:56 CDT 2020
2 |
3 | Simple enough to ensure that at shutdown and at restart we merge any outstanding
4 | cursors. That settles the problem of keeping track of versions. There is a
5 | recover step to extract the valid versions, then we merge.
6 |
7 | Actually, that is all that needs to happen then, recovering the outstanding
8 | versions. Simplifies things. We can then warn if the b-tree hits a corrupted
9 | merge page, not as serious as having the primary tree in a bad state.
10 |
11 | If we're very concerned we could keep a log to verify state. Learning to have
12 | some faith in the file system. If the log entries are less than the buffer size
13 | we can append to a log and it will rarely be corrupted.
14 |
For now, it will be enough to extract all valid versions out of the staging
16 | pages.
17 |
--------------------------------------------------------------------------------
/test/locket.t.js:
--------------------------------------------------------------------------------
// Exercises the full LevelDOWN surface of Locket — open, put, get, del,
// batch, iteration and reopen — with six assertions in all.
require('proof')(6, require('cadence')(function (step, okay) {
    const path = require('path')
    const fs = require('fs')

    const Destructible = require('destructible')
    const destructible = new Destructible('put.t')

    const callback = require('comeuppance')

    const Locket = require('..')

    // Scratch directory for the test database.
    const location = path.join(__dirname, 'tmp', 'locket')

    step(function () {
        // `fs.rm` with `recursive` is unavailable on Node 12; fall back to
        // the recursive form of `fs.rmdir` there.
        if (/^v12\./.test(process.version)) {
            fs.rmdir(location, { recursive: true }, step())
        } else {
            fs.rm(location, { recursive: true, force: true }, step())
        }
    }, function () {
        fs.mkdir(location, { recursive: true }, step())
    }, function () {
        // Construct and discard an instance to cover the default options.
        new Locket(location)

        // Small split/merge sizes so tree maintenance kicks in with only a
        // few records.
        const locket = new Locket(location, {
            primary: {
                leaf: { split: 64, merge: 32 },
                branch: { split: 64, merge: 32 },
            },
            stage: {
                max: 128,
                leaf: { split: 64, merge: 32 },
                branch: { split: 64, merge: 32 },
            }
        })

        step(function () {
            locket.open({ createIfMissing: true }, step())
        }, function () {
            locket.put('a', 'z', step())
        }, function () {
            // By default `get` returns the value as a `Buffer`.
            step(function () {
                locket.get('a', step())
            }, function (value) {
                okay({
                    isBuffer: Buffer.isBuffer(value),
                    value: value.toString()
                }, {
                    isBuffer: true,
                    value: 'z'
                }, 'put')
            })
        }, function () {
            // With `asBuffer: false` the value comes back as a string.
            step(function () {
                locket.get('a', { asBuffer: false }, step())
            }, function (value) {
                okay({
                    type: typeof value,
                    value: value
                }, {
                    type: 'string',
                    value: 'z'
                }, 'get')
            })
        }, function () {
            locket.del('a', step())
        }, function () {
            // A `get` of a deleted key reports not found.
            const test = []
            step([function () {
                locket.get('a', step())
            }, function (error) {
                test.push(error.message)
            }], function () {
                okay(test, [ 'NotFoundError: not found' ], 'get not found')
            })
        }, function () {
            locket.put(Buffer.from('a'), Buffer.from('z'), step())
        }, function () {
            locket.close(step())
        }, function () {
            // Reopen to verify the record survived the close.
            locket.open({ createIfMissing: true }, step())
        }, function () {
            step(function () {
                locket.get('a', step())
            }, function (value) {
                okay({
                    isBuffer: Buffer.isBuffer(value),
                    value: value.toString()
                }, {
                    isBuffer: true,
                    value: 'z'
                }, 'reopen')
            })
        }, function () {
            // Iterate the single record, then confirm the iterator ends.
            const iterator = locket._iterator({
                keys: true, values: true, keyAsBuffer: false, valueAsBuffer: false
            })
            step(function () {
                iterator.next(step())
            }, function (key, value) {
                okay({ key, value }, { key: 'a', value: 'z' }, 'next')
                iterator.next(step())
            }, [], function (ended) {
                okay(ended, [], 'ended')
                iterator.end(step())
            })
        }, function () {
            // Churn the database with repeated batches of puts followed by
            // deletes of the whole alphabet.
            const alphabet = 'abcdefghijklmnopqrstuvwxyz'.split('')

            const put = alphabet.map(letter => { return { type: 'put', key: letter, value: letter } })
            const del = alphabet.map(letter => { return { type: 'del', key: letter } })

            step.loop([ 0 ], function (i) {
                // **TODO** Double this number and it hangs indefinitely.
                if (i == 64) {
                    return [ step.break ]
                }
                step(function () {
                    locket.batch(put.concat(del), step())
                }, function () {
                    return i + 1
                })
            })
        }, function () {
            console.log('closeing')
            locket.close(step())
        }, function () {
            console.log('closed')
            locket.open({ createIfMissing: true }, step())
        }, function () {
            locket.close(step())
        })
    })
}))
135 |
--------------------------------------------------------------------------------
/test/readme.t.js:
--------------------------------------------------------------------------------
// # Locket
//
// [](https://github.com/bigeasy/locket/actions)
// [](https://codecov.io/gh/bigeasy/locket)
// [](https://opensource.org/licenses/MIT)
//
// _Salvage Necklace 5: Inside Locket by [B Zedan](http://www.flickr.com/people/bzedan/)._
//
//
//
//
// A pure-JavaScript [leveldown](https://github.com/Level/leveldown) implementation
// backed by a persistent and durable evented I/O b-tree for use with
// [levelup](https://github.com/Level/levelup) — i.e. a database.
//
// | What | Where |
// | --- | --- |
// | Discussion | https://github.com/bigeasy/locket/issues/1 |
// | Documentation | https://bigeasy.github.io/locket |
// | Source | https://github.com/bigeasy/locket |
// | Issues | https://github.com/bigeasy/locket/issues |
// | CI | https://travis-ci.org/bigeasy/locket |
// | Coverage: | https://codecov.io/gh/bigeasy/locket |
// | License: | MIT |
//
// Locket installs from NPM.

// ## Living `README.md`
//
// This `README.md` is also a unit test using the
// [Proof](https://github.com/bigeasy/proof) unit test framework. We'll use the
// Proof `okay` function to assert our statements in the readme. A Proof unit
// test generally looks like this.

require('proof')(1, okay => {
    // ## Usage

    okay('okay')
})

// You can run this unit test yourself to see the output from the various
// code sections of the readme.
--------------------------------------------------------------------------------