├── .gitignore
├── .travis.yml
├── CHANGELOG.md
├── LICENSE
├── Makefile
├── README.md
├── SIGNED.md
├── browser
│   └── libkeybase.js
├── data
├── examples
│   ├── app1.iced
│   ├── encrypt.iced
│   ├── recruit.iced
│   ├── release.iced
│   └── trigger1.json
├── lib
│   ├── assertion.js
│   ├── assertion_parser.js
│   ├── constants.js
│   ├── err.js
│   ├── kvstore.js
│   ├── main.js
│   ├── merkle
│   │   ├── leaf.js
│   │   └── pathcheck.js
│   └── sigchain
│       └── sigchain.js
├── notes
│   └── tweetnacl.md
├── package-lock.json
├── package.json
├── src
│   ├── assertion.iced
│   ├── assertion_parser.jison
│   ├── constants.iced
│   ├── err.iced
│   ├── kvstore.iced
│   ├── main.iced
│   ├── merkle
│   │   ├── leaf.iced
│   │   └── pathcheck.iced
│   └── sigchain
│       └── sigchain.iced
└── test
    ├── browser
    │   ├── index.html
    │   └── main.iced
    ├── files
    │   ├── 29_merkle_pathcheck.iced
    │   ├── 30_merkle_leaf.iced
    │   ├── 31_sigchain.iced
    │   ├── 32_kvstore.iced
    │   └── 33_assertion.iced
    └── run.iced
/.gitignore:
--------------------------------------------------------------------------------
1 | build-stamp
2 | node_modules/
3 | test-stamp
4 | test/browser/test.js
5 | .nyc_output
6 |
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | language: node_js
2 | node_js:
3 | - "node"
4 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## 1.2.33 (2019-11-04)
2 |
3 | - fix CI
4 |
5 | ## 1.2.32 (2019-11-04)
6 |
7 | - Upgrade libraries
8 |
9 | ## 1.2.31 (2019-02-01)
10 |
11 | - Change (new Buffer) to Buffer.from
12 |
13 | ## 1.2.30 (2018-11-16)
14 |
15 | - Update triplesec version
16 |
17 | ## 1.2.29 (2018-11-16)
18 |
19 | - Upgrade merkle-tree to 1.0.4
20 |
21 | ## 1.2.28 (2018-02-12)
22 |
23 | - Update iced error to '^0.0.9' so that modern versions can be used
24 |
25 | ## 1.2.27 (2018-02-08)
26 |
27 | Bugfix: fix some bugs with reset accounts in 1.2.26
28 |
29 | ## 1.2.26 (2018-02-08)
30 |
31 | Feature:
32 | - Add a reset chain tail into position 4, to better capture reset sequence
33 |
34 | ## 1.2.25 (2016-08-29)
35 |
36 | Feature:
37 | - Add trivial Facebook awareness.
38 |
39 | ## 1.2.24 (2015-12-31)
40 |
41 | Rollback 1.2.23
42 |
43 | ## 1.2.23 (2015-12-31)
44 |
45 | Mistaken release: an unneeded feature
46 |
47 | ## 1.2.22 (2015-10-20)
48 |
49 | Bugfix:
50 | - Fix bugs related to detecting sigchain resets.
51 |
52 | ## 1.2.21 (2015-10-05)
53 |
54 | Bugfix:
55 | - Ignore broken reverse-sigs (https://github.com/keybase/libkeybase-js/issues/5)
56 |
57 | ## 1.2.20 (2015-09-21)
58 |
59 | Bugfix:
60 | - missing dependency in package.json
61 | - callback accidentally called twice in error case
62 | Enhancement:
63 | - Simplified fix for 15 Sep 2015 bug. There's only one case to consider.
64 |
65 | ## 1.2.19 (2015-09-18)
66 |
67 | Bugfix:
68 | - Workaround keybase/keybase-issues#1765 signature corruption
69 |
70 | ## 1.2.18 (2015-09-11)
71 |
72 | Feature:
73 | - Respect PGP key hashes in the sigchain.
74 |
75 | ## 1.2.17 (2015-08-13)
76 |
77 | Changes:
78 | - Merge userids when merging PGP keys.
79 |
80 | ## 1.2.16 (2015-08-05)
81 |
82 | Changes:
83 | - A new assertion type for sig_seqnos
84 |
85 | ## 1.2.15 (2015-07-25)
86 |
87 | Changes:
88 | - Update kbpgp, use merge_public_omitting_revokes().
89 |
90 | ## 1.2.14 (2015-07-25)
91 |
92 | Bugfix:
93 | - Merge PGP primary keys via kbpgp @v2.0.33
94 | - Close keybase/keybase-issues#1707
95 |
96 | ## 1.2.13 (2015-07-21)
97 |
98 | Features:
99 | - Add logging to sigchain replays.
100 |
101 | ## 1.2.12 (2015-07-06)
102 |
103 | Bugfix:
104 | - Move to kbpgp v2.0.27 w/ subkey merging to handle zaher's sigchain
105 |
106 | ## 1.2.11 (2015-07-06)
107 |
108 | Bugfix:
109 | - Fix for zaher's sigchain, deal with multiple PGP uploads to recover
110 | removed subkeys
111 |
112 | ## 1.2.10 (2015-07-05)
113 |
114 | Bugfix:
115 | - Remove debugging from the previous
116 |
117 | ## 1.2.9 (2015-07-05)
118 |
119 | Bugfix:
120 | - Workaround for bad username -> UID conversion
121 | - Fixes: keybase/keybase#1655
122 |
123 | ## 1.2.8 (2015-07-03)
124 |
125 | Bugfix:
126 | - Fix to the preceding release, in which opts weren't properly passed.
127 |
128 | ## 1.2.7 (2015-07-03)
129 |
130 | Bugfixes:
131 | - Allow key time_travel, so that we can check prior states in which
132 | subkeys might still have been valid (though now they're expired).
133 |
134 | ## 1.2.6 (2015-07-02)
135 |
136 | Bugfixes:
137 | - Case-insensitive username comparisons (keybase/keybase-issues#1654)
138 |
139 | ## 1.2.5 (2015-07-02)
140 |
141 | Features:
142 | - Add a debug counter to track the number of unboxes we do.
143 |
144 | ## 1.2.4 (2015-07-01)
145 |
146 | Bugfixes:
147 | - Don't use Buffer.equals(), since it doesn't work on Node < 0.12.0;
148 | Use the paranoid bufeq_secure anyways.
149 |
150 | ## 1.2.3 (2015-07-01)
151 |
152 | Bugfixes:
153 | - Make keybase-test-vectors a dev dependency only.
154 |
155 | ## 1.2.2 (2015-07-01)
156 |
157 | Features:
158 | - New version of kbpgp.
159 |
160 | ## 1.2.1 (2015-07-01)
161 |
162 | Bugfixes:
163 | - Stop using server ctime at all. Sometimes the server is wrong.
164 |
165 | ## 1.2.0 (2015-06-23)
166 |
167 | Features:
168 | - Change the sigchain interface to make mistakes less likely.
169 |
170 | ## 1.1.7 (2015-06-23)
171 |
172 | Oops, I forgot to update the CHANGELOG for a while there.
173 |
174 | Features:
175 | - A full sigchain implementation.
176 | - A shared test suite in keybase-test-vectors.
177 |
178 | ## 1.0.2 (2015-04-21)
179 |
180 | Features:
181 | - Eldest_kid is now 3rd (0-indexed) slot in top-level array
182 | - If we add more slots to "triples" in the future, they can
183 | take any form.
184 | - Complete test coverage of Merkle leaf decoding
185 |
186 | ## 0.0.6 (2014-09-19)
187 |
188 | Features:
189 |
190 | - Expose more parsing innards, to be used from the node-client command line
191 |
192 | ## 0.0.5 (2014-09-19)
193 |
194 | Bugfixes:
195 |
196 | - Throw an error on assertions that can't possibly be useful.
197 |
198 | ## 0.0.4 (2014-09-19)
199 |
200 | Bugfixes:
201 |
202 | - Better error checking for assertions (and hopefully better error messages)
203 |
204 | ## 0.0.3 (2014-09-19)
205 |
206 | Features:
207 |
208 | - New flexible assertion language support; we're going to change the client to incorporate it.
209 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2014, Keybase
2 | All rights reserved.
3 |
4 | Redistribution and use in source and binary forms, with or without
5 | modification, are permitted provided that the following conditions are met:
6 |
7 | * Redistributions of source code must retain the above copyright notice, this
8 | list of conditions and the following disclaimer.
9 |
10 | * Redistributions in binary form must reproduce the above copyright notice,
11 | this list of conditions and the following disclaimer in the documentation
12 | and/or other materials provided with the distribution.
13 |
14 | * Neither the name of the {organization} nor the names of its
15 | contributors may be used to endorse or promote products derived from
16 | this software without specific prior written permission.
17 |
18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
19 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
20 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28 |
29 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | default: build
2 | all: build
3 |
4 | ICED=node_modules/.bin/iced
5 | JISON=node_modules/.bin/jison
6 | BUILD_STAMP=build-stamp
7 | TEST_STAMP=test-stamp
8 | TEST_STAMP=test-stamp
9 | UGLIFYJS=node_modules/.bin/uglifyjs
10 | WD=`pwd`
11 | BROWSERIFY=node_modules/.bin/browserify
12 |
13 | BROWSER=browser/libkeybase.js
14 |
15 | lib/assertion_parser.js: src/assertion_parser.jison
16 | $(JISON) -o $@ $<
17 |
18 | lib/%.js: src/%.iced
19 | $(ICED) -I browserify -c -o `dirname $@` $<
20 |
21 | $(BUILD_STAMP): \
22 | lib/assertion.js \
23 | lib/assertion_parser.js \
24 | lib/constants.js \
25 | lib/err.js \
26 | lib/kvstore.js \
27 | lib/main.js \
28 | lib/merkle/leaf.js \
29 | lib/merkle/pathcheck.js \
30 | lib/sigchain/sigchain.js
31 | date > $@
32 |
33 | clean:
34 | find lib -type f -name *.js -exec rm {} \;
35 | rm -rf $(BUILD_STAMP) $(TEST_STAMP) test/browser/test.js
36 |
37 | setup:
38 | npm install -d
39 |
40 | coverage:
41 | ./node_modules/.bin/nyc $(ICED) test/run.iced
42 |
43 | test: test-server test-browser
44 |
45 | build: $(BUILD_STAMP)
46 |
47 | browser: $(BROWSER)
48 |
49 | $(BROWSER): lib/main.js $(BUILD_STAMP)
50 | $(BROWSERIFY) -u sodium -s kbpgp $< > $@
51 |
52 | test-server: $(BUILD_STAMP)
53 | $(ICED) test/run.iced
54 |
55 | test-browser: $(TEST_STAMP) $(BUILD_STAMP)
56 | @echo "Please visit in your favorite browser --> file://$(WD)/test/browser/index.html"
57 |
58 | $(TEST_STAMP): test/browser/test.js
59 | date > $@
60 |
61 | test/browser/test.js: test/browser/main.iced $(BUILD_STAMP)
62 | $(BROWSERIFY) -u sodium -t icsify $< > $@
63 |
64 | .PHONY: clean setup test test-browser coverage
65 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ### libkeybase-js [Build Status](https://travis-ci.org/keybase/libkeybase-js)
2 |
3 | Various keybase libraries useful in Node or in the browser for manipulating
4 | site crypto.
5 |
--------------------------------------------------------------------------------
/SIGNED.md:
--------------------------------------------------------------------------------
1 | ##### Signed by https://keybase.io/oconnor663
2 | ```
3 | -----BEGIN PGP SIGNATURE-----
4 | Version: GnuPG v2
5 |
6 | iQEbBAABAgAGBQJWEqNGAAoJEHGHa2itSC0ylgEH9RBT6xh08BZz3sK55ETpALvN
7 | Sr5qReHHiGHFdgJop2gtbWVgYNdb+Z/ozPN+uwhL/Kaz5D+V/jr0rEK/cCDzNicS
8 | E0ITof1VVlfqnEjJj09R+vOMSEhzoyIV+t7RnaZn2qNyTo7TS2sxZc5zFoYFH4pq
9 | G+yujl0mH1ZwXdpsWrCpB1ARg8IbnMOH1topr24MVBEa+mbUorVN1NiaSHzNHvgy
10 | gA2HfPeS4v9XoF2FV3nqL9WsnozMCvzmEO6ZIe49zUZY+DybTMsXfJUTWy5F4dY4
11 | GlQkRqTWKa/UtbTdXdOZix/4OJ0dzkF9Js8uSYx76TETd+fSMOEOMk985sCMUA==
12 | =IEzk
13 | -----END PGP SIGNATURE-----
14 |
15 | ```
16 |
17 |
18 |
19 | ### Begin signed statement
20 |
21 | #### Expect
22 |
23 | ```
24 | size exec file contents
25 | ./
26 | 67 .gitignore 5e89d4014d03a0f7f21f1429ab57b662f835214ac9bc4512285fed2982011191
27 | 3182 CHANGELOG.md 3e4f315fb907781d2fa7cf54cf72ebf7a0078b8d5256dc55a606fc840a91c7c5
28 | 1484 LICENSE 20a8a5de57bfaf6870517c52265d4469770a23cbc1af85eb9973ce639b0abff2
29 | 1353 Makefile d7f684e836240e402e48f131cf4a8fe5efb722156cb8587f5e25288cfb812f2d
30 | 109 README.md f2dd5d8192cff83f9c44a3e56779977bfe82725df259970a5c2d5396e6f87e13
31 | browser/
32 | 6045 libkeybase.js 92b7c0f624b90754f89aa55f19a7cd6d45dc22cdac2c8e5ed9a353e26d498c89
33 | 3973 data ac1fb9d3a854c92f33c833ee8a263c640040092ec20dddede7f1bc67770dea36
34 | examples/
35 | 2768 app1.iced 88c574445bf29fdc74e899f4c7fff14933fb6b1d9a6ee788387dc2586d4dd51b
36 | 3983 encrypt.iced 319601683b74ce376e69da3a1f04c5317ce39a5c31a3c36ad90fc66dafcddabc
37 | 1730 recruit.iced 645be6aa67bfe992133503feddb66b9dc0c551dd4744ac3335f3230ceadcbcf4
38 | 1232 release.iced 89c83622eb0099f732de4c026c7d807998df328d8d364ec9cffbe179c398c98c
39 | 1249 trigger1.json 540beed96b0b13697bd7222a52fb48288ee4e99ba7ea328ef55506604e8bc1e6
40 | lib/
41 | 8627 assertion.js 7d5c2eb9951c87425450a11deea9d98533e48d5dde287a49c39198ed9f90c287
42 | 21312 assertion_parser.js d092db7012e31dea39a87232d2162ace9bf3f4c86550453471dc6e6c61823670
43 | 267 constants.js 7cb39b3933926960bd762fb9c6251eb67d36dfb1c62925739588510a43bbfb2b
44 | 2712 err.js b6ca0b3731e1cbb5902940367679e9213ef857082a8568569ce141bd50d75d41
45 | 22073 kvstore.js 40d061e72d572746b5d557d28e16ed43a4cd23da459e004382e2d15dab855702
46 | 625 main.js c046e0c31a2b087f1dc56a373b46f16691ce3065d4cd0c9ca0fa5c62bfb233c0
47 | merkle/
48 | 6356 leaf.js b22f0873a9f0e9adb00be96549f25a637af02dff88b34e8392d9d46f4835c0c3
49 | 16991 pathcheck.js 781c7078bfa40c5196036c8a886e7ff217f6bb7111ff2c30df61e2184400af2b
50 | sigchain/
51 | 56729 sigchain.js be9d400b161fa8867cfba8ef4c75b5f24d9992b8a39f81f5d6ddd3dcaab99687
52 | notes/
53 | 2129 tweetnacl.md 099fac68f7caebd05b6060781e7fcfb32726309bb4bd67aa35b10134a280e049
54 | 1138 package.json 628e78f8ab54b765f36ec2334f5c37937489006ae3b35f65fd620587b05c6c06
55 | src/
56 | 4407 assertion.iced fac9d95f8915318ec94dcd9f57f0f6a1b65b210429e6a5f6ac2ad8d33b97cd10
57 | 634 assertion_parser.jison 65624a141081113074e6c778af7df8fad6769d3c5b2fb96e8edcd31444f8706f
58 | 150 constants.iced 51334d9c4a8809755185ffa9363bc2bfd40582a78b18bf048136961b4385dfae
59 | 2590 err.iced 1c5a05067c904716368ed055186131c58691d5183a584f6a74677c882e7b441b
60 | 6948 kvstore.iced 53ebce5a6b584fc9977c6502554a0df97463a94416de4eacbb572fafe83f052d
61 | 466 main.iced 0d36dd68f0281e58d0269395bcd9b2ef4d6d2ffbd69307e292b5aa14e646b4df
62 | merkle/
63 | 3857 leaf.iced 5de4556aff8642d7bdfc39cb0edff1acbeb693ce040faa272fb4cf8fe3b6092d
64 | 4470 pathcheck.iced 23650aa2a4db7497e55b23c3f0ff0cdefeb60ff8f0363a6f4a1348405e9f5cc8
65 | sigchain/
66 | 30446 sigchain.iced 111175e79cbc8870bb90798409df4b58dbd10fc1ca0fbd025f35570ff8d5b992
67 | test/
68 | browser/
69 | 287 index.html e31387cfd94034901e89af59f0ad29a3e2f494eb7269f1806e757be21b3cf33e
70 | 258 main.iced a37b688cc46a4cfe2eee5892f556d4a4a96b2fcbe59e8e50e935bbc57262f16b
71 | files/
72 | 305797 29_merkle_pathcheck.iced eef8556b0450553b841643bd478ad2cd73e4ffd266f94be3262549606a1ea04c
73 | 4706 30_merkle_leaf.iced 2ed24fd02ac4d9c39149d974d774760448db6f4a600fd9adbb3b1b35e0d0000c
74 | 7330 31_sigchain.iced 9bb351dcee7b2655707e16c5537f1713fcbfabfad069fd9b6de42442aa46cc64
75 | 5624 32_kvstore.iced 90bacb5973649246d91f959d64c6cb6a7d52e1a2784ccd4363997f2ef5ef17a2
76 | 2704 33_assertion.iced 73f0b3beef768b9d003d7273f8917a2ada71b8d13b46cdf87d1eaa11c73340e2
77 | 52 run.iced 8e58458d6f5d0973dbb15d096e5366492add708f3123812b8e65d49a685de71c
78 | ```
79 |
80 | #### Ignore
81 |
82 | ```
83 | /SIGNED.md
84 | ```
85 |
86 | #### Presets
87 |
88 | ```
89 | git # ignore .git and anything as described by .gitignore files
90 | dropbox # ignore .dropbox-cache and other Dropbox-related files
91 | kb # ignore anything as described by .kbignore files
92 | ```
93 |
94 |
95 |
96 | ### End signed statement
97 |
98 |
99 |
100 | #### Notes
101 |
102 | With keybase you can sign any directory's contents, whether it's a git repo,
103 | source code distribution, or a personal documents folder. It aims to replace the drudgery of:
104 |
105 | 1. comparing a zipped file to a detached statement
106 | 2. downloading a public key
107 | 3. confirming it is in fact the author's by reviewing public statements they've made, using it
108 |
109 | All in one simple command:
110 |
111 | ```bash
112 | keybase dir verify
113 | ```
114 |
115 | There are lots of options, including assertions for automating your checks.
116 |
117 | For more info, check out https://keybase.io/docs/command_line/code_signing
--------------------------------------------------------------------------------
/browser/libkeybase.js:
--------------------------------------------------------------------------------
1 | !function(e){if("object"==typeof exports&&"undefined"!=typeof module)module.exports=e();else if("function"==typeof define&&define.amd)define([],e);else{var f;"undefined"!=typeof window?f=window:"undefined"!=typeof global?f=global:"undefined"!=typeof self&&(f=self),f.kbpgp=e()}}(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o 2) && ((_ref = this.val[2]) != null ? _ref.length : void 0) ? this.parse_triple(this.val[2]) : null;
93 | return new Leaf({
94 | pub: pub,
95 | semipriv: semipriv
96 | });
97 | };
98 |
99 | Parser.prototype.match_hex = function(s) {
100 | return (typeof s === 'string') && !!(s.match(/^([a-fA-F0-9]*)$/)) && (s.length % 2 === 0);
101 | };
102 |
103 | Parser.prototype.parse_triple = function(val) {
104 | var msg;
105 | msg = val.length < 2 ? "Bad triple with < 2 values" : val.length > 3 ? "Bad triple with > 3 values" : typeof val[0] !== 'number' ? "Bad sequence #" : !this.match_hex(val[1]) ? "bad value[1]" : val.length > 2 && val[2].length && !this.match_hex(val[2]) ? "bad value[2]" : null;
106 | if (msg != null) {
107 | throw new Error(msg);
108 | }
109 | return new Triple({
110 | seqno: val[0],
111 | payload_hash: val[1],
112 | sig_id: val[2]
113 | });
114 | };
115 |
116 | return Parser;
117 |
118 | })();
119 |
120 | exports.Leaf = Leaf = (function() {
121 | function Leaf(_arg) {
122 | this.pub = _arg.pub, this.semipriv = _arg.semipriv;
123 | }
124 |
125 | Leaf.prototype.get_public = function() {
126 | return this.pub;
127 | };
128 |
129 | Leaf.prototype.get_semiprivate = function() {
130 | return this.semipriv;
131 | };
132 |
133 | Leaf.prototype.to_json = function() {
134 | var ret;
135 | ret = [C.versions.leaf.v2, this.pub.to_json()];
136 | if (this.semipriv != null) {
137 | ret.push(this.semipriv.to_json());
138 | }
139 | return ret;
140 | };
141 |
142 | Leaf.prototype.to_string = function() {
143 | return JSON.stringify(this.to_json());
144 | };
145 |
146 | Leaf.parse = function(version, val) {
147 | var e, err, leaf, parser;
148 | parser = new Parser(version, val);
149 | err = leaf = null;
150 | try {
151 | leaf = parser.parse();
152 | } catch (_error) {
153 | e = _error;
154 | err = e;
155 | }
156 | return [err, leaf];
157 | };
158 |
159 | Leaf.prototype.seqno_assertion = function() {
160 | return (function(_this) {
161 | return function(rows) {
162 | var found, seqno, seqno_type, triple, _i, _len, _ref, _ref1, _ref2;
163 | found = {};
164 | for (_i = 0, _len = rows.length; _i < _len; _i++) {
165 | _ref = rows[_i], seqno_type = _ref.seqno_type, seqno = _ref.seqno;
166 | triple = (function() {
167 | switch (seqno_type) {
168 | case C.seqno_types.PUBLIC:
169 | return this.pub;
170 | case C.seqno_types.SEMIPRIVATE:
171 | return this.semipriv;
172 | default:
173 | return null;
174 | }
175 | }).call(_this);
176 | if ((triple == null) || (triple.seqno !== seqno)) {
177 | return false;
178 | }
179 | found[seqno_type] = true;
180 | }
181 | if (((_ref1 = _this.semipriv) != null ? _ref1.seqno : void 0) && (!found[C.seqno_types.SEMIPRIVATE])) {
182 | return false;
183 | }
184 | if (((_ref2 = _this.pub) != null ? _ref2.seqno : void 0) && (!found[C.seqno_types.PUBLIC])) {
185 | return false;
186 | }
187 | return true;
188 | };
189 | })(this);
190 | };
191 |
192 | return Leaf;
193 |
194 | })();
195 |
196 | }).call(this);
197 |
198 | },{"../constants":1}]},{},[2])(2)
199 | });
--------------------------------------------------------------------------------
/data:
--------------------------------------------------------------------------------
1 | iced> createHash('sha256').update("""{"body":{"key":{"eldest_kid":"010179c28cac38659e4119ebb385e703f488915d63399a46abb2d0f44851835281540a","fingerprint":"6f2f8d5fde046f22b87491e3653cf869fabfcb90","host":"keybase.io","key_id":"653cf869fabfcb90","kid":"010179c28cac38659e4119ebb385e703f488915d63399a46abb2d0f44851835281540a","uid":"33d6ed2df7e25cb985071d4028aa0119","username":"proservices"},"type":"web_service_binding","version":1},"ctime":1442353190,"expire_in":157680000,"prev":null,"seqno":1,"tag":"signature"}""").digest().toString('hex')
2 | '3803be27ec0c61b3fdcd8b9b7c78de3df73766736ef00727267858d34a039c7d'
3 | iced> createHash('sha256').update("""{"body":{"key":{"eldest_kid":"010179c28cac38659e4119ebb385e703f488915d63399a46abb2d0f44851835281540a","fingerprint":"6f2f8d5fde046f22b87491e3653cf869fabfcb90","host":"keybase.io","key_id":"653cf869fabfcb90","kid":"010179c28cac38659e4119ebb385e703f488915d63399a46abb2d0f44851835281540a","uid":"33d6ed2df7e25cb985071d4028aa0119","username":"proservices"},"type":"web_service_binding","version":1},"ctime":1442353190,"expire_in":157680000,"prev":null,"seqno":1,"tag":"signature"}\n""").digest().toString('hex')
4 | 'a05c5533c3a0be260c2c61d3e026c7f0ed9f050cf7fb1b3375561e9b74900f39'
5 |
6 |
7 | debug: |||||| libkeybase: replaying signature 1: 70c4026afec66312456b6820492b7936bff42b58ca7a035729462700677ef4190f
8 | debug: |||||| libkeybase: chain link parsed, type 'web_service_binding'
9 | debug: |||||| libkeybase: link not in the current subchain -- skipping ahead
10 | debug: |||||| libkeybase: replaying signature 2: e026622f98786452f8e75168941f6137de99a15034e9c9399e846e7aa2dda50b0f
11 | debug: |||||| libkeybase: chain link parsed, type 'web_service_binding'
12 | error: Previous payload hash doesn't match, expected: a05c5533c3a0be260c2c61d3e026c7f0ed9f050cf7fb1b3375561e9b74900f39 got: 3803be27ec0c61b3fdcd8b9b7c78de3df73766736ef00727267858d34a039c7d
13 |
14 | chainlink 2:
15 |
16 | sig: -----BEGIN PGP MESSAGE-----
17 | Comment: GPGTools - https://gpgtools.org
18 |
19 | owGtUm1QVFUYXlBLdlo+I5sBaboBMrU49/ueu1KSIuiq4fA5qbXdj3OXOwt7191l
20 | WUJsFkoF2ySC0YkNy+2HilgiDtGk0BQJATs1ZFMyiRm7DDpLM+UHMabdZehHTT87
21 | f877vud5nvO877wtumUabRTMmZcXBkZbokZneU3pwqXEOoRXxFrEUIdY4OIFK0Xo
22 | cJossogYEBRDMYYVcCBwAgFoioUkhrGQ5wlAQQYlJBIAFqNEmiBYliNpjudxEZVI
23 | ElAYICgcYBSJcogekWSrGdptdtnqVGVpCZeASEkiREk1xnnAkCwGCZoiBAnQrMTx
24 | ksCzqEqsUBwRhmqO5xxwrayoNTUxLdr7D/z/7Lt6UY4gRBqKuCgxEKfUjwCFMphI
25 | ojjgOFQVjgAd0G7lqqCKttkVNXHJAnQg9XpkKY7Mdglglp0V1fw/SZWyVRHhv6jO
26 | WlvkrQbypqWyiZetojpLleyCdoesWBEDpiIFpxyRwUgSJ1lAYKgegW6bbIcmOYKg
27 | GBqg6tGr5qAr0hFAdxM8xBkooAKN8YQkCiLgWZ4RGCBCQm2VYGiaIWgooSiDMzjN
28 | AAqIBMmhBCswIhJpbI9VQQy46pMzq5oO2WzlnNV2iNRrm6JTl2uitJqHVkRHtkyj
29 | jYn/e/XAn3EPdvSV/FioUzb4RjqGlLbK7KSCT7/f3P0EFog7OL3yxXupWecL5Qwq
30 | tLnMMzs1vU/ZOFl4aF3Z2nHvrzHuzivp606vcVi1j0ysfOdYm+4b0+rklqMz4dhd
31 | Ynt4mDx8p7xo7OGOvOItNUc7Pil4oPH87Jq/+pKui/6Su79lNDPe29AyHrj6ViAl
32 | etUNPTPWnBQ+iFxL7c1hjvecmVtm9s2svnG2ZcDLJPXUnynI8P0ERwbrAvnnkxpn
33 | LE+XGo9RX5UHko07plYUr6nwZADfc0ZH1N1+Q+6tPEdnzBcVKcH20ZnkhZxWROvh
34 | zxVNvtK0d3304Zr3T3wmPN5R/+aTZ9G+mLyT+bd3J8+kP9MV1Ea5T7WlPjryxpir
35 | sF9sC3Yn0kXh3/ELty5evpAQnJ544ePtv021617dHgd0tc01qKv45eP7t1nLLFkJ
36 | PemDmUO5+XhT59ZNk36N7pfpPX1bXy9Z1XDtw+utT9kqEgp/+DbhA7//tffKk7zZ
37 | eu2pynFX7D0LtnPDZCj2ROmB9nhPf3bznNt/7qPBm/Oh1lCzJYu+3Dt0Z9vy3MDF
38 | onBVic9z4Miz14fGMr3+jcpNdl/awOnkql13Z/ku49zemkPv3sbThpt8abbg54m6
39 | 3qyyyamvgfv5t0PzfyT2+lOOfDdx/7HpS1dwZTgYM9+o6W7sXG8KNMafbNi531j9
40 | Fw==
41 | =8kUm
42 | -----END PGP MESSAGE-----
43 | payload_json: {"body":{"key":{"eldest_kid":"010179c28cac38659e4119ebb385e703f488915d63399a46abb2d0f44851835281540a","fingerprint":"6f2f8d5fde046f22b87491e3653cf869fabfcb90","host":"keybase.io","key_id":"653cf869fabfcb90","kid":"010179c28cac38659e4119ebb385e703f488915d63399a46abb2d0f44851835281540a","uid":"33d6ed2df7e25cb985071d4028aa0119","username":"proservices"},"service":{"name":"github","username":"linodeproservices"},"type":"web_service_binding","version":1},"ctime":1442498310,"expire_in":157680000,"prev":"3803be27ec0c61b3fdcd8b9b7c78de3df73766736ef00727267858d34a039c7d","seqno":2,"tag":"signature"}
44 |
--------------------------------------------------------------------------------
/examples/app1.iced:
--------------------------------------------------------------------------------
1 |
2 | # Some pseudocode for a 3rd-party app, that uses keybase for key delegation...
3 |
4 | libkb = require 'libkeybase'
5 | {URI,api,KeyManager,User} = libkb
6 | {CSR} = libkb.api
7 |
8 | # Load up the "me" user as usual to access my public key info.
9 | {LocalStore} = require 'myapp'
10 | await LocalStore.open {}, defer err, store
11 | await User.load { store, query : { keybase : "max" }, opts : {} }, defer err, me
12 |
13 | appid = "aabbcc15"
14 | device_id = "iphone-4"
15 |
16 | # Application master key, shared across all devices; might be discarded if already registered
17 | # uri.format() would output keybase://max@/aabbcc15
18 | uri = new URI { username : "max", host : null, app_id }
19 |
20 | # Generate a new random key. You can use our KeyManager class for this:
21 | await KeyManager.generate { algo : "ed25519", params : {}, uri }, defer err, km_master
22 |
23 | # device-specific key
24 | # uri.format() would output keybase://max@keybase.io/aabbcc15/iphone-4
25 | uri = new URI { username : "max", host : "keybase.io", app_id, device_id }
26 | await KeyManager.generate { algo : "ed25519", params : {}, uri }, defer err, km_device
27 |
28 | keys = { master : km_master, device : km_device }
29 |
30 | # Here are the steps in generating a 'CSR':
31 | # 1. For each key in the keyset
32 | # a. Sign the user's primary key bundle with the new key
33 | # 2. Bundle all sigs, and keys into a JSON object
34 | # 3. Generate a random, unguessable number too (maybe?)
35 | await CSR.generate { keys, user : me }, defer err, csr
36 |
37 | # Also might want to piggy-back on the CSR a secret shared across all device installs
38 | # of this app, though the feature is optional...
39 | ops = ENCRYPT
40 | uri = null # the primary (default) key manager
41 | await me.load_key_manager { uri, ops }, defer err, km_primary
42 | await km_master.export_private {}, defer err, km_master_priv
43 |
44 | # Problem: if encrypt_for is a PGP key and sign_with is some other format,
45 | # then we can't use the standard PGP sign-then-encrypt system.
46 | await box { sign_with : km_master, encrypt_for : km_primary, data : km_master_priv }, defer err, ctext
47 |
48 | # Now affix the piggy-backed reencryption request onto the CSR.
49 | csr.piggy_back { ciphertext: ctext }
50 |
51 | # Compute some sort of hash or visual hash so that the user isn't tricked into
52 | # approving a bogus CSR over on keybase
53 | await csr.compute_hash {}, defer err, hash
54 |
55 | # Post this CSR to keybase and get back an object to track and/or wait for
56 | # the auth to finish
57 | await csr.post_to_server {}, defer err, external_csr_auth
58 |
59 | # Direct the user to this Web URI, maybe?
60 | # Might also consider cross-app calls on iPhone or Android
61 | console.log external_csr_auth.get_web_uri()
62 |
63 | # Poll and/or open a socket to figure out when it's complete.
64 | await external_csr_auth.wait {}, defer err, status
65 |
66 | # not sure how this is going to work --- we can get a new master key
67 | await external_csr_auth.unbox { keyring : km_device }, defer err, plaintext
68 |
--------------------------------------------------------------------------------
/examples/encrypt.iced:
--------------------------------------------------------------------------------
1 |
2 | libkb = require 'libkeybase'
3 | {Assertion,User} = libkb
4 |
5 | # Your app needs to provide some idea of local storage that meets our requirements.
6 | {LocalStore} = require 'myapp'
7 |
8 | # Open the LocalStore, which can create one if none existed beforehand.
9 | await LocalStore.open {}, defer err, store
10 |
11 | # In this case, we assume that the user exists, and that we don't want to work
12 | # around a failure in loading him/her. In contrast, we'll see other versions of
13 | # this process in which we make temporary provisions based on a user not existing
14 | # (see recruit.iced).
15 |
16 | # Steps to loading a user:
17 | #
18 | # 1. Fetching all signature data from the server (or from local storage)
19 | # 2. Cryptographic Checks
20 | # a. Checking the tail of the sigchain is in the Merkle Tree.
21 | # b. Checking the merkle tree is in the blockchain (optional)
22 | # c. Checking all links of the signature chain point to each other.
23 | # d. Checking that the tail is signed by the most recent active public key (the user might have
24 | # switched halfway down the chain).
25 | # 3. Identity Table Construction - Flatten the signature chain into a final "identity table"
26 | # of remote identities that link to this username/key combination.
27 | #
28 | # Next, 4a, b, and c can happen in any order:
29 | #
30 | # 4a. Remote check --- check that all proofs in the identity table still exist
31 | # 4b. Tracking resolution --- check the computed identity table against any existing tracker statements,
32 | # and resolve what needs to be fixed to bring any stale tracking up-to-date
33 | # 4c. Assertions -- check the user's given assertions against the computed identity table
34 | #
35 | # Next, 5 can happen only after all of 4a, b, and c
36 | #
37 | # 5. track/retrack -- sign a new tracking statement, if necessary, signing off on the above computations.
38 | #
39 |
40 | # Load a user from the server, and perform steps 1, 2, and 3. Recall that step 2b is optional,
41 | # and you can provide options here to enable it. If you do provide that option, there might be a
42 | # latency of up to 6 hours.
43 | #
44 | # The Store is optional, but if provided, we can check the store rather than
45 | # fetch from the server.
46 | await User.load { store, query : { keybase : "max" }, opts : {} }, defer err, me
47 |
48 | # As in 4c above...
49 | assertion = Assertion.compile "(key://aabbccdd && reddit://maxtaco && (https://goobar.com || http://goobar.com || dns://goobar.com))"
50 | await me.assert { assertion }, defer err
51 |
52 | # Load a second user...
53 | await User.load { store, query : { "twitter" : "malgorithms" } }, defer err, chris
54 |
55 | #
56 | # Note that there is a 1-to-1 correspondence between the IdentityTable object and the
57 | # User object, but they are split apart for convenience.
58 | #
59 | idtab = chris.get_identity_table()
60 |
61 | # As in 4b above...
62 | #
63 | # State can be: NONE, if I haven't tracked Chris before; OK if my tracking
64 | # statement is fully up-to-date, STALE if my tracking statement is out-of-date,
65 | # or SUBSET, if it's a proper subset of the current state.
66 | #
67 | await idtab.check_tracking { tracker : me }, defer err, state
68 |
69 | # As in 4a above.
70 | #
71 | # An error will be returned if there was a catastrophic failure, not if
73 | # any one of the proofs failed. Check the status field for OK if all succeeded, or
73 | # PARTIAL_FAILURE if some failed.
74 |
75 | await idtab.check_remotes {}, defer err, status, failures
76 |
77 | # Outputs any failures in JSON format, though you can query the idtab in a number of different ways
78 | # (which aren't finalized yet...)
79 | failures = idtab.get_failures_to_json()
80 |
81 | # As in 4c, optional assertions against the identity table
82 | await idtab.assert { assertion : [ { "key" : "aabb" }, { "reddit" : "maxtaco" } ] }, defer err
83 |
84 | # Fetch a key manager for a particular app (or for the main app if none specified), and for
85 | # the given public key operations.
86 | await chris.load_key_manager { subkey_name : "myencryptor", ops }, defer err, km
87 |
88 | # Also possible to list subkeys; will generate a list of active keys. Can query by
89 | # prefix, or regex, or exact match (name).
90 | await chris.list_keys { prefix : "myapp." }, defer err, keys
91 |
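92 | # As in 5 above: once 4a, 4b, and 4c have all completed, sign a fresh tracking
93 | # statement if one is needed. (Hypothetical call -- a tracking API isn't sketched
94 | # anywhere else in these examples.)
95 | await me.track { user : chris, id_table : idtab }, defer err
96 | 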
--------------------------------------------------------------------------------
/examples/recruit.iced:
--------------------------------------------------------------------------------
1 |
2 | libkb = require 'libkeybase'
3 | assert = require 'assert'
4 | {User,E,Assertion} = libkb
5 |
6 | # Your app needs to provide some idea of local storage that meets our requirements.
7 | {LocalStore} = require 'myapp'
8 |
9 | # Open the LocalStore, which can create one if none existed beforehand.
10 | await LocalStore.open {}, defer err, store
11 |
12 | # What if we fail to load a user?
13 | await User.load { store, query : { twitter : "chris_paradise" } }, defer err, user
14 |
15 | # This example is only for users who aren't found
16 | assert (err? and (err instanceof E.NotFoundError))
17 |
18 | # Still should be able to load me...
19 | await User.load { store, query : { keybase : "max" } }, defer err, me
20 |
21 | # Come up with some secret to later release to the user...
22 | secret = # ....
23 | subkey_name = # name of our app subkey
24 |
25 | assertion = Assertion.compile "twitter://chris_paradise || github://paradise_chris"
26 |
27 | #
28 | # Alternative assertion representation in JSON-lisp form:
29 | #
30 | assertion = Assertion.from_json [ "and"
31 | [ "or"
32 | [ "p", "twitter", "chris_paradise" ],
33 | [ "p", "github", "paradise_chris" ]
34 | ],
35 | [ "or"
36 | [ "p", "http", "eatabag.com" ],
37 | [ "p", "https", "eatabag.com" ],
38 | [ "p", "dns", "eatabag.com" ]
39 | ],
40 | [ "p", "https", "securedump.com" ]
41 | ]
42 |
43 | #
44 | # Will perform the following steps:
45 | #
46 | # 1. Load the key manager for me for this app with (ENCRYPT|SIGN) ops flags.
47 | # 2. Make a JSON object with my assertions, and my encrypted secret
48 | # 3. Sign the JSON object with my public key
49 | # 4. Post it to the server, yielding the given ID
50 | #
51 | # Note that you can provide an optional expire_in, which will tell the server when
52 | # to throw it away (and allows you to know when you're done with it.)
53 | #
54 | await me.recruit_user { assertion, expire_in, subkey_name, secret }, defer err, id
55 |
56 |
--------------------------------------------------------------------------------
/examples/release.iced:
--------------------------------------------------------------------------------
1 |
2 | libkb = require 'libkeybase'
3 | assert = require 'assert'
4 | {User,E} = libkb
5 |
6 | # Your app needs to provide some idea of local storage that meets our requirements.
7 | {LocalStore} = require 'myapp'
8 |
9 | # Open the LocalStore, which can create one if none existed beforehand.
10 | await LocalStore.open {}, defer err, store
11 |
12 | # Load me...
13 | await User.load { store, query : { keybase : "max" } }, defer err, me
14 |
15 | # Loads a list of recruited users who are now ready to be verified.
16 | # Makes an API call to the server and verifies that I've signed them and that
17 | # they aren't expired.
18 | await me.load_recruited_user_list {}, defer err, recruits
19 |
20 | for r in recruits
21 |
22 | # The recruits are skeletons, so we still need to load their signature chain
23 | # from the server.
24 | await r.load {}, defer err
25 | idtab = r.get_identity_table()
26 |
27 | # Check the remote tabs as usual
28 | await idtab.check_remotes {}, defer err
29 |
30 | # Our assertions are preloaded in the object
31 | await idtab.assert {}, defer err
32 |
33 | # Does the following:
34 | # 1. Gets the needed KeyManager from me
35 | # 2. Gets the user's keymanager for encryption for the given app
36 | # 3. Decrypts and reencrypts
37 | await r.release { me } , defer err, secret
38 |
39 | # And now perform app-specific operations with the secret...
40 |
--------------------------------------------------------------------------------
/examples/trigger1.json:
--------------------------------------------------------------------------------
1 |
2 | // Fully-explicit system, I don't think this is worthwhile, since it's unclear what else we'll
3 | // use it for..
4 | {
5 | "players" : [ "04123456", "00112233" ],
6 | "type" : "script",
7 | "script": [
8 | [ "let",
9 | [ ["sender" , [ "load", "`04123456" ] ],
10 | ["receiver", [ "load", "`00112233" ] ] ]
11 | [ "if",
12 | [ "or",
13 | ["eq", [ "get_proven_id", "receiver", "`twitter" ], "maxtaco" ],
14 | ["eq", [ "get_proven_id", "receiver", "`github" ], "maxplusplus" ]
15 | ],
16 | [ "let",
17 | [ [ "plaintext", [ "decrypt", [ "get_key", "sender", "`main" ], "`---- BEGIN PGP MESSAGE ------..." ] ],
18 | [ "ciphertext", [ "encrypt", [ "get_key", "receiver", "`myapp"], "plaintext" ] ] ],
19 | [ "post", "00112233", "ciphertext" ]
20 | ]
21 | ]
22 | ]
23 | ]
24 | }
25 |
26 | // Simplified System -- let's go with this or else we'll never finish.
27 | {
28 | "reencryption" : {
29 | "sender" : "0412",
30 | "receiver" : "8811",
31 | "trigger" :
32 | [ "or",
33 | ["eq", [ "get_proven_id", "receiver", "`twitter" ], "maxtaco" ],
34 | ["eq", [ "get_proven_id", "receiver", "`github" ], "maxplusplus" ] ],
35 | "self_encryption" : "----- BEGIN PGP MESSAGE -----..."
36 | },
37 | "sig" : "----- BEGIN PGP MESSAGE -----...."
38 | }
39 |
--------------------------------------------------------------------------------
/lib/assertion.js:
--------------------------------------------------------------------------------
1 | // Generated by IcedCoffeeScript 108.0.13
2 | (function() {
3 | var AND, Expr, Fingerprint, Host, Http, Keybase, OR, Parser, Proof, ProofSet, URI, Web, parse, urlmod,
4 | __hasProp = {}.hasOwnProperty,
5 | __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; },
6 | __indexOf = [].indexOf || function(item) { for (var i = 0, l = this.length; i < l; i++) { if (i in this && this[i] === item) return i; } return -1; },
7 | __slice = [].slice;
8 |
9 | urlmod = require('url');
10 |
11 | Parser = require('./assertion_parser').Parser;
12 |
13 | Expr = (function() {
14 | function Expr() {}
15 |
16 | Expr.prototype.toString = function() {};
17 |
18 | Expr.prototype.match_set = function(proof_set) {
19 | return false;
20 | };
21 |
22 | return Expr;
23 |
24 | })();
25 |
26 | exports.URI = URI = (function(_super) {
27 | __extends(URI, _super);
28 |
29 | function URI(_arg) {
30 | this.key = _arg.key, this.value = _arg.value;
31 | }
32 |
33 | URI.prototype.keys = function() {
34 | return [this.key];
35 | };
36 |
37 | URI.prototype.check = function() {
38 | var _ref;
39 | if (!this.value && (this.value.length != null)) {
40 | throw new Error("Bad '" + this.key + "' assertion, no value found");
41 | }
42 | if ((_ref = this.key) !== 'twitter' && _ref !== 'github' && _ref !== 'hackernews' && _ref !== 'reddit' && _ref !== 'keybase' && _ref !== 'coinbase' && _ref !== 'facebook') {
43 | throw new Error("Unknown assertion type '" + this.key + "'");
44 | }
45 | };
46 |
47 | URI.prototype.to_lookup_query = function() {
48 | var d;
49 | d = {};
50 | d[this.key] = this.value;
51 | return d;
52 | };
53 |
54 | URI.parse_to_kv_pair = function(s) {
55 | var key, obj, value;
56 | obj = urlmod.parse(s);
57 | if (((key = obj.protocol) != null) && key.length) {
58 | key = key.toLowerCase();
59 | if ((key != null) && key.slice(-1) === ':') {
60 | key = key.slice(0, -1);
61 | }
62 | }
63 | value = obj.hostname;
64 | if ((key == null) && (value == null)) {
65 | value = obj.pathname;
66 | }
67 | if (value != null) {
68 | value = value.toLowerCase();
69 | }
70 | return {
71 | key: key,
72 | value: value
73 | };
74 | };
75 |
76 | URI.parse = function(_arg) {
77 | var key, klass, klasses, ret, s, strict, value, _ref;
78 | s = _arg.s, strict = _arg.strict;
79 | _ref = URI.parse_to_kv_pair(s), key = _ref.key, value = _ref.value;
80 | if (key != null ? key.length : void 0) {
81 |
82 | } else if (!strict) {
83 | key = "keybase";
84 | } else {
85 | throw new Error("Bad assertion, no 'type' given: " + s);
86 | }
87 | klasses = {
88 | web: Web,
89 | http: Http,
90 | dns: Host,
91 | https: Host,
92 | fingerprint: Fingerprint,
93 | keybase: Keybase
94 | };
95 | if ((klass = klasses[key]) == null) {
96 | klass = URI;
97 | }
98 | ret = new klass({
99 | key: key,
100 | value: value
101 | });
102 | ret.check();
103 | return ret;
104 | };
105 |
106 | URI.prototype.toString = function() {
107 | return "" + this.key + "://" + this.value;
108 | };
109 |
110 | URI.prototype.match_set = function(proof_set) {
111 | var proof, proofs, _i, _len;
112 | proofs = proof_set.get(this.keys());
113 | for (_i = 0, _len = proofs.length; _i < _len; _i++) {
114 | proof = proofs[_i];
115 | if (this.match_proof(proof)) {
116 | return true;
117 | }
118 | }
119 | return false;
120 | };
121 |
122 | URI.prototype.match_proof = function(proof) {
123 | var _ref;
124 | return (_ref = proof.key.toLowerCase(), __indexOf.call(this.keys(), _ref) >= 0) && (this.value === proof.value.toLowerCase());
125 | };
126 |
127 | URI.prototype.is_keybase = function() {
128 | return false;
129 | };
130 |
131 | return URI;
132 |
133 | })(Expr);
134 |
135 | Host = (function(_super) {
136 | __extends(Host, _super);
137 |
138 | function Host() {
139 | return Host.__super__.constructor.apply(this, arguments);
140 | }
141 |
142 | Host.prototype.check = function() {
143 | if (this.value.indexOf(".") < 0) {
144 | throw new Error("Bad hostname given: " + this.value);
145 | }
146 | };
147 |
148 | return Host;
149 |
150 | })(URI);
151 |
152 | Web = (function(_super) {
153 | __extends(Web, _super);
154 |
155 | function Web() {
156 | return Web.__super__.constructor.apply(this, arguments);
157 | }
158 |
159 | Web.prototype.keys = function() {
160 | return ['http', 'https', 'dns'];
161 | };
162 |
163 | return Web;
164 |
165 | })(Host);
166 |
167 | Http = (function(_super) {
168 | __extends(Http, _super);
169 |
170 | function Http() {
171 | return Http.__super__.constructor.apply(this, arguments);
172 | }
173 |
174 | Http.prototype.keys = function() {
175 | return ['http', 'https'];
176 | };
177 |
178 | return Http;
179 |
180 | })(Host);
181 |
182 | Fingerprint = (function(_super) {
183 | __extends(Fingerprint, _super);
184 |
185 | function Fingerprint() {
186 | return Fingerprint.__super__.constructor.apply(this, arguments);
187 | }
188 |
189 | Fingerprint.prototype.match_proof = function(proof) {
190 | return (this.key === proof.key.toLowerCase()) && (this.value === proof.value.slice(-1 * this.value.length).toLowerCase());
191 | };
192 |
193 | Fingerprint.prototype.check = function() {
194 | if (!this.value.match(/^[a-fA-F0-9]+$/)) {
195 | throw new Error("Bad fingerprint given: " + this.value);
196 | }
197 | };
198 |
199 | Fingerprint.prototype.to_lookup_query = function() {
200 | return {
201 | key_suffix: this.value
202 | };
203 | };
204 |
205 | return Fingerprint;
206 |
207 | })(URI);
208 |
209 | Keybase = (function(_super) {
210 | __extends(Keybase, _super);
211 |
212 | function Keybase() {
213 | return Keybase.__super__.constructor.apply(this, arguments);
214 | }
215 |
216 | Keybase.prototype.is_keybase = function() {
217 | return true;
218 | };
219 |
220 | return Keybase;
221 |
222 | })(URI);
223 |
224 | exports.AND = AND = (function(_super) {
225 | __extends(AND, _super);
226 |
227 | function AND() {
228 | var args;
229 | args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
230 | this.factors = args;
231 | }
232 |
233 | AND.prototype.toString = function() {
234 | var f;
235 | return "(" + ((function() {
236 | var _i, _len, _ref, _results;
237 | _ref = this.factors;
238 | _results = [];
239 | for (_i = 0, _len = _ref.length; _i < _len; _i++) {
240 | f = _ref[_i];
241 | _results.push(f.toString());
242 | }
243 | return _results;
244 | }).call(this)).join(" && ") + ")";
245 | };
246 |
247 | AND.prototype.match_set = function(proof_set) {
248 | var f, _i, _len, _ref;
249 | _ref = this.factors;
250 | for (_i = 0, _len = _ref.length; _i < _len; _i++) {
251 | f = _ref[_i];
252 | if (!f.match_set(proof_set)) {
253 | return false;
254 | }
255 | }
256 | return true;
257 | };
258 |
259 | return AND;
260 |
261 | })(Expr);
262 |
263 | OR = (function(_super) {
264 | __extends(OR, _super);
265 |
266 | function OR() {
267 | var args;
268 | args = 1 <= arguments.length ? __slice.call(arguments, 0) : [];
269 | this.terms = args;
270 | }
271 |
272 | OR.prototype.toString = function() {
273 | var t;
274 | return "(" + ((function() {
275 | var _i, _len, _ref, _results;
276 | _ref = this.terms;
277 | _results = [];
278 | for (_i = 0, _len = _ref.length; _i < _len; _i++) {
279 | t = _ref[_i];
280 | _results.push(t.toString());
281 | }
282 | return _results;
283 | }).call(this)).join(" || ") + ")";
284 | };
285 |
286 | OR.prototype.match_set = function(proof_set) {
287 | var t, _i, _len, _ref;
288 | _ref = this.terms;
289 | for (_i = 0, _len = _ref.length; _i < _len; _i++) {
290 | t = _ref[_i];
291 | if (t.match_set(proof_set)) {
292 | return true;
293 | }
294 | }
295 | return false;
296 | };
297 |
298 | return OR;
299 |
300 | })(Expr);
301 |
302 | exports.Proof = Proof = (function() {
303 | function Proof(_arg) {
304 | this.key = _arg.key, this.value = _arg.value;
305 | }
306 |
307 | return Proof;
308 |
309 | })();
310 |
311 | exports.ProofSet = ProofSet = (function() {
312 | function ProofSet(proofs) {
313 | this.proofs = proofs;
314 | this.make_index();
315 | }
316 |
317 | ProofSet.prototype.get = function(keys) {
318 | var k, out, v, _i, _len;
319 | out = [];
320 | for (_i = 0, _len = keys.length; _i < _len; _i++) {
321 | k = keys[_i];
322 | if ((v = this._index[k]) != null) {
323 | out = out.concat(v);
324 | }
325 | }
326 | return out;
327 | };
328 |
329 | ProofSet.prototype.make_index = function() {
330 | var d, proof, v, _i, _len, _ref;
331 | d = {};
332 | _ref = this.proofs;
333 | for (_i = 0, _len = _ref.length; _i < _len; _i++) {
334 | proof = _ref[_i];
335 | if ((v = d[proof.key]) == null) {
336 | v = d[proof.key] = [];
337 | }
338 | v.push(proof);
339 | }
340 | return this._index = d;
341 | };
342 |
343 | return ProofSet;
344 |
345 | })();
346 |
347 | exports.parse = parse = function(s) {
348 | var parser;
349 | parser = new Parser;
350 | parser.yy = {
351 | URI: URI,
352 | OR: OR,
353 | AND: AND
354 | };
355 | return parser.parse(s);
356 | };
357 |
358 | }).call(this);
359 |
--------------------------------------------------------------------------------
/lib/assertion_parser.js:
--------------------------------------------------------------------------------
1 | /* parser generated by jison 0.4.18 */
2 | /*
3 | Returns a Parser object of the following structure:
4 |
5 | Parser: {
6 | yy: {}
7 | }
8 |
9 | Parser.prototype: {
10 | yy: {},
11 | trace: function(),
12 | symbols_: {associative list: name ==> number},
13 | terminals_: {associative list: number ==> name},
14 | productions_: [...],
15 | performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate, $$, _$),
16 | table: [...],
17 | defaultActions: {...},
18 | parseError: function(str, hash),
19 | parse: function(input),
20 |
21 | lexer: {
22 | EOF: 1,
23 | parseError: function(str, hash),
24 | setInput: function(input),
25 | input: function(),
26 | unput: function(str),
27 | more: function(),
28 | less: function(n),
29 | pastInput: function(),
30 | upcomingInput: function(),
31 | showPosition: function(),
32 | test_match: function(regex_match_array, rule_index),
33 | next: function(),
34 | lex: function(),
35 | begin: function(condition),
36 | popState: function(),
37 | _currentRules: function(),
38 | topState: function(),
39 | pushState: function(condition),
40 |
41 | options: {
42 | ranges: boolean (optional: true ==> token location info will include a .range[] member)
43 | flex: boolean (optional: true ==> flex-like lexing behaviour where the rules are tested exhaustively to find the longest match)
44 | backtrack_lexer: boolean (optional: true ==> lexer regexes are tested in order and for each matching regex the action code is invoked; the lexer terminates the scan when a token is returned by the action code)
45 | },
46 |
47 | performAction: function(yy, yy_, $avoiding_name_collisions, YY_START),
48 | rules: [...],
49 | conditions: {associative list: name ==> set},
50 | }
51 | }
52 |
53 |
54 | token location info (@$, _$, etc.): {
55 | first_line: n,
56 | last_line: n,
57 | first_column: n,
58 | last_column: n,
59 | range: [start_number, end_number] (where the numbers are indexes into the input string, regular zero-based)
60 | }
61 |
62 |
63 | the parseError function receives a 'hash' object with these members for lexer and parser errors: {
64 | text: (matched text)
65 | token: (the produced terminal token, if any)
66 | line: (yylineno)
67 | }
68 | while parser (grammar) errors will also provide these members, i.e. parser errors deliver a superset of attributes: {
69 | loc: (yylloc)
70 | expected: (string describing the set of expected tokens)
71 | recoverable: (boolean: TRUE when the parser has a error recovery rule available for this particular error)
72 | }
73 | */
74 | var assertion_parser = (function(){
75 | var o=function(k,v,o,l){for(o=o||{},l=k.length;l--;o[k[l]]=v);return o},$V0=[1,3],$V1=[1,4],$V2=[1,6],$V3=[1,7],$V4=[5,6,7,9];
76 | var parser = {trace: function trace () { },
77 | yy: {},
78 | symbols_: {"error":2,"expressions":3,"e":4,"EOF":5,"OR":6,"AND":7,"LPAREN":8,"RPAREN":9,"URI":10,"$accept":0,"$end":1},
79 | terminals_: {2:"error",5:"EOF",6:"OR",7:"AND",8:"LPAREN",9:"RPAREN",10:"URI"},
80 | productions_: [0,[3,2],[4,3],[4,3],[4,3],[4,1]],
81 | performAction: function anonymous(yytext, yyleng, yylineno, yy, yystate /* action[1] */, $$ /* vstack */, _$ /* lstack */) {
82 | /* this == yyval */
83 |
84 | var $0 = $$.length - 1;
85 | switch (yystate) {
86 | case 1:
87 | return $$[$0-1];
88 | break;
89 | case 2:
90 | this.$ = new yy.OR($$[$0-2], $$[$0]);
91 | break;
92 | case 3:
93 | this.$ = new yy.AND($$[$0-2], $$[$0]);
94 | break;
95 | case 4:
96 | this.$ = $$[$0-1];
97 | break;
98 | case 5:
99 | this.$ = yy.URI.parse({s : $$[$0], strict : true});
100 | break;
101 | }
102 | },
103 | table: [{3:1,4:2,8:$V0,10:$V1},{1:[3]},{5:[1,5],6:$V2,7:$V3},{4:8,8:$V0,10:$V1},o($V4,[2,5]),{1:[2,1]},{4:9,8:$V0,10:$V1},{4:10,8:$V0,10:$V1},{6:$V2,7:$V3,9:[1,11]},o([5,6,9],[2,2],{7:$V3}),o($V4,[2,3]),o($V4,[2,4])],
104 | defaultActions: {5:[2,1]},
105 | parseError: function parseError (str, hash) {
106 | if (hash.recoverable) {
107 | this.trace(str);
108 | } else {
109 | var error = new Error(str);
110 | error.hash = hash;
111 | throw error;
112 | }
113 | },
114 | parse: function parse(input) {
115 | var self = this, stack = [0], tstack = [], vstack = [null], lstack = [], table = this.table, yytext = '', yylineno = 0, yyleng = 0, recovering = 0, TERROR = 2, EOF = 1;
116 | var args = lstack.slice.call(arguments, 1);
117 | var lexer = Object.create(this.lexer);
118 | var sharedState = { yy: {} };
119 | for (var k in this.yy) {
120 | if (Object.prototype.hasOwnProperty.call(this.yy, k)) {
121 | sharedState.yy[k] = this.yy[k];
122 | }
123 | }
124 | lexer.setInput(input, sharedState.yy);
125 | sharedState.yy.lexer = lexer;
126 | sharedState.yy.parser = this;
127 | if (typeof lexer.yylloc == 'undefined') {
128 | lexer.yylloc = {};
129 | }
130 | var yyloc = lexer.yylloc;
131 | lstack.push(yyloc);
132 | var ranges = lexer.options && lexer.options.ranges;
133 | if (typeof sharedState.yy.parseError === 'function') {
134 | this.parseError = sharedState.yy.parseError;
135 | } else {
136 | this.parseError = Object.getPrototypeOf(this).parseError;
137 | }
138 | function popStack(n) {
139 | stack.length = stack.length - 2 * n;
140 | vstack.length = vstack.length - n;
141 | lstack.length = lstack.length - n;
142 | }
143 | _token_stack:
144 | var lex = function () {
145 | var token;
146 | token = lexer.lex() || EOF;
147 | if (typeof token !== 'number') {
148 | token = self.symbols_[token] || token;
149 | }
150 | return token;
151 | };
152 | var symbol, preErrorSymbol, state, action, a, r, yyval = {}, p, len, newState, expected;
153 | while (true) {
154 | state = stack[stack.length - 1];
155 | if (this.defaultActions[state]) {
156 | action = this.defaultActions[state];
157 | } else {
158 | if (symbol === null || typeof symbol == 'undefined') {
159 | symbol = lex();
160 | }
161 | action = table[state] && table[state][symbol];
162 | }
163 | if (typeof action === 'undefined' || !action.length || !action[0]) {
164 | var errStr = '';
165 | expected = [];
166 | for (p in table[state]) {
167 | if (this.terminals_[p] && p > TERROR) {
168 | expected.push('\'' + this.terminals_[p] + '\'');
169 | }
170 | }
171 | if (lexer.showPosition) {
172 | errStr = 'Parse error on line ' + (yylineno + 1) + ':\n' + lexer.showPosition() + '\nExpecting ' + expected.join(', ') + ', got \'' + (this.terminals_[symbol] || symbol) + '\'';
173 | } else {
174 | errStr = 'Parse error on line ' + (yylineno + 1) + ': Unexpected ' + (symbol == EOF ? 'end of input' : '\'' + (this.terminals_[symbol] || symbol) + '\'');
175 | }
176 | this.parseError(errStr, {
177 | text: lexer.match,
178 | token: this.terminals_[symbol] || symbol,
179 | line: lexer.yylineno,
180 | loc: yyloc,
181 | expected: expected
182 | });
183 | }
184 | if (action[0] instanceof Array && action.length > 1) {
185 | throw new Error('Parse Error: multiple actions possible at state: ' + state + ', token: ' + symbol);
186 | }
187 | switch (action[0]) {
188 | case 1:
189 | stack.push(symbol);
190 | vstack.push(lexer.yytext);
191 | lstack.push(lexer.yylloc);
192 | stack.push(action[1]);
193 | symbol = null;
194 | if (!preErrorSymbol) {
195 | yyleng = lexer.yyleng;
196 | yytext = lexer.yytext;
197 | yylineno = lexer.yylineno;
198 | yyloc = lexer.yylloc;
199 | if (recovering > 0) {
200 | recovering--;
201 | }
202 | } else {
203 | symbol = preErrorSymbol;
204 | preErrorSymbol = null;
205 | }
206 | break;
207 | case 2:
208 | len = this.productions_[action[1]][1];
209 | yyval.$ = vstack[vstack.length - len];
210 | yyval._$ = {
211 | first_line: lstack[lstack.length - (len || 1)].first_line,
212 | last_line: lstack[lstack.length - 1].last_line,
213 | first_column: lstack[lstack.length - (len || 1)].first_column,
214 | last_column: lstack[lstack.length - 1].last_column
215 | };
216 | if (ranges) {
217 | yyval._$.range = [
218 | lstack[lstack.length - (len || 1)].range[0],
219 | lstack[lstack.length - 1].range[1]
220 | ];
221 | }
222 | r = this.performAction.apply(yyval, [
223 | yytext,
224 | yyleng,
225 | yylineno,
226 | sharedState.yy,
227 | action[1],
228 | vstack,
229 | lstack
230 | ].concat(args));
231 | if (typeof r !== 'undefined') {
232 | return r;
233 | }
234 | if (len) {
235 | stack = stack.slice(0, -1 * len * 2);
236 | vstack = vstack.slice(0, -1 * len);
237 | lstack = lstack.slice(0, -1 * len);
238 | }
239 | stack.push(this.productions_[action[1]][0]);
240 | vstack.push(yyval.$);
241 | lstack.push(yyval._$);
242 | newState = table[stack[stack.length - 2]][stack[stack.length - 1]];
243 | stack.push(newState);
244 | break;
245 | case 3:
246 | return true;
247 | }
248 | }
249 | return true;
250 | }};
251 | /* generated by jison-lex 0.3.4 */
252 | var lexer = (function(){
253 | var lexer = ({
254 |
255 | EOF:1,
256 |
257 | parseError:function parseError(str, hash) {
258 | if (this.yy.parser) {
259 | this.yy.parser.parseError(str, hash);
260 | } else {
261 | throw new Error(str);
262 | }
263 | },
264 |
265 | // resets the lexer, sets new input
266 | setInput:function (input, yy) {
267 | this.yy = yy || this.yy || {};
268 | this._input = input;
269 | this._more = this._backtrack = this.done = false;
270 | this.yylineno = this.yyleng = 0;
271 | this.yytext = this.matched = this.match = '';
272 | this.conditionStack = ['INITIAL'];
273 | this.yylloc = {
274 | first_line: 1,
275 | first_column: 0,
276 | last_line: 1,
277 | last_column: 0
278 | };
279 | if (this.options.ranges) {
280 | this.yylloc.range = [0,0];
281 | }
282 | this.offset = 0;
283 | return this;
284 | },
285 |
286 | // consumes and returns one char from the input
287 | input:function () {
288 | var ch = this._input[0];
289 | this.yytext += ch;
290 | this.yyleng++;
291 | this.offset++;
292 | this.match += ch;
293 | this.matched += ch;
294 | var lines = ch.match(/(?:\r\n?|\n).*/g);
295 | if (lines) {
296 | this.yylineno++;
297 | this.yylloc.last_line++;
298 | } else {
299 | this.yylloc.last_column++;
300 | }
301 | if (this.options.ranges) {
302 | this.yylloc.range[1]++;
303 | }
304 |
305 | this._input = this._input.slice(1);
306 | return ch;
307 | },
308 |
309 | // unshifts one char (or a string) into the input
310 | unput:function (ch) {
311 | var len = ch.length;
312 | var lines = ch.split(/(?:\r\n?|\n)/g);
313 |
314 | this._input = ch + this._input;
315 | this.yytext = this.yytext.substr(0, this.yytext.length - len);
316 | //this.yyleng -= len;
317 | this.offset -= len;
318 | var oldLines = this.match.split(/(?:\r\n?|\n)/g);
319 | this.match = this.match.substr(0, this.match.length - 1);
320 | this.matched = this.matched.substr(0, this.matched.length - 1);
321 |
322 | if (lines.length - 1) {
323 | this.yylineno -= lines.length - 1;
324 | }
325 | var r = this.yylloc.range;
326 |
327 | this.yylloc = {
328 | first_line: this.yylloc.first_line,
329 | last_line: this.yylineno + 1,
330 | first_column: this.yylloc.first_column,
331 | last_column: lines ?
332 | (lines.length === oldLines.length ? this.yylloc.first_column : 0)
333 | + oldLines[oldLines.length - lines.length].length - lines[0].length :
334 | this.yylloc.first_column - len
335 | };
336 |
337 | if (this.options.ranges) {
338 | this.yylloc.range = [r[0], r[0] + this.yyleng - len];
339 | }
340 | this.yyleng = this.yytext.length;
341 | return this;
342 | },
343 |
344 | // When called from action, caches matched text and appends it on next action
345 | more:function () {
346 | this._more = true;
347 | return this;
348 | },
349 |
350 | // When called from action, signals the lexer that this rule fails to match the input, so the next matching rule (regex) should be tested instead.
351 | reject:function () {
352 | if (this.options.backtrack_lexer) {
353 | this._backtrack = true;
354 | } else {
355 | return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. You can only invoke reject() in the lexer when the lexer is of the backtracking persuasion (options.backtrack_lexer = true).\n' + this.showPosition(), {
356 | text: "",
357 | token: null,
358 | line: this.yylineno
359 | });
360 |
361 | }
362 | return this;
363 | },
364 |
365 | // retain first n characters of the match
366 | less:function (n) {
367 | this.unput(this.match.slice(n));
368 | },
369 |
370 | // displays already matched input, i.e. for error messages
371 | pastInput:function () {
372 | var past = this.matched.substr(0, this.matched.length - this.match.length);
373 | return (past.length > 20 ? '...':'') + past.substr(-20).replace(/\n/g, "");
374 | },
375 |
376 | // displays upcoming input, i.e. for error messages
377 | upcomingInput:function () {
378 | var next = this.match;
379 | if (next.length < 20) {
380 | next += this._input.substr(0, 20-next.length);
381 | }
382 | return (next.substr(0,20) + (next.length > 20 ? '...' : '')).replace(/\n/g, "");
383 | },
384 |
385 | // displays the character position where the lexing error occurred, i.e. for error messages
386 | showPosition:function () {
387 | var pre = this.pastInput();
388 | var c = new Array(pre.length + 1).join("-");
389 | return pre + this.upcomingInput() + "\n" + c + "^";
390 | },
391 |
392 | // test the lexed token: return FALSE when not a match, otherwise return token
393 | test_match:function(match, indexed_rule) {
394 | var token,
395 | lines,
396 | backup;
397 |
398 | if (this.options.backtrack_lexer) {
399 | // save context
400 | backup = {
401 | yylineno: this.yylineno,
402 | yylloc: {
403 | first_line: this.yylloc.first_line,
404 | last_line: this.yylloc.last_line,
405 | first_column: this.yylloc.first_column,
406 | last_column: this.yylloc.last_column
407 | },
408 | yytext: this.yytext,
409 | match: this.match,
410 | matches: this.matches,
411 | matched: this.matched,
412 | yyleng: this.yyleng,
413 | offset: this.offset,
414 | _more: this._more,
415 | _input: this._input,
416 | yy: this.yy,
417 | conditionStack: this.conditionStack.slice(0),
418 | done: this.done
419 | };
420 | if (this.options.ranges) {
421 | backup.yylloc.range = this.yylloc.range.slice(0);
422 | }
423 | }
424 |
425 | lines = match[0].match(/(?:\r\n?|\n).*/g);
426 | if (lines) {
427 | this.yylineno += lines.length;
428 | }
429 | this.yylloc = {
430 | first_line: this.yylloc.last_line,
431 | last_line: this.yylineno + 1,
432 | first_column: this.yylloc.last_column,
433 | last_column: lines ?
434 | lines[lines.length - 1].length - lines[lines.length - 1].match(/\r?\n?/)[0].length :
435 | this.yylloc.last_column + match[0].length
436 | };
437 | this.yytext += match[0];
438 | this.match += match[0];
439 | this.matches = match;
440 | this.yyleng = this.yytext.length;
441 | if (this.options.ranges) {
442 | this.yylloc.range = [this.offset, this.offset += this.yyleng];
443 | }
444 | this._more = false;
445 | this._backtrack = false;
446 | this._input = this._input.slice(match[0].length);
447 | this.matched += match[0];
448 | token = this.performAction.call(this, this.yy, this, indexed_rule, this.conditionStack[this.conditionStack.length - 1]);
449 | if (this.done && this._input) {
450 | this.done = false;
451 | }
452 | if (token) {
453 | return token;
454 | } else if (this._backtrack) {
455 | // recover context
456 | for (var k in backup) {
457 | this[k] = backup[k];
458 | }
459 | return false; // rule action called reject() implying the next rule should be tested instead.
460 | }
461 | return false;
462 | },
463 |
464 | // return next match in input
465 | next:function () {
466 | if (this.done) {
467 | return this.EOF;
468 | }
469 | if (!this._input) {
470 | this.done = true;
471 | }
472 |
473 | var token,
474 | match,
475 | tempMatch,
476 | index;
477 | if (!this._more) {
478 | this.yytext = '';
479 | this.match = '';
480 | }
481 | var rules = this._currentRules();
482 | for (var i = 0; i < rules.length; i++) {
483 | tempMatch = this._input.match(this.rules[rules[i]]);
484 | if (tempMatch && (!match || tempMatch[0].length > match[0].length)) {
485 | match = tempMatch;
486 | index = i;
487 | if (this.options.backtrack_lexer) {
488 | token = this.test_match(tempMatch, rules[i]);
489 | if (token !== false) {
490 | return token;
491 | } else if (this._backtrack) {
492 | match = false;
493 | continue; // rule action called reject() implying a rule MISmatch.
494 | } else {
495 | // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
496 | return false;
497 | }
498 | } else if (!this.options.flex) {
499 | break;
500 | }
501 | }
502 | }
503 | if (match) {
504 | token = this.test_match(match, rules[index]);
505 | if (token !== false) {
506 | return token;
507 | }
508 | // else: this is a lexer rule which consumes input without producing a token (e.g. whitespace)
509 | return false;
510 | }
511 | if (this._input === "") {
512 | return this.EOF;
513 | } else {
514 | return this.parseError('Lexical error on line ' + (this.yylineno + 1) + '. Unrecognized text.\n' + this.showPosition(), {
515 | text: "",
516 | token: null,
517 | line: this.yylineno
518 | });
519 | }
520 | },
521 |
522 | // return next match that has a token
523 | lex:function lex () {
524 | var r = this.next();
525 | if (r) {
526 | return r;
527 | } else {
528 | return this.lex();
529 | }
530 | },
531 |
532 | // activates a new lexer condition state (pushes the new lexer condition state onto the condition stack)
533 | begin:function begin (condition) {
534 | this.conditionStack.push(condition);
535 | },
536 |
537 | // pop the previously active lexer condition state off the condition stack
538 | popState:function popState () {
539 | var n = this.conditionStack.length - 1;
540 | if (n > 0) {
541 | return this.conditionStack.pop();
542 | } else {
543 | return this.conditionStack[0];
544 | }
545 | },
546 |
547 | // produce the lexer rule set which is active for the currently active lexer condition state
548 | _currentRules:function _currentRules () {
549 | if (this.conditionStack.length && this.conditionStack[this.conditionStack.length - 1]) {
550 | return this.conditions[this.conditionStack[this.conditionStack.length - 1]].rules;
551 | } else {
552 | return this.conditions["INITIAL"].rules;
553 | }
554 | },
555 |
556 | // return the currently active lexer condition state; when an index argument is provided it produces the N-th previous condition state, if available
557 | topState:function topState (n) {
558 | n = this.conditionStack.length - 1 - Math.abs(n || 0);
559 | if (n >= 0) {
560 | return this.conditionStack[n];
561 | } else {
562 | return "INITIAL";
563 | }
564 | },
565 |
566 | // alias for begin(condition)
567 | pushState:function pushState (condition) {
568 | this.begin(condition);
569 | },
570 |
571 | // return the number of states currently on the stack
572 | stateStackSize:function stateStackSize() {
573 | return this.conditionStack.length;
574 | },
575 | options: {},
576 | performAction: function anonymous(yy,yy_,$avoiding_name_collisions,YY_START) {
577 | var YYSTATE=YY_START;
578 | switch($avoiding_name_collisions) {
579 | case 0:/* ignore */
580 | break;
581 | case 1:return 10;
582 | break;
583 | case 2:return 7;
584 | break;
585 | case 3:return 6;
586 | break;
587 | case 4:return 8;
588 | break;
589 | case 5:return 9;
590 | break;
591 | case 6:return 'UNHANDLED';
592 | break;
593 | case 7:return 5;
594 | break;
595 | }
596 | },
597 | rules: [/^(?:[ \t\n]+)/,/^(?:[^ \t\n()&|]+)/,/^(?:&&)/,/^(?:\|\|)/,/^(?:\()/,/^(?:\))/,/^(?:.)/,/^(?:$)/],
598 | conditions: {"INITIAL":{"rules":[0,1,2,3,4,5,6,7],"inclusive":true}}
599 | });
600 | return lexer;
601 | })();
602 | parser.lexer = lexer;
603 | function Parser () {
604 | this.yy = {};
605 | }
606 | Parser.prototype = parser;parser.Parser = Parser;
607 | return new Parser;
608 | })();
609 |
610 |
611 | if (typeof require !== 'undefined' && typeof exports !== 'undefined') {
612 | exports.parser = assertion_parser;
613 | exports.Parser = assertion_parser.Parser;
614 | exports.parse = function () { return assertion_parser.parse.apply(assertion_parser, arguments); };
615 | exports.main = function commonjsMain (args) {
616 | if (!args[1]) {
617 | console.log('Usage: '+args[0]+' FILE');
618 | process.exit(1);
619 | }
620 | var source = require('fs').readFileSync(require('path').normalize(args[1]), "utf8");
621 | return exports.parser.parse(source);
622 | };
623 | if (typeof module !== 'undefined' && require.main === module) {
624 | exports.main(process.argv.slice(1));
625 | }
626 | }
--------------------------------------------------------------------------------
/lib/constants.js:
--------------------------------------------------------------------------------
1 | // Generated by IcedCoffeeScript 108.0.13
2 | (function() {
3 | module.exports = {
4 | versions: {
5 | leaf: {
6 | v1: 1,
7 | v2: 2
8 | }
9 | },
10 | seqno_types: {
11 | NONE: 0,
12 | PUBLIC: 1,
13 | PRIVATE: 2,
14 | SEMIPRIVATE: 3
15 | }
16 | };
17 |
18 | }).call(this);
19 |
--------------------------------------------------------------------------------
/lib/err.js:
--------------------------------------------------------------------------------
1 | // Generated by IcedCoffeeScript 108.0.13
2 | (function() {
3 | var E, ie;
4 |
5 | ie = require('iced-error');
6 |
7 | exports.E = E = ie.make_errors({
8 | GENERIC: "Generic error",
9 | INVAL: "Invalid value",
10 | NOT_FOUND: "Key not found",
11 | LOOKUP_NOT_FOUND: "Lookup failed",
12 | BAD_QUERY: "Bad query",
13 | DUPLICATE: "Duplicated value",
14 | BAD_MAC: "Message authentication failure",
15 | BAD_SIZE: "Wrong size",
16 | BAD_PREAMBLE: "Preamble mismatch or bad file magic",
17 | BAD_IO: "bad input/output operation",
18 | BAD_HEADER: "Bad metadata in file",
19 | BAD_VALUE: "bad value",
20 | BAD_SEQNO: "wrong sequence number",
21 | BAD_PAYLOAD_HASH: "bad payload hash from server",
22 | INTERNAL: "internal assertion failed",
23 | MSGPACK: "Message pack format failure",
24 | BAD_PW_OR_MAC: "Bad password or file was corrupted",
25 | INIT: "Initialization error",
26 | ARGS: "Error in argument parsing",
27 | UNIMPLEMENTED: "Feature not implemented",
28 | CONFIG: "Configuration file error",
29 | HTTP: "HTTP status code error",
30 | KEYBASE: "Server-side failure",
31 | CANCEL: "Operation canceled",
32 | GPG: "Command line error",
33 | NOT_LOGGED_IN: "Not logged in",
34 | CORRUPTION: "Corruption",
35 | VERSION_ROLLBACK: "Version rollback detected",
36 | NO_REMOTE_KEY: "No remote key found",
37 | NO_LOCAL_KEY: "No local key found",
38 | IMPORT: "Error importing key",
39 | VERIFY: "Signature verification problem",
40 | SCRAPE: "Scraper error",
41 | REMOTE_CHECK: "Remote check failure",
42 | PGP_ID_COLLISION: "PGP ID collision error",
43 | UID_MISMATCH: "userIDs didn't match",
44 | USERNAME_MISMATCH: "usernames didn't match",
45 | REMOTE_PROOF: "error in sanity checking a remote proof",
46 | UNTRACK: "error in untracking",
47 | UNKNOWN_SERVICE: "unknown service",
48 | PROOF_NOT_AVAILABLE: "proof isn't available",
49 | WRONG_SIGNER: "Expected a signature, but by someone else",
50 | REQUIRED_UPGRADE: "Software upgrade required",
51 | BAD_ASSERTION: "Bad assertion",
52 | KEY_EXISTS: "Key already exists",
53 | GENERATE: "key generation error",
54 | MISSING_PW: "missing passphrase",
55 | PROOF_EXISTS: "Proof already exists",
56 | SELF: "Can't perform this action on yourself",
57 | NO_USERNAME: "Your username isn't known",
58 | CLEAN_CANCEL: "Cleanly bailed out",
59 | PATCH: "Key patching failed",
60 | LOOPBACK: "Error in HKP loopback",
61 | NOT_CONFIGURED: "No user configured",
62 | HOST: "Host is down",
63 | SECURITY: "Security error",
64 | REQ_NOT_FOUND: "host not found",
65 | REQ_CONN_REFUSED: "connection refused",
66 | REQ_GENERIC: "generic error",
67 | KEY_NOT_TRUSTED: "key not trusted",
68 | KEY_NOT_FOUND: "key not found",
69 | REVOKED: "revoked",
70 | REVOKE: "error in revocation",
71 | BAD_MERKLE_LEAF: "bad merkle leaf found"
72 | });
73 |
74 | }).call(this);
75 |
--------------------------------------------------------------------------------
/lib/kvstore.js:
--------------------------------------------------------------------------------
1 | // Generated by IcedCoffeeScript 108.0.13
2 | (function() {
3 | var Base, E, Flat, FlatMemory, Lock, Memory, iced, log, make_esc, __iced_k, __iced_k_noop,
4 | __hasProp = {}.hasOwnProperty,
5 | __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
6 |
7 | iced = require('iced-runtime');
8 | __iced_k = __iced_k_noop = function() {};
9 |
10 | log = require('iced-logger');
11 |
12 | E = require('./err').E;
13 |
14 | make_esc = require('iced-error').make_esc;
15 |
16 | Lock = require('iced-lock').Lock;
17 |
18 | exports.Base = Base = (function() {
19 | function Base() {
20 | this.lock = new Lock;
21 | }
22 |
23 | Base.prototype.unimplemented = function(n, cb) {
24 | return cb(new E.UnimplementedError("BaseKvStore::" + n + ": unimplemented"));
25 | };
26 |
27 | Base.prototype.open = function(opts, cb) {
28 | return this.unimplemented('open', cb);
29 | };
30 |
31 | Base.prototype.nuke = function(opts, cb) {
32 | return this.unimplemented('nuke', cb);
33 | };
34 |
35 | Base.prototype.close = function(opts, cb) {
36 | return this.unimplemented('close', cb);
37 | };
38 |
39 | Base.prototype._put = function(_arg, cb) {
40 | var key, value;
41 | key = _arg.key, value = _arg.value;
42 | return this.unimplemented('_put', cb);
43 | };
44 |
45 | Base.prototype._get = function(_arg, cb) {
46 | var key;
47 | key = _arg.key;
48 | return this.unimplemented("_get", cb);
49 | };
50 |
51 | Base.prototype._resolve = function(_arg, cb) {
52 | var name;
53 | name = _arg.name;
54 | return this.unimplemented("_resolve", cb);
55 | };
56 |
57 | Base.prototype._link = function(_arg, cb) {
58 | var key, name;
59 | name = _arg.name, key = _arg.key;
60 | return this.unimplemented('_link', cb);
61 | };
62 |
63 | Base.prototype._unlink = function(_arg, cb) {
64 | var name;
65 | name = _arg.name;
66 | return this.unimplemented('_unlink', cb);
67 | };
68 |
69 | Base.prototype._unlink_all = function(_arg, cb) {
70 | var key;
71 | key = _arg.key;
72 | return this.unimplemented('_unlink_all', cb);
73 | };
74 |
75 | Base.prototype._remove = function(_arg, cb) {
76 | var key;
77 | key = _arg.key;
78 | return this.unimplemented('_remove', cb);
79 | };
80 |
81 | Base.prototype.make_kvstore_key = function(_arg) {
82 | var key, type;
83 | type = _arg.type, key = _arg.key;
84 | type || (type = key.slice(-2));
85 | return [type, key].join(":").toLowerCase();
86 | };
87 |
88 | Base.prototype.make_lookup_name = function(_arg) {
89 | var name, type;
90 | type = _arg.type, name = _arg.name;
91 | return [type, name].join(":").toLowerCase();
92 | };
93 |
94 | Base.prototype.unmake_kvstore_key = function(_arg) {
95 | var key, parts;
96 | key = _arg.key;
97 | parts = key.split(":");
98 | return {
99 | type: parts[0],
100 | key: parts.slice(1).join(":")
101 | };
102 | };
103 |
104 | Base.prototype.can_unlink = function() {
105 | return true;
106 | };
107 |
108 | Base.prototype.link = function(_arg, cb) {
109 | var key, name, type;
110 | type = _arg.type, name = _arg.name, key = _arg.key;
111 | return this._link({
112 | name: this.make_lookup_name({
113 | type: type,
114 | name: name
115 | }),
116 | key: this.make_kvstore_key({
117 | key: key,
118 | type: type
119 | })
120 | }, cb);
121 | };
122 |
123 | Base.prototype.unlink = function(_arg, cb) {
124 | var name, type;
125 | type = _arg.type, name = _arg.name;
126 | return this._unlink({
127 | name: this.make_lookup_name({
128 | type: type,
129 | name: name
130 | })
131 | }, cb);
132 | };
133 |
134 | Base.prototype.unlink_all = function(_arg, cb) {
135 | var key, type;
136 | type = _arg.type, key = _arg.key;
137 | return this._unlink_all({
138 | key: this.make_kvstore_key({
139 | type: type,
140 | key: key
141 | })
142 | }, cb);
143 | };
144 |
145 | Base.prototype.get = function(_arg, cb) {
146 | var key, type;
147 | type = _arg.type, key = _arg.key;
148 | return this._get({
149 | key: this.make_kvstore_key({
150 | type: type,
151 | key: key
152 | })
153 | }, cb);
154 | };
155 |
156 | Base.prototype.resolve = function(_arg, cb) {
157 | var err, key, name, type, ___iced_passed_deferral, __iced_deferrals, __iced_k;
158 | __iced_k = __iced_k_noop;
159 | ___iced_passed_deferral = iced.findDeferral(arguments);
160 | type = _arg.type, name = _arg.name;
161 | (function(_this) {
162 | return (function(__iced_k) {
163 | __iced_deferrals = new iced.Deferrals(__iced_k, {
164 | parent: ___iced_passed_deferral,
165 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
166 | funcname: "Base.resolve"
167 | });
168 | _this._resolve({
169 | name: _this.make_lookup_name({
170 | type: type,
171 | name: name
172 | })
173 | }, __iced_deferrals.defer({
174 | assign_fn: (function() {
175 | return function() {
176 | err = arguments[0];
177 | return key = arguments[1];
178 | };
179 | })(),
180 | lineno: 55
181 | }));
182 | __iced_deferrals._fulfill();
183 | });
184 | })(this)((function(_this) {
185 | return function() {
186 | if ((typeof err === "undefined" || err === null) && (typeof key !== "undefined" && key !== null)) {
187 | key = _this.unmake_kvstore_key({
188 | key: key
189 | }).key;
190 | }
191 | return cb(err, key);
192 | };
193 | })(this));
194 | };
195 |
196 | Base.prototype.put = function(_arg, cb) {
197 | var esc, key, kvsk, name, names, type, value, ___iced_passed_deferral, __iced_deferrals, __iced_k;
198 | __iced_k = __iced_k_noop;
199 | ___iced_passed_deferral = iced.findDeferral(arguments);
200 | type = _arg.type, key = _arg.key, value = _arg.value, name = _arg.name, names = _arg.names;
201 | esc = make_esc(cb, "BaseKvStore::put");
202 | kvsk = this.make_kvstore_key({
203 | type: type,
204 | key: key
205 | });
206 | log.debug("+ KvStore::put " + key + "/" + kvsk);
207 | (function(_this) {
208 | return (function(__iced_k) {
209 | __iced_deferrals = new iced.Deferrals(__iced_k, {
210 | parent: ___iced_passed_deferral,
211 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
212 | funcname: "Base.put"
213 | });
214 | _this._put({
215 | key: kvsk,
216 | value: value
217 | }, esc(__iced_deferrals.defer({
218 | lineno: 66
219 | })));
220 | __iced_deferrals._fulfill();
221 | });
222 | })(this)((function(_this) {
223 | return function() {
224 | if ((name != null) && (names == null)) {
225 | names = [name];
226 | }
227 | (function(__iced_k) {
228 | if (names && names.length) {
229 | (function(__iced_k) {
230 | var _i, _len, _ref, _results, _while;
231 | _ref = names;
232 | _len = _ref.length;
233 | _i = 0;
234 | _while = function(__iced_k) {
235 | var _break, _continue, _next;
236 | _break = __iced_k;
237 | _continue = function() {
238 | return iced.trampoline(function() {
239 | ++_i;
240 | return _while(__iced_k);
241 | });
242 | };
243 | _next = _continue;
244 | if (!(_i < _len)) {
245 | return _break();
246 | } else {
247 | name = _ref[_i];
248 | log.debug("| KvStore::link " + name + " -> " + key);
249 | (function(__iced_k) {
250 | __iced_deferrals = new iced.Deferrals(__iced_k, {
251 | parent: ___iced_passed_deferral,
252 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
253 | funcname: "Base.put"
254 | });
255 | _this.link({
256 | type: type,
257 | name: name,
258 | key: key
259 | }, esc(__iced_deferrals.defer({
260 | lineno: 71
261 | })));
262 | __iced_deferrals._fulfill();
263 | })(_next);
264 | }
265 | };
266 | _while(__iced_k);
267 | })(__iced_k);
268 | } else {
269 | return __iced_k();
270 | }
271 | })(function() {
272 | log.debug("- KvStore::put " + key + " -> ok");
273 | return cb(null);
274 | });
275 | };
276 | })(this));
277 | };
278 |
279 | Base.prototype.remove = function(_arg, cb) {
280 | var err, k, key, optional, type, ___iced_passed_deferral, __iced_deferrals, __iced_k;
281 | __iced_k = __iced_k_noop;
282 | ___iced_passed_deferral = iced.findDeferral(arguments);
283 | type = _arg.type, key = _arg.key, optional = _arg.optional;
284 | k = this.make_kvstore_key({
285 | type: type,
286 | key: key
287 | });
288 | (function(_this) {
289 | return (function(__iced_k) {
290 | __iced_deferrals = new iced.Deferrals(__iced_k, {
291 | parent: ___iced_passed_deferral,
292 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
293 | funcname: "Base.remove"
294 | });
295 | _this.lock.acquire(__iced_deferrals.defer({
296 | lineno: 79
297 | }));
298 | __iced_deferrals._fulfill();
299 | });
300 | })(this)((function(_this) {
301 | return function() {
302 | log.debug("+ DB remove " + key + "/" + k);
303 | (function(__iced_k) {
304 | __iced_deferrals = new iced.Deferrals(__iced_k, {
305 | parent: ___iced_passed_deferral,
306 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
307 | funcname: "Base.remove"
308 | });
309 | _this._remove({
310 | key: k
311 | }, __iced_deferrals.defer({
312 | assign_fn: (function() {
313 | return function() {
314 | return err = arguments[0];
315 | };
316 | })(),
317 | lineno: 83
318 | }));
319 | __iced_deferrals._fulfill();
320 | })(function() {
321 | (function(__iced_k) {
322 | if ((typeof err !== "undefined" && err !== null) && (err instanceof E.NotFoundError) && optional) {
323 | log.debug("| No object found for " + k);
324 | return __iced_k(err = null);
325 | } else {
326 | (function(__iced_k) {
327 | if (err == null) {
328 | (function(__iced_k) {
329 | __iced_deferrals = new iced.Deferrals(__iced_k, {
330 | parent: ___iced_passed_deferral,
331 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
332 | funcname: "Base.remove"
333 | });
334 | _this._unlink_all({
335 | type: type,
336 | key: k
337 | }, __iced_deferrals.defer({
338 | assign_fn: (function() {
339 | return function() {
340 | return err = arguments[0];
341 | };
342 | })(),
343 | lineno: 88
344 | }));
345 | __iced_deferrals._fulfill();
346 | })(__iced_k);
347 | } else {
348 | return __iced_k();
349 | }
350 | })(__iced_k);
351 | }
352 | })(function() {
353 | log.debug("- DB remove " + key + "/" + k + " -> " + (typeof err !== "undefined" && err !== null ? 'ok' : void 0));
354 | _this.lock.release();
355 | return cb(err);
356 | });
357 | });
358 | };
359 | })(this));
360 | };
361 |
362 | Base.prototype.lookup = function(_arg, cb) {
363 | var esc, key, name, type, value, ___iced_passed_deferral, __iced_deferrals, __iced_k;
364 | __iced_k = __iced_k_noop;
365 | ___iced_passed_deferral = iced.findDeferral(arguments);
366 | type = _arg.type, name = _arg.name;
367 | esc = make_esc(cb, "BaseKvStore::lookup");
368 | (function(_this) {
369 | return (function(__iced_k) {
370 | __iced_deferrals = new iced.Deferrals(__iced_k, {
371 | parent: ___iced_passed_deferral,
372 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
373 | funcname: "Base.lookup"
374 | });
375 | _this.resolve({
376 | name: name,
377 | type: type
378 | }, esc(__iced_deferrals.defer({
379 | assign_fn: (function() {
380 | return function() {
381 | return key = arguments[0];
382 | };
383 | })(),
384 | lineno: 99
385 | })));
386 | __iced_deferrals._fulfill();
387 | });
388 | })(this)((function(_this) {
389 | return function() {
390 | (function(__iced_k) {
391 | __iced_deferrals = new iced.Deferrals(__iced_k, {
392 | parent: ___iced_passed_deferral,
393 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
394 | funcname: "Base.lookup"
395 | });
396 | _this.get({
397 | type: type,
398 | key: key
399 | }, esc(__iced_deferrals.defer({
400 | assign_fn: (function() {
401 | return function() {
402 | return value = arguments[0];
403 | };
404 | })(),
405 | lineno: 100
406 | })));
407 | __iced_deferrals._fulfill();
408 | })(function() {
409 | return cb(null, value);
410 | });
411 | };
412 | })(this));
413 | };
414 |
415 | return Base;
416 |
417 | })();
418 |
419 | exports.Flat = Flat = (function(_super) {
420 | __extends(Flat, _super);
421 |
422 | function Flat() {
423 | return Flat.__super__.constructor.apply(this, arguments);
424 | }
425 |
426 | Flat.prototype.make_kvstore_key = function(_arg) {
427 | var key, type;
428 | type = _arg.type, key = _arg.key;
429 | return "kv:" + Flat.__super__.make_kvstore_key.call(this, {
430 | type: type,
431 | key: key
432 | });
433 | };
434 |
435 | Flat.prototype.make_lookup_name = function(_arg) {
436 | var name, type;
437 | type = _arg.type, name = _arg.name;
438 | return "lo:" + Flat.__super__.make_lookup_name.call(this, {
439 | type: type,
440 | name: name
441 | });
442 | };
443 |
444 | Flat.prototype.unmake_kvstore_key = function(_arg) {
445 | var key, parts;
446 | key = _arg.key;
447 | parts = key.split(":");
448 | return {
449 | type: parts[1],
450 | key: parts.slice(2).join(":")
451 | };
452 | };
453 |
454 | Flat.prototype._link = function(_arg, cb) {
455 | var err, key, name, ___iced_passed_deferral, __iced_deferrals, __iced_k;
456 | __iced_k = __iced_k_noop;
457 | ___iced_passed_deferral = iced.findDeferral(arguments);
458 | key = _arg.key, name = _arg.name;
459 | (function(_this) {
460 | return (function(__iced_k) {
461 | __iced_deferrals = new iced.Deferrals(__iced_k, {
462 | parent: ___iced_passed_deferral,
463 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
464 | funcname: "Flat._link"
465 | });
466 | _this._put({
467 | key: name,
468 | value: key
469 | }, __iced_deferrals.defer({
470 | assign_fn: (function() {
471 | return function() {
472 | return err = arguments[0];
473 | };
474 | })(),
475 | lineno: 122
476 | }));
477 | __iced_deferrals._fulfill();
478 | });
479 | })(this)((function(_this) {
480 | return function() {
481 | return cb(err);
482 | };
483 | })(this));
484 | };
485 |
486 | Flat.prototype._unlink = function(_arg, cb) {
487 | var err, name, ___iced_passed_deferral, __iced_deferrals, __iced_k;
488 | __iced_k = __iced_k_noop;
489 | ___iced_passed_deferral = iced.findDeferral(arguments);
490 | name = _arg.name;
491 | (function(_this) {
492 | return (function(__iced_k) {
493 | __iced_deferrals = new iced.Deferrals(__iced_k, {
494 | parent: ___iced_passed_deferral,
495 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
496 | funcname: "Flat._unlink"
497 | });
498 | _this._remove({
499 | key: name
500 | }, __iced_deferrals.defer({
501 | assign_fn: (function() {
502 | return function() {
503 | return err = arguments[0];
504 | };
505 | })(),
506 | lineno: 126
507 | }));
508 | __iced_deferrals._fulfill();
509 | });
510 | })(this)((function(_this) {
511 | return function() {
512 | return cb(err);
513 | };
514 | })(this));
515 | };
516 |
517 | Flat.prototype._unlink_all = function(_arg, cb) {
518 | var key;
519 | key = _arg.key;
520 | log.debug("| Can't _unlink_all names for " + key + " in flat kvstore");
521 | return cb(null);
522 | };
523 |
524 | Flat.prototype._resolve = function(_arg, cb) {
525 | var err, name, value, ___iced_passed_deferral, __iced_deferrals, __iced_k;
526 | __iced_k = __iced_k_noop;
527 | ___iced_passed_deferral = iced.findDeferral(arguments);
528 | name = _arg.name;
529 | (function(_this) {
530 | return (function(__iced_k) {
531 | __iced_deferrals = new iced.Deferrals(__iced_k, {
532 | parent: ___iced_passed_deferral,
533 | filename: "/Users/max/src/keybase/libkeybase-js/src/kvstore.iced",
534 | funcname: "Flat._resolve"
535 | });
536 | _this._get({
537 | key: name
538 | }, __iced_deferrals.defer({
539 | assign_fn: (function() {
540 | return function() {
541 | err = arguments[0];
542 | return value = arguments[1];
543 | };
544 | })(),
545 | lineno: 134
546 | }));
547 | __iced_deferrals._fulfill();
548 | });
549 | })(this)((function(_this) {
550 | return function() {
551 | if ((typeof err !== "undefined" && err !== null) && (err instanceof E.NotFoundError)) {
552 | err = new E.LookupNotFoundError("No lookup available for " + name);
553 | }
554 | return cb(err, value);
555 | };
556 | })(this));
557 | };
558 |
559 | Flat.prototype.can_unlink = function() {
560 | return false;
561 | };
562 |
563 | return Flat;
564 |
565 | })(Base);
566 |
567 | exports.Memory = Memory = (function(_super) {
568 | __extends(Memory, _super);
569 |
570 | function Memory() {
571 | Memory.__super__.constructor.apply(this, arguments);
572 | this.data = {
573 | lookup: {},
574 | rlookup: {},
575 | kv: {}
576 | };
577 | }
578 |
579 | Memory.prototype.open = function(opts, cb) {
580 | return cb(null);
581 | };
582 |
583 | Memory.prototype.nuke = function(opts, cb) {
584 | return cb(null);
585 | };
586 |
587 | Memory.prototype.close = function(opts, cb) {
588 | return cb(null);
589 | };
590 |
591 | Memory.prototype._put = function(_arg, cb) {
592 | var key, value;
593 | key = _arg.key, value = _arg.value;
594 | this.data.kv[key] = value;
595 | return cb(null);
596 | };
597 |
598 | Memory.prototype._get = function(_arg, cb) {
599 | var err, key, val;
600 | key = _arg.key;
601 | err = null;
602 | if ((val = this.data.kv[key]) === void 0) {
603 | err = new E.NotFoundError("key not found: '" + key + "'");
604 | }
605 | return cb(err, val);
606 | };
607 |
608 | Memory.prototype._resolve = function(_arg, cb) {
609 | var err, key, name;
610 | name = _arg.name;
611 | err = null;
612 | if ((key = this.data.lookup[name]) == null) {
613 | err = new E.LookupNotFoundError("name not found: '" + name + "'");
614 | }
615 | return cb(err, key);
616 | };
617 |
618 | Memory.prototype._link = function(_arg, cb) {
619 | var key, name, set;
620 | key = _arg.key, name = _arg.name;
621 | this.data.lookup[name] = key;
622 | if ((set = this.data.rlookup[key]) == null) {
623 | this.data.rlookup[key] = set = {};
624 | }
625 | set[name] = true;
626 | return cb(null);
627 | };
628 |
629 | Memory.prototype._unlink = function(_arg, cb) {
630 | var d, err, key, name;
631 | name = _arg.name;
632 | if ((key = this.data.lookup[name]) != null) {
633 | if ((d = this.data.rlookup[key]) != null) {
634 | delete d[name];
635 | }
636 | delete this.data.lookup[name];
637 | err = null;
638 | } else {
639 | err = new E.LookupNotFoundError("cannot unlink '" + name + "'");
640 | }
641 | return cb(err);
642 | };
643 |
644 | Memory.prototype._remove = function(_arg, cb) {
645 | var err, key, v;
646 | key = _arg.key;
647 | err = null;
648 | if ((v = this.data.kv[key]) == null) {
649 | err = new E.NotFoundError("key not found: '" + key + "'");
650 | } else {
651 | delete this.data.kv[key];
652 | }
653 | return cb(err);
654 | };
655 |
656 | Memory.prototype._unlink_all = function(_arg, cb) {
657 | var d, err, key, name, _;
658 | key = _arg.key;
659 | if ((d = this.data.rlookup[key]) != null) {
660 | for (name in d) {
661 | _ = d[name];
662 | delete this.data.lookup[name];
663 | }
664 | delete this.data.rlookup[key];
665 | err = null;
666 | } else {
667 | err = new E.LookupNotFoundError("cannot find names for key '" + key + "'");
668 | }
669 | return cb(err);
670 | };
671 |
672 | return Memory;
673 |
674 | })(Base);
675 |
676 | exports.FlatMemory = FlatMemory = (function(_super) {
677 | __extends(FlatMemory, _super);
678 |
679 | function FlatMemory() {
680 | FlatMemory.__super__.constructor.apply(this, arguments);
681 | this.kv = {};
682 | }
683 |
684 | FlatMemory.prototype.open = function(opts, cb) {
685 | return cb(null);
686 | };
687 |
688 | FlatMemory.prototype.nuke = function(opts, cb) {
689 | return cb(null);
690 | };
691 |
692 | FlatMemory.prototype.close = function(opts, cb) {
693 | return cb(null);
694 | };
695 |
696 | FlatMemory.prototype._put = function(_arg, cb) {
697 | var key, value;
698 | key = _arg.key, value = _arg.value;
699 | this.kv[key] = value;
700 | return cb(null);
701 | };
702 |
703 | FlatMemory.prototype._get = function(_arg, cb) {
704 | var err, key, val;
705 | key = _arg.key;
706 | err = null;
707 | if ((val = this.kv[key]) === void 0) {
708 | err = new E.NotFoundError("key not found: '" + key + "'");
709 | }
710 | return cb(err, val);
711 | };
712 |
713 | FlatMemory.prototype._remove = function(_arg, cb) {
714 | var err, key, v;
715 | key = _arg.key;
716 | err = null;
717 | if ((v = this.kv[key]) == null) {
718 | err = new E.NotFoundError("key not found: '" + key + "'");
719 | } else {
720 | delete this.kv[key];
721 | }
722 | return cb(err);
723 | };
724 |
725 | return FlatMemory;
726 |
727 | })(Flat);
728 |
729 | }).call(this);
730 |
--------------------------------------------------------------------------------
/lib/main.js:
--------------------------------------------------------------------------------
1 | // Generated by IcedCoffeeScript 108.0.13
2 | (function() {
3 | var k, mod, mods, v, _i, _len, _ref;
4 |
5 | exports.merkle = {
6 | leaf: require('./merkle/leaf'),
7 | pathcheck: require('./merkle/pathcheck')
8 | };
9 |
10 | _ref = {
11 | constants: require('./constants'),
12 | err: require('./err'),
13 | kvstore: require('./kvstore'),
14 | assertion: require('./assertion')
15 | };
16 | for (k in _ref) {
17 | v = _ref[k];
18 | exports[k] = v;
19 | }
20 |
21 | mods = [require('./sigchain/sigchain')];
22 |
23 | for (_i = 0, _len = mods.length; _i < _len; _i++) {
24 | mod = mods[_i];
25 | for (k in mod) {
26 | v = mod[k];
27 | exports[k] = v;
28 | }
29 | }
30 |
31 | }).call(this);
32 |
--------------------------------------------------------------------------------
/lib/merkle/leaf.js:
--------------------------------------------------------------------------------
1 | // Generated by IcedCoffeeScript 108.0.13
2 | (function() {
3 | var C, ChainTail, Leaf, Parser, ResetChainTail, is_positive_int;
4 |
5 | C = require('../constants');
6 |
7 | exports.ChainTail = ChainTail = (function() {
8 | function ChainTail(_arg) {
9 | this.seqno = _arg.seqno, this.payload_hash = _arg.payload_hash, this.sig_id = _arg.sig_id;
10 | }
11 |
12 | ChainTail.prototype.to_json = function() {
13 | return [this.seqno, this.payload_hash, this.sig_id];
14 | };
15 |
16 | return ChainTail;
17 |
18 | })();
19 |
20 | exports.ResetChainTail = ResetChainTail = (function() {
21 | function ResetChainTail(_arg) {
22 | this.seqno = _arg.seqno, this.payload_hash = _arg.payload_hash;
23 | }
24 |
25 | ResetChainTail.prototype.to_json = function() {
26 | return [this.seqno, this.payload_hash];
27 | };
28 |
29 | return ResetChainTail;
30 |
31 | })();
32 |
33 | is_positive_int = function(x) {
34 | return (typeof x === 'number') && (Math.floor(x) === x) && isFinite(x) && x >= 0;
35 | };
36 |
37 | Parser = (function() {
38 | function Parser(val) {
39 | this.val = val;
40 | }
41 |
42 | Parser.prototype.parse = function() {
43 | var version;
44 | if (!Array.isArray(this.val) || this.val.length < 1) {
45 | throw new Error("Expected an array of length 1 or more");
46 | } else if (!is_positive_int(this.val[0])) {
47 | throw new Error("Need a number for first slot");
48 | } else if (typeof this.val[1] === 'string') {
49 | version = 1;
50 | } else {
51 | version = this.val[0];
52 | }
53 | switch (version) {
54 | case C.versions.leaf.v1:
55 | return this.parse_v1();
56 | case C.versions.leaf.v2:
57 | return this.parse_v2();
58 | default:
59 | throw new Error("unknown leaf version: " + version);
60 | }
61 | };
62 |
63 | Parser.prototype.parse_v1 = function() {
64 | var pub;
65 | pub = this.parse_chain_tail(this.val);
66 | return new Leaf({
67 | pub: pub
68 | });
69 | };
70 |
71 | Parser.prototype.parse_v2 = function() {
72 | var eldest_kid, pub, reset, semipriv, _ref, _ref1;
73 | if (this.val.length < 2) {
74 | throw new Error("No public chain");
75 | }
76 | pub = this.val.length > 1 && ((_ref = this.val[1]) != null ? _ref.length : void 0) ? this.parse_chain_tail(this.val[1]) : null;
77 | semipriv = (this.val.length > 2) && ((_ref1 = this.val[2]) != null ? _ref1.length : void 0) ? this.parse_chain_tail(this.val[2]) : null;
78 | eldest_kid = this.val.length > 3 && (this.val[3] != null) ? this.parse_kid(this.val[3]) : null;
79 | reset = this.val.length > 4 && (this.val[4] != null) ? this.parse_reset_chain(this.val[4]) : null;
80 | return new Leaf({
81 | pub: pub,
82 | semipriv: semipriv,
83 | eldest_kid: eldest_kid,
84 | reset: reset
85 | });
86 | };
87 |
88 | Parser.prototype.match_hex = function(s) {
89 | return (typeof s === 'string') && !!(s.match(/^([a-fA-F0-9]*)$/)) && (s.length % 2 === 0);
90 | };
91 |
92 | Parser.prototype.parse_kid = function(x) {
93 | if (!this.match_hex(x)) {
94 | throw new Error("bad kid: " + x);
95 | }
96 | return x;
97 | };
98 |
99 | Parser.prototype.parse_reset_chain = function(val) {
100 | var msg;
101 | msg = null;
102 | if (val == null) {
103 | return null;
104 | }
105 | if (val.length < 2) {
106 | msg = "Bad reset chain tail with < 2 values";
107 | } else if (!is_positive_int(val[0])) {
108 | msg = "Bad sequence #";
109 | } else if (!this.match_hex(val[1])) {
110 | msg = "bad hash value";
111 | }
112 | if (msg != null) {
113 | throw new Error(msg);
114 | }
115 | return new ResetChainTail({
116 | seqno: val[0],
117 | payload_hash: val[1]
118 | });
119 | };
120 |
121 | Parser.prototype.parse_chain_tail = function(val) {
122 | var i, msg, v, _i, _len, _ref;
123 | msg = null;
124 | if (val.length < 2) {
125 | msg = "Bad chain tail with < 2 values";
126 | } else if (!is_positive_int(val[0])) {
127 | msg = "Bad sequence #";
128 | } else {
129 | _ref = val.slice(1, 3);
130 | for (i = _i = 0, _len = _ref.length; _i < _len; i = ++_i) {
131 | v = _ref[i];
132 | if ((v != null) && v.length) {
133 | if (!this.match_hex(v)) {
134 | msg = "bad value[" + i + "]";
135 | break;
136 | }
137 | }
138 | }
139 | }
140 | if (msg != null) {
141 | throw new Error(msg);
142 | }
143 | return new ChainTail({
144 | seqno: val[0],
145 | payload_hash: val[1],
146 | sig_id: val[2]
147 | });
148 | };
149 |
150 | return Parser;
151 |
152 | })();
153 |
154 | exports.Leaf = Leaf = (function() {
155 | function Leaf(_arg) {
156 | this.pub = _arg.pub, this.semipriv = _arg.semipriv, this.eldest_kid = _arg.eldest_kid, this.reset = _arg.reset;
157 | }
158 |
159 | Leaf.prototype.get_public = function() {
160 | return this.pub;
161 | };
162 |
163 | Leaf.prototype.get_semiprivate = function() {
164 | return this.semipriv;
165 | };
166 |
167 | Leaf.prototype.get_eldest_kid = function() {
168 | return this.eldest_kid;
169 | };
170 |
171 | Leaf.prototype.get_reset = function() {
172 | return this.reset;
173 | };
174 |
175 | Leaf.prototype.to_json = function() {
176 | var ret;
177 | ret = [C.versions.leaf.v2, (this.pub ? this.pub.to_json() : []), (this.semipriv != null ? this.semipriv.to_json() : []), this.eldest_kid, (this.reset != null ? this.reset.to_json() : null)];
178 | return ret;
179 | };
180 |
181 | Leaf.prototype.to_string = function() {
182 | return JSON.stringify(this.to_json());
183 | };
184 |
185 | Leaf.parse = function(val) {
186 | var e, err, leaf, parser;
187 | parser = new Parser(val);
188 | err = leaf = null;
189 | try {
190 | leaf = parser.parse();
191 | } catch (_error) {
192 | e = _error;
193 | err = e;
194 | }
195 | return [err, leaf];
196 | };
197 |
198 | Leaf.prototype.seqno_assertion = function() {
199 | return (function(_this) {
200 | return function(rows) {
201 | var chain_tail, found, seqno, seqno_type, _i, _len, _ref, _ref1, _ref2;
202 | found = {};
203 | for (_i = 0, _len = rows.length; _i < _len; _i++) {
204 | _ref = rows[_i], seqno_type = _ref.seqno_type, seqno = _ref.seqno;
205 | chain_tail = (function() {
206 | switch (seqno_type) {
207 | case C.seqno_types.PUBLIC:
208 | return this.pub;
209 | case C.seqno_types.SEMIPRIVATE:
210 | return this.semipriv;
211 | default:
212 | return null;
213 | }
214 | }).call(_this);
215 | if ((chain_tail == null) || (chain_tail.seqno !== seqno)) {
216 | return false;
217 | }
218 | found[seqno_type] = true;
219 | }
220 | if (((_ref1 = _this.semipriv) != null ? _ref1.seqno : void 0) && (!found[C.seqno_types.SEMIPRIVATE])) {
221 | return false;
222 | }
223 | if (((_ref2 = _this.pub) != null ? _ref2.seqno : void 0) && (!found[C.seqno_types.PUBLIC])) {
224 | return false;
225 | }
226 | return true;
227 | };
228 | })(this);
229 | };
230 |
231 | Leaf.prototype.reset_assertion = function(rows) {
232 | if (rows.length === 0) {
233 | return !(this.reset != null);
234 | } else if (rows.length && (this.reset != null)) {
235 | return (this.reset.seqno === rows[0].seqno) && (this.reset.payload_hash === rows[0].payload_hash);
236 | } else {
237 | return false;
238 | }
239 | };
240 |
241 | Leaf.prototype.seqno_and_prev_assertion = function(typ) {
242 | return (function(_this) {
243 | return function(rows) {
244 | var chain_tail;
245 | chain_tail = (function() {
246 | switch (typ) {
247 | case C.seqno_types.PUBLIC:
248 | return this.pub;
249 | case C.seqno_types.SEMIPRIVATE:
250 | return this.semipriv;
251 | default:
252 | return null;
253 | }
254 | }).call(_this);
255 | if (rows.length === 0) {
256 | if (chain_tail === null || chain_tail.length === 0) {
257 | return true;
258 | } else {
259 | return false;
260 | }
261 | } else if (rows.length === 1 && (chain_tail != null)) {
262 | return (chain_tail.seqno === rows[0].seqno) && (chain_tail.payload_hash === rows[0].payload_hash);
263 | } else {
264 | return false;
265 | }
266 | };
267 | })(this);
268 | };
269 |
270 | return Leaf;
271 |
272 | })();
273 |
274 | }).call(this);
275 |
--------------------------------------------------------------------------------
/lib/merkle/pathcheck.js:
--------------------------------------------------------------------------------
1 | // Generated by IcedCoffeeScript 108.0.13
2 | (function() {
3 | var BaseTree, C, Leaf, LegacyUidNameTree, MainTree, PathChecker, a_json_parse, hash, iced, make_esc, merkle, pathcheck, sha256, sha512, __iced_k, __iced_k_noop,
4 | __hasProp = {}.hasOwnProperty,
5 | __extends = function(child, parent) { for (var key in parent) { if (__hasProp.call(parent, key)) child[key] = parent[key]; } function ctor() { this.constructor = child; } ctor.prototype = parent.prototype; child.prototype = new ctor(); child.__super__ = parent.prototype; return child; };
6 |
7 | iced = require('iced-runtime');
8 | __iced_k = __iced_k_noop = function() {};
9 |
10 | C = require('../constants');
11 |
12 | make_esc = require('iced-error').make_esc;
13 |
14 | hash = require('triplesec').hash;
15 |
16 | merkle = require('merkle-tree');
17 |
18 | a_json_parse = require('iced-utils').util.a_json_parse;
19 |
20 | Leaf = require('./leaf').Leaf;
21 |
22 | sha256 = function(s) {
23 | return (new hash.SHA256).bufhash(Buffer.from(s, 'utf8')).toString('hex');
24 | };
25 |
26 | sha512 = function(s) {
27 | return (new hash.SHA512).bufhash(Buffer.from(s, 'utf8')).toString('hex');
28 | };
29 |
30 | module.exports = pathcheck = function(_arg, cb) {
31 | var err, km, pc, res, server_reply, ___iced_passed_deferral, __iced_deferrals, __iced_k;
32 | __iced_k = __iced_k_noop;
33 | ___iced_passed_deferral = iced.findDeferral(arguments);
34 | server_reply = _arg.server_reply, km = _arg.km;
35 | pc = new PathChecker({
36 | server_reply: server_reply,
37 | km: km
38 | });
39 | (function(_this) {
40 | return (function(__iced_k) {
41 | __iced_deferrals = new iced.Deferrals(__iced_k, {
42 | parent: ___iced_passed_deferral,
43 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced"
44 | });
45 | pc.run(__iced_deferrals.defer({
46 | assign_fn: (function() {
47 | return function() {
48 | err = arguments[0];
49 | return res = arguments[1];
50 | };
51 | })(),
52 | lineno: 26
53 | }));
54 | __iced_deferrals._fulfill();
55 | });
56 | })(this)((function(_this) {
57 | return function() {
58 | return cb(err, res);
59 | };
60 | })(this));
61 | };
62 |
63 | PathChecker = (function() {
64 | function PathChecker(_arg) {
65 | this.server_reply = _arg.server_reply, this.km = _arg.km;
66 | }
67 |
68 | PathChecker.prototype.run = function(cb) {
69 | var esc, leaf, uid, username, ___iced_passed_deferral, __iced_deferrals, __iced_k;
70 | __iced_k = __iced_k_noop;
71 | ___iced_passed_deferral = iced.findDeferral(arguments);
72 | esc = make_esc(cb, "PathChecker::run");
73 | (function(_this) {
74 | return (function(__iced_k) {
75 | __iced_deferrals = new iced.Deferrals(__iced_k, {
76 | parent: ___iced_passed_deferral,
77 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
78 | funcname: "PathChecker.run"
79 | });
80 | _this._verify_sig(esc(__iced_deferrals.defer({
81 | lineno: 39
82 | })));
83 | __iced_deferrals._fulfill();
84 | });
85 | })(this)((function(_this) {
86 | return function() {
87 | (function(__iced_k) {
88 | __iced_deferrals = new iced.Deferrals(__iced_k, {
89 | parent: ___iced_passed_deferral,
90 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
91 | funcname: "PathChecker.run"
92 | });
93 | _this._verify_username(esc(__iced_deferrals.defer({
94 | assign_fn: (function() {
95 | return function() {
96 | uid = arguments[0];
97 | return username = arguments[1];
98 | };
99 | })(),
100 | lineno: 40
101 | })));
102 | __iced_deferrals._fulfill();
103 | })(function() {
104 | (function(__iced_k) {
105 | __iced_deferrals = new iced.Deferrals(__iced_k, {
106 | parent: ___iced_passed_deferral,
107 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
108 | funcname: "PathChecker.run"
109 | });
110 | _this._verify_path({
111 | uid: uid
112 | }, esc(__iced_deferrals.defer({
113 | assign_fn: (function() {
114 | return function() {
115 | return leaf = arguments[0];
116 | };
117 | })(),
118 | lineno: 41
119 | })));
120 | __iced_deferrals._fulfill();
121 | })(function() {
122 | return cb(null, {
123 | leaf: leaf,
124 | uid: uid,
125 | username: username
126 | });
127 | });
128 | });
129 | };
130 | })(this));
131 | };
132 |
133 | PathChecker.prototype._verify_sig = function(cb) {
134 | var err, esc, kid, raw, sig, sigeng, ___iced_passed_deferral, __iced_deferrals, __iced_k;
135 | __iced_k = __iced_k_noop;
136 | ___iced_passed_deferral = iced.findDeferral(arguments);
137 | esc = make_esc(cb, "_verify_sig");
138 | kid = this.km.get_ekid().toString('hex');
139 | err = null;
140 | (function(_this) {
141 | return (function(__iced_k) {
142 | var _ref;
143 | if ((sig = (_ref = _this.server_reply.root.sigs[kid]) != null ? _ref.sig : void 0) == null) {
144 | return __iced_k(err = new Error("No signature found for kid: " + kid));
145 | } else {
146 | sigeng = _this.km.make_sig_eng();
147 | (function(__iced_k) {
148 | __iced_deferrals = new iced.Deferrals(__iced_k, {
149 | parent: ___iced_passed_deferral,
150 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
151 | funcname: "PathChecker._verify_sig"
152 | });
153 | sigeng.unbox(sig, esc(__iced_deferrals.defer({
154 | assign_fn: (function() {
155 | return function() {
156 | return raw = arguments[0];
157 | };
158 | })(),
159 | lineno: 54
160 | })));
161 | __iced_deferrals._fulfill();
162 | })(function() {
163 | (function(__iced_k) {
164 | __iced_deferrals = new iced.Deferrals(__iced_k, {
165 | parent: ___iced_passed_deferral,
166 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
167 | funcname: "PathChecker._verify_sig"
168 | });
169 | a_json_parse(raw.toString('utf8'), esc(__iced_deferrals.defer({
170 | assign_fn: (function(__slot_1) {
171 | return function() {
172 | return __slot_1._signed_payload = arguments[0];
173 | };
174 | })(_this),
175 | lineno: 55
176 | })));
177 | __iced_deferrals._fulfill();
178 | })(__iced_k);
179 | });
180 | }
181 | });
182 | })(this)((function(_this) {
183 | return function() {
184 | return cb(err);
185 | };
186 | })(this));
187 | };
188 |
189 | PathChecker.prototype._extract_nodes = function(_arg, cb) {
190 | var esc, list, node, ret, val, ___iced_passed_deferral, __iced_deferrals, __iced_k;
191 | __iced_k = __iced_k_noop;
192 | ___iced_passed_deferral = iced.findDeferral(arguments);
193 | list = _arg.list;
194 | esc = make_esc(cb, "PathChecker::_extract_nodes");
195 | ret = {};
196 | (function(_this) {
197 | return (function(__iced_k) {
198 | var _i, _len, _ref, _results, _while;
199 | _ref = list;
200 | _len = _ref.length;
201 | _i = 0;
202 | _while = function(__iced_k) {
203 | var _break, _continue, _next;
204 | _break = __iced_k;
205 | _continue = function() {
206 | return iced.trampoline(function() {
207 | ++_i;
208 | return _while(__iced_k);
209 | });
210 | };
211 | _next = _continue;
212 | if (!(_i < _len)) {
213 | return _break();
214 | } else {
215 | node = _ref[_i].node;
216 | (function(__iced_k) {
217 | __iced_deferrals = new iced.Deferrals(__iced_k, {
218 | parent: ___iced_passed_deferral,
219 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
220 | funcname: "PathChecker._extract_nodes"
221 | });
222 | a_json_parse(node.val, esc(__iced_deferrals.defer({
223 | assign_fn: (function() {
224 | return function() {
225 | return val = arguments[0];
226 | };
227 | })(),
228 | lineno: 64
229 | })));
230 | __iced_deferrals._fulfill();
231 | })(function() {
232 | return _next(ret[node.hash] = val);
233 | });
234 | }
235 | };
236 | _while(__iced_k);
237 | });
238 | })(this)((function(_this) {
239 | return function() {
240 | return cb(null, ret);
241 | };
242 | })(this));
243 | };
244 |
245 | PathChecker.prototype._verify_username_legacy = function(_arg, cb) {
246 | var err, esc, leaf, nodes, root, tree, uid, username, ___iced_passed_deferral, __iced_deferrals, __iced_k;
247 | __iced_k = __iced_k_noop;
248 | ___iced_passed_deferral = iced.findDeferral(arguments);
249 | uid = _arg.uid, username = _arg.username;
250 | esc = make_esc(cb, "PathChecker::_verify_username_legacy");
251 | root = this._signed_payload.body.legacy_uid_root;
252 | (function(_this) {
253 | return (function(__iced_k) {
254 | __iced_deferrals = new iced.Deferrals(__iced_k, {
255 | parent: ___iced_passed_deferral,
256 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
257 | funcname: "PathChecker._verify_username_legacy"
258 | });
259 | _this._extract_nodes({
260 | list: _this.server_reply.uid_proof_path
261 | }, esc(__iced_deferrals.defer({
262 | assign_fn: (function() {
263 | return function() {
264 | return nodes = arguments[0];
265 | };
266 | })(),
267 | lineno: 73
268 | })));
269 | __iced_deferrals._fulfill();
270 | });
271 | })(this)((function(_this) {
272 | return function() {
273 | tree = new LegacyUidNameTree({
274 | root: root,
275 | nodes: nodes
276 | });
277 | (function(__iced_k) {
278 | __iced_deferrals = new iced.Deferrals(__iced_k, {
279 | parent: ___iced_passed_deferral,
280 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
281 | funcname: "PathChecker._verify_username_legacy"
282 | });
283 | tree.find({
284 | key: sha256(username)
285 | }, esc(__iced_deferrals.defer({
286 | assign_fn: (function() {
287 | return function() {
288 | return leaf = arguments[0];
289 | };
290 | })(),
291 | lineno: 75
292 | })));
293 | __iced_deferrals._fulfill();
294 | })(function() {
295 | err = leaf === uid ? null : new Error("UID mismatch " + leaf + " != " + uid + " in tree for " + username);
296 | return cb(err);
297 | });
298 | };
299 | })(this));
300 | };
301 |
302 | PathChecker.prototype._verify_path = function(_arg, cb) {
303 | var err, esc, leaf, leaf_raw, nodes, root, tree, uid, ___iced_passed_deferral, __iced_deferrals, __iced_k;
304 | __iced_k = __iced_k_noop;
305 | ___iced_passed_deferral = iced.findDeferral(arguments);
306 | uid = _arg.uid;
307 | esc = make_esc(cb, "PathChecker::_verify_path");
308 | root = this._signed_payload.body.root;
309 | (function(_this) {
310 | return (function(__iced_k) {
311 | __iced_deferrals = new iced.Deferrals(__iced_k, {
312 | parent: ___iced_passed_deferral,
313 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
314 | funcname: "PathChecker._verify_path"
315 | });
316 | _this._extract_nodes({
317 | list: _this.server_reply.path
318 | }, esc(__iced_deferrals.defer({
319 | assign_fn: (function() {
320 | return function() {
321 | return nodes = arguments[0];
322 | };
323 | })(),
324 | lineno: 85
325 | })));
326 | __iced_deferrals._fulfill();
327 | });
328 | })(this)((function(_this) {
329 | return function() {
330 | tree = new MainTree({
331 | root: root,
332 | nodes: nodes
333 | });
334 | (function(__iced_k) {
335 | __iced_deferrals = new iced.Deferrals(__iced_k, {
336 | parent: ___iced_passed_deferral,
337 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
338 | funcname: "PathChecker._verify_path"
339 | });
340 | tree.find({
341 | key: uid
342 | }, esc(__iced_deferrals.defer({
343 | assign_fn: (function() {
344 | return function() {
345 | return leaf_raw = arguments[0];
346 | };
347 | })(),
348 | lineno: 87
349 | })));
350 | __iced_deferrals._fulfill();
351 | })(function() {
352 | var _ref, _ref1;
353 | if (typeof leaf_raw !== "undefined" && leaf_raw !== null) {
354 | _ref = Leaf.parse(leaf_raw), err = _ref[0], leaf = _ref[1];
355 | } else {
356 | _ref1 = [null, null], err = _ref1[0], leaf = _ref1[1];
357 | }
358 | return cb(err, leaf);
359 | });
360 | };
361 | })(this));
362 | };
363 |
364 | PathChecker.prototype._verify_username = function(cb) {
365 | var err, uid, username, username_cased, ___iced_passed_deferral, __iced_deferrals, __iced_k, _ref;
366 | __iced_k = __iced_k_noop;
367 | ___iced_passed_deferral = iced.findDeferral(arguments);
368 | _ref = this.server_reply, uid = _ref.uid, username = _ref.username, username_cased = _ref.username_cased;
369 | err = null;
370 | (function(_this) {
371 | return (function(__iced_k) {
372 | if (uid.slice(-2) === '00') {
373 | (function(__iced_k) {
374 | __iced_deferrals = new iced.Deferrals(__iced_k, {
375 | parent: ___iced_passed_deferral,
376 | filename: "/Users/max/src/keybase/libkeybase-js/src/merkle/pathcheck.iced",
377 | funcname: "PathChecker._verify_username"
378 | });
379 | _this._verify_username_legacy({
380 | username: username,
381 | uid: uid
382 | }, __iced_deferrals.defer({
383 | assign_fn: (function() {
384 | return function() {
385 | return err = arguments[0];
386 | };
387 | })(),
388 | lineno: 102
389 | }));
390 | __iced_deferrals._fulfill();
391 | })(__iced_k);
392 | } else {
393 | err = _this._verify_username_hash({
394 | uid: uid,
395 | username: username,
396 | lc: false
397 | });
398 | return __iced_k((err != null) && (username_cased != null) && (username_cased !== username) && (username_cased.toLowerCase() === username) ? err = _this._verify_username_hash({
399 | uid: uid,
400 | username: username_cased
401 | }) : void 0);
402 | }
403 | });
404 | })(this)((function(_this) {
405 | return function() {
406 | return cb(err, uid, username);
407 | };
408 | })(this));
409 | };
410 |
411 | PathChecker.prototype._verify_username_hash = function(_arg) {
412 | var err, h, uid, uid2, username;
413 | uid = _arg.uid, username = _arg.username;
414 | h = (new hash.SHA256).bufhash(Buffer.from(username, "utf8"));
415 | uid2 = h.slice(0, 15).toString('hex') + '19';
416 | if (uid !== uid2) {
417 | err = new Error("bad UID: " + uid + " != " + uid2 + " for username " + username);
418 | }
419 | return err;
420 | };
421 |
422 | return PathChecker;
423 |
424 | })();
425 |
426 | BaseTree = (function(_super) {
427 | __extends(BaseTree, _super);
428 |
429 | function BaseTree(_arg) {
430 | this.root = _arg.root, this.nodes = _arg.nodes;
431 | BaseTree.__super__.constructor.call(this, {});
432 | }
433 |
434 | BaseTree.prototype.lookup_root = function(_arg, cb) {
435 | var txinfo;
436 | txinfo = _arg.txinfo;
437 | return cb(null, this.root);
438 | };
439 |
440 | BaseTree.prototype.lookup_node = function(_arg, cb) {
441 | var err, key, ret;
442 | key = _arg.key;
443 | ret = this.nodes[key];
444 | err = ret != null ? null : new Error("key not found: '" + key + "'");
445 | return cb(err, ret);
446 | };
447 |
448 | return BaseTree;
449 |
450 | })(merkle.Base);
451 |
452 | LegacyUidNameTree = (function(_super) {
453 | __extends(LegacyUidNameTree, _super);
454 |
455 | function LegacyUidNameTree() {
456 | return LegacyUidNameTree.__super__.constructor.apply(this, arguments);
457 | }
458 |
459 | LegacyUidNameTree.prototype.hash_fn = function(s) {
460 | return sha256(s);
461 | };
462 |
463 | return LegacyUidNameTree;
464 |
465 | })(BaseTree);
466 |
467 | MainTree = (function(_super) {
468 | __extends(MainTree, _super);
469 |
470 | function MainTree() {
471 | return MainTree.__super__.constructor.apply(this, arguments);
472 | }
473 |
474 | MainTree.prototype.hash_fn = function(s) {
475 | return sha512(s);
476 | };
477 |
478 | return MainTree;
479 |
480 | })(BaseTree);
481 |
482 | __iced_k_noop();
483 |
484 | }).call(this);
485 |
--------------------------------------------------------------------------------
/notes/tweetnacl.md:
--------------------------------------------------------------------------------
1 |
2 | # Some Notes on TweetNaCl testing and development
3 |
4 | - I found it quite useful to test against the Python Ed25519 implementation
5 | to check interoperability between tweetnacl and standalone ed25519. For instance,
6 | I wanted to check the interchangeability of public keys, private keys, and
7 | signatures.
8 | - The library I used was this one: https://github.com/warner/python-ed25519
9 | - Here's how I generated a public/private key pair in python, and also a signature:
10 |
11 | ```python
12 | import ed25519
13 | import binascii, json
14 | sk,vk = ed25519.create_keypair()
15 | print binascii.hexlify(sk.to_bytes())
16 | print binascii.hexlify(vk.to_bytes())
17 | text = b"here is some text input string that I would like to sign!"
18 | sig = sk.sign(text)
19 | print binascii.hexlify(sig)
20 | print json.dumps([ binascii.hexlify(sk.to_bytes()), binascii.hexlify(vk.to_bytes()), text, binascii.hexlify(sig) ])
21 | ```
22 |
23 | - Let's say the output is:
24 |
25 | ```json
26 | ["b583929ee68d7ff98fae303307ebe37d1ba3e299e934fff93e42958fa8d077771cf962becea35c090f1a7c5d2ec776aada51db2cb24e9b01e3cf9378fd50dc28", "1cf962becea35c090f1a7c5d2ec776aada51db2cb24e9b01e3cf9378fd50dc28", "here is some text input string that I would like to sign!", "6b1000245e43d880926af664714101d53ee939231ff311fc296b429bb72beb7ce368d971aeb2418b95fcc8d1134bda521a0987ddd57ae8491c48ad3ddf29c809"]
27 | ```
28 |
29 |
30 | - Now you're good to go. Here's how to verify over in nodeland.
31 |
32 | ```coffee-script
33 | nacl = require 'tweetnacl'
34 | assert = require 'assert'
35 | data = ["b583929ee68d7ff98fae303307ebe37d1ba3e299e934fff93e42958fa8d077771cf962becea35c090f1a7c5d2ec776aada51db2cb24e9b01e3cf9378fd50dc28", "1cf962becea35c090f1a7c5d2ec776aada51db2cb24e9b01e3cf9378fd50dc28", "here is some text input string that I would like to sign!", "6b1000245e43d880926af664714101d53ee939231ff311fc296b429bb72beb7ce368d971aeb2418b95fcc8d1134bda521a0987ddd57ae8491c48ad3ddf29c809"]
36 | msg = new Uint8Array(new Buffer data[2], "utf8")
37 | badmsg = new Uint8Array(new Buffer data[2]+"XX", "utf8")
38 | vk = new Uint8Array(new Buffer data[1], "hex")
39 | sig = new Uint8Array(new Buffer data[3], "hex")
40 | assert nacl.sign.detached.verify(msg,sig,vk)
41 | assert not nacl.sign.detached.verify(badmsg, sig, vk)
42 | ```
43 |
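44 | - To sanity-check the reverse direction, sign the same message in node with the
45 | Python-generated secret key and compare signatures. Ed25519 signing is
46 | deterministic, so the bytes should match exactly (this assumes python-ed25519's
47 | 64-byte `to_bytes()` output uses the same seed-plus-public-key layout as a
48 | tweetnacl secret key). Continuing from the snippet above:
49 |
50 | ```coffee-script
51 | sk = new Uint8Array(new Buffer data[0], "hex")
52 | sig2 = nacl.sign.detached(msg, sk)
53 | assert.equal (new Buffer sig2).toString('hex'), data[3]
54 | ```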
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "libkeybase",
3 | "version": "1.2.33",
4 | "description": "A library for processing Keybase's signature tree",
5 | "main": "lib/main.js",
6 | "scripts": {
7 | "test": "make test"
8 | },
9 | "repository": {
10 | "type": "git",
11 | "url": "https://github.com/keybase/node-sig-tree"
12 | },
13 | "keywords": [
14 | "Merkle",
15 | "Tree",
16 | "Keybase"
17 | ],
18 | "author": "Maxwell Krohn",
19 | "license": "BSD-3-Clause",
20 | "bugs": {
21 | "url": "https://github.com/keybase/node-sig-tree/issues"
22 | },
23 | "homepage": "https://github.com/keybase/node-sig-tree",
24 | "dependencies": {
25 | "iced-error": "0.0.13",
26 | "iced-lock": "^2.0.1",
27 | "iced-logger": "0.0.6",
28 | "iced-runtime": "^1.0.4",
29 | "iced-utils": "^0.1.27",
30 | "kbpgp": "^2.1.6",
31 | "keybase-proofs": "^2.3.10",
32 | "merkle-tree": "^1.0.5",
33 | "pgp-utils": "0.0.35",
34 | "triplesec": "^4.0.3"
35 | },
36 | "devDependencies": {
37 | "browserify": "^16.5.0",
38 | "forge-sigchain": "github:keybase/node-forge-sigchain#cson",
39 | "iced-coffee-script": "^108.0.13",
40 | "iced-test": "0.0.22",
41 | "icsify": "^0.6.0",
42 | "jison": "^0.4.15",
43 | "keybase-test-vectors": "1.0.11",
44 | "nyc": "^14.1.1"
45 | }
46 | }
--------------------------------------------------------------------------------
/src/assertion.iced:
--------------------------------------------------------------------------------
1 |
2 | urlmod = require 'url'
3 | {Parser} = require './assertion_parser'
4 |
5 | #==================================================================
6 |
7 | class Expr
8 |
9 | toString : () ->
10 |
11 | match_set : (proof_set) -> false
12 |
13 | #==================================================================
14 |
15 | exports.URI = class URI extends Expr
16 |
17 | #----------------------------------------
18 |
19 | constructor : ( {@key, @value}) ->
20 |
21 | #----------------------------------------
22 |
23 | keys : () -> [ @key ]
24 |
25 | #----------------------------------------
26 |
27 | check : () ->
28 | if not @value?.length
29 | throw new Error "Bad '#{@key}' assertion, no value found"
30 |
31 | throw new Error "Unknown assertion type '#{@key}'" unless @key in [
32 | 'twitter', 'github', 'hackernews', 'reddit', 'keybase', 'coinbase', 'facebook'
33 | ]
34 |
35 | #----------------------------------------
36 |
37 | to_lookup_query : () ->
38 | d = {}
39 | d[@key] = @value
40 | return d
41 |
42 | #----------------------------------------
43 |
44 | @parse_to_kv_pair : (s) ->
45 | obj = urlmod.parse(s)
46 |
47 | if (key = obj.protocol)? and key.length
48 | key = key.toLowerCase()
49 | key = key[0...-1] if key? and key[-1...] is ':'
50 |
51 | value = obj.hostname
52 | value = obj.pathname if not key? and not value?
53 | value = value.toLowerCase() if value?
54 |
55 | { key, value }
56 |
57 | #----------------------------------------
58 |
59 | @parse : ({s,strict}) ->
60 | {key,value} = URI.parse_to_kv_pair(s)
61 |
62 | if key?.length then # noop
63 | else if not strict then key = "keybase"
64 | else
65 | throw new Error "Bad assertion, no 'type' given: #{s}"
66 |
67 | klasses =
68 | web : Web
69 | http : Http
70 | dns : Host
71 | https : Host
72 | fingerprint : Fingerprint
73 | keybase : Keybase
74 |
75 | klass = URI unless (klass = klasses[key])?
76 | ret = new klass { key, value }
77 | ret.check()
78 | return ret
79 |
80 | #----------------------------------------
81 |
82 | toString : () -> "#{@key}://#{@value}"
83 |
84 | #----------------------------------------
85 |
86 | match_set : (proof_set) ->
87 | proofs = proof_set.get @keys()
88 | for proof in proofs
89 | return true if @match_proof(proof)
90 | return false
91 |
92 | #----------------------------------------
93 |
94 | match_proof : (proof) ->
95 | (proof.key.toLowerCase() in @keys()) and (@value is proof.value.toLowerCase())
96 |
97 | #----------------------------------------
98 |
99 | is_keybase : () -> false
100 |
101 | #==================================================================
102 |
103 | class Host extends URI
104 | check : () ->
105 | if @value.indexOf(".") < 0
106 | throw new Error "Bad hostname given: #{@value}"
107 |
108 | class Web extends Host
109 | keys : () -> [ 'http', 'https', 'dns' ]
110 |
111 | class Http extends Host
112 | keys : () -> [ 'http', 'https' ]
113 |
114 | class Fingerprint extends URI
115 | match_proof : (proof) ->
116 | ((@key is proof.key.toLowerCase()) and (@value is proof.value[(-1 * @value.length)...].toLowerCase()))
117 | check : () ->
118 | unless @value.match /^[a-fA-F0-9]+$/
119 | throw new Error "Bad fingerprint given: #{@value}"
120 | to_lookup_query : () -> { key_suffix : @value }
121 |
122 | class Keybase extends URI
123 | is_keybase : () -> true
124 |
125 | #==================================================================
126 |
127 | exports.AND = class AND extends Expr
128 |
129 | constructor : (args...) -> @factors = args
130 |
131 | toString : () -> "(" + (f.toString() for f in @factors).join(" && ") + ")"
132 |
133 | match_set : (proof_set) ->
134 | for f in @factors
135 | return false unless f.match_set(proof_set)
136 | return true
137 |
138 | #==================================================================
139 |
140 | class OR extends Expr
141 |
142 | constructor : (args...) -> @terms = args
143 |
144 | toString : () -> "(" + (t.toString() for t in @terms).join(" || ") + ")"
145 |
146 | match_set : (proof_set) ->
147 | for t in @terms
148 | return true if t.match_set(proof_set)
149 | return false
150 |
151 | #==================================================================
152 |
153 | exports.Proof = class Proof
154 |
155 | constructor : ({@key, @value}) ->
156 |
157 | #-----------------
158 |
159 | exports.ProofSet = class ProofSet
160 |
161 | constructor : (@proofs) ->
162 | @make_index()
163 |
164 | get : (keys) ->
165 | out = []
166 | for k in keys when (v = @_index[k])?
167 | out = out.concat v
168 | return out
169 |
170 | make_index : () ->
171 | d = {}
172 | for proof in @proofs
173 | v = d[proof.key] = [] unless (v = d[proof.key])?
174 | v.push proof
175 | @_index = d
176 |
177 | #==================================================================
178 |
179 | exports.parse = parse = (s) ->
180 | parser = new Parser
181 | parser.yy = { URI, OR, AND }
182 | return parser.parse(s)
183 |
184 | #==================================================================
185 |
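186 | # A rough usage sketch. "libkeybase" is the package name from package.json; the
187 | # twitter/github values below are made-up example data:
188 | #
189 | #    {assertion} = require 'libkeybase'
190 | #    expr = assertion.parse "twitter://maxtaco && github://maxtaco"
191 | #    proofs = new assertion.ProofSet [
192 | #      new assertion.Proof { key : 'twitter', value : 'maxtaco' }
193 | #      new assertion.Proof { key : 'github',  value : 'maxtaco' }
194 | #    ]
195 | #    expr.match_set proofs   # true only if both proofs are present
196 | #
197 | #==================================================================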
--------------------------------------------------------------------------------
/src/assertion_parser.jison:
--------------------------------------------------------------------------------
1 | %lex
2 |
3 | %%
4 |
5 | [ \t\n]+ /* ignore */
6 | [^ \t\n()&|]+ return 'URI';
7 | '&&' return 'AND';
8 | '||' return 'OR';
9 | '(' return 'LPAREN';
10 | ')' return 'RPAREN';
11 | . return 'UNHANDLED';
12 | <<EOF>> return 'EOF';
13 |
14 | /lex
15 |
16 | %left OR
17 | %left AND
18 |
19 | %start expressions
20 |
21 | %% /* language grammar */
22 |
23 | expressions
24 | : e EOF { return $1; }
25 | ;
26 |
27 | e
28 | : e OR e { $$ = new yy.OR($1, $3); }
29 | | e AND e { $$ = new yy.AND($1, $3); }
30 | | LPAREN e RPAREN { $$ = $2; }
31 | | URI { $$ = yy.URI.parse({s : $1, strict : true}); }
32 | ;
33 |
--------------------------------------------------------------------------------
/src/constants.iced:
--------------------------------------------------------------------------------
1 |
2 | module.exports =
3 | versions :
4 | leaf :
5 | v1 : 1
6 | v2 : 2
7 | seqno_types :
8 | NONE : 0
9 | PUBLIC : 1
10 | PRIVATE : 2
11 | SEMIPRIVATE : 3
12 |
--------------------------------------------------------------------------------
/src/err.iced:
--------------------------------------------------------------------------------
1 |
2 | ie = require 'iced-error'
3 |
4 | #================================================
5 |
6 | exports.E = E = ie.make_errors
7 | GENERIC : "Generic error"
8 | INVAL : "Invalid value"
9 | NOT_FOUND : "Key not found"
10 | LOOKUP_NOT_FOUND : "Lookup failed"
11 | BAD_QUERY : "Bad query"
12 | DUPLICATE : "Duplicated value"
13 | BAD_MAC : "Message authentication failure"
14 | BAD_SIZE : "Wrong size"
15 | BAD_PREAMBLE : "Preamble mismatch or bad file magic"
16 | BAD_IO : "bad input/output operation"
17 | BAD_HEADER : "Bad metadata in file"
18 | BAD_VALUE : "bad value"
19 | BAD_SEQNO : "wrong sequence number"
20 | BAD_PAYLOAD_HASH : "bad payload hash from server"
21 | INTERNAL : "internal assertion failed"
22 | MSGPACK : "Message pack format failure"
23 | BAD_PW_OR_MAC : "Bad password or file was corrupted"
24 | INIT : "Initialization error"
25 | ARGS : "Error in argument parsing"
26 | UNIMPLEMENTED : "Feature not implemented"
27 | CONFIG : "Configuration file error"
28 | HTTP : "HTTP status code error"
29 | KEYBASE : "Server-side failure"
30 | CANCEL : "Operation canceled"
31 | GPG : "Command line error"
32 | NOT_LOGGED_IN : "Not logged in"
33 | CORRUPTION : "Corruption"
34 | VERSION_ROLLBACK : "Version rollback detected"
35 | NO_REMOTE_KEY : "No remote key found"
36 | NO_LOCAL_KEY : "No local key found"
37 | IMPORT : "Error importing key"
38 | VERIFY : "Signature verification problem"
39 | SCRAPE : "Scraper error"
40 | REMOTE_CHECK : "Remote check failure"
41 | PGP_ID_COLLISION : "PGP ID collision error"
42 | UID_MISMATCH : "userIDs didn't match"
43 | USERNAME_MISMATCH : "usernames didn't match"
44 | REMOTE_PROOF : "error in sanity checking a remote proof"
45 | UNTRACK : "error in untracking"
46 | UNKNOWN_SERVICE : "unknown service"
47 | PROOF_NOT_AVAILABLE : "proof isn't available"
48 | WRONG_SIGNER : "Expected a signature, but by someone else"
49 | REQUIRED_UPGRADE : "Software upgrade required"
50 | BAD_ASSERTION : "Bad assertion"
51 | KEY_EXISTS : "Key already exists"
52 | GENERATE : "key generation error"
53 | MISSING_PW : "missing passphrase"
54 | PROOF_EXISTS : "Proof already exists"
55 | SELF : "Can't perform this action on yourself"
56 | NO_USERNAME : "Your username isn't known"
57 | CLEAN_CANCEL : "Cleanly bailed out"
58 | PATCH : "Key patching failed"
59 | LOOPBACK : "Error in HKP loopback"
60 | NOT_CONFIGURED : "No user configured"
61 | HOST : "Host is down"
62 | SECURITY : "Security error"
63 | REQ_NOT_FOUND : "host not found"
64 | REQ_CONN_REFUSED : "connection refused"
65 | REQ_GENERIC : "generic error"
66 | KEY_NOT_TRUSTED : "key not trusted"
67 | KEY_NOT_FOUND : "key not found"
68 | REVOKED : "revoked"
69 | REVOKE : "error in revocation"
70 | BAD_MERKLE_LEAF : "bad merkle leaf found"
71 |
72 | #================================================
73 |
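74 | # make_errors generates one Error subclass per entry above (E.NotFoundError,
75 | # E.LookupNotFoundError, and so on), which the rest of the library throws and
76 | # checks with instanceof. A minimal sketch, mirroring how kvstore.iced consumes
77 | # them:
78 | #
79 | #    {E} = require('libkeybase').err
80 | #    err = new E.NotFoundError "key not found: 'foo'"
81 | #    err instanceof E.NotFoundError   # -> true
82 | #    err.message                      # -> "key not found: 'foo'"
83 | #
84 | #================================================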
--------------------------------------------------------------------------------
/src/kvstore.iced:
--------------------------------------------------------------------------------
1 |
2 | log = require 'iced-logger'
3 | {E} = require './err'
4 | {make_esc} = require 'iced-error'
5 | {Lock} = require 'iced-lock'
6 |
7 | ##=======================================================================
8 |
9 | exports.Base = class Base
10 |
11 | constructor : () ->
12 | @lock = new Lock
13 |
14 | #========================
15 |
16 | unimplemented : (n, cb) -> cb new E.UnimplementedError "BaseKvStore::#{n}: unimplemented"
17 |
18 | #========================
19 |
20 | # Base classes need to implement these...
21 | open : (opts, cb) -> @unimplemented('open', cb)
22 | nuke : (opts, cb) -> @unimplemented('nuke', cb)
23 | close : (opts, cb) -> @unimplemented('close', cb)
24 | _put : ({key,value},cb) -> @unimplemented('_put', cb)
25 | _get : ({key}, cb) -> @unimplemented("_get", cb)
26 | _resolve : ({name}, cb) -> @unimplemented("_resolve", cb)
27 | _link : ({name,key}, cb) -> @unimplemented('_link', cb)
28 | _unlink : ({name}, cb) -> @unimplemented('_unlink', cb)
29 | _unlink_all : ({key}, cb) -> @unimplemented('_unlink_all', cb)
30 | _remove : ({key}, cb) -> @unimplemented('_remove', cb)
31 |
32 | #=========================
33 |
34 | make_kvstore_key : ( {type, key } ) ->
35 | type or= key[-2...]
36 | [ type, key ].join(":").toLowerCase()
37 | make_lookup_name : ( {type, name} ) -> [ type, name ].join(":").toLowerCase()
38 |
39 | unmake_kvstore_key : ( {key}) ->
40 | parts = key.split(":")
41 | { type : parts[0], key : parts[1...].join(":") }
42 |
43 | can_unlink : () -> true
44 |
45 | #=========================
46 |
47 | link : ({type, name, key}, cb) ->
48 | @_link {
49 | name : @make_lookup_name({ type, name }),
50 | key : @make_kvstore_key({ key, type })
51 | }, cb
52 | unlink : ({type, name}, cb) -> @_unlink { name : @make_lookup_name({ type, name }) }, cb
53 | unlink_all : ({type, key}, cb) -> @_unlink_all { key : @make_kvstore_key({type, key}) }, cb
54 | get : ({type,key}, cb) -> @_get { key : @make_kvstore_key({type, key}) }, cb
55 | resolve : ({type, name}, cb) ->
56 | await @_resolve { name : @make_lookup_name({type,name})}, defer err, key
57 | if not err? and key?
58 | { key } = @unmake_kvstore_key { key }
59 | cb err, key
60 |
61 | #=========================
62 |
63 | put : ({type, key, value, name, names}, cb) ->
64 | esc = make_esc cb, "BaseKvStore::put"
65 | kvsk = @make_kvstore_key {type,key}
66 | log.debug "+ KvStore::put #{key}/#{kvsk}"
67 | await @_put { key : kvsk, value }, esc defer()
68 | names = [ name ] if name? and not names?
69 | if names and names.length
70 | for name in names
71 | log.debug "| KvStore::link #{name} -> #{key}"
72 | await @link { type, name, key }, esc defer()
73 | log.debug "- KvStore::put #{key} -> ok"
74 | cb null
75 |
76 | #-----
77 |
78 | remove : ({type, key, optional}, cb) ->
79 | k = @make_kvstore_key { type, key }
80 | await @lock.acquire defer()
81 |
82 | log.debug "+ DB remove #{key}/#{k}"
83 |
84 | await @_remove { key : k }, defer err
85 | if err? and (err instanceof E.NotFoundError) and optional
86 | log.debug "| No object found for #{k}"
87 | err = null
88 | else if not err?
89 | await @_unlink_all { type, key : k }, defer err
90 |
91 | log.debug "- DB remove #{key}/#{k} -> #{if err? then err.message else 'ok'}"
92 | @lock.release()
93 |
94 | cb err
95 |
96 | #-----
97 |
98 | lookup : ({type, name}, cb) ->
99 | esc = make_esc cb, "BaseKvStore::lookup"
100 | await @resolve { name, type }, esc defer key
101 | await @get { type, key }, esc defer value
102 | cb null, value
103 |
104 | ##=======================================================================
105 |
106 | # Use this interface if you want to store objects in a Flat key-value store,
107 | # without secondary indices. In this case, "linking" multiple names to one
108 | # value is just a @_put{} link any other, and there is no ability to _unlink_all.
109 | # This layout is what we used for the original node client.
110 | exports.Flat = class Flat extends Base
111 |
112 | make_kvstore_key : ({type,key}) ->
113 | "kv:" + super { type, key }
114 |
115 | make_lookup_name : ({type,name}) ->
116 | "lo:" + super { type, name }
117 |
118 | unmake_kvstore_key : ( {key}) ->
119 | parts = key.split(":")
120 | { type : parts[1], key : parts[2...].join(":") }
121 |
122 | _link : ({key, name}, cb) ->
123 | await @_put { key : name, value : key }, defer err
124 | cb err
125 |
126 | _unlink : ({name}, cb) ->
127 | await @_remove { key : name }, defer err
128 | cb err
129 |
130 | _unlink_all : ({key}, cb) ->
131 | log.debug "| Can't _unlink_all names for #{key} in flat kvstore"
132 | cb null
133 |
134 | _resolve : ({name}, cb) ->
135 | await @_get { key : name }, defer err, value
136 | if err? and (err instanceof E.NotFoundError)
137 | err = new E.LookupNotFoundError "No lookup available for #{name}"
138 | cb err, value
139 |
140 | can_unlink : () -> false
141 |
142 | ##=======================================================================
143 |
144 | # A memory-backed store, mainly for testing...
145 | exports.Memory = class Memory extends Base
146 |
147 | constructor : () ->
148 | super
149 | @data =
150 | lookup : {}
151 | rlookup : {}
152 | kv : {}
153 |
154 | open : (opts, cb) -> cb null
155 | nuke : (opts, cb) -> cb null
156 | close : (opts, cb) -> cb null
157 |
158 | _put : ({key, value}, cb) ->
159 | @data.kv[key] = value
160 | cb null
161 |
162 | _get : ({key}, cb) ->
163 | err = null
164 | if (val = @data.kv[key]) is undefined
165 | err = new E.NotFoundError "key not found: '#{key}'"
166 | cb err, val
167 |
168 | _resolve : ({name}, cb) ->
169 | err = null
170 | unless (key = @data.lookup[name])?
171 | err = new E.LookupNotFoundError "name not found: '#{name}'"
172 | cb err, key
173 |
174 | _link : ({key, name}, cb) ->
175 | @data.lookup[name] = key
176 | @data.rlookup[key] = set = {} unless (set = @data.rlookup[key])?
177 | set[name] = true
178 | cb null
179 |
180 | _unlink : ({name}, cb) ->
181 | if (key = @data.lookup[name])?
182 | delete d[name] if (d = @data.rlookup[key])?
183 | delete @data.lookup[name]
184 | err = null
185 | else
186 | err = new E.LookupNotFoundError "cannot unlink '#{name}'"
187 | cb err
188 |
189 | _remove : ({key}, cb) ->
190 | err = null
191 | unless (v = @data.kv[key])?
192 | err = new E.NotFoundError "key not found: '#{key}'"
193 | else
194 | delete @data.kv[key]
195 | cb err
196 |
197 | _unlink_all : ({key}, cb) ->
198 | if (d = @data.rlookup[key])?
199 | for name,_ of d
200 | delete @data.lookup[name]
201 | delete @data.rlookup[key]
202 | err = null
203 | else
204 | err = new E.LookupNotFoundError "cannot find names for key '#{key}'"
205 | cb err
206 |
207 | ##=======================================================================
208 |
209 | # for testing, an in-memory flat store, that stores lookups and kv-pairs
210 | # in the same table, and doesn't have the ability to "unlink_all"
211 | exports.FlatMemory = class FlatMemory extends Flat
212 |
213 | constructor : () ->
214 | super
215 | @kv = {}
216 |
217 | open : (opts, cb) -> cb null
218 | nuke : (opts, cb) -> cb null
219 | close : (opts, cb) -> cb null
220 |
221 | _put : ({key, value}, cb) ->
222 | @kv[key] = value
223 | cb null
224 |
225 | _get : ({key}, cb) ->
226 | err = null
227 | if (val = @kv[key]) is undefined
228 | err = new E.NotFoundError "key not found: '#{key}'"
229 | cb err, val
230 |
231 | _remove : ({key}, cb) ->
232 | err = null
233 | unless (v = @kv[key])?
234 | err = new E.NotFoundError "key not found: '#{key}'"
235 | else
236 | delete @kv[key]
237 | cb err
238 |
239 | ##=======================================================================
240 |
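241 | # A small usage sketch of the Memory store (the names and values below are made
242 | # up; in an .iced file, await/defer come from iced-coffee-script):
243 | #
244 | #    {kvstore} = require 'libkeybase'
245 | #    store = new kvstore.Memory()
246 | #    await store.open {}, defer err
247 | #    await store.put { type : 'user', key : 'deadbeef19', value : { name : 'max' }, name : 'max' }, defer err
248 | #    await store.lookup { type : 'user', name : 'max' }, defer err, value
249 | #    # value is now { name : 'max' }
250 | #
251 | ##=======================================================================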
--------------------------------------------------------------------------------
/src/main.iced:
--------------------------------------------------------------------------------
1 |
2 | exports.merkle =
3 | leaf : require('./merkle/leaf')
4 | pathcheck : require('./merkle/pathcheck')
5 |
6 | # Export all of these modules as namespace extensions
7 | exports[k] = v for k,v of {
8 | constants : require('./constants')
9 | err : require('./err')
10 | kvstore : require('./kvstore')
11 | assertion : require('./assertion')
12 | }
13 |
14 | # Export the exports of these modules to the top level
15 | mods = [
16 | require('./sigchain/sigchain')
17 | ]
18 | for mod in mods
19 | for k,v of mod
20 | exports[k] = v
21 |
--------------------------------------------------------------------------------
/src/merkle/leaf.iced:
--------------------------------------------------------------------------------
1 |
2 | C = require '../constants'
3 |
4 | #===========================================================
5 |
6 | exports.ChainTail = class ChainTail
7 | constructor : ({@seqno, @payload_hash, @sig_id }) ->
8 | to_json : () -> [ @seqno, @payload_hash, @sig_id ]
9 |
10 | #--------------------------
11 |
12 | exports.ResetChainTail = class ResetChainTail
13 | constructor : ({@seqno, @payload_hash}) ->
14 | to_json : () -> [ @seqno, @payload_hash ]
15 |
16 | #--------------------------
17 |
18 | is_positive_int = (x) ->
19 | return (typeof(x) is 'number') and (Math.floor(x) is x) and isFinite(x) and x >= 0
20 |
21 | #--------------------------
22 |
23 | class Parser
24 |
25 | constructor : (@val) ->
26 |
27 | parse : () ->
28 |
29 | if not Array.isArray(@val) or @val.length < 1
30 | throw new Error "Expected an array of length 1 or more"
31 | else if not is_positive_int @val[0]
32 | throw new Error "Need a number for first slot"
33 | else if typeof(@val[1]) is 'string'
34 | # We messed up and didn't version the initial leaves of the tree
35 | version = 1
36 | else
37 | version = @val[0]
38 |
39 | switch version
40 | when C.versions.leaf.v1 then @parse_v1()
41 | when C.versions.leaf.v2 then @parse_v2()
42 | else throw new Error "unknown leaf version: #{version}"
43 |
44 | parse_v1 : () ->
45 | pub = @parse_chain_tail @val
46 | new Leaf { pub }
47 |
48 | parse_v2 : () ->
49 | if @val.length < 2 then throw new Error "No public chain"
50 | pub = if (@val.length > 1 and @val[1]?.length) then @parse_chain_tail(@val[1]) else null
51 | semipriv = if (@val.length > 2) and @val[2]?.length then @parse_chain_tail(@val[2]) else null
52 | eldest_kid = if (@val.length > 3 and @val[3]?) then @parse_kid(@val[3]) else null
53 | reset = if (@val.length > 4 and @val[4]?) then @parse_reset_chain(@val[4]) else null
54 | return new Leaf { pub, semipriv, eldest_kid, reset }
55 |
56 | match_hex : (s) ->
57 | (typeof(s) is 'string') and !!(s.match(/^([a-fA-F0-9]*)$/)) and (s.length % 2 is 0)
58 |
59 | parse_kid : (x) ->
60 | throw new Error "bad kid: #{x}" unless @match_hex x
61 | return x
62 |
63 | parse_reset_chain : (val) ->
64 | msg = null
65 | if not val? then return null
66 | if val.length < 2 then msg = "Bad reset chain tail with < 2 values"
67 | else if not is_positive_int val[0] then msg = "Bad sequence #"
68 | else if not @match_hex val[1] then msg = "bad hash value"
69 | throw new Error msg if msg?
70 | return new ResetChainTail { seqno : val[0], payload_hash : val[1] }
71 |
72 | parse_chain_tail : (val) ->
73 | msg = null
74 | if (val.length < 2) then msg = "Bad chain tail with < 2 values"
75 | else if not is_positive_int val[0] then msg = "Bad sequence #"
76 | else
77 | # Slots #1,2 are both HexIds. We don't know what 3+ will be
78 | for v,i in val[1..2] when v? and v.length
79 | unless @match_hex v
80 | msg = "bad value[#{i}]"
81 | break
82 | throw new Error msg if msg?
83 | new ChainTail { seqno : val[0], payload_hash : val[1], sig_id : val[2] }
84 |
85 | #--------------------------
86 |
87 | exports.Leaf = class Leaf
88 |
89 | constructor : ({@pub, @semipriv, @eldest_kid, @reset}) ->
90 |
91 | get_public : () -> @pub
92 | get_semiprivate: () -> @semipriv
93 | get_eldest_kid : () -> @eldest_kid
94 | get_reset : () -> @reset
95 |
96 | to_json : () ->
97 | ret = [
98 | C.versions.leaf.v2,
99 | (if @pub then @pub.to_json() else []),
100 | (if @semipriv? then @semipriv.to_json() else []),
101 | @eldest_kid
102 | (if @reset? then @reset.to_json() else null)
103 | ]
104 | return ret
105 |
106 | to_string : () -> JSON.stringify(@to_json())
107 |
108 | @parse: ( val) ->
109 | parser = new Parser val
110 | err = leaf = null
111 | try leaf = parser.parse()
112 | catch e then err = e
113 | [err, leaf]
114 |
115 | seqno_assertion : () -> (rows) =>
116 | found = {}
117 |
118 | # Make sure that every sequence found in the DB is also in the LOL
119 | for {seqno_type, seqno} in rows
120 | chain_tail = switch seqno_type
121 | when C.seqno_types.PUBLIC then @pub
122 | when C.seqno_types.SEMIPRIVATE then @semipriv
123 | else null
124 | if not chain_tail? or (chain_tail.seqno isnt seqno) then return false
125 | found[seqno_type] = true
126 |
127 | # Make sure that every sequence found in the LOL is also in the DB.
128 | if @semipriv?.seqno and (not found[C.seqno_types.SEMIPRIVATE]) then return false
129 | if @pub?.seqno and (not found[C.seqno_types.PUBLIC]) then return false
130 |
131 | return true
132 |
133 | reset_assertion : (rows) ->
134 | if rows.length is 0 then not(@reset?)
135 | else if rows.length and @reset?
136 | (@reset.seqno is rows[0].seqno) and (@reset.payload_hash is rows[0].payload_hash)
137 | else
138 | false
139 |
140 | seqno_and_prev_assertion : (typ) -> (rows) =>
141 | chain_tail = switch typ
142 | when C.seqno_types.PUBLIC then @pub
143 | when C.seqno_types.SEMIPRIVATE then @semipriv
144 | else null
145 |
146 | # Case 0 is a null length
147 | if rows.length is 0
148 | if chain_tail is null or chain_tail.length is 0 then true
149 | else false
150 | else if rows.length is 1 and chain_tail?
151 | (chain_tail.seqno is rows[0].seqno) and (chain_tail.payload_hash is rows[0].payload_hash )
152 | else
153 | false
154 |
155 | #===========================================================
156 |
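157 | # A worked example of the v2 leaf format parsed above. The hex strings are toy
158 | # placeholders; real leaves carry SHA-2 payload hashes and sig ids:
159 | #
160 | #    {merkle} = require 'libkeybase'
161 | #    raw = [ 2, [ 5, "aabb", "ccdd" ], [], null, null ]   # [version, public, semiprivate, eldest_kid, reset]
162 | #    [err, leaf] = merkle.leaf.Leaf.parse raw
163 | #    leaf.get_public().seqno          # -> 5
164 | #    leaf.get_public().payload_hash   # -> "aabb"
165 | #
166 | #===========================================================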
--------------------------------------------------------------------------------
/src/merkle/pathcheck.iced:
--------------------------------------------------------------------------------
1 |
2 | C = require '../constants'
3 | {make_esc} = require 'iced-error'
4 | {hash} = require 'triplesec'
5 | merkle = require 'merkle-tree'
6 | {a_json_parse} = require('iced-utils').util
7 | {Leaf} = require './leaf'
8 |
9 | sha256 = (s) -> (new hash.SHA256).bufhash(Buffer.from s, 'utf8').toString('hex')
10 | sha512 = (s) -> (new hash.SHA512).bufhash(Buffer.from s, 'utf8').toString('hex')
11 |
12 | #===========================================================
13 |
14 | #
15 | # pathcheck
16 | #
17 | # Given a reply from the server, and a keymanager that can verify the
18 | # reply, check the signature, check the path from the root to the leaf,
19 | # check the username, and then call back.
20 | #
21 | # @param server_reply {Object} the JSON object the server sent back
22 | # @param km {KeyManager} a keyManager to verify the reply with
23 | # @param cb {Callback} Reply with the Leaf, uid,
24 | # and username verified by the merkle path
25 | module.exports = pathcheck = ({server_reply, km}, cb) ->
26 | pc = new PathChecker { server_reply, km }
27 | await pc.run defer err, res
28 | cb err, res
29 |
30 | #===========================================================
31 |
32 | class PathChecker
33 |
34 | constructor : ({@server_reply, @km}) ->
35 |
36 | #-----------
37 |
38 | run : (cb) ->
39 | esc = make_esc cb, "PathChecker::run"
40 | await @_verify_sig esc defer()
41 | await @_verify_username esc defer uid, username
42 | await @_verify_path {uid}, esc defer leaf
43 | cb null, {leaf, uid, username}
44 |
45 | #-----------
46 |
47 | _verify_sig : (cb) ->
48 | esc = make_esc cb, "_verify_sig"
49 | kid = @km.get_ekid().toString('hex')
50 | err = null
51 | unless (sig = @server_reply.root.sigs[kid]?.sig)?
52 | err = new Error "No signature found for kid: #{kid}"
53 | else
54 | sigeng = @km.make_sig_eng()
55 | await sigeng.unbox sig, esc defer raw
56 | await a_json_parse raw.toString('utf8'), esc defer @_signed_payload
57 | cb err
58 |
59 | #-----------
60 |
61 | _extract_nodes : ({list}, cb) ->
62 | esc = make_esc cb, "PathChecker::_extract_nodes"
63 | ret = {}
64 | for {node} in list
65 | await a_json_parse node.val, esc defer val
66 | ret[node.hash] = val
67 | cb null, ret
68 |
69 | #-----------
70 |
71 | _verify_username_legacy : ({uid, username}, cb) ->
72 | esc = make_esc cb, "PathChecker::_verify_username_legacy"
73 | root = @_signed_payload.body.legacy_uid_root
74 | await @_extract_nodes {list : @server_reply.uid_proof_path}, esc defer nodes
75 | tree = new LegacyUidNameTree { root, nodes }
76 | await tree.find {key : sha256(username) }, esc defer leaf
77 | err = if (leaf is uid) then null
78 | else new Error "UID mismatch #{leaf} != #{uid} in tree for #{username}"
79 | cb err
80 |
81 | #-----------
82 |
83 | _verify_path : ({uid}, cb) ->
84 | esc = make_esc cb, "PathChecker::_verify_path"
85 | root = @_signed_payload.body.root
86 | await @_extract_nodes { list : @server_reply.path}, esc defer nodes
87 | tree = new MainTree { root, nodes }
88 | await tree.find {key : uid}, esc defer leaf_raw
89 | # The leaf might be missing entirely, for empty users.
90 | if leaf_raw?
91 | [err, leaf] = Leaf.parse leaf_raw
92 | else
93 | [err, leaf] = [null, null]
94 | cb err, leaf
95 |
96 | #-----------
97 |
98 | _verify_username : (cb) ->
99 | {uid,username,username_cased} = @server_reply
100 | err = null
101 |
102 | if uid[-2...] is '00'
103 | await @_verify_username_legacy {username,uid}, defer err
104 |
105 | else
106 | err = @_verify_username_hash { uid, username, lc : false }
107 | if err? and username_cased? and
108 | (username_cased isnt username) and
109 | (username_cased.toLowerCase() is username)
110 | err = @_verify_username_hash { uid, username : username_cased }
111 |
112 | cb err, uid, username
113 |
114 | #-----------
115 |
116 | _verify_username_hash : ( {uid, username}) ->
117 | h = (new hash.SHA256).bufhash (Buffer.from username, "utf8")
118 | uid2 = h[0...15].toString('hex') + '19'
119 | if (uid isnt uid2)
120 | err = new Error "bad UID: #{uid} != #{uid2} for username #{username}"
121 | return err
122 |
123 | #===========================================================
124 |
125 | class BaseTree extends merkle.Base
126 |
127 | constructor : ({@root, @nodes}) ->
128 | super {}
129 |
130 | lookup_root : ({txinfo}, cb) ->
131 | cb null, @root
132 |
133 | lookup_node : ({key}, cb) ->
134 | ret = @nodes[key]
135 | err = if ret? then null else new Error "key not found: '#{key}'"
136 | cb err, ret
137 |
138 | #===========================================================
139 |
140 | class LegacyUidNameTree extends BaseTree
141 |
142 | hash_fn : (s) -> sha256 s
143 |
144 | #===========================================================
145 |
146 | class MainTree extends BaseTree
147 |
148 | hash_fn : (s) -> sha512 s
149 |
150 | #===========================================================
151 |
152 | __iced_k_noop()
153 |
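154 | # A rough calling sketch for the pathcheck entry point documented at the top of
155 | # this file (server_reply and km are assumed to be supplied by the caller):
156 | #
157 | #    {merkle} = require 'libkeybase'
158 | #    await merkle.pathcheck { server_reply, km }, defer err, res
159 | #    # on success, res is { leaf, uid, username }
160 | #
161 | #===========================================================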
--------------------------------------------------------------------------------
/src/sigchain/sigchain.iced:
--------------------------------------------------------------------------------
1 | {bufeq_secure,athrow, a_json_parse} = require('iced-utils').util
2 | {make_esc} = require 'iced-error'
3 | kbpgp = require('kbpgp')
4 | proofs = require('keybase-proofs')
5 | ie = require('iced-error')
6 | {trim} = require('pgp-utils').util
7 |
8 | UID_LEN = 32
9 | exports.SIG_ID_SUFFIX = SIG_ID_SUFFIX = "0f"
10 |
11 | strip_final_newline = (buf) ->
12 | s = buf.toString('utf8')
13 | if s[-1...] is "\n" then Buffer.from s[0...-1], "utf8"
14 | else buf
15 |
16 | # On 15 Sep 2015, a day that will live in infamy, some users made bad
17 | # sigchain additions due to a code error that was stripping out
18 | # whitespace from json payloads, writing those payloads to the DB, and then
19 | # offering those payloads back out for subsequent signatures. We address that
20 | # issue here by subtracting that dropped newline out right before we hash.
21 | # We should potentially have a whitelist here for sigids that are affected:
22 | bad_whitespace_sig_ids = {
23 | "595a73fc649c2c8ccc1aa79384e0b3e7ab3049d8df838f75ef0edbcb5bbc42990f" : true
24 | "e256078702afd7a15a24681259935b48342a49840ab6a90291b300961669790f0f" : true
25 | "30831001edee5e01c3b5f5850043f9ef7749a1ed8624dc703ae0922e1d0f16dd0f" : true
26 | "88e6c581dbccbf390559bcb30ca21548ba0ec4861ec2d666217bd4ed4a4a8c3f0f" : true
27 | "4db0fe3973b3a666c7830fcb39d93282f8bc414eca1d535033a5cc625eabda0c0f" : true
28 | "9ba23a9a1796fb22b3c938f1edf5aba4ca5be7959d9151895eb6aa7a8d8ade420f" : true
29 | "df0005f6c61bd6efd2867b320013800781f7f047e83fd44d484c2cb2616f019f0f" : true
30 | "a32692af33e559e00a40aa3bb4004744d2c1083112468ed1c8040eaacd15c6eb0f" : true
31 | "3e61901f50508aba72f12740fda2be488571afc51d718d845e339e5d1d1b531d0f" : true
32 | "de43758b653b3383aca640a96c7890458eadd35242e8f8531f29b606890a14ea0f" : true
33 | "b9ee3b46c97d48742a73e35494d3a373602460609e3c6c54a553fc4d83b659e40f" : true
34 | "0ff29c1d036c3f4841f3f485e28d77351abb3eeeb52d2f8d802fd15e383d9a5f0f" : true
35 | "eb1a13c6b6e42bb7470e222b51d36144a25ffc4fbc0b32e9a1ec11f059001bc80f" : true
36 | "9c189d6d644bad9596f78519d870a685624f813afc1d0e49155073d3b0521f970f" : true
37 | "aea7c8f7726871714e777ac730e77e1905a38e9587f9504b739ff9b77ef2d5cc0f" : true
38 | "ac6e225b8324c1fcbe814382e198495bea801dfeb56cb22b9e89066cc52ab03b0f" : true
39 | "3034e8b7d75861fc28a478b4992a8592b5478d4cbc7b87150d0b59573d731d870f" : true
40 | "140f1b7b7ba32f34ad6302d0ed78692cf1564760d78c082965dc3b8b5f7e27f10f" : true
41 | "833f27edcf54cc489795df1dc7d9f0cbea8253e1b84f5e82749a7a2a4ffc295c0f" : true
42 | "110a64513b4188eca2af6406a8a6dbf278dfce324b8879b5cb67e8626ff2af180f" : true
43 | "3042dbe45383b0c2eafe13a73da35c4e721be026d7908dfcef6eb121d95b75b10f" : true
44 | "50ba350ddc388f7c6fdba032a7d283e4caa0ca656f92f69257213222dd7deeaf0f" : true
45 | "803854b4074d668e1761ee9c533c0fc576bd0404cf26ff7545e14512f3b9002f0f" : true
46 | "2e08f0b9566e15fa1f9e67b236e5385cdb38d57ff51d7ab3e568532867c9f8890f" : true
47 | "cb97f4b62f2e817e8db8c6193440214ad20f906571e4851db186869f0b4c0e310f" : true
48 | "a5c4a30d1eaaf752df424bf813c5a907a5cf94fd371e280d39e0a3d078310fba0f" : true
49 | "c7d26afbc1957ecca890d8d9001a9cc4863490161720ad76a2aedeb8c2d50df70f" : true
50 | "b385c0c76d790aba156ff68fd571171fc7cb85f75e7fc9d1561d7960d8875acb0f" : true
51 | "47d349b8bb3c8457449390ca2ed5e489a70ad511ab3edb4c7f0af27eed8c65d30f" : true
52 | "2785b24acd6869e1e7d38a91793af549f3c35cd0729127d200b66f8c0ffba59b0f" : true
53 | "503df567f98cf5910ba44cb95e157e656afe95d159a15c7df4e88ac6016c948f0f" : true
54 | "2892863758cdaf9796fb36e2466093762efda94e74eb51e3ab9d6bec54064b8a0f" : true
55 | "e1d60584995e677254f7d913b3f40060b5500241d6de0c5822ba1282acc5e08b0f" : true
56 | "031b506b705926ea962e59046bfe1720dcf72c85310502020e2ae836b294fcde0f" : true
57 | "1454fec21489f17a6d78927af1c9dca4209360c6ef6bfa569d8b62d32e668ea30f" : true
58 | "ba68052597a3782f64079d7d9ec821ea9785c0868e44b597a04c9cd8bf634c1e0f" : true
59 | "db8d59151b2f78c82c095c9545f1e4d39947a0c0bcc01b907e0ace14517d39970f" : true
60 | "e088beccfee26c5df39239023d1e4e0cbcd63fd50d0bdc4bf2c2ba25ef1a8fe40f" : true
61 | "8182f385c347fe57d3c46fe40e8df0e2d6cabdac38f490417b313050249be9dc0f" : true
62 | "2415e1c77b0815661452ea683e366c6d9dfd2008a7dbc907004c3a33e56cf6190f" : true
63 | "44847743878bd56f5cd74980475e8f4e95d0d6ec1dd8722fd7cfc7761698ec780f" : true
64 | "70c4026afec66312456b6820492b7936bff42b58ca7a035729462700677ef4190f" : true
65 | "7591a920a5050de28faad24b5fe3336f658b964e0e64464b70878bfcf04537420f" : true
66 | "10a45e10ff2585b03b9b5bc449cb1a7a44fbb7fcf25565286cb2d969ad9b89ae0f" : true
67 | "062e6799f211177023bc310fd6e4e28a8e2e18f972d9b037d24434a203aca7240f" : true
68 | "db9a0afaab297048be0d44ffd6d89a3eb6a003256426d7fd87a60ab59880f8160f" : true
69 | "58bf751ddd23065a820449701f8a1a0a46019e1c54612ea0867086dbd405589a0f" : true
70 | "062e6799f211177023bc310fd6e4e28a8e2e18f972d9b037d24434a203aca7240f" : true
71 | "10a45e10ff2585b03b9b5bc449cb1a7a44fbb7fcf25565286cb2d969ad9b89ae0f" : true
72 | "44847743878bd56f5cd74980475e8f4e95d0d6ec1dd8722fd7cfc7761698ec780f" : true
73 | "58bf751ddd23065a820449701f8a1a0a46019e1c54612ea0867086dbd405589a0f" : true
74 | "70c4026afec66312456b6820492b7936bff42b58ca7a035729462700677ef4190f" : true
75 | "7591a920a5050de28faad24b5fe3336f658b964e0e64464b70878bfcf04537420f" : true
76 | "db9a0afaab297048be0d44ffd6d89a3eb6a003256426d7fd87a60ab59880f8160f" : true
77 | }
78 |
79 | # We had an incident where a Go client using an old reverse-sig format got some
80 | # links into a public chain. (Sorry Fred!) Skip reverse signature checking for
81 | # this fixed set of links.
82 | known_buggy_reverse_sigs = {
83 | "2a0da9730f049133ce728ba30de8c91b6658b7a375e82c4b3528d7ddb1a21f7a0f": true
84 | "eb5c7e7d3cf8370bed8ab55c0d8833ce9d74fd2c614cf2cd2d4c30feca4518fa0f": true
85 | "0f175ef0d3b57a9991db5deb30f2432a85bc05922bbe727016f3fb660863a1890f": true
86 | "48267f0e3484b2f97859829503e20c2f598529b42c1d840a8fc1eceda71458400f": true
87 | };
88 |
89 | # Some users (6) managed to reuse eldest keys after a sigchain reset, without
90 | # using the "eldest" link type, before the server prohibited this. To clients,
91 | # that means their chains don't appear to reset. We hardcode these cases.
92 | hardcoded_resets = {
93 | "11111487aa193b9fafc92851176803af8ed005983cad1eaf5d6a49a459b8fffe0f": true
94 | "df0005f6c61bd6efd2867b320013800781f7f047e83fd44d484c2cb2616f019f0f": true
95 | "32eab86aa31796db3200f42f2553d330b8a68931544bbb98452a80ad2b0003d30f": true
96 | "5ed7a3356fd0f759a4498fc6fed1dca7f62611eb14f782a2a9cda1b836c58db50f": true
97 | "d5fe2c5e31958fe45a7f42b325375d5bd8916ef757f736a6faaa66a6b18bec780f": true
98 | "1e116e81bc08b915d9df93dc35c202a75ead36c479327cdf49a15f3768ac58f80f": true
99 | }
100 |
101 | # For testing that caches are working properly. (Use a wrapper object instead
102 | # of a simple counter because main.iced copies things.)
103 | exports.debug =
104 | unbox_count: 0
105 |
106 | exports.ParsedKeys = ParsedKeys = class ParsedKeys
107 | @parse : ({key_bundles}, cb) ->
108 | # We only take key bundles from the server, either hex NaCl public keys, or
109 | # ascii-armored PGP public key strings. We compute the KIDs and
110 | # fingerprints ourselves, because we don't trust the server to do it for
111 | # us.
112 | esc = make_esc cb, "ParsedKeys.parse"
113 | default_eldest_kid_for_testing = null
114 | opts = { time_travel : true }
115 | parsed_keys = new ParsedKeys
116 | for bundle in key_bundles
117 | await kbpgp.ukm.import_armored_public {armored: bundle, opts}, esc defer key_manager
118 | await parsed_keys._add_key {key_manager}, esc defer()
119 | default_eldest_kid_for_testing or= key_manager.get_ekid().toString "hex"
120 | cb null, parsed_keys, default_eldest_kid_for_testing
121 |
122 | constructor : ->
123 | # Callers should use this class to get KeyManagers for KIDs they already
124 | # have, but callers MUST NOT iterate over the set of KIDs here as though it
125 | # were the valid set for a given user. The set of KIDs is *untrusted*
126 | # because it comes from the server. We keep the map private to prevent that
127 | # mistake.
128 | @_kids_to_merged_pgp_key_managers = {}
129 | @_kids_to_pgp_key_managers_by_hash = {}
130 | @_kids_to_nacl_keys = {}
131 |
132 | _add_key : ({key_manager}, cb) ->
133 | esc = make_esc cb, "ParsedKeys._add_key"
134 | kid = key_manager.get_ekid()
135 | kid_str = kid.toString "hex"
136 |
137 | if key_manager.pgp?
138 | if (existing = @_kids_to_merged_pgp_key_managers[kid_str])?
139 | existing.merge_everything key_manager
140 | else
141 | @_kids_to_merged_pgp_key_managers[kid_str] = key_manager
142 | await key_manager.pgp_full_hash {}, esc defer hash
143 | (@_kids_to_pgp_key_managers_by_hash[kid_str] or= {})[hash] = key_manager
144 | else
145 | @_kids_to_nacl_keys[kid_str] = key_manager
146 | cb()
147 |
148 | # We may have multiple versions of a PGP key with the same KID/fingerprint.
149 | # They could have different subkeys and userids, and an old subkey could have
150 | # been used to sign an old link. We historically handled these cases
151 | # by merging all versions of a PGP key together, but since it's valid to
152 | # upload a new version of a PGP key specifically to revoke a subkey and
153 | # prevent it from signing new chainlinks, we realized that it's necessary to
154 | # track which version of the key is active. When a PGP key is signed in or
155 | # updated, the hash of the ASCII-armored public key is now specified in the
156 | # sigchain.
157 | #
158 | # get_merged_pgp_key_manager must only be used when a hash hasn't been
159 | # specified (in, say, an old sigchain). When an eldest, sibkey, or pgp_update
160 | # link specifies a hash, get_pgp_key_manager_with_hash must be used for all
161 | # following links signed by that KID.
162 |
163 | get_merged_pgp_key_manager : (kid) ->
164 | @_kids_to_merged_pgp_key_managers[kid]
165 |
166 | get_pgp_key_manager_with_hash : (kid, hash) ->
167 | @_kids_to_pgp_key_managers_by_hash[kid]?[hash]
168 |
169 | get_nacl_key_manager : (kid) ->
170 | @_kids_to_nacl_keys[kid]
171 |
172 | # KeyState tracks hashes that have been specified for PGP keys. As long as it's
173 | # kept up to date as the sigchain is replayed, it can safely be used to get the
174 | # correct KeyManager for a given KID.
175 | class KeyState
176 | constructor : ({@parsed_keys}) ->
177 | @_kid_to_hash = {}
178 |
179 | set_key_hash : ({kid, hash}, cb) ->
180 | if not @parsed_keys.get_pgp_key_manager_with_hash(kid, hash)?
181 | cb new E.NoKeyWithThisHashError "No PGP key with kid #{kid} and hash #{hash} exists"
182 | return
183 | @_kid_to_hash[kid] = hash
184 | cb()
185 |
186 | get_key_manager : (kid) ->
187 | if (key = @parsed_keys.get_nacl_key_manager kid)?
188 | return key
189 | if (hash = @_kid_to_hash[kid])?
190 | return @parsed_keys.get_pgp_key_manager_with_hash kid, hash
191 | return @parsed_keys.get_merged_pgp_key_manager kid
192 |
193 | class ChainLink
194 | @parse : ({sig_blob, key_state, sig_cache}, cb) ->
195 | esc = make_esc cb, "ChainLink.parse"
196 | # Unbox the signed payload. PGP key expiration is checked automatically
197 | # during unbox, using the ctime of the chainlink.
198 | await @_unbox_payload {sig_blob, key_state, sig_cache}, esc defer payload, sig_id, payload_hash
199 | # Check internal details of the payload, like uid length.
200 | await check_link_payload_format {payload}, esc defer()
201 | # Make sure the KID from the server matches the payload, and that any
202 | # payload PGP fingerprint also matches the KID.
203 | await @_check_payload_against_server_kid {sig_blob, payload, key_state}, esc defer()
204 | # Check any reverse signatures. For links where we skip this step, we will
205 | # ignore their contents later.
206 | if not known_buggy_reverse_sigs[sig_id]
207 | await @_check_reverse_signatures {payload, key_state}, esc defer()
208 | # The constructor takes care of all the payload parsing that isn't failable.
209 | cb null, new ChainLink {kid: sig_blob.kid, sig_id, payload, payload_hash}
210 |
211 | @_unbox_payload : ({sig_blob, key_state, sig_cache}, cb) ->
212 | esc = make_esc cb, "ChainLink._unbox_payload"
213 | # Get the signing KID directly from the server blob. We'll confirm later
214 | # that this is the same as the KID listed in the payload.
215 | kid = sig_blob.kid
216 | # Get the key_manager and sig_eng we need from the ParsedKeys object.
217 | key_manager = key_state.get_key_manager kid
218 | if not key_manager?
219 | await athrow (new E.NonexistentKidError "link signed by nonexistent kid #{kid}"), esc defer()
220 | sig_eng = key_manager.make_sig_eng()
221 | # We need the signing ctime to verify the signature, and that's actually in
222 | # the signed payload. So we fully parse the payload *before* verifying, and
223 | # do the actual (maybe cached) verification at the end.
224 | await sig_eng.get_body_and_unverified_payload(
225 | {armored: sig_blob.sig}, esc defer sig_body, unverified_buffer)
226 | sig_id = kbpgp.hash.SHA256(sig_body).toString("hex") + SIG_ID_SUFFIX
227 | payload_json = unverified_buffer.toString('utf8')
228 | await a_json_parse payload_json, esc defer payload
229 | ctime_seconds = payload.ctime
230 | # Now that we have the ctime, get the verified payload.
231 | if sig_cache?
232 | await sig_cache.get {sig_id}, esc defer verified_buffer
233 | if not verified_buffer?
234 | exports.debug.unbox_count++
235 | await key_manager.make_sig_eng().unbox(
236 | sig_blob.sig,
237 | defer(err, verified_buffer),
238 | {now: ctime_seconds})
239 | if err?
240 | await athrow (new E.VerifyFailedError err.message), esc defer()
241 | if sig_cache?
242 | await sig_cache.put {sig_id, payload_buffer: verified_buffer}, esc defer()
243 | # Check that what we verified matches the unverified payload we used above.
244 | # Ideally it should be impossible for there to be a difference, but this
245 | # protects us from bugs/attacks that might exploit multiple payloads,
246 | # particularly in PGP.
247 | await check_buffers_equal verified_buffer, unverified_buffer, esc defer()
248 | # Success!
249 |
250 | # See comment above about bad whitespace sigs from 15 Sep 2015
251 | hash_input = if bad_whitespace_sig_ids[sig_id] then strip_final_newline verified_buffer
252 | else verified_buffer
253 |
254 | payload_hash = kbpgp.hash.SHA256(hash_input).toString("hex")
255 |
256 | cb null, payload, sig_id, payload_hash
257 |
258 | @_check_payload_against_server_kid : ({sig_blob, payload, key_state}, cb) ->
259 | # Here's where we check the data we relied on in @_unbox_payload().
260 | signing_kid = sig_blob.kid
261 | signing_fingerprint = key_state.get_key_manager(signing_kid).get_pgp_fingerprint()?.toString('hex')
262 | payload_kid = payload.body.key.kid
263 | payload_fingerprint = payload.body.key.fingerprint
264 | err = null
265 | if payload_kid? and payload_kid isnt signing_kid
266 | err = new E.KidMismatchError "signing kid (#{signing_kid}) and payload kid (#{payload_kid}) mismatch"
267 | else if payload_fingerprint? and payload_fingerprint isnt signing_fingerprint
268 | err = new E.FingerprintMismatchError "signing fingerprint (#{signing_fingerprint}) and payload fingerprint (#{payload_fingerprint}) mismatch"
269 | cb err
270 |
271 | @_check_reverse_signatures : ({payload, key_state}, cb) ->
272 | esc = make_esc cb, "ChainLink._check_reverse_signatures"
273 | if payload.body.sibkey?
274 | kid = payload.body.sibkey.kid
275 | full_hash = payload.body.sibkey.full_hash
276 | sibkey_key_manager = if full_hash?
277 | # key_state hasn't been updated with the new sibkey's full hash yet
278 | key_state.parsed_keys.get_pgp_key_manager_with_hash kid, full_hash
279 | else
280 | key_state.get_key_manager kid
281 | if not sibkey_key_manager?
282 | await athrow (new E.NonexistentKidError "link reverse-signed by nonexistent kid #{kid}"), esc defer()
283 | sibkey_proof = new proofs.Sibkey {}
284 | await sibkey_proof.reverse_sig_check {json: payload, subkm: sibkey_key_manager}, defer err
285 | if err?
286 | await athrow (new E.ReverseSigVerifyFailedError err.message), esc defer()
287 | if payload.body.subkey?
288 | kid = payload.body.subkey.kid
289 | subkey_key_manager = key_state.get_key_manager kid
290 | if not subkey_key_manager?
291 | await athrow (new E.NonexistentKidError "link delegates nonexistent subkey #{kid}"), esc defer()
292 | cb null
293 |
294 | constructor : ({@kid, @sig_id, @payload, @payload_hash}) ->
295 | @uid = @payload.body.key.uid
296 | @username = @payload.body.key.username
297 | @seqno = @payload.seqno
298 | @prev = @payload.prev
299 | # @fingerprint is PGP-only.
300 | @fingerprint = @payload.body.key.fingerprint
301 | # Not all links have the "eldest_kid" field, but if they don't, then their
302 | # signing KID is implicitly the eldest.
303 | @eldest_kid = @payload.body.key.eldest_kid or @kid
304 | @ctime_seconds = @payload.ctime
305 | @etime_seconds = @ctime_seconds + @payload.expire_in
306 | @type = @payload.body.type
307 |
308 | # Only expected to be set in eldest links
309 | @signing_key_hash = @payload.body.key.full_hash
310 |
311 | @sibkey_delegation = @payload.body.sibkey?.kid
312 | @sibkey_hash = @payload.body.sibkey?.full_hash
313 |
314 | @subkey_delegation = @payload.body.subkey?.kid
315 |
316 | @pgp_update_kid = @payload.body.pgp_update?.kid
317 | @pgp_update_hash = @payload.body.pgp_update?.full_hash
318 |
319 | @key_revocations = []
320 | if @payload.body.revoke?.kids?
321 | @key_revocations = @payload.body.revoke.kids
322 | if @payload.body.revoke?.kid?
323 | @key_revocations.push(@payload.body.revoke.kid)
324 |
325 | @sig_revocations = []
326 | if @payload.body.revoke?.sig_ids?
327 | @sig_revocations = @payload.body.revoke.sig_ids
328 | if @payload.body.revoke?.sig_id?
329 | @sig_revocations.push(@payload.body.revoke.sig_id)
330 |
331 |
332 | # Exported for testing.
333 | exports.check_link_payload_format = check_link_payload_format = ({payload}, cb) ->
334 | esc = make_esc cb, "check_link_payload_format"
335 | uid = payload.body.key.uid
336 | if uid.length != UID_LEN
337 | await athrow (new E.BadLinkFormatError "UID wrong length: #{uid.length}"), esc defer()
338 | cb()
339 |
340 |
341 | # Also exported for testing. This check will never fail under normal
342 | # circumstances, so we need a test to explicitly make it fail.
343 | exports.check_buffers_equal = check_buffers_equal = (verified_buffer, unverified_buffer, cb) ->
344 | err = null
345 | if not bufeq_secure(verified_buffer,unverified_buffer)
346 | msg = """Payload mismatch!
347 | Verified:
348 | #{verified_buffer.toString('hex')}
349 | Unverified:
350 | #{unverified_buffer.toString('hex')}"""
351 | err = new E.VerifyFailedError msg
352 | cb err
353 |
354 |
355 | exports.SigChain = class SigChain
356 |
357 | # The replay() method is the main interface for all callers. It checks all of
358 | # the user's signatures and returns a SigChain object representing their
359 | # current state.
360 | #
361 | # @param {[string]} sig_blobs The parsed JSON signatures list returned from
362 | # the server's sig/get.json endpoint.
363 | # @param {ParsedKeys} parsed_keys The unverified collection of all the user's
364 | # public keys. This is usually obtained from the all_bundles list given
365 | # by user/lookup.json, passed to ParsedKeys.parse(). NaCl public key
366 | # material is contained entirely within the KID, so technically all this
367 | # extra data is only needed for PGP, but we treat both types of keys the
368 | # same way for simplicity.
369 | # @param {object} sig_cache An object with two methods: get({sig_id}, cb) and
370 | # put({sig_id, payload_buffer}, cb), which caches the payloads of
371 | # previously verified signatures. This parameter can be null, in which
372 | # case all signatures will be checked.
373 | # @param {string} uid Used only to check that the sigchain belongs to the
374 | # right user.
375 | # @param {string} username As with uid, used for confirming ownership.
376 | # @param {string} eldest_kid The full (i.e. with-prefix) KID of the user's
377 | # current eldest key. This is used to determine the latest subchain.
378 | # @param {function} log A logging function, called with single message
379 | # strings. May be null, in which case logging is a no-op.
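# A rough calling sketch for replay() (variable names here are assumed; see the
# parameter docs above for where each input comes from):
#
#    {ParsedKeys, SigChain} = require 'libkeybase'
#    await ParsedKeys.parse { key_bundles }, defer err, parsed_keys
#    await SigChain.replay { sig_blobs, parsed_keys, sig_cache : null, uid,
#      username, eldest_kid, log : null }, defer err, sigchain
#    links = sigchain.get_links()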
380 | @replay : ({sig_blobs, parsed_keys, sig_cache, uid, username, eldest_kid, log}, cb) ->
381 | log = log or (() ->)
382 | log "+ libkeybase: replay(username: #{username}, uid: #{uid}, eldest: #{eldest_kid})"
383 | esc = make_esc cb, "SigChain.replay"
384 | # Forgetting the eldest KID would silently give you an empty sigchain. Prevent this.
385 | if not eldest_kid?
386 | await athrow (new Error "eldest_kid parameter is required"), esc defer()
387 | # Initialize the SigChain.
388 | sigchain = new SigChain {uid, username, eldest_kid, parsed_keys}
389 | # Build the chain link by link, checking consistency all the way through.
390 | for sig_blob in sig_blobs
391 | log "| libkeybase: replaying signature #{sig_blob.seqno}: #{sig_blob.sig_id}"
392 | await sigchain._add_new_link {sig_blob, sig_cache, log}, esc defer()
393 | # If the eldest kid of the current subchain doesn't match the eldest kid
394 | # we're looking for, that means we're on a new zero-length subchain.
395 | if eldest_kid isnt sigchain._current_subchain_eldest
396 | sigchain._reset_subchain(eldest_kid)
397 | # After the chain is finished, make sure we've proven ownership of the
398 | # eldest key in some way.
399 | await sigchain._enforce_eldest_key_ownership {}, esc defer()
400 | log "- libkeybase: replay finished"
401 | cb null, sigchain
402 |
403 | # NOTE: Don't call the constructor directly. Use SigChain.replay().
404 | constructor : ({uid, username, eldest_kid, parsed_keys}) ->
405 | @_uid = uid
406 | @_username = username
407 | @_eldest_kid = eldest_kid
408 | @_parsed_keys = parsed_keys
409 | @_next_seqno = 1
410 | @_next_payload_hash = null
411 |
412 | @_reset_subchain(null)
413 |
414 | _reset_subchain : (current_subchain_eldest) ->
415 | @_current_subchain = []
416 | @_current_subchain_eldest = current_subchain_eldest
417 | @_key_state = new KeyState {parsed_keys: @_parsed_keys}
418 | @_unrevoked_links = {}
419 | @_valid_sibkeys = {}
420 | @_valid_sibkeys[current_subchain_eldest] = true
421 | @_sibkey_order = [current_subchain_eldest]
422 | @_valid_subkeys = {}
423 | @_subkey_order = []
424 | @_kid_to_etime_seconds = {}
425 | @_update_kid_pgp_etime { kid: current_subchain_eldest }
426 |
427 | # Return the list of links in the current subchain which have not been
428 | # revoked.
429 | get_links : () ->
430 | return (link for link in @_current_subchain when link.sig_id of @_unrevoked_links)
431 |
432 | # Return the list of sibkey KIDs which aren't revoked or expired.
433 | get_sibkeys : ({now}) ->
434 | now = now or current_time_seconds()
435 | ret = []
436 | for kid in @_sibkey_order
437 | etime = @_kid_to_etime_seconds[kid]
438 | expired = (etime? and now > etime)
439 | if @_valid_sibkeys[kid] and not expired
440 | ret.push @_key_state.get_key_manager kid
441 | ret
442 |
443 | # Return the list of subkey KIDs which aren't revoked or expired.
444 | get_subkeys : ({now}) ->
445 | now = now or current_time_seconds()
446 | ret = []
447 | for kid in @_subkey_order
448 | etime = @_kid_to_etime_seconds[kid]
449 | expired = (etime? and now > etime)
450 | if @_valid_subkeys[kid] and not expired
451 | ret.push @_key_state.get_key_manager kid
452 | ret
453 |
454 | _add_new_link : ({sig_blob, sig_cache, log}, cb) ->
455 | esc = make_esc cb, "SigChain._add_new_link"
456 |
457 | # This constructor checks that the link is internally consistent: its
458 | # signature is valid and belongs to the key it claims, and the same for any
459 | # reverse sigs.
460 | await ChainLink.parse {sig_blob, key_state: @_key_state, sig_cache}, esc defer link
461 | log "| libkeybase: chain link parsed, type '#{link.payload.body.type}'"
462 |
463 | # Make sure the link belongs in this chain (right username and uid) and at
464 | # this particular point in the chain (right seqno and prev hash).
465 | await @_check_link_belongs_here {link}, esc defer()
466 |
467 | # Now see if we've hit a sigchain reset. That can happen for one of two
468 | # reasons:
469 | # 1) The eldest kid reported by this link (either explicitly or
470 | # implicitly; see ChainLink.eldest_kid) is different from the one that
471 | # came before it.
472 | # 2) This link is of the "eldest" type.
473 | # We *don't* short-circuit here though. Verifying past subchains just like
474 | # we verify the current one actually simplifies PGP full hash handling.
475 | # (Otherwise we'd have to figure out how to maintain the KeyState, or else
476 | # we wouldn't even be able to verify link signatures [correctly, without
477 | # resorting to key merging].)
478 | if (link.eldest_kid isnt @_current_subchain_eldest or
479 | link.type is "eldest" or
480 | hardcoded_resets[link.sig_id])
481 | log "| libkeybase: starting new subchain"
482 | @_reset_subchain(link.eldest_kid)
483 |
484 | # Links with known bad reverse sigs still have to have valid payload hashes
485 | # and seqnos, but their contents are ignored, and their signing keys might
486 | # not be valid sibkeys (because the delegating links of those sibkeys might
487 | # also have been bad). Short-circuit here for these links, after checking
488 | # the link position but before checking the validity of the signing key.
489 | if known_buggy_reverse_sigs[link.sig_id]
490 | cb null
491 | return
492 |
493 | # Finally, make sure that the key that signed this link was actually valid
494 | # at the time the link was signed.
495 | await @_check_key_is_valid {link}, esc defer()
496 | log "| libkeybase: signing key is valid (#{link.kid})"
497 |
498 | # This link is valid and part of the current subchain. Update all the
499 | # relevant metadata.
500 | @_current_subchain.push(link)
501 | @_unrevoked_links[link.sig_id] = link
502 | await @_delegate_keys {link, log}, esc defer()
503 | await @_revoke_keys_and_sigs {link, log}, esc defer()
504 |
505 | cb null
506 |
507 | _check_link_belongs_here : ({link}, cb) ->
508 | err = null
509 | if link.uid isnt @_uid
510 | err = new E.WrongUidError(
511 |         "Link doesn't refer to the right uid,
512 |          expected: #{@_uid} got: #{link.uid}")
513 | else if link.username.toLowerCase() isnt @_username.toLowerCase()
514 | err = new E.WrongUsernameError(
515 |         "Link doesn't refer to the right username,
516 |          expected: #{@_username} got: #{link.username}")
517 | else if link.seqno isnt @_next_seqno
518 | err = new E.WrongSeqnoError(
519 | "Link sequence number is wrong, expected:
520 | #{@_next_seqno} got: #{link.seqno}")
521 | else if @_next_payload_hash? and link.prev isnt @_next_payload_hash
522 | err = new E.WrongPrevError(
523 | "Previous payload hash doesn't match,
524 | expected: #{@_next_payload_hash} got: #{link.prev}")
525 | @_next_seqno++
526 | @_next_payload_hash = link.payload_hash
527 | cb err
528 |
529 | _check_key_is_valid : ({link}, cb) ->
530 | err = null
531 | if link.kid not of @_valid_sibkeys
532 | err = new E.InvalidSibkeyError("not a valid sibkey: #{link.kid}, valid sibkeys:
533 | #{JSON.stringify(kid for kid of @_valid_sibkeys)}")
534 | else if link.ctime_seconds > @_kid_to_etime_seconds[link.kid]
535 | err = new E.ExpiredSibkeyError "expired sibkey: #{link.kid}"
536 | cb err
537 |
538 | _delegate_keys : ({link, log}, cb) ->
539 | esc = make_esc cb, 'SigChain._delegate_keys'
540 | # If this is the first link in the subchain, it implicitly delegates the
541 | # eldest key.
542 | if @_current_subchain.length == 1
543 | log "| libkeybase: delegating eldest key #{link.kid}"
544 | await @_delegate_sibkey {
545 | kid: link.kid
546 | etime_seconds: link.etime_seconds
547 | full_hash: link.signing_key_hash
548 | }, esc defer()
549 |
550 | if link.sibkey_delegation?
551 | log "| libkeybase: delegating sibkey #{link.sibkey_delegation}"
552 | await @_delegate_sibkey {
553 | kid: link.sibkey_delegation
554 | etime_seconds: link.etime_seconds
555 | full_hash: link.sibkey_hash
556 | }, esc defer()
557 |
558 | if link.subkey_delegation?
559 | log "| libkeybase: delegating subkey #{link.subkey_delegation}"
560 | await @_delegate_subkey {
561 | kid: link.subkey_delegation
562 | etime_seconds: link.etime_seconds
563 | }, esc defer()
564 |
565 | # Handle pgp_update links.
566 | if link.pgp_update_kid? and link.pgp_update_hash? and @_valid_sibkeys[link.pgp_update_kid]?
567 | await @_key_state.set_key_hash {kid: link.pgp_update_kid, hash: link.pgp_update_hash}, esc defer()
568 | cb()
569 |
570 | _delegate_sibkey : ({kid, etime_seconds, full_hash}, cb) ->
571 | esc = make_esc cb, 'SigChain._delegate_sibkey'
572 | @_valid_sibkeys[kid] = true
573 | if kid not in @_sibkey_order
574 | @_sibkey_order.push kid
575 | @_update_kid_etime { kid, etime_seconds }
576 | @_update_kid_pgp_etime { kid }
577 | if full_hash?
578 | await @_key_state.set_key_hash {kid, hash: full_hash}, esc defer()
579 | cb null
580 |
581 | _delegate_subkey : ({kid, etime_seconds}, cb) ->
582 | esc = make_esc cb, 'SigChain._delegate_subkey'
583 | @_valid_subkeys[kid] = true
584 | if kid not in @_subkey_order
585 | @_subkey_order.push kid
586 | @_update_kid_etime { kid, etime_seconds }
587 | # Subkeys are always NaCl, never PGP. So no full hash or pgp_etime stuff.
588 | cb null
589 |
590 | _update_kid_pgp_etime : ({kid}) ->
591 | # PGP keys have an internal etime, which could be sooner than their link
592 | # etime. If so, that's what we'll use.
593 | key_manager = @_key_state.get_key_manager kid
594 | lifespan = key_manager?.primary?.lifespan
595 | if lifespan?.expire_in?
596 | etime_seconds = lifespan.generated + lifespan.expire_in
597 | @_update_kid_etime {kid, etime_seconds}
598 |
599 | _update_kid_etime : ({kid, etime_seconds}) ->
600 | # PGP keys can have two different etimes: the expiration time of their
601 | # delegating chain link and the internal expiration time recorded by PGP.
602 | # We believe the more restrictive of the two.
603 | if not @_kid_to_etime_seconds[kid]?
604 | @_kid_to_etime_seconds[kid] = etime_seconds
605 | else
606 | @_kid_to_etime_seconds[kid] = Math.min(etime_seconds, @_kid_to_etime_seconds[kid])
607 |
608 | _revoke_keys_and_sigs : ({link, log}, cb) ->
609 | # Handle direct sibkey revocations.
610 | for kid in link.key_revocations
611 | if kid of @_valid_sibkeys
612 | log "| libkeybase: revoking sibkey #{kid}"
613 | delete @_valid_sibkeys[kid]
614 | if kid of @_valid_subkeys
615 | delete @_valid_subkeys[kid]
616 |
617 | # Handle revocations of an entire link.
618 | for sig_id in link.sig_revocations
619 | if sig_id of @_unrevoked_links
620 | revoked_link = @_unrevoked_links[sig_id]
621 | delete @_unrevoked_links[sig_id]
622 | # Keys delegated by the revoked link are implicitly revoked as well.
623 | revoked_sibkey = revoked_link.sibkey_delegation
624 | if revoked_sibkey? and revoked_sibkey of @_valid_sibkeys
625 | log "| libkeybase: revoking sibkey #{revoked_sibkey} from sig #{sig_id}"
626 | delete @_valid_sibkeys[revoked_sibkey]
627 | revoked_subkey = revoked_link.subkey_delegation
628 | if revoked_subkey? and revoked_subkey of @_valid_subkeys
629 | delete @_valid_subkeys[revoked_subkey]
630 | cb()
631 |
632 | _enforce_eldest_key_ownership : ({}, cb) ->
633 | # It's important that users actually *prove* they own their eldest key,
634 | # rather than just claiming someone else's key as their own. The server
635 | # normally enforces this, and here we check the server's work. Proof can
636 | # happen in one of two ways: either the eldest key signs a link in the
637 | # sigchain (thereby referencing the username in the signature payload), or
638 | # the eldest key is a PGP key that self-signs its own identity.
639 | esc = make_esc cb, "SigChain._enforce_eldest_key_ownership"
640 | if @_current_subchain.length > 0
641 | # There was at least one chain link signed by the eldest key.
642 | cb null
643 | return
644 | # No chain link signed by the eldest key. Check PGP self sig.
645 | eldest_km = @_key_state.get_key_manager @_eldest_kid
646 | if not eldest_km?
647 | # Server-reported eldest key is simply missing.
648 | await athrow (new E.NonexistentKidError "no key for eldest kid #{@_eldest_kid}"), esc defer()
649 | userids = eldest_km.get_userids_mark_primary()
650 | if not userids?
651 | # Server-reported key doesn't self-sign any identities (probably because
652 | # it's a NaCl key and not a PGP key).
653 | await athrow (new E.KeyOwnershipError "key #{@_eldest_kid} is not self-signing"), esc defer()
654 | expected_email = @_username + "@keybase.io"
655 | for identity in userids
656 | if identity.get_email() == expected_email
657 | # Found a matching identity. This key is good.
658 | cb null
659 | return
660 | # No matching identity found.
661 | await athrow (new E.KeyOwnershipError "key #{@_eldest_kid} is not owned by #{expected_email}"), esc defer()
662 |
663 | error_names = [
664 | "BAD_LINK_FORMAT"
665 | "EXPIRED_SIBKEY"
666 | "FINGERPRINT_MISMATCH"
667 | "INVALID_SIBKEY"
668 | "KEY_OWNERSHIP"
669 | "KID_MISMATCH"
670 | "NO_KEY_WITH_THIS_HASH"
671 | "NONEXISTENT_KID"
672 | "NOT_LATEST_SUBCHAIN"
673 | "REVERSE_SIG_VERIFY_FAILED"
674 | "VERIFY_FAILED"
675 | "WRONG_UID"
676 | "WRONG_USERNAME"
677 | "WRONG_SEQNO"
678 | "WRONG_PREV"
679 | ]
680 |
681 | # make_errors() needs its input to be a map
682 | errors_map = {}
683 | for name in error_names
684 | errors_map[name] = ""
685 | exports.E = E = ie.make_errors errors_map
686 |
687 | current_time_seconds = () ->
688 | Math.floor(new Date().getTime() / 1000)
689 |
690 | # Stupid coverage hack. If this breaks just delete it please, and I'm so sorry.
691 | __iced_k_noop()
692 |
--------------------------------------------------------------------------------
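For orientation, here is a minimal sketch of how the replay API above is driven end to end. It mirrors the usage in test/files/31_sigchain.iced below; the require path, the replay_example wrapper, and the inline logger are illustrative assumptions, and the inputs (sig_blobs, key_bundles, uid, username, eldest_kid) are whatever the caller fetched from the Keybase API.

{make_esc} = require 'iced-error'
libkeybase = require '../..'  # illustrative path; these are the exports of lib/main.js

replay_example = ({sig_blobs, key_bundles, uid, username, eldest_kid}, cb) ->
  esc = make_esc cb, "replay_example"
  # Parse the raw key bundles once, then replay the chain against them.
  await libkeybase.ParsedKeys.parse {key_bundles}, esc defer parsed_keys
  await libkeybase.SigChain.replay {
    sig_blobs
    parsed_keys
    uid
    username
    eldest_kid
    log : (msg) ->  # replay() takes a plain logging function, not a logger object
  }, esc defer sigchain
  # Collect the unrevoked links and the currently valid keys.
  cb null, {
    links   : sigchain.get_links()
    sibkeys : sigchain.get_sibkeys {}
    subkeys : sigchain.get_subkeys {}
  }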
/test/browser/index.html:
--------------------------------------------------------------------------------
1 | <html>
2 | <head>
3 | <meta charset="utf-8"/>
4 | <title>Libkeybase In-Browser Testing Page</title>
5 | <script type="text/javascript" src="test.js"></script>
6 | </head>
7 | <body>
8 | <p>
9 | RC:
10 | <span id="rc">??</span>
11 | </p>
12 | <div id="log">
13 | </div>
14 | </body>
15 | </html>
16 |
--------------------------------------------------------------------------------
/test/browser/main.iced:
--------------------------------------------------------------------------------
1 |
2 | mods =
3 | merkle_leaf : require '../files/30_merkle_leaf.iced'
4 | sig_chain : require '../files/31_sigchain.iced'
5 |
6 | {BrowserRunner} = require('iced-test')
7 |
8 | window.onload = () ->
9 | br = new BrowserRunner { log : "log", rc : "rc" }
10 | await br.run mods, defer rc
11 |
--------------------------------------------------------------------------------
/test/files/30_merkle_leaf.iced:
--------------------------------------------------------------------------------
1 |
2 | {Leaf,Triple} = require('../..').merkle.leaf
3 | C = require('../..').constants
4 |
5 | #====================================================
6 |
7 | exports.test_v1 = (T,cb) ->
8 | raw = [ 2, "aabb", "ccdd" ]
9 | [err,leaf] = Leaf.parse raw
10 | T.no_error err
11 | T.equal leaf.get_public().to_json(), raw, "the right leaf value came back"
12 | cb()
13 |
14 | #====================================================
15 |
16 | exports.test_v2_1 = (T,cb) ->
17 | raw = [ 2, [ 1, "aabb", "ccdd" ], [ 2, "eeff", "0011" ] ]
18 | [err,leaf] = Leaf.parse raw
19 | T.no_error err
20 | T.equal leaf.get_public().to_json(), raw[1], "the right public leaf value came back"
21 | T.equal leaf.get_semiprivate().to_json(), raw[2], "the right semiprivate leaf value came back"
22 | cb()
23 |
24 | #====================================================
25 |
26 | exports.test_v2_2 = (T,cb) ->
27 | raw = [ 2, [ 1, "aabb", "ccdd" ] ]
28 | [err,leaf] = Leaf.parse raw
29 | T.no_error err
30 | T.equal leaf.get_public().to_json(), raw[1], "the right public leaf value came back"
31 |   T.assert not(leaf.get_semiprivate()?), "no semiprivate leaf value came back"
32 | cb()
33 |
34 | #====================================================
35 |
36 | exports.test_v2_3 = (T,cb) ->
37 | bads = [
38 | [ ],
39 | [ "foo", null ],
40 | [ 3, null ],
41 | [ 2, [10, "aaa", "bbb" ] ],
42 | [ 2, [10, "aa", "bbb" ] ],
43 | [ 2, [10, "aaa", "bb" ] ],
44 | [ 2, [ "a", "aaa" ] ],
45 | [ 2, [10, "aa", "bb"], null, "a"],
46 | [ 2, [10, "aa", "bb"], [], "a"]
47 | [ 2, [10, "aa", "bb"], null, [2] ]
48 | [ 2, [10, "aa", "bb"], null, [2, 2] ]
49 | [ 2, [10, "aa", "bb"], null, ["aa", 2] ]
50 | [ 2, [10, "aa", "bb"], null, ["aa", "bb"] ]
51 | [ 2, null, [], "aa", [] ]
52 | [ 2, [1 ] ],
53 | [ 2 ],
54 | ]
55 | for bad,i in bads
56 | [err,leaf] = Leaf.parse bad
57 | T.assert err?, "parse error on object #{i}"
58 |
59 | goods = [
60 | [ 2, [1, "", "" ] ],
61 | [ 2, [], [], "aa" ]
62 | [ 2, null, null, "aa" ]
63 | [ 2, [], null, "aa" ]
64 | [ 2, null, [], "aa" ]
65 | [ 2, [10, "aa", "bb"], null, null ]
66 | ]
67 | for good, i in goods
68 | [err,leaf] = Leaf.parse good
69 | T.no_error err
70 |
71 | cb()
72 |
73 | #====================================================
74 |
75 | # test that beyond slots 1,2,3, it's open-ended
76 | exports.test_v2_4 = (T,cb) ->
77 | raw = [ 2, [ 1, "aabb", "ccdd", "4455", "other", "stuff", [ 1,2,3 ], { a: 3} ], [ 2, "eeff", "0011" ] ]
78 | [err,leaf] = Leaf.parse raw
79 | T.no_error err
80 | T.equal leaf.get_public().to_json(), raw[1][0...3], "the right public leaf value came back"
81 | T.equal leaf.get_semiprivate().to_json(), raw[2], "the right semiprivate leaf value came back"
82 | cb()
83 |
84 | #====================================================
85 |
86 | # test that beyond slots 1,2,3, it's open-ended
87 | exports.test_v2_5 = (T,cb) ->
88 | raw = [ 2,
89 | [ 1, "aabb", "ccdd", "4455", "other", "stuff", [ 1,2,3 ], { a: 3} ],
90 | [ 2, "eeff", "0011" ],
91 | "112233"
92 | [ 10, "eeff", 10 ]
93 | [ 3, 4, 5, 6, 7]
94 | ]
95 | [err,leaf] = Leaf.parse raw
96 | T.no_error err
97 | T.equal leaf.get_public().to_json(), raw[1][0...3], "the right public leaf value came back"
98 | T.equal leaf.get_semiprivate().to_json(), raw[2], "the right semiprivate leaf value came back"
99 | T.equal leaf.get_eldest_kid(), raw[3], "the right eldest kid leaf value came back"
100 |
101 | [err,leaf] = Leaf.parse leaf.to_json()
102 | T.no_error err
103 | T.equal leaf.get_eldest_kid(), raw[3], "full parse roundtrip"
104 | cb()
105 |
106 | #====================================================
107 |
108 | exports.test_v2_6 = (T,cb) ->
109 | raw = [ 2, null, null, "112233"]
110 | [err,leaf] = Leaf.parse raw
111 | T.no_error err
112 | [err,leaf] = Leaf.parse JSON.parse leaf.to_string()
113 | T.no_error err
114 | T.equal leaf.get_eldest_kid(), raw[3], "full parse roundtrip"
115 | cb()
116 |
117 | #====================================================
118 |
119 | exports.test_seqno_assertion = (T,cb) ->
120 | raw = [2, [ 10, "aa", "bb" ], [ 11, "cc", "dd" ], "ffee"]
121 | rows = [
122 | {seqno_type : C.seqno_types.PUBLIC, seqno : 10 },
123 | {seqno_type : C.seqno_types.SEMIPRIVATE, seqno : 11 },
124 | ]
125 | [err, leaf] = Leaf.parse raw
126 | T.no_error err
127 | assert = leaf.seqno_assertion()
128 | ok = assert(rows)
129 | T.assert ok, "assertion came back true"
130 |
131 | rows = [{ seqno_type : 10000, seqno : 10 } ]
132 | ok = assert(rows)
133 | T.assert not(ok), "bad seqno type"
134 |
135 | rows = [
136 | {seqno_type : C.seqno_types.PUBLIC, seqno : 10 },
137 | ]
138 | ok = assert(rows)
139 | T.assert not(ok), "missing semiprivate"
140 |
141 | rows = [
142 | {seqno_type : C.seqno_types.SEMIPRIVATE, seqno : 11 },
143 | ]
144 | ok = assert(rows)
145 |   T.assert not(ok), "missing public"
146 |
147 | raw = [ 2, [10, "aa", "bb" ] ]
148 | rows = [
149 | {seqno_type : C.seqno_types.PUBLIC, seqno : 10 }
150 | ]
151 | [err, leaf] = Leaf.parse raw
152 | T.no_error err
153 | assert = leaf.seqno_assertion()
154 | ok = assert(rows)
155 |   T.assert ok, "assertion came back true with no semiprivate chain"
156 |
157 | raw = [ 2, null, [11, "cc", "dd" ] ]
158 | rows = [
159 | {seqno_type : C.seqno_types.SEMIPRIVATE, seqno : 11 }
160 | ]
161 | [err, leaf] = Leaf.parse raw
162 | T.no_error err
163 | assert = leaf.seqno_assertion()
164 | ok = assert(rows)
165 |   T.assert ok, "assertion came back true with no public chain"
166 | cb()
167 |
168 | #====================================================
169 |
170 | exports.test_v2_7 = (T,cb) ->
171 | raw = [ 2, [ 1, "aabb", "ccdd" ], [ 2, "eeff", "0011" ], null, [ 3, 'aabb' ] ]
172 | [err,leaf] = Leaf.parse raw
173 | T.no_error err
174 | T.equal leaf.get_public().to_json(), raw[1], "the right public leaf value came back"
175 | T.equal leaf.get_semiprivate().to_json(), raw[2], "the right semiprivate leaf value came back"
176 | T.equal leaf.get_reset().to_json(), raw[4], "the right reset chain"
177 | cb()
178 |
179 | #====================================================
180 |
181 | exports.test_v2_8 = (T,cb) ->
182 | raw = [ 2, [ 1, "aabb", "ccdd" ], [ 2, "eeff", "0011" ], 'cc00', [ 3, 'aabb' ] ]
183 | [err,leaf] = Leaf.parse raw
184 | T.no_error err
185 | T.equal leaf.get_public().to_json(), raw[1], "the right public leaf value came back"
186 | T.equal leaf.get_semiprivate().to_json(), raw[2], "the right semiprivate leaf value came back"
187 | T.equal leaf.get_eldest_kid(), raw[3], "the right eldest kid"
188 | T.equal leaf.get_reset().to_json(), raw[4], "the right reset chain"
189 | cb()
190 |
191 | #====================================================
192 |
193 |
--------------------------------------------------------------------------------
/test/files/31_sigchain.iced:
--------------------------------------------------------------------------------
1 | {make_esc} = require 'iced-error'
2 | fs = require('fs')
3 | node_sigchain = require('../..')
4 | C = require('../..').constants
5 | execSync = require('child_process').execSync
6 | fs = require('fs')
7 | path = require('path')
8 | tv = require 'keybase-test-vectors'
9 |
10 | #====================================================
11 |
12 | exports.test_eldest_key_required = (T, cb) ->
13 |   # Make sure that if we forget to pass the eldest kid to SigChain.replay,
14 |   # that's an error. Otherwise we could get confusingly empty results.
15 | esc = make_esc cb, "test_eldest_key_required"
16 | {chain, keys, username, uid} = tv.chain_test_inputs["ralph_chain.json"]
17 | await node_sigchain.ParsedKeys.parse {key_bundles: keys}, esc defer parsed_keys
18 | await node_sigchain.SigChain.replay {
19 | sig_blobs: chain
20 | parsed_keys
21 | uid
22 | username
23 | # OOPS! Forgot the eldest_kid!
24 | }, defer err, sigchain
25 | T.assert err, "Forgetting to pass the eldest_kid should fail the replay!"
26 | cb()
27 |
28 | exports.test_chain_link_format = (T, cb) ->
29 | # The Go implementation is strict about details like UID length. This
30 | # implementation was lenient, so we ended up creating some test cases that
31 | # were unusable with Go. After fixing the test cases, we added
32 | # check_link_payload_format() to make sure we don't miss this again. This
33 |   # test just provides coverage for that method. It's not necessarily a
34 |   # failure case that other implementations need to reproduce.
35 | bad_uid_payload =
36 | body:
37 | key:
38 | uid: "wronglen"
39 | await node_sigchain.check_link_payload_format {payload: bad_uid_payload}, defer err
40 | T.assert err?, "short uid should fail"
41 | if err?
42 | T.assert err.code == node_sigchain.E.code.BAD_LINK_FORMAT, "wrong error type"
43 | cb()
44 |
45 | exports.test_check_buffers_equal = (T, cb) ->
46 | # Test coverage for check_buffers_equal, which can never fail under normal
47 | # circumstances.
48 | await node_sigchain.check_buffers_equal (Buffer.from('0')), (Buffer.from('1')), defer err
49 | T.assert err?
50 | cb()
51 |
52 | exports.test_sig_cache = (T, cb) ->
53 | # We accept a sig_cache parameter to skip verifying signatures that we've
54 | # verified before. Exercise that code. (Piggybacking on that, use a fake log
55 | # to exercise that code too.)
56 | esc = make_esc cb, "test_sig_cache"
57 | {chain, keys, username, uid, label_kids} = tv.chain_test_inputs["ralph_chain.json"]
58 |
59 | # Create a fake sig_cache.
60 | store = {}
61 | sig_cache =
62 | get: ({sig_id}, cb) ->
63 | cb null, store[sig_id]
64 | put: ({sig_id, payload_buffer}, cb) ->
65 | T.assert(sig_id? and payload_buffer?,
66 | "Trying to cache something bad: #{sig_id}, #{payload_buffer}")
67 | store[sig_id] = payload_buffer
68 | cb null
69 |
70 | # Create a fake log.
71 | log = (() -> null)
72 |
73 | # Zero the unbox counter (in case other tests have run earlier).
74 | node_sigchain.debug.unbox_count = 0
75 |
76 | # Replay the sigchain the first time.
77 | await node_sigchain.ParsedKeys.parse {key_bundles: keys}, esc defer parsed_keys
78 | await node_sigchain.SigChain.replay {
79 | sig_blobs: chain
80 | parsed_keys
81 | sig_cache
82 | uid
83 | username
84 | eldest_kid: label_kids.second_eldest
85 | log
86 | }, esc defer sigchain
87 |
88 | # Confirm that there's stuff in the cache.
89 | T.equal chain.length, Object.keys(store).length, "All the chain link sigs should be cached."
90 |
91 | # Assert the new value of the unbox counter.
92 | T.equal chain.length, node_sigchain.debug.unbox_count, "unboxed ralph's links"
93 |
94 | # Replay it again with the full cache to exercise the cache hit code path.
95 | await node_sigchain.ParsedKeys.parse {key_bundles: keys}, esc defer parsed_keys
96 | await node_sigchain.SigChain.replay {
97 | sig_blobs: chain
98 | parsed_keys
99 | sig_cache
100 | uid
101 | username
102 | eldest_kid: label_kids.second_eldest
103 | }, esc defer sigchain
104 |
105 | # Confirm the cache hasn't grown.
106 | T.equal chain.length, Object.keys(store).length, "Cache should be the same size it was before."
107 |
108 | # Assert the unbox counter hasn't moved.
109 | T.equal chain.length, node_sigchain.debug.unbox_count, "no further unboxing"
110 |
111 | cb()
112 |
113 | exports.test_all_sigchain_tests = (T, cb) ->
114 |   # This runs all the tests described in tests.json, which include many
115 | # example chains with both success parameters and expected failures.
116 | for test_name, body of tv.chain_tests.tests
117 | args = {T}
118 | for key, val of body
119 | args[key] = val
120 | T.waypoint test_name
121 | await do_sigchain_test args, defer err
122 | T.assert not err?, "Error in sigchain test '#{test_name}': #{err}"
123 | cb()
124 |
125 | do_sigchain_test = ({T, input, err_type, len, sibkeys, subkeys, eldest}, cb) ->
126 | esc = make_esc cb, "do_sigchain_test"
127 | input_blob = tv.chain_test_inputs[input]
128 | {chain, keys, username, uid} = input_blob
129 | await node_sigchain.ParsedKeys.parse {key_bundles: keys}, esc defer parsed_keys, default_eldest
130 | if not eldest?
131 | # By default, use the first key as the eldest.
132 | eldest_kid = default_eldest
133 | else
134 | eldest_kid = input_blob.label_kids[eldest]
135 | await node_sigchain.SigChain.replay {
136 | sig_blobs: chain
137 | parsed_keys
138 | uid
139 | username
140 | eldest_kid
141 | }, defer err, sigchain
142 | if err?
143 | if not err_type? or err_type != node_sigchain.E.name[err.code]
144 | # Not an error we expected.
145 | cb err
146 | return
147 | else
148 | # The error we were looking for!
149 | cb null
150 | return
151 | else if err_type?
152 | # We expected an error, and didn't get one!
153 | cb new Error "Expected error of type #{err_type}"
154 | return
155 | # No error.
156 | # Check the number of unrevoked links.
157 | links = sigchain.get_links()
158 | if len?
159 | T.assert links.length == len, "Expected exactly #{len} links, got #{links.length}"
160 | check_sibkey_and_subkey_count {T, sigchain, parsed_keys, eldest_kid, sibkeys, subkeys}
161 | cb()
162 |
163 | check_sibkey_and_subkey_count = ({T, sigchain, parsed_keys, eldest_kid, sibkeys, subkeys}) ->
164 | # Don't use the current time for tests, because eventually that will cause
165 | # keys to expire and tests to break.
166 | now = get_current_time_for_test { sigchain, parsed_keys }
167 | far_future = now + 100 * 365 * 24 * 60 * 60 # 100 years from now
168 |
169 | # Check the number of unrevoked/unexpired sibkeys.
170 | sibkeys_list = sigchain.get_sibkeys {now}
171 | if sibkeys?
172 | T.assert sibkeys_list.length == sibkeys, "Expected exactly #{sibkeys} sibkeys, got #{sibkeys_list.length}"
173 | if sigchain.get_links().length > 0
174 | # The eldest key might not expire if there are no links. Just skip this part of the test.
175 | T.assert sigchain.get_sibkeys({now: far_future}).length == 0, "Expected no sibkeys in the far future."
176 |
177 | # Check the number of unrevoked/unexpired subkeys.
178 | subkeys_list = sigchain.get_subkeys {now}
179 | if subkeys?
180 | T.assert subkeys_list.length == subkeys, "Expected exactly #{subkeys} subkeys, got #{subkeys_list.length}"
181 | T.assert sigchain.get_subkeys({now: far_future}).length == 0, "Expected no subkeys in the far future."
182 |
183 | # Get keys with the default time parameter (real now), just to make sure
184 | # nothing blows up (and to improve coverage :-D)
185 | sigchain.get_sibkeys {}
186 | sigchain.get_subkeys {}
187 |
188 | get_current_time_for_test = ({sigchain, parsed_keys}) ->
189 | # Pick a time that's later than the ctime of all links and PGP keys.
190 | t_seconds = 0
191 | for link in sigchain.get_links()
192 | t_seconds = Math.max(t_seconds, link.ctime_seconds)
193 | for kid, km of parsed_keys.key_managers
194 | if km.primary?.lifespan?.generated?
195 | t_seconds = Math.max(t_seconds, km.primary.lifespan.generated)
196 | return t_seconds
197 |
--------------------------------------------------------------------------------
/test/files/32_kvstore.iced:
--------------------------------------------------------------------------------
1 |
2 |
3 | top = require '../..'
4 | {Base,FlatMemory,Memory} = top.kvstore
5 | {E} = top.err
6 |
7 | # Turn this on for debugging output...
8 | #log = require 'iced-logger'
9 | #log.package().env().set_level(0)
10 |
11 | #========================================================
12 |
13 | OBJS = [
14 | { type : "a", key : "1", value : "a1", name : "name-a1" },
15 | { type : "a", key : "2", value : "a1a2", name : "name-a2" },
16 | { type : "a", key : "3", value : "a1a2a3", name : "name-a3" },
17 | { type : "b", key : "1", value : "b1", name : "name-b1" },
18 | { type : "b", key : "2", value : "b1b2", name : "name-b2" },
19 | { type : "b", key : "3", value : "b1b2b3", names : [ "name-b3" ] },
20 | ]
21 |
22 | #========================================================
23 |
24 | class Tester
25 |
26 | constructor : ({@T, klass}) ->
27 | @obj = new klass()
28 | @name = klass.name
29 |
30 | test : (cb) ->
31 | await @open defer()
32 | await @puts defer()
33 | await @gets defer()
34 | await @lookups defer()
35 | await @relink defer()
36 | await @unlink defer()
37 | await @resolve defer()
38 | await @remove defer()
39 | await @unlink_all defer()
40 | await @close defer()
41 | await @nuke defer()
42 | cb null
43 |
44 | close : (cb) ->
45 | await @obj.close {}, defer err
46 | @T.waypoint "close"
47 | @T.no_error err
48 | cb()
49 |
50 | nuke : (cb) ->
51 | await @obj.nuke {}, defer err
52 | @T.waypoint "nuke"
53 | @T.no_error err
54 | cb()
55 |
56 | open : (cb) ->
57 | await @obj.open {}, defer err
58 | @T.waypoint "open"
59 | @T.no_error err
60 | cb()
61 |
62 | puts : (cb) ->
63 | for o in OBJS
64 | await @obj.put o, defer err
65 | @T.no_error err
66 | await @obj.put { key : "aabb03", value : "value-aabb03" }, defer err
67 | @T.no_error err
68 | @T.waypoint "puts"
69 | cb()
70 |
71 | gets : (cb) ->
72 | for o,i in OBJS
73 | await @obj.get o, defer err, value
74 | @T.no_error err
75 | @T.equal value, o.value, "get test object #{i}"
76 | await @obj.get { type : "03", key : "aabb03" }, defer err, value
77 | @T.no_error err
78 | @T.equal value, "value-aabb03", "fetch of implicit type worked"
79 | @T.waypoint "gets"
80 | cb()
81 |
82 | lookups : (cb) ->
83 | for o,i in OBJS
84 | o.name = o.names[0] unless o.name?
85 | await @obj.lookup o, defer err, value
86 | @T.no_error err
87 | @T.equal value, o.value, "lookup test object #{i}"
88 | @T.waypoint "lookups"
89 | cb()
90 |
91 | relink : (cb) ->
92 | await @obj.link { type : "a", name : "foob", key : "1" }, defer err
93 | @T.no_error err
94 | await @obj.lookup { type : "a", name : "foob" }, defer err, value
95 | @T.no_error err
96 | @T.equal value, "a1", "relink worked (1)"
97 | await @obj.lookup { type : "a", name : "name-a1" }, defer err, value
98 | @T.no_error err
99 | @T.equal value, "a1", "relink worked (2)"
100 | @T.waypoint "relink"
101 | cb()
102 |
103 | unlink : (cb) ->
104 | if @obj.can_unlink()
105 | await @obj.unlink { type : "a", name : "zooot" }, defer err
106 | @T.assert (err? and err instanceof E.LookupNotFoundError), "unlink fails on name not found"
107 | await @obj.unlink { type : "a", name : "foob" }, defer err
108 | @T.no_error err
109 | await @obj.lookup { type : "a", name : "name-a1" }, defer err, value
110 | @T.no_error err
111 | @T.equal value, "a1", "unlink left original link in place"
112 | await @obj.lookup { type : "a", name : "foob" }, defer err, value
113 | @T.assert not(value?), "no value after unlink"
114 | @T.assert (err? and err instanceof E.LookupNotFoundError), "right lookup error"
115 | @T.waypoint "unlink"
116 | cb()
117 |
118 | resolve : (cb) ->
119 | await @obj.resolve { type : "a", name : "name-a3" }, defer err, key
120 | @T.no_error err
121 | @T.equal key, "3"
122 | @T.waypoint "resolve"
123 | cb()
124 |
125 | remove : (cb) ->
126 | # First try 2 failures to remove
127 | await @obj.remove { type : "a", key : "zoo" }, defer err
128 | @T.assert (err? and err instanceof E.NotFoundError), "right error on failed delete"
129 | await @obj.remove { type : "a", key : "zoo", optional : true }, defer err
130 | @T.no_error err
131 |
132 | await @obj.remove { type : "a" , key : "3" }, defer err
133 | @T.no_error err
134 | await @obj.get { type : "a", key : "3" }, defer err, value
135 | @T.assert not(value?), "No value should be found for a:3"
136 | @T.assert (err? and err instanceof E.NotFoundError), "NotFound for 'a:3'"
137 | await @obj.resolve { type : "a", name : "name-a3" }, defer err, key
138 | if @obj.can_unlink()
139 | @T.assert not(key?), "cannot resolve name 'name-a3'"
140 | @T.assert (err? and err instanceof E.LookupNotFoundError), "right lookup error"
141 | else
142 | @T.no_error err
143 | @T.equal key, "3", "still is there as a dangling pointer"
144 | @T.waypoint "remove"
145 | cb()
146 |
147 | unlink_all : (cb) ->
148 | await @obj.link { type : "b", key : "2", name : "cat" }, defer err
149 | @T.no_error err
150 | await @obj.link { type : "b", key : "2", name : "dog" }, defer err
151 | @T.no_error err
152 | if @obj.can_unlink()
153 | await @obj.unlink_all { type : "b", key : "cat" }, defer err
154 | @T.assert (err? and err instanceof E.LookupNotFoundError), "can't unlink what's not there"
155 | await @obj.unlink_all { type : "b", key : "2" }, defer err
156 | @T.no_error err
157 | cb()
158 |
159 | #========================================================
160 |
161 | test_store = ({T,klass},cb) ->
162 | tester = new Tester { T, klass }
163 | await tester.test defer()
164 | cb()
165 |
166 | exports.test_flat_memory = (T,cb) ->
167 | await test_store { T, klass : FlatMemory }, defer()
168 | cb()
169 |
170 | exports.test_memory = (T,cb) ->
171 | await test_store { T, klass : Memory }, defer()
172 | cb()
173 |
174 | exports.test_base = (T,cb) ->
175 | abstracts = [
176 | "open", "nuke", "close", "_unlink", "_unlink_all",
177 | "_remove", "_put", "_get", "_resolve", "_link"
178 | ]
179 | b = new Base {}
180 | for method in abstracts
181 | await b[method] {}, defer err
182 | T.assert (err? and err instanceof E.UnimplementedError), "method #{method} failed"
183 | cb()
184 |
--------------------------------------------------------------------------------
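As a quick orientation to the kvstore API exercised by the Tester class above, here is a minimal sketch using the in-memory backend. The require path and the sample record are illustrative only; the method names and argument shapes follow the calls made in the tests.

{make_esc} = require 'iced-error'
top = require '../..'  # illustrative path; see lib/kvstore.js
{Memory} = top.kvstore

kvstore_demo = (cb) ->
  esc = make_esc cb, "kvstore_demo"
  store = new Memory()
  await store.open {}, esc defer()
  # A record has a type, a primary key, a value, and an optional name that
  # can be resolved back to the key independently.
  await store.put { type : "a", key : "1", value : "a1", name : "name-a1" }, esc defer()
  await store.get { type : "a", key : "1" }, esc defer value            # "a1"
  await store.lookup { type : "a", name : "name-a1" }, esc defer value  # "a1"
  await store.close {}, esc defer()
  cb null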
/test/files/33_assertion.iced:
--------------------------------------------------------------------------------
1 |
2 | {URI,parse,Proof,ProofSet} = require('../../').assertion
3 |
4 | expr = null
5 |
6 | exports.parse_0 = (T,cb) ->
7 | expr = parse "reddit://a && twitter://bb"
8 | cb()
9 |
10 | exports.match_0 = (T,cb) ->
11 | ps = new ProofSet [(new Proof { key : "reddit", value : "a" })]
12 | T.assert not expr.match_set(ps), "shouldn't match"
13 | cb()
14 |
15 | exports.parse_1 = (T,cb) ->
16 | expr = parse """
17 | web://foo.io || (reddit://a && twitter://bbbbb && fingerprint://aabbcc)
18 | """
19 | T.equal expr.toString(), '(web://foo.io || ((reddit://a && twitter://bbbbb) && fingerprint://aabbcc))'
20 | cb()
21 |
22 | exports.match_1 = (T,cb) ->
23 | ps = new ProofSet [(new Proof { key : "https", value : "foo.io" }) ]
24 | T.assert expr.match_set(ps), "first one matched"
25 | ps = new ProofSet [(new Proof { key : "https", value : "foob.io" }) ]
26 | T.assert not(expr.match_set(ps)), "second didn't"
27 | ps = new ProofSet [
28 | (new Proof { key : "reddit", value : "a" })
29 | (new Proof { key : "twitter", value : "bbbbb" })
30 | (new Proof { key : "fingerprint", value : "001122aabbcc" })
31 | ]
32 | T.assert expr.match_set(ps), "third one matched"
33 | ps = new ProofSet [
34 | (new Proof { key : "reddit", value : "a" })
35 | (new Proof { key : "fingerprint", value : "001122aabbcc" })
36 | ]
37 | T.assert not expr.match_set(ps), "fourth one didn't"
38 | ps = new ProofSet [
39 | (new Proof { key : "reddit", value : "a" })
40 | (new Proof { key : "twitter", value : "bbbbb" })
41 | (new Proof { key : "fingerprint", value : "aabbcc4" })
42 | ]
43 | T.assert not expr.match_set(ps), "fifth didn't"
44 | cb()
45 |
46 | exports.parse_2 = (T,cb) ->
47 | expr = parse "http://foo.com"
48 | ps = new ProofSet [(new Proof { key : 'http', value : 'foo.com'})]
49 | T.assert expr.match_set(ps), "first one matched"
50 | ps = new ProofSet [(new Proof { key : 'https', value : 'foo.com'})]
51 | T.assert expr.match_set(ps), "second one matched"
52 | ps = new ProofSet [(new Proof { key : 'dns', value : 'foo.com'})]
53 |   T.assert not expr.match_set(ps), "third didn't"
54 | cb()
55 |
56 | exports.parse_bad_1 = (T,cb) ->
57 | bads = [
58 | "reddit"
59 | "reddit://"
60 | "reddit://aa ||"
61 | "reddit://aa &&"
62 | "reddit:// && ()"
63 | "fingerprint://aaXXxx"
64 | "dns://shoot"
65 | "http://nothing"
66 | "foo://bar"
67 | "keybase://ok || dns://fine.io || (twitter://still_good || bad://one)"
68 | ]
69 | for bad in bads
70 | try
71 | parse bad
72 | T.assert false, "#{bad}: shouldn't have parsed"
73 | catch error
74 | T.assert error?, "we got an error"
75 | cb()
76 |
77 | exports.parse_URI = (T,cb) ->
78 | r = URI.parse { s : "max", strict : false }
79 | T.assert r?, "got something back without strict mode"
80 | try
81 | URI.parse { s : "max", strict : true }
82 | T.assert false, "should not have worked with strict mode"
83 | catch err
84 | T.assert err?, "error on strict mode without key"
85 | cb()
86 |
87 |
--------------------------------------------------------------------------------
/test/run.iced:
--------------------------------------------------------------------------------
1 | require('iced-test').main { mainfile : __filename }
2 |
--------------------------------------------------------------------------------