├── BACKERS-2.md ├── BACKERS.md ├── LICENSE ├── README.md ├── SPONSORS.md ├── art ├── architecture.svg └── jsgit-sticker-sheet.svg ├── doc ├── lib │ ├── config-codec.md │ ├── deflate.md │ ├── inflate-stream.md │ ├── inflate.md │ ├── object-codec.md │ ├── pack-codec.md │ └── readme.md ├── mixins │ ├── fs-db.md │ ├── mem-db.md │ ├── pack-ops.md │ └── readme.md └── readme.md ├── lib ├── apply-delta.js ├── config-codec.js ├── defer.js ├── deflate.js ├── find-common.js ├── git-fs.js ├── inflate-stream.js ├── inflate.js ├── modes.js ├── object-codec.js ├── pack-codec.js ├── pkt-line.js └── wrap-handler.js ├── mixins ├── add-cache.js ├── create-tree.js ├── delay.js ├── fall-through.js ├── formats.js ├── fs-db.js ├── indexed-db.js ├── mem-cache.js ├── mem-db.js ├── pack-ops.js ├── path-to-entry.js ├── read-combiner.js ├── sync.js ├── walkers.js └── websql-db.js ├── net ├── git-fetch-pack.js ├── request-xhr.js ├── tcp-chrome-sockets.js ├── tcp-node.js ├── tcp-ws-proxy.js ├── transport-http.js └── transport-tcp.js ├── package.json └── test ├── run.js ├── sample-pack.js ├── test-config-codec.js ├── test-mem-db.js ├── test-object-codec.js ├── test-pack-codec.js ├── test-pack-ops.js └── test-zlib.js /BACKERS-2.md: -------------------------------------------------------------------------------- 1 | # BountySource Backers 2 | 3 | After the successful KickStarter, I decided to do a second fundraiser so that I could continue working on js-git. This was done as a [bountysource fundraiser][]. These are the people that contributed there. (Many were repeat contributers) 4 | 5 | ## Innovation Enabler 6 | 7 | > At this level of support, you are truly enabling open development. I assume you are backing on behalf of a company who can use js-git. I'll send you a small pack of stickers and a few t-shirts. 8 | > 9 | >Also, for being so awesome, I can fly out to visit your team to discuss how to integrate js-git once it's usable (US only unless you help cover costs). 
10 | 11 | - Mozilla 12 | 13 | ## Heavy Backer 14 | 15 | > For committing to back a substantial amount of the project, I'll send you stickers, tshirts. 16 | > 17 | > Also I'll list you in BACKERS-2.md with your name, url, and short blurb. 18 | 19 | - Adobe 20 | 21 | ## Supporter 22 | 23 | > For backing this project we'll send you a js-git laptop sticker so you can show your support to the world. 24 | > 25 | >Also you will be listed in BACKERS-2.md for all history to see. 26 | 27 | - jden 28 | - othiym23 29 | - chrisjpowers 30 | - JohnSz 31 | - sindresorhus 32 | - aeby 33 | - maks 34 | - julien51 35 | - mofoghlu 36 | - JPBarringer 37 | - jeffslofish 38 | 39 | ## Basic Supporter 40 | 41 | > Your name will be listed in BACKERS-2.md in the main source tree of js-git. 42 | 43 | - servergrove 44 | - bluntworks 45 | - pdillon 46 | - pizzapanther 47 | - nschneble 48 | - Ohad Assulin 49 | - oxy 50 | - lettertwo 51 | - tmcw 52 | - joeandaverde 53 | - airportyh 54 | - nathanathan 55 | - signalwerk 56 | - ripta 57 | - vaughan 58 | - neilk 59 | - mikehenrty 60 | - vardump 61 | - Peter Burns 62 | - blittle 63 | - Stefan Stoichev 64 | - amaxwell01 65 | - dannyfritz 66 | - George V. Reilly 67 | - euforic 68 | - gflarity 69 | - generalhenry 70 | - piredman 71 | - Rebecca 72 | - st-luke 73 | - asafy 74 | - alessioalex 75 | - sergi 76 | - diversario 77 | - seriema 78 | - desaintmartin 79 | - DinisCruz 80 | - gotcha 81 | - nikolay 82 | - saintedlama 83 | - begebot 84 | - jbarratt 85 | - mikaelkaron 86 | - colinscroggins 87 | - Eric Elliott 88 | - owenb 89 | - balupton 90 | - fjakobs 91 | - romainhuet 92 | - angelyordanov 93 | - cscott 94 | - ilsken 95 | 96 | ## Anonymous Supporters 97 | 98 | There were also 33 other people who didn't claim any level of reward but contributed to the fundraiser. Some as much as $500 from individuals. Thank you all for the support. 
99 | 100 | [bountysource fundraiser]: https://www.bountysource.com/fundraisers/325-js-git 101 | -------------------------------------------------------------------------------- /BACKERS.md: -------------------------------------------------------------------------------- 1 | # Kickstarter Backers 2 | 3 | Originally JS-Git started at a [kickstarter project][]. This was to enable me to spend the time required to get the project off the ground. These are the people who contributed to this fund raiser and wanted to be listed at backers. 4 | 5 | ## Deep Backer 6 | 7 | > You really wish this project existed and probably derive some sort of commercial value out of its existence. If you're within the continental United States. I'll come to your company and spend a day helping you integrate the library into your project. Your name, url, and short blurb will be in the BACKERS.md file for all history to see. 8 | 9 | - Mozilla 10 | 11 | ## Open Web Warrior 12 | 13 | > You believe in enabling the web platform and put your money where your mouth is. To reward your generosity, I'll give personal assistance to you or your company integrating the library into your project. This can be up to an hour of video chat or several email/chat sessions. You will get a sheet of laptop stickers and your name, and optional url will be in the BACKERS.md file for all history to see. 14 | 15 | - Michael Bradley 16 | - Scott González 17 | - Paolo Fragomeni 18 | - nearForm 19 | - Dav Glass 20 | - For Journalism 21 | - Jason Walsh 22 | - Meryn Stol 23 | - P77B Inc. 
24 | - Boris Bokowski 25 | - ProjectLocker 26 | - Frank Gerhardt 27 | - Mathieu Lorber 28 | - Stefan Poggenpohl 29 | - Dj Gilcrease http://www.kanbansolutions.com/ 30 | - John Szwaronek 31 | - Pace Willisson 32 | - Oni Labs 33 | - O'Reilly Media 34 | - Peter Harkins 35 | - http://durandaljs.com/ 36 | - http://www.linkedin.com/pub/iain-cooke/5/226/64a/ 37 | - Reuben Katz 38 | 39 | ## Code Backer 40 | 41 | > Chris decided to back with code and had been immensely helpful in getting this project started. I sent him a pack of stickers as reward. 42 | 43 | - Chris Dickinson 44 | 45 | ## Believer 46 | 47 | > You believe this project is important and want to enable me to create it. I'll send you a vinyl laptop sticker with the awesome JSGit logo on it so you can show off your support. Your name will be in the BACKERS.md file for all history to see. 48 | 49 | - Luke Arduini 50 | - garza 51 | - Maciej Małecki 52 | - LJHarb (Jordan Harband) 53 | - Joel Hillacre 54 | - Sean Dunn 55 | - Anthony Blardo 56 | - Luke Karrys 57 | - Jonathan Lonowski 58 | - Joey Schluchter 59 | - Henry Allen-Tilford 60 | - Brett Stimmerman 61 | - natevw 62 | - Norman Jaffe 63 | - Zannalov 64 | - Brian Ford 65 | - Tom Wilson 66 | - Jonas S Almeida 67 | - Geir Gåsodden 68 | - Breck Yunits 69 | - Dmitrii Soltys 70 | - Greg Price 71 | - C. 
Scott Ananian 72 | - Donovan Sandey 73 | - John Frizelle 74 | - Sureshot Saddletrousers 75 | - Karl Tiedt 76 | - Matthew Robb 77 | - Kristofor Carle 78 | - Stubbornella 79 | - Dan Herbert 80 | - Chris Jaure (@chrisjaure) 81 | - Ben Adida 82 | - Jon Buckley 83 | - roxstyle 84 | - Endre Stølsvik 85 | - Irakli Gozalishvili 86 | - Matt Weagle 87 | - Lon Ingram 88 | - Mitch Skinner 89 | - Thomas Holloway 90 | - Peter Tillemans 91 | - John Waterson 92 | - Maksim Lin 93 | - Tom de Grunt 94 | - Timon Reinhard 95 | - Kevin Swiber 96 | - C J Silverio 97 | - Ken Heutmaker (bgok) 98 | - Joe McCann 99 | - Peter deHaan 100 | - Alex Lusco 101 | - Andreas Wenk (@awenkhh) 102 | - Todd Wolfson 103 | - Cristián Romo 104 | - Lyle Garza 105 | - Eric Laberge 106 | - David Thomas 107 | - Paul C. Cook 108 | - Craig "The Coder" Dunn 109 | - Ray Daly 110 | - Josh Marinacci 111 | - fauno 112 | - Jeff Burtoft 113 | - Patrick Roberts 114 | - Matthew Mirande 115 | - Matt Field 116 | - Dion Almaer 117 | - Murvin Lai 118 | - David E Levin 119 | - Monsur Hossain 120 | - Jesse Harlin 121 | - Mike Cooper - @mythmon 122 | - heikki 123 | - Zef Hemel 124 | - Vince Allen 125 | - Biko Tushinde 126 | - Nick Crohn 127 | - Justin Tucker @certainstrings 128 | - Kimberly Munoz 129 | - Nikolaj Ivancic 130 | - Paul Redman 131 | - Glenn Block 132 | - Owen Smith 133 | - Eugene Lazutkin 134 | - letterj - Jay Payne 135 | - Jason Campbell (@jxson) 136 | - noah peters 137 | - Pelle Wessman, @VoxPelli 138 | - Claes Magnusson, Malmö Yrkeshögskola 139 | - Yuuki Kunitake 140 | - Ady Ngom 141 | - Glenn Scott (@glennsc) 142 | - Corey Innis 143 | - Dmitry Pashkevich 144 | - Jay Pozo 145 | - Brian Gershon 146 | - Joe Feser 147 | - Tyler Breisacher 148 | - Nick Cooley @nickcooley 149 | - Hugh Kennedy 150 | - Jakub Korál 151 | - Manuel Alejandro de Brito Fontes 152 | 153 | ## Supporter 154 | 155 | > You really support this idea and want to do your part to help see it happen. 
Your name will be in the BACKERS.md file for all history to see. 156 | 157 | - Eric Knudtson 158 | - Damien Klinnert 159 | - Tom Fairfield 160 | - Jon Galloway 161 | - Ryan Joy (@atxryan) 162 | - Eran Hammer 163 | - Michael Matuzak 164 | - Joshua Barratt 165 | - Mathieu D'Amours 166 | - Duncan Kolba 167 | - Josh Roesslein 168 | - getify 169 | - Jonathan 'Wolf' Rentzsch 170 | - Zach Geis 171 | - Mirco Zeiss 172 | - Lapo Luchini - lapo@lapo.it 173 | - Jeremy Morrell 174 | - Michael Chui 175 | - Benaiah Mischenko 176 | - Aki Mimoto 177 | - rictic 178 | - Karolina Szczur 179 | - Sunil kumar Pissaye 180 | - Ben Vanik 181 | - Michael Henretty 182 | - TehShrike 183 | - Scott Shillcock 184 | - Doug Orleans 185 | - Nick Nisi 186 | - jostylr 187 | - Lukas Olson 188 | - Adam Blackburn 189 | - Michael Canfield 190 | - Azer Koçulu @4zjs 191 | - Neil Kandalgaonkar 192 | - Rob O'Dwyer 193 | - jamie brim 194 | - Michael Hausenblas 195 | - Don McCurdy 196 | - Scott MacFarlane 197 | - Alex Cox 198 | - kastner 199 | - David Robit Chenosaurus 200 | - Marko Bregant 201 | - Mariz Melo 202 | - Evan Solomon 203 | - Andrew Petersen 204 | - Ferdinand Salis-Samaden 205 | - danielkbx 206 | - Chris Alfano 207 | - Jason Demeuse 208 | - Tom Townsend 209 | - ShihChi Huang 210 | - Zellyn Hunter 211 | - ~ 212 | - Lynn Wallenstein 213 | - Sindre Sorhus 214 | - Yuriy Nemtsov 215 | - Dan Kohn 216 | - Patrick Mueller 217 | - David Leston 218 | - Larry Battle 219 | - Kevin Barry 220 | - Tomomi Imura 221 | - Kevin Vlahos 222 | - Gokce Mutlu 223 | - Niclas Hoyer 224 | - Jamie Oliver 225 | - Makis Tracend 226 | - Troy Forster 227 | - Filip Körling 228 | - Randy Sargent 229 | - Dr Mícheál Ó Foghlú, FeedHenry 230 | - Chris Christensen 231 | - Mike Preisinger 232 | - Neutron Drive 233 | - Kaleb Murphy 234 | - Kevin O'Hara 235 | - Ian Kennington Walter 236 | - John-Philip Johansson 237 | - Jan-Christoph Borchardt 238 | - Genshin Souzou K.K. [Phantom Creation Inc.] 
239 | - Michael Dandy 240 | - Neil de Carteret 241 | - Dennis Reed 242 | - Jacob Lowe 243 | - Rob Tsuk 244 | - Aaron Hans 245 | - Dilys Sun 246 | - JP Sugarbroad 247 | - Austin Appleby 248 | - vicapow 249 | - Scott Elcomb 250 | - Soldair (Ryan Day) 251 | - Nick Young 252 | - Kalvir Sandhu - @kalv 253 | - Flynn Joffray 254 | - Stijn van Schooten 255 | - Jared Barboza (@codeimpossible) 256 | - Jörn Zaefferer 257 | - Miroslav Magda 258 | - Adam Argyle 259 | - Alexandru Vladutu 260 | - Dinis Cruz , OWASP O2 Platform 261 | - Elliott B. Edwards (automatonic) 262 | - Philippe Lachaise 263 | - Per Thulin 264 | - Matt Donnelly 265 | - kewah 266 | - Greg McCarvell 267 | - Pedro Teixeira (@pgte) 268 | - Marco Rogers (@polotek) 269 | - Jacques Crocker 270 | - Matthew Podwysocki 271 | - Frantz Gauthier 272 | - Gerry Cardinal III 273 | - Hansemann 274 | - Ben "unwiredben" Combee 275 | - Geoff Bentley, NZ 276 | - David Granado 277 | - Assaf Arkin 278 | - Trevor Baker 279 | - Elliot Glaysher 280 | - Wil Moore III 281 | - Jason Persampieri 282 | - Dennis G Daniels 283 | - Mike de Boer 284 | - Jeroen Janssen 285 | - Daniel Aleksandersen 286 | - Raphael Schweikert 287 | - Nikolay Bachiyski 288 | - Dima Samodurov 289 | - tmartineau 290 | - Daniel Flower 291 | - Kevin Garnett 292 | - Jae Hess 293 | - Matt Reynolds 294 | - LSD25 295 | - Nima Gardideh (nemo) 296 | - Patrick Collins (pat@burned.com) 297 | - Michael J. 
Ryan ([@tracker1](https://github.com/tracker1)) 298 | - technoweenie 299 | - David Hayes 300 | - Meyer SciTech Solutions, LLC 301 | - Srdjan 302 | - Joseph Werle 303 | - Sebastian Mikkel Wilson (@codebudo) 304 | - Brent Knight 305 | - Nikolay Kolev 306 | - Michael Alyn Miller 307 | - Leon Noel 308 | - Dave LeCompte 309 | - Bermi Ferrer 310 | - Bernard Chen 311 | - Jyri Tuulos 312 | - fMads 313 | - Matt Kowalski 314 | - Ishan Anand 315 | - Frederick Ostrander 316 | - Ryan Riley 317 | - Dominik Wagenknecht 318 | - Yves Ineichen 319 | - Raphael Salomon 320 | - John Wu 321 | - Gergő Tisza 322 | - Chris Bearcroft (@DJCMBear) 323 | - Eric Allam 324 | - David Souther 325 | - Rob van den Bogaard-Braaf 326 | - Anthony Scotti 327 | - de.henne (Henning Leutz) 328 | - Scott Sauyet 329 | - Trae Robbins 330 | - Jason Sallis 331 | - Andrew Maxwell 332 | - Sembiance 333 | - Christian Sullivan (@euforic) 334 | - Alan Effrig 335 | - Rob Quick 336 | - Will Blasko 337 | - ikeyasu@gmail.com 338 | - Ben Sheldon 339 | - Federico Weber 340 | - Ken Chen 341 | - Maxime Quandalle 342 | - Darwiin (Christophe R.) 
343 | - Fabrizio Codello 344 | - John Pywtorak 345 | - Danny Coates 346 | - Desmond Morris 347 | - Will Dent 348 | - Antoine Brault 349 | - Bruno Vieira, @bmpvieira 350 | - Charles Moncrief 351 | 352 | [kickstarter project]: http://www.kickstarter.com/projects/creationix/js-git 353 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2013-2014 Tim Caswell 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in 13 | all copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 21 | THE SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # JS-Git 2 | [![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/creationix/js-git?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 3 | 4 | This project is a collection of modules that helps in implementing git powered 5 | applications in JavaScript. The original purpose for this is to enable better 6 | developer tools for authoring code in restricted environments like ChromeBooks 7 | and tablets. It also enables using git as a database to replace SQL and no-SQL 8 | data stores in many applications. 9 | 10 | This project was initially funded by two crowd-sourced fundraisers. See details 11 | in [BACKERS.md](BACKERS.md) and [BACKERS-2.md](BACKERS-2.md). Thanks to all of 12 | you who made this possible! 13 | 14 | ## Usage 15 | 16 | Detailed API docs are contained in the [doc](doc) subfolder of this repository. 17 | 18 | In general the way you use js-git is you create a JS object and then mixin the 19 | functionality you need. Here is an example of creating an in-memory database, 20 | creating some objects, and then walking that tree using the high-level walker 21 | APIs. 22 | 23 | ## Creating a repo object. 24 | 25 | ```js 26 | // This provides symbolic names for the octal modes used by git trees. 27 | var modes = require('js-git/lib/modes'); 28 | 29 | // Create a repo by creating a plain object. 30 | var repo = {}; 31 | 32 | // This provides an in-memory storage backend that provides the following APIs: 33 | // - saveAs(type, value) => hash 34 | // - loadAs(type, hash) => hash 35 | // - saveRaw(hash, binary) => 36 | // - loadRaw(hash) => binary 37 | require('js-git/mixins/mem-db')(repo); 38 | 39 | // This adds a high-level API for creating multiple git objects by path. 
40 | // - createTree(entries) => hash 41 | require('js-git/mixins/create-tree')(repo); 42 | 43 | // This provides extra methods for dealing with packfile streams. 44 | // It depends on 45 | // - unpack(packStream, opts) => hashes 46 | // - pack(hashes, opts) => packStream 47 | require('js-git/mixins/pack-ops')(repo); 48 | 49 | // This adds in walker algorithms for quickly walking history or a tree. 50 | // - logWalk(ref|hash) => stream 51 | // - treeWalk(hash) => stream 52 | require('js-git/mixins/walkers')(repo); 53 | 54 | // This combines parallel requests for the same resource for efficiency under load. 55 | require('js-git/mixins/read-combiner')(repo); 56 | 57 | // This makes the object interface less strict. See its docs for details 58 | require('js-git/mixins/formats')(repo); 59 | ``` 60 | 61 | ## Generators vs Callbacks 62 | 63 | There are two control-flow styles that you can use to consume js-git APIs. All 64 | the examples here use `yield` style and assume the code is contained within a 65 | generator function that's yielding to a tool like [gen-run](https://github.com/creationix/gen-run). 66 | 67 | This style requires ES6 generators. This feature is currently in stable Firefox, 68 | in stable Chrome behind a user-configurable flag, in node.js 0.11.x or greater 69 | with a command-line flag. 70 | 71 | Also you can use generators on any ES5 platform if you use a source transform 72 | like Facebook's [regenerator](http://facebook.github.io/regenerator/) tool. 73 | 74 | You read more about how generators work at [Generators vs Fibers](http://howtonode.org/generators-vs-fibers). 75 | 76 | ```js 77 | var run = require('gen-run'); 78 | 79 | run(function*() { 80 | // Blocking logic goes here. You can use yield 81 | var result = yield someAction(withArgs); 82 | // The generator pauses at yield and resumes when the data is available. 83 | // The rest of your process is not blocked, just this generator body. 
84 | // If there was an error, it will throw into this generator. 85 | }); 86 | ``` 87 | 88 | If you can't use this new feature or just plain prefer node-style callbacks, all 89 | js-git APIs also support that. The way this works is actually quite simple. 90 | If you don't pass in the callback, the function will return a partially applied 91 | version of your call expecting just the callback. 92 | 93 | ```js 94 | someAction(withArgs, function (err, value) { 95 | if (err) return handleMyError(err); 96 | // do something with value 97 | }); 98 | 99 | // The function would be implemented to support both style like this. 100 | function someAction(arg, callback) { 101 | if (!callback) return someAction.bind(this, arg); 102 | // We now have callback and arg 103 | } 104 | ``` 105 | 106 | ## Basic Object Creation 107 | 108 | Now we have an in-memory git repo useful for testing the network operations or 109 | just getting to know the available APIs. 110 | 111 | In this example, we'll create a blob, create a tree containing that blob, create 112 | a commit containing that tree. This shows how to create git objects manually. 113 | 114 | ```js 115 | // First we create a blob from a string. The `formats` mixin allows us to 116 | // use a string directly instead of having to pass in a binary buffer. 117 | var blobHash = yield repo.saveAs("blob", "Hello World\n"); 118 | 119 | // Now we create a tree that is a folder containing the blob as `greeting.txt` 120 | var treeHash = yield repo.saveAs("tree", { 121 | "greeting.txt": { mode: modes.file, hash: blobHash } 122 | }); 123 | 124 | // With that tree, we can create a commit. 125 | // Again the `formats` mixin allows us to omit details like committer, date, 126 | // and parents. It assumes sane defaults for these. 
127 | var commitHash = yield repo.saveAs("commit", { 128 | author: { 129 | name: "Tim Caswell", 130 | email: "tim@creationix.com" 131 | }, 132 | tree: treeHash, 133 | message: "Test commit\n" 134 | }); 135 | 136 | ``` 137 | 138 | ## Basic Object Loading 139 | 140 | We can read objects back one at a time using `loadAs`. 141 | 142 | ```js 143 | // Reading the file "greeting.txt" from a commit. 144 | 145 | // We first read the commit. 146 | var commit = yield repo.loadAs("commit", commitHash); 147 | // We then read the tree using `commit.tree`. 148 | var tree = yield repo.loadAs("tree", commit.tree); 149 | // We then read the file using the entry hash in the tree. 150 | var file = yield repo.loadAs("blob", tree["greeting.txt"].hash); 151 | // file is now a binary buffer. 152 | ``` 153 | 154 | When using the `formats` mixin there are two new types for `loadAs`, they are 155 | `"text"` and `"array"`. 156 | 157 | ```js 158 | // When you're sure the file contains unicode text, you can load it as text directly. 159 | var fileAsText = yield repo.loadAs("text", blobHash); 160 | 161 | // Also if you prefer array format, you can load a directory as an array. 162 | var entries = yield repo.loadAs("array", treeHash); 163 | entries.forEach(function (entry) { 164 | // entry contains {name, mode, hash} 165 | }); 166 | ``` 167 | 168 | ## Using Walkers 169 | 170 | Now that we have a repo with some minimal data in it, we can query it. Since we 171 | included the `walkers` mixin, we can walk the history as a linear stream or walk 172 | the file tree as a depth-first linear stream. 173 | 174 | ```js 175 | // Create a log stream starting at the commit we just made. 176 | // You could also use symbolic refs like `refs/heads/master` for repos that 177 | // support them. 178 | var logStream = yield repo.logWalk(commitHash); 179 | 180 | // Looping through the stream is easy by repeatedly calling waiting on `read`. 
181 | var commit, object; 182 | while (commit = yield logStream.read(), commit !== undefined) { 183 | 184 | console.log(commit); 185 | 186 | // We can also loop through all the files of each commit version. 187 | var treeStream = yield repo.treeWalk(commit.tree); 188 | while (object = yield treeStream.read(), object !== undefined) { 189 | console.log(object); 190 | } 191 | 192 | } 193 | ``` 194 | 195 | ## Filesystem Style Interface 196 | 197 | If you feel that creating a blob, then creating a tree, then creating the parent 198 | tree, etc is a lot of work to save just one file, I agree. While writing the 199 | tedit app, I discovered a nice high-level abstraction that you can mixin to make 200 | this much easier. This is the `create-tree` mixin referenced in the above 201 | config. 202 | 203 | ```js 204 | // We wish to create a tree that contains `www/index.html` and `README.me` files. 205 | // This will create these two blobs, create a tree for `www` and then create a 206 | // tree for the root containing `README.md` and the newly created `www` tree. 207 | var treeHash = yield repo.createTree({ 208 | "www/index.html": { 209 | mode: modes.file, 210 | content: "
<h1>Hello</h1>\n<p>This is an HTML page?</p>
\n" 211 | }, 212 | "README.md": { 213 | mode: modes.file, 214 | content: "# Sample repo\n\nThis is a sample\n" 215 | } 216 | }); 217 | ``` 218 | 219 | This is great for creating several files at once, but it can also be used to 220 | edit existing trees by adding new files, changing existing files, or deleting 221 | existing entries. 222 | 223 | ```js 224 | var changes = [ 225 | { 226 | path: "www/index.html" // Leaving out mode means to delete the entry. 227 | }, 228 | { 229 | path: "www/app.js", // Create a new file in the existing directory. 230 | mode: modes.file, 231 | content: "// this is a js file\n" 232 | } 233 | ]; 234 | 235 | // We need to use array form and specify the base tree hash as `base`. 236 | changes.base = treeHash; 237 | 238 | treeHash = yield repo.createTree(changes); 239 | ``` 240 | 241 | ## Creating Composite Filesystems 242 | 243 | The real fun begins when you create composite filesystems using git submodules. 244 | 245 | The code that handles this is not packaged as a repo mixin since it spans several 246 | independent repos. Instead look to the [git-tree](https://github.com/creationix/git-tree) 247 | repo for the code. It's interface is still slightly unstable and undocumented 248 | but is used in production by tedit and my node hosting service that complements tedit. 249 | 250 | Basically this module allows you to perform high-level filesystem style commands 251 | on a virtual filesystem that consists of many js-git repos. Until there are 252 | proper docs, you can see how tedit uses it at . 253 | 254 | ## Mounting Github Repos 255 | 256 | I've been asking Github to enable CORS headers to their HTTPS git servers, but 257 | they've refused to do it. This means that a browser can never clone from github 258 | because the browser will disallow XHR requests to the domain. 259 | 260 | They do, however, offer a REST interface to the raw [git data](https://developer.github.com/v3/git/). 
261 | 262 | Using this I wrote a mixin for js-git that uses github *as* the backend store. 263 | 264 | Code at . Usage in tedit can be seen at 265 | . 266 | -------------------------------------------------------------------------------- /SPONSORS.md: -------------------------------------------------------------------------------- 1 | # Sponsored Development 2 | 3 | As a company, you can sponsor development of specific features to the js-git ecosystem. 4 | 5 | ## In Progress Sponsored Features 6 | 7 | - JS-Git - Encrypted Filesystem - Anonymous 8 | - Tedit - Web Runtime - Anonymous 9 | 10 | ## Completed Sponsored Features 11 | 12 | - Tedit - Live Export to VFS - Anonymous 13 | -------------------------------------------------------------------------------- /art/architecture.svg: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 18 | 20 | 42 | 44 | 45 | 47 | image/svg+xml 48 | 50 | 51 | 52 | 53 | 54 | 59 | js-git 70 | 73 | git-node-platform 84 | git-mozapp-platform 95 | git-chromeapp-platform 106 | 107 | 110 | git-cli 121 | git-browser 132 | 133 | Platform Implementations 145 | 148 | git-fs-db 159 | git-tcp-proto 170 | git-http-proto 181 | git-ssh-proto 192 | git-proto 203 | 204 | Interface Adapters 216 | Projects 228 | Core 240 | Public Interfaces 252 | 254 | db 265 | proto 276 | 277 | trace 288 | inflate 299 | deflate 310 | tcp 321 | http 332 | ssh 343 | fs 354 | sha1 365 | bops 376 | Sub Interfaces 388 | 389 | 390 | -------------------------------------------------------------------------------- /doc/lib/config-codec.md: -------------------------------------------------------------------------------- 1 | # Config Codec 2 | 3 | This module implements a codec for reading and writing git config files (this 4 | includes the .gitmodules file). As far as I can tell, this is a variant of 5 | the INI format. 6 | 7 | ## codec.decode(ini) -> config 8 | 9 | Given the text of the config file, return the data as an object. 
10 | 11 | The following config: 12 | 13 | ```ini 14 | [user] 15 | name = Tim Caswell 16 | email = tim@creationix.com 17 | [color] 18 | ui = true 19 | [color "branch"] 20 | current = yellow bold 21 | local = green bold 22 | remote = cyan bold 23 | ``` 24 | 25 | Will parse to this js object 26 | 27 | ```js 28 | { 29 | user: { 30 | name: "Tim Caswell", 31 | email: "tim@creationix.com" 32 | }, 33 | color: { 34 | ui: "true", 35 | branch: { 36 | current: "yellow bold", 37 | local: "green bold", 38 | remote: "cyan bold" 39 | } 40 | } 41 | } 42 | ``` 43 | 44 | ## codec.encode(config) -> ini 45 | 46 | This reverses the conversion and writes a string from a config object. -------------------------------------------------------------------------------- /doc/lib/deflate.md: -------------------------------------------------------------------------------- 1 | # Deflate 2 | 3 | This module implements a simple interface that when normal given data, returns the deflated version in a callback. This wraps the pako dependency. 4 | 5 | ## deflate(inflated) => deflated 6 | 7 | ```js 8 | var deflate = require('js-git/lib/deflate'); 9 | 10 | var deflated = deflate(original); 11 | ``` 12 | -------------------------------------------------------------------------------- /doc/lib/inflate-stream.md: -------------------------------------------------------------------------------- 1 | # Inflate Stream 2 | 3 | This module implements zlib inflate by hand with a special streaming interface. 4 | This is used in js-git to inflate git object fragments in a pack-stream. 5 | 6 | ## inflateStream(onEmit, onUnused) -> onInput 7 | 8 | ```js 9 | var onInput = inflateStream(onEmit, onUnused); 10 | 11 | someStream.on("data", function (chunk) { 12 | onInput(null, chunk); 13 | }); 14 | 15 | function onEmit(err, out) { 16 | if (err) throw err; 17 | // out is a chunk of inflated data 18 | } 19 | 20 | function onUnused(chunks) { 21 | // chunks is an array of extra buffers or buffer slices. 
22 | } 23 | ``` 24 | -------------------------------------------------------------------------------- /doc/lib/inflate.md: -------------------------------------------------------------------------------- 1 | # Inflate 2 | 3 | This module implements a simple interface that when given deflated data returns the inflated version. 4 | 5 | ## inflate(deflated) -> inflated 6 | 7 | ```js 8 | var inflate = require('js-git/lib/inflate'); 9 | 10 | var inflated = inflate(deflated); 11 | ``` 12 | -------------------------------------------------------------------------------- /doc/lib/object-codec.md: -------------------------------------------------------------------------------- 1 | # Object Codec 2 | 3 | This module implements a codec for the binary git object format for blobs, trees, tags, and commits. 4 | 5 | This library is useful for writing new storage backends. Normal users will probably 6 | just use one of the existing mixins for object storage. 7 | 8 | ## codec.frame({type,body}) -> buffer 9 | 10 | This function accepts an object with `type` and `body` properties. The `type` 11 | property must be one of "blob", "tree", "commit" or "tag". The body can be a 12 | pre-encoded raw-buffer or a plain javascript value. See encoder docs below for 13 | the formats of the different body types. 14 | 15 | The returned binary value is the fully framed git object. The sha1 of this is 16 | the git hash of the object. 17 | 18 | ```js 19 | var codec = require('js-git/lib/object-codec'); 20 | var sha1 = require('git-sha1'); 21 | 22 | var bin = codec.frame({ type: "blob", body: "Hello World\n"}); 23 | var hash = sha1(bin); 24 | ``` 25 | 26 | ## codec.deframe(buffer, decode) -> {type,body} 27 | 28 | This function accepts a binary git buffer and returns the `{type,body}` object. 29 | 30 | If `decode` is true, then the body will also be decoded into a normal javascript 31 | value. If `decode` is false or missing, then the raw-buffer will be in body. 
32 | 33 | ## codec.encoders 34 | 35 | This is an object containing 4 encoder function Each function has the signature: 36 | 37 | encode(body) -> raw-buffer 38 | 39 | Where body is the JS representation of the type and raw-buffer is the git encoded 40 | version of that value, but without the type and length framing. 41 | 42 | ```js 43 | var encoders = require('js-git/lib/object-codec').encoders; 44 | var modes = require('js-git/lib/modes'); 45 | ``` 46 | 47 | Blobs must be native binary values (Buffer in node, Uint8Array in browser). 48 | It's recommended to either use the `bodec` library to create binary values from 49 | strings directly or configure your system with the `formats` mixin that allows 50 | for unicode strings when working with blobs. 51 | 52 | ```js 53 | rawBin = encoders.blob(new Uint8Array([1,2,3,4,5,6])); 54 | rawBin = encoders.blob(bodec.fromUnicode("Hello World")); 55 | ``` 56 | 57 | Trees are objects with filename as key and object with {mode,hash} as value. 58 | The modes are integers. It's best to use the modes module to help. 59 | 60 | ```js 61 | rawBin = encoders.tree({ "greeting.txt": { 62 | mode: modes.file, 63 | hash: blobHash 64 | }}); 65 | ``` 66 | 67 | Commits are objects with required fields {tree,author,message} 68 | Also if there is a single parent, you specify it with `parent`. 69 | 70 | Since a commit can have zero or more parent commits, you specify the parent 71 | hashes via the `parents` property as an array of hashes. 72 | 73 | The `author` field is required and contains {name,email,date}. 74 | 75 | Commits also require a `committer` field with the same structure as `author`. 76 | 77 | The `date` property of `author` and `committer` is in the format {seconds,offset} 78 | Where seconds is a unix timestamp in seconds and offset is the number of minutes 79 | offset for the timezone. (Your local offset can be found with `(new Date).getTimezoneOffset()`) 80 | 81 | The `message` field is mandatory and a simple string. 
82 | 83 | ```js 84 | rawBin = encoders.commit({ 85 | tree: treeHash, 86 | author: { 87 | name: "Tim Caswell", 88 | email: "tim@creationix.com", 89 | date: { 90 | seconds: 1391790910, 91 | offset: 7 * 60 92 | } 93 | }, 94 | parents: [ parentCommitHash ], 95 | message: "This is a test commit\n" 96 | }); 97 | ``` 98 | 99 | Annotated tags are like commits, except they have different fields. 100 | 101 | ```js 102 | rawBin = encoders.tag({ 103 | object: commitHash, 104 | type: "commit", 105 | tag: "mytag", 106 | tagger: { 107 | name: "Tim Caswell", 108 | email: "tim@creationix.com", 109 | date: { 110 | seconds: 1391790910, 111 | offset: 7 * 60 112 | } 113 | }, 114 | message: "Tag it!\n" 115 | }); 116 | ``` 117 | 118 | ## codec.decoders 119 | 120 | This is just like `codec.encoders` except these functions do the opposite. 121 | They have the format: 122 | 123 | decode(raw-buffer) -> body 124 | 125 | ```js 126 | var commit = decoders.commit(rawCommitBin); 127 | ``` 128 | -------------------------------------------------------------------------------- /doc/lib/pack-codec.md: -------------------------------------------------------------------------------- 1 | # Pack Codec 2 | 3 | This module implements a codec for packfile streams used in the git network 4 | protocols as well as the on-disk packfile format. 5 | 6 | These are a sync stream transforms. It accepts an emit function and returns a 7 | write function. Both of these have the same interface. You signal `end` to the 8 | input side by writing undefined (or nothing) and when emit gets called with 9 | undefined that is `end` on the output. 10 | 11 | Since this is sync, errors are simply thrown. If you want to use this in the 12 | context of an async stream with back-pressure, it's up to the consumer to handle 13 | exceptions and write to the input at the correct rate. Basically to implement 14 | back-pressure, you only need to keep writing values to the input till enough 15 | data comes out the output. 
It's sync so by the time `write()` returns, `emit()` 16 | will have been called as many times as it ever will (without more writes). 17 | 18 | Here is an example of using the decodePack in a node push stream that ignores 19 | backpressure. 20 | 21 | ```js 22 | var decodePack = require('js-git/lib/pack-codec').decodePack; 23 | 24 | var write = decodePack(onItem); 25 | stream.on("data", write); 26 | stream.on("end", write); 27 | var meta; 28 | function onItem(item) { 29 | if (item === undefined) { 30 | // END of Stream 31 | } 32 | else if (meta === undefined) { 33 | meta = item; 34 | } 35 | else { 36 | console.log(item); 37 | } 38 | } 39 | ``` 40 | 41 | The first output is the meta object: 42 | 43 | ```js 44 | { 45 | version: 2 46 | num: num-of-objects, 47 | } 48 | ``` 49 | 50 | ## codec.decodePack(emit) -> write 51 | 52 | Input in this is the raw buffer chunks in the packstream. The chunks can be 53 | broken up at any point so this is ideal for streaming from a disk or network. 54 | 55 | 56 | Version is the git pack protocol version, and num is the number of objects that 57 | will be in this stream. 58 | 59 | All output objects after this will be raw git objects. 60 | 61 | ```js 62 | { 63 | type: type, 64 | size: buffer-size, 65 | body: raw-buffer, 66 | offset: offset-in-stream, 67 | [ref: number-or-hash] 68 | } 69 | ``` 70 | 71 | There are two extra types here that aren't seen elsewhere. They are `ofs-delta` 72 | and `ref-delta`. In both cases, these are a diff that applies on top of another 73 | object in the stream. The different is `ofs-delta` stores a number in `ref` 74 | that is the number of bytes to go back in the stream to find the base object. 75 | But `ref-delta` includes the full hash of it's base object. 76 | 77 | 78 | ## codec.encodePack(emit) -> write 79 | 80 | This is the reverse. In fact, if you fed this the output from `decodePack`, 81 | it's output should match exactly the original stream. 
82 | 83 | The objects don't need as much data as the parser outputs. In specefic, the meta 84 | object only need contain: 85 | 86 | ```js 87 | { num: num-of-objects } 88 | ``` 89 | 90 | And the items only need contain: 91 | 92 | ```js 93 | { 94 | type: type, 95 | body: raw-buffer, 96 | [ref: number-or-hash] 97 | } 98 | ``` 99 | -------------------------------------------------------------------------------- /doc/lib/readme.md: -------------------------------------------------------------------------------- 1 | # Library 2 | 3 | 4 | -------------------------------------------------------------------------------- /doc/mixins/fs-db.md: -------------------------------------------------------------------------------- 1 | 2 | # Filesystem Git Database 3 | 4 | JSGit repositories need `loadAs`, `saveAs`, `loadRaw`, `saveRaw`, `readRef`, and 5 | `updateRef` methods. 6 | Depending on the backing storage, there are various ways to implement these 7 | methods. 8 | The implementation for in-memory storage is `js-git/mixins/mem-db`, and there 9 | are variants for using Github or IndexDB for storage. 10 | 11 | The `js-git/mixins/fs-db` implementation provides these methods as well, but 12 | depends on a file system interface providing `readFile`, `readChunk`, 13 | `writeFile`, and `readDir`. 14 | These file system methods are implemented by the `git-fs-db` and 15 | `git-chrome-db` packages. 16 | 17 | For the purpose of this document, `=>` implies that the function does not block 18 | and accepts a Node.js-style callback. 19 | The arrow points to the type of the result. 20 | None of these methods need to return a continuable if the nodeback is missing. 21 | 22 | The type `binary` stands for whatever binary representation is appropriate for 23 | the underlying platform. 24 | For browsers, binary is a `Uint8Array`. 25 | For Node.js, binary is a `Buffer`. 
26 | 27 | ## readFile(path) => binary | undefined 28 | 29 | Reads the entirety of the file at the given path and produces the binary. 30 | If the file does not exist, readFile provides `undefined` instead. 31 | 32 | ## readChunk(path, start, end) => binary | undefined 33 | 34 | Reads a byte range of the file at the given path. 35 | The byte range is a half open interval, including the byte at the initial index, 36 | and excluding the byte at the terminal index, such that the end minus the start 37 | is the length of the resulting binary data. 38 | The end offset may be negative, in which case it should count back from the end 39 | of the size of the file at the path, such that the size plus the negative end is 40 | the positive end. 41 | If the file does not exist, readChunk provides `undefined` instead. 42 | 43 | ## writeFile(path, binary) => undefined 44 | 45 | Writes the given bytes to the file at the given path. 46 | The method creates any directories leading up to the path if they do not already 47 | exist. 48 | 49 | ## readDir(path) => array of names | undefined 50 | 51 | Reads the names of the entries in the directory at the given path. 52 | The names are not fully qualified paths, just the name of the entry within the 53 | given directory. 54 | -------------------------------------------------------------------------------- /doc/mixins/mem-db.md: -------------------------------------------------------------------------------- 1 | # mem-db mixin 2 | 3 | This mixin implements object store (normal and raw) and stores the data in memory. 
4 | 5 | ```js 6 | var memDb = require('js-git/mixins/mem-db'); 7 | var repo = {}; 8 | memDb(repo); 9 | repo.saveAs("blob", "Hello World", function (err, hash) { 10 | if (err) throw err; 11 | console.log("Blob saved with hash " + hash); 12 | }); 13 | ``` 14 | 15 | This attaches the following interfaces onto the repo object passed in: 16 | 17 | - `saveAs(type, body) => hash` 18 | - `loadAs(type, hash) => body` 19 | - `loadRaw(hash) => raw-binary` 20 | - `saveRaw(hash, raw-binary) =>` 21 | 22 | All these functions are async and accept either a callback last or return a continuable. 23 | 24 | ```js 25 | // Example using continuable interface from gen-run generator body. 26 | var commit = yield repo.loadAs("commit", commitHash); 27 | ``` -------------------------------------------------------------------------------- /doc/mixins/pack-ops.md: -------------------------------------------------------------------------------- 1 | # pack-ops mixin 2 | 3 | This mixin adds the ability to consume or create packfile streams. 4 | 5 | This depends on the repo already having: 6 | 7 | - `loadRaw(hash) => raw-binary` 8 | - `saveRaw(hash, raw-binary) =>` 9 | 10 | And then adds: 11 | 12 | - `unpack(stream, opts) => hashes` 13 | - `pack(hashes, opts) => stream` 14 | 15 | The streams are simple-stream format. This means they have a `.take(callback)` 16 | method for pulling items out of the stream. 
// ---- lib/apply-delta.js ----
// Apply a git binary delta (as found in packfiles) to a base buffer,
// producing the reconstructed target buffer.
var bodec = require('bodec');

module.exports = applyDelta;

// delta - binary delta buffer
// base  - the buffer the delta applies on top of
// Returns the reconstructed target; throws when the header lengths do
// not match the actual data.
function applyDelta(delta, base) {
  var pos = 0; // read cursor into delta

  // The delta header begins with the expected base length.
  if (readVarInt() !== base.length) {
    throw new Error("Base length mismatch");
  }

  // Followed by the length of the reconstructed target.
  var out = bodec.create(readVarInt());
  var outPos = 0;

  while (pos < delta.length) {
    var op = delta[pos++];
    if (op & 0x80) {
      // Copy command: bits 0x01-0x08 select base-offset bytes,
      // bits 0x10-0x40 select copy-size bytes (little-endian).
      var offset = 0;
      var size = 0;
      var shift, bit;
      for (shift = 0, bit = 0x01; bit <= 0x08; bit <<= 1, shift += 8) {
        if (op & bit) offset |= delta[pos++] << shift;
      }
      for (shift = 0, bit = 0x10; bit <= 0x40; bit <<= 1, shift += 8) {
        if (op & bit) size |= delta[pos++] << shift;
      }
      // A zero size encodes the maximum copy length of 0x10000 bytes.
      if (size === 0) size = 0x10000;
      bodec.copy(bodec.slice(base, offset, offset + size), out, outPos);
      outPos += size;
    }
    else if (op) {
      // Insert command: the opcode itself is the count of literal bytes
      // that follow in the delta stream.
      bodec.copy(bodec.slice(delta, pos, pos + op), out, outPos);
      pos += op;
      outPos += op;
    }
    else {
      throw new Error('Invalid delta opcode');
    }
  }

  if (outPos !== out.length) {
    throw new Error("Size mismatch in check");
  }

  return out;

  // Read a little-endian base-128 varint from delta, advancing the cursor.
  function readVarInt() {
    var value = 0;
    var shift = 0;
    var b;
    do {
      b = delta[pos++];
      value |= (b & 0x7f) << shift;
      shift += 7;
    } while (b & 0x80);
    return value;
  }
}

// ---- lib/config-codec.js (head) ----
"use strict";

// This is for working with git config files like .git/config and .gitmodules.
// I believe this is just INI format.
// Codec for git-style config files (.git/config, .gitmodules).
// INI-like syntax: [section] or [section "subsection"] headers followed
// by "key = value" lines; ';' and '#' introduce comment lines.
var configCodec = {
  encode: encode,
  decode: decode
};
// CommonJS export, guarded so the file also parses/loads where `module`
// is undefined (e.g. an ES-module test harness).
if (typeof module !== "undefined" && module.exports) module.exports = configCodec;

// Serialize a nested config object back to INI text.
// Top-level keys become [section]; object-valued entries one level down
// become [section "subsection"] groups.
function encode(config) {
  var lines = [];
  Object.keys(config).forEach(function (name) {
    var obj = config[name];
    var deep = {};
    var values = {};
    var hasValues = false;
    // Split plain values from nested subsection objects.
    Object.keys(obj).forEach(function (key) {
      var value = obj[key];
      if (typeof value === 'object') {
        deep[key] = value;
      }
      else {
        hasValues = true;
        values[key] = value;
      }
    });
    if (hasValues) {
      encodeBody('[' + name + ']', values);
    }

    Object.keys(deep).forEach(function (sub) {
      var child = deep[sub];
      encodeBody('[' + name + ' "' + sub + '"]', child);
    });
  });

  return lines.join("\n") + "\n";

  // Append a section header and its tab-indented key/value lines.
  function encodeBody(header, obj) {
    lines.push(header);
    Object.keys(obj).forEach(function (name) {
      lines.push("\t" + name + " = " + obj[name]);
    });
  }

}


// Parse INI text into a nested plain object.
function decode(text) {
  var config = {};
  var section;
  text.split(/[\r\n]+/).forEach(function (line) {
    var trimmed = line.trim();
    // Skip blank lines and comment lines (';' or '#'); previously a
    // comment containing '=' was mis-parsed as a key/value pair.
    if (!trimmed || trimmed[0] === ';' || trimmed[0] === '#') return;
    // Section header: [name] or [name "sub"]
    var match = line.match(/\[([^ \t"\]]+) *(?:"([^"]+)")?\]/);
    if (match) {
      section = config[match[1]] || (config[match[1]] = {});
      if (match[2]) {
        section = section[match[2]] = {};
      }
      return;
    }
    // key = value pair
    match = line.match(/([^ \t=]+)[ \t]*=[ \t]*(.+)/);
    if (match) {
      // Ignore stray values before any section header instead of
      // throwing a TypeError on the undefined section.
      if (!section) return;
      section[match[1]] = match[2];
    }
  });
  return config;
}
14 | else { 15 | timeouts = []; 16 | messageName = "zero-timeout-message"; 17 | window.addEventListener("message", handleMessage, true); 18 | 19 | module.exports = function (fn) { 20 | timeouts.push(fn); 21 | window.postMessage(messageName, "*"); 22 | }; 23 | } 24 | 25 | function handleMessage(event) { 26 | if (event.source == window && event.data == messageName) { 27 | event.stopPropagation(); 28 | if (timeouts.length > 0) { 29 | var fn = timeouts.shift(); 30 | fn(); 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /lib/deflate.js: -------------------------------------------------------------------------------- 1 | var pako = require('pako'); 2 | var Binary = require('bodec').Binary; 3 | if (Binary === Uint8Array) { 4 | module.exports = pako.deflate; 5 | } 6 | else { 7 | module.exports = function deflate(value) { 8 | return new Binary(pako.deflate(new Uint8Array(value))); 9 | }; 10 | } 11 | -------------------------------------------------------------------------------- /lib/find-common.js: -------------------------------------------------------------------------------- 1 | function oneCall(fn) { 2 | var done = false; 3 | return function () { 4 | if (done) return; 5 | done = true; 6 | return fn.apply(this, arguments); 7 | }; 8 | } 9 | 10 | module.exports = findCommon; 11 | 12 | function findCommon(repo, a, b, callback) { 13 | callback = oneCall(callback); 14 | var ahead = 0, behind = 0; 15 | var aStream, bStream; 16 | var aCommit, bCommit; 17 | 18 | if (a === b) return callback(null, ahead, behind); 19 | repo.logWalk(a, onAStream); 20 | repo.logWalk(b, onBStream); 21 | 22 | function onAStream(err, stream) { 23 | if (err) return callback(err); 24 | aStream = stream; 25 | aStream.read(onA); 26 | } 27 | 28 | function onBStream(err, stream) { 29 | if (err) return callback(err); 30 | bStream = stream; 31 | bStream.read(onB); 32 | } 33 | 34 | function onA(err, commit) { 35 | if (!commit) return callback(err || 
new Error("No common commit")); 36 | aCommit = commit; 37 | if (bCommit) compare(); 38 | } 39 | 40 | function onB(err, commit) { 41 | if (!commit) return callback(err || new Error("No common commit")); 42 | bCommit = commit; 43 | if (aCommit) compare(); 44 | } 45 | 46 | function compare() { 47 | if (aCommit.hash === bCommit.hash) return callback(null, ahead, behind); 48 | if (aCommit.author.date.seconds > bCommit.author.date.seconds) { 49 | ahead++; 50 | aStream.read(onA); 51 | } 52 | else { 53 | behind++; 54 | bStream.read(onB); 55 | } 56 | } 57 | 58 | } 59 | -------------------------------------------------------------------------------- /lib/git-fs.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var modes = require('./modes'); 4 | var defer = require('./defer'); 5 | 6 | // options.encrypt(plain) -> encrypted 7 | // options.decrypt(encrypted) -> plain 8 | // options.shouldEncrypt(path) -> boolean 9 | // options.getRootTree() => hash 10 | // options.setRootTree(hash) => 11 | module.exports = function (repo, options) { 12 | var toWrite = {}; 13 | var callbacks = []; 14 | var writing = false; 15 | 16 | return { 17 | readFile: readFile, 18 | writeFile: writeFile, 19 | readDir: readDir 20 | }; 21 | 22 | function readFile(path, callback) { 23 | if (!callback) return readFile.bind(null, path); 24 | 25 | // If there is a pending write for this path, pull from the cache. 
26 | if (toWrite[path]) return callback(null, toWrite[path]); 27 | 28 | // Otherwise read from the persistent storage 29 | options.getRootTree(onRootTree); 30 | 31 | function onRootTree(err, hash) { 32 | if (!hash) return callback(err); 33 | repo.pathToEntry(hash, path, onEntry); 34 | } 35 | 36 | function onEntry(err, entry) { 37 | if (!entry || !modes.isBlob(entry.mode)) return callback(err); 38 | 39 | repo.loadAs("blob", entry.hash, function (err, content) { 40 | if (!content) return callback(err); 41 | if (entry.mode === modes.sym) { 42 | content = options.decrypt(content); 43 | } 44 | callback(null, content); 45 | }); 46 | } 47 | } 48 | 49 | function writeFile(path, binary, callback) { 50 | if (!callback) return writeFile.bind(null, path, binary); 51 | toWrite[path] = binary; 52 | callbacks.push(callback); 53 | defer(check); 54 | } 55 | 56 | function readDir(path, callback) { 57 | if (!callback) return readDir.bind(null, path); 58 | 59 | options.getRootTree(onRootTree); 60 | 61 | function onRootTree(err, hash) { 62 | if (!hash) return callback(err); 63 | repo.pathToEntry(hash, path, onEntry); 64 | } 65 | 66 | function onEntry(err, entry) { 67 | if (!entry || entry.mode !== modes.tree) return callback(err); 68 | repo.loadAs("tree", entry.hash, onTree); 69 | } 70 | 71 | function onTree(err, tree) { 72 | if (!tree) return callback(err); 73 | callback(null, Object.keys(tree)); 74 | } 75 | } 76 | 77 | function check() { 78 | if (writing || !callbacks.length) return; 79 | writing = true; 80 | options.getRootTree(onRootTree); 81 | 82 | function onRootTree(err, hash) { 83 | if (err) return callall(err); 84 | var files = pullFiles(); 85 | if (hash) files.base = hash; 86 | repo.createTree(files, onNewTree); 87 | } 88 | 89 | function onNewTree(err, hash) { 90 | if (err) return callall(err); 91 | options.setRootTree(hash, onSaveRoot); 92 | } 93 | 94 | function onSaveRoot(err) { 95 | if (err) return callall(err); 96 | writing = false; 97 | callall(); 98 | defer(check); 99 | 
} 100 | } 101 | 102 | function pullFiles() { 103 | var files = Object.keys(toWrite).map(function (path) { 104 | var content = toWrite[path]; 105 | delete toWrite[path]; 106 | var mode = modes.blob; 107 | if (options.shouldEncrypt && options.shouldEncrypt(path)) { 108 | mode = modes.sym; 109 | content = options.encrypt(content); 110 | } 111 | return { 112 | path: path, 113 | mode: mode, 114 | content: content 115 | }; 116 | }); 117 | return files; 118 | } 119 | 120 | function callall(err) { 121 | callbacks.splice(0, callbacks.length).forEach(function (callback) { 122 | callback(err); 123 | }); 124 | } 125 | }; 126 | -------------------------------------------------------------------------------- /lib/inflate-stream.js: -------------------------------------------------------------------------------- 1 | var Inflate = require('pako').Inflate; 2 | var Binary = require('bodec').Binary; 3 | 4 | // Byte oriented inflate stream. Wrapper for pako's Inflate. 5 | // 6 | // var inf = inflate(); 7 | // inf.write(byte) -> more - Write a byte to inflate's state-machine. 8 | // Returns true if more data is expected. 9 | // inf.recycle() - Reset the internal state machine. 10 | // inf.flush() -> data - Flush the output as a binary buffer. 11 | // 12 | module.exports = function inflateStream() { 13 | var inf = new Inflate(); 14 | var b = new Uint8Array(1); 15 | var empty = new Binary(0); 16 | 17 | return { 18 | write: write, 19 | recycle: recycle, 20 | flush: Binary === Uint8Array ? flush : flushConvert 21 | }; 22 | 23 | function write(byte) { 24 | b[0] = byte; 25 | inf.push(b); 26 | return !inf.ended; 27 | } 28 | 29 | function recycle() { inf = new Inflate(); } 30 | 31 | function flush() { return inf.result || empty; } 32 | 33 | function flushConvert() { 34 | return inf.result ? 
new Binary(inf.result) : empty; 35 | } 36 | }; 37 | -------------------------------------------------------------------------------- /lib/inflate.js: -------------------------------------------------------------------------------- 1 | var pako = require('pako'); 2 | var Binary = require('bodec').Binary; 3 | if (Binary === Uint8Array) { 4 | module.exports = pako.inflate; 5 | } 6 | else { 7 | module.exports = function inflate(value) { 8 | return new Binary(pako.inflate(new Uint8Array(value))); 9 | }; 10 | } 11 | -------------------------------------------------------------------------------- /lib/modes.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var masks = { 4 | mask: parseInt('100000', 8), 5 | blob: parseInt('140000', 8), 6 | file: parseInt('160000', 8) 7 | }; 8 | 9 | var modes = module.exports = { 10 | isBlob: function (mode) { 11 | return (mode & masks.blob) === masks.mask; 12 | }, 13 | isFile: function (mode) { 14 | return (mode & masks.file) === masks.mask; 15 | }, 16 | toType: function (mode) { 17 | if (mode === modes.commit) return "commit"; 18 | if (mode === modes.tree) return "tree"; 19 | if ((mode & masks.blob) === masks.mask) return "blob"; 20 | return "unknown"; 21 | }, 22 | tree: parseInt( '40000', 8), 23 | blob: parseInt('100644', 8), 24 | file: parseInt('100644', 8), 25 | exec: parseInt('100755', 8), 26 | sym: parseInt('120000', 8), 27 | commit: parseInt('160000', 8) 28 | }; 29 | -------------------------------------------------------------------------------- /lib/object-codec.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var bodec = require('bodec'); 3 | var modes = require('./modes'); 4 | 5 | // (body) -> raw-buffer 6 | var encoders = exports.encoders = { 7 | blob: encodeBlob, 8 | tree: encodeTree, 9 | commit: encodeCommit, 10 | tag: encodeTag 11 | }; 12 | 13 | // ({type:type, body:raw-buffer}) -> buffer 14 | exports.frame 
// (raw-buffer) -> body
var decoders = exports.decoders = {
  blob: decodeBlob,
  tree: decodeTree,
  commit: decodeCommit,
  tag: decodeTag
};

// (buffer) -> {type:type, body:raw-buffer}
exports.deframe = deframe;

// Export git style path sort in case it's wanted.
exports.treeMap = treeMap;
exports.treeSort = treeSort;

// Blobs pass through unchanged; they must already be binary.
function encodeBlob(body) {
  if (!bodec.isBinary(body)) throw new TypeError("Blobs must be binary values");
  return body;
}

// Map callback (invoked with the tree object as `this`) turning a
// filename key into a {name, mode, hash} entry record.
function treeMap(key) {
  /*jshint validthis:true*/
  var entry = this[key];
  return {
    name: key,
    mode: entry.mode,
    hash: entry.hash
  };
}

// Git sorts tree entries as if directory names had a trailing slash.
function treeSort(a, b) {
  var aa = (a.mode === modes.tree) ? a.name + "/" : a.name;
  var bb = (b.mode === modes.tree) ? b.name + "/" : b.name;
  return aa > bb ? 1 : aa < bb ? -1 : 0;
}

// Encode a {name: {mode,hash}} map as a raw git tree body.
function encodeTree(body) {
  var tree = "";
  if (Array.isArray(body)) throw new TypeError("Tree must be in object form");
  var list = Object.keys(body).map(treeMap, body).sort(treeSort);
  for (var i = 0, l = list.length; i < l; i++) {
    var entry = list[i];
    tree += entry.mode.toString(8) + " " + bodec.encodeUtf8(entry.name) +
      "\0" + bodec.decodeHex(entry.hash);
  }
  return bodec.fromRaw(tree);
}

// Encode an annotated tag body ({object,type,tag,tagger,message}).
function encodeTag(body) {
  var str = "object " + body.object +
    "\ntype " + body.type +
    "\ntag " + body.tag +
    "\ntagger " + formatPerson(body.tagger) +
    "\n\n" + body.message;
  return bodec.fromUnicode(str);
}

// Encode a commit body ({tree,author,committer,message} plus parents).
// Accepts `parents` as an array, the documented single `parent` hash,
// or neither for a root commit; previously a missing `parents` array
// crashed with a TypeError and `parent` was silently ignored.
function encodeCommit(body) {
  var str = "tree " + body.tree;
  var parents = body.parents || (body.parent ? [body.parent] : []);
  for (var i = 0, l = parents.length; i < l; ++i) {
    str += "\nparent " + parents[i];
  }
  str += "\nauthor " + formatPerson(body.author) +
    "\ncommitter " + formatPerson(body.committer) +
    "\n\n" + body.message;
  return bodec.fromUnicode(str);
}
// Render a {name,email,date} object as "Name <email> UNIXTIME +HHMM".
function formatPerson(person) {
  return safe(person.name) +
    " <" + safe(person.email) + "> " +
    formatDate(person.date);
}

// Strip characters git forbids in names/emails: leading/trailing
// punctuation plus any NUL, newline, or angle bracket.
function safe(string) {
  return string.replace(/(?:^[\.,:;<>"']+|[\0\n<>]+|[\.,:;<>"']+$)/gm, "");
}

// Zero-pad a number to two digits.
function two(num) {
  return (num < 10 ? "0" : "") + num;
}

// Format a date as git's "seconds offset" pair, e.g. "1391790910 -0700".
// Accepts {seconds,offset} (offset in minutes, positive = west of UTC,
// same convention as Date.prototype.getTimezoneOffset) or a Date.
function formatDate(date) {
  var seconds, offset;
  // Use an explicit undefined check so a legitimate epoch timestamp
  // (seconds === 0) is not mistaken for a Date instance and crash.
  if (date.seconds !== undefined) {
    seconds = date.seconds;
    offset = date.offset;
  }
  // Also accept Date instances
  else {
    seconds = Math.floor(date.getTime() / 1000);
    offset = date.getTimezoneOffset();
  }
  var neg = "+";
  if (offset <= 0) offset = -offset;
  else neg = "-";
  offset = neg + two(Math.floor(offset / 60)) + two(offset % 60);
  return seconds + " " + offset;
}

// Wrap a raw object body with git's "<type> <length>\0" header framing.
// Encodes the body first when it is not already binary.
function frame(obj) {
  var type = obj.type;
  var body = obj.body;
  if (!bodec.isBinary(body)) body = encoders[type](body);
  return bodec.join([
    bodec.fromRaw(type + " " + body.length + "\0"),
    body
  ]);
}

// Blobs are already raw binary; decoding is the identity.
function decodeBlob(body) {
  return body;
}

// Parse a raw tree body into a {name: {mode,hash}} map.
// Each entry is "<octal mode> <name>\0<20-byte raw sha1>".
function decodeTree(body) {
  var i = 0;
  var length = body.length;
  var start;
  var mode;
  var name;
  var hash;
  var tree = {};
  while (i < length) {
    start = i;
    i = indexOf(body, 0x20, start);
    if (i < 0) throw new SyntaxError("Missing space");
    mode = parseOct(body, start, i++);
    start = i;
    i = indexOf(body, 0x00, start);
    name = bodec.toUnicode(body, start, i++);
    hash = bodec.toHex(body, i, i += 20);
    tree[name] = {
      mode: mode,
      hash: hash
    };
  }
  return tree;
}
committer: "", 166 | message: "" 167 | }; 168 | while (body[i] !== 0x0a) { 169 | start = i; 170 | i = indexOf(body, 0x20, start); 171 | if (i < 0) throw new SyntaxError("Missing space"); 172 | key = bodec.toRaw(body, start, i++); 173 | start = i; 174 | i = indexOf(body, 0x0a, start); 175 | if (i < 0) throw new SyntaxError("Missing linefeed"); 176 | var value = bodec.toUnicode(body, start, i++); 177 | if (key === "parent") { 178 | parents.push(value); 179 | } 180 | else { 181 | if (key === "author" || key === "committer") { 182 | value = decodePerson(value); 183 | } 184 | commit[key] = value; 185 | } 186 | } 187 | i++; 188 | commit.message = bodec.toUnicode(body, i, body.length); 189 | return commit; 190 | } 191 | 192 | function decodeTag(body) { 193 | var i = 0; 194 | var start; 195 | var key; 196 | var tag = {}; 197 | while (body[i] !== 0x0a) { 198 | start = i; 199 | i = indexOf(body, 0x20, start); 200 | if (i < 0) throw new SyntaxError("Missing space"); 201 | key = bodec.toRaw(body, start, i++); 202 | start = i; 203 | i = indexOf(body, 0x0a, start); 204 | if (i < 0) throw new SyntaxError("Missing linefeed"); 205 | var value = bodec.toUnicode(body, start, i++); 206 | if (key === "tagger") value = decodePerson(value); 207 | tag[key] = value; 208 | } 209 | i++; 210 | tag.message = bodec.toUnicode(body, i, body.length); 211 | return tag; 212 | } 213 | 214 | function decodePerson(string) { 215 | var match = string.match(/^([^<]*) <([^>]*)> ([^ ]*) (.*)$/); 216 | if (!match) throw new Error("Improperly formatted person string"); 217 | return { 218 | name: match[1], 219 | email: match[2], 220 | date: { 221 | seconds: parseInt(match[3], 10), 222 | offset: parseInt(match[4], 10) / 100 * -60 223 | } 224 | }; 225 | } 226 | 227 | function deframe(buffer, decode) { 228 | var space = indexOf(buffer, 0x20); 229 | if (space < 0) throw new Error("Invalid git object buffer"); 230 | var nil = indexOf(buffer, 0x00, space); 231 | if (nil < 0) throw new Error("Invalid git object 
// Find the index of `byte` in `buffer` at or after index `i`.
// Returns -1 when the byte is not found.
function indexOf(buffer, byte, i) {
  i |= 0; // default the start index to zero when omitted
  var length = buffer.length;
  for (;;i++) {
    if (i >= length) return -1;
    if (buffer[i] === byte) return i;
  }
}

// Parse ASCII octal digits in buffer[start..end) into a number.
// Throws SyntaxError on non-octal bytes instead of silently producing
// a corrupt value (matching the SyntaxErrors thrown by the decoders).
function parseOct(buffer, start, end) {
  var val = 0;
  while (start < end) {
    var digit = buffer[start++] - 0x30;
    if (digit < 0 || digit > 7) throw new SyntaxError("Invalid octal digit");
    val = (val << 3) + digit;
  }
  return val;
}

// Parse ASCII decimal digits in buffer[start..end) into a number.
// Throws SyntaxError on non-decimal bytes instead of silently producing
// a corrupt value.
function parseDec(buffer, start, end) {
  var val = 0;
  while (start < end) {
    var digit = buffer[start++] - 0x30;
    if (digit < 0 || digit > 9) throw new SyntaxError("Invalid decimal digit");
    val = val * 10 + digit;
  }
  return val;
}
offset, offset += 20)); 36 | } 37 | else if (type === "ofs-delta") { 38 | byte = chunk[offset++]; 39 | ref = byte & 0x7f; 40 | while (byte & 0x80) { 41 | byte = chunk[offset++]; 42 | ref = ((ref + 1) << 7) | (byte & 0x7f); 43 | } 44 | } 45 | 46 | var body = inflate(bodec.slice(chunk, offset)); 47 | if (body.length !== size) { 48 | throw new Error("Size mismatch"); 49 | } 50 | var result = { 51 | type: type, 52 | body: body 53 | }; 54 | if (typeof ref !== "undefined") { 55 | result.ref = ref; 56 | } 57 | return result; 58 | } 59 | 60 | 61 | exports.decodePack = decodePack; 62 | function decodePack(emit) { 63 | 64 | var state = $pack; 65 | var sha1sum = sha1(); 66 | var inf = inflateStream(); 67 | 68 | var offset = 0; 69 | var position = 0; 70 | var version = 0x4b434150; // PACK reversed 71 | var num = 0; 72 | var type = 0; 73 | var length = 0; 74 | var ref = null; 75 | var checksum = ""; 76 | var start = 0; 77 | var parts = []; 78 | 79 | 80 | return function (chunk) { 81 | if (chunk === undefined) { 82 | if (num || checksum.length < 40) throw new Error("Unexpected end of input stream"); 83 | return emit(); 84 | } 85 | 86 | for (var i = 0, l = chunk.length; i < l; i++) { 87 | // console.log([state, i, chunk[i].toString(16)]); 88 | if (!state) throw new Error("Unexpected extra bytes: " + bodec.slice(chunk, i)); 89 | state = state(chunk[i], i, chunk); 90 | position++; 91 | } 92 | if (!state) return; 93 | if (state !== $checksum) sha1sum.update(chunk); 94 | var buff = inf.flush(); 95 | if (buff.length) { 96 | parts.push(buff); 97 | } 98 | }; 99 | 100 | // The first four bytes in a packfile are the bytes 'PACK' 101 | function $pack(byte) { 102 | if ((version & 0xff) === byte) { 103 | version >>>= 8; 104 | return version ? $pack : $version; 105 | } 106 | throw new Error("Invalid packfile header"); 107 | } 108 | 109 | // The version is stored as an unsigned 32 integer in network byte order. 110 | // It must be version 2 or 3. 
111 | function $version(byte) { 112 | version = (version << 8) | byte; 113 | if (++offset < 4) return $version; 114 | if (version >= 2 && version <= 3) { 115 | offset = 0; 116 | return $num; 117 | } 118 | throw new Error("Invalid version number " + num); 119 | } 120 | 121 | // The number of objects in this packfile is also stored as an unsigned 32 bit int. 122 | function $num(byte) { 123 | num = (num << 8) | byte; 124 | if (++offset < 4) return $num; 125 | offset = 0; 126 | emit({version: version, num: num}); 127 | return $header; 128 | } 129 | 130 | // n-byte type and length (3-bit type, (n-1)*7+4-bit length) 131 | // CTTTSSSS 132 | // C is continue bit, TTT is type, S+ is length 133 | function $header(byte) { 134 | if (start === 0) start = position; 135 | type = byte >> 4 & 0x07; 136 | length = byte & 0x0f; 137 | if (byte & 0x80) { 138 | offset = 4; 139 | return $header2; 140 | } 141 | return afterHeader(); 142 | } 143 | 144 | // Second state in the same header parsing. 145 | // CSSSSSSS* 146 | function $header2(byte) { 147 | length |= (byte & 0x7f) << offset; 148 | if (byte & 0x80) { 149 | offset += 7; 150 | return $header2; 151 | } 152 | return afterHeader(); 153 | } 154 | 155 | // Common helper for finishing tiny and normal headers. 
156 | function afterHeader() { 157 | offset = 0; 158 | if (type === 6) { 159 | ref = 0; 160 | return $ofsDelta; 161 | } 162 | if (type === 7) { 163 | ref = ""; 164 | return $refDelta; 165 | } 166 | // console.log({type: type,length: length}) 167 | return $body; 168 | } 169 | 170 | // Big-endian modified base 128 number encoded ref offset 171 | function $ofsDelta(byte) { 172 | ref = byte & 0x7f; 173 | if (byte & 0x80) return $ofsDelta2; 174 | return $body; 175 | } 176 | 177 | function $ofsDelta2(byte) { 178 | ref = ((ref + 1) << 7) | (byte & 0x7f); 179 | if (byte & 0x80) return $ofsDelta2; 180 | return $body; 181 | } 182 | 183 | // 20 byte raw sha1 hash for ref 184 | function $refDelta(byte) { 185 | ref += toHex(byte); 186 | if (++offset < 20) return $refDelta; 187 | return $body; 188 | } 189 | 190 | // Common helper for generating 2-character hex numbers 191 | function toHex(num) { 192 | return num < 0x10 ? "0" + num.toString(16) : num.toString(16); 193 | } 194 | 195 | // Common helper for emitting all three object shapes 196 | function emitObject() { 197 | var body = bodec.join(parts); 198 | if (body.length !== length) { 199 | throw new Error("Body length mismatch"); 200 | } 201 | var item = { 202 | type: numToType[type], 203 | size: length, 204 | body: body, 205 | offset: start 206 | }; 207 | if (ref) item.ref = ref; 208 | parts.length = 0; 209 | start = 0; 210 | offset = 0; 211 | type = 0; 212 | length = 0; 213 | ref = null; 214 | emit(item); 215 | } 216 | 217 | // Feed the deflated code to the inflate engine 218 | function $body(byte, i, chunk) { 219 | if (inf.write(byte)) return $body; 220 | var buf = inf.flush(); 221 | if (buf.length !== length) throw new Error("Length mismatch, expected " + length + " got " + buf.length); 222 | inf.recycle(); 223 | if (buf.length) { 224 | parts.push(buf); 225 | } 226 | emitObject(); 227 | // If this was all the objects, start calculating the sha1sum 228 | if (--num) return $header; 229 | sha1sum.update(bodec.slice(chunk, 0, 
i + 1)); 230 | return $checksum; 231 | } 232 | 233 | // 20 byte checksum 234 | function $checksum(byte) { 235 | checksum += toHex(byte); 236 | if (++offset < 20) return $checksum; 237 | var actual = sha1sum.digest(); 238 | if (checksum !== actual) throw new Error("Checksum mismatch: " + actual + " != " + checksum); 239 | } 240 | 241 | } 242 | 243 | 244 | exports.encodePack = encodePack; 245 | function encodePack(emit) { 246 | var sha1sum = sha1(); 247 | var left; 248 | return function (item) { 249 | if (item === undefined) { 250 | if (left !== 0) throw new Error("Some items were missing"); 251 | return emit(); 252 | } 253 | if (typeof item.num === "number") { 254 | if (left !== undefined) throw new Error("Header already sent"); 255 | left = item.num; 256 | write(packHeader(item.num)); 257 | } 258 | else if (typeof item.type === "string" && bodec.isBinary(item.body)) { 259 | // The header must be sent before items. 260 | if (typeof left !== "number") throw new Error("Headers not sent yet"); 261 | 262 | // Make sure we haven't sent all the items already 263 | if (!left) throw new Error("All items already sent"); 264 | 265 | // Send the item in packstream format 266 | write(packFrame(item)); 267 | 268 | // Send the checksum after the last item 269 | if (!--left) { 270 | emit(bodec.fromHex(sha1sum.digest())); 271 | } 272 | } 273 | else { 274 | throw new Error("Invalid item"); 275 | } 276 | }; 277 | function write(chunk) { 278 | sha1sum.update(chunk); 279 | emit(chunk); 280 | } 281 | } 282 | 283 | function packHeader(length) { 284 | return bodec.fromArray([ 285 | 0x50, 0x41, 0x43, 0x4b, // PACK 286 | 0, 0, 0, 2, // version 2 287 | length >> 24, // Num of objects 288 | (length >> 16) & 0xff, 289 | (length >> 8) & 0xff, 290 | length & 0xff 291 | ]); 292 | } 293 | 294 | function packFrame(item) { 295 | var length = item.body.length; 296 | 297 | // write TYPE_AND_BASE128_SIZE 298 | var head = [(typeToNum[item.type] << 4) | (length & 0xf)]; 299 | var i = 0; 300 | length >>= 
4; 301 | while (length) { 302 | head[i++] |= 0x80; 303 | head[i] = length & 0x7f; 304 | length >>= 7; 305 | } 306 | 307 | if (typeof item.ref === "number") { 308 | // write BIG_ENDIAN_MODIFIED_BASE_128_NUMBER 309 | var offset = item.ref; 310 | // Calculate how many digits we need in base 128 and move the pointer 311 | i += Math.floor(Math.log(offset) / Math.log(0x80)) + 1; 312 | // Write the last digit 313 | head[i] = offset & 0x7f; 314 | // Then write the rest 315 | while (offset >>= 7) { 316 | head[--i] = 0x80 | (--offset & 0x7f); 317 | } 318 | } 319 | 320 | var parts = [bodec.fromArray(head)]; 321 | if (typeof item.ref === "string") { 322 | parts.push(bodec.fromHex(item.ref)); 323 | } 324 | parts.push(deflate(item.body)); 325 | return bodec.join(parts); 326 | } 327 | -------------------------------------------------------------------------------- /lib/pkt-line.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var bodec = require('bodec'); 4 | var PACK = bodec.fromRaw("PACK"); 5 | 6 | module.exports = { 7 | deframer: deframer, 8 | framer: framer 9 | }; 10 | 11 | function deframer(emit) { 12 | var state = 0; 13 | var offset = 4; 14 | var length = 0; 15 | var data; 16 | var more = true; 17 | 18 | return function (item) { 19 | 20 | // Forward the EOS marker 21 | if (item === undefined) return emit(); 22 | 23 | // Once we're in pack mode, everything goes straight through 24 | if (state === 3) return emit(item); 25 | 26 | // Otherwise parse the data using a state machine. 
27 | for (var i = 0, l = item.length; i < l; i++) { 28 | var byte = item[i]; 29 | if (state === 0) { 30 | var val = fromHexChar(byte); 31 | if (val === -1) { 32 | if (byte === PACK[0]) { 33 | offset = 1; 34 | state = 2; 35 | continue; 36 | } 37 | state = -1; 38 | throw new SyntaxError("Not a hex char: " + String.fromCharCode(byte)); 39 | } 40 | length |= val << ((--offset) * 4); 41 | if (offset === 0) { 42 | if (length === 4) { 43 | offset = 4; 44 | more = emit(""); 45 | } 46 | else if (length === 0) { 47 | offset = 4; 48 | more = emit(null); 49 | } 50 | else if (length > 4) { 51 | length -= 4; 52 | data = bodec.create(length); 53 | state = 1; 54 | } 55 | else { 56 | state = -1; 57 | throw new SyntaxError("Invalid length: " + length); 58 | } 59 | } 60 | } 61 | else if (state === 1) { 62 | data[offset++] = byte; 63 | if (offset === length) { 64 | offset = 4; 65 | state = 0; 66 | length = 0; 67 | if (data[0] === 1) { 68 | more = emit(bodec.slice(data, 1)); 69 | } 70 | else if (data[0] === 2) { 71 | more = emit({progress: bodec.toUnicode(data, 1)}); 72 | } 73 | else if (data[0] === 3) { 74 | more = emit({error: bodec.toUnicode(data, 1)}); 75 | } 76 | else { 77 | more = emit(bodec.toUnicode(data).trim()); 78 | } 79 | } 80 | } 81 | else if (state === 2) { 82 | if (offset < 4 && byte === PACK[offset++]) { 83 | continue; 84 | } 85 | state = 3; 86 | more = emit(bodec.join([PACK, bodec.subarray(item, i)])); 87 | break; 88 | } 89 | else { 90 | throw new Error("pkt-line decoder in invalid state"); 91 | } 92 | } 93 | 94 | return more; 95 | }; 96 | 97 | } 98 | 99 | function framer(emit) { 100 | return function (item) { 101 | if (item === undefined) return emit(); 102 | if (item === null) { 103 | return emit(bodec.fromRaw("0000")); 104 | } 105 | if (typeof item === "string") { 106 | item = bodec.fromUnicode(item); 107 | } 108 | return emit(bodec.join([frameHead(item.length + 4), item])); 109 | }; 110 | } 111 | 112 | function frameHead(length) { 113 | var buffer = 
bodec.create(4); 114 | buffer[0] = toHexChar(length >>> 12); 115 | buffer[1] = toHexChar((length >>> 8) & 0xf); 116 | buffer[2] = toHexChar((length >>> 4) & 0xf); 117 | buffer[3] = toHexChar(length & 0xf); 118 | return buffer; 119 | } 120 | 121 | function fromHexChar(val) { 122 | return (val >= 0x30 && val < 0x40) ? val - 0x30 : 123 | ((val > 0x60 && val <= 0x66) ? val - 0x57 : -1); 124 | } 125 | 126 | function toHexChar(val) { 127 | return val < 0x0a ? val + 0x30 : val + 0x57; 128 | } 129 | -------------------------------------------------------------------------------- /lib/wrap-handler.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | module.exports = wrapHandler; 4 | 5 | function wrapHandler(fn, onError) { 6 | if (onError) { 7 | return function (err, value) { 8 | if (err) return onError(err); 9 | try { 10 | return fn(value); 11 | } 12 | catch (err) { 13 | return onError(err); 14 | } 15 | }; 16 | } 17 | return function (err, value) { 18 | if (err) throw err; 19 | return fn(value); 20 | }; 21 | } 22 | -------------------------------------------------------------------------------- /mixins/add-cache.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | module.exports = addCache; 4 | function addCache(repo, cache) { 5 | var loadAs = repo.loadAs; 6 | if (loadAs) repo.loadAs = loadAsCached; 7 | var saveAs = repo.saveAs; 8 | if (saveAs) repo.saveAs = saveAsCached; 9 | var createTree = repo.createTree; 10 | if (createTree) repo.createTree = createTreeCached; 11 | 12 | function loadAsCached(type, hash, callback) { 13 | // Next check in disk cache... 14 | cache.loadAs(type, hash, onCacheLoad); 15 | 16 | function onCacheLoad(err, value) { 17 | if (err) return callback(err); 18 | // ...and return if it's there. 19 | if (value !== undefined) { 20 | return callback(null, value, hash); 21 | } 22 | 23 | // Otherwise load from real data source... 
24 | loadAs.call(repo, type, hash, onLoad); 25 | } 26 | 27 | function onLoad(err, value) { 28 | if (value === undefined) return callback(err); 29 | 30 | // Store it on disk too... 31 | // Force the hash to prevent mismatches. 32 | cache.saveAs(type, value, onSave, hash); 33 | 34 | function onSave(err) { 35 | if (err) return callback(err); 36 | // Finally return the value to caller. 37 | callback(null, value, hash); 38 | } 39 | } 40 | } 41 | 42 | function saveAsCached(type, value, callback) { 43 | saveAs.call(repo, type, value, onSave); 44 | 45 | function onSave(err, hash) { 46 | if (err) return callback(err); 47 | // Store in disk, forcing hash to match. 48 | cache.saveAs(type, value, callback, hash); 49 | } 50 | } 51 | 52 | function createTreeCached(entries, callback) { 53 | createTree.call(repo, entries, onTree); 54 | 55 | function onTree(err, hash, tree) { 56 | if (err) return callback(err); 57 | cache.saveAs("tree", tree, callback, hash); 58 | } 59 | } 60 | 61 | } 62 | -------------------------------------------------------------------------------- /mixins/create-tree.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var modes = require('../lib/modes.js'); 4 | 5 | module.exports = function (repo) { 6 | repo.createTree = createTree; 7 | 8 | function createTree(entries, callback) { 9 | if (!callback) return createTree.bind(null, entries); 10 | callback = singleCall(callback); 11 | if (!Array.isArray(entries)) { 12 | entries = Object.keys(entries).map(function (path) { 13 | var entry = entries[path]; 14 | entry.path = path; 15 | return entry; 16 | }); 17 | } 18 | 19 | // Tree paths that we need loaded 20 | var toLoad = {}; 21 | function markTree(path) { 22 | while(true) { 23 | if (toLoad[path]) return; 24 | toLoad[path] = true; 25 | trees[path] = { 26 | add: [], 27 | del: [], 28 | tree: {} 29 | }; 30 | if (!path) break; 31 | path = path.substring(0, path.lastIndexOf("/")); 32 | } 33 | } 34 | 35 | // 
Commands to run organized by tree path 36 | var trees = {}; 37 | 38 | // Counter for parallel I/O operations 39 | var left = 1; // One extra counter to protect again zalgo cache callbacks. 40 | 41 | // First pass, stubs out the trees structure, sorts adds from deletes, 42 | // and saves any inline content blobs. 43 | entries.forEach(function (entry) { 44 | var index = entry.path.lastIndexOf("/"); 45 | var parentPath = entry.path.substr(0, index); 46 | var name = entry.path.substr(index + 1); 47 | markTree(parentPath); 48 | var tree = trees[parentPath]; 49 | var adds = tree.add; 50 | var dels = tree.del; 51 | 52 | if (!entry.mode) { 53 | dels.push(name); 54 | return; 55 | } 56 | var add = { 57 | name: name, 58 | mode: entry.mode, 59 | hash: entry.hash 60 | }; 61 | adds.push(add); 62 | if (entry.hash) return; 63 | left++; 64 | repo.saveAs("blob", entry.content, function (err, hash) { 65 | if (err) return callback(err); 66 | add.hash = hash; 67 | check(); 68 | }); 69 | }); 70 | 71 | // Preload the base trees 72 | if (entries.base) loadTree("", entries.base); 73 | 74 | // Check just in case there was no IO to perform 75 | check(); 76 | 77 | function loadTree(path, hash) { 78 | left++; 79 | delete toLoad[path]; 80 | repo.loadAs("tree", hash, function (err, tree) { 81 | if (err) return callback(err); 82 | trees[path].tree = tree; 83 | Object.keys(tree).forEach(function (name) { 84 | var childPath = path ? 
path + "/" + name : name; 85 | if (toLoad[childPath]) loadTree(childPath, tree[name].hash); 86 | }); 87 | check(); 88 | }); 89 | } 90 | 91 | function check() { 92 | if (--left) return; 93 | findLeaves().forEach(processLeaf); 94 | } 95 | 96 | function processLeaf(path) { 97 | var entry = trees[path]; 98 | delete trees[path]; 99 | var tree = entry.tree; 100 | entry.del.forEach(function (name) { 101 | delete tree[name]; 102 | }); 103 | entry.add.forEach(function (item) { 104 | tree[item.name] = { 105 | mode: item.mode, 106 | hash: item.hash 107 | }; 108 | }); 109 | left++; 110 | repo.saveAs("tree", tree, function (err, hash, tree) { 111 | if (err) return callback(err); 112 | if (!path) return callback(null, hash, tree); 113 | var index = path.lastIndexOf("/"); 114 | var parentPath = path.substring(0, index); 115 | var name = path.substring(index + 1); 116 | trees[parentPath].add.push({ 117 | name: name, 118 | mode: modes.tree, 119 | hash: hash 120 | }); 121 | if (--left) return; 122 | findLeaves().forEach(processLeaf); 123 | }); 124 | } 125 | 126 | function findLeaves() { 127 | var paths = Object.keys(trees); 128 | var parents = {}; 129 | paths.forEach(function (path) { 130 | if (!path) return; 131 | var parent = path.substring(0, path.lastIndexOf("/")); 132 | parents[parent] = true; 133 | }); 134 | return paths.filter(function (path) { 135 | return !parents[path]; 136 | }); 137 | } 138 | } 139 | }; 140 | 141 | function singleCall(callback) { 142 | var done = false; 143 | return function () { 144 | if (done) return console.warn("Discarding extra callback"); 145 | done = true; 146 | return callback.apply(this, arguments); 147 | }; 148 | } 149 | -------------------------------------------------------------------------------- /mixins/delay.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | module.exports = function (repo, ms) { 4 | var saveAs = repo.saveAs; 5 | var loadAs = repo.loadAs; 6 | var readRef = 
repo.readRef; 7 | var updateRef = repo.updateRef; 8 | var createTree = repo.createTree; 9 | 10 | repo.saveAs = saveAsDelayed; 11 | repo.loadAs = loadAsDelayed; 12 | repo.readRef = readRefDelayed; 13 | repo.updateRed = updateRefDelayed; 14 | if (createTree) repo.createTree = createTreeDelayed; 15 | 16 | function saveAsDelayed(type, value, callback) { 17 | if (!callback) return saveAsDelayed.bind(repo, type, value); 18 | setTimeout(function () { 19 | return saveAs.call(repo, type, value, callback); 20 | }, ms); 21 | } 22 | 23 | function loadAsDelayed(type, hash, callback) { 24 | if (!callback) return loadAsDelayed.bind(repo, type, hash); 25 | setTimeout(function () { 26 | return loadAs.call(repo, type, hash, callback); 27 | }, ms); 28 | } 29 | 30 | function readRefDelayed(ref, callback) { 31 | if (!callback) return readRefDelayed.bind(repo, ref); 32 | setTimeout(function () { 33 | return readRef.call(repo, ref, callback); 34 | }, ms); 35 | } 36 | 37 | function updateRefDelayed(ref, hash, callback) { 38 | if (!callback) return updateRefDelayed.bind(repo, ref, hash); 39 | setTimeout(function () { 40 | return updateRef.call(repo, ref, hash, callback); 41 | }, ms); 42 | } 43 | 44 | function createTreeDelayed(entries, callback) { 45 | if (!callback) return createTreeDelayed.bind(repo, entries); 46 | setTimeout(function () { 47 | return createTree.call(repo, entries, callback); 48 | }, ms); 49 | } 50 | 51 | }; 52 | -------------------------------------------------------------------------------- /mixins/fall-through.js: -------------------------------------------------------------------------------- 1 | var modes = require('../lib/modes'); 2 | 3 | module.exports = function (local, remote) { 4 | var loadAs = local.loadAs; 5 | local.loadAs = newLoadAs; 6 | function newLoadAs(type, hash, callback) { 7 | if (!callback) return newLoadAs.bind(local. 
type, hash); 8 | loadAs.call(local, type, hash, function (err, body) { 9 | if (err) return callback(err); 10 | if (body === undefined) return remote.loadAs(type, hash, callback); 11 | callback(null, body); 12 | }); 13 | } 14 | 15 | var readRef = local.readRef; 16 | local.readRef = newReadRef; 17 | function newReadRef(ref, callback) { 18 | if (!callback) return newReadRef.bind(local. ref); 19 | readRef.call(local, ref, function (err, body) { 20 | if (err) return callback(err); 21 | if (body === undefined) return remote.readRef(ref, callback); 22 | callback(null, body); 23 | }); 24 | } 25 | 26 | }; 27 | -------------------------------------------------------------------------------- /mixins/formats.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var bodec = require('bodec'); 4 | var treeMap = require('../lib/object-codec').treeMap; 5 | 6 | module.exports = function (repo) { 7 | var loadAs = repo.loadAs; 8 | repo.loadAs = newLoadAs; 9 | var saveAs = repo.saveAs; 10 | repo.saveAs = newSaveAs; 11 | 12 | function newLoadAs(type, hash, callback) { 13 | if (!callback) return newLoadAs.bind(repo, type, hash); 14 | var realType = type === "text" ? "blob": 15 | type === "array" ? "tree" : type; 16 | return loadAs.call(repo, realType, hash, onLoad); 17 | 18 | function onLoad(err, body, hash) { 19 | if (body === undefined) return callback(err); 20 | if (type === "text") body = bodec.toUnicode(body); 21 | if (type === "array") body = toArray(body); 22 | return callback(err, body, hash); 23 | } 24 | } 25 | 26 | function newSaveAs(type, body, callback) { 27 | if (!callback) return newSaveAs.bind(repo, type, body); 28 | type = type === "text" ? "blob": 29 | type === "array" ? 
"tree" : type; 30 | if (type === "blob") { 31 | if (typeof body === "string") { 32 | body = bodec.fromUnicode(body); 33 | } 34 | } 35 | else if (type === "tree") { 36 | body = normalizeTree(body); 37 | } 38 | else if (type === "commit") { 39 | body = normalizeCommit(body); 40 | } 41 | else if (type === "tag") { 42 | body = normalizeTag(body); 43 | } 44 | return saveAs.call(repo, type, body, callback); 45 | } 46 | 47 | }; 48 | 49 | function toArray(tree) { 50 | return Object.keys(tree).map(treeMap, tree); 51 | } 52 | 53 | function normalizeTree(body) { 54 | var type = body && typeof body; 55 | if (type !== "object") { 56 | throw new TypeError("Tree body must be array or object"); 57 | } 58 | var tree = {}, i, l, entry; 59 | // If array form is passed in, convert to object form. 60 | if (Array.isArray(body)) { 61 | for (i = 0, l = body.length; i < l; i++) { 62 | entry = body[i]; 63 | tree[entry.name] = { 64 | mode: entry.mode, 65 | hash: entry.hash 66 | }; 67 | } 68 | } 69 | else { 70 | var names = Object.keys(body); 71 | for (i = 0, l = names.length; i < l; i++) { 72 | var name = names[i]; 73 | entry = body[name]; 74 | tree[name] = { 75 | mode: entry.mode, 76 | hash: entry.hash 77 | }; 78 | } 79 | } 80 | return tree; 81 | } 82 | 83 | function normalizeCommit(body) { 84 | if (!body || typeof body !== "object") { 85 | throw new TypeError("Commit body must be an object"); 86 | } 87 | if (!(body.tree && body.author && body.message)) { 88 | throw new TypeError("Tree, author, and message are required for commits"); 89 | } 90 | var parents = body.parents || (body.parent ? [ body.parent ] : []); 91 | if (!Array.isArray(parents)) { 92 | throw new TypeError("Parents must be an array"); 93 | } 94 | var author = normalizePerson(body.author); 95 | var committer = body.committer ? 
normalizePerson(body.committer) : author; 96 | return { 97 | tree: body.tree, 98 | parents: parents, 99 | author: author, 100 | committer: committer, 101 | message: body.message 102 | }; 103 | } 104 | 105 | function normalizeTag(body) { 106 | if (!body || typeof body !== "object") { 107 | throw new TypeError("Tag body must be an object"); 108 | } 109 | if (!(body.object && body.type && body.tag && body.tagger && body.message)) { 110 | throw new TypeError("Object, type, tag, tagger, and message required"); 111 | } 112 | return { 113 | object: body.object, 114 | type: body.type, 115 | tag: body.tag, 116 | tagger: normalizePerson(body.tagger), 117 | message: body.message 118 | }; 119 | } 120 | 121 | function normalizePerson(person) { 122 | if (!person || typeof person !== "object") { 123 | throw new TypeError("Person must be an object"); 124 | } 125 | if (typeof person.name !== "string" || typeof person.email !== "string") { 126 | throw new TypeError("Name and email are required for person fields"); 127 | } 128 | return { 129 | name: person.name, 130 | email: person.email, 131 | date: person.date || new Date() 132 | }; 133 | } 134 | -------------------------------------------------------------------------------- /mixins/fs-db.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | var bodec = require('bodec'); 3 | var inflate = require('../lib/inflate'); 4 | var deflate = require('../lib/deflate'); 5 | var codec = require('../lib/object-codec'); 6 | var parsePackEntry = require('../lib/pack-codec').parseEntry; 7 | var applyDelta = require('../lib/apply-delta'); 8 | var sha1 = require('git-sha1'); 9 | var pathJoin = require('path').join; 10 | 11 | // The fs object has the following interface: 12 | // - readFile(path) => binary 13 | // Must also call callback() with no arguments if the file does not exist. 
14 | // - readChunk(path, start, end) => binary 15 | // Must also call callback() with no arguments if the file does not exist. 16 | // - writeFile(path, binary) => 17 | // Must also make every directory up to parent of path. 18 | // - readDir(path) => array 19 | // Must also call callback() with no arguments if the file does not exist. 20 | // The repo is expected to have a rootPath property that points to 21 | // the .git folder within the filesystem. 22 | module.exports = function (repo, fs) { 23 | 24 | var cachedIndexes = {}; 25 | 26 | repo.loadAs = loadAs; 27 | repo.saveAs = saveAs; 28 | repo.loadRaw = loadRaw; 29 | repo.saveRaw = saveRaw; 30 | repo.readRef = readRef; 31 | repo.updateRef = updateRef; 32 | repo.hasHash = hasHash; 33 | repo.init = init; 34 | repo.setShallow = setShallow; 35 | 36 | function init(ref, callback) { 37 | if (!callback) return init.bind(null, ref); 38 | ref = ref || "refs/heads/master"; 39 | var path = pathJoin(repo.rootPath, "HEAD"); 40 | fs.writeFile(path, "ref: " + ref, callback); 41 | } 42 | 43 | function setShallow(ref, callback) { 44 | if (!callback) return setShallow.bind(null, ref); 45 | var path = pathJoin(repo.rootPath, "shallow"); 46 | fs.writeFile(path, ref, callback); 47 | } 48 | 49 | function updateRef(ref, hash, callback) { 50 | if (!callback) return updateRef.bind(repo, ref, hash); 51 | var path = pathJoin(repo.rootPath, ref); 52 | var lock = path + ".lock"; 53 | fs.writeFile(lock, bodec.fromRaw(hash + "\n"), function(err) { 54 | if(err) return callback(err); 55 | fs.rename(lock, path, callback); 56 | }); 57 | } 58 | 59 | function readRef(ref, callback) { 60 | if (!callback) return readRef.bind(repo, ref); 61 | var path = pathJoin(repo.rootPath, ref); 62 | fs.readFile(path, function (err, binary) { 63 | if (err) return callback(err); 64 | if (binary === undefined) { 65 | return readPackedRef(ref, callback); 66 | } 67 | var hash; 68 | try { hash = bodec.toRaw(binary).trim(); } 69 | catch (err) { return callback(err); } 
70 | callback(null, hash); 71 | }); 72 | } 73 | 74 | function readPackedRef(ref, callback) { 75 | var path = pathJoin(repo.rootPath, "packed-refs"); 76 | fs.readFile(path, function (err, binary) { 77 | if (binary === undefined) return callback(err); 78 | var hash; 79 | try { 80 | var text = bodec.toRaw(binary); 81 | var index = text.indexOf(ref); 82 | if (index >= 0) { 83 | hash = text.substring(index - 41, index - 1); 84 | } 85 | } 86 | catch (err) { 87 | return callback(err); 88 | } 89 | callback(null, hash); 90 | }); 91 | } 92 | 93 | function saveAs(type, body, callback) { 94 | if (!callback) return saveAs.bind(repo, type, body); 95 | var raw, hash; 96 | try { 97 | raw = codec.frame({ 98 | type: type, 99 | body: codec.encoders[type](body) 100 | }); 101 | hash = sha1(raw); 102 | } 103 | catch (err) { return callback(err); } 104 | saveRaw(hash, raw, function (err) { 105 | if (err) return callback(err); 106 | callback(null, hash); 107 | }); 108 | } 109 | 110 | function saveRaw(hash, raw, callback) { 111 | if (!callback) return saveRaw.bind(repo, hash, raw); 112 | var buffer, path; 113 | try { 114 | if (sha1(raw) !== hash) { 115 | throw new Error("Save data does not match hash"); 116 | } 117 | buffer = deflate(raw); 118 | path = hashToPath(hash); 119 | } 120 | catch (err) { return callback(err); } 121 | // Try to read the object first. 
122 | loadRaw(hash, function (err, data) { 123 | // If it already exists, we're done 124 | if (data) return callback(); 125 | // Otherwise write a new file 126 | var tmp = path.replace(/[0-9a-f]+$/, 'tmp_obj_' + Math.random().toString(36).substr(2)) 127 | fs.writeFile(tmp, buffer, function(err) { 128 | if(err) return callback(err); 129 | fs.rename(tmp, path, callback); 130 | }); 131 | }); 132 | } 133 | 134 | function loadAs(type, hash, callback) { 135 | if (!callback) return loadAs.bind(repo, type, hash); 136 | loadRaw(hash, function (err, raw) { 137 | if (raw === undefined) return callback(err); 138 | var body; 139 | try { 140 | raw = codec.deframe(raw); 141 | if (raw.type !== type) throw new TypeError("Type mismatch"); 142 | body = codec.decoders[raw.type](raw.body); 143 | } 144 | catch (err) { return callback(err); } 145 | callback(null, body); 146 | }); 147 | } 148 | 149 | function hasHash(hash, callback) { 150 | if (!callback) return hasHash.bind(repo, hash); 151 | loadRaw(hash, function (err, body) { 152 | if (err) return callback(err); 153 | return callback(null, !!body); 154 | }); 155 | } 156 | 157 | function loadRaw(hash, callback) { 158 | if (!callback) return loadRaw.bind(repo, hash); 159 | var path = hashToPath(hash); 160 | fs.readFile(path, function (err, buffer) { 161 | if (err) return callback(err); 162 | if (buffer) { 163 | var raw; 164 | try { raw = inflate(buffer); } 165 | catch (err) { return callback(err); } 166 | return callback(null, raw); 167 | } 168 | return loadRawPacked(hash, callback); 169 | }); 170 | } 171 | 172 | function loadRawPacked(hash, callback) { 173 | var packDir = pathJoin(repo.rootPath, "objects/pack"); 174 | var packHashes = []; 175 | fs.readDir(packDir, function (err, entries) { 176 | if (!entries) return callback(err); 177 | entries.forEach(function (name) { 178 | var match = name.match(/pack-([0-9a-f]{40}).idx/); 179 | if (match) packHashes.push(match[1]); 180 | }); 181 | start(); 182 | }); 183 | 184 | function start() { 
185 | var packHash = packHashes.pop(); 186 | var offsets; 187 | if (!packHash) return callback(); 188 | if (!cachedIndexes[packHash]) loadIndex(packHash); 189 | else onIndex(); 190 | 191 | function loadIndex() { 192 | var indexFile = pathJoin(packDir, "pack-" + packHash + ".idx" ); 193 | fs.readFile(indexFile, function (err, buffer) { 194 | if (!buffer) return callback(err); 195 | try { 196 | cachedIndexes[packHash] = parseIndex(buffer); 197 | } 198 | catch (err) { return callback(err); } 199 | onIndex(); 200 | }); 201 | } 202 | 203 | function onIndex() { 204 | var cached = cachedIndexes[packHash]; 205 | var packFile = pathJoin(packDir, "pack-" + packHash + ".pack" ); 206 | var index = cached.byHash[hash]; 207 | if (!index) return start(); 208 | offsets = cached.offsets; 209 | loadChunk(packFile, index.offset, callback); 210 | } 211 | 212 | function loadChunk(packFile, start, callback) { 213 | var index = offsets.indexOf(start); 214 | if (index < 0) { 215 | var error = new Error("Can't find chunk starting at " + start); 216 | return callback(error); 217 | } 218 | var end = index + 1 < offsets.length ? 
offsets[index + 1] : -20; 219 | fs.readChunk(packFile, start, end, function (err, chunk) { 220 | if (!chunk) return callback(err); 221 | var raw; 222 | try { 223 | var entry = parsePackEntry(chunk); 224 | if (entry.type === "ref-delta") { 225 | return loadRaw.call(repo, entry.ref, onBase); 226 | } 227 | else if (entry.type === "ofs-delta") { 228 | return loadChunk(packFile, start - entry.ref, onBase); 229 | } 230 | raw = codec.frame(entry); 231 | } 232 | catch (err) { return callback(err); } 233 | callback(null, raw); 234 | 235 | function onBase(err, base) { 236 | if (!base) return callback(err); 237 | var object = codec.deframe(base); 238 | var buffer; 239 | try { 240 | object.body = applyDelta(entry.body, object.body); 241 | buffer = codec.frame(object); 242 | } 243 | catch (err) { return callback(err); } 244 | callback(null, buffer); 245 | } 246 | }); 247 | } 248 | 249 | } 250 | } 251 | 252 | function hashToPath(hash) { 253 | return pathJoin(repo.rootPath, "objects", hash.substring(0, 2), hash.substring(2)); 254 | } 255 | 256 | }; 257 | 258 | function parseIndex(buffer) { 259 | if (readUint32(buffer, 0) !== 0xff744f63 || 260 | readUint32(buffer, 4) !== 0x00000002) { 261 | throw new Error("Only v2 pack indexes supported"); 262 | } 263 | 264 | // Get the number of hashes in index 265 | // This is the value of the last fan-out entry 266 | var hashOffset = 8 + 255 * 4; 267 | var length = readUint32(buffer, hashOffset); 268 | hashOffset += 4; 269 | var crcOffset = hashOffset + 20 * length; 270 | var lengthOffset = crcOffset + 4 * length; 271 | var largeOffset = lengthOffset + 4 * length; 272 | var checkOffset = largeOffset; 273 | var indexes = new Array(length); 274 | for (var i = 0; i < length; i++) { 275 | var start = hashOffset + i * 20; 276 | var hash = bodec.toHex(bodec.slice(buffer, start, start + 20)); 277 | var crc = readUint32(buffer, crcOffset + i * 4); 278 | var offset = readUint32(buffer, lengthOffset + i * 4); 279 | if (offset & 0x80000000) { 280 | 
offset = largeOffset + (offset &0x7fffffff) * 8; 281 | checkOffset = Math.max(checkOffset, offset + 8); 282 | offset = readUint64(buffer, offset); 283 | } 284 | indexes[i] = { 285 | hash: hash, 286 | offset: offset, 287 | crc: crc 288 | }; 289 | } 290 | var packChecksum = bodec.toHex(bodec.slice(buffer, checkOffset, checkOffset + 20)); 291 | var checksum = bodec.toHex(bodec.slice(buffer, checkOffset + 20, checkOffset + 40)); 292 | if (sha1(bodec.slice(buffer, 0, checkOffset + 20)) !== checksum) { 293 | throw new Error("Checksum mistmatch"); 294 | } 295 | 296 | var byHash = {}; 297 | indexes.sort(function (a, b) { 298 | return a.offset - b.offset; 299 | }); 300 | indexes.forEach(function (data) { 301 | byHash[data.hash] = { 302 | offset: data.offset, 303 | crc: data.crc, 304 | }; 305 | }); 306 | var offsets = indexes.map(function (entry) { 307 | return entry.offset; 308 | }).sort(function (a, b) { 309 | return a - b; 310 | }); 311 | 312 | return { 313 | offsets: offsets, 314 | byHash: byHash, 315 | checksum: packChecksum 316 | }; 317 | } 318 | 319 | function readUint32(buffer, offset) { 320 | return (buffer[offset] << 24 | 321 | buffer[offset + 1] << 16 | 322 | buffer[offset + 2] << 8 | 323 | buffer[offset + 3] << 0) >>> 0; 324 | } 325 | 326 | // Yes this will lose precision over 2^53, but that can't be helped when 327 | // returning a single integer. 328 | // We simply won't support packfiles over 8 petabytes. I'm ok with that. 
// Read a big-endian unsigned 64-bit integer from `buffer` at `offset`.
// Values above 2^53 lose precision (JS numbers can't represent them
// exactly); packfiles that large are not supported.
function readUint64(buffer, offset) {
  var value = 0;
  for (var i = 0; i < 8; i++) {
    value = value * 0x100 + buffer[offset + i];
  }
  return value;
}
// Save an object (commit/tree/blob) into the "objects" store, keyed by its
// sha1 over the framed encoding. `forcedHash` (optional) overrides the
// computed hash — used when importing objects whose hash is already known.
// Yields (null, hash, body) on success.
function saveAs(type, body, callback, forcedHash) {
  if (!callback) return saveAs.bind(this, type, body);
  var hash;
  try {
    var buffer = codec.frame({type: type, body: body});
    hash = forcedHash || sha1(buffer);
  }
  catch (err) { return callback(err); }
  var trans = db.transaction(["objects"], "readwrite");
  var store = trans.objectStore("objects");
  var entry = { hash: hash, type: type, body: body };
  var request = store.put(entry);
  request.onsuccess = function () {
    // console.warn("SAVE", type, hash);
    callback(null, hash, body);
  };
  request.onerror = function (evt) {
    // BUG FIX: `evt.value` does not exist on IndexedDB error events; the
    // actual DOMException lives on the request: evt.target.error.
    callback(new Error(evt.target.error));
  };
}
// Look up the hash stored for `ref`, namespaced by this repo's refPrefix.
// Yields undefined (no arguments) when the ref does not exist.
function readRef(ref, callback) {
  if (!callback) return readRef.bind(this, ref);
  var key = this.refPrefix + "/" + ref;
  // This only reads — a "readonly" transaction lets concurrent reads
  // proceed without serializing against writers.
  var trans = db.transaction(["refs"], "readonly");
  var store = trans.objectStore("refs");
  var request = store.get(key);
  request.onsuccess = function (evt) {
    var entry = evt.target.result;
    if (!entry) return callback();
    callback(null, entry.hash);
  };
  request.onerror = function (evt) {
    // BUG FIX: `evt.value` does not exist on IndexedDB error events; the
    // actual DOMException lives on the request: evt.target.error.
    callback(new Error(evt.target.error));
  };
}
// Return a defensive copy of `value` so cached objects can't be mutated by
// callers. Blobs of 100 bytes or more are returned as-is: they are never
// cached (see the length checks in loadAsCached/saveAsCached), so copying
// them would be wasted work.
function dupe(type, value) {
  if (type === "blob") {
    // BUG FIX: was `type.length >= 100` — but `type` is the string "blob"
    // (length 4), so the fast path never ran and every blob was copied.
    if (value.length >= 100) return value;
    return new Binary(value);
  }
  // Round-trip through the codec to deep-copy commits/trees/tags.
  return decoders[type](encoders[type](value));
}
// Load an object by hash (or by ref name, which is resolved first) and
// decode it as `type`. Throws TypeError when the stored type differs.
function loadAs(type, hash, callback) {
  return makeAsync(function () {
    var key = isHash.test(hash) ? hash : refs[hash];
    var buffer = objects[key];
    // NOTE(review): other backends yield undefined for a missing object;
    // `[]` is truthy and may confuse callers — confirm this is intended.
    if (!buffer) return [];
    var parsed = codec.deframe(buffer, true);
    if (parsed.type !== type) throw new TypeError("Type mismatch");
    return parsed.body;
  }, callback);
}
return makeAsync.bind(null, fn); 89 | defer(function () { 90 | var out; 91 | try { out = fn(); } 92 | catch (err) { return callback(err); } 93 | callback(null, out); 94 | }); 95 | } 96 | -------------------------------------------------------------------------------- /mixins/pack-ops.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var sha1 = require('git-sha1'); 4 | var applyDelta = require('../lib/apply-delta.js'); 5 | var codec = require('../lib/object-codec.js'); 6 | var decodePack = require('../lib/pack-codec.js').decodePack; 7 | var encodePack = require('../lib/pack-codec.js').encodePack; 8 | var makeChannel = require('culvert'); 9 | 10 | module.exports = function (repo) { 11 | // packChannel is a writable culvert channel {put,drain} containing raw packfile binary data 12 | // opts can contain "onProgress" or "onError" hook functions. 13 | // callback will be called with a list of all unpacked hashes on success. 14 | repo.unpack = unpack; // (packChannel, opts) => hashes 15 | 16 | // hashes is an array of hashes to pack 17 | // packChannel will be a readable culvert channel {take} containing raw packfile binary data 18 | repo.pack = pack; // (hashes, opts) => packChannel 19 | }; 20 | 21 | function unpack(packChannel, opts, callback) { 22 | /*jshint validthis:true*/ 23 | if (!callback) return unpack.bind(this, packChannel, opts); 24 | 25 | packChannel = applyParser(packChannel, decodePack, callback); 26 | 27 | var repo = this; 28 | 29 | var version, num, numDeltas = 0, count = 0, countDeltas = 0; 30 | var done, startDeltaProgress = false; 31 | 32 | // hashes keyed by offset for ofs-delta resolving 33 | var hashes = {}; 34 | // key is hash, boolean is cached "has" value of true or false 35 | var has = {}; 36 | // key is hash we're waiting for, value is array of items that are waiting. 
// Expand a delta entry against its base object, which must already exist
// in the repo. Rewrites `item` in place to the resolved type/body, then
// persists it.
function resolveDelta(item) {
  if (opts.onProgress) deltaProgress();
  return repo.loadRaw(item.ref, onBase);

  function onBase(err, buffer) {
    if (err) return onDone(err);
    if (!buffer) return onDone(new Error("Missing base image at " + item.ref));
    var base = codec.deframe(buffer);
    item.type = base.type;
    item.body = applyDelta(item.body, base.body);
    return saveValue(item);
  }
}
// Frame and persist a fully-resolved object, remembering its hash by pack
// offset so later ofs-deltas can find their base.
function saveValue(item) {
  var buffer = codec.frame(item);
  var hash = sha1(buffer);
  hashes[item.offset] = hash;
  has[hash] = true;
  if (hash in pending) {
    // I have yet to come across a pack stream that actually needs this.
    // So I will only implement it when I have concrete data to test against.
    console.error({
      list: pending[hash],
      item: item
    });
    // BUG FIX: throw a real Error (not a bare string) so the failure
    // carries a stack trace and works with instanceof checks.
    throw new Error("TODO: pending value was found, resolve it");
  }
  return repo.saveRaw(hash, buffer, onSave);
}
// Collect an object's own enumerable property values into an array, in
// key-enumeration order.
function values(object) {
  return Object.keys(object).map(function (key) {
    return object[key];
  });
}
callback(); 30 | return callback(null, { 31 | last: { 32 | mode: mode, 33 | hash: hash, 34 | path: parts.slice(0, index).join("/"), 35 | rest: parts.slice(index).join("/"), 36 | } 37 | }); 38 | } 39 | callback(null, { 40 | mode: mode, 41 | hash: hash 42 | }); 43 | } 44 | 45 | function onLoad(err, value) { 46 | if (!value) return callback(err || new Error("Missing object: " + hash)); 47 | cache[hash] = value; 48 | loop(); 49 | } 50 | 51 | } 52 | -------------------------------------------------------------------------------- /mixins/read-combiner.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | // This replaces loadAs with a version that batches concurrent requests for 4 | // the same hash. 5 | module.exports = function (repo) { 6 | var pendingReqs = {}; 7 | 8 | var loadAs = repo.loadAs; 9 | repo.loadAs = newLoadAs; 10 | 11 | function newLoadAs(type, hash, callback) { 12 | if (!callback) return newLoadAs.bind(null, type, hash); 13 | var list = pendingReqs[hash]; 14 | if (list) { 15 | if (list.type !== type) callback(new Error("Type mismatch")); 16 | else list.push(callback); 17 | return; 18 | } 19 | list = pendingReqs[hash] = [callback]; 20 | list.type = type; 21 | loadAs.call(repo, type, hash, function () { 22 | delete pendingReqs[hash]; 23 | for (var i = 0, l = list.length; i < l; i++) { 24 | list[i].apply(this, arguments); 25 | } 26 | }); 27 | } 28 | }; 29 | -------------------------------------------------------------------------------- /mixins/sync.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var modes = require('../lib/modes'); 4 | 5 | module.exports = function (local, remote) { 6 | local.fetch = fetch; 7 | local.send = send; 8 | local.readRemoteRef = remote.readRef.bind(remote); 9 | local.updateRemoteRef = remote.updateRef.bind(remote); 10 | 11 | function fetch(ref, depth, callback) { 12 | if (!callback) return fetch.bind(local, ref, 
// Cached existence check against the destination repo. Results are
// memoized in hasCache so each hash is queried at most once per sync.
function check(type, hash, callback) {
  if (typeof type !== "string") throw new TypeError("type must be string");
  if (typeof hash !== "string") throw new TypeError("hash must be string");
  if (hasCache[hash]) return callback(null, true);
  local.hasHash(hash, onHas);

  function onHas(err, exists) {
    if (err) return callback(err);
    hasCache[hash] = exists;
    callback(null, exists);
  }
}
// Copy a tree — and, recursively, every blob and subtree it references —
// from the remote repo into the local one, then verify the saved hash
// round-trips to the expected one.
function importTree(hash, callback) {
  check("tree", hash, onCheck);

  function onCheck(err, has) {
    if (err || has) return callback(err);
    remote.loadAs("tree", hash, onLoad);
  }

  function onLoad(err, tree) {
    if (!tree) return callback(err || new Error("Missing tree " + hash));
    var names = Object.keys(tree);
    var index = 0;
    onImport();

    // Import entries one at a time; when all are done, save the tree.
    function onImport(err) {
      if (err) return callback(err);
      if (index >= names.length) {
        return local.saveAs("tree", tree, onSave);
      }
      var entry = tree[names[index++]];
      if (modes.isBlob(entry.mode)) {
        return importBlob(entry.hash, onImport);
      }
      if (entry.mode === modes.tree) {
        return importTree(entry.hash, onImport);
      }
      // Skip others.
      onImport();
    }
  }

  function onSave(err, newHash) {
    if (err) return callback(err);
    if (newHash !== hash) {
      return callback(new Error("Tree hash mismatch " + hash + " != " + newHash));
    }
    hasCache[hash] = true;
    callback();
  }
}
// List a tree object's children as walk items, building each child's full
// path (subtrees get a trailing slash). Non-tree items expand to nothing.
function treeScan(object) {
  if (object.mode !== modes.tree) return [];
  var tree = object.body;
  return Object.keys(tree).map(function (name) {
    var entry = tree[name];
    var childPath = object.path + name + (entry.mode === modes.tree ? "/" : "");
    return {
      mode: entry.mode,
      hash: entry.hash,
      path: childPath
    };
  });
}
// Generic ordered walker. `seed` is the first item; `scan(item)` returns
// the keys to expand from it; `loadKey(key, cb)` resolves a key to an
// item asynchronously; `compare(a, b)` orders items in the queue.
// Returns a stream object with read(callback) / abort(callback); read
// yields undefined when the walk is exhausted.
function walk(seed, scan, loadKey, compare) {
  var queue = [seed];
  var inFlight = 0;
  var storedError;
  var pendingRead;
  return { read: read, abort: abort };

  function read(callback) {
    if (!callback) return read;
    if (pendingRead) return callback(new Error("Only one read at a time"));
    if (inFlight) {
      // Loads are still outstanding; deliver once they settle.
      pendingRead = callback;
      return;
    }
    var item = queue.shift();
    if (!item) return callback();
    try {
      scan(item).forEach(onKey);
    }
    catch (err) {
      return callback(err);
    }
    return callback(null, item);
  }

  function abort(callback) { return callback(); }

  // NOTE(review): when no read is waiting, the error is stashed in
  // `storedError` but never surfaced to a later read — confirm intent.
  function onError(err) {
    if (pendingRead) {
      var callback = pendingRead;
      pendingRead = null;
      return callback(err);
    }
    storedError = err;
  }

  function onKey(key) {
    inFlight++;
    loadKey(key, onItem);
  }

  function onItem(err, item) {
    inFlight--;
    if (err) return onError(err);
    // Insertion sort: keep the queue ordered by `compare`.
    var index = queue.length;
    while (index && compare(item, queue[index - 1])) index--;
    queue.splice(index, 0, item);
    if (!inFlight && pendingRead) {
      var callback = pendingRead;
      pendingRead = null;
      return read(callback);
    }
  }
}
// Persist a framed object buffer under its hash. The buffer is deflated
// and base64-encoded since Web SQL stores text, not binary.
function saveRaw(hash, buffer, callback) {
  /*jshint validthis:true*/
  if (!callback) return saveRaw.bind(this, hash, buffer);
  // BUG FIX: the hash column is declared unique, so a plain INSERT fails
  // when an object is saved twice — and with no error callback wired the
  // user callback was never invoked. INSERT OR REPLACE makes re-saving an
  // identical object idempotent.
  var sql = 'INSERT OR REPLACE INTO objects (hash, body) VALUES (?, ?)';
  db.transaction(function (tx) {
    var text;
    try {
      text = bodec.toBase64(deflate(buffer));
    }
    catch (err) {
      return callback(err);
    }
    tx.executeSql(sql, [hash, text], function () {
      callback(null, hash);
    }, function (tx, error) {
      callback(new Error(error.message));
    });
  });
}
// Recursively verify that the object `hash` of kind `type` — and, for
// trees, everything it references — exists in storage.
// NOTE(review): this signature is (type, hash, callback) while the other
// backends expose hasHash(hash, callback) — confirm callers before use.
function hasHash(type, hash, callback) {
  /*jshint validthis:true*/
  loadAs(type, hash, function (err, value) {
    if (err) return callback(err);
    if (value === undefined) return callback(null, false);
    if (type !== "tree") return callback(null, true);
    var remaining = Object.keys(value);
    checkNext();

    // Check tree entries one at a time, short-circuiting on first miss.
    function checkNext() {
      if (!remaining.length) return callback(null, true);
      var entry = value[remaining.pop()];
      hasHash(modes.toType(entry.mode), entry.hash, function (err, has) {
        if (err) return callback(err);
        if (!has) return callback(null, false);
        checkNext();
      });
    }
  });
}
callback(null, item.value); 150 | }, function (tx, error) { 151 | callback(new Error(error.message)); 152 | }); 153 | }); 154 | } 155 | 156 | function updateRef(ref, hash, callback) { 157 | /*jshint: validthis: true */ 158 | var key = this.refPrefix + "/" + ref; 159 | var sql = 'INSERT INTO refs (path, value) VALUES (?, ?)'; 160 | db.transaction(function (tx) { 161 | tx.executeSql(sql, [key, hash], function () { 162 | callback(); 163 | }, function (tx, error) { 164 | callback(new Error(error.message)); 165 | }); 166 | }); 167 | } 168 | -------------------------------------------------------------------------------- /net/git-fetch-pack.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var makeChannel = require('culvert'); 4 | var wrapHandler = require('../lib/wrap-handler'); 5 | var bodec = require('bodec'); 6 | 7 | module.exports = fetchPack; 8 | 9 | function fetchPack(transport, onError) { 10 | 11 | if (!onError) onError = throwIt; 12 | 13 | // Wrap our handler functions to route errors properly. 14 | onRef = wrapHandler(onRef, onError); 15 | onWant = wrapHandler(onWant, onError); 16 | onNak = wrapHandler(onNak, onError); 17 | onMore = wrapHandler(onMore, onError); 18 | onReady = wrapHandler(onReady, onError); 19 | 20 | var caps = null; 21 | var capsSent = false; 22 | var refs = {}; 23 | var haves = {}; 24 | var havesCount = 0; 25 | 26 | // Create a duplex channel for talking with the agent. 27 | var libraryChannel = makeChannel(); 28 | var agentChannel = makeChannel(); 29 | var api = { 30 | put: libraryChannel.put, 31 | drain: libraryChannel.drain, 32 | take: agentChannel.take 33 | }; 34 | 35 | // Start the connection and listen for the response. 36 | var socket = transport("git-upload-pack", onError); 37 | socket.take(onRef); 38 | 39 | // Return the other half of the duplex API channel. 
40 | return { 41 | put: agentChannel.put, 42 | drain: agentChannel.drain, 43 | take: libraryChannel.take 44 | }; 45 | 46 | function onRef(line) { 47 | if (line === undefined) { 48 | throw new Error("Socket disconnected"); 49 | } 50 | if (line === null) { 51 | api.put(refs); 52 | api.take(onWant); 53 | return; 54 | } 55 | else if (!caps) { 56 | caps = {}; 57 | Object.defineProperty(refs, "caps", {value: caps}); 58 | Object.defineProperty(refs, "shallows", {value:[]}); 59 | var index = line.indexOf("\0"); 60 | if (index >= 0) { 61 | line.substring(index + 1).split(" ").forEach(function (cap) { 62 | var i = cap.indexOf("="); 63 | if (i >= 0) { 64 | caps[cap.substring(0, i)] = cap.substring(i + 1); 65 | } 66 | else { 67 | caps[cap] = true; 68 | } 69 | }); 70 | line = line.substring(0, index); 71 | } 72 | } 73 | var match = line.match(/(^[0-9a-f]{40}) (.*)$/); 74 | if (!match) { 75 | if (typeof line === "string" && /^ERR/i.test(line)) { 76 | throw new Error(line); 77 | } 78 | throw new Error("Invalid line: " + JSON.stringify(line)); 79 | } 80 | refs[match[2]] = match[1]; 81 | socket.take(onRef); 82 | } 83 | 84 | var packChannel; 85 | var progressChannel; 86 | var errorChannel; 87 | 88 | function onWant(line) { 89 | if (line === undefined) return socket.put(); 90 | if (line === null) { 91 | socket.put(null); 92 | return api.take(onWant); 93 | } 94 | if (line.deepen) { 95 | socket.put("deepen " + line.deepen + "\n"); 96 | return api.take(onWant); 97 | } 98 | if (line.have) { 99 | haves[line.have] = true; 100 | havesCount++; 101 | socket.put("have " + line.have + "\n"); 102 | return api.take(onWant); 103 | } 104 | if (line.want) { 105 | var extra = ""; 106 | if (!capsSent) { 107 | capsSent = true; 108 | if (caps["ofs-delta"]) extra += " ofs-delta"; 109 | if (caps["thin-pack"]) extra += " thin-pack"; 110 | // if (caps["multi_ack_detailed"]) extra += " multi_ack_detailed"; 111 | // else if (caps["multi_ack"]) extra +=" multi_ack"; 112 | if (caps["side-band-64k"]) extra += " 
side-band-64k"; 113 | else if (caps["side-band"]) extra += " side-band"; 114 | // if (caps["agent"]) extra += " agent=" + agent; 115 | if (caps.agent) extra += " agent=" + caps.agent; 116 | } 117 | extra += "\n"; 118 | socket.put("want " + line.want + extra); 119 | return api.take(onWant); 120 | } 121 | if (line.done) { 122 | socket.put("done\n"); 123 | return socket.take(onNak); 124 | } 125 | throw new Error("Invalid have/want command"); 126 | } 127 | 128 | function onNak(line) { 129 | if (line === undefined) return api.put(); 130 | if (line === null) return socket.take(onNak); 131 | if (bodec.isBinary(line) || line.progress || line.error) { 132 | packChannel = makeChannel(); 133 | progressChannel = makeChannel(); 134 | errorChannel = makeChannel(); 135 | api.put({ 136 | pack: { take: packChannel.take }, 137 | progress: { take: progressChannel.take }, 138 | error: { take: errorChannel.take }, 139 | }); 140 | return onMore(null, line); 141 | } 142 | var match = line.match(/^shallow ([0-9a-f]{40})$/); 143 | if (match) { 144 | refs.shallows.push(match[1]); 145 | return socket.take(onNak); 146 | } 147 | match = line.match(/^ACK ([0-9a-f]{40})$/); 148 | if (match) { 149 | return socket.take(onNak); 150 | } 151 | if (line === "NAK") { 152 | return socket.take(onNak); 153 | } 154 | throw new Error("Expected NAK, but got " + JSON.stringify(line)); 155 | } 156 | 157 | function onMore(line) { 158 | 159 | if (line === undefined) { 160 | packChannel.put(); 161 | progressChannel.put(); 162 | errorChannel.put(); 163 | return api.put(); 164 | } 165 | if (line === null) { 166 | api.put(line); 167 | } 168 | else { 169 | if (line.progress) { 170 | progressChannel.put(line.progress); 171 | } 172 | else if (line.error) { 173 | errorChannel.put(line.error); 174 | } 175 | else { 176 | if (!packChannel.put(line)) { 177 | return packChannel.drain(onReady); 178 | } 179 | } 180 | } 181 | socket.take(onMore); 182 | } 183 | 184 | function onReady() { 185 | socket.take(onMore); 186 | } 187 | 
188 | } 189 | 190 | var defer = require('js-git/lib/defer'); 191 | function throwIt(err) { 192 | defer(function () { 193 | throw err; 194 | }); 195 | // throw err; 196 | } 197 | -------------------------------------------------------------------------------- /net/request-xhr.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | module.exports = request; 4 | 5 | function request(method, url, headers, body, callback) { 6 | if (typeof body === "function") { 7 | callback = body; 8 | body = undefined; 9 | } 10 | if (!callback) { 11 | return request.bind(null, method, url, headers, body); 12 | } 13 | var xhr = new XMLHttpRequest(); 14 | xhr.open(method, url, true); 15 | xhr.responseType = "arraybuffer"; 16 | 17 | Object.keys(headers).forEach(function (name) { 18 | xhr.setRequestHeader(name, headers[name]); 19 | }); 20 | 21 | xhr.onreadystatechange = function () { 22 | if (xhr.readyState !== 4) return; 23 | var resHeaders = {}; 24 | xhr.getAllResponseHeaders().trim().split("\r\n").forEach(function (line) { 25 | var index = line.indexOf(":"); 26 | resHeaders[line.substring(0, index).toLowerCase()] = line.substring(index + 1).trim(); 27 | }); 28 | 29 | callback(null, { 30 | statusCode: xhr.status, 31 | headers: resHeaders, 32 | body: xhr.response && new Uint8Array(xhr.response) 33 | }); 34 | }; 35 | xhr.send(body); 36 | } 37 | -------------------------------------------------------------------------------- /net/tcp-chrome-sockets.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var makeChannel = require('culvert'); 4 | var wrapHandler = require('../lib/wrap-handler'); 5 | var tcp = window.chrome.sockets.tcp; 6 | var runtime = window.chrome.runtime; 7 | 8 | module.exports = connect; 9 | 10 | function connect(host, port, onError) { 11 | port = port|0; 12 | host = String(host); 13 | if (!port || !host) throw new TypeError("host and port are required"); 14 | 15 
| onCreate = wrap(onCreate, onError); 16 | onConnect = wrap(onConnect, onError); 17 | onInfo = wrap(onInfo, onError); 18 | onReceive = wrap(onReceive, onError); 19 | onReceiveError = wrap(onReceiveError, onError); 20 | onData = wrapHandler(onData, onError); 21 | onWrite = wrap(onWrite, onError); 22 | 23 | var paused = false; 24 | var open = false; 25 | var socketId; 26 | 27 | var serverChannel = makeChannel(); 28 | var clientChannel = makeChannel(); 29 | var socket = { 30 | put: serverChannel.put, 31 | drain: serverChannel.drain, 32 | take: clientChannel.take 33 | }; 34 | 35 | tcp.onReceive.addListener(onReceive); 36 | tcp.onReceiveError.addListener(onReceiveError); 37 | tcp.create(onCreate); 38 | 39 | return { 40 | put: clientChannel.put, 41 | drain: clientChannel.drain, 42 | take: serverChannel.take 43 | }; 44 | 45 | function onCreate(createInfo) { 46 | socketId = createInfo.socketId; 47 | tcp.connect(socketId, host, port, onConnect); 48 | } 49 | 50 | function onConnect(result) { 51 | if (result < 0) throw new Error(runtime.lastError.message + " Connection error"); 52 | tcp.getInfo(socketId, onInfo); 53 | } 54 | 55 | function onInfo(socketInfo) { 56 | if (!socketInfo.connected) { 57 | throw new Error("Connection failed"); 58 | } 59 | open = true; 60 | socket.take(onData); 61 | } 62 | 63 | function onReceive(info) { 64 | if (info.socketId !== socketId) return; 65 | if (socket.put(new Uint8Array(info.data)) || paused) return; 66 | paused = true; 67 | tcp.setPaused(socketId, true); 68 | socket.drain(onDrain); 69 | } 70 | 71 | function onDrain() { 72 | if (!paused) return; 73 | paused = false; 74 | if (open) tcp.setPaused(socketId, false); 75 | } 76 | 77 | function onReceiveError(info) { 78 | if (info.socketId !== socketId) return; 79 | open = false; 80 | tcp.close(socketId); 81 | socket.put(); 82 | // TODO: find a way to tell close and error apart. 
83 | // throw new Error("Code " + info.resultCode + " error while receiving."); 84 | } 85 | 86 | function onData(data) { 87 | tcp.send(socketId, data.buffer, onWrite); 88 | } 89 | 90 | function onWrite(info) { 91 | if (info.resultCode < 0) { 92 | throw new Error(runtime.lastError.message + " Error writing."); 93 | } 94 | socket.take(onData); 95 | } 96 | } 97 | 98 | 99 | function wrap(fn, onError) { 100 | return function () { 101 | try { 102 | return fn.apply(this, arguments); 103 | } 104 | catch (err) { 105 | onError(err); 106 | } 107 | }; 108 | } 109 | -------------------------------------------------------------------------------- /net/tcp-node.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var makeChannel = require('culvert'); 4 | var wrapHandler = require('../lib/wrap-handler'); 5 | var net = require('net'); 6 | 7 | module.exports = connect; 8 | 9 | function connect(host, port, onError) { 10 | port = port|0; 11 | host = String(host); 12 | if (!port || !host) throw new TypeError("host and port are required"); 13 | 14 | // Wrap event handlers from node stream 15 | onConnect = wrap(onConnect, onError); 16 | pump = wrap(pump, onError); 17 | onEnd = wrap(onEnd, onError); 18 | onDrain = wrap(onDrain, onError); 19 | 20 | // Wrap event handlers from culvert socket 21 | onTake = wrapHandler(onTake, onError); 22 | 23 | var serverChannel = makeChannel(); 24 | var clientChannel = makeChannel(); 25 | var socket = { 26 | put: serverChannel.put, 27 | drain: serverChannel.drain, 28 | take: clientChannel.take 29 | }; 30 | 31 | var client = net.connect({ host: host, port: port }, onConnect); 32 | if (onError) client.on("error", onError); 33 | 34 | return { 35 | put: clientChannel.put, 36 | drain: clientChannel.drain, 37 | take: serverChannel.take 38 | }; 39 | 40 | function onConnect() { 41 | socket.take(onTake); 42 | client.on("end", onEnd); 43 | client.on("readable", pump); 44 | client.on("drain", onDrain); 45 | 
client.on("error", onError); 46 | } 47 | 48 | function pump() { 49 | var chunk; 50 | do { 51 | chunk = client.read(); 52 | if (!chunk) return; 53 | } while (socket.put(chunk)); 54 | socket.drain(pump); 55 | } 56 | 57 | function onEnd() { 58 | socket.put(); 59 | } 60 | 61 | function onTake(data) { 62 | if (data === undefined) { 63 | client.end(); 64 | } 65 | else if (client.write(data)) { 66 | socket.take(onTake); 67 | } 68 | } 69 | 70 | function onDrain() { 71 | socket.take(onTake); 72 | } 73 | 74 | } 75 | 76 | function wrap(fn, onError) { 77 | return function () { 78 | try { 79 | return fn.apply(this, arguments); 80 | } 81 | catch (err) { 82 | onError(err); 83 | } 84 | }; 85 | } 86 | -------------------------------------------------------------------------------- /net/tcp-ws-proxy.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var makeChannel = require('culvert'); 4 | var wrapHandler = require('../lib/wrap-handler'); 5 | 6 | module.exports = function (proxyUrl) { 7 | if (proxyUrl[proxyUrl.length - 1] !== "/") proxyUrl += "/"; 8 | 9 | return function connect(host, port, onError) { 10 | port = port|0; 11 | host = String(host); 12 | if (!port || !host) throw new TypeError("host and port are required"); 13 | 14 | onData = wrapHandler(onData, onError); 15 | 16 | var serverChannel = makeChannel(); 17 | var clientChannel = makeChannel(); 18 | var socket = { 19 | put: serverChannel.put, 20 | drain: serverChannel.drain, 21 | take: clientChannel.take 22 | }; 23 | 24 | var connected = false; 25 | var ws = new WebSocket(proxyUrl + "tcp/" + host + "/" + port); 26 | ws.binaryType = "arraybuffer"; 27 | 28 | ws.onopen = wrap(onOpen, onError); 29 | ws.onclose = wrap(onClose, onError); 30 | ws.onmessage = wrap(onMessage, onError); 31 | ws.onerror = wrap(onWsError, onError); 32 | 33 | return { 34 | put: clientChannel.put, 35 | drain: clientChannel.drain, 36 | take: serverChannel.take 37 | }; 38 | 39 | function onOpen() 
{ 40 | ws.send("connect"); 41 | } 42 | 43 | function onClose() { 44 | socket.put(); 45 | } 46 | 47 | function onMessage(evt) { 48 | if (!connected && evt.data === "connect") { 49 | connected = true; 50 | socket.take(onData); 51 | return; 52 | } 53 | 54 | socket.put(new Uint8Array(evt.data)); 55 | } 56 | 57 | function onWsError() { 58 | console.error(arguments); 59 | throw new Error("Generic websocket error"); 60 | } 61 | 62 | function onData(chunk) { 63 | ws.send(chunk.buffer); 64 | socket.take(onData); 65 | } 66 | 67 | }; 68 | }; 69 | 70 | function wrap(fn, onError) { 71 | return function () { 72 | try { 73 | return fn.apply(this, arguments); 74 | } 75 | catch (err) { 76 | onError(err); 77 | } 78 | }; 79 | } 80 | -------------------------------------------------------------------------------- /net/transport-http.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var makeChannel = require('culvert'); 4 | var bodec = require('bodec'); 5 | var pktLine = require('../lib/pkt-line'); 6 | var wrapHandler = require('../lib/wrap-handler'); 7 | 8 | module.exports = function (request) { 9 | 10 | return function httpTransport(gitUrl, username, password) { 11 | // Send Auth header if username is set 12 | var auth; 13 | if (username) { 14 | auth = "Basic " + btoa(username + ":" + (password || "")); 15 | } 16 | 17 | return function (serviceName, onError) { 18 | 19 | // Wrap our handler functions to route errors properly. 20 | onResponse = wrapHandler(onResponse, onError); 21 | onWrite = wrapHandler(onWrite, onError); 22 | onResult = wrapHandler(onResult, onError); 23 | 24 | // Create a duplex channel with transform for internal use. 25 | var serverChannel = makeChannel();//0, "server"); 26 | var clientChannel = makeChannel();//0, "client"); 27 | var socket = { 28 | put: serverChannel.put, 29 | drain: serverChannel.drain, 30 | take: clientChannel.take 31 | }; 32 | 33 | // Send the initial request to start the connection. 
34 | var headers = {}; 35 | if (auth) headers.Authorization = auth; 36 | request("GET", gitUrl + "/info/refs?service=" + serviceName, headers, onResponse); 37 | 38 | // Prep for later requests 39 | var bodyParts = []; 40 | var bodyWrite = pktLine.framer(function (chunk) { 41 | bodyParts.push(chunk); 42 | }); 43 | headers["Content-Type"] = "application/x-" + serviceName + "-request"; 44 | socket.take(onWrite); 45 | 46 | var verified = 0; 47 | var parseResponse = pktLine.deframer(function (line) { 48 | if (verified === 2) { 49 | socket.put(line); 50 | } 51 | else if (verified === 0) { 52 | if (line !== "# service=" + serviceName) { 53 | throw new Error("Illegal service response"); 54 | } 55 | verified = 1; 56 | } 57 | else if (verified === 1) { 58 | if (line !== null) { 59 | throw new Error("Expected null after service name"); 60 | } 61 | verified = 2; 62 | } 63 | }); 64 | 65 | // Return the other half of the duplex channel for the protocol logic to use. 66 | return { 67 | put: clientChannel.put, 68 | drain: clientChannel.drain, 69 | take: serverChannel.take 70 | }; 71 | 72 | function onResponse(res) { 73 | if (res.statusCode !== 200) { 74 | throw new Error("Invalid response: " + res.statusCode); 75 | } 76 | if (res.headers["content-type"] !== "application/x-" + serviceName + "-advertisement") { 77 | throw new Error("Not a smart http git server"); 78 | } 79 | parseResponse(res.body); 80 | } 81 | 82 | function onWrite(item) { 83 | if (item === undefined) return socket.put(); 84 | bodyWrite(item); 85 | socket.take(onWrite); 86 | if (item !== "done\n" || !bodyParts.length) return; 87 | var body = bodec.join(bodyParts); 88 | bodyParts.length = 0; 89 | request("POST", gitUrl + "/" + serviceName, headers, body, onResult); 90 | } 91 | 92 | function onResult(res) { 93 | if (res.statusCode !== 200) { 94 | throw new Error("Invalid result: " + res.statusCode); 95 | } 96 | if (res.headers["content-type"] !== "application/x-" + serviceName + "-result") { 97 | throw new Error("Not 
a smart http git server"); 98 | } 99 | parseResponse(res.body); 100 | } 101 | }; 102 | }; 103 | }; 104 | -------------------------------------------------------------------------------- /net/transport-tcp.js: -------------------------------------------------------------------------------- 1 | "use strict"; 2 | 3 | var makeChannel = require('culvert'); 4 | var bodec = require('bodec'); 5 | var pktLine = require('../lib/pkt-line'); 6 | var wrapHandler = require('../lib/wrap-handler'); 7 | 8 | module.exports = function (connect) { 9 | 10 | return function tcpTransport(path, host, port) { 11 | port = (port|0) || 9418; 12 | if (!path || !host) throw new Error("path and host are required"); 13 | 14 | return function (serviceName, onError) { 15 | 16 | onData = wrapHandler(onData, onError); 17 | onDrain = wrapHandler(onDrain, onError); 18 | 19 | var socket = connect(host, port, onError); 20 | var inter = makeChannel(); 21 | inter.put = pktLine.deframer(inter.put); 22 | 23 | socket.put = pktLine.framer(socket.put); 24 | var greeting = bodec.fromRaw(serviceName + " " + path + "\0host=" + host + "\0"); 25 | socket.put(greeting); 26 | 27 | // Pipe socket to inter with backpressure 28 | socket.take(onData); 29 | function onData(chunk) { 30 | if (inter.put(chunk)) { 31 | socket.take(onData); 32 | } 33 | else { 34 | inter.drain(onDrain); 35 | } 36 | } 37 | function onDrain() { 38 | socket.take(onData); 39 | } 40 | 41 | return { 42 | put: socket.put, 43 | drain: socket.drain, 44 | take: inter.take 45 | }; 46 | }; 47 | }; 48 | }; 49 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "js-git", 3 | "version": "0.7.8", 4 | "description": "Git Implemented in JavaScript", 5 | "keywords": [ 6 | "git", 7 | "js-git" 8 | ], 9 | "repository": { 10 | "type": "git", 11 | "url": "git://github.com/creationix/js-git.git" 12 | }, 13 | "author": "Tim 
Caswell ", 14 | "license": "MIT", 15 | "bugs": { 16 | "url": "https://github.com/creationix/js-git/issues" 17 | }, 18 | "scripts": { 19 | "test": "ls test/test-* | xargs -n1 node" 20 | }, 21 | "dependencies": { 22 | "bodec": "^0.1.0", 23 | "culvert": "^0.1.2", 24 | "git-sha1": "^0.1.2", 25 | "pako": "^0.2.5" 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /test/run.js: -------------------------------------------------------------------------------- 1 | // Ultra simple test runner with TAP output. 2 | 3 | var inspect = require('util').inspect; 4 | var defer = require('../lib/defer.js'); 5 | var log = console.log; 6 | console.log = function () { 7 | var args = [].slice.call(arguments).map(function (arg) { 8 | return inspect(arg, {colors:true}); 9 | }); 10 | log(args.join(" ").split("\n").map(function (line) { 11 | return "# " + line; 12 | }).join("\n")); 13 | }; 14 | 15 | module.exports = function (tests) { 16 | var timeout; 17 | var test; 18 | var index = 0; 19 | log("1.." 
+ (tests.length)); 20 | next(); 21 | function next(err) { 22 | if (timeout) clearTimeout(timeout); 23 | if (index) { 24 | if (err) { 25 | log(err.stack.split("\n").map(function (line) { 26 | return "# " + line; 27 | }).join("\n")); 28 | log("not ok " + index + " - " + test.name); 29 | } 30 | else { 31 | log("ok " + index + " - " + test.name); 32 | } 33 | } 34 | test = tests[index++]; 35 | if (!test) return; 36 | timeout = setTimeout(onTimeout, 1000); 37 | try { 38 | if (test.length) test(next); 39 | else test(); 40 | } 41 | catch (err) { return next(err); } 42 | if (!test.length) defer(next); 43 | } 44 | 45 | function onTimeout() { 46 | next(new Error("Test timeout")); 47 | } 48 | }; -------------------------------------------------------------------------------- /test/sample-pack.js: -------------------------------------------------------------------------------- 1 | var bodec = require('bodec'); 2 | 3 | // This is a small sample packfile with couple offset deltas 4 | // pack-5851ce932ec42973b51d631afe25da247c3dc49a.pack 5 | module.exports = 
bodec.fromBase64('UEFDSwAAAAIAAAAQnQ54nJ3MWwoCMQxA0f+uIhtQ0nYeKYgobsENZNoEC/OQMTK6e2cN/l4411YRYCo5kseITVLpSmAfOVLSnFJB6kJqSukDuSevMhu0moed9CmrKjKFwpIxtT7TINh2vSqReHX8tseywr1OcOPXJuMIJ6vTJa/CVpe5fo55mc7gY2p86LFBOGCH6PY6VTP5x7prKfAVA54Xe+yLWTbQOor7AZUCSPmRDnicnctRCgIhEADQf08xFyjGUVeFiKIrdAEdZ0lYd8OM7fh1hn4fvNFFQEi8JCcuCoWSmakwY8xoHGMxkdgimZjVM3VZB8wUPMUJLWrRPml0IdspuJl1JHJBSijGRlLpPR5bh3ttcEuvXZYFTqO2C3dJo25r/Rx5a2fQJlpNHgnhgBOi+mmrY8g/V11LgVV2mOsi6guDiEL9mA94nJ3PTWrDMBBA4b1OMRdosDT6hRIKvkIuIMkjd6htGXVCkts3Z+j2wbd4MohA+5Cai874uiQXQmuIjagsAWMp3rWS0WCM6syDDgGbDCXEmhz5Zl00iayv2mpyHk2xVLVZlhJUvst3H3DjHeb8+6Btg0/h/asOysL94Oel9v0KGpPVxjtE+Jj8NKl33VmE/mPV3M8XrO8x4WOFkusPSIc+eOUjb9B4I/UHHmNMM5QOeJydy1sKwjAQRuH3rGI2oGQmlzYgIrgDcQNp8hcDTSsxostXt+B5/OD0BpAzMJmzJJs4J5Fh5OiCsB3nMFvoOAakkaHusWHtpJm1y9YYb4KXSawgR/GY9MQ+8OB/TZhVfPbb1uhaKp3j44VloUMv9ZQaYi/bWt77tNUjsQmWxTttaae91uqrtfSOf151wRorqN9Ac1mgPgYNRBeSDnicncvdCcIwEADg90xxCyiXn6YGRBRXcIG75IKBppX2xI6vM/j6waerCGAozFyCiA2Jx+QJh5Rd8l5cHUiSdcVTzeZFq8wKY5TkamYsIWO1Xkau8VRdHNhF5BLJsUWqht76XFZ4tA532j4yTXDW1q95FdK2zG0/5qVfwPoUrIshWThgRDQ/7U1V/rnmVgpsSxdQ2dV8AbwRRT6TC3icnczNCQIxEEDhe6qYBpT8JwuyCHvybgOTmOBAsoE4ouUrluD1wfd4lgLexpqjL9G5kG6YUtY56ohqccbVaEzQoaLXAp98HxOu1GHDx6u0Biemfs6zINPY6X3Mo6+gzGKV9jYEOEgvpfjWTszlHysuOzFhg+03ER9fQDcKqQl4nDM0MDAzMVFIL0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcMoSoy60kVmVeajlYifjVm28/SzW0d12ZKCB++trFC8ZKOxBKjMBqauylWlkm6kbyCrH0Gp01vHQ9NnMNAFftOrq1AXic80jNyclXCM8vyknhckxJUSjOz03lAgBQjAcOPXicS8zLL8lILVJIy8xJ5QIAI9cEvLEBeJyrTC1RSMzLL8lILVJIy8xJ5QIAOsAGLmWAPnicm8lYOqEUAAX6AhVkEHicKw2aEAQABEABqqoCeJwzNDAwMzFRyK1ML0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcAgAxUhBDqAJ4nDM0MDAzMVFIL0pNLcnMS9crqShhEHwQ5TRdT6bE+tY/8blzjRyr9lYcAgAPuQ9dqAJ4nDM0MDAzMVFIL0pNLcnMS9crqShhCK3dYPty+oksL6Y+ub1WMq+Voh9ZAAAZvA8xPHic80jNyclXCM8vyknhAgAcMgQnuZAj3ZpSLQckQi9VfpQYWt+hefM='); 6 | -------------------------------------------------------------------------------- /test/test-config-codec.js: 
-------------------------------------------------------------------------------- 1 | var run = require('./run.js'); 2 | 3 | // The thing we mean to test. 4 | var codec = require('../lib/config-codec.js'); 5 | 6 | var sample = '\ 7 | [user]\n\ 8 | \tname = Tim Caswell\n\ 9 | \temail = tim@creationix.com\n\ 10 | [core]\n\ 11 | \teditor = vim\n\ 12 | \twhitespace = fix,-indent-with-non-tab,trailing-space,cr-at-eol\n\ 13 | [web]\n\ 14 | \tbrowser = google-chrome\n\ 15 | [color]\n\ 16 | \tui = true\n\ 17 | [color "branch"]\n\ 18 | \tcurrent = yellow bold\n\ 19 | \tlocal = green bold\n\ 20 | \tremote = cyan bold\n\ 21 | [color "diff"]\n\ 22 | \tmeta = yellow bold\n\ 23 | \tfrag = magenta bold\n\ 24 | \told = red bold\n\ 25 | \tnew = green bold\n\ 26 | \twhitespace = red reverse\n\ 27 | [github]\n\ 28 | \tuser = creationix\n\ 29 | \ttoken = token'; 30 | 31 | var config; 32 | 33 | run([ 34 | function testDecode() { 35 | config = codec.decode(sample); 36 | if (config.user.name !== "Tim Caswell") { 37 | throw new Error("Failed to parse user.name"); 38 | } 39 | if (config.color.ui != "true") { 40 | throw new Error("Failed to parse color.ui"); 41 | } 42 | if (config.color.diff.meta !== "yellow bold") { 43 | throw new Error("Failed to parse color.diff.meta"); 44 | } 45 | }, 46 | function testEncode() { 47 | var encoded = codec.encode(config); 48 | var config2 = codec.decode(encoded); 49 | if (JSON.stringify(config) !== JSON.stringify(config2)) { 50 | console.log(config); 51 | console.log(encoded); 52 | console.log(config2); 53 | throw new Error("Encode failed"); 54 | } 55 | }, 56 | function testEncode2() { 57 | var encoded = codec.encode({ 58 | foo: { 59 | bar: { 60 | baz: true 61 | } 62 | } 63 | }); 64 | if (encoded !== '[foo "bar"]\n\tbaz = true\n') { 65 | console.log(encoded); 66 | throw new Error("Invalid encoding of single deep config"); 67 | } 68 | } 69 | ]); 70 | -------------------------------------------------------------------------------- /test/test-mem-db.js: 
-------------------------------------------------------------------------------- 1 | var run = require('./run.js'); 2 | var bodec = require('bodec'); 3 | var sha1 = require('git-sha1'); 4 | var codec = require('../lib/object-codec.js'); 5 | 6 | var repo = {}; 7 | require('../mixins/mem-db.js')(repo); 8 | 9 | var blob = bodec.fromUnicode("Hello World\n"); 10 | var blobHash = "557db03de997c86a4a028e1ebd3a1ceb225be238"; 11 | run([ 12 | function testSaveAs(end) { 13 | repo.saveAs("blob", blob, function (err, hash) { 14 | if (err) return end(err); 15 | if (hash !== blobHash) { 16 | console.log([hash, blobHash]); 17 | return end(new Error("Hash mismatch")); 18 | } 19 | end(); 20 | }); 21 | }, 22 | function testLoadRaw(end) { 23 | repo.loadRaw(blobHash, function (err, bin) { 24 | if (err) return end(err); 25 | var obj = codec.deframe(bin, true); 26 | if (obj.type !== "blob") return err(new Error("Wrong type")); 27 | if (bodec.toUnicode(obj.body) !== bodec.toUnicode(blob)) { 28 | return err(new Error("Wrong body")); 29 | } 30 | end(); 31 | }); 32 | }, 33 | function testLoadAs(end) { 34 | repo.loadAs("blob", blobHash, function (err, body) { 35 | if (err) return end(err); 36 | if (bodec.toUnicode(body) !== bodec.toUnicode(blob)) { 37 | return err(new Error("Wrong body")); 38 | } 39 | end(); 40 | }); 41 | }, 42 | function testSaveRaw(end) { 43 | var newBody = bodec.fromUnicode("A new body\n"); 44 | var bin = codec.frame({type:"blob",body:newBody}); 45 | var hash = sha1(bin); 46 | repo.saveRaw(hash, bin, function (err) { 47 | if (err) return end(err); 48 | repo.loadAs("blob", hash, function (err, body) { 49 | if (err) return end(err); 50 | if (bodec.toUnicode(body) !== bodec.toUnicode(newBody)) { 51 | return end(new Error("Body mismatch")); 52 | } 53 | end(); 54 | }); 55 | }); 56 | } 57 | ]); 58 | -------------------------------------------------------------------------------- /test/test-object-codec.js: 
-------------------------------------------------------------------------------- 1 | var modes = require('../lib/modes.js'); 2 | var bodec = require('bodec'); 3 | var sha1 = require('git-sha1'); 4 | var run = require('./run.js'); 5 | 6 | // The thing we mean to test. 7 | var codec = require('../lib/object-codec.js'); 8 | 9 | var blobHash, treeHash, commitHash, tagHash; 10 | var blob, tree, commit, tag; 11 | var blobBin, treeBin, commitBin, tagBin; 12 | 13 | run([ 14 | function testEncodeBlob() { 15 | blob = bodec.fromUnicode("Hello World\n"); 16 | blobBin = codec.frame({type: "blob", body: blob}); 17 | blobHash = sha1(blobBin); 18 | if (blobHash !== '557db03de997c86a4a028e1ebd3a1ceb225be238') { 19 | throw new Error("Invalid blob hash"); 20 | } 21 | }, 22 | function testEncodeBlobInvalidType() { 23 | try { 24 | codec.frame({type: "blob", body: "Not a binary value"}); 25 | } 26 | catch (err) { 27 | return; 28 | } 29 | throw new Error("Expected an error when passin in a non-binary blob"); 30 | }, 31 | function testEncodeTree() { 32 | tree = { 33 | "greeting.txt": { 34 | mode: modes.file, 35 | hash: blobHash 36 | } 37 | }; 38 | treeBin = codec.frame({type: "tree", body: tree}); 39 | treeHash = sha1(treeBin); 40 | if (treeHash !== "648fc86e8557bdabbc2c828a19535f833727fa62") { 41 | throw new Error("Invalid tree hash"); 42 | } 43 | }, 44 | function testTreeSort() { 45 | var tree = { 46 | "README.md": {"mode":modes.blob,"hash":"42bd87a816800cb87646e95b71273983a71a26dc"}, 47 | "a.js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"}, 48 | "a-js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"}, 49 | "b": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"}, 50 | "b-js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"}, 51 | "c": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"}, 52 | "c.js": {"mode":modes.blob,"hash":"e69de29bb2d1d6434b8b29ae775ad8c2e48c5391"}, 53 | "a": 
{"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"}, 54 | "b.js": {"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"}, 55 | "c-js": {"mode":modes.tree,"hash":"496d6428b9cf92981dc9495211e6e1120fb6f2ba"}, 56 | }; 57 | var treeBin = codec.frame({type: "tree", body: tree}); 58 | var treeHash = sha1(treeBin); 59 | if (treeHash !== "f78893bf52bc695f343372d4210c8c0803c7c4db") { 60 | throw new Error("Invalid tree hash"); 61 | } 62 | }, 63 | function testEncodeCommit() { 64 | var person = { 65 | name: "Tim Caswell", 66 | email: "tim@creationix.com", 67 | date: { 68 | seconds: 1391790884, 69 | offset: 7 * 60 70 | } 71 | }; 72 | commit = { 73 | tree: treeHash, 74 | author: person, 75 | committer: person, 76 | message: "Test Commit\n", 77 | parents: [] 78 | }; 79 | commitBin = codec.frame({type: "commit", body: commit}); 80 | commitHash = sha1(commitBin); 81 | if (commitHash !== "500c37fc17988b90c82d812a2d6fc25b15354bf2") { 82 | throw new Error("Invalid commit hash"); 83 | } 84 | }, 85 | function testEncodeTag() { 86 | tag = { 87 | object: commitHash, 88 | type: "commit", 89 | tag: "mytag", 90 | tagger: { 91 | name: "Tim Caswell", 92 | email: "tim@creationix.com", 93 | date: { 94 | seconds: 1391790910, 95 | offset: 7 * 60 96 | } 97 | }, 98 | message: "Tag it!\n" 99 | }; 100 | tagBin = codec.frame({type: "tag", body: tag}); 101 | tagHash = sha1(tagBin); 102 | if (tagHash !== "49522787662a0183652dc9cafa5c008b5a0e0c2a") { 103 | throw new Error("Invalid tag hash"); 104 | } 105 | }, 106 | function testDecodeTag() { 107 | var obj = codec.deframe(tagBin, true); 108 | if (obj.type !== "tag") throw new Error("Invalid type"); 109 | if (!(obj.body.object === tag.object && obj.body.message === tag.message)) { 110 | throw new Error("Problem decoding"); 111 | } 112 | }, 113 | function testDecodeCommit() { 114 | var obj = codec.deframe(commitBin, true); 115 | if (obj.type !== "commit") throw new Error("Invalid type"); 116 | if (!(obj.body.tree === 
commit.tree && 117 | obj.body.message === commit.message && 118 | obj.body.author.date.seconds === commit.author.date.seconds)) { 119 | throw new Error("Problem decoding"); 120 | } 121 | }, 122 | function testDecodeTree() { 123 | var obj = codec.deframe(treeBin, true); 124 | if (obj.type !== "tree") throw new Error("Invalid type"); 125 | if (obj.body["greeting.txt"].hash !== tree["greeting.txt"].hash) { 126 | throw new Error("Problem decoding"); 127 | } 128 | }, 129 | function testDecodeBlob() { 130 | var obj = codec.deframe(blobBin, true); 131 | if (obj.type !== "blob") throw new Error("Invalid type"); 132 | if (bodec.toUnicode(obj.body) !== bodec.toUnicode(blob)) { 133 | throw new Error("Problem decoding"); 134 | } 135 | }, 136 | function testUnicodeFilePath() { 137 | var name = "æðelen"; 138 | var tree = {}; 139 | tree[name] = { 140 | mode: modes.file, 141 | hash: blobHash 142 | }; 143 | var bin = codec.frame({type:"tree", body: tree}); 144 | var obj = codec.deframe(bin, true); 145 | var newName = Object.keys(obj.body)[0]; 146 | if (newName !== name) { 147 | console.log(newName + " != " + name); 148 | throw new Error("Problem storing and retrieving utf8 paths"); 149 | } 150 | if (obj.body[name].hash !== tree[name].hash) { 151 | throw new Error("Problem decoding hash hex"); 152 | } 153 | }, 154 | function testUnicodeCommit() { 155 | var person = { 156 | name: "Laȝamon", 157 | email: "laȝamon@chronicles-of-england.org", 158 | date: { 159 | seconds: 1391790910, 160 | offset: 7 * 60 161 | } 162 | }; 163 | var commit = { 164 | tree: treeHash, 165 | author: person, 166 | committer: person, 167 | message: "An preost wes on leoden, Laȝamon was ihoten\nHe wes Leovenaðes sone -- liðe him be Drihten\n", 168 | parents: [] 169 | }; 170 | var bin = codec.frame({type:"commit", body:commit}); 171 | var obj = codec.deframe(bin, true); 172 | if (commit.author.name !== obj.body.author.name || 173 | commit.author.email !== obj.body.author.email || 174 | commit.message !== 
obj.body.message) { 175 | console.log([obj.body.author, obj.body.message]); 176 | throw new Error("Problem decoding utf8 parts in commit"); 177 | } 178 | }, 179 | function testUnicodeTag() { 180 | var tag = { 181 | object: commitHash, 182 | type: "commit", 183 | tag: "Laȝamon", 184 | tagger: { 185 | name: "Laȝamon", 186 | email: "laȝamon@chronicles-of-england.org", 187 | date: { 188 | seconds: 1391790910, 189 | offset: 7 * 60 190 | } 191 | }, 192 | message: "He wonede at Ernleȝe at æðelen are chirechen,\nUppen Sevarne staþe, sel þar him þuhte,\nOnfest Radestone, þer he bock radde.\n" 193 | }; 194 | var bin = codec.frame({type:"tag", body:tag}); 195 | var obj = codec.deframe(bin, true); 196 | if (tag.tagger.name !== obj.body.tagger.name || 197 | tag.tagger.email !== obj.body.tagger.email || 198 | tag.message !== obj.body.message) { 199 | console.log([obj.body.tagger, obj.body.message]); 200 | throw new Error("Problem decoding utf8 parts in tag"); 201 | } 202 | }, 203 | function testBinaryBlob() { 204 | var blob = bodec.create(256); 205 | for (var i = 0; i < 256; i++) { blob[i] = i; } 206 | var bin = codec.frame({type:"blob",body:blob}); 207 | var obj = codec.deframe(bin, true); 208 | if (bodec.toRaw(blob) !== bodec.toRaw(obj.body)) { 209 | throw new Error("Problem decoding binary blob"); 210 | } 211 | } 212 | ]); 213 | -------------------------------------------------------------------------------- /test/test-pack-codec.js: -------------------------------------------------------------------------------- 1 | var bodec = require('bodec'); 2 | var run = require('./run.js'); 3 | var decoders = require('../lib/object-codec.js').decoders; 4 | var encoders = require('../lib/object-codec.js').encoders; 5 | 6 | // The thing we mean to test. 
7 | var codec = require('../lib/pack-codec.js'); 8 | 9 | var pack = require('./sample-pack.js'); 10 | var items = []; 11 | var newPack; 12 | 13 | function unpackStream(stream) { 14 | var meta, out = [], finished = false; 15 | var write = codec.decodePack(onItem); 16 | for (var i = 0, l = stream.length; i < l; i += 128) { 17 | var slice = bodec.slice(stream, i, i + 128); 18 | try { 19 | // console.log("SLICE", slice); 20 | write(slice); 21 | } 22 | catch (err) { 23 | throw err; 24 | } 25 | } 26 | write(); 27 | 28 | function onItem(item) { 29 | // console.log("UNPACK", item); 30 | if (item === undefined) { 31 | finished = true; 32 | } 33 | else if (!meta) { 34 | meta = item; 35 | } 36 | else { 37 | out.push(item); 38 | } 39 | } 40 | if (!finished) throw new Error("unpack stream didn't finish"); 41 | if (out.length !== meta.num) throw new Error("Item num mismatch"); 42 | return out; 43 | } 44 | 45 | 46 | run([ 47 | function testDecodePack() { 48 | var counts = {}; 49 | items = unpackStream(pack).map(function (item) { 50 | counts[item.type] = counts[item.type] || 0; 51 | counts[item.type]++; 52 | if (item.type === "tree" || item.type === "tag" || item.type === "commit") { 53 | item.body = decoders[item.type](item.body); 54 | } 55 | return item; 56 | }); 57 | if (counts.commit !== 6) throw new Error("Wrong number of commits parsed"); 58 | if (counts.tree !== 4) throw new Error("Wrong number of trees parsed"); 59 | if (counts.blob !== 4) throw new Error("Wrong number of blobs parsed"); 60 | if (counts['ofs-delta'] !== 2) throw new Error("Wrong number of offset deltas parsed"); 61 | }, 62 | function testEncodePack() { 63 | var done = false; 64 | var outs = []; 65 | 66 | var write = codec.encodePack(function (item) { 67 | if (item === undefined) { 68 | done = true; 69 | return; 70 | } 71 | if (!bodec.isBinary(item)) throw new Error("encode output must be buffers"); 72 | outs.push(item); 73 | }); 74 | write({num:items.length}); 75 | items.forEach(function (item) { 76 | if 
(!bodec.isBinary(item.body)) { 77 | item.body = encoders[item.type](item.body); 78 | } 79 | write(item); 80 | }); 81 | write(); 82 | 83 | if (!done) throw new Error("Output stream never ended"); 84 | 85 | newPack = bodec.join(outs); 86 | }, 87 | function verifyEncodePack() { 88 | try { 89 | unpackStream(newPack); 90 | if (bodec.toHex(pack) !== bodec.toHex(newPack)) { 91 | throw new Error("Final pack doesn't match original."); 92 | } 93 | } 94 | catch (err) { 95 | console.log(bodec.toHex(pack)); 96 | console.log(bodec.toHex(newPack)); 97 | throw err; 98 | } 99 | } 100 | ]); 101 | -------------------------------------------------------------------------------- /test/test-pack-ops.js: -------------------------------------------------------------------------------- 1 | var run = require('./run.js'); 2 | 3 | var repo = {}; 4 | require('../mixins/mem-db.js')(repo); 5 | 6 | var pack = require('./sample-pack.js'); 7 | var hashes; 8 | 9 | run([ 10 | function setup() { 11 | require('../mixins/pack-ops.js')(repo); 12 | }, 13 | function testUnpack(end) { 14 | repo.unpack(singleStream(pack), { 15 | onProgress: onProgress 16 | }, function (err, result) { 17 | if (err) return end(err); 18 | hashes = result; 19 | if (hashes.length !== 16) { 20 | return end(new Error("Wrong number of objects unpacked")); 21 | } 22 | end(); 23 | }); 24 | function onProgress(progress) { 25 | // console.log(progress); 26 | } 27 | }, 28 | function testPack(end) { 29 | var stream; 30 | var parts = []; 31 | repo.pack(hashes, {}, function (err, result) { 32 | if (err) return end(err); 33 | stream = result; 34 | stream.take(onRead); 35 | }); 36 | function onRead(err, chunk) { 37 | if (err) return end(err); 38 | // console.log(chunk); 39 | if (chunk) { 40 | parts.push(chunk); 41 | return stream.take(onRead); 42 | } 43 | end(); 44 | } 45 | } 46 | ]); 47 | 48 | function singleStream(item) { 49 | var done = false; 50 | return { take: function (callback) { 51 | if (done) return callback(); 52 | done = true; 53 
| callback(null, item); 54 | }}; 55 | } -------------------------------------------------------------------------------- /test/test-zlib.js: -------------------------------------------------------------------------------- 1 | var run = require('./run.js'); 2 | var bodec = require('bodec'); 3 | 4 | // The thing we mean to test. 5 | var inflate = require('../lib/inflate.js'); 6 | var deflate = require('../lib/deflate.js'); 7 | var inflateStream = require('../lib/inflate-stream.js'); 8 | 9 | var bin = bodec.create(1024); 10 | for (var i = 0; i < 1024; i++) { 11 | bin[i] = i >> 2 | i % 4 & 0x7f; 12 | } 13 | 14 | run([ 15 | function testRoundTrip() { 16 | var deflated = deflate(bin); 17 | if (!bodec.isBinary(deflated)) { 18 | throw new Error("deflate output should be native binary"); 19 | } 20 | var inflated = inflate(deflated); 21 | if (!bodec.isBinary(inflated)) { 22 | throw new Error("inflate output should be native binary"); 23 | } 24 | if (bodec.toRaw(bin) !== bodec.toRaw(inflated)) { 25 | console.log([bin, inflated]); 26 | throw new Error("Problem with roundtrip"); 27 | } 28 | }, 29 | function testStream() { 30 | var done = false; 31 | var chunks = []; 32 | var deflated = deflate(bin); 33 | var inf = inflateStream(); 34 | 35 | for (var i = 0, l = deflated.length; i < l; ++i) { 36 | inf.write(deflated[i]); 37 | } 38 | var inflated = inf.flush(); 39 | if (bodec.toRaw(bin) !== bodec.toRaw(inflated)) { 40 | console.log([bin.length, inflated.length]); 41 | throw new Error("Problem with roundtrip"); 42 | } 43 | } 44 | ]); 45 | --------------------------------------------------------------------------------