├── .github ├── FUNDING.yml └── issue_template.md ├── .gitignore ├── .travis.yml ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── LICENSE ├── README.md ├── appveyor.yml ├── bin └── cli.js ├── changelog.md ├── download.sh ├── index.js ├── package-lock.json ├── package.json ├── package.sh ├── scripts └── auth-server.js ├── snap └── snapcraft.yaml ├── src ├── commands │ ├── auth │ │ ├── login.js │ │ ├── logout.js │ │ ├── register.js │ │ └── whoami.js │ ├── clone.js │ ├── create.js │ ├── doctor.js │ ├── keys.js │ ├── log.js │ ├── publish.js │ ├── pull.js │ ├── status.js │ ├── sync.js │ └── unpublish.js ├── extensions.js ├── lib │ ├── archive.js │ ├── discovery-exit.js │ ├── download.js │ ├── exit.js │ ├── import-progress.js │ ├── network.js │ ├── selective-sync.js │ ├── serve-http.js │ └── stats.js ├── parse-args.js ├── registry.js ├── ui │ ├── archive.js │ ├── components │ │ ├── download.js │ │ ├── import-progress.js │ │ ├── network.js │ │ ├── sources.js │ │ └── warnings.js │ ├── create.js │ ├── elements │ │ ├── key.js │ │ ├── pluralize.js │ │ └── version.js │ └── status.js └── usage.js └── test ├── auth.js ├── clone.js ├── create.js ├── dat-node.js ├── doctor.js ├── fixtures ├── all_hour.csv └── folder │ └── nested │ └── hello.txt ├── helpers ├── auth-server.js ├── index.js └── spawn.js ├── http.js ├── keys.js ├── pull.js ├── share.js ├── sync-owner.js ├── sync-remote.js └── usage.js /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | open_collective: dat 2 | -------------------------------------------------------------------------------- /.github/issue_template.md: -------------------------------------------------------------------------------- 1 | 2 | 9 | 10 | I am reporting: 11 | 12 | 20 | 21 | 26 | 27 | # Bug Report 28 | 29 | Please give us details about your installation to assist you. Run `dat -v` to see the version of Dat you are using. 
30 | 31 | * Operating system: 32 | * Node Version: 33 | * Dat Version: 34 | 35 | ### Expected behavior 36 | 37 | 38 | 39 | ### Actual behavior 40 | 41 | 42 | 43 | ### Debug Logs 44 | 45 | 46 | 47 | ``` 48 | ``` 49 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | .DS_Store 3 | tmp 4 | .idea 5 | data 6 | yarn.lock 7 | test/fixtures/.dat 8 | test/fixtures/dat.json 9 | test/**.db 10 | test/.datrc-test 11 | dist 12 | builds -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: node_js 2 | 3 | node_js: 4 | - 'lts/*' 5 | - '12' 6 | - 'node' 7 | sudo: false 8 | 9 | script: 10 | - npm test 11 | 12 | notifications: 13 | irc: 14 | channels: 15 | - chat.freenode.net#datbots 16 | template: 17 | - '%{repository_slug} - %{commit_subject} - %{result} - %{build_url}' 18 | skip_join: true 19 | on_success: change 20 | on_failure: always 21 | 22 | before_deploy: npm run package 23 | deploy: 24 | provider: releases 25 | api_key: 26 | secure: GF+Ehh9kDu2m+KqSzciZRQmUfubnVGDEfxZKVX+psesKoxxDSq8/wkl7g1yR2H8DO0dg3lW8opbsKbfOOUWztyIfFxFukgwKIawUd7Krtr4XQLyywq49NdYARKP6bSxeEb8N3xVTo5fuq104KT0mMUB9di/iunsO/ITOzbCZyWE= 27 | skip_cleanup: true 28 | file_glob: true 29 | file: dist/* 30 | on: 31 | repo: datproject/dat 32 | node: '12' 33 | tags: true 34 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | Our code of conduct is under review in [this repo](https://github.com/datproject/Code-of-Conduct) - please check it out and give us feedback! 
2 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Welcome to Dat! 2 | 3 | Please take a second to read over this before opening an issue. Providing complete information upfront will help us address any issue (and ship new features!) faster. Our time is limited, please respect it and create complete issues. 4 | 5 | We are available to chat in IRC at #dat. You can join [via Gitter](https://gitter.im/datproject/discussions). You can also [view and search](https://botbot.me/freenode/dat/) chat logs. 6 | 7 | We have a [faq](https://docs.datproject.org/faq) section on our docs that may address non-bug questions. 8 | 9 | ## Opening an Issue 10 | 11 | Please read this section before opening a new issue. 12 | 13 | `dat` is composed of many modules and this repository is just one of them. If you know your issue is with another module, we prefer you open it in that repository. There may be an exiting issue in another repository. If you aren't sure, then this is the perfect place. 14 | 15 | Any new issues should be *actionable*. If your issue cannot be solved (and thus closed) please reconsider opening it in our discussion repository or rewording it. 16 | 17 | ### Bug Reports 18 | 19 | A perfect bug report would have the following: 20 | 21 | 1. Summary of the issue you are experiencing. 22 | 2. Details on what versions of node and dat you have (`node -v` and `dat -v`). 23 | 3. A simple repeatable test case for us to run. Please try to run through it 2-3 times to ensure it is completely repeatable. 24 | 25 | We would like to avoid issues that require a follow up questions to identify the bug. These follow ups are difficult to do unless we have a repeatable test case. 26 | 27 | We can't all be perfect =). Do as much as you can and we'll try to help you with the rest. If we are slow to respond, a more complete bug report will help. 
28 | 29 | ### Feature Requests 30 | 31 | Feature requests are more than welcome. Please search exiting issues (open and closed) to make sure it is not a duplicate. A good feature request would have examples of how to use it, some detailed use cases, and any concerns or possible edge cases. 32 | 33 | Keep in mind we have specific use cases we are building for (namely scientific data sharing). If, your feature request does not fit within that use case it may not be prioritized or implemented. 34 | 35 | ### General Discussion Issues 36 | 37 | We prefer to be able to close issues in this repository, which does not lend itself to discussion type questions. Open discussion type issue in the [datproject/discussions](https://github.com/datproject/discussions/issues) repository. 38 | 39 | ## For Developers 40 | 41 | Please read these guidelines if you are interested in contributing to Dat. 42 | 43 | ### Submitting pull requests 44 | 45 | Before taking the time to code something, feel free to open an issue first proposing your idea to other contributors, that way you can get feedback on the idea before taking time to write precious code. 46 | 47 | For any new functionality we like to see: 48 | 49 | * unit tests so that we can improve long term maintenance and catch regressions in the future 50 | * updates to the [change log](http://keepachangelog.com/) and relevant documentation 51 | 52 | ### For Collaborators 53 | 54 | Make sure to get a `:thumbsup:`, `+1` or `LGTM` from another collaborator before merging a PR. If you aren't sure if a release should happen, open an issue. 55 | 56 | Release process: 57 | 58 | - make sure the tests pass 59 | - Update changelog 60 | - `npm version ` 61 | - `git push && git push --tags` 62 | - `npm publish` 63 | 64 | ### Development workflow 65 | 66 | We use and write a lot of node modules and it introduces a bit of a learning curve when working on multiple modules simultaneously. 
There are lots of different and valid solutions to working on lots of modules at once, this is just one way. 67 | 68 | #### Developing inside a node_modules folder 69 | 70 | First make sure you are comfortable with [how require works](https://github.com/maxogden/art-of-node#how-require-works) in node. 71 | 72 | We recommend creating a folder somewhere manually called `node_modules`. For example in `~/code/node_modules`. Clone all of your git copies of modules that you want to work on into here, so for example: 73 | 74 | - `~/code/node_modules/dat` 75 | - `~/code/node_modules/hyperdrive` 76 | 77 | When you run `npm install` inside of `~/code/node_modules/dat`, dat will get its own copy of `hyperdrive` (one if its dependencies) inside `~/code/node_modules/dat/node_modules`. However, if you encounter a bug in hyperdrive that you need to fix, but you want to test your fix in dat, you want dat to use your git copy of hyperdrive at `~/code/node_modules/hyperdrive` and not the npm copy of hyperdrive at `~/code/node_modules/dat/node_modules/hyperdrive`. 78 | 79 | How do you get dat to use the git copy of hyperdrive? Just delete the npm copy! 80 | 81 | ``` 82 | rm -rf ~/code/node_modules/dat/node_modules/hyperdrive 83 | ``` 84 | 85 | Now when you run dat, and it tries to `require('hyperdrive')` it first looks in its own `node_modules` folder at `~/code/node_modules/dat/node_modules` but doesnt find hyperdrive. So it goes up to `~/code/node_modules` and finds `hyperdrive` there and uses that one, your git copy. 86 | 87 | If you want to switch back to an npm copy, just run `npm install` inside `~/code/node_modules/dat/` and npm will download any missing modules into `~/code/node_modules/dat/node_modules` but wont touch anything in `~/code/node_modules`. 88 | 89 | This might seem a bit complicated at first, but is simple once you get the hang of it. 
Here are some rules to help you get started: 90 | 91 | - Never make any meaningful edits to code inside an "npm-managed" node_modules folder (such as `~/code/node_modules/dat/node_modules`), because when you run `npm install` inside those folders it could inadvertently delete all of your edits when installing an updated copy of a module. This has happened to me many times, so I just always use my git copy and delete the npm copy (as described above) to make edits to a module. 92 | - You should never need to run any npm commands in terminal when at your "manually managed"" node_modules folder at `~/code/node_modules`. Never running npm commands at that folder also prevents npm from accidentally erasing your git copies of modules 93 | - The location of your "manually managed" node_modules folder should be somewhere isolated from your normal require path. E.g. if you put it at `~/node_modules`, then when you run `npm install dat` at `~/Desktop` npm might decide to erase your git copy of dat at `~/node_modules/dat` and replace it with a copy from npm, which could make you lose work. Putting your manually managed `node_modules` folder in a sub-folder like `~/code` gets it "out of the way" and prevents accidents like that from happening. 94 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2015 Max Ogden. All rights reserved. 2 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 3 | 4 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 5 | 6 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 7 | 8 | 3. 
Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 9 | 10 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 11 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | More info on active projects and modules at [dat-ecosystem.org](https://dat-ecosystem.org/) 2 | 3 | --- 4 | 5 | # Dat 6 | 7 | > npm install -g dat 8 | 9 | Use `dat` command line to share files with version control, back up data to servers, browse remote files on demand, and automate long-term data preservation. 10 | 11 | `dat` is the first application based upon the [Hypercore Protocol](https://github.com/hypercore-protocol), and drove the architectural design through iterative development between 2014 and 2017. There exists a large community around it. 12 | 13 | 14 | [][Dat Project] 15 | 16 | Have questions? Join our chat via IRC or Gitter: 17 | 18 | [![#dat IRC channel on freenode][irc-badge]][irc-channel] 19 | [![datproject/discussions][gitter-badge]][gitter-chat] 20 | 21 | ### Thanks to our financial supporters! 
![OpenCollective](https://opencollective.com/dat/tiers/maintainer.svg?avatarHeight=36&width=600)
You can turn any folder on your computer into a dat. Dat scans your folder, allowing you to: 71 | 72 | * Track your files with automatic version history. 73 | * Share files with others over a secure peer to peer network. 74 | * Automate live backups to external HDs or remote servers. 75 | * Publish and share files with built in HTTP server. 76 | 77 | Dat allows you to focus on the fun work without worrying about moving files around. **Secure**, **distributed**, **fast**. 78 | 79 | * Documentation: [docs.datproject.org](https://docs.datproject.org) 80 | * [Dat white paper](https://github.com/datprotocol/whitepaper/blob/master/dat-paper.pdf) 81 | 82 | ##### Desktop Applications 83 | 84 | Rather not use the command line? Check out these options: 85 | 86 | * [Beaker Browser] - An experimental p2p browser with built-in support for the Hypercore Protocol. 87 | * [Dat Desktop](https://github.com/datproject/dat-desktop) - A desktop app to manage multiple dats on your desktop machine. 88 | 89 | ### JS Library 90 | 91 | Add Dat to your `package.json`, `npm install dat --save`. Dat exports the [dat-node] API via `require('dat')`. Use it in your javascript applications! Dat Desktop and Dat command line both use dat-node to share and download dats. 92 | 93 | Full API documentation is available in the [dat-node] repository on Github. 94 | 95 | We have Dat installed, let's use it! 96 | 97 | Dat's unique design works wherever you store your data. You can create a new dat from any folder on your computer. 98 | 99 | A dat is some files from your computer and a `.dat` folder. Each dat has a unique `dat://` link. With your dat link, other users can download your files and live sync any updates. 100 | 101 | ### Sharing Data 102 | 103 | You can start sharing your files with a single command. 
Unlike `git`, you do not have to initialize a repository first, `dat share` or simply `dat` will do that for you: 104 | 105 | ``` 106 | dat 107 | ``` 108 | 109 | Use `dat` to create a dat and sync your files from your computer to other users. Dat scans your files inside ``, creating metadata in `/.dat`. Dat stores the public link, version history, and file information inside the dat folder. 110 | 111 | `dat sync` and `dat share` are aliases for the same command. 112 | ![share-gif] 113 | 114 | ### Downloading Data 115 | 116 | ``` 117 | dat dat:// 118 | ``` 119 | 120 | Use `dat` to download files from a remote computer sharing files with Dat. This will download the files from `dat://` to your ``. The download exits after it completes but you can continue to update the files later after the clone is done. Use `dat pull` to update new files or `dat sync` to live sync changes. 121 | 122 | `dat clone` is an alias for the same command. 123 | 124 | ![clone-gif] 125 | 126 | 127 | ### Misc Commands 128 | 129 | A few other highlights. Run `dat help` to see the full usage guide. 130 | 131 | * `dat create` or `dat init` - Create an empty dat and `dat.json` file. 132 | * `dat log ~/data/dat-folder/` or `dat log dat://` - view the history and metadata information for a dat. 133 | 134 | ### Quick Demos 135 | 136 | To get started using Dat, you can try downloading a dat and then sharing a dat of your own. 137 | 138 | #### Download Demo 139 | 140 | We made a demo folder just for this exercise. Inside the demo folder is a `dat.json` file and a gif. We shared these files via Dat and now you can download them with our dat key! 141 | 142 | Similar to git, you can download somebody's dat by running `dat clone `. 
You can also specify the directory: 143 | 144 | ``` 145 | ❯ dat clone dat://778f8d955175c92e4ced5e4f5563f69bfec0c86cc6f670352c457943666fe639 ~/Downloads/dat-demo 146 | dat v13.5.0 147 | Created new dat in /Users/joe/Downloads/dat-demo/.dat 148 | Cloning: 2 files (1.4 MB) 149 | 150 | 2 connections | Download 614 KB/s Upload 0 B/s 151 | 152 | dat sync complete. 153 | Version 4 154 | ``` 155 | 156 | This will download our demo files to the `~/Downloads/dat-demo` folder. These files are being shared by a server over Dat (to ensure high availability) but you may connect to any number of users also hosting the content. 157 | 158 | You can also also view the files online: [datbase.org/778f8d955175c92e4ced5e4f5563f69bfec0c86cc6f670352c457943666fe639](https://datbase.org/778f8d955175c92e4ced5e4f5563f69bfec0c86cc6f670352c457943666fe639/). datbase.org can download files over Dat and display them on HTTP as long as someone is hosting it. The website temporarily caches data for any visited links (do not view your dat on datbase.org if you do not want us to cache your data). 159 | 160 | #### Sharing Demo 161 | 162 | Dat can share files from your computer to anywhere. If you have a friend going through this demo with you, try sharing to them! If not we'll see what we can do. 163 | 164 | Find a folder on your computer to share. Inside the folder can be anything, Dat can handle all sorts of files (Dat works with really big folders too!). 165 | 166 | First, you can create a new dat inside that folder. Using the `dat create` command also walks us through making a `dat.json` file: 167 | 168 | ``` 169 | ❯ dat create 170 | Welcome to dat program! 171 | You can turn any folder on your computer into a Dat. 172 | A dat is a folder with some magic. 173 | ``` 174 | 175 | This will create a new (empty) dat. Dat will print a link, share this link to give others access to view your files. 176 | 177 | Once we have our dat, run `dat ` to scan your files and sync them to the network. 
Share the link with your friend to instantly start downloading files. 178 | 179 | #### Bonus HTTP Demo 180 | 181 | Dat makes it really easy to share live files on a HTTP server. This is a cool demo because we can also see how version history works! Serve dat files on HTTP with the `--http` option. For example, `dat --http`, serves your files to a HTTP website with live reloading and version history! This even works for dats you're downloading (add the `--sparse` option to only download files you select via HTTP). The default HTTP port is 8080. 182 | 183 | *Hint: Use `localhost:8080/?version=10` to view a specific version.* 184 | 185 | Get started using Dat today with the `share` and `clone` commands or read below for more details. 186 | 187 | ## Usage 188 | 189 | The first time you run a command, a `.dat` folder is created to store the dat metadata. 190 | Once a dat is created, you can run all the commands inside that folder, similar to git. 191 | 192 | Dat keeps secret keys in the `~/.dat/secret_keys` folder. These are required to write to any dats you create. 193 | 194 | #### Creating a dat & dat.json 195 | 196 | ``` 197 | dat create [] 198 | ``` 199 | 200 | The create command prompts you to make a `dat.json` file and creates a new dat. Import the files with sync or share. 201 | 202 | Optionally bypass Title and Description prompt: 203 | 204 | ```sh 205 | dat create --title "MY BITS" --description "are ready to synchronize! 
😎" 206 | ``` 207 | 208 | Optionally bypass `dat.json` creation: 209 | 210 | ```sh 211 | dat create --yes 212 | dat create -y 213 | ``` 214 | 215 | ### Sharing 216 | 217 | The quickest way to get started sharing files is to `share`: 218 | 219 | ``` 220 | ❯ dat 221 | dat://3e830227b4b2be197679ff1b573cc85e689f202c0884eb8bdb0e1fcecbd93119 222 | Sharing dat: 24 files (383 MB) 223 | 224 | 0 connections | Download 0 B/s Upload 0 B/s 225 | 226 | Importing 528 files to Archive (165 MB/s) 227 | [=-----------------------------------------] 3% 228 | ADD: data/expn_cd.csv (403 MB / 920 MB) 229 | ``` 230 | 231 | ``` 232 | dat [] [--no-import] [--no-watch] 233 | ``` 234 | 235 | Start sharing your dat archive over the network. It will import new or updated files since you last ran `create` or `sync`. Dat watches files for changes and imports updated files. 236 | 237 | * Use `--no-import` to not import any new or updated files. 238 | * Use `--no-watch` to not watch directory for changes. `--import` must be true for `--watch` to work. 239 | 240 | #### Ignoring Files 241 | 242 | By default, Dat will ignore any files in a `.datignore` file, similar to git. Each file should be separated by a newline. Dat also ignores all hidden folders and files. Supports pattern wildcards (`/*.png`) and directory-wildcards (`/**/cache`). 243 | 244 | #### Selecting Files 245 | 246 | By default, Dat will download all files. If you want to only download a subset, you can create a `.datdownload` file which downloads only the files and folders specified. Each should be separated by a newline. 247 | 248 | 249 | ### Downloading 250 | 251 | Start downloading by running the `clone` command. This creates a folder, downloads the content and metadata, and a `.dat` folder inside. Once you started the download, you can resume at any time. 252 | 253 | ``` 254 | dat [] [--temp] 255 | ``` 256 | 257 | Clone a remote dat archive to a local folder. 
258 | This will create a folder with the key name if no folder is specified. 259 | 260 | 261 | #### Downloading via `dat.json` key 262 | 263 | You can use a `dat.json` file to clone also. This is useful when combining Dat and git, for example. To clone a dat you can specify the path to a folder containing a `dat.json`: 264 | 265 | ``` 266 | git git@github.com:joehand/dat-clone-sparse-test.git 267 | dat ./dat-clone-sparse-test 268 | ``` 269 | 270 | This will download the dat specified in the `dat.json` file. 271 | 272 | #### Updating Downloaded Archives 273 | 274 | Once a dat is clone, you can run either `dat pull` or `dat sync` in the folder to update the archive. 275 | 276 | ``` 277 | dat pull [] 278 | ``` 279 | 280 | Download latest files and keep connection open to continue updating as remote source is updated. 281 | 282 | ### Shortcut commands 283 | 284 | * `dat ` will run `dat clone` for new dats or resume the existing dat in `` 285 | * `dat ` is the same as running `dat sync ` 286 | 287 | ### Key Management & Moving dats 288 | 289 | `dat keys` provides a few commands to help you move or backup your dats. 290 | 291 | Writing to a dat requires the secret key, stored in the `~/.dat` folder. You can export and import these keys between dats. First, clone your dat to the new location: 292 | 293 | * (original) `dat share` 294 | * (duplicate) `dat clone ` 295 | 296 | Then transfer the secret key: 297 | 298 | * (original) `dat keys export` - copy the secret key printed out. 299 | * (duplicate) `dat keys import` - this will prompt you for the secret key, paste it in here. 300 | 301 | ## Troubleshooting 302 | 303 | We've provided some troubleshooting tips based on issues users have seen. 304 | Please [open an issue][new-issue] or ask us in our [chat room][gitter-chat] if you need help troubleshooting and it is not covered here. 305 | 306 | If you have trouble sharing/downloading in a directory with a `.dat` folder, try deleting it and running the command again. 
307 | 308 | #### Check Your Dat Version 309 | 310 | Knowing the version is really helpful if you run into any bugs, and will help us troubleshoot your issue. 311 | 312 | Check your Dat version: 313 | 314 | ``` 315 | dat -v 316 | ``` 317 | 318 | You should see the Dat semantic version printed, e.g. `14.0.0`. 319 | 320 | ### Installation Issues 321 | 322 | #### Node & npm 323 | 324 | To use the Dat command line tool you will need to have [node and npm installed][install-node-npm]. 325 | Make sure those are installed correctly before installing Dat. 326 | You can check the version of each: 327 | 328 | ``` 329 | node -v 330 | npm -v 331 | ``` 332 | 333 | #### Global Install 334 | 335 | The `-g` option installs Dat globally, allowing you to run it as a command. 336 | Make sure you installed with that option. 337 | 338 | * If you receive an `EACCES` error, read [this guide][fixing-npm-permissions] on fixing npm permissions. 339 | * If you receive an `EACCES` error, you may also install Dat with sudo: `sudo npm install -g dat`. 340 | * Have other installation issues? Let us know, you can [open an issue][new-issue] or ask us in our [chat room][gitter-chat]. 341 | 342 | ### Debugging Output 343 | 344 | If you are having trouble with a specific command, run with the debug environment variable set to `dat` (and optionally also `dat-node`). 345 | This will help us debug any issues: 346 | 347 | ``` 348 | DEBUG=dat,dat-node dat dat:// dir 349 | ``` 350 | 351 | ### Networking Issues 352 | 353 | Networking capabilities vary widely with each computer, network, and configuration. 354 | Whenever you run Dat there are several steps to share or download files with peers: 355 | 356 | 1. Discovering Peers 357 | 2. Connecting to Peers 358 | 3. Sending & Receiving Data 359 | 360 | With successful use, Dat will show `Connected to 1 peer` after connection. 361 | If you never see a peer connected, your network may be restricting discovery or connection. 
362 | 363 | ## JS API 364 | 365 | You can use Dat in your javascript application: 366 | 367 | ```js 368 | var Dat = require('dat') 369 | 370 | Dat('/data', function (err, dat) { 371 | // use dat 372 | }) 373 | ``` 374 | 375 | **[Read more][dat-node] about the JS usage provided via `dat-node`.** 376 | 377 | ## For Developers 378 | 379 | Please see [guidelines on contributing] before submitting an issue or PR. 380 | 381 | This command line library uses [dat-node] to create and manage the archives and networking. 382 | If you'd like to build your own Dat application that is compatible with this command line tool, we suggest using dat-node. 383 | 384 | ### Installing from source 385 | 386 | Clone this repository and in a terminal inside of the folder you cloned run this command: 387 | 388 | ``` 389 | npm link 390 | ``` 391 | 392 | This should add a `dat` command line command to your PATH. 393 | Now you can run the `dat` command to try it out. 394 | 395 | The contribution guide also has more tips on our [development workflow]. 
396 | 397 | * `npm run test` to run tests 398 | * `npm run auth-server` to run a local auth server for testing 399 | 400 | ## License 401 | 402 | BSD-3-Clause 403 | 404 | [Dat Project]: https://datproject.org 405 | [Code for Science & Society]: https://codeforscience.org 406 | [Dat white paper]: https://github.com/datproject/docs/blob/master/papers/dat-paper.pdf 407 | [Dat Desktop]: https://docs.datproject.org/install#desktop-application 408 | [Beaker Browser]: https://beakerbrowser.com 409 | [registry server]: https://github.com/datproject/datbase 410 | [share-gif]: https://raw.githubusercontent.com/datproject/docs/master/docs/assets/cli-share.gif 411 | [clone-gif]: https://raw.githubusercontent.com/datproject/docs/master/docs/assets/cli-clone.gif 412 | [Knight Foundation grant]: https://blog.datproject.org/2016/02/01/announcing-publicbits-org/ 413 | [dat-node]: https://github.com/datproject/dat-node 414 | [dat-ignore]: https://github.com/joehand/dat-ignore 415 | [new-issue]: https://github.com/datproject/dat/issues/new 416 | [dat#503]: https://github.com/datproject/dat/issues/503 417 | [install-node]: https://nodejs.org/en/download/ 418 | [install-node-npm]: https://docs.npmjs.com/getting-started/installing-node 419 | [fixing-npm-permissions]: https://docs.npmjs.com/getting-started/fixing-npm-permissions 420 | [guidelines on contributing]: https://github.com/datproject/dat/blob/master/CONTRIBUTING.md 421 | [development workflow]: https://github.com/datproject/dat/blob/master/CONTRIBUTING.md#development-workflow 422 | [travis-badge]: https://travis-ci.org/datproject/dat.svg?branch=master 423 | [travis-build]: https://travis-ci.org/datproject/dat 424 | [appveyor-badge]: https://ci.appveyor.com/api/projects/status/github/datproject/dat?branch=master&svg=true 425 | [appveyor-build]: https://ci.appveyor.com/project/joehand/dat/branch/master 426 | [npm-badge]: https://img.shields.io/npm/v/dat.svg 427 | [npm-package]: https://npmjs.org/package/dat 428 | [irc-badge]: 
https://img.shields.io/badge/irc%20channel-%23dat%20on%20freenode-blue.svg 429 | [irc-channel]: https://webchat.freenode.net/?channels=dat 430 | [gitter-badge]: https://badges.gitter.im/Join%20Chat.svg 431 | [gitter-chat]: https://gitter.im/datproject/discussions 432 | -------------------------------------------------------------------------------- /appveyor.yml: -------------------------------------------------------------------------------- 1 | # Test against this version of Node.js 2 | environment: 3 | matrix: 4 | - nodejs_version: "6" 5 | - nodejs_version: "8" 6 | - nodejs_version: "10" 7 | 8 | # Install scripts. (runs after repo cloning) 9 | install: 10 | - ps: Install-Product node $env:nodejs_version 11 | - npm install 12 | 13 | test_script: 14 | # Output useful info for debugging. 15 | - node --version 16 | - npm --version 17 | - npm test 18 | 19 | build: off 20 | -------------------------------------------------------------------------------- /bin/cli.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | 3 | var subcommand = require('subcommand') 4 | var debug = require('debug')('dat') 5 | var usage = require('../src/usage') 6 | var pkg = require('../package.json') 7 | 8 | process.title = 'dat' 9 | 10 | // Check node version to make sure we support 11 | var NODE_VERSION_SUPPORTED = 4 12 | var nodeMajorVer = process.version.match(/^v([0-9]+)\./)[1] 13 | var invalidNode = nodeMajorVer < NODE_VERSION_SUPPORTED 14 | if (invalidNode) exitInvalidNode() 15 | else { 16 | var notifier = require('update-notifier') 17 | notifier({ pkg: pkg }) 18 | .notify({ 19 | defer: true, 20 | isGlobal: true, 21 | boxenOpts: { 22 | align: 'left', 23 | borderColor: 'green', 24 | borderStyle: 'classic', 25 | padding: 1, 26 | margin: { top: 1, bottom: 1 } 27 | } 28 | }) 29 | } 30 | 31 | if (debug.enabled) { 32 | debug('Dat DEBUG mode engaged, enabling quiet mode') 33 | } 34 | 35 | var config = { 36 | defaults: [ 37 | { name: 
'dir', abbr: 'd', help: 'set the directory for Dat' }, 38 | { name: 'logspeed', default: 400 }, 39 | { name: 'port', help: 'port to use for connections (default port: 3282 or first available)' }, 40 | { name: 'utp', default: true, boolean: true, help: 'use utp for discovery' }, 41 | { name: 'http', help: 'serve dat over http (default port: 8080)' }, 42 | { name: 'debug', default: !!process.env.DEBUG && !debug.enabled, boolean: true }, 43 | { name: 'quiet', default: debug.enabled, boolean: true }, // use quiet for dat debugging 44 | { name: 'sparse', default: false, boolean: true, help: 'download only requested data' }, 45 | { name: 'up', help: 'throttle upload bandwidth (1024, 1kb, 2mb, etc.)' }, 46 | { name: 'down', help: 'throttle download bandwidth (1024, 1kb, 2mb, etc.)' } 47 | ], 48 | root: { 49 | options: [ 50 | { 51 | name: 'version', 52 | boolean: true, 53 | default: false, 54 | abbr: 'v' 55 | } 56 | ], 57 | command: usage 58 | }, 59 | none: syncShorthand, 60 | commands: [ 61 | require('../src/commands/clone'), 62 | require('../src/commands/create'), 63 | require('../src/commands/log'), 64 | require('../src/commands/keys'), 65 | require('../src/commands/publish'), 66 | require('../src/commands/pull'), 67 | require('../src/commands/status'), 68 | require('../src/commands/sync'), 69 | require('../src/commands/unpublish'), 70 | require('../src/commands/auth/register'), 71 | require('../src/commands/auth/whoami'), 72 | require('../src/commands/auth/logout'), 73 | require('../src/commands/auth/login') 74 | ], 75 | usage: { 76 | command: showUsageOrRunExtension, 77 | option: { 78 | name: 'help', 79 | abbr: 'h' 80 | } 81 | }, 82 | aliases: { 83 | 'init': 'create', 84 | 'share': 'sync' 85 | }, 86 | // whitelist extensions for now 87 | extensions: [ 88 | 'store' 89 | ] 90 | } 91 | 92 | if (debug.enabled) { 93 | debug('dat', pkg.version) 94 | debug('node', process.version) 95 | } 96 | 97 | // Match Args + Run command 98 | var match = subcommand(config) 99 | 
match(alias(process.argv.slice(2))) 100 | 101 | function alias (argv) { 102 | var cmd = argv[0] 103 | if (!config.aliases[cmd]) return argv 104 | argv[0] = config.aliases[cmd] 105 | return argv 106 | } 107 | 108 | // CLI Shortcuts 109 | // Commands: 110 | // dat [] - clone/sync a key 111 | // dat - create dat + share a directory 112 | // dat 113 | function syncShorthand (opts) { 114 | if (!opts._.length) return usage(opts) 115 | debug('Sync shortcut command') 116 | 117 | debug('Trying extension', opts._[0]) 118 | // First try extension 119 | if (config.extensions.indexOf(opts._[0]) > -1) return require('../src/extensions')(opts) 120 | 121 | var parsed = require('../src/parse-args')(opts) 122 | 123 | // Download Key 124 | if (parsed.key) { 125 | // dat [] - clone/resume in [dir] 126 | debug('Clone sync') 127 | opts.dir = parsed.dir || parsed.key // put in `process.cwd()/key` if no dir 128 | opts.exit = opts.exit || false 129 | return require('../src/commands/clone').command(opts) 130 | } 131 | 132 | // Sync dir 133 | // dat - sync existing dat in {dir} 134 | if (parsed.dir) { 135 | opts.shortcut = true 136 | debug('Share sync') 137 | 138 | // Set default opts. TODO: use default opts in share 139 | opts.watch = opts.watch || true 140 | opts.import = opts.import || true 141 | return require('../src/commands/sync').command(opts) 142 | } 143 | 144 | // All else fails, show usage 145 | return usage(opts) 146 | } 147 | 148 | // This was needed so that we can show help messages from extensions 149 | function showUsageOrRunExtension (opts, help, usageMessage) { 150 | if (config.extensions.indexOf(opts._[0]) > -1) return require('../src/extensions')(opts) 151 | usage(opts, help, usageMessage) 152 | } 153 | 154 | function exitInvalidNode () { 155 | console.error('Node Version:', process.version) 156 | console.error('Unfortunately, we only support Node >= v4. 
Please upgrade to use Dat.') 157 | console.error('You can find the latest version at https://nodejs.org/') 158 | process.exit(0) 159 | } 160 | -------------------------------------------------------------------------------- /changelog.md: -------------------------------------------------------------------------------- 1 | # Change Log 2 | All notable changes to this project will be documented in this file. 3 | This project adheres to [Semantic Versioning](https://semver.org/). 4 | 5 | ## [Unreleased] 6 | 7 | ## 13.13.0 8 | * `dat pull --exit=NN` exits after `NN` number of seconds, when there are no updates to sync. 9 | 10 | ## 13.9.0 - 2017-10-11 11 | 12 | ### Changed 13 | * Use [datbase.org](https://datbase.org) as default registry (instead of datproject.org) 14 | 15 | ## 13.8.2 - 2017-09-28 16 | 17 | ### Fixed 18 | * Error not being handled (https://github.com/datproject/dat/issues/838) 19 | * Set `opts.debug` properly when using `DEBUG` that isn't `dat`. 20 | * Move discovery key to option in `dat keys` (#869) 21 | 22 | ## 13.8.1 - 2017-08-04 23 | 24 | ### Fixes 25 | * Error not being handled (https://github.com/datproject/dat/issues/838) 26 | 27 | ## 13.8.0 - 2017-08-04 28 | 29 | With this release, we are adding an exciting feature that really showcases how powerful Dat is, selective sync. Using the CLI you can now specify which files you want to download either with an option or using the `.datdownload` file. `dat sync` will download and keep updated on the selected files. This means you can put large datasets into Dat but have control over what files you download where. 
30 | 31 | [Full release notes](https://github.com/datproject/dat/releases/tag/v13.8.0) 32 | 33 | ## Added 34 | * Selective Sync (https://github.com/datproject/dat/pull/834) 35 | * Key management (https://github.com/datproject/dat/pull/828) 36 | 37 | ## Changed 38 | * Commands run faster via lazy required modules (https://github.com/datproject/dat/pull/821) 39 | 40 | ## 13.7.0 - 2017-06-28 41 | ## Added 42 | * Throttling - sometimes Dat goes too fast, so you can limit the upload + download speeds. (https://github.com/datproject/dat/pull/806) 43 | * Publish metadata to registry when publishing (https://github.com/datproject/dat/pull/812) 44 | 45 | ## Changed 46 | * Use dat-node http support directly (https://github.com/datproject/dat/pull/817) 47 | 48 | ## Fixed 49 | * Use npm package for registry testing. 50 | 51 | ## 13.6.0 - 2017-06-05 52 | Full support for Dat registries! See our [full release notes](https://github.com/datproject/dat/releases/tag/v13.6.0). 53 | ### Added 54 | * Improved support for public Dat registries (https://github.com/datproject/dat/pull/794) 55 | * Add unpublish command 56 | 57 | ## 13.5.1 - 2017-05-30 58 | ### Changed 59 | * Big documentation update! 60 | * Force bump dat-node for a deletion bug that was a bit overzealous in deleting files (https://github.com/mafintosh/hyperdrive/issues/167). 61 | 62 | ## 13.5.0 - 2017-05-25 63 | ### Added 64 | * Dat version number is printed in header (https://github.com/datproject/dat/pull/788) 65 | * Add prompt and introduction to `dat create` command (https://github.com/datproject/dat/pull/782) and create dat.json file (https://github.com/datproject/dat/pull/765). 66 | * Tell user if new `.dat` was initialized. 67 | * Add `dat log` command to print archive history and size information (https://github.com/datproject/dat/pull/781). 68 | * Use `require('dat')` to get `dat-node` JS API (https://github.com/datproject/dat/pull/778). 
69 | 70 | ### Changed 71 | * Default to upload true for `dat clone` and `dat pull`, enables better hole-punching (https://github.com/datproject/dat/pull/787). 72 | 73 | ### Fixed 74 | * Make argument parsing more consistent across commands (https://github.com/datproject/dat/pull/789) 75 | * Fix usage and help text (various). 76 | 77 | ## 13.4.1 - 2017-05-16 78 | ### Added 79 | * Document sparse option in cli help 80 | * add node/dat version to debug 81 | 82 | ### Changed 83 | * Use share for shortcut (create new dat if not created) 84 | 85 | ### Fixed 86 | * use exit option on clone shortcut if specified 87 | * [various ui fixes](https://github.com/datproject/dat/pull/764) 88 | 89 | ## 13.4.0 - 2017-05-11 90 | ### Added 91 | * Serve dat over http with `--http` option 92 | 93 | ## 13.3.0 - 2017-05-10 94 | ### Added 95 | * Add `--sources` option for debugging network issues 96 | 97 | ## 13.2.0 - 2017-05-10 98 | ### Added 99 | * Dat-* extensions ([#740](https://github.com/datproject/dat/pull/740)) 100 | * Ignore directories in import (dat-node v3.3.0) 101 | 102 | ## 13.1.1 - 2017-05-10 103 | ### Fixed 104 | * Set directory for publish command 105 | 106 | ### Changed 107 | * Improve `--show-key` help output 108 | * Always show download progress bar and make language more clear. 109 | 110 | ## 13.1.0 - 2017-05-09 111 | ### Fixed 112 | * Cleanup dat shortcut + directory creation 113 | * Check for any swarm.connecting before doing discovery failure. 114 | 115 | ### Added 116 | * Check node version, fail for anything older than node v4 (#669) 117 | * Add show-key option to display key on downloading cmds 118 | * `dat status` command to show key, file count, dir size, and archive version 119 | 120 | ## 13.0.0 - 2017-05-08 121 | ### Changed 122 | * Upgrade to Hyperdrive v8/9 (SLEEP archive format) and Dat-node v2/3. See [dat-node release docs](https://github.com/datproject/dat-node/releases/tag/v2.0.0) for more info. 
123 | * UI updates 124 | 125 | ## 12.0.3 - 2017-03-29 126 | ### Fixed 127 | * Content progress for archives with history 128 | * Change `process.title` to `dat` from `dat-next` 129 | 130 | ### Changed 131 | * Use two decimals for content progress 132 | 133 | ## 12.0.2 - 2017-02-08 134 | ### Fixed 135 | * Remove `hyperdrive-import-files` from dependencies (it is a dependency of `dat-node`). It was accidentally added. 136 | * Always verify on read to avoid replication errors. 137 | 138 | ## 12.0.1 - 2017-02-07 139 | ### Fixed 140 | * Files getting truncated and edited with bad characters - issue [#626](https://github.com/datproject/dat/issues/626) and [#623](https://github.com/datproject/dat/issues/623) 141 | * Source files getting overwritten (issue [#628](https://github.com/datproject/dat/issues/628)) 142 | * Duplicate files getting imported 143 | 144 | ## 12.0.0 - 2017-02-06 145 | Big new release! See the [release notes](https://github.com/datproject/dat/releases/tag/v12.0.0) on Github. 146 | 147 | ## 11.6.0 - 2016-11-16 148 | ### Removed 149 | * webrtc support 150 | 151 | ### Fixed 152 | * Fail gracefully if another dat is running in directory 153 | * Handle `dat.open` errors 154 | * Progress bar incorrectly showing 100% complete and 0 bytes 155 | 156 | ### Added 157 | * Use graceful-fs to avoid EMFILE errors 158 | 159 | ## 11.5.5 - 2016-11-07 160 | ### Fixed 161 | * Better download statistics using blocks instead of bytes 162 | * Fix share stats on resuming without file changes 163 | * Fix calculating size UI for large files 164 | 165 | ### Changed 166 | * Update status logger. Uses [ansi-diff-stream](https://github.com/mafintosh/ansi-diff-stream) for updating CLI output now. 
167 | 168 | ## 11.5.4 - 2016-10-28 169 | ### Changed 170 | * Turn off `--watchFiles` by default 171 | * Simplify progress UI 172 | 173 | ## 11.5.3 - 2016-10-28 174 | ### Fixed 175 | * Fix `dat` command with no arguments 176 | 177 | ## 11.5.2 - 2016-10-24 178 | ### Fixed 179 | * Fix `dat --doctor` 180 | 181 | ## 11.5.1 - 2016-10-24 182 | ### Fixed 183 | * Resuming a folder previously shared fixed. 184 | 185 | ## 11.5.0 - 2016-10-20 186 | ### Added 187 | * Accept dat.land links 188 | * Allow `dat ` to resume a downloaded link 189 | 190 | ### Fixed 191 | * Improved error output for incorrect params 192 | 193 | ## 11.4.0 - 2016-10-06 194 | ### Added 195 | * `--ignore-hidden` option. Ignores hidden files by default. 196 | * `--signalhub` option to override default signalhub URL. 197 | 198 | ### Fixed 199 | * Remove headless option from electron-webrtc. It is detected for us. 200 | * `utp` is true by default 201 | 202 | ## 11.3.1 - 2016-09-21 203 | ### Fixed 204 | * Use `--quiet` mode with `--debug` so output is easier to read. 205 | 206 | ## 11.3.0 - 2016-09-18 207 | ### Added 208 | * `--webrtc` option. Uses electron-webrtc to run via webrtc. 209 | 210 | ## 11.2.0 - 2016-09-14 211 | ### Added 212 | * `--temp` option. Uses memdb as database instead of `.dat` folder. 213 | * Print message when download finishes telling user they can exit. 
214 | * Add option for turning off UTP 215 | * Use dat-js module (includes using hyperdrive-import-files for appending) 216 | 217 | ### Fixed 218 | * Download finished message not displayed when dat live updates 219 | * Download speed removed when download is finished 220 | 221 | ## 11.1.2 - 2016-07-18 222 | ### Fixed 223 | * Zero bytes total when downloading Dat with single file 224 | 225 | ## 11.1.1 - 2016-07-15 226 | ### Fixed 227 | * Create download directory if doesn't exist 228 | * Accept dat:// links for dat-desktop 229 | * Throw error when two different dats are downloaded to same folder 230 | 231 | ## 11.1.0 - 2016-07-15 232 | ### Fixed 233 | * Use yolowatch module for recursive live updates 234 | * Improved stats for edge cases 235 | * Print link with --quiet argument 236 | * Better stat & progress output with hyperdrive/hypercore events 237 | 238 | ### Changed 239 | * Simplified and clean up CLI output 240 | * Improve modularity of library 241 | * Move logger module into own npm package, status-logger 242 | * Store key in .dat db without encoding as hex string (#498) 243 | * upgrade to hyperdrive 7 244 | 245 | ### Removed 246 | * List download option (will be re-added pending a hyperdrive update) 247 | 248 | ### Added 249 | * Accept dat-encoding for 50 character links 250 | 251 | ## 11.0.2 - 2016-06-23 252 | ### Fixed 253 | * Live mode with recursive adding files! 
254 | 255 | ### Changed 256 | * Separate yoloWatch to module 257 | 258 | ## 11.0.1 - 2016-06-20 259 | ### Fixed 260 | * Create download directory if it doesn't exist 261 | 262 | ### Added 263 | * Updated Docs 264 | 265 | ## 11.0.0 - 2016-06-17 266 | ### Added 267 | * Live dat by default 268 | * Added the `.dat` folder to store metadata and keys 269 | * Resume dat share and download in existing .dat directory 270 | * Store metadata using leveldb 271 | * --list option on download to list files 272 | * --exit option on download to close process on completion 273 | 274 | ### Changed 275 | * New proposed RC2 API 276 | * --static option change to --snapshot 277 | * Use Hyperdrive-archive-swarm module for swarm 278 | 279 | ### Removed 280 | * --seed option, stays open by default now 281 | * --no-color option 282 | * --append option, append by default now 283 | 284 | ## 10.1.1 - 2016-06-09 285 | ### Fixed 286 | * Fix file count on live share 287 | * Fix total percentage on share 288 | 289 | ## 10.1.0 - 2016-06-08 290 | ### Changed 291 | * Show progress in CLI output 292 | 293 | ## 10.0.2 - 2016-06-07 294 | ### Fixed 295 | * Fix --static sharing 296 | * Fix --doctor 297 | 298 | ## 10.0.1 - 2016-06-06 299 | ### Fixed 300 | * Share argument 301 | * Argument bugs 302 | 303 | ## 10.0.0 - 2016-06-06 304 | ### Added 305 | * Live sharing! 306 | 307 | ### Changed 308 | * Update to hyperdrive 6.0 309 | * Update API to RC2 candidate 310 | 311 | ## 9.x.x and earlier 312 | 313 | These refer to the pre-1.0 versions of dat and are omitted. 
314 | -------------------------------------------------------------------------------- /download.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # gets latest dat release zip for platform and extracts runnable binary into ~/.dat 4 | # usage: wget -qO- https://raw.githubusercontent.com/datproject/dat/master/bin/install.sh | bash 5 | # based on https://github.com/jpillora/installer/blob/master/scripts/download.sh 6 | 7 | DAT_DIR="$HOME/.dat/releases" 8 | 9 | function cleanup { 10 | rm -rf $DAT_DIR/tmp.zip > /dev/null 11 | } 12 | 13 | function fail { 14 | cleanup 15 | msg=$1 16 | echo "============" 17 | echo "Error: $msg" 1>&2 18 | exit 1 19 | } 20 | 21 | function install { 22 | # bash check 23 | [ ! "$BASH_VERSION" ] && fail "Please use bash instead" 24 | GET="" 25 | if which curl > /dev/null; then 26 | GET="curl" 27 | GET="$GET --fail -# -L" 28 | elif which wget > /dev/null; then 29 | GET="wget" 30 | GET="$GET -qO-" 31 | else 32 | fail "neither wget/curl are installed" 33 | fi 34 | case `uname -s` in 35 | Darwin) OS="macos";; 36 | Linux) OS="linux";; 37 | *) fail "unsupported os: $(uname -s)";; 38 | esac 39 | if uname -m | grep 64 > /dev/null; then 40 | ARCH="x64" 41 | else 42 | fail "only arch x64 is currently supported for single file install. please use npm instead. your arch is: $(uname -m)" 43 | fi 44 | echo "Fetching latest Dat release version from GitHub" 45 | LATEST=$($GET -qs https://api.github.com/repos/datproject/dat/releases/latest | grep tag_name | head -n 1 | cut -d '"' -f 4); 46 | mkdir -p $DAT_DIR || fail "Could not create directory $DAT_DIR, try manually downloading zip and extracting instead." 
47 | cd $DAT_DIR 48 | RELEASE="dat-${LATEST:1}-${OS}-${ARCH}" 49 | URL="https://github.com/datproject/dat/releases/download/${LATEST}/${RELEASE}.zip" 50 | which unzip > /dev/null || fail "unzip is not installed" 51 | echo "Downloading $URL" 52 | bash -c "$GET $URL" > $DAT_DIR/tmp.zip || fail "download failed" 53 | unzip -o -qq $DAT_DIR/tmp.zip || fail "unzip failed" 54 | BIN="$DAT_DIR/$RELEASE/dat" 55 | chmod +x $BIN || fail "chmod +x failed" 56 | cleanup 57 | printf "Dat $LATEST has been downloaded successfully. Execute it with this command:\n\n${BIN}\n\nAdd it to your PATH with this command (add this to .bash_profile/.bashrc):\n\nexport PATH=\"\$PATH:$DAT_DIR/$RELEASE\"\n" 58 | } 59 | 60 | install -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | module.exports = require('dat-node') 2 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dat", 3 | "version": "14.0.3", 4 | "description": "Dat is the package manager for data. Easily share and version control data.", 5 | "keywords": [ 6 | "dat", 7 | "dat protocol", 8 | "hyperdrive", 9 | "decentralized", 10 | "file sharing" 11 | ], 12 | "main": "index.js", 13 | "bin": { 14 | "dat": "bin/cli.js" 15 | }, 16 | "scripts": { 17 | "auth-server": "DEBUG=* node scripts/auth-server.js", 18 | "install-precommit": "echo ./node_modules/.bin/standard > .git/hooks/pre-commit && chmod +x .git/hooks/pre-commit", 19 | "standard": "standard", 20 | "deps": "dependency-check . && dependency-check . 
--extra --no-dev", 21 | "test": "standard && npm run deps && tape 'test/*.js'", 22 | "test-only": "tape 'test/*.js'", 23 | "package": "rm -rf builds && npm run pkg && ./package.sh", 24 | "pkg": "pkg package.json -o builds/dat" 25 | }, 26 | "repository": { 27 | "type": "git", 28 | "url": "git://github.com/datproject/dat.git" 29 | }, 30 | "author": "Dat Project", 31 | "license": "BSD-3-Clause", 32 | "bugs": { 33 | "url": "https://github.com/datproject/dat/issues" 34 | }, 35 | "homepage": "https://datproject.org", 36 | "directories": { 37 | "test": "tests" 38 | }, 39 | "dependencies": { 40 | "bytes": "^3.1.0", 41 | "chalk": "^2.4.2", 42 | "cli-truncate": "^1.0.0", 43 | "dat-encoding": "^5.0.1", 44 | "dat-json": "^1.0.3", 45 | "dat-link-resolve": "^2.3.0", 46 | "dat-log": "^2.0.0", 47 | "dat-node": "^4.0.0", 48 | "dat-registry": "^4.0.1", 49 | "debug": "^4.0.0", 50 | "neat-log": "^3.1.0", 51 | "prettier-bytes": "^1.0.3", 52 | "progress-string": "^1.2.1", 53 | "prompt": "^1.0.0", 54 | "pump": "^3.0.0", 55 | "rimraf": "^2.7.1", 56 | "speedometer": "^1.1.0", 57 | "subcommand": "^2.1.1", 58 | "throttle": "^1.0.3", 59 | "update-notifier": "^2.3.0" 60 | }, 61 | "devDependencies": { 62 | "cross-zip-cli": "^1.0.0", 63 | "dependency-check": "^3.4.1", 64 | "hypercore": "^6.25.2", 65 | "mkdirp": "^0.5.4", 66 | "node-fetch": "^2.6.1", 67 | "pkg": "^4.4.4", 68 | "random-access-memory": "^3.1.1", 69 | "recursive-readdir-sync": "^1.0.6", 70 | "standard": "^12.0.0", 71 | "tape": "^4.13.2", 72 | "tape-spawn": "^1.4.2", 73 | "temporary-directory": "^1.0.2" 74 | }, 75 | "pkg": { 76 | "assets": [ 77 | "./node_modules/utp-native/prebuilds/**", 78 | "./node_modules/blake2b-wasm/blake2b.wasm", 79 | "./node_modules/siphash24/siphash24.wasm" 80 | ], 81 | "targets": [ 82 | "node10-linux-x64", 83 | "node10-macos-x64", 84 | "node10-win-x64" 85 | ] 86 | } 87 | } 88 | -------------------------------------------------------------------------------- /package.sh: 
-------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | # couldnt figure out undocumented 'output template' mode for pkg so wrote this 3 | # also need to include .node files until pkg supports including them in binary 4 | 5 | NODE_ABI="node.napi" 6 | VERSION=$(node -pe "require('./package.json').version") 7 | 8 | rm -rf dist 9 | 10 | mkdir dist 11 | mkdir builds/dat-$VERSION-linux-x64 12 | mkdir builds/dat-$VERSION-macos-x64 13 | mkdir builds/dat-$VERSION-win-x64 14 | 15 | mv builds/dat-linux builds/dat-$VERSION-linux-x64/dat 16 | mv builds/dat-macos builds/dat-$VERSION-macos-x64/dat 17 | mv builds/dat-win.exe builds/dat-$VERSION-win-x64/dat.exe 18 | 19 | cp node_modules/utp-native/prebuilds/linux-x64/$NODE_ABI.node builds/dat-$VERSION-linux-x64/ 20 | cp node_modules/utp-native/prebuilds/darwin-x64/$NODE_ABI.node builds/dat-$VERSION-macos-x64/ 21 | cp node_modules/utp-native/prebuilds/win32-x64/$NODE_ABI.node builds/dat-$VERSION-win-x64/ 22 | 23 | cp LICENSE builds/dat-$VERSION-linux-x64/ 24 | cp LICENSE builds/dat-$VERSION-macos-x64/ 25 | cp LICENSE builds/dat-$VERSION-win-x64/ 26 | 27 | cp README.md builds/dat-$VERSION-linux-x64/README 28 | cp README.md builds/dat-$VERSION-macos-x64/README 29 | cp README.md builds/dat-$VERSION-win-x64/README 30 | 31 | cd builds 32 | ../node_modules/.bin/cross-zip dat-$VERSION-linux-x64 ../dist/dat-$VERSION-linux-x64.zip 33 | ../node_modules/.bin/cross-zip dat-$VERSION-macos-x64 ../dist/dat-$VERSION-macos-x64.zip 34 | ../node_modules/.bin/cross-zip dat-$VERSION-win-x64 ../dist/dat-$VERSION-win-x64.zip 35 | 36 | rm -rf builds 37 | 38 | # now travis will upload the 3 zips in dist to the release 39 | -------------------------------------------------------------------------------- /scripts/auth-server.js: -------------------------------------------------------------------------------- 1 | var createServer = require('../tests/helpers/auth-server') 2 | 3 | createServer(process.env.PORT 
|| 8888, function (err, server, closeServer) { 4 | if (err) throw err 5 | 6 | process.on('exit', close) 7 | process.on('SIGINT', close) 8 | 9 | function close (cb) { 10 | closeServer(function () { 11 | process.exit() 12 | }) 13 | } 14 | }) 15 | -------------------------------------------------------------------------------- /snap/snapcraft.yaml: -------------------------------------------------------------------------------- 1 | name: dat 2 | version: '13.11.4' 3 | summary: Share & live sync files anywhere via command line 4 | description: | 5 | Use Dat command line to share files with version control, 6 | back up data to servers, browse remote files on demand, 7 | and automate long-term data preservation. 8 | 9 | grade: 'stable' 10 | confinement: 'strict' 11 | 12 | apps: 13 | dat: 14 | command: dat 15 | plugs: 16 | - home 17 | - network 18 | - network-bind 19 | - removable-media 20 | 21 | parts: 22 | dat: 23 | source: https://github.com/datproject/dat.git 24 | source-tag: 'v13.11.4' 25 | plugin: nodejs 26 | node-engine: 10.9.0 27 | -------------------------------------------------------------------------------- /src/commands/auth/login.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'login', 3 | command: login, 4 | help: [ 5 | 'Login to a Dat registry server', 6 | 'Usage: dat login []', 7 | '', 8 | 'Publish your dats so other users can discovery them.', 9 | 'Please register before trying to login.' 10 | ].join('\n'), 11 | options: [ 12 | { 13 | name: 'server', 14 | help: 'Your Dat registry server (must be registered to login).' 15 | } 16 | ] 17 | } 18 | 19 | function login (opts) { 20 | var prompt = require('prompt') 21 | var output = require('neat-log/output') 22 | var chalk = require('chalk') 23 | var Registry = require('../../registry') 24 | 25 | if (opts._[0]) opts.server = opts._[0] 26 | var welcome = output(` 27 | Welcome to ${chalk.green(`dat`)} program! 28 | Login to publish your dats. 
29 | 30 | `) 31 | console.log(welcome) 32 | 33 | var schema = { 34 | properties: { 35 | server: { 36 | description: chalk.magenta('Dat registry'), 37 | default: opts.server || 'datbase.org', 38 | required: true 39 | }, 40 | email: { 41 | description: chalk.magenta('Email'), 42 | message: 'Email required', 43 | required: true 44 | }, 45 | password: { 46 | description: chalk.magenta('Password'), 47 | message: 'Password required', 48 | required: true, 49 | hidden: true, 50 | replace: '*' 51 | } 52 | } 53 | } 54 | 55 | prompt.override = opts 56 | prompt.message = '' 57 | prompt.start() 58 | prompt.get(schema, function (err, results) { 59 | if (err) return exitErr(err) 60 | opts.server = results.server 61 | makeRequest(results) 62 | }) 63 | 64 | function makeRequest (user) { 65 | var client = Registry(opts) 66 | client.login({ 67 | email: user.email, 68 | password: user.password 69 | }, function (err, resp, body) { 70 | if (err && err.message) return exitErr(err.message) 71 | else if (err) return exitErr(err.toString()) 72 | 73 | console.log(output(` 74 | Logged you in to ${chalk.green(opts.server)}! 75 | 76 | Now you can publish dats and share: 77 | * Run ${chalk.green(`dat publish`)} to publish a dat! 78 | * View & Share your dats at ${opts.server} 79 | `)) 80 | process.exit(0) 81 | }) 82 | } 83 | } 84 | 85 | function exitErr (err) { 86 | console.error(err) 87 | process.exit(1) 88 | } 89 | -------------------------------------------------------------------------------- /src/commands/auth/logout.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'logout', 3 | command: logout, 4 | help: [ 5 | 'Logout from current Dat registry server', 6 | 'Usage: dat logout []', 7 | '', 8 | 'Specify server if you want to from non-active other server.', 9 | 'Check active server with `dat whoami`.' 10 | ].join('\n'), 11 | options: [ 12 | { 13 | name: 'server', 14 | help: 'Server to log out of. Defaults to active login.' 
15 | } 16 | ] 17 | } 18 | 19 | function logout (opts) { 20 | var chalk = require('chalk') 21 | var Registry = require('../../registry') 22 | 23 | if (opts._[0]) opts.server = opts._[0] 24 | 25 | var client = Registry(opts) 26 | 27 | var whoami = client.whoami() 28 | if (!whoami || !whoami.token) return exitErr('Not currently logged in to that server.') 29 | client.logout(function (err) { 30 | if (err) return exitErr(err) 31 | console.log(`Logged out of ${chalk.green(whoami.server)}`) 32 | process.exit(0) 33 | }) 34 | } 35 | 36 | function exitErr (err) { 37 | console.error(err) 38 | process.exit(1) 39 | } 40 | -------------------------------------------------------------------------------- /src/commands/auth/register.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'register', 3 | command: register, 4 | help: [ 5 | 'Register with a public Dat registry', 6 | 'Usage: dat register []', 7 | '', 8 | 'Register with datbase.org or other registries to publish your dats.' 9 | ].join('\n'), 10 | options: [ 11 | { 12 | name: 'server', 13 | help: 'Your Dat registry.' 14 | } 15 | ] 16 | } 17 | 18 | function register (opts) { 19 | var prompt = require('prompt') 20 | var output = require('neat-log/output') 21 | var chalk = require('chalk') 22 | var Registry = require('../../registry') 23 | 24 | // TODO: check if logged in? 25 | if (opts._[0]) opts.server = opts._[0] 26 | var welcome = output(` 27 | Welcome to ${chalk.green(`dat`)} program! 28 | Create a new account with a Dat registry. 
29 | 30 | `) 31 | console.log(welcome) 32 | 33 | var schema = { 34 | properties: { 35 | server: { 36 | description: chalk.magenta('Dat registry'), 37 | default: opts.server || 'datbase.org', 38 | required: true 39 | }, 40 | username: { 41 | description: chalk.magenta('Username'), 42 | message: 'Username required', 43 | required: true 44 | }, 45 | email: { 46 | description: chalk.magenta('Email'), 47 | message: 'Email required', 48 | required: true 49 | }, 50 | password: { 51 | description: chalk.magenta('Password'), 52 | message: 'Password required', 53 | required: true, 54 | hidden: true, 55 | replace: '*' 56 | } 57 | } 58 | } 59 | 60 | prompt.override = opts 61 | prompt.message = '' 62 | prompt.start() 63 | prompt.get(schema, function (err, results) { 64 | if (err) return exitErr(err) 65 | opts.server = results.server 66 | makeRequest(results) 67 | }) 68 | 69 | function makeRequest (user) { 70 | var client = Registry(opts) 71 | 72 | client.register({ 73 | email: user.email, 74 | username: user.username, 75 | password: user.password 76 | }, function (err) { 77 | if (err && err.message) return exitErr(err.message) 78 | else if (err) return exitErr(err.toString()) 79 | console.log(output(` 80 | Created account on ${chalk.green(opts.server)}! 81 | 82 | Login to start publishing: ${chalk.green(`dat login`)} 83 | `)) 84 | process.exit(0) 85 | }) 86 | } 87 | } 88 | 89 | function exitErr (err) { 90 | console.error(err) 91 | process.exit(1) 92 | } 93 | -------------------------------------------------------------------------------- /src/commands/auth/whoami.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'whoami', 3 | command: whoami, 4 | help: [ 5 | 'Get login information', 6 | 'Usage: dat login []', 7 | '', 8 | 'Get information for active registry or specify your registry.' 9 | ].join('\n'), 10 | options: [ 11 | { 12 | name: 'server', 13 | help: 'Server to get login information for. 
Defaults to active login.' 14 | } 15 | ] 16 | } 17 | 18 | function whoami (opts) { 19 | var output = require('neat-log/output') 20 | var chalk = require('chalk') 21 | var Registry = require('../../registry') 22 | 23 | if (opts._[0]) opts.server = opts._[0] 24 | 25 | var client = Registry(opts) 26 | var login = client.whoami() 27 | if (!login || !login.token) { 28 | if (!opts.server) return exitErr('No login information found.') 29 | return exitErr('No login information found for that server.') 30 | } 31 | console.log(output(` 32 | Your active Dat registry information: 33 | 34 | --- 35 | ${chalk.green(login.server)} 36 | Email: ${login.email} 37 | Username: ${login.username} 38 | --- 39 | 40 | Change your registry by logging in again: 41 | ${chalk.dim.green('dat login ')} 42 | `)) 43 | process.exit(0) 44 | } 45 | 46 | function exitErr (err) { 47 | console.error(err) 48 | process.exit(1) 49 | } 50 | -------------------------------------------------------------------------------- /src/commands/clone.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'clone', 3 | command: clone, 4 | help: [ 5 | 'Clone a remote Dat archive', 6 | '', 7 | 'Usage: dat clone [download-folder]' 8 | ].join('\n'), 9 | options: [ 10 | { 11 | name: 'empty', 12 | boolean: false, 13 | default: false, 14 | help: 'Do not download files by default. Files must be synced manually.' 15 | }, 16 | { 17 | name: 'upload', 18 | boolean: true, 19 | default: true, 20 | help: 'announce your address on link (improves connection capability) and upload data to other downloaders.' 
21 | }, 22 | { 23 | name: 'show-key', 24 | boolean: true, 25 | default: false, 26 | abbr: 'k', 27 | help: 'print out the dat key' 28 | } 29 | ] 30 | } 31 | 32 | function clone (opts) { 33 | var fs = require('fs') 34 | var path = require('path') 35 | var rimraf = require('rimraf') 36 | var Dat = require('dat-node') 37 | var linkResolve = require('dat-link-resolve') 38 | var neatLog = require('neat-log') 39 | var archiveUI = require('../ui/archive') 40 | var trackArchive = require('../lib/archive') 41 | var discoveryExit = require('../lib/discovery-exit') 42 | var onExit = require('../lib/exit') 43 | var parseArgs = require('../parse-args') 44 | var debug = require('debug')('dat') 45 | 46 | var parsed = parseArgs(opts) 47 | opts.key = parsed.key || opts._[0] // pass other links to resolver 48 | opts.dir = parsed.dir 49 | opts.showKey = opts['show-key'] // using abbr in option makes printed help confusing 50 | opts.sparse = opts.empty 51 | 52 | debug('clone()') 53 | 54 | // cmd: dat /path/to/dat.json (opts.key is path to dat.json) 55 | if (fs.existsSync(opts.key)) { 56 | try { 57 | opts.key = getDatJsonKey() 58 | } catch (e) { 59 | debug('error reading dat.json key', e) 60 | } 61 | } 62 | 63 | debug(Object.assign({}, opts, { key: '', _: null })) // don't show key 64 | 65 | var neat = neatLog(archiveUI, { logspeed: opts.logspeed, quiet: opts.quiet, debug: opts.debug }) 66 | neat.use(trackArchive) 67 | neat.use(discoveryExit) 68 | neat.use(onExit) 69 | neat.use(function (state, bus) { 70 | if (!opts.key) return bus.emit('exit:warn', 'key required to clone') 71 | 72 | state.opts = opts 73 | var createdDirectory = null // so we can delete directory if we get error 74 | 75 | // Force these options for clone command 76 | opts.exit = (opts.exit !== false) 77 | // opts.errorIfExists = true // TODO: do we want to force this? 
78 | 79 | /* Resolve the user-supplied link (raw key, dat://, http mirror, etc.) into a raw dat key before cloning. */ linkResolve(opts.key, function (err, key) { 80 | if (err && err.message.indexOf('Invalid key') === -1) return bus.emit('exit:error', 'Could not resolve link') 81 | else if (err) return bus.emit('exit:warn', 'Link is not a valid Dat link.') 82 | 83 | opts.key = key 84 | createDir(opts.key, function () { 85 | bus.emit('key', key) 86 | runDat() 87 | }) 88 | }) 89 | 90 | /* Ensure the destination directory exists. Records in the closure variable `createdDirectory` whether we made it, so runDat() can clean up on failure. */ function createDir (key, cb) { 91 | debug('Checking directory for clone') 92 | // Create the directory if it doesn't exist 93 | // If no dir is specified, we put dat in a dir with name = key 94 | if (!opts.dir) opts.dir = key 95 | if (!Buffer.isBuffer(opts.dir) && typeof opts.dir !== 'string') { 96 | return bus.emit('exit:error', 'Directory path must be a string or Buffer') 97 | } 98 | /* fs.F_OK is the legacy alias for fs.constants.F_OK — TODO confirm minimum supported Node version before modernizing. */ fs.access(opts.dir, fs.F_OK, function (err) { 99 | if (!err) { 100 | /* Directory already existed: never delete it on a later error. */ createdDirectory = false 101 | return cb() 102 | } 103 | debug('No existing directory, creating it.') 104 | createdDirectory = true 105 | fs.mkdir(opts.dir, cb) 106 | }) 107 | } 108 | 109 | /* Open (or create) the dat in opts.dir and hand it to the UI state via the bus. */ function runDat () { 110 | Dat(opts.dir, opts, function (err, dat) { 111 | if (err && err.name === 'ExistsError') return bus.emit('exit:warn', 'Existing archive in this directory. Use pull or sync to update.') 112 | if (err) { 113 | /* NOTE(review): `dat` is undefined on this error path, so rimraf.sync(dat.path) throws a TypeError instead of removing the directory we just created — this cleanup should use opts.dir instead. */ if (createdDirectory) rimraf.sync(dat.path) 114 | return bus.emit('exit:error', err) 115 | } 116 | if (dat.writable) return bus.emit('exit:warn', 'Archive is writable. 
Cannot clone your own archive =).') 117 | 118 | state.dat = dat 119 | state.title = 'Cloning' 120 | bus.emit('dat') 121 | bus.emit('render') 122 | }) 123 | } 124 | }) 125 | 126 | function getDatJsonKey () { 127 | var datPath = opts.key 128 | var stat = fs.lstatSync(datPath) 129 | 130 | if (stat.isDirectory()) datPath = path.join(datPath, 'dat.json') 131 | 132 | if (!fs.existsSync(datPath) || path.basename(datPath) !== 'dat.json') { 133 | if (stat.isFile()) throw new Error('must specify existing dat.json file to read key') 134 | throw new Error('directory must contain a dat.json') 135 | } 136 | 137 | debug('reading key from dat.json:', datPath) 138 | return JSON.parse(fs.readFileSync(datPath, 'utf8')).url 139 | } 140 | } 141 | -------------------------------------------------------------------------------- /src/commands/create.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'create', 3 | command: create, 4 | help: [ 5 | 'Create an empty dat and dat.json', 6 | '', 7 | 'Usage: dat create [directory]' 8 | ].join('\n'), 9 | options: [ 10 | { 11 | name: 'yes', 12 | boolean: true, 13 | default: false, 14 | abbr: 'y', 15 | help: 'Skip dat.json creation.' 
16 | }, 17 | { 18 | name: 'title', 19 | help: 'the title property for dat.json' 20 | }, 21 | { 22 | name: 'description', 23 | help: 'the description property for dat.json' 24 | } 25 | ] 26 | } 27 | 28 | function create (opts) { 29 | var path = require('path') 30 | var fs = require('fs') 31 | var Dat = require('dat-node') 32 | var output = require('neat-log/output') 33 | var DatJson = require('dat-json') 34 | var prompt = require('prompt') 35 | var chalk = require('chalk') 36 | var parseArgs = require('../parse-args') 37 | var debug = require('debug')('dat') 38 | 39 | debug('dat create') 40 | if (!opts.dir) { 41 | opts.dir = parseArgs(opts).dir || process.cwd() 42 | } 43 | 44 | var welcome = `Welcome to ${chalk.green(`dat`)} program!` 45 | var intro = output(` 46 | You can turn any folder on your computer into a Dat. 47 | A Dat is a folder with some magic. 48 | 49 | Your dat is ready! 50 | We will walk you through creating a 'dat.json' file. 51 | (You can skip dat.json and get started now.) 52 | 53 | Learn more about dat.json: ${chalk.blue(`https://github.com/datprotocol/dat.json`)} 54 | 55 | ${chalk.dim('Ctrl+C to exit at any time')} 56 | 57 | `) 58 | var outro 59 | 60 | // Force certain options 61 | opts.errorIfExists = true 62 | 63 | console.log(welcome) 64 | Dat(opts.dir, opts, function (err, dat) { 65 | if (err && err.name === 'ExistsError') return exitErr('\nArchive already exists.\nYou can use `dat sync` to update.') 66 | if (err) return exitErr(err) 67 | 68 | outro = output(` 69 | 70 | Created empty Dat in ${dat.path}/.dat 71 | 72 | Now you can add files and share: 73 | * Run ${chalk.green(`dat share`)} to create metadata and sync. 74 | * Copy the unique dat link and securely share it. 
75 | 76 | ${chalk.blue(`dat://${dat.key.toString('hex')}`)} 77 | `) 78 | 79 | if (opts.yes) return done() 80 | 81 | console.log(intro) 82 | var datjson = DatJson(dat.archive, { file: path.join(opts.dir, 'dat.json') }) 83 | fs.readFile(path.join(opts.dir, 'dat.json'), 'utf-8', function (err, data) { 84 | if (err || !data) return doPrompt() 85 | data = JSON.parse(data) 86 | debug('read existing dat.json data', data) 87 | doPrompt(data) 88 | }) 89 | 90 | function doPrompt (data) { 91 | if (!data) data = {} 92 | 93 | var schema = { 94 | properties: { 95 | title: { 96 | description: chalk.magenta('Title'), 97 | default: data.title || '', 98 | // pattern: /^[a-zA-Z\s\-]+$/, 99 | // message: 'Name must be only letters, spaces, or dashes', 100 | required: false 101 | }, 102 | description: { 103 | description: chalk.magenta('Description'), 104 | default: data.description || '' 105 | } 106 | } 107 | } 108 | 109 | prompt.override = { title: opts.title, description: opts.description } 110 | prompt.message = '' // chalk.green('> ') 111 | // prompt.delimiter = '' 112 | prompt.start() 113 | prompt.get(schema, writeDatJson) 114 | 115 | function writeDatJson (err, results) { 116 | if (err) return exitErr(err) // prompt error 117 | if (!results.title && !results.description) return done() 118 | datjson.create(results, done) 119 | } 120 | } 121 | 122 | function done (err) { 123 | if (err) return exitErr(err) 124 | console.log(outro) 125 | } 126 | }) 127 | 128 | function exitErr (err) { 129 | if (err && err.message === 'canceled') { 130 | console.log('') 131 | console.log(outro) 132 | process.exit(0) 133 | } 134 | console.error(err) 135 | process.exit(1) 136 | } 137 | } 138 | -------------------------------------------------------------------------------- /src/commands/doctor.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'doctor', 3 | help: [ 4 | 'Call the Doctor! Runs two tests:', 5 | ' 1. 
Check if you can connect to a peer on a public server.', 6 | ' 2. Gives you a link to test direct peer connections.', 7 | '', 8 | 'Usage: dat doctor []' 9 | ].join('\n'), 10 | options: [], 11 | command: function (opts) { 12 | var doctor = require('dat-doctor') 13 | 14 | opts.peerId = opts._[0] 15 | doctor(opts) 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/commands/keys.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'keys', 3 | command: keys, 4 | help: [ 5 | 'View & manage dat keys', 6 | '', 7 | 'Usage:', 8 | '', 9 | ' dat keys view dat key and discovery key', 10 | ' dat keys export export dat secret key', 11 | ' dat keys import import dat secret key to make a dat writable', 12 | '' 13 | ].join('\n'), 14 | options: [ 15 | { 16 | name: 'discovery', 17 | boolean: true, 18 | default: false, 19 | help: 'Print Discovery Key' 20 | } 21 | ] 22 | } 23 | 24 | function keys (opts) { 25 | var Dat = require('dat-node') 26 | var parseArgs = require('../parse-args') 27 | var debug = require('debug')('dat') 28 | 29 | debug('dat keys') 30 | if (!opts.dir) { 31 | opts.dir = parseArgs(opts).dir || process.cwd() 32 | } 33 | opts.createIfMissing = false // keys must always be a resumed archive 34 | 35 | Dat(opts.dir, opts, function (err, dat) { 36 | if (err && err.name === 'MissingError') return exit('Sorry, could not find a dat in this directory.') 37 | if (err) return exit(err) 38 | run(dat, opts) 39 | }) 40 | } 41 | 42 | function run (dat, opts) { 43 | var subcommand = require('subcommand') 44 | var prompt = require('prompt') 45 | 46 | var config = { 47 | root: { 48 | command: function () { 49 | console.log(`dat://${dat.key.toString('hex')}`) 50 | if (opts.discovery) console.log(`Discovery key: ${dat.archive.discoveryKey.toString('hex')}`) 51 | process.exit() 52 | } 53 | }, 54 | commands: [ 55 | { 56 | name: 'export', 57 | command: function foo (args) { 
58 | if (!dat.writable) return exit('Dat must be writable to export.') 59 | console.log(dat.archive.metadata.secretKey.toString('hex')) 60 | } 61 | }, 62 | { 63 | name: 'import', 64 | command: function bar (args) { 65 | if (dat.writable) return exit('Dat is already writable.') 66 | importKey() 67 | } 68 | } 69 | ] 70 | } 71 | 72 | subcommand(config)(process.argv.slice(3)) 73 | 74 | function importKey () { 75 | // get secret key & write 76 | 77 | var schema = { 78 | properties: { 79 | key: { 80 | pattern: /^[a-z0-9]{128}$/, 81 | message: 'Use `dat keys export` to get the secret key (128 character hash).', 82 | hidden: true, 83 | required: true, 84 | description: 'dat secret key' 85 | } 86 | } 87 | } 88 | prompt.message = '' 89 | prompt.start() 90 | prompt.get(schema, function (err, data) { 91 | if (err) return done(err) 92 | var secretKey = data.key 93 | if (typeof secretKey === 'string') secretKey = Buffer.from(secretKey, 'hex') 94 | // Automatically writes the metadata.ogd file 95 | dat.archive.metadata._storage.secretKey.write(0, secretKey, done) 96 | }) 97 | 98 | function done (err) { 99 | if (err) return exit(err) 100 | console.log('Successful import. Dat is now writable.') 101 | exit() 102 | } 103 | } 104 | } 105 | 106 | function exit (err) { 107 | if (err) { 108 | console.error(err) 109 | process.exit(1) 110 | } 111 | process.exit(0) 112 | } 113 | -------------------------------------------------------------------------------- /src/commands/log.js: -------------------------------------------------------------------------------- 1 | 2 | module.exports = { 3 | name: 'log', 4 | help: [ 5 | 'View history and information about a dat', 6 | '', 7 | 'Usage: dat log [dir|link]' 8 | ].join('\n'), 9 | options: [ 10 | { 11 | name: 'live', 12 | boolean: true, 13 | default: false, 14 | help: 'View live updates to history.' 
15 | } 16 | ], 17 | command: function (opts) { 18 | var log = require('dat-log') 19 | log(opts) 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /src/commands/publish.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'publish', 3 | command: publish, 4 | help: [ 5 | 'Publish your dat to a Dat registry', 6 | 'Usage: dat publish []', 7 | '', 8 | 'By default it will publish to your active registry.', 9 | 'Specify the server to change where the dat is published.' 10 | ].join('\n'), 11 | options: [ 12 | { 13 | name: 'server', 14 | help: 'Publish dat to this registry. Defaults to active login.' 15 | } 16 | ] 17 | } 18 | 19 | function publish (opts) { 20 | var path = require('path') 21 | var Dat = require('dat-node') 22 | var encoding = require('dat-encoding') 23 | var output = require('neat-log/output') 24 | var prompt = require('prompt') 25 | var chalk = require('chalk') 26 | var DatJson = require('dat-json') 27 | var xtend = Object.assign 28 | var Registry = require('../registry') 29 | 30 | if (!opts.dir) opts.dir = process.cwd() 31 | if (opts._[0]) opts.server = opts._[0] 32 | if (!opts.server) opts.server = 'datbase.org' // nicer error message if not logged in 33 | 34 | var client = Registry(opts) 35 | var whoami = client.whoami() 36 | if (!whoami || !whoami.token) { 37 | var loginErr = output(` 38 | Welcome to ${chalk.green(`dat`)} program! 39 | Publish your dats to ${chalk.green(opts.server)}. 40 | 41 | ${chalk.bold('Please login before publishing')} 42 | ${chalk.green('dat login')} 43 | 44 | New to ${chalk.green(opts.server)} and need an account? 
45 | ${chalk.green('dat register')} 46 | 47 | Explore public dats at ${chalk.blue('datbase.org/explore')} 48 | `) 49 | return exitErr(loginErr) 50 | } 51 | 52 | opts.createIfMissing = false // publish must always be a resumed archive 53 | Dat(opts.dir, opts, function (err, dat) { 54 | if (err && err.name === 'MissingError') return exitErr('No existing dat in this directory. Create a dat before publishing.') 55 | else if (err) return exitErr(err) 56 | 57 | dat.joinNetwork() // join network to upload metadata 58 | 59 | var datjson = DatJson(dat.archive, { file: path.join(dat.path, 'dat.json') }) 60 | datjson.read(publish) 61 | 62 | function publish (_, data) { 63 | // ignore datjson.read() err, we'll prompt for name 64 | 65 | // xtend dat.json with opts 66 | var datInfo = xtend({ 67 | name: opts.name, 68 | url: 'dat://' + encoding.toStr(dat.key), // force correct url in publish? what about non-dat urls? 69 | title: opts.title, 70 | description: opts.description 71 | }, data) 72 | var welcome = output(` 73 | Publishing dat to ${chalk.green(opts.server)}! 
74 | 75 | `) 76 | console.log(welcome) 77 | 78 | if (datInfo.name) return makeRequest(datInfo) 79 | 80 | prompt.message = '' 81 | prompt.start() 82 | prompt.get({ 83 | properties: { 84 | name: { 85 | description: chalk.magenta('dat name'), 86 | pattern: /^[a-zA-Z0-9-]+$/, 87 | message: `A dat name can only have letters, numbers, or dashes.\n Like ${chalk.bold('cool-cats-12meow')}`, 88 | required: true 89 | } 90 | } 91 | }, function (err, results) { 92 | if (err) return exitErr(err) 93 | datInfo.name = results.name 94 | makeRequest(datInfo) 95 | }) 96 | } 97 | 98 | function makeRequest (datInfo) { 99 | console.log(`Please wait, '${chalk.bold(datInfo.name)}' will soon be ready for its great unveiling...`) 100 | client.dats.create(datInfo, function (err, resp, body) { 101 | if (err) { 102 | if (err.message) { 103 | if (err.message === 'timed out') { 104 | return exitErr(output(`${chalk.red('\nERROR: ' + opts.server + ' could not connect to your computer.')} 105 | Troubleshoot here: ${chalk.green('https://docs.datproject.org/troubleshooting#networking-issues')} 106 | `)) 107 | } 108 | var str = err.message.trim() 109 | if (str === 'jwt expired') return exitErr(`Session expired, please ${chalk.green('dat login')} again`) 110 | return exitErr('ERROR: ' + err.message) // node error 111 | } 112 | 113 | // server response errors 114 | return exitErr('ERROR: ' + err.toString()) 115 | } 116 | if (body.statusCode === 400) return exitErr(new Error(body.message)) 117 | 118 | datjson.write(datInfo, function (err) { 119 | if (err) return exitErr(err) 120 | // TODO: write published url to dat.json (need spec) 121 | var msg = output(` 122 | 123 | We ${body.updated === 1 ? 'updated' : 'published'} your dat! 124 | ${chalk.blue.underline(`${opts.server}/${whoami.username}/${datInfo.name}`)} 125 | `)// TODO: get url back? 
it'd be better to confirm link than guess username/datname structure 126 | 127 | console.log(msg) 128 | if (body.updated === 1) { 129 | console.log(output(` 130 | 131 | ${chalk.dim.green('Cool fact #21')} 132 | ${opts.server} will live update when you are sharing your dat! 133 | You only need to publish again if your dat link changes. 134 | `)) 135 | } else { 136 | console.log(output(` 137 | 138 | Remember to use ${chalk.green('dat share')} before sharing. 139 | This will make sure your dat is available. 140 | `)) 141 | } 142 | process.exit(0) 143 | }) 144 | }) 145 | } 146 | }) 147 | } 148 | 149 | function exitErr (err) { 150 | console.error(err) 151 | process.exit(1) 152 | } 153 | -------------------------------------------------------------------------------- /src/commands/pull.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'pull', 3 | command: pull, 4 | help: [ 5 | 'Pull updates from a cloned Dat archive', 6 | '', 7 | 'Usage: dat pull' 8 | ].join('\n'), 9 | options: [ 10 | { 11 | name: 'exit', 12 | boolean: false, 13 | help: 'exit after specified number of seconds, to give the dat network time to find updates. (default: 12)' 14 | }, 15 | 16 | { 17 | name: 'upload', 18 | boolean: true, 19 | default: true, 20 | help: 'announce your address on link (improves connection capability) and upload data to other downloaders.' 21 | }, 22 | { 23 | name: 'selectFromFile', 24 | boolean: false, 25 | default: '.datdownload', 26 | help: 'Sync only the list of selected files or directories in the given file.', 27 | abbr: 'select-from-file' 28 | }, 29 | { 30 | name: 'select', 31 | boolean: false, 32 | default: false, 33 | help: 'Sync only the list of selected files or directories.' 
34 | }, 35 | { 36 | name: 'show-key', 37 | boolean: true, 38 | default: false, 39 | abbr: 'k', 40 | help: 'print out the dat key' 41 | } 42 | ] 43 | } 44 | 45 | function pull (opts) { 46 | var Dat = require('dat-node') 47 | var neatLog = require('neat-log') 48 | var archiveUI = require('../ui/archive') 49 | var trackArchive = require('../lib/archive') 50 | var selectiveSync = require('../lib/selective-sync') 51 | var discoveryExit = require('../lib/discovery-exit') 52 | var onExit = require('../lib/exit') 53 | var parseArgs = require('../parse-args') 54 | var debug = require('debug')('dat') 55 | 56 | debug('dat pull') 57 | if (!opts.dir) { 58 | var parsed = parseArgs(opts) 59 | opts.key = parsed.key 60 | opts.dir = parsed.dir || process.cwd() 61 | } 62 | 63 | opts.showKey = opts['show-key'] // using abbr in option makes printed help confusing 64 | 65 | // Force these options for pull command 66 | opts.createIfMissing = false 67 | 68 | // If --exit is specified without a number of seconds, default to 12 69 | if (opts.exit) { 70 | opts.exit = typeof opts.exit === 'number' 71 | ? opts.exit 72 | : 12 73 | } 74 | 75 | var neat = neatLog(archiveUI, { logspeed: opts.logspeed, quiet: opts.quiet, debug: opts.debug }) 76 | neat.use(trackArchive) 77 | neat.use(discoveryExit) 78 | neat.use(onExit) 79 | neat.use(function (state, bus) { 80 | state.opts = opts 81 | selectiveSync(state, opts) 82 | 83 | Dat(opts.dir, opts, function (err, dat) { 84 | if (err && err.name === 'MissingError') return bus.emit('exit:warn', 'No existing archive in this directory. Use clone to download a new archive.') 85 | if (err) return bus.emit('exit:error', err) 86 | if (dat.writable) return bus.emit('exit:warn', 'Archive is writable. 
Cannot pull your own archive.') 87 | 88 | state.dat = dat 89 | bus.emit('dat') 90 | bus.emit('render') 91 | }) 92 | }) 93 | } 94 | -------------------------------------------------------------------------------- /src/commands/status.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'status', 3 | command: status, 4 | help: [ 5 | 'Get information on about the Dat in a directory.', 6 | '', 7 | 'Usage: dat status' 8 | ].join('\n'), 9 | options: [] 10 | } 11 | 12 | function status (opts) { 13 | var Dat = require('dat-node') 14 | var neatLog = require('neat-log') 15 | var statusUI = require('../ui/status') 16 | var onExit = require('../lib/exit') 17 | var parseArgs = require('../parse-args') 18 | var debug = require('debug')('dat') 19 | 20 | debug('dat status') 21 | if (!opts.dir) { 22 | opts.dir = parseArgs(opts).dir || process.cwd() 23 | } 24 | opts.createIfMissing = false // sync must always be a resumed archive 25 | 26 | var neat = neatLog(statusUI, { logspeed: opts.logspeed, quiet: opts.quiet, debug: opts.debug }) 27 | neat.use(onExit) 28 | neat.use(function (state, bus) { 29 | state.opts = opts 30 | 31 | Dat(opts.dir, opts, function (err, dat) { 32 | if (err && err.name === 'MissingError') return bus.emit('exit:warn', 'Sorry, could not find a dat in this directory.') 33 | if (err) return bus.emit('exit:error', err) 34 | 35 | state.dat = dat 36 | var stats = dat.trackStats() 37 | if (stats.get().version === dat.version) return exit() 38 | stats.on('update', function () { 39 | if (stats.get().version === dat.version) return exit() 40 | }) 41 | 42 | function exit () { 43 | bus.render() 44 | process.exit(0) 45 | } 46 | }) 47 | }) 48 | } 49 | -------------------------------------------------------------------------------- /src/commands/sync.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'sync', 3 | command: sync, 4 | help: [ 5 | 
'Sync a Dat archive with the network', 6 | 'Watch and import file changes', 7 | '', 8 | 'Usage: dat sync' 9 | ].join('\n'), 10 | options: [ 11 | { 12 | name: 'import', 13 | boolean: true, 14 | default: true, 15 | help: 'Import files from the directory to the database (when writable).' 16 | }, 17 | { 18 | name: 'ignoreHidden', 19 | boolean: true, 20 | default: true, 21 | abbr: 'ignore-hidden' 22 | }, 23 | { 24 | name: 'selectFromFile', 25 | boolean: false, 26 | default: '.datdownload', 27 | help: 'Sync only the list of selected files or directories in the given file.', 28 | abbr: 'select-from-file' 29 | }, 30 | { 31 | name: 'select', 32 | boolean: false, 33 | default: false, 34 | help: 'Sync only the list of selected files or directories.' 35 | }, 36 | { 37 | name: 'watch', 38 | boolean: true, 39 | default: true, 40 | help: 'Watch for changes and import updated files (Dat Writable).' 41 | }, 42 | { 43 | name: 'show-key', 44 | boolean: true, 45 | default: true, 46 | abbr: 'k', 47 | help: 'Print out the dat key.' 
48 | } 49 | ] 50 | } 51 | 52 | function sync (opts) { 53 | var Dat = require('dat-node') 54 | var neatLog = require('neat-log') 55 | var archiveUI = require('../ui/archive') 56 | var selectiveSync = require('../lib/selective-sync') 57 | var trackArchive = require('../lib/archive') 58 | var onExit = require('../lib/exit') 59 | var parseArgs = require('../parse-args') 60 | var debug = require('debug')('dat') 61 | 62 | debug('dat sync') 63 | var parsed = parseArgs(opts) 64 | opts.key = parsed.key 65 | opts.dir = parsed.dir || process.cwd() 66 | opts.showKey = opts['show-key'] // using abbr in option makes printed help confusing 67 | 68 | // TODO: if dat-store running, add this dat to the local store and then exit = true 69 | opts.exit = false 70 | 71 | var neat = neatLog(archiveUI, { logspeed: opts.logspeed, quiet: opts.quiet, debug: opts.debug }) 72 | neat.use(trackArchive) 73 | neat.use(onExit) 74 | neat.use(function (state, bus) { 75 | state.opts = opts 76 | selectiveSync(state, opts) 77 | Dat(opts.dir, opts, function (err, dat) { 78 | if (err && err.name === 'IncompatibleError') return bus.emit('exit:warn', 'Directory contains incompatible dat metadata. Please remove the .dat folder in this directory.') 79 | if (err) return bus.emit('exit:error', err) 80 | 81 | state.dat = dat 82 | bus.emit('dat') 83 | bus.emit('render') 84 | }) 85 | }) 86 | } 87 | -------------------------------------------------------------------------------- /src/commands/unpublish.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | name: 'unpublish', 3 | command: unpublish, 4 | options: [ 5 | { 6 | name: 'server', 7 | help: 'Unpublish dat from this Registry.' 
8 | }, 9 | { 10 | name: 'confirm', 11 | default: false, 12 | boolean: true, 13 | abbr: 'y', 14 | help: 'Confirm you want to unpublish' 15 | } 16 | ] 17 | } 18 | 19 | function unpublish (opts) { 20 | var prompt = require('prompt') 21 | var path = require('path') 22 | var Dat = require('dat-node') 23 | var output = require('neat-log/output') 24 | var chalk = require('chalk') 25 | var DatJson = require('dat-json') 26 | var Registry = require('../registry') 27 | 28 | if (opts._[0]) opts.server = opts._[0] 29 | if (!opts.dir) opts.dir = process.cwd() // run in dir for `dat unpublish` 30 | 31 | var client = Registry(opts) 32 | var whoami = client.whoami() 33 | if (!whoami || !whoami.token) { 34 | var loginErr = output(` 35 | Welcome to ${chalk.green(`dat`)} program! 36 | 37 | ${chalk.bold('You must login before unpublishing.')} 38 | ${chalk.green('dat login')} 39 | `) 40 | return exitErr(loginErr) 41 | } 42 | 43 | opts.createIfMissing = false // unpublish dont try to create new one 44 | Dat(opts.dir, opts, function (err, dat) { 45 | if (err) return exitErr(err) 46 | // TODO better error msg for non-existing archive 47 | if (!dat.writable) return exitErr('Sorry, you can only publish a dat that you created.') 48 | 49 | var datjson = DatJson(dat.archive, { file: path.join(dat.path, 'dat.json') }) 50 | datjson.read(function (err, data) { 51 | if (err) return exitErr(err) 52 | if (!data.name) return exitErr('Try `dat unpublish ` with this dat, we are having trouble reading it.') 53 | confirm(data.name) 54 | }) 55 | }) 56 | 57 | function confirm (name) { 58 | console.log(`Unpublishing '${chalk.bold(name)}' from ${chalk.green(whoami.server)}.`) 59 | prompt.message = '' 60 | prompt.colors = false 61 | prompt.start() 62 | prompt.get([{ 63 | name: 'sure', 64 | description: 'Are you sure? This cannot be undone. 
[y/n]', 65 | pattern: /^[a-zA-Z\s-]+$/, 66 | message: '', 67 | required: true 68 | }], function (err, results) { 69 | if (err) return console.log(err.message) 70 | if (results.sure === 'yes' || results.sure === 'y') makeRequest(name) 71 | else exitErr('Cancelled.') 72 | }) 73 | } 74 | 75 | function makeRequest (name) { 76 | client.dats.delete({ name: name }, function (err, resp, body) { 77 | if (err && err.message) exitErr(err.message) 78 | else if (err) exitErr(err.toString()) 79 | if (body.statusCode === 400) return exitErr(new Error(body.message)) 80 | console.log(`Removed your dat from ${whoami.server}`) 81 | process.exit(0) 82 | }) 83 | } 84 | } 85 | 86 | function exitErr (err) { 87 | console.error(err) 88 | process.exit(1) 89 | } 90 | -------------------------------------------------------------------------------- /src/extensions.js: -------------------------------------------------------------------------------- 1 | var debug = require('debug')('dat') 2 | var os = require('os') 3 | 4 | module.exports = runExtension 5 | 6 | function runExtension (opts) { 7 | debug('Trying Extenion', opts._[0]) 8 | 9 | var extName = opts._.shift() 10 | trySpawn(function () { 11 | console.error('We could not run the extension. 
Please make sure it is installed:') 12 | console.error(`npm install -g dat-${extName}`) 13 | process.exit(1) 14 | }) 15 | 16 | function trySpawn (cb) { 17 | var spawn = require('child_process').spawn 18 | var name = 'dat-' + extName 19 | if (os.platform() === 'win32') { 20 | name += '.cmd' 21 | } 22 | var child = spawn(name, process.argv.splice(3)) 23 | child.stdout.pipe(process.stdout) 24 | child.stderr.pipe(process.stderr) 25 | child.on('error', function (err) { 26 | if (err.code === 'ENOENT') return cb() 27 | throw err 28 | }) 29 | child.on('close', function (code) { 30 | process.exit(code) 31 | }) 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/lib/archive.js: -------------------------------------------------------------------------------- 1 | var debug = require('debug')('dat') 2 | var path = require('path') 3 | var EventEmitter = require('events').EventEmitter 4 | var doImport = require('./import-progress') 5 | var stats = require('./stats') 6 | var network = require('./network') 7 | var download = require('./download') 8 | var serve = require('./serve-http') 9 | 10 | module.exports = function (state, bus) { 11 | state.warnings = state.warnings || [] 12 | bus.once('dat', function () { 13 | state.writable = state.dat.writable 14 | state.joinNetwork = !(state.joinNetwork === false) 15 | 16 | stats(state, bus) 17 | if (state.joinNetwork) network(state, bus) 18 | if (state.opts.http) serve(state, bus) 19 | 20 | if (state.writable && state.opts.import) doImport(state, bus) 21 | else if (state.opts.sparse) selectiveSync(state, bus) 22 | else download(state, bus) 23 | 24 | if (state.dat.archive.content) return bus.emit('archive:content') 25 | state.dat.archive.once('content', function () { 26 | bus.emit('archive:content') 27 | }) 28 | }) 29 | 30 | bus.once('archive:content', function () { 31 | state.hasContent = true 32 | }) 33 | } 34 | 35 | function selectiveSync (state, bus) { 36 | var archive = 
state.dat.archive 37 | debug('sparse mode. downloading metadata') 38 | var emitter = new EventEmitter() 39 | 40 | function download (entry) { 41 | debug('selected', entry) 42 | archive.stat(entry, function (err, stat) { 43 | if (err) return state.warnings.push(err.message) 44 | if (stat.isDirectory()) downloadDir(entry, stat) 45 | if (stat.isFile()) downloadFile(entry, stat) 46 | }) 47 | } 48 | 49 | function downloadDir (dirname, stat) { 50 | debug('downloading dir', dirname) 51 | archive.readdir(dirname, function (err, entries) { 52 | if (err) return bus.emit('exit:error', err) 53 | entries.forEach(function (entry) { 54 | emitter.emit('download', path.join(dirname, entry)) 55 | }) 56 | }) 57 | } 58 | 59 | function downloadFile (entry, stat) { 60 | var start = stat.offset 61 | var end = stat.offset + stat.blocks 62 | state.selectedByteLength += stat.size 63 | bus.emit('render') 64 | if (start === 0 && end === 0) return 65 | debug('downloading', entry, start, end) 66 | archive.content.download({ start, end }, function () { 67 | debug('success', entry) 68 | }) 69 | } 70 | 71 | emitter.on('download', download) 72 | if (state.opts.selectedFiles) state.opts.selectedFiles.forEach(download) 73 | 74 | if (state.opts.empty) { 75 | archive.metadata.update(function () { 76 | return bus.emit('exit:warn', `Dat successfully created in empty mode. 
Download files using pull or sync.`) 77 | }) 78 | } 79 | 80 | /* Re-render on every metadata update so newly selected entries show up. */ archive.on('update', function () { 81 | debug('archive update') 82 | bus.emit('render') 83 | }) 84 | } 85 | -------------------------------------------------------------------------------- /src/lib/discovery-exit.js: -------------------------------------------------------------------------------- 1 | var output = require('neat-log/output') 2 | 3 | module.exports = discoveryExit 4 | 5 | /* neat-log middleware: once the swarm's initial discovery callback fires, warn and exit when --exit is set but no peers were found, instead of hanging forever. */ function discoveryExit (state, bus) { 6 | bus.once('network:callback', checkExit) 7 | 8 | function checkExit () { 9 | /* Connections exist, or user did not ask to exit — nothing to do. */ if (state.dat.network.connections || !state.opts.exit) return 10 | if (state.dat.network.connecting) return setTimeout(checkExit, 500) // wait to see if any connections resolve 11 | var msg = output(` 12 | Dat could not find any connections for that link. 13 | There may not be any sources online. 14 | 15 | Ensure that everyone is using the latest version, using dat -v 16 | `) 17 | bus.emit('exit:warn', msg) 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/lib/download.js: -------------------------------------------------------------------------------- 1 | var debug = require('debug')('dat') 2 | var xtend = Object.assign 3 | 4 | module.exports = trackDownload 5 | 6 | /* neat-log middleware: track download progress on state.download and (when opts.exit is set) terminate the process once the archive is fully synced. Waits for the content feed before attaching listeners. */ function trackDownload (state, bus) { 7 | if (state.hasContent) return track() 8 | bus.once('archive:content', track) 9 | 10 | function track () { 11 | var archive = state.dat.archive 12 | 13 | /* modified: whether any content changed this session; nsync: whether we are currently fully in sync. */ state.download = xtend({ 14 | modified: false, 15 | nsync: false 16 | }, {}) 17 | 18 | archive.content.on('clear', function () { 19 | debug('archive clear') 20 | state.download.modified = true 21 | }) 22 | 23 | archive.content.on('download', function (index, data) { 24 | state.download.modified = true 25 | }) 26 | 27 | archive.on('syncing', function () { 28 | debug('archive syncing') 29 | state.download.nsync = false 30 | }) 31 | 32 | archive.on('sync', function () { 33 | debug('archive sync',
state.stats.get()) 34 | state.download.nsync = true 35 | // if we are supposed to exit, do so if we've pulled changes or have given the network the desired wait time 36 | if (state.opts.exit) { 37 | if (state.download.modified) { 38 | return exit() 39 | } else { 40 | /* NOTE(review): this schedules a new timer on every 'sync' event — presumably harmless because exit() terminates the process, but confirm multiple timers cannot stack on long-lived syncs. */ var delayInMilliseconds = 1000 * state.opts.exit 41 | setTimeout(exit, delayInMilliseconds) 42 | } 43 | } 44 | if (state.dat.archive.version === 0) { 45 | // TODO: deal with this. 46 | // Sync sometimes fires early when it should wait for update. 47 | } 48 | bus.emit('render') 49 | }) 50 | 51 | archive.on('update', function () { 52 | debug('archive update') 53 | bus.emit('render') 54 | }) 55 | 56 | /* Exit once the stats feed has caught up with the archive version, so the final render shows accurate numbers. */ function exit () { 57 | /* NOTE(review): each call while stats lag behind registers another 'update' listener — likely fine since the process exits shortly after, but verify no listener accumulation warning. */ if (state.stats.get().version !== archive.version) { 58 | return state.stats.on('update', exit) 59 | } 60 | state.exiting = true 61 | bus.render() 62 | process.exit(0) 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/lib/exit.js: -------------------------------------------------------------------------------- 1 | 2 | module.exports = onExit 3 | 4 | /* neat-log middleware: central exit handling. 'exit:error' prints and exits 1; 'exit:warn' clears the UI first, then does the same; plain 'exit' renders one final frame and exits 0. */ function onExit (state, bus) { 5 | bus.on('exit:error', onError) 6 | bus.on('exit:warn', function (err) { 7 | onError(err, true) 8 | }) 9 | bus.on('exit', function () { 10 | state.exiting = true 11 | bus.render() 12 | process.exit() 13 | }) 14 | 15 | function onError (err, clear) { 16 | if (clear) bus.clear() 17 | console.error(err) 18 | process.exit(1) 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /src/lib/import-progress.js: -------------------------------------------------------------------------------- 1 | var xtend = Object.assign 2 | 3 | module.exports = trackImport 4 | 5 | /* neat-log middleware: run dat.importFiles() and mirror its progress events onto state.importer for the UI. Defers until the 'dat' event if the archive is not ready yet. */ function trackImport (state, bus) { 6 | if (state.dat) return track() 7 | bus.once('dat', track) 8 | 9 | function track () { 10 | var progress = state.dat.importFiles(state.opts, function (err) { 11 | if (err) return bus.emit('exit:error', err) 12 | 
state.importer.fileImport = null 13 | state.exiting = true 14 | bus.emit('render') 15 | }) 16 | state.importer = xtend({ 17 | importedBytes: 0, 18 | count: progress.count, 19 | liveImports: [], 20 | indexSpeed: progress.indexSpeed 21 | }, progress) 22 | bus.emit('dat:importer') 23 | 24 | var counting = setInterval(function () { 25 | // Update file count in progress counting (for big dirs) 26 | bus.emit('render') 27 | }, state.opts.logspeed) 28 | 29 | progress.on('count', function (count) { 30 | clearInterval(counting) 31 | state.count = count 32 | state.count.done = true 33 | bus.emit('render') 34 | }) 35 | 36 | progress.on('del', function (src, dst) { 37 | if (src.live) state.importer.liveImports.push({ src: src, dst: dst, type: 'del' }) 38 | }) 39 | 40 | progress.on('put', function (src, dst) { 41 | if (src.live) state.importer.liveImports.push({ src: src, dst: dst, type: 'put' }) 42 | if (src.stat.isDirectory()) return 43 | state.importer.fileImport = { 44 | src: src, 45 | dst: dst, 46 | progress: 0, 47 | type: 'put' 48 | } 49 | bus.emit('render') 50 | }) 51 | 52 | progress.on('put-data', function (chunk, src, dst) { 53 | state.importer.fileImport.progress += chunk.length 54 | if (!src.live) state.importer.importedBytes += chunk.length // don't include live in total 55 | state.importer.indexSpeed = progress.indexSpeed 56 | bus.emit('render') 57 | }) 58 | } 59 | } 60 | -------------------------------------------------------------------------------- /src/lib/network.js: -------------------------------------------------------------------------------- 1 | var bytes = require('bytes').parse 2 | var speed = require('speedometer') 3 | var throttle = require('throttle') 4 | var pump = require('pump') 5 | var debug = require('debug')('dat') 6 | var xtend = Object.assign 7 | 8 | module.exports = trackNetwork 9 | 10 | function trackNetwork (state, bus) { 11 | if (state.dat) return track() 12 | bus.once('dat', track) 13 | 14 | function track () { 15 | var opts = state.opts 
16 | if (state.opts.up || state.opts.down) { 17 | opts = xtend({}, opts, { 18 | connect: function (local, remote) { 19 | var streams = [local, remote, local] 20 | if (state.opts.up) streams.splice(1, 0, throttle(bytes(state.opts.up))) 21 | if (state.opts.down) streams.splice(-1, 0, throttle(bytes(state.opts.down))) 22 | pump(streams) 23 | } 24 | }) 25 | } 26 | var network = state.dat.joinNetwork(opts, function () { 27 | bus.emit('network:callback') 28 | }) 29 | 30 | network.on('error', function (err) { 31 | if (err.code === 'EADDRINUSE') { 32 | if (opts.port) { 33 | bus.emit('exit:warn', `Specified port (${opts.port}) in use. Please use another port.`) 34 | } else { 35 | debug(err.message + ' trying random port') 36 | } 37 | } else { 38 | debug('network error:', err.message) 39 | // TODO return bus.emit('exit:error', err) 40 | } 41 | }) 42 | state.network = xtend(network, state.network) 43 | bus.emit('dat:network') 44 | 45 | network.on('connection', function (conn, info) { 46 | bus.emit('render') 47 | conn.on('close', function () { 48 | bus.emit('render') 49 | }) 50 | }) 51 | 52 | if (state.opts.sources) trackSources() 53 | if (state.stats) return trackSpeed() 54 | bus.once('dat:stats', trackSpeed) 55 | 56 | function trackSpeed () { 57 | setInterval(function () { 58 | bus.emit('render') 59 | }, state.opts.logspeed) 60 | } 61 | 62 | function trackSources () { 63 | state.sources = state.sources || {} 64 | network.on('connection', function (conn, info) { 65 | var id = info.id.toString('hex') 66 | var peerSpeed = speed() 67 | 68 | state.sources[id] = info 69 | state.sources[id].speed = peerSpeed() 70 | state.sources[id].getProgress = function () { 71 | 72 | // TODO: how to get right peer from archive.content? 
73 | // var remote = conn.feeds[1].remoteLength 74 | // // state.dat.archive.content.sources[0].feed.id.toString('hex') 75 | // if (!remote) return 76 | // return remote / dat.archive.content.length 77 | } 78 | 79 | conn.feeds.map(function (feed) { 80 | feed.stream.on('data', function (data) { 81 | state.sources[id].speed = peerSpeed(data.length) 82 | bus.emit('render') 83 | }) 84 | feed.stream.on('error', function (err) { 85 | state.sources[id].error = err 86 | }) 87 | }) 88 | bus.emit('render') 89 | 90 | conn.on('close', function () { 91 | state.sources[id].speed = 0 92 | state.sources[id].closed = true 93 | bus.emit('render') 94 | }) 95 | }) 96 | } 97 | } 98 | } 99 | -------------------------------------------------------------------------------- /src/lib/selective-sync.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs') 2 | var path = require('path') 3 | 4 | module.exports = function (state, opts) { 5 | // selective sync stuff 6 | var parsing = opts.selectFromFile !== '.datdownload' ? 
opts.selectFromFile : path.join(opts.dir, '.datdownload') 7 | opts.selectedFiles = parseFiles(parsing) 8 | if (opts.select && typeof opts.select === 'string') opts.selectedFiles = opts.select.split(',') 9 | if (opts.selectedFiles) { 10 | state.title = 'Syncing' 11 | state.selectedByteLength = 0 12 | opts.sparse = true 13 | } 14 | return state 15 | } 16 | 17 | function parseFiles (input) { 18 | var parsed = null 19 | 20 | try { 21 | if (fs.statSync(input).isFile()) { 22 | parsed = fs.readFileSync(input).toString().trim().split(/\r?\n/) 23 | } 24 | } catch (err) { 25 | if (err && !err.name === 'ENOENT') { 26 | console.error(err) 27 | process.exit(1) 28 | } 29 | } 30 | 31 | return parsed 32 | } 33 | -------------------------------------------------------------------------------- /src/lib/serve-http.js: -------------------------------------------------------------------------------- 1 | 2 | module.exports = runHttp 3 | 4 | function runHttp (state, bus) { 5 | if (state.dat) return serve() 6 | bus.once('dat', serve) 7 | 8 | function serve () { 9 | var port = (typeof state.opts.http === 'boolean') ? 
8080 : state.opts.http 10 | var server = state.dat.serveHttp({ port: port }) 11 | 12 | server.on('listening', function () { 13 | state.http = { port: port, listening: true } 14 | bus.emit('render') 15 | }) 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /src/lib/stats.js: -------------------------------------------------------------------------------- 1 | var xtend = Object.assign 2 | 3 | module.exports = trackStats 4 | 5 | function trackStats (state, bus) { 6 | if (state.dat) return track() 7 | bus.once('dat', track) 8 | 9 | function track () { 10 | var stats = state.dat.trackStats(state.opts) 11 | state.stats = xtend(stats, state.stats) 12 | stats.on('update', function () { 13 | bus.emit('stats:update') 14 | bus.emit('render') 15 | }) 16 | bus.emit('stats') 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /src/parse-args.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs') 2 | var path = require('path') 3 | var encoding = require('dat-encoding') 4 | 5 | module.exports = function (opts) { 6 | // dat [] arg1 arg2 [options] 7 | // parse args without options from opts._ 8 | // return parsed { dir, key } 9 | var parsed = { 10 | key: opts.key || null, 11 | dir: opts.dir || null // process.cwd() ? 
12 | } 13 | 14 | // dat [] 15 | if (!opts._.length) return parsed 16 | 17 | // dat [] arg1 arg2 18 | // arg1 = key 19 | // arg2 = dir 20 | if (opts._.length === 2) { 21 | parsed.key = opts._[0] 22 | parsed.dir = opts._[1] 23 | return parsed 24 | } 25 | 26 | // dat [] arg 27 | // arg = dir or key 28 | 29 | // First, check if key 30 | try { 31 | parsed.key = encoding.toStr(opts._[0]) 32 | return parsed 33 | } catch (err) { 34 | if (err && err.message !== 'Invalid key') { 35 | // catch non-key errors 36 | console.error(err) 37 | process.exit(1) 38 | } 39 | } 40 | 41 | try { 42 | var stat = fs.statSync(opts._[0]) 43 | if (stat.isFile()) { 44 | parsed.dir = path.resolve(path.dirname(opts._[0])) 45 | } else { 46 | parsed.dir = opts._[0] 47 | } 48 | } catch (err) { 49 | if (err && !err.name === 'ENOENT') { 50 | console.error(err) 51 | process.exit(1) 52 | } 53 | } 54 | 55 | return parsed 56 | } 57 | -------------------------------------------------------------------------------- /src/registry.js: -------------------------------------------------------------------------------- 1 | var xtend = Object.assign 2 | var RegistryClient = require('dat-registry') 3 | 4 | module.exports = function (opts) { 5 | var townshipOpts = { 6 | server: opts.server, 7 | config: { 8 | filepath: opts.config // defaults to ~/.datrc via dat-registry 9 | } 10 | } 11 | var defaults = { 12 | // xtend doesn't overwrite when key is present but undefined 13 | // If we want a default, make sure it's not going to passed as undefined 14 | } 15 | var options = xtend(defaults, townshipOpts) 16 | return RegistryClient(options) 17 | } 18 | -------------------------------------------------------------------------------- /src/ui/archive.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var output = require('neat-log/output') 3 | var pretty = require('prettier-bytes') 4 | var chalk = require('chalk') 5 | var downloadUI = 
require('./components/download') 6 | var importUI = require('./components/import-progress') 7 | var warningsUI = require('./components/warnings') 8 | var networkUI = require('./components/network') 9 | var sourcesUI = require('./components/sources') 10 | var keyEl = require('./elements/key') 11 | var pluralize = require('./elements/pluralize') 12 | var version = require('./elements/version') 13 | var pkg = require('../../package.json') 14 | 15 | module.exports = archiveUI 16 | 17 | function archiveUI (state) { 18 | if (!state.dat) return 'Starting Dat program...' 19 | if (!state.writable && !state.hasContent) return 'Connecting to dat network...' 20 | if (!state.warnings) state.warnings = [] 21 | 22 | var dat = state.dat 23 | var stats = dat.stats.get() 24 | var title = (state.dat.resumed) ? '' : `Created new dat in ${dat.path}${path.sep}.dat\n` 25 | var progressView 26 | 27 | if (state.writable || state.opts.showKey) { 28 | title += `${keyEl(dat.key)}\n` 29 | } 30 | if (state.title) title += state.title 31 | else if (state.writable) title += 'Sharing dat' 32 | else title += 'Downloading dat' 33 | if (state.opts.sparse) title += `: ${state.opts.selectedFiles.length} ${pluralize('file', state.opts.selectedFiles.length)} (${pretty(state.selectedByteLength)})` 34 | else if (stats.version > 0) title += `: ${stats.files} ${pluralize('file', stats.file)} (${pretty(stats.byteLength)})` 35 | else if (stats.version === 0) title += ': (empty archive)' 36 | if (state.http && state.http.listening) title += `\nServing files over http at http://localhost:${state.http.port}` 37 | 38 | if (!state.writable) { 39 | progressView = downloadUI(state) 40 | } else { 41 | if (state.opts.import) { 42 | progressView = importUI(state) 43 | } else { 44 | progressView = 'Not importing files.' // TODO: ? 45 | } 46 | } 47 | 48 | return output(` 49 | ${version(pkg.version)} 50 | ${title} 51 | ${state.joinNetwork ? '\n' + networkUI(state) : ''} 52 | 53 | ${progressView} 54 | ${state.opts.sources ? 
sourcesUI(state) : ''} 55 | ${state.warnings ? warningsUI(state) : ''} 56 | ${state.exiting ? 'Exiting the Dat program...' : chalk.dim('Ctrl+C to Exit')} 57 | `) 58 | } 59 | -------------------------------------------------------------------------------- /src/ui/components/download.js: -------------------------------------------------------------------------------- 1 | var output = require('neat-log/output') 2 | var bar = require('progress-string') 3 | 4 | module.exports = networkUI 5 | 6 | function networkUI (state) { 7 | var stats = state.stats.get() 8 | var download = state.download 9 | if (!stats || !download) return '' 10 | 11 | var title = 'Downloading updates...' 12 | var downBar = makeBar() 13 | 14 | if (download.nsync) { 15 | if (state.opts.exit && state.dat.archive.version === 0) { 16 | return 'dat synced. There is no content in this archive.' 17 | } 18 | if (state.opts.exit && download.modified) { 19 | return `dat sync complete.\nVersion ${stats.version}` 20 | } 21 | 22 | if (!download.modified && state.opts.exit) { 23 | title = `dat already in sync, waiting for updates.` 24 | } else { 25 | title = `dat synced, waiting for updates.` 26 | } 27 | } 28 | 29 | if (typeof state.opts.exit === 'number') { 30 | title = `dat synced, exiting in ${state.opts.exit} seconds.` 31 | } 32 | 33 | if (!stats.downloaded || !stats.length) { 34 | return '' // no metadata yet 35 | } 36 | 37 | return output(` 38 | ${title} 39 | ${downBar(stats.downloaded)} 40 | `) 41 | 42 | function makeBar () { 43 | var total = stats.length 44 | return bar({ 45 | total: total, 46 | style: function (a, b) { 47 | return `[${a}${b}] ${(100 * stats.downloaded / total).toFixed(2)}%` 48 | } 49 | }) 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/ui/components/import-progress.js: -------------------------------------------------------------------------------- 1 | var output = require('neat-log/output') 2 | var pretty = 
require('prettier-bytes') 3 | var bar = require('progress-string') 4 | var cliTruncate = require('cli-truncate') 5 | 6 | module.exports = importUI 7 | 8 | function importUI (state) { 9 | var watch = state.opts.watch 10 | var importState = state.importer 11 | var indexSpeed = importState.indexSpeed ? `(${pretty(importState.indexSpeed)}/s)` : '' 12 | 13 | if (importState.count && !importState.count.done) { 14 | // dry run in progress 15 | if (!importState.count.files) return 'Checking for file updates...' 16 | return output(` 17 | Metadata created for ${importState.putDone.files} of ${importState.count.files} files ${indexSpeed} 18 | (Calculating file count...) 19 | ${fileImport(importState.fileImport)} 20 | `) 21 | } else if (importState.putDone.files >= importState.count.files) { 22 | // Initial import done 23 | if (!watch) return 'Archive metadata updated for all files.' 24 | return liveImport() 25 | } 26 | 27 | var total = importState.count.bytes 28 | var totalBar = bar({ 29 | total: total, 30 | style: function (a, b) { 31 | return `[${a}${b}] ${(100 * importState.importedBytes / total).toFixed(0)}%` 32 | } 33 | }) 34 | 35 | return output(` 36 | Creating metadata for ${importState.count.files} files ${indexSpeed} 37 | ${totalBar(importState.importedBytes)} 38 | ${fileImport(importState.fileImport)} 39 | `) 40 | 41 | function liveImport () { 42 | // Live import 43 | var imports = importState.liveImports.slice(1).slice(-7) 44 | return output(` 45 | Watching for file updates 46 | ${imports.reverse().map(function (file) { return fileImport(file) }).join('\n')} 47 | `) 48 | } 49 | 50 | function fileImport (file) { 51 | if (!file) return '' 52 | if (file.type === 'del') return `DEL: ${file.src.name}` 53 | 54 | var total = file.src.stat.size 55 | var name = file.dst.name.substr(1) // remove '/' at start 56 | var size 57 | 58 | // >500 mb show progress 59 | if (total < 5e8 || !file.progress) size = `(${pretty(total)})` 60 | else size = `(${pretty(file.progress)} / 
${pretty(total)})` 61 | return output(` 62 | ADD: ${cliTruncate(name, process.stdout.columns - 7 - size.length, { position: 'start' })} ${size} 63 | `) 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/ui/components/network.js: -------------------------------------------------------------------------------- 1 | var output = require('neat-log/output') 2 | var pretty = require('prettier-bytes') 3 | var pluralize = require('../elements/pluralize') 4 | 5 | module.exports = networkUI 6 | 7 | function networkUI (state) { 8 | var network = state.network 9 | var stats = state.stats 10 | 11 | if (!network) return '' 12 | var peers = stats.peers.total || 0 13 | // var complete = stats.peers.complete 14 | return output(` 15 | ${peers} ${pluralize('connection', peers)} ${speedUI()} 16 | `) 17 | 18 | function speedUI () { 19 | var output = '| ' 20 | var speed = state.stats.network 21 | var upSpeed = speed.uploadSpeed || 0 22 | var downSpeed = speed.downloadSpeed || 0 23 | output += `Download ${pretty(downSpeed)}/s` 24 | output += ` Upload ${pretty(upSpeed)}/s ` 25 | return output 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /src/ui/components/sources.js: -------------------------------------------------------------------------------- 1 | var output = require('neat-log/output') 2 | var pretty = require('prettier-bytes') 3 | var makeBar = require('progress-string') 4 | 5 | module.exports = peersUI 6 | 7 | function peersUI (state) { 8 | if (!state.network) return '' 9 | if (Object.keys(state.sources).length === 0) return '' 10 | 11 | var peers = state.sources 12 | // var stats = state.stats 13 | // var peerCount = stats.peers.total || 0 14 | // var complete = stats.peers.complete 15 | var info = Object.keys(peers).map(function (id, i) { 16 | return peerUI(peers[id], i) 17 | }).join('\n') 18 | 19 | return `\n${info}\n` 20 | 21 | function peerUI (peer, i) { 22 | var progress 
= peer.getProgress() 23 | var bar = makeBar({ 24 | total: 100, 25 | style: function (a, b) { 26 | return `[${a}${b}] ${(progress).toFixed(2)}%` 27 | } 28 | }) 29 | var theBar = progress ? bar(progress) : '' // progress bar todo 30 | return output(` 31 | [${i}] ${peer.closed ? 'CLOSED' : peer.type}: ${peer.host}:${peer.port} ${pretty(peer.speed)}/s 32 | ${peer.error ? peer.error : theBar} 33 | `) 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/ui/components/warnings.js: -------------------------------------------------------------------------------- 1 | var chalk = require('chalk') 2 | 3 | module.exports = function (state) { 4 | var warning = '' 5 | state.warnings.forEach(function (message) { 6 | warning += `${chalk.yellow(`Warning: ${message}`)}\n` 7 | }) 8 | return warning 9 | } 10 | -------------------------------------------------------------------------------- /src/ui/create.js: -------------------------------------------------------------------------------- 1 | var output = require('neat-log/output') 2 | var pretty = require('prettier-bytes') 3 | var chalk = require('chalk') 4 | var importUI = require('./components/import-progress') 5 | var keyEl = require('./elements/key') 6 | var pluralize = require('./elements/pluralize') 7 | 8 | module.exports = createUI 9 | 10 | function createUI (state) { 11 | if (!state.dat) { 12 | return output(` 13 | Creating a Dat! Add information to your dat.json file: 14 | `) 15 | } 16 | 17 | var dat = state.dat 18 | var stats = dat.stats.get() 19 | var title = '\n' 20 | var progressView 21 | var exitMsg = ` 22 | Your dat is created! 
Run ${chalk.green('dat sync')} to share: 23 | ${keyEl(dat.key)} 24 | ` 25 | if (!state.opts.import) { 26 | // set exiting right away 27 | state.exiting = true 28 | } 29 | 30 | if (!state.exiting) { 31 | // Only show key if not about to exit 32 | title = `${keyEl(dat.key)}\n` 33 | } 34 | if (state.title) title += state.title 35 | 36 | if (stats.version > 0) title += `: ${stats.files} ${pluralize('file', stats.files)} (${pretty(stats.byteLength)})` 37 | else if (stats.version === 0) title += ': (empty archive)' 38 | 39 | if (state.opts.import) { 40 | progressView = importUI(state) + '\n' 41 | } else { 42 | progressView = 'Not importing files.' 43 | } 44 | 45 | return output(` 46 | ${title} 47 | 48 | ${progressView} 49 | ${state.exiting ? exitMsg : chalk.dim('Ctrl+C to Exit')} 50 | `) 51 | } 52 | -------------------------------------------------------------------------------- /src/ui/elements/key.js: -------------------------------------------------------------------------------- 1 | var stringKey = require('dat-encoding').toStr 2 | var chalk = require('chalk') 3 | 4 | module.exports = function (key) { 5 | return `${chalk.blue(`dat://${stringKey(key)}`)}` 6 | } 7 | -------------------------------------------------------------------------------- /src/ui/elements/pluralize.js: -------------------------------------------------------------------------------- 1 | module.exports = function pluralize (str, val) { 2 | return `${str}${val === 1 ? 
'' : 's'}` 3 | } 4 | -------------------------------------------------------------------------------- /src/ui/elements/version.js: -------------------------------------------------------------------------------- 1 | var chalk = require('chalk') 2 | 3 | module.exports = function (version) { 4 | return `${chalk.green(`dat v${version}`)}` 5 | } 6 | -------------------------------------------------------------------------------- /src/ui/status.js: -------------------------------------------------------------------------------- 1 | var output = require('neat-log/output') 2 | var stringKey = require('dat-encoding').toStr 3 | var pretty = require('prettier-bytes') 4 | var chalk = require('chalk') 5 | 6 | module.exports = statusUI 7 | 8 | function statusUI (state) { 9 | if (!state.dat) return 'Starting Dat program...' 10 | 11 | var dat = state.dat 12 | var stats = dat.stats.get() 13 | 14 | return output(` 15 | ${chalk.blue('dat://' + stringKey(dat.key))} 16 | ${stats.files} files (${pretty(stats.byteLength)}) 17 | Version: ${chalk.bold(stats.version)} 18 | `) 19 | } 20 | -------------------------------------------------------------------------------- /src/usage.js: -------------------------------------------------------------------------------- 1 | module.exports = function (opts, help, usage) { 2 | if (opts.version) { 3 | var pkg = require('../package.json') 4 | console.error(pkg.version) 5 | process.exit(1) 6 | } 7 | var msg = ` 8 | 9 | dat [] clone or sync link to 10 | dat create and sync dat in directory 11 | 12 | Other commands: 13 | dat create create empty dat and dat.json 14 | dat sync live sync files with the network 15 | dat clone [] download a dat via link to 16 | dat pull update dat & exit 17 | dat log log history for a dat 18 | dat status get key & info about a local dat 19 | dat keys [import/export] import and export private keys 20 | 21 | Troubleshooting & Help: 22 | dat help print this usage guide 23 | dat --help, -h print help for a specific command 24 | 
dat --version, -v print the dat version 25 | 26 | ` 27 | console.error(msg) 28 | if (usage) { 29 | console.error('General Options:') 30 | console.error(usage) 31 | } 32 | console.error('Have fun using Dat! Learn more at docs.datproject.org') 33 | process.exit(0) 34 | } 35 | -------------------------------------------------------------------------------- /test/auth.js: -------------------------------------------------------------------------------- 1 | var test = require('tape') 2 | var path = require('path') 3 | var fs = require('fs') 4 | var rimraf = require('rimraf') 5 | var mkdirp = require('mkdirp') 6 | var spawn = require('./helpers/spawn') 7 | var help = require('./helpers') 8 | var authServer = require('./helpers/auth-server') 9 | 10 | var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 11 | var baseTestDir = help.testFolder() 12 | var fixtures = path.join(__dirname, 'fixtures') 13 | 14 | var port = process.env.PORT || 3000 15 | var SERVER = 'http://localhost:' + port 16 | var config = path.join(__dirname, '.datrc-test') 17 | var opts = ' --server=' + SERVER + ' --config=' + config 18 | 19 | dat += opts 20 | rimraf.sync(config) 21 | 22 | authServer(port, function (err, server, closeServer) { 23 | if (err) throw err 24 | if (!server) return 25 | test('auth - whoami works when not logged in', function (t) { 26 | var cmd = dat + ' whoami ' 27 | var st = spawn(t, cmd, { cwd: baseTestDir }) 28 | st.stderr.match(function (output) { 29 | t.same(output.trim(), 'Not logged in.', 'printed correct output') 30 | return true 31 | }) 32 | st.stdout.empty() 33 | st.end() 34 | }) 35 | 36 | test('auth - register works', function (t) { 37 | var cmd = dat + ' register --email=hello@bob.com --password=joe --username=joe' 38 | var st = spawn(t, cmd, { cwd: baseTestDir }) 39 | st.stdout.match(function (output) { 40 | t.same(output.trim(), 'Registered successfully.', 'output success message') 41 | return true 42 | }) 43 | st.stderr.empty() 44 | st.end() 45 | }) 46 
| 47 | test('auth - login works', function (t) { 48 | var cmd = dat + ' login --email=hello@bob.com --password=joe' 49 | var st = spawn(t, cmd, { cwd: baseTestDir }) 50 | st.stdout.match(function (output) { 51 | t.same(output.trim(), 'Logged in successfully.', 'output success message') 52 | return true 53 | }) 54 | st.stderr.empty() 55 | st.end() 56 | }) 57 | 58 | test('auth - whoami works', function (t) { 59 | var cmd = dat + ' whoami' 60 | var st = spawn(t, cmd, { cwd: baseTestDir }) 61 | st.stdout.match(function (output) { 62 | t.same('hello@bob.com', output.trim(), 'email printed') 63 | return true 64 | }) 65 | st.stderr.empty() 66 | st.end() 67 | }) 68 | 69 | test('auth - publish before create fails', function (t) { 70 | var cmd = dat + ' publish' 71 | rimraf.sync(path.join(fixtures, '.dat')) 72 | var st = spawn(t, cmd, { cwd: fixtures }) 73 | st.stdout.empty() 74 | st.stderr.match(function (output) { 75 | t.ok(output.indexOf('existing') > -1, 'Create archive before pub') 76 | return true 77 | }) 78 | st.end() 79 | }) 80 | 81 | test('auth - create dat to publish', function (t) { 82 | rimraf.sync(path.join(fixtures, '.dat')) 83 | rimraf.sync(path.join(fixtures, 'dat.json')) 84 | var cmd = dat + ' create --no-import' 85 | var st = spawn(t, cmd, { cwd: fixtures }) 86 | st.stdout.match(function (output) { 87 | var link = help.matchLink(output) 88 | if (!link) return false 89 | t.ok(link, 'prints link') 90 | return true 91 | }) 92 | st.stderr.empty() 93 | st.end() 94 | }) 95 | 96 | test('auth - publish our awesome dat', function (t) { 97 | var cmd = dat + ' publish --name awesome' 98 | var st = spawn(t, cmd, { cwd: fixtures }) 99 | st.stdout.match(function (output) { 100 | var published = output.indexOf('Successfully published') > -1 101 | if (!published) return false 102 | t.ok(published, 'published') 103 | return true 104 | }) 105 | st.stderr.empty() 106 | st.end() 107 | }) 108 | 109 | test('auth - publish our awesome dat with bad dat.json url', function (t) { 
110 | fs.readFile(path.join(fixtures, 'dat.json'), function (err, contents) { 111 | t.ifError(err) 112 | var info = JSON.parse(contents) 113 | var oldUrl = info.url 114 | info.url = info.url.replace('e', 'a') 115 | fs.writeFile(path.join(fixtures, 'dat.json'), JSON.stringify(info), function (err) { 116 | t.ifError(err, 'error after write') 117 | var cmd = dat + ' publish --name awesome' 118 | var st = spawn(t, cmd, { cwd: fixtures }) 119 | st.stdout.match(function (output) { 120 | var published = output.indexOf('Successfully published') > -1 121 | if (!published) return false 122 | t.ok(published, 'published') 123 | t.same(help.datJson(fixtures).url, oldUrl, 'has dat.json with url') 124 | return true 125 | }) 126 | st.stderr.empty() 127 | st.end() 128 | }) 129 | }) 130 | }) 131 | 132 | test('auth - clone from registry', function (t) { 133 | // MAKE SURE THESE MATCH WHAT is published above 134 | // TODO: be less lazy and make a publish helper 135 | var shortName = 'localhost:' + port + '/joe/awesome' // they'll never guess who wrote these tests 136 | var baseDir = path.join(baseTestDir, 'dat_registry_dir') 137 | mkdirp.sync(baseDir) 138 | var downloadDir = path.join(baseDir, shortName.split('/').pop()) 139 | var cmd = dat + ' clone ' + shortName 140 | var st = spawn(t, cmd, { cwd: baseDir }) 141 | st.stdout.match(function (output) { 142 | var lookingFor = output.indexOf('Looking for') > -1 143 | if (!lookingFor) return false 144 | t.ok(lookingFor, 'starts looking for peers') 145 | t.ok(output.indexOf(downloadDir) > -1, 'prints dir') 146 | st.kill() 147 | return true 148 | }) 149 | st.stderr.empty() 150 | st.end(function () { 151 | rimraf.sync(downloadDir) 152 | }) 153 | }) 154 | 155 | test('auth - publish our awesome dat without a dat.json file', function (t) { 156 | rimraf(path.join(fixtures, 'dat.json'), function (err) { 157 | t.ifError(err) 158 | var cmd = dat + ' publish --name another-awesome' 159 | var st = spawn(t, cmd, { cwd: fixtures }) 160 | 
st.stdout.match(function (output) { 161 | var published = output.indexOf('Successfully published') > -1 162 | if (!published) return false 163 | t.ok(published, 'published') 164 | t.same(help.datJson(fixtures).name, 'another-awesome', 'has dat.json with name') 165 | return true 166 | }) 167 | st.stderr.empty() 168 | st.end(function () { 169 | rimraf.sync(path.join(fixtures, '.dat')) 170 | }) 171 | }) 172 | }) 173 | 174 | test('auth - bad clone from registry', function (t) { 175 | var shortName = 'localhost:' + port + '/joe/not-at-all-awesome' 176 | var baseDir = path.join(baseTestDir, 'dat_registry_dir_too') 177 | mkdirp.sync(baseDir) 178 | var downloadDir = path.join(baseDir, shortName.split('/').pop()) 179 | var cmd = dat + ' clone ' + shortName 180 | var st = spawn(t, cmd, { cwd: baseDir }) 181 | st.stderr.match(function (output) { 182 | t.same(output.trim(), 'Dat with that name not found.', 'not found') 183 | st.kill() 184 | return true 185 | }) 186 | st.stdout.empty() 187 | st.end(function () { 188 | rimraf.sync(downloadDir) 189 | }) 190 | }) 191 | 192 | test('auth - logout works', function (t) { 193 | var cmd = dat + ' logout' 194 | var st = spawn(t, cmd, { cwd: baseTestDir }) 195 | st.stdout.match(function (output) { 196 | t.same('Logged out.', output.trim(), 'output correct') 197 | return true 198 | }) 199 | st.stderr.empty() 200 | st.end() 201 | }) 202 | 203 | test('auth - logout prints correctly when trying to log out twice', function (t) { 204 | var cmd = dat + ' logout' 205 | var st = spawn(t, cmd, { cwd: baseTestDir }) 206 | st.stderr.match(function (output) { 207 | t.same('Not logged in.', output.trim(), 'output correct') 208 | return true 209 | }) 210 | st.stdout.empty() 211 | st.end() 212 | }) 213 | 214 | test('auth - whoami works after logging out', function (t) { 215 | var cmd = dat + ' whoami' 216 | var st = spawn(t, cmd, { cwd: baseTestDir }) 217 | st.stderr.match(function (output) { 218 | t.same('Not logged in.', output.trim()) 219 | return 
true 220 | }) 221 | st.stdout.empty() 222 | st.end() 223 | }) 224 | 225 | test.onFinish(function () { 226 | closeServer(function () { 227 | fs.unlink(config, function () { 228 | // done! 229 | }) 230 | }) 231 | }) 232 | }) 233 | -------------------------------------------------------------------------------- /test/clone.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs') 2 | var path = require('path') 3 | var test = require('tape') 4 | var tempDir = require('temporary-directory') 5 | var spawn = require('./helpers/spawn.js') 6 | var help = require('./helpers') 7 | 8 | var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 9 | 10 | test('clone - default opts', function (t) { 11 | help.shareFixtures(function (_, shareDat) { 12 | var key = shareDat.key.toString('hex') 13 | tempDir(function (_, dir, cleanup) { 14 | var cmd = dat + ' clone ' + key 15 | var st = spawn(t, cmd, { cwd: dir }) 16 | var datDir = path.join(dir, key) 17 | 18 | st.stdout.match(function (output) { 19 | var downloadFinished = output.indexOf('Exiting') > -1 20 | if (!downloadFinished) return false 21 | 22 | var stats = shareDat.stats.get() 23 | var fileRe = new RegExp(stats.files + ' files') 24 | var bytesRe = new RegExp(/1\.\d KB/) 25 | 26 | t.ok(output.match(fileRe), 'total size: files okay') 27 | t.ok(output.match(bytesRe), 'total size: bytes okay') 28 | t.ok(help.isDir(datDir), 'creates download directory') 29 | 30 | var fileList = help.fileList(datDir).join(' ') 31 | var hasCsvFile = fileList.indexOf('all_hour.csv') > -1 32 | t.ok(hasCsvFile, 'csv file downloaded') 33 | var hasDatFolder = fileList.indexOf('.dat') > -1 34 | t.ok(hasDatFolder, '.dat folder created') 35 | var hasSubDir = fileList.indexOf('folder') > -1 36 | t.ok(hasSubDir, 'folder created') 37 | var hasNestedDir = fileList.indexOf('nested') > -1 38 | t.ok(hasNestedDir, 'nested folder created') 39 | var hasHelloFile = fileList.indexOf('hello.txt') > -1 40 | 
// Cloning with an explicit target directory should download into that
// directory (relative to the cwd) rather than one named after the key.
test('clone - specify dir', function (t) {
  help.shareFixtures(function (_, shareDat) {
    tempDir(function (_, dir, cleanup) {
      var hexKey = shareDat.key.toString('hex')
      var targetName = 'my_dir'
      var cloneCmd = dat + ' clone ' + hexKey + ' ' + targetName
      var st = spawn(t, cloneCmd, { cwd: dir })
      st.stdout.match(function (output) {
        // Wait until the CLI announces it is done before asserting.
        if (output.indexOf('Exiting') === -1) return false
        t.ok(help.isDir(path.join(dir, targetName)), 'creates download directory')
        st.kill()
        return true
      })
      st.succeeds('exits after finishing download')
      st.stderr.empty()
      st.end(function () {
        cleanup()
        shareDat.close()
      })
    })
  })
})
// A datproject.org/<key> URL should resolve just like a raw key and
// download into a directory named after the hex key.
test('clone - datproject.org/key link', function (t) {
  help.shareFixtures(function (_, shareDat) {
    tempDir(function (_, dir, cleanup) {
      var hexKey = shareDat.key.toString('hex')
      var link = 'datproject.org/' + hexKey + '/'
      var cloneCmd = dat + ' clone ' + link + ' '
      var expectedDir = path.join(dir, hexKey)
      var st = spawn(t, cloneCmd, { cwd: dir })
      st.stdout.match(function (output) {
        if (output.indexOf('Exiting') === -1) return false
        t.ok(help.isDir(expectedDir), 'creates download directory')
        st.kill()
        return true
      })
      st.succeeds('exits after finishing download')
      st.stderr.empty()
      st.end(function () {
        cleanup()
        shareDat.close()
      })
    })
  })
})
// An unresolvable link should produce an error on stderr and leave no
// download directory behind.
test('clone - invalid link', function (t) {
  var badKey = 'best-key-ever'
  var cloneCmd = dat + ' clone ' + badKey
  tempDir(function (_, dir, cleanup) {
    var st = spawn(t, cloneCmd, { cwd: dir })
    var wouldBeDir = path.join(dir, badKey)
    st.stderr.match(function (output) {
      if (output.indexOf('Could not resolve link') === -1) return false
      t.ok(true, 'has error')
      t.ok(!help.isDir(wouldBeDir), 'download dir removed')
      st.kill()
      return true
    })
    st.end(cleanup)
  })
})
// `dat clone <dir>` where <dir> holds a dat.json with a `url` field should
// read the key from the manifest and download the files into that directory.
test('clone - specify directory containing dat.json', function (t) {
  help.shareFixtures(function (_, shareDat) {
    tempDir(function (_, dir, cleanup) {
      fs.writeFileSync(path.join(dir, 'dat.json'), JSON.stringify({ url: shareDat.key.toString('hex') }), 'utf8')

      // dat clone /dir
      var st = spawn(t, dat + ' clone ' + dir)

      st.stdout.match(function (output) {
        if (output.indexOf('Exiting') === -1) return false

        var files = help.fileList(dir).join(' ')
        t.ok(files.indexOf('all_hour.csv') > -1, 'csv file downloaded')
        t.ok(files.indexOf('.dat') > -1, '.dat folder created')
        t.ok(files.indexOf('folder') > -1, 'folder created')
        t.ok(files.indexOf('nested') > -1, 'nested folder created')
        t.ok(files.indexOf('hello.txt') > -1, 'hello.txt file downloaded')

        st.kill()
        return true
      })
      st.succeeds('exits after finishing download')
      st.stderr.empty()
      st.end(function () {
        cleanup()
        shareDat.close()
      })
    })
  })
})
// `dat clone <path-to-dat.json>` should read the `url` from the manifest
// and download the archive into the manifest's directory.
test('clone - specify dat.json path', function (t) {
  help.shareFixtures(function (_, shareDat) {
    tempDir(function (_, dir, cleanup) {
      var manifestPath = path.join(dir, 'dat.json')
      fs.writeFileSync(manifestPath, JSON.stringify({ url: shareDat.key.toString('hex') }), 'utf8')

      // dat clone /dir/dat.json
      var st = spawn(t, dat + ' clone ' + manifestPath)

      st.stdout.match(function (output) {
        if (output.indexOf('Exiting') === -1) return false

        var files = help.fileList(dir).join(' ')
        t.ok(files.indexOf('all_hour.csv') > -1, 'csv file downloaded')
        t.ok(files.indexOf('.dat') > -1, '.dat folder created')
        t.ok(files.indexOf('folder') > -1, 'folder created')
        t.ok(files.indexOf('nested') > -1, 'nested folder created')
        t.ok(files.indexOf('hello.txt') > -1, 'hello.txt file downloaded')

        st.kill()
        return true
      })
      st.succeeds('exits after finishing download')
      st.stderr.empty()
      st.end(function () {
        cleanup()
        shareDat.close()
      })
    })
  })
})
'utf8') 442 | 443 | // dat clone /dir/dat.json /dir/clone-dest 444 | var cmd = dat + ' clone ' + datJsonPath + ' ' + datDir 445 | var st = spawn(t, cmd) 446 | 447 | st.stdout.match(function (output) { 448 | var downloadFinished = output.indexOf('Exiting') > -1 449 | if (!downloadFinished) return false 450 | 451 | var fileList = help.fileList(datDir).join(' ') 452 | var hasCsvFile = fileList.indexOf('all_hour.csv') > -1 453 | t.ok(hasCsvFile, 'csv file downloaded') 454 | var hasDatFolder = fileList.indexOf('.dat') > -1 455 | t.ok(hasDatFolder, '.dat folder created') 456 | var hasSubDir = fileList.indexOf('folder') > -1 457 | t.ok(hasSubDir, 'folder created') 458 | var hasNestedDir = fileList.indexOf('nested') > -1 459 | t.ok(hasNestedDir, 'nested folder created') 460 | var hasHelloFile = fileList.indexOf('hello.txt') > -1 461 | t.ok(hasHelloFile, 'hello.txt file downloaded') 462 | 463 | st.kill() 464 | return true 465 | }) 466 | st.succeeds('exits after finishing download') 467 | st.stderr.empty() 468 | st.end(function () { 469 | cleanup() 470 | shareDat.close() 471 | }) 472 | }) 473 | }) 474 | }) 475 | -------------------------------------------------------------------------------- /test/create.js: -------------------------------------------------------------------------------- 1 | var fs = require('fs') 2 | var path = require('path') 3 | var test = require('tape') 4 | var tempDir = require('temporary-directory') 5 | var Dat = require('dat-node') 6 | var spawn = require('./helpers/spawn.js') 7 | var help = require('./helpers') 8 | 9 | var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 10 | var fixtures = path.join(__dirname, 'fixtures') 11 | 12 | // os x adds this if you view the fixtures in finder and breaks the file count assertions 13 | try { fs.unlinkSync(path.join(fixtures, '.DS_Store')) } catch (e) { /* ignore error */ } 14 | 15 | // start without dat.json 16 | try { fs.unlinkSync(path.join(fixtures, 'dat.json')) } catch (e) { /* ignore error 
// Regression check: `dat create` must refuse to run in a directory that
// already contains an archive.
test('create - errors on existing archive', function (t) {
  tempDir(function (_, dir, cleanup) {
    // BUG FIX: the callback parameter must not be named `dat` — the
    // original shadowed the module-level CLI path, so `dat + ' create'`
    // stringified the Dat instance instead of building a command line.
    Dat(dir, function (err, existingDat) {
      t.error(err, 'no error')
      existingDat.close(function () {
        var cmd = dat + ' create --title data --description thing'
        var st = spawn(t, cmd, { cwd: dir })
        st.stderr.match(function (output) {
          t.ok(output, 'errors')
          st.kill()
          return true
        })
        // Also remove the temp dir when done (the original leaked it).
        st.end(cleanup)
      })
    })
  })
})
// `dat init <path>` should create the archive at <path> regardless of the
// current working directory.
test('create - with path', function (t) {
  tempDir(function (_, dir, cleanup) {
    var initCmd = dat + ' init ' + dir + ' --title data --description thing'
    var st = spawn(t, initCmd)
    st.stdout.match(function (output) {
      if (output.indexOf('Created empty Dat') === -1) return false
      t.ok(help.isDir(path.join(dir, '.dat')), 'creates dat directory')
      st.kill()
      return true
    })
    st.succeeds('exits after create finishes')
    st.stderr.empty()
    st.end(cleanup)
  })
})
9 | 10 | // var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 11 | 12 | // test('misc - doctor option works ', function (t) { 13 | // var st = spawn(t, dat + ' doctor', {end: false}) 14 | // st.stderr.match(function (output) { 15 | // var readyPeer = output.indexOf('Waiting for incoming connections') > -1 16 | // if (!readyPeer) return false 17 | 18 | // if (!process.env.TRAVIS) { 19 | // // Not working on v4/v7 travis but can't reproduce locally 20 | // t.ok(output.indexOf('UTP') > -1, 'doctor connects to public peer via UTP') 21 | // } 22 | // t.ok(output.indexOf('TCP') > -1, 'doctor connects to public peer via TCP') 23 | 24 | // var key = help.matchLink(output) 25 | // startPhysiciansAssistant(key) 26 | // return true 27 | // }, 'doctor started') 28 | 29 | // function startPhysiciansAssistant (link) { 30 | // var assist = spawn(t, dat + ' doctor ' + link, {end: false}) 31 | // assist.stderr.match(function (output) { 32 | // var readyPeer = output.indexOf('Waiting for incoming connections') > -1 33 | // if (!readyPeer) return false 34 | 35 | // t.same(help.matchLink(output), link, 'key of peer matches') 36 | // t.ok(readyPeer, 'starts looking for peers') 37 | // t.skip(output.indexOf('Remote peer echoed expected data back') > -1, 'echo data back') 38 | // st.kill() 39 | // return true 40 | // }) 41 | // assist.end(function () { 42 | // t.end() 43 | // }) 44 | // } 45 | // }) 46 | -------------------------------------------------------------------------------- /test/fixtures/all_hour.csv: -------------------------------------------------------------------------------- 1 | time,latitude,longitude,depth,mag,magType,nst,gap,dmin,rms,net,id,updated,place,type 2 | 2014-04-30T03:34:57.000Z,60.0366,-141.2214,14.6,1.4,ml,,,,1.51,ak,ak11246293,2014-04-30T03:39:27.956Z,"67km E of Cape Yakataga, Alaska",earthquake 3 | 2014-04-30T03:16:54.860Z,33.9233322,-117.9376678,0.81,2.4,ml,71,51,0.02487,0.35,ci,ci37218696,2014-04-30T03:35:24.239Z,"1km SE of La Habra, 
// Boot a throwaway dat-registry-api server for the auth tests.
// `port` is optional (falls back to 8888); `cb` receives (err) on database
// failure, or (null, server, close) on success. When the registry modules
// are not installed (see the guarded require at the top of this file),
// cb(null) is called with no server so callers can skip the auth tests.
function createServer (port, cb) {
  if (!Server || !initDb) return cb(null)
  // Throwaway config: sqlite databases and the archiver directory live
  // next to the tests and are wiped before every run (rimraf calls below).
  var config = {
    mixpanel: 'nothing',
    email: {
      fromEmail: 'hi@example.com'
    },
    township: {
      secret: 'very secret code',
      db: path.join(__dirname, '..', 'test-township.db')
    },
    db: {
      dialect: 'sqlite3',
      connection: { filename: path.join(__dirname, '..', 'test-sqlite.db') },
      useNullAsDefault: true
    },
    archiver: path.join(__dirname, '..', 'test-archiver'),
    whitelist: false,
    port: port || 8888
  }
  // Start from a clean slate so earlier (possibly crashed) runs cannot
  // leak registrations into this one.
  rimraf.sync(config.archiver)
  rimraf.sync(config.db.connection.filename)
  rimraf.sync(config.township.db)

  initDb(config.db, function (err, db) {
    if (err) return cb(err)

    const server = Server(config, db)
    server.listen(config.port, function () {
      console.log('listening', config.port)
    })

    // Hand the caller the server plus a teardown function.
    cb(null, server, close)

    // NOTE(review): `close` never invokes its `cb` argument; after cleanup
    // it calls process.exit() — presumably to force tape to terminate
    // despite lingering handles. Confirm before reusing this helper.
    function close (cb) {
      server.close(function () {
        rimraf.sync(config.township.db)
        rimraf.sync(config.db.connection.filename)
        process.exit()
      })
    }
  })
}
// Report whether `dir` exists and is a directory; any stat failure
// (missing path, permission error, path is a file) yields false.
function isDir (dir) {
  var stats
  try {
    stats = fs.statSync(dir)
  } catch (err) {
    return false
  }
  return stats.isDirectory()
}
// Sharing with --http should serve the archive's contents over HTTP on the
// default port 8080, both the listing and individual file bodies.
test('http - share with http', function (t) {
  rimraf.sync(path.join(fixtures, '.dat'))
  var st = spawn(t, dat + ' share --http', { cwd: fixtures })

  st.stdout.match(function (output) {
    if (output.indexOf('Serving files over http') === -1) return false

    // Fetch the root listing first, then a nested file, then shut down.
    fetchText('http://localhost:8080').then(function ({ resp, body, error }) {
      t.error(error, 'no error')
      t.ok(resp.status === 200, 'okay status')
      t.ok(body)

      return fetchText('http://localhost:8080/folder/nested/hello.txt')
    }).then(function ({ resp, body, error }) {
      t.error(error, 'no error')
      t.ok(resp.status === 200, 'okay status')
      t.same(body, 'code for science and society', 'body of file okay')

      st.kill()
    })
    return true
  })
  st.stderr.empty()
  st.end()
})
// `dat keys` should print the archive's public key as a dat:// link.
test('keys - print keys', function (t) {
  help.shareFixtures(function (_, shareDat) {
    shareDat.close(function () {
      var cmd = dat + ' keys '
      var st = spawn(t, cmd, { cwd: fixtures })

      st.stdout.match(function (output) {
        if (output.indexOf('dat://') === -1) return false
        // BUG FIX: the original passed the `> -1` comparison INTO indexOf
        // (`output.indexOf(key > -1)`), so t.ok received an index number
        // and the assertion could never meaningfully fail.
        t.ok(output.indexOf(shareDat.key.toString('hex')) > -1, 'prints key')
        st.kill()
        return true
      })
      st.stderr.empty()
      st.end()
    })
  })
})
// Clone an archive, then export the writer's secret key from the original
// and import it into the clone, making the clone writable.
// Skipped on Travis: relies on interactive stdin, which is flaky in CI.
if (!process.env.TRAVIS) {
  test('keys - export & import secret key', function (t) {
    help.shareFixtures(function (_, shareDat) {
      var key = shareDat.key.toString('hex')
      tempDir(function (_, dir, cleanup) {
        var cmd = dat + ' clone ' + key
        var st = spawn(t, cmd, { cwd: dir, end: false })
        var datDir = path.join(dir, key)

        st.stdout.match(function (output) {
          var downloadFinished = output.indexOf('Exiting') > -1
          if (!downloadFinished) return false
          st.kill()
          shareDat.close(exchangeKeys)
          return true
        })
        st.stderr.empty()

        function exchangeKeys () {
          var secretKey = null

          var exportCmd = dat + ' keys export'
          var st = spawn(t, exportCmd, { cwd: fixtures, end: false })
          st.stdout.match(function (output) {
            if (!output) return false
            secretKey = output.trim()
            st.kill()
            importKey()
            return true
          })
          st.stderr.empty()

          function importKey () {
            // BUG FIX: renamed the misnamed local (`exportKey` held the
            // import command) and fixed the prompt guard below.
            var importCmd = dat + ' keys import'
            var st = spawn(t, importCmd, { cwd: datDir })
            st.stdout.match(function (output) {
              // BUG FIX: the original wrote `!output.indexOf(...) === -1`,
              // which (by precedence) negates the index first and never
              // returns false — the key was written before the prompt
              // appeared. Wait for the prompt, then answer it.
              if (output.indexOf('secret key') === -1) return false
              st.stdin.write(secretKey + '\r')
              if (output.indexOf('Successful import') === -1) return false
              t.ok(fs.statSync(path.join(datDir, '.dat', 'metadata.ogd')), 'original dat file exists')
              st.kill()
              return true
            })
            st.stderr.empty()
            st.end(function () {
              rimraf.sync(path.join(fixtures, '.dat'))
              cleanup()
            })
          }
        }
      })
    })
  })
}
-------------------------------------------------------------------------------- /test/pull.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var test = require('tape') 3 | var tempDir = require('temporary-directory') 4 | var spawn = require('./helpers/spawn.js') 5 | var help = require('./helpers') 6 | 7 | var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 8 | 9 | test('pull - errors without clone first', function (t) { 10 | tempDir(function (_, dir, cleanup) { 11 | var cmd = dat + ' pull' 12 | var st = spawn(t, cmd, { cwd: dir }) 13 | st.stderr.match(function (output) { 14 | t.ok('No existing archive', 'Error: no existing archive') 15 | st.kill() 16 | return true 17 | }) 18 | st.end(cleanup) 19 | }) 20 | }) 21 | 22 | test('pull - default opts', function (t) { 23 | // import false so we can pull files later 24 | help.shareFixtures({ import: false }, function (_, fixturesDat) { 25 | tempDir(function (_, dir, cleanup) { 26 | // clone initial dat 27 | var cmd = dat + ' clone ' + fixturesDat.key.toString('hex') + ' ' + dir 28 | var st = spawn(t, cmd, { end: false }) 29 | st.stdout.match(function (output) { 30 | var synced = output.indexOf('dat synced') > -1 31 | if (!synced) return false 32 | st.kill() 33 | fixturesDat.close(doPull) 34 | return true 35 | }) 36 | 37 | function doPull () { 38 | // TODO: Finish this one. 
Need some bug fixes on empty pulls =( 39 | help.shareFixtures({ resume: true, import: true }, function (_, fixturesDat) { 40 | var cmd = dat + ' pull' 41 | var st = spawn(t, cmd, { cwd: dir }) 42 | st.stdout.match(function (output) { 43 | var downloadFinished = output.indexOf('dat sync') > -1 44 | if (!downloadFinished) return false 45 | st.kill() 46 | return true 47 | }) 48 | st.succeeds('exits after finishing download') 49 | st.stderr.empty() 50 | st.end(function () { 51 | fixturesDat.close() 52 | }) 53 | }) 54 | } 55 | }) 56 | }) 57 | }) 58 | 59 | // test('pull - default opts', function (t) { 60 | // // cmd: dat pull 61 | // // import the files to the sharer so we can pull new data 62 | // shareDat.importFiles(function (err) { 63 | // if (err) throw err 64 | 65 | // var datDir = path.join(baseTestDir, shareKey) 66 | // var cmd = dat + ' pull' 67 | // var st = spawn(t, cmd, {cwd: datDir}) 68 | // st.stdout.match(function (output) { 69 | // var downloadFinished = output.indexOf('Download Finished') > -1 70 | // if (!downloadFinished) return false 71 | 72 | // var stats = shareDat.stats.get() 73 | // var fileRe = new RegExp(stats.filesTotal + ' files') 74 | // var bytesRe = new RegExp(/1\.\d{1,2} kB/) 75 | 76 | // t.ok(help.matchLink(output), 'prints link') 77 | // t.ok(output.indexOf('dat-download-folder/' + shareKey) > -1, 'prints dir') 78 | // t.ok(output.match(fileRe), 'total size: files okay') 79 | // t.ok(output.match(bytesRe), 'total size: bytes okay') 80 | // t.ok(help.isDir(datDir), 'creates download directory') 81 | 82 | // var fileList = help.fileList(datDir).join(' ') 83 | // var hasCsvFile = fileList.indexOf('all_hour.csv') > -1 84 | // t.ok(hasCsvFile, 'csv file downloaded') 85 | // var hasDatFolder = fileList.indexOf('.dat') > -1 86 | // t.ok(hasDatFolder, '.dat folder created') 87 | // var hasSubDir = fileList.indexOf('folder') > -1 88 | // t.ok(hasSubDir, 'folder created') 89 | // var hasNestedDir = fileList.indexOf('nested') > -1 90 | // 
t.ok(hasNestedDir, 'nested folder created') 91 | // var hasHelloFile = fileList.indexOf('hello.txt') > -1 92 | // t.ok(hasHelloFile, 'hello.txt file downloaded') 93 | 94 | // st.kill() 95 | // return true 96 | // }) 97 | // st.succeeds('exits after finishing download') 98 | // st.stderr.empty() 99 | // st.end() 100 | // }) 101 | // }) 102 | 103 | // test('pull - with dir arg', function (t) { 104 | // var dirName = shareKey 105 | // var datDir = path.join(baseTestDir, shareKey) 106 | // var cmd = dat + ' pull ' + dirName 107 | // var st = spawn(t, cmd, {cwd: baseTestDir}) 108 | // st.stdout.match(function (output) { 109 | // var downloadFinished = output.indexOf('Download Finished') > -1 110 | // if (!downloadFinished) return false 111 | 112 | // t.ok(output.indexOf('dat-download-folder/' + dirName) > -1, 'prints dir') 113 | // t.ok(help.isDir(datDir), 'creates download directory') 114 | 115 | // st.kill() 116 | // return true 117 | // }) 118 | // st.succeeds('exits after finishing download') 119 | // st.stderr.empty() 120 | // st.end() 121 | // }) 122 | -------------------------------------------------------------------------------- /test/share.js: -------------------------------------------------------------------------------- 1 | // var fs = require('fs') 2 | // var path = require('path') 3 | // var test = require('tape') 4 | // var rimraf = require('rimraf') 5 | // var spawn = require('./helpers/spawn.js') 6 | // var help = require('./helpers') 7 | 8 | // var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 9 | // if (process.env.TRAVIS) dat += ' --no-watch ' 10 | // var fixtures = path.join(__dirname, 'fixtures') 11 | 12 | // // os x adds this if you view the fixtures in finder and breaks the file count assertions 13 | // try { fs.unlinkSync(path.join(fixtures, '.DS_Store')) } catch (e) { /* ignore error */ } 14 | 15 | // // start without dat.json 16 | // try { fs.unlinkSync(path.join(fixtures, 'dat.json')) } catch (e) { /* ignore error */ } 17 | 
18 | // test('share - default opts', function (t) { 19 | // rimraf.sync(path.join(fixtures, '.dat')) 20 | // var cmd = dat + ' share' 21 | // var st = spawn(t, cmd, {cwd: fixtures}) 22 | 23 | // st.stdout.match(function (output) { 24 | // var importFinished = output.indexOf('Total Size') > -1 25 | // if (!importFinished) return false 26 | 27 | // t.ok(help.isDir(path.join(fixtures, '.dat')), 'creates dat directory') 28 | // t.ok(output.indexOf('Looking for connections') > -1, 'network') 29 | 30 | // st.kill() 31 | // return true 32 | // }) 33 | // st.stderr.empty() 34 | // st.end() 35 | // }) 36 | 37 | // test('share - with dir arg', function (t) { 38 | // rimraf.sync(path.join(fixtures, '.dat')) 39 | // var cmd = dat + ' share ' + fixtures 40 | // var st = spawn(t, cmd) 41 | 42 | // st.stdout.match(function (output) { 43 | // var importFinished = output.indexOf('Total Size') > -1 44 | // if (!importFinished) return false 45 | 46 | // t.ok(help.isDir(path.join(fixtures, '.dat')), 'creates dat directory') 47 | // t.ok(output.indexOf('Looking for connections') > -1, 'network') 48 | 49 | // st.kill() 50 | // return true 51 | // }) 52 | // st.stderr.empty() 53 | // st.end() 54 | // }) 55 | 56 | // test.onFinish(function () { 57 | // rimraf.sync(path.join(fixtures, '.dat')) 58 | // }) 59 | -------------------------------------------------------------------------------- /test/sync-owner.js: -------------------------------------------------------------------------------- 1 | // var fs = require('fs') 2 | // var net = require('net') 3 | // var path = require('path') 4 | // var test = require('tape') 5 | // var mkdirp = require('mkdirp') 6 | // var rimraf = require('rimraf') 7 | // var Dat = require('dat-node') 8 | // var spawn = require('./helpers/spawn.js') 9 | // var help = require('./helpers') 10 | 11 | // var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 12 | // if (process.env.TRAVIS) dat += ' --no-watch ' 13 | // var fixtures = path.join(__dirname, 
'fixtures') 14 | // var downDat 15 | 16 | // // os x adds this if you view the fixtures in finder and breaks the file count assertions 17 | // try { fs.unlinkSync(path.join(fixtures, '.DS_Store')) } catch (e) { /* ignore error */ } 18 | 19 | // test('sync-owner - errors without create first', function (t) { 20 | // rimraf.sync(path.join(fixtures, '.dat')) 21 | // // cmd: dat sync 22 | // var cmd = dat + ' sync' 23 | // var st = spawn(t, cmd, {cwd: fixtures}) 24 | 25 | // st.stderr.match(function (output) { 26 | // var hasError = output.indexOf('No existing archive') > -1 27 | // t.ok(hasError, 'emits error') 28 | // st.kill() 29 | // return true 30 | // }) 31 | // st.end() 32 | // }) 33 | 34 | // test('sync-owner - create a dat for syncing', function (t) { 35 | // rimraf.sync(path.join(fixtures, '.dat')) 36 | // // cmd: dat create 37 | // var cmd = dat + ' create --import' 38 | // var st = spawn(t, cmd, {cwd: fixtures}) 39 | // st.stdout.match(function (output) { 40 | // var importFinished = output.indexOf('import finished') > -1 41 | // if (!importFinished) return false 42 | // st.kill() 43 | // return true 44 | // }) 45 | // st.stderr.empty() 46 | // st.end() 47 | // }) 48 | 49 | // test('sync-owner - default opts', function (t) { 50 | // // cmd: dat sync 51 | // var cmd = dat + ' sync' 52 | // var st = spawn(t, cmd, {cwd: fixtures, end: false}) 53 | 54 | // var key 55 | 56 | // st.stdout.match(function (output) { 57 | // var sharing = output.indexOf('Dat Network') > -1 58 | // if (!sharing) return false 59 | 60 | // key = help.matchLink(output) 61 | 62 | // t.ok(key, 'prints link') 63 | // t.ok(output.indexOf('tests/fixtures') > -1, 'prints dir') 64 | 65 | // downloadDat() 66 | // return true 67 | // }) 68 | // st.stderr.empty() 69 | // st.end() 70 | 71 | // function downloadDat () { 72 | // var downloadDir = path.join(help.testFolder(), '' + Date.now()) 73 | // mkdirp.sync(downloadDir) 74 | 75 | // Dat(downloadDir, { key: key }, function (err, tmpDat) { 76 | // 
if (err) throw err 77 | 78 | // downDat = tmpDat 79 | // downDat.joinNetwork() 80 | 81 | // downDat.network.swarm.once('connection', function () { 82 | // t.pass('downloader connects') 83 | // downDat.close(function () { 84 | // rimraf.sync(downDat.path) 85 | // t.end() 86 | // }) 87 | // }) 88 | // }) 89 | // } 90 | // }) 91 | 92 | // test('sync-owner - create without import for syncing', function (t) { 93 | // rimraf.sync(path.join(fixtures, '.dat')) 94 | // // cmd: dat create 95 | // var cmd = dat + ' create' 96 | // var st = spawn(t, cmd, {cwd: fixtures}) 97 | // st.stdout.match(function (output) { 98 | // if (output.indexOf('created') > -1) return true 99 | // return false 100 | // }) 101 | // st.succeeds() 102 | // st.end() 103 | // }) 104 | 105 | // test('sync-owner - imports after no-import create', function (t) { 106 | // // cmd: dat sync 107 | // var cmd = dat + ' sync' 108 | // var st = spawn(t, cmd, {cwd: fixtures}) 109 | 110 | // st.stdout.match(function (output) { 111 | // // have to check both for local test (watching) and travis (sharing) 112 | // var sharing = output.indexOf('Watching') > -1 || output.indexOf('Sharing latest') > -1 113 | // if (!sharing) return false 114 | 115 | // var fileRe = new RegExp('2 files') 116 | // var bytesRe = new RegExp(/1\.\d{1,2} kB/) 117 | 118 | // t.ok(help.matchLink(output), 'prints link') 119 | // t.ok(output.indexOf('tests/fixtures') > -1, 'prints dir') 120 | // t.ok(output.match(fileRe), 'total size: files okay') 121 | // t.ok(output.match(bytesRe), 'total size: bytes okay') 122 | 123 | // st.kill() 124 | // return true 125 | // }) 126 | // st.stderr.empty() 127 | // st.end() 128 | // }) 129 | 130 | // // TODO: this test is causing serious memory issues. 131 | // // HELP. 
Maybe related to https://github.com/datproject/dat-node/issues/71 132 | // // test('sync-owner - turn off ignore hidden', function (t) { 133 | // // // cmd: dat sync 134 | // // var hiddenFile = path.join(fixtures, '.hidden-file') 135 | // // var cmd = dat + ' sync --no-ignore-hidden' 136 | // // fs.writeFile(hiddenFile, 'You cannot see me', function (err) { 137 | // // t.error(err) 138 | 139 | // // var st = spawn(t, cmd, {cwd: fixtures, end: false}) 140 | // // var key 141 | 142 | // // st.stdout.match(function (output) { 143 | // // var sharing = output.indexOf('Dat Network') > -1 144 | // // if (!sharing) return false 145 | 146 | // // key = help.matchLink(output) 147 | 148 | // // downloadDat() 149 | // // return true 150 | // // }) 151 | // // st.stderr.empty() 152 | // // st.end() 153 | 154 | // // function downloadDat () { 155 | // // var downloadDir = path.join(help.testFolder(), '' + Date.now()) 156 | // // mkdirp.sync(downloadDir) 157 | 158 | // // Dat(downloadDir, { key: key }, function (err, downDat) { 159 | // // if (err) throw err 160 | 161 | // // downDat.joinNetwork() 162 | 163 | // // downDat.network.swarm.once('connection', function () { 164 | // // downDat.archive.list({live: false}, function (err, data) { 165 | // // t.error(err) 166 | // // var hasHiddenFile = data.filter(function (entry) { 167 | // // return entry.name === '.hidden-file' 168 | // // }) 169 | // // t.ok(hasHiddenFile.length, 'hidden file in archive') 170 | // // downDat.network.swarm.close(function () { 171 | // // process.nextTick(function () { 172 | // // downDat.close(function () { 173 | // // rimraf(downDat.path, function () { 174 | // // fs.unlink(hiddenFile, function () { 175 | // // t.end() 176 | // // }) 177 | // // }) 178 | // // }) 179 | // // }) 180 | // // }) 181 | // // }) 182 | // // }) 183 | // // }) 184 | // // } 185 | // // }) 186 | // // }) 187 | 188 | // test('sync-owner - port and utp options', function (t) { 189 | // var port = 3281 190 | // var cmd = dat 
+ ' sync --port ' + port + ' --no-utp' 191 | // var st = spawn(t, cmd, {cwd: fixtures, end: false}) 192 | // st.stderr.empty() 193 | 194 | // var server = net.createServer() 195 | // server.once('error', function (err) { 196 | // if (err.code !== 'EADDRINUSE') return t.error(err) 197 | // t.skip('TODO: correct port in use') 198 | // done() 199 | // }) 200 | // server.once('listening', function () { 201 | // t.skip(`TODO: port ${server.address().port} should be in use`) 202 | // done() 203 | // }) 204 | // server.listen(port) 205 | 206 | // t.skip('TODO: check utp option') // TODO: how to check utp? 207 | 208 | // function done () { 209 | // server.close(function () { 210 | // st.kill() 211 | // t.end() 212 | // }) 213 | // } 214 | // }) 215 | 216 | // test('sync-owner - shorthand', function (t) { 217 | // var cmd = dat + ' .' 218 | // var st = spawn(t, cmd, {cwd: fixtures}) 219 | 220 | // st.stdout.match(function (output) { 221 | // var sharing = output.indexOf('Looking for connections') > -1 222 | // if (!sharing) return false 223 | 224 | // t.ok(help.matchLink(output), 'prints link') 225 | 226 | // st.kill() 227 | // return true 228 | // }) 229 | // st.stderr.empty() 230 | // st.end() 231 | // }) 232 | 233 | // test('sync-owner - dir argument', function (t) { 234 | // var cmd = dat + ' sync ' + fixtures 235 | // var st = spawn(t, cmd) 236 | 237 | // st.stdout.match(function (output) { 238 | // var sharing = output.indexOf('Looking for connections') > -1 239 | // if (!sharing) return false 240 | 241 | // t.ok(help.matchLink(output), 'prints link') 242 | 243 | // st.kill() 244 | // return true 245 | // }) 246 | // st.stderr.empty() 247 | // st.end() 248 | // }) 249 | 250 | // if (!process.env.TRAVIS) { 251 | // test('sync-owner - live', function (t) { 252 | // var liveFile = path.join(fixtures, 'live.txt') 253 | // var wroteFile = false 254 | 255 | // var cmd = dat + ' sync --watch' 256 | // var st = spawn(t, cmd, {cwd: fixtures}) 257 | 258 | // 
st.stdout.match(function (output) { 259 | // var watching = output.indexOf('Watching for file changes') > -1 260 | // if (!watching) return false 261 | // else if (!wroteFile) { 262 | // fs.writeFileSync(liveFile, 'hello') 263 | // wroteFile = true 264 | // } 265 | // var fileImported = output.indexOf('live.txt') > -1 266 | // if (!fileImported) return false 267 | 268 | // t.ok(fileImported, 'prints live file output') 269 | // t.ok(output.indexOf('3 files') > -1, 'total size: files okay') 270 | 271 | // fs.unlinkSync(liveFile) 272 | // st.kill() 273 | // return true 274 | // }) 275 | // st.stderr.empty() 276 | // st.end() 277 | // }) 278 | // } 279 | 280 | // test.onFinish(function () { 281 | // rimraf.sync(path.join(fixtures, '.dat')) 282 | // }) 283 | -------------------------------------------------------------------------------- /test/sync-remote.js: -------------------------------------------------------------------------------- 1 | // var path = require('path') 2 | // var test = require('tape') 3 | // var rimraf = require('rimraf') 4 | // var spawn = require('./helpers/spawn.js') 5 | // var help = require('./helpers') 6 | 7 | // var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 8 | // var baseTestDir = help.testFolder() 9 | // var shareDat 10 | // var syncDir 11 | 12 | // test('sync-remote - default opts', function (t) { 13 | // // cmd: dat sync 14 | // var key 15 | 16 | // help.shareFixtures({import: false}, function (_, fixturesDat) { 17 | // shareDat = fixturesDat 18 | // key = shareDat.key.toString('hex') 19 | // syncDir = path.join(baseTestDir, key) 20 | 21 | // makeClone(function () { 22 | // shareDat.importFiles(function () { 23 | // var cmd = dat + ' sync' 24 | // var st = spawn(t, cmd, {cwd: syncDir}) 25 | // st.stdout.match(function (output) { 26 | // var updated = output.indexOf('Files updated') > -1 27 | // if (!updated) return false 28 | 29 | // var fileRe = new RegExp('3 files') 30 | // var bytesRe = new RegExp(/1\.\d{1,2} kB/) 
31 | 32 | // key = help.matchLink(output) 33 | 34 | // t.ok(key, 'prints link') 35 | // t.ok(output.indexOf('dat-download-folder/' + key) > -1, 'prints dir') 36 | // t.ok(output.match(fileRe), 'total size: files okay') 37 | // t.ok(output.match(bytesRe), 'total size: bytes okay') 38 | 39 | // st.kill() 40 | // return true 41 | // }) 42 | // st.stderr.empty() 43 | // st.end() 44 | // }) 45 | // }) 46 | // }) 47 | 48 | // function makeClone (cb) { 49 | // var cmd = dat + ' clone ' + key 50 | // var st = spawn(t, cmd, {cwd: baseTestDir, end: false}) 51 | // st.stdout.match(function (output) { 52 | // var downloadFinished = output.indexOf('Download Finished') > -1 53 | // if (!downloadFinished) return false 54 | 55 | // st.kill() 56 | // cb() 57 | // return true 58 | // }) 59 | // st.stderr.empty() 60 | // } 61 | // }) 62 | 63 | // test('sync-remote - shorthand sync', function (t) { 64 | // // cmd: dat sync 65 | // var cmd = dat + ' .' 66 | // var st = spawn(t, cmd, {cwd: syncDir}) 67 | // st.stdout.match(function (output) { 68 | // var syncing = output.indexOf('Syncing Dat Archive') > -1 69 | // if (!syncing) return false 70 | // t.ok(help.matchLink(output), 'prints link') 71 | // st.kill() 72 | // return true 73 | // }) 74 | // st.stderr.empty() 75 | // st.end() 76 | // }) 77 | 78 | // test('sync-remote - dir arg', function (t) { 79 | // var cmd = dat + ' ' + syncDir 80 | // var st = spawn(t, cmd) 81 | // st.stdout.match(function (output) { 82 | // var syncing = output.indexOf('Syncing Dat Archive') > -1 83 | // if (!syncing) return false 84 | // t.ok(help.matchLink(output), 'prints link') 85 | // st.kill() 86 | // return true 87 | // }) 88 | // st.stderr.empty() 89 | // st.end() 90 | // }) 91 | 92 | // test('close sharer', function (t) { 93 | // shareDat.close(function () { 94 | // rimraf.sync(path.join(shareDat.path, '.dat')) 95 | // t.end() 96 | // }) 97 | // }) 98 | 99 | // test.onFinish(function () { 100 | // rimraf.sync(baseTestDir) 101 | // }) 102 | 
-------------------------------------------------------------------------------- /test/usage.js: -------------------------------------------------------------------------------- 1 | var path = require('path') 2 | var test = require('tape') 3 | var spawn = require('./helpers/spawn.js') 4 | 5 | var dat = path.resolve(path.join(__dirname, '..', 'bin', 'cli.js')) 6 | var version = require('../package.json').version 7 | 8 | test('usage - prints usage', function (t) { 9 | var d = spawn(t, dat) 10 | d.stderr.match(function (output) { 11 | var usage = output.indexOf('dat ') > -1 12 | if (!usage) return false 13 | return true 14 | }) 15 | d.end() 16 | }) 17 | 18 | test('usage - prints version', function (t) { 19 | var d = spawn(t, dat + ' -v') 20 | d.stderr.match(function (output) { 21 | var ver = output.indexOf(version) > -1 22 | if (!ver) return false 23 | return true 24 | }) 25 | d.end() 26 | }) 27 | 28 | test('usage - also prints version', function (t) { 29 | var d = spawn(t, dat + ' -v') 30 | d.stderr.match(function (output) { 31 | var ver = output.indexOf(version) > -1 32 | if (!ver) return false 33 | return true 34 | }) 35 | d.end() 36 | }) 37 | 38 | test('usage - help prints usage', function (t) { 39 | var d = spawn(t, dat + ' help') 40 | d.stderr.match(function (output) { 41 | var usage = output.indexOf('dat ') > -1 42 | if (!usage) return false 43 | return true 44 | }) 45 | d.end() 46 | }) 47 | --------------------------------------------------------------------------------