├── .gitattributes ├── .gitignore ├── .npmignore ├── .npmrc ├── .veendor.js ├── LICENSE ├── README.md ├── global.d.ts ├── package.json ├── src ├── bin │ ├── veendor-calc.ts │ ├── veendor-install.ts │ └── veendor.ts ├── lib │ ├── backends │ │ ├── git-lfs.ts │ │ ├── http.ts │ │ ├── local.ts │ │ └── s3.ts │ ├── commandWrappers │ │ ├── gitWrapper.ts │ │ ├── helpers.ts │ │ ├── npmWrapper.ts │ │ ├── rsyncWrapper.ts │ │ └── tarWrapper.ts │ ├── deepSortedJson.ts │ ├── errors.ts │ ├── install │ │ ├── hashGetters.ts │ │ ├── helpers.ts │ │ ├── index.ts │ │ └── pushBackends.ts │ ├── pkgjson.ts │ ├── resolveConfig.ts │ ├── resolveLockfile.ts │ ├── util │ │ ├── logger.ts │ │ └── progress.ts │ └── validateConfig.ts ├── serviceTypes.ts ├── test │ ├── integration │ │ ├── bundles │ │ │ └── 7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz │ │ ├── integration.test.ts │ │ ├── prepareNvm.sh │ │ ├── runTest.sh │ │ └── testCases │ │ │ ├── calcHashPlain │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── calcHashWithPackageLock │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── calcHashWithShrinkWrap │ │ │ ├── .veendor.js │ │ │ ├── npm-shrinkwrap.json │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── gitPull │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── gitPush │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── httpPull │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── localPull │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── localPush │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── localPushWithPackageLock │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ ├── noSave │ │ │ ├── .npmrc │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ ├── package2.json │ │ │ ├── package3.json │ │ │ └── testcase.sh │ │ │ ├── s3Pull │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ │ │ └── s3Push │ │ │ ├── .veendor.js │ │ │ ├── package.json │ │ │ └── testcase.sh │ ├── setup.ts │ └── unit │ │ ├── backends │ │ ├── git-lfs.test.ts │ │ ├── http.test.ts │ │ ├── local.test.ts │ │ └── s3.test.ts │ │ ├── deepSortedJson.test.ts │ │ ├── gitWrapper.test.ts │ │ ├── helpers.ts │ │ ├── install │ │ ├── helpers.test.ts │ │ ├── index.test.ts │ │ └── pushBackends.test.ts │ │ ├── pkgjson.test.ts │ │ └── validateConfig.test.ts └── types.ts └── tsconfig.json /.gitattributes: -------------------------------------------------------------------------------- 1 | *.tar.gz filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul 15 | coverage 16 | 17 | # nyc test coverage 18 | .nyc_output 19 | 20 | # Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files) 21 | .grunt 22 | 23 | # node-waf configuration 24 | .lock-wscript 25 | 26 | # Compiled binary addons (http://nodejs.org/api/addons.html) 27 | build/Release 28 | 29 | # Dependency directories 30 | node_modules 31 | jspm_packages 32 | 33 | # Optional npm cache directory 34 | .npm 35 | .veendor 36 | 37 | # Optional REPL history 38 | 
.node_repl_history 39 | 40 | /tmp 41 | /nvm 42 | /dist 43 | 44 | package-lock.json 45 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | /tmp 2 | /nvm 3 | /.veendor 4 | /.idea 5 | npm-debug.log* 6 | /test 7 | /dist/test 8 | -------------------------------------------------------------------------------- /.npmrc: -------------------------------------------------------------------------------- 1 | package-lock=false 2 | -------------------------------------------------------------------------------- /.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | const path = require('path'); 4 | 5 | module.exports = { 6 | backends: [ 7 | { 8 | alias: 'local', 9 | push: true, 10 | backend: 'local', 11 | options: { 12 | directory: path.resolve(process.env.HOME, '.veendor-local') 13 | } 14 | }, 15 | { 16 | alias: 's3', 17 | push: true, 18 | backend: 's3', 19 | options: { 20 | bucket: 'mcornholio-s3', 21 | } 22 | }, 23 | { 24 | alias: 'github', 25 | push: true, 26 | backend: 'git-lfs', 27 | options: { 28 | checkLfsAvailability: true, 29 | repo: 'git@github.com:mutantcornholio/veendor-vendors.git' 30 | } 31 | } 32 | ], 33 | useGitHistory: { 34 | depth: 5 35 | }, 36 | packageHash: { 37 | suffix: process.platform 38 | } 39 | }; 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2016 Cornholio 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # veendor 2 | A tool for storing your npm dependencies in arbitrary storage 3 | 4 | ### Features 5 | Veendor: 6 | * caches your `node_modules` in you-define-where. 7 | * bootstraps your deps **fast**. 8 | * only installs deps that have changed, effectively locking your deps. 9 | * provides multi-layered cache. 10 | * supports caching in git and local directory out-of-the-box. 11 | * supports customizing cache keys calculation. 
12 | 13 | ### How it works 14 | It calculates SHA-1 of `dependencies` and `devDependencies` in your `package.json`, 15 | then searches for that hash in `backends` (cache providers). 16 | If you commit your `package-lock.json` or `npm-shrinkwrap.json`, its contents will be hashed too. 17 | If found, veendor downloads the archive and unpacks your `node_modules`. Voila! 18 | If not, veendor looks at previous revisions of your `package.json` and 19 | tries to find older bundles, then installs only deps that have changed. 20 | After that, veendor uploads the new bundle to all `backends`. 21 | If no older bundles are found, veendor does a clean `npm install` and 22 | pushes the bundle for future use. 23 | 24 | ### Installation and use 25 | Install veendor globally: 26 | ``` 27 | npm install -g veendor 28 | ``` 29 | 30 | Go to your project and add a config file (`.veendor.js` or `.veendor.json`). 31 | See the section about the config file below. 32 | Run `veendor install`. 33 | That's all! 34 | 35 | ### Config file 36 | Veendor supports configs as Node.js modules or JSON files. 37 | The config file contains these sections: 38 | 39 | #### backends 40 | Required. 41 | Define your caches here. The `backends` property is an array of objects. 42 | Bundles are searched for and uploaded in the order defined here. 43 | Each object has this format: 44 | ```js 45 | { 46 | alias: 'some_name', // Required, choose any name you like. 47 | backend: 'local', // String or module. See built-in backends and backend API sections. 48 | push: true, // Optional, defaults to `false`. Should bundles be pushed to this backend. 49 | pushMayFail: true, // Optional, defaults to `false`. 50 | // `veendor install` won't fail if push to backend fails. 51 | options: {} // Backend-specific options. 52 | } 53 | ``` 54 | 55 | #### packageHash 56 | Optional, object. 57 | Used to extend cache key calculation. 58 | Right now, only the `suffix` property is used. 59 | `suffix` may be a string or a function that returns a string. 60 | Examples: 61 | ```js 62 | // Suffix by arch. 63 | // Hashes will look like this: d0d5f10c199f507ea6e1584082feea229d59275b-darwin 64 | packageHash: { 65 | suffix: process.platform 66 | } 67 | ``` 68 | 69 | ```js 70 | // Suffix by arch and node api version 71 | // d0d5f10c199f507ea6e1584082feea229d59275b-darwin-46 72 | packageHash: { 73 | suffix: process.platform + '-' + process.versions.modules 74 | } 75 | ``` 76 | 77 | ```js 78 | // Invalidate every month 79 | // d0d5f10c199f507ea6e1584082feea229d59275b-2017-7 80 | packageHash: { 81 | suffix: () => { 82 | const date = new Date(); 83 | return date.getFullYear() + '-' + date.getMonth(); 84 | } 85 | } 86 | ``` 87 | 88 | #### installDiff 89 | Optional, defaults to `true`. Enables diff installation. 90 | 91 | #### fallbackToNpm 92 | Optional, defaults to `true`. 93 | If `true`, runs `npm install` when a bundle is not found. 94 | Set it to `false` if you want to lock deps with veendor; 95 | in that case, the setting should either be environment-dependent, or your backends should be populated manually. 96 | 97 | #### useGitHistory 98 | Optional. 99 | If it contains a `depth` property with a number value, veendor will look at 100 | that many git revisions of `package.json`. 101 | Note that only changes that affect dependencies and devDependencies count. 102 | Example: 103 | ```js 104 | useGitHistory: { 105 | depth: 5 106 | } 107 | ``` 108 | 109 | #### npmVersion 110 | Optional. 111 | Semver constraint on npm. Veendor will crash if the npm version is incompatible. 112 | Example: 113 | ```js 114 | npmVersion: '^5' 115 | ``` 116 | 117 | #### veendorVersion 118 | Optional.
119 | Semver constraint on veendor itself. 120 | Use it if you want to force your team to update veendor and prohibit pushing of bundles created by older versions of veendor. 121 | Example: 122 | ```js 123 | veendorVersion: '>=2.1' 124 | ``` 125 | Please note that the `veendorVersion` option is not supported by veendor before 2.0, so if your team might use 1.x, add this to your .veendor.js: 126 | 127 | ```js 128 | if (!global.VEENDOR_VERSION) { 129 | throw new Error('veendor version is incompatible, please update'); 130 | } 131 | ``` 132 | 133 | #### dedupe 134 | Optional, defaults to `false`. 135 | Runs `npm dedupe` after each diff install (it doesn't make any sense to run `dedupe` after a full install). 136 | When adding new packages to an existing pile, NPM doesn't do a very good job keeping your `node_modules` in check. 137 | Because of that, if you've been using diff installations for a long time, you might discover a lot of library duplicates. 138 | Sometimes you might even get nonfunctional bundles. `dedupe` helps. 139 | 140 | Example: 141 | ```js 142 | dedupe: true 143 | ``` 144 | 145 | #### clearSharedCache 146 | Optional, defaults to `false`. 147 | Some software (e.g. babel) uses `node_modules/.cache` as a place for caches. When such software runs in postinstall scripts, or just in cases of poor luck, these caches may end up in your veendor bundles. 148 | If you add `clearSharedCache: true` to your config, veendor will remove said directory from your `node_modules` before each push. 149 | Example: 150 | ```js 151 | clearSharedCache: true 152 | ``` 153 | 154 | ### Built-in backends 155 | #### http 156 | Read-only HTTP backend. 157 | Designed for anonymous access; you should upload your bundles some other way. 158 | Accepts these options: 159 | ```js 160 | { 161 | resolveUrl: hash => { // Required. Function for getting the remote URL for a given hash. 162 | // Returns a string or a Promise. 163 | return `https://someserver.org/${hash}.tar.gz`; 164 | }, 165 | 166 | compression: 'xz', // Optional, defaults to 'gzip'. Also supports 'bzip2', 'xz'. 167 | strict: true // Optional, defaults to false. 168 | // If true, all codes other than 404 and 200 will abort installation. 169 | } 170 | ``` 171 | 172 | #### s3 173 | Stores bundles in an Amazon S3 bucket. 174 | 175 | Accepts these options: 176 | ```js 177 | { 178 | bucket: 'veendor', // Required, name of the S3 bucket. The bucket should already exist. 179 | s3Options: { // Optional, options for AWS-SDK (see below) 180 | endpoint: 'localhost:14569' 181 | }, 182 | objectAcl: 'authenticated-read', // Optional, defaults to your bucket settings. 183 | // ACL for created objects. See below. 184 | compression: 'xz', // Optional, defaults to 'gzip'. Also supports 'bzip2', 'xz'. 185 | } 186 | ``` 187 | 188 | veendor uses [aws-sdk](https://www.npmjs.com/package/aws-sdk) for the s3 backend. 189 | `s3Options` is an [AWS.Config](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/Config.html#constructor-property) 190 | options hash, and may contain any of the listed options. 191 | `objectAcl` is the `ACL` parameter passed to `S3.upload()`. 192 | Check [the docs](https://docs.aws.amazon.com/AWSJavaScriptSDK/latest/AWS/S3.html#upload-property). 193 | 194 | You can use any of [these](https://docs.aws.amazon.com/sdk-for-javascript/v2/developer-guide/setting-credentials-node.html) 195 | methods to set your credentials. 196 | Also, you can directly pass `accessKeyId` and `secretAccessKey` params to `s3Options`. 197 | 198 | #### git-lfs 199 | Stores bundles in a git repo.
200 | Accepts these options: 201 | ```js 202 | { 203 | repo: 'git@github.com:you/your-vendors.git', // Required. Git remote. 204 | compression: 'xz', // Optional, defaults to 'gzip'. Also supports 'bzip2', 'xz'. 205 | defaultBranch: 'branch', // Default branch of your repo. Defaults to 'master'. 206 | checkLfsAvailability: true // Prevent veendor from running if git-lfs is not installed. 207 | // Optional, defaults to `false`. 208 | } 209 | ``` 210 | Note: while git-lfs support is not mandatory for your remote, 211 | it's pretty much required in practice, since every pushed bundle permanently grows your repo. 212 | Don't forget to set it up — add the following to your `.gitattributes`: 213 | ``` 214 | *.tar.gz filter=lfs diff=lfs merge=lfs -text 215 | ``` 216 | (replace `.tar.gz` with your selected compression format) 217 | [more about git-lfs](https://git-lfs.github.com) 218 | 219 | #### local 220 | Stores bundles in a local directory. 221 | Accepts these options: 222 | ```js 223 | { 224 | directory: '/var/cache/veendor', // Required. Directory to store bundles in. 225 | compression: 'xz' // Optional, defaults to 'gzip'. Also supports 'bzip2', 'xz'. 226 | } 227 | ``` 228 | 229 | #### Example config 230 | ```js 231 | const path = require('path'); 232 | 233 | module.exports = { 234 | backends: [ 235 | { 236 | alias: 'local', 237 | push: true, 238 | backend: 'local', 239 | options: { 240 | directory: path.resolve(process.env.HOME, '.veendor-local') 241 | } 242 | }, 243 | { 244 | alias: 'github', 245 | push: true, 246 | backend: 'git-lfs', 247 | options: { 248 | repo: 'git@github.com:you/your-vendors.git' 249 | } 250 | } 251 | ], 252 | useGitHistory: { 253 | depth: 5 254 | } 255 | }; 256 | 257 | ``` 258 | 259 | ### Backends API 260 | A backend should be an object with these properties: 261 | #### pull(hash, options, cacheDir) => Promise 262 | Should search for a bundle with the provided hash and 263 | place `node_modules` into `process.cwd()`. 264 | The promise resolves if it succeeded, rejects if not. 265 | The promise must be rejected with `require('veendor/lib/errors').BundleNotFoundError` 266 | if the bundle is not found, or with any other error on a generic failure. 267 | Failing with a generic error crashes veendor. 268 | `options` is the object called `backend-specific options` earlier. 269 | If the backend needs to store some temp data, 270 | veendor provides a clean `cacheDir`. 271 | #### push(hash, options, cacheDir) => Promise 272 | Should take `node_modules` from `process.cwd()` and 273 | upload it to the remote as a bundle with `hash`. 274 | `options` and `cacheDir` are the same as in `pull`. 275 | The promise resolves if it succeeded, rejects if not. 276 | The promise must be rejected with `require('veendor/lib/errors').BundleAlreadyExistsError` 277 | if it can't push because another bundle is already there. 278 | This is a common race condition, and veendor will re-pull the new bundle on this error. 279 | #### validateOptions(options) => undefined|Promise 280 | Called upon start while validating the config. 281 | May be synchronous or asynchronous. 282 | Should throw an error or reject the returned promise if the backend-specific options in the config 283 | are invalid. 284 | If the backend has external dependencies, their availability may be checked here too. 285 | May mutate `options` to set default values. 286 | #### keepCache 287 | Boolean, optional, defaults to false. 288 | If your backend needs to keep its cache between calls for the sake of efficiency, set it to true. 289 | Otherwise, `cacheDir` will be cleaned before every call.
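#### Custom backend example
For illustration, here is a minimal sketch of a custom backend implementing this API. It is a toy re-implementation of the built-in `local` backend, assuming gzip-only compression and a `tar` binary on `PATH`; the module name and the `directory` option are made up for the example.
```js
// my-backend.js: a bare-bones veendor backend storing bundles
// as .tar.gz files in `options.directory` (illustration only).
const path = require('path');
const fs = require('fs');
const {execFile} = require('child_process');
const errors = require('veendor/lib/errors');

// Run `tar` with the given args in `cwd`, resolving on exit code 0.
function tar(args, cwd) {
    return new Promise((resolve, reject) => {
        execFile('tar', args, {cwd}, err => err ? reject(err) : resolve());
    });
}

module.exports = {
    keepCache: false, // cacheDir will be cleaned before every call

    validateOptions(options) {
        if (typeof options.directory !== 'string') {
            throw new errors.InvalidOptionsError('`directory` must be a string');
        }
    },

    pull(hash, options) {
        const archive = path.join(options.directory, `${hash}.tar.gz`);

        if (!fs.existsSync(archive)) {
            return Promise.reject(new errors.BundleNotFoundError());
        }

        // Unpack node_modules into the current working directory.
        return tar(['-xzf', archive], process.cwd());
    },

    push(hash, options) {
        const archive = path.join(options.directory, `${hash}.tar.gz`);

        if (fs.existsSync(archive)) {
            // Someone pushed this hash first; veendor re-pulls on this error.
            return Promise.reject(new errors.BundleAlreadyExistsError());
        }

        return tar(['-czf', archive, 'node_modules'], process.cwd());
    },
};
```
Such a module can then be referenced from `.veendor.js` via `backend: require('./my-backend')` instead of a built-in backend name.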
290 | -------------------------------------------------------------------------------- /global.d.ts: -------------------------------------------------------------------------------- 1 | declare namespace NodeJS { 2 | interface Global { 3 | VEENDOR_VERSION: string 4 | } 5 | } 6 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "veendor", 3 | "version": "3.1.1", 4 | "description": "a tool for storing your npm dependencies in arbitrary storage", 5 | "bin": { 6 | "veendor": "bin/veendor.js" 7 | }, 8 | "scripts": { 9 | "build": "npm run build:1_tsc && npm run build:2_tscpaths && npm run build:3_tests_dirs && npm run build:4_tests_files && npm run build:5_package.json", 10 | "build:1_tsc": "./node_modules/.bin/tsc", 11 | "build:2_tscpaths": "./node_modules/.bin/tscpaths -p tsconfig.json -s src -o dist", 12 | "build:3_tests_dirs": "find src/test/integration -type d |perl -pe 's/^src\\///' | xargs -I{} -n1 mkdir -p \"dist/{}\"", 13 | "build:4_tests_files": "find src/test/integration -type f \\( ! -name \"*.ts\" \\) |perl -pe 's/^src\\///' | xargs -I{} -n1 cp \"src/{}\" \"dist/{}\"", 14 | "build:5_package.json": "cp package.json dist/ && cp .npmrc dist/ && cp .npmignore dist/", 15 | "test": "npm run test:unit && npm run test:integration", 16 | "test:unit": "mocha --file dist/test/setup.js --reporter dot 'dist/test/unit/**/*.test.js'", 17 | "test:integration": "mocha --file dist/test/setup.js --reporter dot 'dist/test/integration/**/*.test.js'", 18 | "publish": "npm run build && npm run test && cd dist && npm publish --access=public" 19 | }, 20 | "repository": { 21 | "type": "git", 22 | "url": "https://github.com/mutantcornholio/veendor" 23 | }, 24 | "author": "cornholio <0@mcornholio.ru>", 25 | "license": "MIT", 26 | "bugs": { 27 | "url": "https://github.com/mutantcornholio/veendor/issues" 28 | }, 29 | "homepage": "https://github.com/mutantcornholio/veendor", 30 | "dependencies": { 31 | "@types/fs-extra": "^5.0.4", 32 | "aws-sdk": "^2.213.1", 33 | "cli-progress": "^2.1.0", 34 | "colors": "^1.3.1", 35 | "commander": "^2.16.0", 36 | "deep-object-diff": "^1.0.4", 37 | "fs-extra": "^4.0.0", 38 | "lodash": "^4.14.1", 39 | "semver": "^5.5.0", 40 | "tracer": "^0.9.1" 41 | }, 42 | "devDependencies": { 43 | "@types/chai": "^4.2.7", 44 | "@types/chai-as-promised": "^7.1.2", 45 | "@types/cli-progress": "^1.8.0", 46 | "@types/invariant": "^2.2.31", 47 | "@types/lodash": "^4.14.116", 48 | "@types/mocha": "^5.2.7", 49 | "@types/mock-fs": "^4.10.0", 50 | "@types/node": "^10.5.8", 51 | "@types/progress": "^2.0.1", 52 | "@types/s3rver": "0.0.32", 53 | "@types/semver": "^5.5.0", 54 | "@types/sinon": "^7.5.1", 55 | "chai": "^3.5.0", 56 | "chai-as-promised": "^6.0.0", 57 | "invariant": "^2.2.4", 58 | "mocha": "^5.2.0", 59 | "mock-fs": "^4.9.0", 60 | "mock-require": "^1.3.0", 61 | "nock": "^11.7.2", 62 | "s3rver": "^2.2.3", 63 | "sinon": "^4.5.0", 64 | "ts-node": "^8.6.1", 65 | "ts-sinon": "^1.0.24", 66 | "tsc-watch": "^4.0.0", 67 | "tscpaths": "0.0.9", 68 | "typescript": "^3.7.4" 69 | } 70 | } 71 | -------------------------------------------------------------------------------- /src/bin/veendor-calc.ts: -------------------------------------------------------------------------------- 1 | import fsExtra from 'fs-extra'; 2 | import path from 'path'; 3 | import program from 'commander'; 4 | 5 | import resolveConfig from '@/lib/resolveConfig'; 6 | import * as logger from
'@/lib/util/logger'; 7 | import resolveLockfile from '@/lib/resolveLockfile'; 8 | import {getFSHash} from '@/lib/install/hashGetters'; 9 | 10 | program 11 | .description('Calculate and print your bundle id') 12 | .option('-c --config [configuration-file]') 13 | .option('--debug', 'don\'t remove .veendor-debug.log') 14 | .option('-v --verbose', 'Verbose output. Could be from `-v` to `-vvv`', increaseVerbosity, 0) 15 | .parse(process.argv); 16 | 17 | // @ts-ignore 18 | function increaseVerbosity(v, total: number) { 19 | return total + 1; 20 | } 21 | 22 | const daLogger = logger.setDefaultLogger(1, 4 - (program.verbose || 0)); 23 | 24 | resolveConfig(program.config) 25 | .then(async (resolvedConfig) => { 26 | const config = resolvedConfig; 27 | const lockfilePath = await resolveLockfile(); 28 | 29 | const hash = await getFSHash(config, path.resolve(process.cwd(), 'package.json'), lockfilePath); 30 | 31 | console.log(hash.hash); 32 | 33 | if (!(program.debug)) { 34 | return fsExtra.remove(path.resolve(process.cwd(), '.veendor-debug.log')); 35 | } 36 | 37 | process.exit(0); 38 | }).catch(error => { 39 | daLogger.error(error); 40 | process.exit(1); 41 | }); 42 | -------------------------------------------------------------------------------- /src/bin/veendor-install.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import fsExtra from 'fs-extra'; 3 | import program from 'commander'; 4 | 5 | import resolveConfig from '@/lib/resolveConfig'; 6 | import * as logger from '@/lib/util/logger'; 7 | import * as gitWrapper from '@/lib/commandWrappers/gitWrapper'; 8 | import resolveLockfile from '@/lib/resolveLockfile'; 9 | import install, {NodeModulesAlreadyExistError} from '@/lib/install'; 10 | import {Config} from '@/types'; 11 | 12 | process.on('uncaughtException', console.error); 13 | process.on('unhandledRejection', console.error); 14 | 15 | program 16 | .description('Download and install node_modules') 17 | .option('-f, --force', 'overwrite node_modules if it already exists') 18 | .option('-c --config [configuration-file]') 19 | .option('-r --rsync', 'download node_modules into tmpdir \n' + 20 | 'and then rsync --delete them to working directory\n' + 21 | 'somewhat slower, but you-know-which IDE won\'t go crazy reindexing it') 22 | .option('--debug', 'don\'t remove .veendor-debug.log') 23 | .option('-v --verbose', 'Verbose output. Could be from `-v` to `-vvv`', increaseVerbosity, 0) 24 | .parse(process.argv); 25 | 26 | // @ts-ignore 27 | function increaseVerbosity(v, total: number) { 28 | return total + 1; 29 | } 30 | 31 | const daLogger = logger.setDefaultLogger(1, 3 - (program.verbose || 0)); 32 | 33 | let config: Config; 34 | 35 | resolveConfig(program.config) 36 | .then(async (resolvedConfig) => { 37 | config = resolvedConfig; 38 | const lockfilePath = await resolveLockfile(); 39 | 40 | await install({force: Boolean(program.force), config, lockfilePath, rsyncMode: program.rsync}); 41 | 42 | if (!(program.debug)) { 43 | await fsExtra.remove(path.resolve(process.cwd(), '.veendor-debug.log')); 44 | } 45 | }) 46 | .catch(e => { 47 | if (e instanceof NodeModulesAlreadyExistError) { 48 | daLogger.error('\'node_modules\' directory already exists. 
Use -f option to overwrite'); 49 | return; 50 | } else if (e instanceof gitWrapper.NotAGitRepoError && config.useGitHistory) { 51 | daLogger.error(`'useGitHistory' set in config, but ${process.cwd()} is not a git repo`); 52 | return; 53 | } 54 | 55 | daLogger.error(e); process.exit(1) 56 | }); 57 | 58 | -------------------------------------------------------------------------------- /src/bin/veendor.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import program from 'commander'; 3 | const {version} = require('../package.json'); 4 | 5 | program 6 | .version(version) 7 | .description('A tool for vendoring your npm dependencies') 8 | .command('calc', 'calculate and print your bundle id') 9 | .command('install', 'download and install node_modules') 10 | .parse(process.argv); 11 | -------------------------------------------------------------------------------- /src/lib/backends/git-lfs.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import fsExtra from 'fs-extra'; 3 | import * as gitWrapper from '../commandWrappers/gitWrapper'; 4 | import * as tarWrapper from '../commandWrappers/tarWrapper'; 5 | import * as errors from '../errors'; 6 | import {Compression} from '@/lib/commandWrappers/tarWrapper'; 7 | 8 | export const keepCache = true; 9 | 10 | let _remoteIsFresh: boolean = false; 11 | 12 | export function setRemoteFreshness(val: boolean): void { // Exporting this for tests 13 | _remoteIsFresh = val; 14 | } 15 | 16 | export type GitLfsOptions = { 17 | compression: Compression, 18 | repo: string, 19 | defaultBranch: string, 20 | checkLfsAvailability: boolean, 21 | } 22 | 23 | export function validateOptions(options: Partial) { 24 | return new Promise((resolve, reject) => { 25 | if (typeof options.repo !== 'string' || options.repo.length === 0) { 26 | return reject(new errors.InvalidOptionsError('Invalid git repo')); 27 | } 28 | 29 | if (options.compression && !(options.compression in tarWrapper.compression)) { 30 | return reject(new errors.InvalidOptionsError(`Invalid compression: ${options.compression}`)); 31 | } 32 | 33 | if (options.compression === undefined) { 34 | options.compression = 'gzip'; 35 | } 36 | 37 | if (options.defaultBranch === undefined) { 38 | options.defaultBranch = 'master'; 39 | } 40 | 41 | if (options.checkLfsAvailability === undefined) { 42 | options.checkLfsAvailability = false; 43 | 44 | resolve(); 45 | } else { 46 | if (typeof options.checkLfsAvailability !== 'boolean') { 47 | return reject(new errors.InvalidOptionsError( 48 | `Invalid 'checkLfsAvailability' option: ${options.checkLfsAvailability}` 49 | )); 50 | } 51 | 52 | if (options.checkLfsAvailability) { 53 | gitWrapper.isGitLfsAvailable().then(resolve, () => { 54 | 55 | reject(new gitWrapper.GitLfsNotAvailableError( 56 | 'git-lfs is not available. Check git-lfs.github.com for docs.' 
57 | )); 58 | }); 59 | } else { 60 | resolve(); 61 | } 62 | } 63 | }); 64 | } 65 | 66 | export function pull(hash: string, options: GitLfsOptions, cacheDir: string) { 67 | const repoDir = path.resolve(cacheDir, 'repo'); 68 | return gitWrapper.isGitRepo(repoDir) 69 | .then(res => { 70 | if (res) { 71 | if (_remoteIsFresh) { 72 | return Promise.resolve(); 73 | } 74 | 75 | return gitWrapper.fetch(repoDir).then(() => {}); 76 | } else { 77 | 78 | if (_remoteIsFresh) { 79 | return Promise.resolve(); 80 | } 81 | 82 | return gitWrapper.clone(options.repo, repoDir).then(() => {}); 83 | } 84 | }) 85 | .then(() => { 86 | _remoteIsFresh = true; 87 | return gitWrapper.checkout(repoDir, `veendor-${hash}`) 88 | .then(() => { 89 | return new Promise((resolve, reject) => { 90 | gitWrapper.isGitLfsAvailable().then(() => { 91 | gitWrapper.lfsPull(repoDir).then(resolve, reject); 92 | }, resolve); 93 | }); 94 | }, () => { 95 | throw new errors.BundleNotFoundError; 96 | }) 97 | .then(() => { 98 | return tarWrapper.extractArchive( 99 | path.resolve( 100 | repoDir, 101 | `${hash}.tar${tarWrapper.compression[options.compression]}` 102 | ) 103 | ); 104 | }); 105 | }); 106 | } 107 | 108 | export function push(hash:string, options: GitLfsOptions, cacheDir: string) { 109 | const repoDir = path.resolve(cacheDir, 'repo'); 110 | const archivePath = path.resolve( 111 | repoDir, 112 | `${hash}.tar${tarWrapper.compression[options.compression]}` 113 | ); 114 | 115 | const tagName = `veendor-${hash}`; 116 | 117 | return fsExtra.access(repoDir, fsExtra.constants.F_OK) 118 | .then(() => { 119 | return gitWrapper.fetch(repoDir); 120 | }, () => { 121 | return gitWrapper.clone(options.repo, repoDir); 122 | }) 123 | .then(() => { 124 | return gitWrapper.checkout(repoDir, options.defaultBranch) 125 | }) 126 | .then(() => { 127 | return gitWrapper.resetToRemote(repoDir, options.defaultBranch) 128 | }) 129 | .then(() => { 130 | return tarWrapper.createArchive(archivePath, [path.resolve( 131 | process.cwd(), 132 | 'node_modules' 133 | )], options.compression); 134 | }) 135 | .then(() => { 136 | return gitWrapper.add(repoDir, [archivePath], true) 137 | }) 138 | .then(() => { 139 | return gitWrapper.commit(repoDir, hash) 140 | }) 141 | .then(() => { 142 | return gitWrapper.tag(repoDir, tagName) 143 | }) 144 | .then(() => { 145 | return gitWrapper.push(repoDir, tagName) 146 | }) 147 | .catch(error => { 148 | if (error instanceof gitWrapper.RefAlreadyExistsError) { 149 | throw new errors.BundleAlreadyExistsError(); 150 | } 151 | 152 | throw error; 153 | }); 154 | } 155 | -------------------------------------------------------------------------------- /src/lib/backends/http.ts: -------------------------------------------------------------------------------- 1 | import http from 'http'; 2 | import https from 'https'; 3 | import url from 'url'; 4 | import * as tarWrapper from '../commandWrappers/tarWrapper'; 5 | import * as errors from '../errors'; 6 | import {Compression} from '@/lib/commandWrappers/tarWrapper'; 7 | import {ControlToken} from '@/lib/commandWrappers/helpers'; 8 | import {BackendToolsProvider} from '@/types'; 9 | 10 | 11 | export type HttpOptions = { 12 | compression: Compression, 13 | resolveUrl: (hash: string) => string | Promise, 14 | strict: boolean, 15 | } 16 | 17 | type Transport = { 18 | get(options: http.RequestOptions | string | URL, callback?: (res: http.IncomingMessage) => void): http.ClientRequest; 19 | } 20 | 21 | export function validateOptions(options: Partial) { 22 | if (options.compression && 
!(options.compression in tarWrapper.compression)) { 23 | throw new errors.InvalidOptionsError(`Invalid compression: ${options.compression}`); 24 | } 25 | 26 | if (!options.compression) { 27 | options.compression = 'gzip'; 28 | } 29 | 30 | if (!options.resolveUrl) { 31 | throw new errors.InvalidOptionsError('`resolveUrl` function must be provided'); 32 | } 33 | 34 | if (!options.strict) { 35 | options.strict = false; 36 | } 37 | } 38 | 39 | export async function pull(hash: string, options: HttpOptions, _cachedir: string, toolsProvider: BackendToolsProvider) { 40 | let resolvedUrlPromise = options.resolveUrl(hash); 41 | 42 | if (!(resolvedUrlPromise instanceof Promise)) { 43 | resolvedUrlPromise = Promise.resolve(resolvedUrlPromise); 44 | } 45 | 46 | const resolvedUrl = await resolvedUrlPromise; 47 | 48 | const parsedUrl = url.parse(resolvedUrl); 49 | let done = false; 50 | 51 | if (parsedUrl.protocol !== 'http:' && parsedUrl.protocol !== 'https:') { 52 | done = true; 53 | 54 | throw new InvalidProtocolError( 55 | `http backend can't work with \`${parsedUrl.protocol}\` protocol. ` + 56 | `Only \`http:\` and \`https:\` are supported` 57 | ); 58 | } 59 | 60 | const transport: Transport = parsedUrl.protocol === 'https:' ? https : http; 61 | 62 | if (typeof parsedUrl.href !== 'string') { 63 | done = true; 64 | throw new InvalidUrlError(`${parsedUrl.href} is not a valid URL`); 65 | } 66 | 67 | const href = parsedUrl.href; 68 | 69 | return new Promise((resolve, reject) => { 70 | transport.get(href, res => { 71 | if (res.statusCode === 404) { 72 | done = true; 73 | 74 | reject(new errors.BundleNotFoundError()); 75 | return; 76 | } else if (res.statusCode !== 200) { 77 | if(done) { 78 | return; 79 | } 80 | 81 | done = true; 82 | if (options.strict) { 83 | reject(new InvalidStatusCodeError( 84 | `Request to \'${parsedUrl}\' failed. Invalid status code: \`${res.statusCode}\`` 85 | )); 86 | return; 87 | } 88 | 89 | reject(new errors.BundleNotFoundError()); 90 | return; 91 | } 92 | 93 | 94 | const contentLengthHeader = res.headers['content-length']; 95 | const contentLength = typeof contentLengthHeader === 'string' ? 
96 | (parseInt(contentLengthHeader, 10)) : undefined; 97 | 98 | const progressStream = toolsProvider.getProgressStream('pull', contentLength); 99 | 100 | res.pipe(progressStream); 101 | 102 | progressStream.toggleVisibility(true); 103 | 104 | const tarWrapperToken: ControlToken = {}; 105 | tarWrapper.extractArchiveFromStream( 106 | progressStream, 107 | options.compression, 108 | {controlToken: tarWrapperToken} 109 | ) 110 | .then(() => { 111 | if (!done) { 112 | resolve(); 113 | done = true; 114 | } 115 | }, (error: Error) => { 116 | if (!done) { 117 | reject(error); 118 | done = true; 119 | } 120 | }); 121 | 122 | res.on('error', (error: Error) => { 123 | if (done) { 124 | return; 125 | } 126 | 127 | done = true; 128 | 129 | if (tarWrapperToken.terminate) { 130 | tarWrapperToken.terminate(); 131 | } 132 | 133 | if (options.strict) { 134 | return reject(new BundleDownloadError(error.stack)); 135 | } 136 | 137 | return reject(new errors.BundleNotFoundError(error.stack)); 138 | }) 139 | 140 | }); 141 | 142 | }); 143 | } 144 | 145 | export class InvalidUrlError extends errors.VeendorError {} 146 | export class InvalidProtocolError extends errors.VeendorError {} 147 | export class InvalidStatusCodeError extends errors.VeendorError {} 148 | export class BundleDownloadError extends errors.VeendorError {} 149 | 150 | export function push() { 151 | throw new errors.VeendorError('`http` backend is read-only, pushing is not implemented'); 152 | } 153 | -------------------------------------------------------------------------------- /src/lib/backends/local.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import fs from 'fs'; 3 | import fsExtra from 'fs-extra'; 4 | import * as tarWrapper from '../commandWrappers/tarWrapper'; 5 | import * as errors from '../errors'; 6 | import {Compression} from '../commandWrappers/tarWrapper'; 7 | 8 | export type LocalOptions = { 9 | compression: Compression, 10 | directory: string, 11 | } 12 | 13 | export function validateOptions(options: Partial) { 14 | if (options.compression && !(options.compression in tarWrapper.compression)) { 15 | throw new errors.InvalidOptionsError(`Invalid compression: ${options.compression}`); 16 | } 17 | 18 | if (!options.compression) { 19 | options.compression = 'gzip'; 20 | } 21 | 22 | if (typeof options.directory !== 'string') { 23 | throw new errors.InvalidOptionsError(`Invalid directory '${options.directory}'`); 24 | } 25 | 26 | try { 27 | fs.readdirSync(options.directory); 28 | } catch (e) { 29 | throw new errors.InvalidOptionsError(`Invalid directory '${options.directory}': ${e.message}`); 30 | } 31 | } 32 | 33 | export function pull(hash: string, options: LocalOptions) { 34 | const archivePath = path.resolve( 35 | options.directory, 36 | `${hash}.tar${tarWrapper.compression[options.compression]}` 37 | ); 38 | 39 | return fsExtra.stat(archivePath) 40 | .then(() => { 41 | return tarWrapper.extractArchive(archivePath); 42 | }, () => { 43 | throw new errors.BundleNotFoundError(); 44 | }) 45 | } 46 | 47 | export function push(hash: string, options: LocalOptions) { 48 | const archivePath = path.resolve( 49 | options.directory, 50 | `${hash}.tar${tarWrapper.compression[options.compression]}` 51 | ); 52 | 53 | return fsExtra.stat(archivePath) 54 | .then(() => { 55 | throw new errors.BundleAlreadyExistsError(); 56 | }, () => { 57 | return tarWrapper 58 | .createArchive(archivePath, [path.resolve(process.cwd(), 'node_modules')], options.compression); 59 | }); 60 | } 61 | 
-------------------------------------------------------------------------------- /src/lib/backends/s3.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import AWS from 'aws-sdk'; 3 | import {AWSError} from 'aws-sdk/lib/error'; 4 | import path from 'path'; 5 | import * as tarWrapper from '../commandWrappers/tarWrapper'; 6 | import * as errors from '../errors'; 7 | import {ControlToken} from '@/lib/commandWrappers/helpers'; 8 | import {Compression} from '../commandWrappers/tarWrapper'; 9 | import {Readable} from 'stream'; 10 | import {BackendToolsProvider} from '@/types'; 11 | 12 | export type S3Options = { 13 | compression: Compression, 14 | bucket: string, 15 | objectAcl?: string, 16 | s3Options: { 17 | endpoint?: string, 18 | apiVersion?: string, 19 | }, 20 | __s3: AWS.S3, 21 | } 22 | 23 | export function validateOptions(options: Partial) { 24 | if (options.compression && !(options.compression in tarWrapper.compression)) { 25 | throw new errors.InvalidOptionsError(`Invalid compression: ${options.compression}`); 26 | } 27 | 28 | if (!options.compression) { 29 | options.compression = 'gzip'; 30 | } 31 | 32 | if (!options.bucket) { 33 | throw new errors.InvalidOptionsError('`bucket` option must be provided'); 34 | } 35 | 36 | if (!options.objectAcl) { 37 | options.objectAcl = 'public-read'; 38 | } 39 | 40 | const forcedS3Options = {apiVersion: '2006-03-01'}; 41 | 42 | if (!options.s3Options) { 43 | options.s3Options = forcedS3Options; 44 | } else { 45 | Object.assign(options.s3Options, forcedS3Options); 46 | } 47 | options.__s3 = new AWS.S3(options.s3Options); 48 | } 49 | 50 | export async function pull(hash: string, options: S3Options, _cachedir: string, toolsProvider: BackendToolsProvider) { 51 | const s3 = options.__s3; 52 | const filename = `${hash}.tar${tarWrapper.compression[options.compression]}`; 53 | 54 | let downloadStream: Readable; 55 | 56 | const s3Params = { 57 | Bucket: options.bucket, 58 | Key: filename, 59 | }; 60 | 61 | const logger = toolsProvider.getLogger(); 62 | 63 | let meta; 64 | let contentLength; 65 | try { 66 | logger.trace('[s3 pull] making headObject request to S3'); 67 | meta = await s3.headObject(s3Params).promise(); 68 | contentLength = meta.ContentLength; 69 | } catch (error) { 70 | if (error.statusCode === 404) { 71 | throw new errors.BundleNotFoundError(); 72 | } else { 73 | throw new BundleDownloadError(error.stack); 74 | } 75 | } 76 | 77 | logger.trace('[s3 pull] making getObject request to S3'); 78 | downloadStream = s3.getObject(s3Params).createReadStream(); 79 | 80 | const progressStream = toolsProvider.getProgressStream('pull', contentLength); 81 | 82 | const tarWrapperToken: ControlToken = {}; 83 | const extractPromise = new Promise((resolve, reject) => { 84 | downloadStream.once('readable', () => { 85 | logger.trace('[s3 pull] downloadStream is readable'); 86 | downloadStream.pipe(progressStream); 87 | tarWrapper.extractArchiveFromStream( 88 | progressStream, 89 | options.compression, 90 | {controlToken: tarWrapperToken} 91 | ).then(resolve, reject); 92 | progressStream.toggleVisibility(true); 93 | 94 | }) 95 | }); 96 | 97 | const downloadStreamPromise = new Promise((resolve, reject) => { 98 | let done = false; 99 | 100 | downloadStream.once('error', (error: AWSError) => { 101 | if (!done) { 102 | done = true; 103 | 104 | if (tarWrapperToken.terminate) { 105 | tarWrapperToken.terminate(); 106 | } 107 | 108 | progressStream.die(); 109 | 110 | if (error.statusCode === 404) { 111 | return
reject(new errors.BundleNotFoundError()); 112 | } 113 | 114 | reject(new BundleDownloadError(error.message)); 115 | } 116 | }); 117 | 118 | downloadStream.once('end', () => { 119 | logger.trace('[s3 pull] downloadStream end'); 120 | if (!done) { 121 | done = true; 122 | progressStream.die(); 123 | resolve(); 124 | } 125 | }); 126 | 127 | downloadStream.once('close', () => { 128 | if (!done) { 129 | done = true; 130 | progressStream.die(); 131 | resolve(); 132 | } 133 | }); 134 | }); 135 | 136 | return Promise.all([downloadStreamPromise, extractPromise]); 137 | } 138 | 139 | export async function push(hash: string, options: S3Options, _cachedir: string, toolsProvider: BackendToolsProvider) { 140 | const filename = `${hash}.tar${tarWrapper.compression[options.compression]}`; 141 | const s3 = options.__s3; 142 | 143 | const controlToken: ControlToken = {}; 144 | 145 | let bundleExists = false; 146 | try { 147 | await s3.headObject({ 148 | Bucket: options.bucket, 149 | Key: filename, 150 | }).promise(); 151 | bundleExists = true; 152 | } catch (error) { 153 | if (error.statusCode !== 404) { 154 | throw error; 155 | } 156 | } 157 | 158 | if (bundleExists) { 159 | throw new errors.BundleAlreadyExistsError(); 160 | } 161 | 162 | const progressStream = toolsProvider.getProgressStream('push'); 163 | progressStream.toggleVisibility(true); 164 | 165 | const {stream: tarWrapperStream, promise: tarWrapperPromise} = tarWrapper 166 | .createStreamArchive([path.resolve(process.cwd(), 'node_modules')], options.compression, {controlToken}); 167 | 168 | tarWrapperStream.pipe(progressStream); 169 | 170 | const s3Promise = s3.upload({ 171 | Bucket: options.bucket, 172 | Key: filename, 173 | ACL: options.objectAcl, 174 | Body: progressStream, 175 | }).promise(); 176 | 177 | try { 178 | await Promise.all([tarWrapperPromise, s3Promise]); 179 | } catch (error) { 180 | if (error instanceof errors.VeendorError) { 181 | throw error; 182 | } 183 | 184 | throw new BundleUploadError(`${error.statusCode}: ${error.message}`); 185 | } finally { 186 | if (controlToken.terminate !== undefined) { 187 | controlToken.terminate(); 188 | } 189 | 190 | progressStream.die(); 191 | } 192 | } 193 | 194 | export class BundleDownloadError extends errors.VeendorError {} 195 | export class BundleUploadError extends errors.VeendorError {} 196 | -------------------------------------------------------------------------------- /src/lib/commandWrappers/gitWrapper.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import * as helpers from './helpers'; 3 | import {StdioPolicy} from './helpers'; 4 | import * as errors from '../errors'; 5 | import {getLogger} from '../util/logger'; 6 | 7 | export class NotAGitRepoError extends errors.VeendorError {} 8 | export class GitIsNotADirectoryError extends errors.VeendorError {} 9 | export class GitLfsNotAvailableError extends errors.VeendorError {} 10 | export class TooOldRevisionError extends errors.VeendorError {} 11 | export class RefAlreadyExistsError extends errors.VeendorError {} 12 | 13 | 14 | export async function isGitRepo(directory: string): Promise { 15 | const logger = getLogger(); 16 | 17 | logger.trace(`isGitRepo: ${directory}`); 18 | 19 | try { 20 | await helpers.getOutput('git', ['rev-parse', '--git-dir'], {cwd: directory}); 21 | 22 | return true; 23 | } catch(e) { 24 | return false; 25 | } 26 | } 27 | 28 | export async function isGitLfsAvailable() { 29 | return helpers.getOutput('git', ['lfs']) 30 | .then(() => { 31 | 
return helpers.getOutput('git', ['config', '--list']) 32 | }) 33 | .then(gitConfig => { 34 | if (gitConfig.indexOf('filter.lfs.clean=') === -1 35 | || gitConfig.indexOf('filter.lfs.smudge=') === -1 36 | || gitConfig.indexOf('filter.lfs.process=') === -1) { 37 | throw new Error(); 38 | } 39 | }) 40 | .then(() => true, () => {throw new GitLfsNotAvailableError}) 41 | } 42 | 43 | 44 | /** 45 | * Returns contents of older revision of files 46 | * age == 1 means latest revision, age == 2 means previous, and so on 47 | */ 48 | export async function olderRevision( 49 | gitDirectory: string, filenames: Array, age: number 50 | ): Promise> { 51 | const gitRoot = await resolveGitRoot(gitDirectory); 52 | 53 | const relativeFilenames = filenames.map(filename => { 54 | if (typeof filename === 'string') { 55 | if (!path.isAbsolute(filename)) { 56 | return path.relative(gitRoot, path.resolve(gitDirectory, filename)); 57 | } 58 | 59 | return path.relative(gitRoot, filename); 60 | } 61 | 62 | return null; 63 | }); 64 | 65 | const gitArgs = ['--no-pager', 'log', `-${age}`, '--pretty=format:%h'].concat( 66 | relativeFilenames.filter(filename => typeof filename === 'string') as string[] 67 | ); 68 | 69 | const revisionsText = await helpers.getOutput('git', gitArgs, { 70 | cwd: gitRoot, 71 | }); 72 | 73 | const revisions = revisionsText.trim().split('\n'); 74 | 75 | if (revisions.length < age) { 76 | throw new TooOldRevisionError(); 77 | } 78 | 79 | return Promise.all(relativeFilenames.map(filename => { 80 | if (typeof filename === 'string') { 81 | return helpers.getOutput( 82 | 'git', 83 | ['--no-pager', 'show', revisions[revisions.length - 1] + ':' + filename], 84 | { 85 | cwd: gitRoot, 86 | } 87 | ); 88 | } else { 89 | return Promise.resolve(null); 90 | } 91 | })); 92 | } 93 | 94 | export async function clone(repo: string, directory: string) { 95 | return helpers.getOutput('git', ['clone', repo, directory], { 96 | stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit 97 | }); 98 | } 99 | export async function fetch(gitDirectory: string) { 100 | return helpers.getOutput('git', ['fetch', '--tags'], { 101 | cwd: gitDirectory, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit 102 | }); 103 | } 104 | 105 | export async function lfsPull(gitDirectory: string) { 106 | return helpers.getOutput('git', ['lfs', 'pull'], { 107 | cwd: gitDirectory, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit 108 | }); 109 | } 110 | 111 | export async function checkout(gitDirectory: string, gitId: string) { 112 | return helpers.getOutput('git', ['checkout', gitId], {cwd: gitDirectory}); 113 | } 114 | 115 | export async function add(gitDirectory: string, paths: string[], force = false) { 116 | const args = ['add']; 117 | if (force) { 118 | args.push('--force'); 119 | } 120 | return helpers.getOutput('git', args.concat(paths), {cwd: gitDirectory}); 121 | } 122 | export async function commit(gitDirectory: string, message: string) { 123 | return helpers.getOutput('git', ['commit', '-m', message], {cwd: gitDirectory}); 124 | } 125 | 126 | export async function push(gitDirectory: string, gitId: string) { 127 | return helpers.getOutput('git', ['remote'], {cwd: gitDirectory}) 128 | .then(remote => { 129 | return helpers.getOutput( 130 | 'git', 131 | ['push', remote.trim(), gitId], 132 | {cwd: gitDirectory, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit} 133 | ); 134 | }).catch(error => { 135 | if (!(error instanceof helpers.CommandReturnedNonZeroError)) { 136 | throw error; 137 | } 138 | 139 | if (error.output.indexOf(' already 
exists') !== -1) { 140 | throw new RefAlreadyExistsError(); 141 | } 142 | 143 | throw error; 144 | }); 145 | } 146 | export async function tag(gitDirectory: string, tagName: string) { 147 | return helpers.getOutput('git', ['tag', tagName], {cwd: gitDirectory}) 148 | .catch(error => { 149 | if (!(error instanceof helpers.CommandReturnedNonZeroError)) { 150 | throw error; 151 | } 152 | 153 | if (error.output.indexOf(' already exists') !== -1) { 154 | throw new RefAlreadyExistsError(); 155 | } 156 | 157 | throw error; 158 | }); 159 | } 160 | 161 | export async function resetToRemote(gitDirectory: string, branch: string) { 162 | return helpers.getOutput('git', ['remote'], {cwd: gitDirectory}) 163 | .then(remote => 164 | helpers.getOutput( 165 | 'git', 166 | ['reset', '--hard', `${remote.trim()}/${branch}`], 167 | {cwd: gitDirectory, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit} 168 | ) 169 | ); 170 | } 171 | 172 | async function resolveGitRoot(directory: string): Promise { 173 | return (await helpers.getOutput('git', ['rev-parse', '--show-toplevel'], {cwd: directory})).trim(); 174 | } 175 | -------------------------------------------------------------------------------- /src/lib/commandWrappers/helpers.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import childProcess from 'child_process'; 4 | import {Writable, Readable} from 'stream'; 5 | 6 | import * as errors from '../errors'; 7 | import {getLogger} from '../util/logger'; 8 | 9 | export class CommandError extends errors.VeendorError { 10 | constructor(message: string, public output: string) { 11 | super(message); 12 | } 13 | } 14 | export class CommandTimeoutError extends CommandError {} 15 | export class CommandReturnedNonZeroError extends CommandError {} 16 | export class CommandWasKilledError extends CommandError {} 17 | 18 | export type ControlToken = { 19 | terminate?: () => void; 20 | stdio?: [Writable, Readable, Readable]; 21 | } 22 | 23 | export enum StdioPolicy { 24 | inherit, // `process.stdout` or `process.stderr` is gonna be passed to child process. 
25 | // getOutput will not get data from the corresponding stream 26 | copy, // each chunk sent to stdout/stderr is recorded to `getOutput`'s result and copied to 27 | // `process.stdout` / `process.stderr` 28 | collect, // only record output to `getOutput` result 29 | pipe, // do not record output; corresponding stream will be available at controlToken's stdio 30 | ignore, // attach /dev/null to the stream 31 | } 32 | 33 | type GetOutputOptions = { 34 | controlToken?: ControlToken // You can pass an empty object here and it will populate 35 | // with useful stuff 36 | 37 | timeoutDuration?: number, // Terminate command after x msec 38 | 39 | cwd?: string, 40 | 41 | stdout?: StdioPolicy, 42 | stderr?: StdioPolicy, 43 | } 44 | 45 | function stdioPolicyToCpStdio(policy: StdioPolicy, fd: number): 'ignore' | 'pipe' | number { 46 | if (policy === StdioPolicy.inherit) { 47 | return fd; 48 | } else if (policy === StdioPolicy.ignore) { 49 | return 'ignore'; 50 | } 51 | 52 | return 'pipe'; 53 | } 54 | 55 | export function getOutput(executable: string, args: string[], { 56 | timeoutDuration = 0, 57 | cwd = process.cwd(), 58 | controlToken = {}, 59 | stdout = StdioPolicy.collect, 60 | stderr = StdioPolicy.collect, 61 | }: GetOutputOptions = {}): Promise { 62 | return new Promise((resolve, reject) => { 63 | const commandName = `[${executable} ${args.join(' ')}]`; 64 | const logger = getLogger(); 65 | 66 | let result = ''; 67 | let completed = false; 68 | let timeout: NodeJS.Timer; 69 | 70 | logger.debug(`Running ${commandName}; cwd: ${cwd}`); 71 | const proc = childProcess.spawn(executable, args, { 72 | stdio: ['pipe', stdioPolicyToCpStdio(stdout, 1), stdioPolicyToCpStdio(stderr, 2)], 73 | cwd, 74 | }); 75 | controlToken.terminate = () => { 76 | logger.debug(`Terminating ${commandName} using control token`); 77 | proc.kill(); 78 | }; 79 | 80 | const deathHand = () => proc.kill(); 81 | 82 | process.on('exit', deathHand); 83 | 84 | controlToken.stdio = proc.stdio; 85 | 86 | if (timeoutDuration !== 0) { 87 | timeout = setTimeout(() => { 88 | if (!completed) { 89 | const message = `command ${commandName} timed out (${timeoutDuration} ms)`; 90 | logger.debug(message); 91 | reject(new CommandTimeoutError(message, result)); 92 | 93 | completed = true; 94 | } 95 | }, timeoutDuration); 96 | } 97 | 98 | if (stdout === StdioPolicy.collect || stdout === StdioPolicy.copy) { 99 | proc.stdout.on('data', data => { 100 | result += data.toString(); 101 | 102 | if (stdout === StdioPolicy.copy) { 103 | process.stdout.write(data); 104 | } 105 | }); 106 | } 107 | 108 | if (stderr === StdioPolicy.collect || stderr === StdioPolicy.copy) { 109 | proc.stderr.on('data', data => { 110 | result += data.toString(); 111 | 112 | if (stderr === StdioPolicy.copy) { 113 | process.stderr.write(data); 114 | } 115 | }); 116 | } 117 | 118 | proc.on('exit', (code, signal) => { 119 | process.removeListener('exit', deathHand); 120 | if (!completed) { 121 | if (code === 0) { 122 | logger.debug(`Command ${commandName} exited with 0`); 123 | resolve(result); 124 | } else if (code) { 125 | const message = `command ${commandName} returned ${code}`; 126 | logger.debug(message); 127 | reject(new CommandReturnedNonZeroError(message, result)); 128 | } else { 129 | const message = `command ${commandName} killed with signal ${signal}`; 130 | logger.debug(message); 131 | reject(new CommandWasKilledError(message, result)); 132 | } 133 | clearTimeout(timeout); 134 | completed = true; 135 | } 136 | }); 137 | 138 | proc.on('error', error => { 139 | if (!completed) {
const message = `command ${commandName} failed: ${error.message}`; 141 | logger.debug(message); 142 | reject(new CommandError(message, result)); 143 | clearTimeout(timeout); 144 | completed = true; 145 | } 146 | }); 147 | }); 148 | } 149 | -------------------------------------------------------------------------------- /src/lib/commandWrappers/npmWrapper.ts: -------------------------------------------------------------------------------- 1 | import _ from 'lodash'; 2 | import * as helpers from './helpers'; 3 | import {StringMap} from '@/serviceTypes'; 4 | import {StdioPolicy} from '@/lib/commandWrappers/helpers'; 5 | 6 | export function install(packages: StringMap, timeoutDuration = 0) { 7 | const args = ['install', '--no-save']; 8 | 9 | _.forOwn(packages, (version, pkgname) => { 10 | args.push(`${pkgname}@${version}`); 11 | }); 12 | 13 | return helpers.getOutput('npm', args, { 14 | timeoutDuration, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit 15 | }); 16 | } 17 | 18 | export function installAll(timeoutDuration = 0) { 19 | return helpers.getOutput('npm', ['install', '--no-save'], { 20 | timeoutDuration, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit 21 | }); 22 | } 23 | 24 | export function version() { 25 | return helpers.getOutput('npm', ['--version']); 26 | } 27 | 28 | export function uninstall(packages: string[], timeoutDuration = 0) { 29 | const args = ['uninstall'].concat(packages); 30 | 31 | return helpers.getOutput('npm', args, { 32 | timeoutDuration, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit 33 | }); 34 | } 35 | 36 | export function dedupe(timeoutDuration = 0) { 37 | return helpers.getOutput('npm', ['dedupe'], { 38 | timeoutDuration, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit 39 | }); 40 | } 41 | -------------------------------------------------------------------------------- /src/lib/commandWrappers/rsyncWrapper.ts: -------------------------------------------------------------------------------- 1 | import * as helpers from './helpers'; 2 | 3 | export function syncDirs(from: string, to: string) { 4 | return helpers.getOutput('rsync', ['-a', '--delete', from, to]); 5 | } 6 | 7 | export function rsyncAvailable(): Promise { 8 | return new Promise((resolve) => { 9 | helpers.getOutput('which', ['rsync']) 10 | .then(() => resolve(true), () => resolve(false)) 11 | }); 12 | } 13 | -------------------------------------------------------------------------------- /src/lib/commandWrappers/tarWrapper.ts: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import {Readable} from 'stream'; 3 | 4 | import * as errors from '../errors'; 5 | import * as helpers from './helpers'; 6 | import {ControlToken} from './helpers'; 7 | import {StdioPolicy} from '@/lib/commandWrappers/helpers'; 8 | 9 | 10 | export type Compression = 'gzip'| 'bzip2' | 'xz' 11 | 12 | export const compression = { 13 | gzip: '.gz', 14 | bzip2: '.bz2', 15 | xz: '.xz', 16 | }; 17 | 18 | 19 | export function createArchive(outPath: string, inputPaths: string[], compressionType: string) { 20 | const baseDir = path.dirname(inputPaths[0]); 21 | const pathsToAdd = inputPaths.map(p => path.relative(baseDir, p)); 22 | const args = [ 23 | '--create', 24 | `--${compressionType}`, 25 | '--file', 26 | outPath, 27 | ...pathsToAdd 28 | ]; 29 | 30 | return helpers.getOutput('tar', args, {cwd: baseDir, stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit}); 31 | } 32 | 33 | export function extractArchive(archive: string) { 34 | const args = 
['--extract', '--file', archive]; 35 | 36 | return helpers.getOutput('tar', args, {stdout: StdioPolicy.copy, stderr: StdioPolicy.inherit}); 37 | } 38 | 39 | class ControlTokenError extends errors.VeendorError {} 40 | 41 | export function createStreamArchive( 42 | inputPaths: string[], compressionType: Compression, {controlToken = {}}: {controlToken: ControlToken} 43 | ): {stream: NodeJS.ReadableStream, promise: Promise} { 44 | const baseDir = path.dirname(inputPaths[0]); 45 | const pathsToAdd = inputPaths.map(p => path.relative(baseDir, p)); 46 | const args = [ 47 | '--create', 48 | `--${compressionType}`, 49 | '--file', 50 | '-', 51 | ...pathsToAdd, 52 | ]; 53 | 54 | const procPromise = helpers.getOutput( 55 | 'tar', args, {stdout: StdioPolicy.pipe, stderr: StdioPolicy.pipe, controlToken} 56 | ); 57 | 58 | if (!controlToken.stdio) { 59 | throw new ControlTokenError('child_process stdio is not available'); 60 | } 61 | 62 | return { 63 | stream: controlToken.stdio[1], 64 | promise: procPromise, 65 | }; 66 | } 67 | 68 | export function extractArchiveFromStream(archiveStream: Readable, compressionType: Compression, {controlToken = {}}: {controlToken: ControlToken}) { 69 | const args = ['--extract', `--${compressionType}`, '--file', '-']; 70 | 71 | const procPromise = helpers.getOutput('tar', args, { 72 | stdout: StdioPolicy.pipe, stderr: StdioPolicy.pipe, controlToken 73 | }); 74 | if (controlToken.stdio) { 75 | archiveStream.pipe(controlToken.stdio[0]); 76 | return procPromise; 77 | } else { 78 | throw new ControlTokenError('child_process stdio is not available'); 79 | } 80 | } 81 | -------------------------------------------------------------------------------- /src/lib/deepSortedJson.ts: -------------------------------------------------------------------------------- 1 | /** 2 | * returns sorted array of object contents 3 | * see deepSortedJson.test.js for examples 4 | * @param {Object} jsonObject 5 | * @returns {string[]} 6 | */ 7 | 8 | import {JSONValue, JSONObject} from '@/serviceTypes'; 9 | 10 | function isJSONObject(obj: JSONValue): obj is JSONObject { 11 | return obj !== null && typeof obj === 'object'; 12 | } 13 | 14 | function deepSortedJson(jsonObject: JSONObject): string[] { 15 | const tmpObj = {...jsonObject}; 16 | const result = []; 17 | 18 | while (true) { 19 | const tmpObjkeys = Object.keys(tmpObj); 20 | 21 | if (tmpObjkeys.length === 0) { 22 | break; 23 | } 24 | 25 | for (const i of tmpObjkeys) { 26 | const val: JSONValue = tmpObj[i]; 27 | 28 | if (val instanceof Array) { 29 | for (const [index, value] of val.entries()) { 30 | tmpObj[`${i}[${index}]`] = value; 31 | } 32 | } else if (isJSONObject(val)) { 33 | const keys = Object.keys(val); 34 | 35 | if (keys.length === 0) { 36 | result.push(`${i}`); 37 | } else { 38 | for (const key of keys) { 39 | tmpObj[`${i}.${key}`] = val[key]; 40 | } 41 | } 42 | } else { 43 | result.push(`${i}=${val}`); 44 | } 45 | 46 | delete tmpObj[i]; 47 | } 48 | } 49 | 50 | return result.sort(); 51 | } 52 | 53 | export {deepSortedJson as transform}; 54 | -------------------------------------------------------------------------------- /src/lib/errors.ts: -------------------------------------------------------------------------------- 1 | export class VeendorError extends Error { 2 | constructor(message: string = ' ') { 3 | super(message); 4 | } 5 | } 6 | 7 | export class BundleAlreadyExistsError extends VeendorError {} 8 | export class BundleNotFoundError extends VeendorError {} 9 | export class InvalidOptionsError extends VeendorError {} 10 | export 
class RePullNeeded extends VeendorError {} 11 | -------------------------------------------------------------------------------- /src/lib/install/hashGetters.ts: -------------------------------------------------------------------------------- 1 | import fsExtra from 'fs-extra'; 2 | import {Config, ConfigWithHistory, PkgJson} from '@/types'; 3 | import {getLogger} from '@/lib/util/logger'; 4 | import * as gitWrapper from '@/lib/commandWrappers/gitWrapper'; 5 | import * as pkgJsonUtils from '@/lib/pkgjson'; 6 | import {BundlesNotFoundError, PkgJsonNotFoundError} from '@/lib/install/index'; 7 | import * as helpers from '@/lib/install/helpers'; 8 | 9 | const {pkgJsonPath, originalCwd} = helpers.paths; 10 | 11 | export async function getHistoryHash( 12 | config: ConfigWithHistory, lockfilePath: string | null = null, oldHash: string | null = null, historyIndexStart = 0 13 | ): Promise<{ hash: string, historyIndexEnd: number, pkgJson: PkgJson }> { 14 | const logger = getLogger(); 15 | 16 | logger.trace(`Running getHistoryHash. pkgJsonPath: ${pkgJsonPath};` + 17 | ` lockfilePath: ${lockfilePath}, historyIndexStart: ${historyIndexStart}`); 18 | 19 | let currentHistoryIndex = historyIndexStart; 20 | 21 | while (true) { 22 | currentHistoryIndex++; 23 | 24 | const [pkgJsonString, lockfileString] = await gitWrapper.olderRevision( 25 | originalCwd, [pkgJsonPath, lockfilePath], currentHistoryIndex 26 | ); 27 | 28 | const pkgJson = await pkgJsonUtils.parsePkgJson(pkgJsonString); 29 | const lockfileContents = typeof lockfileString === 'string' ? JSON.parse(lockfileString) : null; 30 | 31 | const hash = pkgJsonUtils.calcHash(pkgJson, lockfileContents, config.packageHash); 32 | 33 | if (hash === oldHash) { 34 | const message = `Hash at index '${currentHistoryIndex}' is still '${hash}'. Incrementing history depth`; 35 | logger.trace(message); 36 | config.useGitHistory.depth++; 37 | 38 | if (currentHistoryIndex > config.useGitHistory.depth) { 39 | throw new BundlesNotFoundError( 40 | `Backends don't have bundles up to ${config.useGitHistory.depth} entries in git history of ${pkgJsonPath}` 41 | ); 42 | } 43 | } else { 44 | return {hash, historyIndexEnd: currentHistoryIndex, pkgJson}; 45 | } 46 | } 47 | } 48 | 49 | export async function getFSHash( 50 | config: Config, pkgJsonPath: string, lockfilePath: string | null 51 | ): Promise<{ hash: string, pkgJson: PkgJson }> { 52 | 53 | const logger = getLogger(); 54 | const result: Array<Promise<any> | null> = []; 55 | 56 | logger.trace(`Running getFSHash. 
pkgJsonPath: ${pkgJsonPath}; lockfilePath: ${lockfilePath}`); 57 | 58 | logger.trace('Reading package.json'); 59 | result.push(fsExtra 60 | .readFile(pkgJsonPath) 61 | .then(pkgJsonBuf => { 62 | const pkgJsonString = pkgJsonBuf.toString(); 63 | 64 | logger.trace('Parsing package.json'); 65 | return pkgJsonUtils.parsePkgJson(pkgJsonString); 66 | }, (err: Error | NodeJS.ErrnoException) => { 67 | if (isNodeJSException(err) && err.code === 'ENOENT') { 68 | throw new PkgJsonNotFoundError('Couldn\'t find package.json file'); 69 | } 70 | throw err; 71 | 72 | })); 73 | 74 | if (lockfilePath !== null) { 75 | logger.trace(`Reading ${lockfilePath}`); 76 | result.push(fsExtra 77 | .readFile(lockfilePath) 78 | .then(lockfileBuf => { 79 | logger.trace(`Parsing ${lockfilePath}`); 80 | return JSON.parse(lockfileBuf.toString()); 81 | })); 82 | } else { 83 | result.push(null); 84 | } 85 | 86 | const [pkgJson, lockfileContents] = await Promise.all(result); 87 | 88 | 89 | logger.debug(`Got dependencies:\t${JSON.stringify(pkgJson.dependencies)}`); 90 | logger.debug(`Got devDependencies:\t${JSON.stringify(pkgJson.devDependencies)}`); 91 | 92 | logger.trace('Calculating hash'); 93 | const hash = pkgJsonUtils.calcHash(pkgJson, lockfileContents, config.packageHash); 94 | return {hash, pkgJson}; 95 | } 96 | 97 | function isNodeJSException(err: Error | NodeJS.ErrnoException): err is NodeJS.ErrnoException { 98 | return (err as NodeJS.ErrnoException).code !== undefined; 99 | } 100 | 101 | -------------------------------------------------------------------------------- /src/lib/install/helpers.ts: -------------------------------------------------------------------------------- 1 | import crypto from 'crypto'; 2 | import fsExtra from 'fs-extra'; 3 | import os from 'os'; 4 | import path from 'path'; 5 | import {getLogger} from '../util/logger'; 6 | import {BackendConfig} from '@/types'; 7 | 8 | const originalCwd = process.cwd(); 9 | 10 | export async function createCleanCacheDir(backendConfig: BackendConfig): Promise<string> { 11 | const logger = getLogger(); 12 | logger.trace(`Running 'createCleanCacheDir' for ${backendConfig.alias}`); 13 | 14 | const cacheDirPath = path.join(getTmpDir(), backendConfig.alias); 15 | 16 | if (backendConfig.backend.keepCache) { 17 | logger.trace(`Running 'ensureDir' for ${cacheDirPath}`); 18 | await fsExtra.ensureDir(cacheDirPath); 19 | return cacheDirPath; 20 | } 21 | 22 | logger.trace(`Running 'emptyDir' for ${cacheDirPath}`); 23 | return fsExtra.emptyDir(cacheDirPath) 24 | .then(() => { 25 | logger.trace(`Cache directory for backend '${backendConfig.alias}' is set`); 26 | return cacheDirPath; 27 | }); 28 | } 29 | 30 | export async function createCleanCwd(lockfilePath: string | null) { 31 | const logger = getLogger(); 32 | logger.trace('Running \'createCleanCwd\''); 33 | 34 | const newCwdDirPath = path.join(getTmpDir(), '__result'); 35 | await fsExtra.ensureDir(newCwdDirPath); 36 | 37 | logger.trace(`New CWD: '${newCwdDirPath}'`); 38 | process.chdir(newCwdDirPath); 39 | await fsExtra.emptyDir(process.cwd()); 40 | 41 | await fsExtra.copy(path.join(originalCwd, 'package.json'), path.join(process.cwd(), 'package.json')); 42 | if (lockfilePath !== null) { 43 | await fsExtra.copy(path.join(originalCwd, lockfilePath), path.join(process.cwd(), lockfilePath)); 44 | } 45 | } 46 | 47 | export function getTmpDir() { 48 | const tmpDir = os.tmpdir(); 49 | const cwdHash = crypto.createHash('sha1'); 50 | cwdHash.update(process.cwd()); 51 | return path.join(tmpDir, `veendor-${cwdHash.digest('hex')}`);
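// Illustration (hypothetical values): with os.tmpdir() === '/tmp' and a cwd of
// '/home/user/project', this returns '/tmp/veendor-<sha1-of-cwd-in-hex>'. Hashing the cwd
// keeps the temp/cache directory stable for a given project while isolating projects from
// each other; executeBashTest() in integration.test.ts recomputes the same path to locate it.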
52 | } 53 | 54 | export function restoreCWD() { 55 | if (process.cwd() !== originalCwd) { 56 | const logger = getLogger(); 57 | logger.trace(`Restoring CWD from '${process.cwd()}' to '${originalCwd}'`); 58 | process.chdir(originalCwd); 59 | } 60 | } 61 | 62 | export const paths = { 63 | nodeModules: path.resolve(process.cwd(), 'node_modules'), 64 | pkgJsonPath: path.resolve(process.cwd(), 'package.json'), 65 | originalCwd, 66 | }; 67 | -------------------------------------------------------------------------------- /src/lib/install/index.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import {getLogger} from '@/lib/util/logger'; 4 | import * as helpers from './helpers'; 5 | import {pushBackends} from './pushBackends'; 6 | import * as rsyncWrapper from '@/lib/commandWrappers/rsyncWrapper'; 7 | import * as npmWrapper from '@/lib/commandWrappers/npmWrapper'; 8 | import * as gitWrapper from '@/lib/commandWrappers/gitWrapper'; 9 | import * as errors from '@/lib/errors'; 10 | import _ from 'lodash'; 11 | import * as objectDiff from 'deep-object-diff'; 12 | import path from 'path'; 13 | import fsExtra from 'fs-extra'; 14 | 15 | import {BackendCalls, BackendConfig, Config, configHasHistory, PkgJson} from '@/types'; 16 | import {getFSHash, getHistoryHash} from '@/lib/install/hashGetters'; 17 | import {provideBackendCallTools} from '@/lib/util/progress'; 18 | 19 | const {nodeModules, pkgJsonPath, originalCwd} = helpers.paths; 20 | // let clearNodeModulesPromise: Promise<void>; 21 | let isRsyncModeEnabled = false; 22 | 23 | type PullInfo = { 24 | missingBackends: BackendConfig[]; 25 | } 26 | 27 | enum InstallStages { 28 | firstPull, 29 | pullFromGitHistory, 30 | npmInstallDiff, 31 | npmDedupe, 32 | npmInstallAll, 33 | pushing, 34 | } 35 | 36 | export type InstallParams = { 37 | force?: boolean, // remove node_modules if exist 38 | config: Config, 39 | lockfilePath?: string | null, // path to lockfile, detected at startup. 
null, if no lockfile detected 40 | rsyncMode?: boolean, 41 | }; 42 | 43 | export default async function install( 44 | {force = false, config, lockfilePath = null, rsyncMode = false}: InstallParams 45 | ): Promise<void> { 46 | const logger = getLogger(); 47 | 48 | let backendsToPush: BackendConfig[] = []; 49 | 50 | const [rsyncAvailable, nodeModulesInPlace] = await Promise.all([ 51 | rsyncWrapper.rsyncAvailable(), 52 | nodeModulesAlreadyExist(), 53 | ]); 54 | 55 | isRsyncModeEnabled = rsyncMode && rsyncAvailable && nodeModulesInPlace; 56 | 57 | if (isRsyncModeEnabled) { 58 | logger.info('Working in rsync mode'); 59 | } 60 | 61 | const isGitRepo = await gitWrapper.isGitRepo(originalCwd); 62 | 63 | if (nodeModulesInPlace) { 64 | if (!force) { 65 | throw new NodeModulesAlreadyExistError(); 66 | } 67 | 68 | if (!isRsyncModeEnabled) { 69 | logger.trace('Started removing node_modules'); 70 | clearNodeModules().then( 71 | () => {logger.trace('Successfully removed node_modules');}, 72 | err => {logger.debug(`Error during node_modules removal: ${err.stack}`);} 73 | ); 74 | } 75 | } 76 | 77 | /** 78 | * Calculating current hash 79 | */ 80 | let {hash, pkgJson} = await getFSHash(config, pkgJsonPath, lockfilePath); 81 | logger.info(`Got hash:\t${hash}`); 82 | 83 | /** 84 | * Downloading deps 85 | */ 86 | let installStage: InstallStages = InstallStages.firstPull; 87 | let tryingHash = hash; 88 | let tryingPkgJson = pkgJson; 89 | let historyIndexStart = 0; 90 | while (true) { 91 | try { 92 | if (installStage === InstallStages.firstPull) { 93 | const info = await pullBackends(tryingHash, config, lockfilePath); 94 | backendsToPush = info.missingBackends; 95 | installStage = InstallStages.pushing; 96 | break; 97 | } 98 | 99 | if (installStage === InstallStages.pullFromGitHistory) { 100 | await pullBackends(tryingHash, config, lockfilePath); 101 | installStage = InstallStages.npmInstallDiff; 102 | continue; 103 | } 104 | 105 | if (installStage === InstallStages.npmInstallDiff) { 106 | await installDiff(tryingPkgJson, pkgJson); 107 | backendsToPush = config.backends; 108 | if (config.dedupe) { 109 | installStage = InstallStages.npmDedupe; 110 | continue; 111 | } else { 112 | installStage = InstallStages.pushing; 113 | break; 114 | } 115 | } 116 | 117 | if (installStage === InstallStages.npmDedupe) { 118 | logger.info(`Running 'npm dedupe'`); 119 | await npmWrapper.dedupe(); 120 | installStage = InstallStages.pushing; 121 | break; 122 | } 123 | 124 | if (installStage === InstallStages.npmInstallAll) { 125 | await npmInstallAll(); 126 | backendsToPush = config.backends; 127 | installStage = InstallStages.pushing; 128 | break; 129 | } 130 | } catch (pullError) { 131 | if (pullError instanceof BundlesNotFoundError) { 132 | if (installStage === InstallStages.firstPull || installStage === InstallStages.pullFromGitHistory) { 133 | if (configHasHistory(config) && isGitRepo) { 134 | installStage = InstallStages.pullFromGitHistory; 135 | try { 136 | const res = await getHistoryHash(config, lockfilePath, tryingHash, historyIndexStart); 137 | tryingHash = res.hash; 138 | tryingPkgJson = res.pkgJson; 139 | historyIndexStart = res.historyIndexEnd; 140 | continue; 141 | } catch (historyHashError) { 142 | if (historyHashError instanceof BundlesNotFoundError) { 143 | logger.trace(historyHashError); 144 | } 145 | } 146 | } 147 | 148 | if (!config.fallbackToNpm) { 149 | logger.error( 150 | `Couldn't find bundle with hash '${hash}'. 'fallbackToNpm' isn't set. 
Exiting` 151 | ); 152 | throw pullError; 153 | } 154 | 155 | installStage = InstallStages.npmInstallAll; 156 | } 157 | } else { 158 | throw pullError; 159 | } 160 | } 161 | } 162 | 163 | /** 164 | * Pushing bundle 165 | */ 166 | try { 167 | await pushBackends(backendsToPush, hash, false, config.clearSharedCache); 168 | } catch (pushError) { 169 | if (pushError instanceof errors.RePullNeeded) { 170 | // this happens if we failed to push the bundle because someone got there faster than us. 171 | // in this case, we download the bundle that someone else has built. 172 | // pushBackends only throws RePullNeeded when its rePull flag is false, so a repeated 173 | // BundleAlreadyExistsError can't send us into an infinite re-pull loop here 174 | 175 | await pullBackends(hash, config, lockfilePath); 176 | 177 | } else { 178 | throw pushError; 179 | } 180 | } 181 | } 182 | 183 | async function nodeModulesAlreadyExist(): Promise<boolean> { 184 | const logger = getLogger(); 185 | logger.trace('Checking node_modules'); 186 | 187 | try { 188 | await fsExtra.access(nodeModules); 189 | logger.trace('\'node_modules\' directory already exists'); 190 | return true; 191 | } catch (err) { 192 | logger.trace('node_modules not found'); 193 | return false; 194 | } 195 | } 196 | 197 | async function clearNodeModules(): Promise<void> { 198 | const logger = getLogger(); 199 | if (isRsyncModeEnabled) { 200 | return; 201 | } 202 | 203 | logger.trace(`moving node_modules to node_modules.bak.0`); 204 | 205 | let bakCount = 0; 206 | let bakDirname; 207 | while (true) { 208 | bakDirname = `${nodeModules}.bak.${bakCount}`; 209 | logger.trace(`moving node_modules to ${bakDirname}`); 210 | try { 211 | await fsExtra.stat(bakDirname); 212 | logger.trace(`${bakDirname} already exists; incrementing`); 213 | bakCount++; 214 | } catch (err) { 215 | if (err.code && err.code === 'ENOENT') { 216 | await fsExtra.rename(nodeModules, bakDirname); 217 | logger.trace(`move was successful; removing ${bakDirname} without blocking`); 218 | return fsExtra.remove(bakDirname); 219 | } else { throw err; } // don't loop forever on unexpected stat errors 220 | } 221 | } 222 | } 223 | 224 | async function pullBackends( 225 | hash: string, config: Config, lockfilePath: string | null, backendIndex = 0 226 | ): Promise<PullInfo> { 227 | const logger = getLogger(); 228 | const backendConfig = config.backends[backendIndex]; 229 | 230 | if (!backendConfig) { 231 | throw new BundlesNotFoundError(`Backends don't have bundle ${hash}`); 232 | } 233 | 234 | logger.info(`Trying backend '${backendConfig.alias}' with hash ${hash}`); 235 | 236 | try { 237 | const cacheDirPath = await helpers.createCleanCacheDir(backendConfig); 238 | 239 | if (isRsyncModeEnabled) { 240 | await helpers.createCleanCwd(lockfilePath); 241 | } 242 | 243 | await backendConfig.backend.pull( 244 | hash, backendConfig.options, cacheDirPath, 245 | provideBackendCallTools(backendConfig, BackendCalls.pull) 246 | ); 247 | 248 | if (isRsyncModeEnabled) { 249 | logger.info(`Successfully fetched ${hash} from '${backendConfig.alias}'. 
Unpacking.`); 250 | 251 | const newNodeModules = path.resolve(process.cwd(), 'node_modules'); 252 | helpers.restoreCWD(); 253 | await rsyncWrapper.syncDirs(newNodeModules, process.cwd()); 254 | } 255 | 256 | logger.info(`Pulled ${hash} from backend '${backendConfig.alias}'`); 257 | 258 | return {missingBackends: config.backends.slice(0, backendIndex)}; 259 | } catch (error) { 260 | helpers.restoreCWD(); 261 | 262 | if (error instanceof errors.BundleNotFoundError) { 263 | return pullBackends(hash, config, lockfilePath, backendIndex + 1); 264 | } else { 265 | logger.error( 266 | `Backend '${backendConfig.alias}' failed on pull` 267 | ); 268 | throw error; 269 | } 270 | } 271 | } 272 | 273 | async function installDiff(oldPkgJson: PkgJson, newPkgJson: PkgJson): Promise<void> { 274 | const logger = getLogger(); 275 | const allDepsOld = Object.assign({}, oldPkgJson.devDependencies, oldPkgJson.dependencies); 276 | const allDepsNew = Object.assign({}, newPkgJson.devDependencies, newPkgJson.dependencies); 277 | const depsDiff = objectDiff.diff(allDepsOld, allDepsNew); 278 | const depsToInstall = _.omitBy(depsDiff, _.isUndefined); 279 | const depsToUninstall = _.keys(_.pickBy(depsDiff, _.isUndefined)); 280 | 281 | const loggingDepsToInstall = 'Installing dependencies: ' + 282 | Object.keys(depsToInstall).map(pkg => `${pkg}@${depsToInstall[pkg]}`).join(' '); 283 | 284 | const loggingDepsToUninstall = 'Uninstalling dependencies: ' + depsToUninstall.join(' '); 285 | 286 | if (_.keys(depsToInstall).length) { 287 | logger.info(loggingDepsToInstall); 288 | 289 | await npmWrapper.install(depsToInstall); 290 | } 291 | 292 | if (depsToUninstall.length) { 293 | logger.info(loggingDepsToUninstall); 294 | await npmWrapper.uninstall(depsToUninstall); 295 | } 296 | } 297 | 298 | function npmInstallAll() { 299 | const logger = getLogger(); 300 | 301 | logger.info('Couldn\'t find bundles. Running npm install'); 302 | 303 | return npmWrapper.installAll(); 304 | } 305 | 306 | export class PkgJsonNotFoundError extends errors.VeendorError {} 307 | export class NodeModulesAlreadyExistError extends errors.VeendorError { 308 | constructor() { 309 | super('NodeModulesAlreadyExistError'); 310 | } 311 | } 312 | export class BundlesNotFoundError extends errors.VeendorError {} 313 | -------------------------------------------------------------------------------- /src/lib/install/pushBackends.ts: -------------------------------------------------------------------------------- 1 | import fsExtra from 'fs-extra'; 2 | import path from 'path'; 3 | 4 | import {BackendCalls, BackendConfig} from '@/types'; 5 | 6 | import * as errors from '../errors'; 7 | import {getLogger} from '../util/logger'; 8 | import * as helpers from './helpers'; 9 | import {provideBackendCallTools} from '../util/progress'; 10 | 11 | export async function pushBackends( 12 | backendConfigs: BackendConfig[], hash: string, rePull?: boolean, clearCache?: boolean): Promise<void> { 13 | const logger = getLogger(); 14 | logger.trace(`Pushing '${hash}' to backends`); 15 | 16 | const pushingBackends = backendConfigs.filter(backend => backend.push); 17 | if (pushingBackends.length === 0 && backendConfigs.length > 0) { 18 | logger.info(`No backends with push: true found. 
Exiting`); 19 | return; 20 | } 21 | 22 | const dirPromises = pushingBackends.map(backend => { 23 | return helpers.createCleanCacheDir(backend); 24 | }); 25 | 26 | const sharedCachePath = path.join(process.cwd(), 'node_modules', '.cache'); 27 | 28 | if (clearCache) { 29 | // fsExtra.pathExists resolves to a boolean and never rejects for missing paths, 30 | // so check the result directly instead of relying on a rejection handler 31 | if (await fsExtra.pathExists(sharedCachePath)) { 32 | logger.info(`Shared cache directory found at '${sharedCachePath}'. Removing`); 33 | await fsExtra.remove(sharedCachePath); 34 | } 35 | } 36 | 37 | 38 | const cacheDirs = await Promise.all(dirPromises); 39 | 40 | const pushingPromises: Promise<void>[] = []; 41 | 42 | for (const [index, backend] of pushingBackends.entries()) { 43 | logger.info(`Pushing '${hash}' to '${backend.alias}' backend`); 44 | 45 | let promise = backend.backend 46 | .push(hash, backend.options, cacheDirs[index], provideBackendCallTools(backend, BackendCalls.push)) 47 | .then(() => { 48 | logger.info(`Pushing '${hash}' to '${backend.alias}' backend completed successfully`); 49 | }); 50 | 51 | if (backend.pushMayFail) { 52 | promise = promise.catch((error: Error) => {logger.warn(error)}); 53 | } 54 | 55 | pushingPromises.push(promise); 56 | } 57 | 58 | try { 59 | await Promise.all(pushingPromises); 60 | } catch (error) { 61 | if (error instanceof errors.BundleAlreadyExistsError) { 62 | if (!rePull) { 63 | const message = `Bundle '${hash}' already exists in remote repo! Re-pulling it`; 64 | logger.error(message); 65 | throw new errors.RePullNeeded(message); 66 | } 67 | } 68 | 69 | throw error; 70 | } 71 | 72 | logger.debug('Pushing to all backends completed successfully'); 73 | } 74 | -------------------------------------------------------------------------------- /src/lib/pkgjson.ts: -------------------------------------------------------------------------------- 1 | import crypto from 'crypto'; 2 | import * as errors from './errors'; 3 | import * as deepSortedJson from './deepSortedJson'; 4 | 5 | import {JSONObject} from '@/serviceTypes'; 6 | import {PkgJson, PackageHashOptions} from '@/types'; 7 | 8 | /** 9 | * package.json-related stuff 10 | */ 11 | 12 | /** 13 | * Calculates and returns hash of deps in package.json 14 | */ 15 | export function calcHash( 16 | pkgJson: PkgJson, 17 | lockfileContents: JSONObject | null = null, 18 | options: PackageHashOptions = {} 19 | ) { 20 | const resultSha1 = crypto.createHash('sha1'); 21 | 22 | let sortedDeps = deepSortedJson.transform({ 23 | dependencies: pkgJson.dependencies, 24 | devDependencies: pkgJson.devDependencies, 25 | }); 26 | 27 | if (lockfileContents) { 28 | sortedDeps = sortedDeps.concat(deepSortedJson.transform(lockfileContents)); 29 | } 30 | 31 | resultSha1.update(sortedDeps.join('\n')); 32 | 33 | const result = resultSha1.digest('hex'); 34 | 35 | if (typeof options.suffix === 'string') { 36 | return result + '-' + options.suffix; 37 | } 38 | 39 | if (typeof options.suffix === 'function') { 40 | return result + '-' + options.suffix(); 41 | } 42 | 43 | return result; 44 | } 45 | 46 | export function parsePkgJson(pkgJsonString: string): Promise<PkgJson> { 47 | return new Promise((resolve, reject) => { 48 | let pkgJson; 49 | 50 | try { 51 | pkgJson = JSON.parse(pkgJsonString); 52 | } catch (e) { 53 | return reject(e); 54 | } 55 | 56 | if (!(pkgJson.dependencies instanceof Object) && !(pkgJson.devDependencies instanceof Object)) { 57 | return reject(new EmptyPkgJsonError('No dependencies or devDependencies supplied')); 58 | } 59 | 60 | resolve(pkgJson); 61 | }); 62 | } 63 | 64 | export class
EmptyPkgJsonError extends errors.VeendorError {} 65 | -------------------------------------------------------------------------------- /src/lib/resolveConfig.ts: -------------------------------------------------------------------------------- 1 | import validateConfig, {PartialConfig} from './validateConfig'; 2 | import path from 'path'; 3 | import {Config} from '@/types'; 4 | 5 | function resolveConfig(explicitConfig: string): Promise<Config> { 6 | global.VEENDOR_VERSION = require('../package.json').version; 7 | 8 | return new Promise(resolve => { 9 | let config; 10 | let configLocations = ['.veendor.js', '.veendor.json']; 11 | 12 | if (explicitConfig) { 13 | configLocations = [explicitConfig]; 14 | } 15 | 16 | for (const location of configLocations) { 17 | try { 18 | config = require(path.resolve(process.cwd(), location)); 19 | } catch (e) { 20 | if (e.code === 'MODULE_NOT_FOUND' && e.message.indexOf(location) !== -1) { 21 | continue; 22 | } 23 | 24 | throw e; 25 | } 26 | } 27 | 28 | if (!config) { 29 | console.error('Config file not found'); 30 | process.exit(1); 31 | } 32 | 33 | validateConfig(config).then(validatedConfig => { 34 | resolve(validatedConfig); 35 | }, error => { 36 | console.error(error.message); 37 | process.exit(1); 38 | }); 39 | }); 40 | } 41 | 42 | export default resolveConfig; 43 | -------------------------------------------------------------------------------- /src/lib/resolveLockfile.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import fs from 'fs'; 4 | import util from 'util'; 5 | 6 | import fsExtra from 'fs-extra'; 7 | import path from 'path'; 8 | import {getLogger} from '@/lib/util/logger'; 9 | 10 | // add yarn.lock one day 11 | const LOCKFILE_TYPES = ['npm-shrinkwrap.json', 'package-lock.json']; 12 | 13 | export default function resolveLockfile(): Promise<string | null> { 14 | const logger = getLogger(); 15 | logger.trace(`Looking for lockfiles: ${LOCKFILE_TYPES.join(', ')}`); 16 | const statPromises = LOCKFILE_TYPES.map( 17 | filename => fsExtra 18 | .stat(path.resolve(process.cwd(), filename)) 19 | .catch(error => error)); // keeps Promise.all from rejecting early 20 | 21 | return Promise.all(statPromises).then(getLockfile); 22 | } 23 | 24 | function getLockfile(results: Array<Error | fs.Stats>): string | null { 25 | const logger = getLogger(); 26 | 27 | for (let i = 0; i < LOCKFILE_TYPES.length; i++) { 28 | if (util.isError(results[i])) { 29 | const err = results[i] as NodeJS.ErrnoException; 30 | if (err.code && err.code === 'ENOENT') { 31 | continue; 32 | } 33 | 34 | throw err; 35 | } 36 | 37 | logger.info(`Found '${LOCKFILE_TYPES[i]}'. 
Using it to calculate bundle hashes.`); 38 | return LOCKFILE_TYPES[i]; 39 | } 40 | 41 | return null; 42 | } 43 | -------------------------------------------------------------------------------- /src/lib/util/logger.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import tracer, {Tracer} from 'tracer'; 4 | import fs from 'fs'; 5 | 6 | let loggerInstance: Tracer.Logger; 7 | 8 | export function setDefaultLogger(fileLevel: number, consoleLevel: number) { 9 | loggerInstance = tracer.colorConsole({ 10 | format: '{{message}}', 11 | transport: function (data) { 12 | if (data.level < fileLevel) { 13 | return; 14 | } 15 | 16 | if (data.level >= consoleLevel) { 17 | console.log(data.output); 18 | } 19 | 20 | let fileLogString = `${data.timestamp} ${data.title}:\t${data.message}\n`; 21 | 22 | if (data.stack) { 23 | fileLogString += `${data.stack}\n`; 24 | } 25 | 26 | fs.appendFileSync( 27 | './.veendor-debug.log', 28 | fileLogString 29 | ); 30 | } 31 | }); 32 | 33 | return loggerInstance; 34 | 35 | } 36 | 37 | export function setLogger(logger: Tracer.Logger) { 38 | loggerInstance = logger; 39 | } 40 | 41 | export function getLogger() { 42 | return loggerInstance; 43 | } 44 | -------------------------------------------------------------------------------- /src/lib/util/progress.ts: -------------------------------------------------------------------------------- 1 | import {BackendCalls, BackendConfig, BackendToolsProvider} from '@/types'; 2 | import {getLogger} from '@/lib/util/logger'; 3 | import cliProgress from 'cli-progress'; 4 | import colors from 'colors'; 5 | import {Transform, TransformCallback} from 'stream'; 6 | 7 | export type ProgressContolToken = { 8 | toggleBypass?: (bypassOn: boolean) => void, 9 | terminate?: () => void, 10 | toggleVisibility?: (shouldBeVisible: boolean) => void, 11 | } 12 | 13 | let progressBlocked = false; 14 | 15 | export class ProgressStream extends Transform { 16 | protected progress: cliProgress.Bar; 17 | protected haveTotal: boolean; 18 | protected state: StreamState; 19 | completed: number; 20 | 21 | constructor(options: {}, title: string, private controlToken: ProgressContolToken, private total?: number) { 22 | super(options); 23 | 24 | this.state = StreamState.preparing; 25 | 26 | this.haveTotal = typeof this.total === 'number'; 27 | 28 | const progressWithTotal = rightPad(2000, ` ${colors.green(title)} [{bar}] ` 29 | + `${colors.gray('{_value} / {_total} Mb')} {percentage}% {duration_formatted}`); 30 | 31 | const progressWithoutTotal = rightPad(2000, ` ${colors.green(title)} ${colors.gray(' {_value} Mb')}` + 32 | ` {duration_formatted}`); 33 | 34 | this.progress = new cliProgress.Bar({ 35 | format: this.haveTotal ? 
progressWithTotal : progressWithoutTotal, 36 | barsize: 40, 37 | etaBuffer: 50, 38 | hideCursor: false, 39 | clearOnComplete: true, 40 | linewrap: false, 41 | fps: 50, 42 | }); 43 | 44 | this.completed = 0; 45 | 46 | this.once('end', () => { 47 | this.die(); 48 | }); 49 | 50 | this.on('pipe', () => { 51 | if (this.state !== StreamState.bypass && this.state !== StreamState.visible) { 52 | this.state = StreamState.connected; 53 | } 54 | }); 55 | 56 | this.on('unpipe', () => { 57 | if (this.state !== StreamState.bypass) { 58 | this.toggleVisibility(false); 59 | this.state = StreamState.preparing; 60 | } 61 | }); 62 | 63 | this.controlToken.toggleVisibility = (shouldBeVisible) => this.toggleVisibility(shouldBeVisible); 64 | this.controlToken.toggleBypass = (bypassOn) => this.toggleBypass(bypassOn); 65 | this.controlToken.terminate = () => this.die(); 66 | } 67 | 68 | toggleVisibility(shouldBeVisible: boolean) { 69 | if (shouldBeVisible && [StreamState.connected, StreamState.hidden].includes(this.state) && !progressBlocked) { 70 | this.show(); 71 | this.state = StreamState.visible; 72 | return; 73 | } else if (shouldBeVisible && this.state === StreamState.preparing) { 74 | setTimeout(() => this.toggleVisibility(true), 1000); 75 | } else if (!shouldBeVisible && this.state === StreamState.visible) { 76 | this.hide(); 77 | this.state = StreamState.hidden; 78 | return; 79 | } 80 | } 81 | 82 | toggleBypass(bypassOn: boolean) { 83 | if (bypassOn && [StreamState.connected, StreamState.hidden, StreamState.visible].includes(this.state)) { 84 | this.state = StreamState.bypass; 85 | return; 86 | } else if (!bypassOn && this.state === StreamState.bypass) { 87 | this.state = StreamState.hidden; 88 | return; 89 | } 90 | } 91 | 92 | 93 | _transform(data: any, _encoding: string, callback: TransformCallback) { 94 | if (this.state !== StreamState.bypass) { 95 | this.completed += data.length; 96 | 97 | if (this.state === StreamState.visible && !progressBlocked) { 98 | const total = typeof this.total === 'number' ? this.total : 1000; 99 | 100 | this.progress.setTotal(total); 101 | this.progress.update(this.completed, { 102 | _value: formatMb(this.completed), 103 | _total: formatMb(total), 104 | }); 105 | } 106 | } 107 | 108 | callback(undefined, data); 109 | } 110 | 111 | private show() { 112 | this.progress.start(typeof this.total === 'number' ? 
this.total : 1000, this.completed); 113 | } 114 | 115 | private hide() { 116 | this.progress.stop(); 117 | } 118 | 119 | die() { 120 | if (this.state === StreamState.terminated) { 121 | return; 122 | } 123 | 124 | this.progress.stop(); 125 | this.state = StreamState.terminated; 126 | } 127 | } 128 | 129 | enum StreamState { 130 | preparing, 131 | connected, 132 | visible, 133 | hidden, 134 | bypass, 135 | terminated, 136 | } 137 | 138 | function roundMb(bytes: number): number { 139 | return Math.floor((bytes / 1024 / 1024) * 100) / 100; 140 | } 141 | 142 | function formatMb(bytes: number): string { 143 | return leftPad(7, roundMb(bytes).toFixed(2)); 144 | } 145 | 146 | function leftPad(width: number, str: string): string { 147 | // https://stackoverflow.com/questions/5366849/convert-1-to-0001-in-javascript 148 | // @ts-ignore 149 | return Array(width).join(' ').substring(' ', width - str.length) + str; 150 | } 151 | 152 | function rightPad(width: number, str: string): string { 153 | // https://stackoverflow.com/questions/5366849/convert-1-to-0001-in-javascript 154 | // @ts-ignore 155 | return str + Array(width).join(' ').substring(' ', width - str.length); 156 | } 157 | 158 | const allTokens: ProgressContolToken[] = []; 159 | 160 | export function provideBackendCallTools(backendConfig: BackendConfig, callType: BackendCalls): BackendToolsProvider { 161 | const controlToken = {}; 162 | allTokens.push(controlToken); 163 | 164 | return { 165 | getLogger() { 166 | return getLogger(); 167 | }, 168 | 169 | getProgressStream(label?: string, total?: number) { 170 | const resultLabel = label ? `${backendConfig.alias} ${label}` : `${backendConfig.alias} ${callType}`; 171 | return new ProgressStream({}, resultLabel, controlToken, total); 172 | }, 173 | }; 174 | } 175 | 176 | export function blockAllProgress(shouldBeBlocked: boolean) { 177 | progressBlocked = shouldBeBlocked; 178 | 179 | for (const token of allTokens) { 180 | if (token.toggleVisibility) { 181 | token.toggleVisibility(!shouldBeBlocked); 182 | } 183 | } 184 | } 185 | -------------------------------------------------------------------------------- /src/lib/validateConfig.ts: -------------------------------------------------------------------------------- 1 | import _ from 'lodash'; 2 | import semver from 'semver'; 3 | 4 | import * as errors from './errors'; 5 | import * as npmWrapper from './commandWrappers/npmWrapper'; 6 | import {Config, BackendConfig, Backend} from '@/types'; 7 | 8 | export type PartialConfig = { 9 | [P in keyof Config]?: P extends 'backends' ? Array<InputPartialBackendConfig> : Config[P] 10 | } 11 | 12 | type InputPartialBackendConfig = { 13 | [P in keyof BackendConfig]?: P extends 'backend' ? 
string | PartialBackend | any : BackendConfig[P] | any 14 | } 15 | 16 | type PartialBackend = Partial<Backend>; 17 | 18 | export default function validateConfig(config: PartialConfig): Promise<Config> { 19 | const validationPromises: Array<Promise<unknown>> = []; 20 | 21 | if (!(config.backends instanceof Array) || config.backends.length === 0) { 22 | return Promise.reject(new EmptyBackendsPropertyError()); 23 | } 24 | 25 | const aliases = _.map(config.backends, 'alias'); 26 | 27 | if (_.uniq(aliases).length < aliases.length) { 28 | return Promise.reject(new AliasesNotUniqueError(`backends aliases are not unique`)); 29 | } 30 | 31 | for (const [position, backend] of config.backends.entries()) { 32 | validationPromises.push(validateBackend(backend, position)); 33 | } 34 | 35 | if (config.fallbackToNpm === undefined) { 36 | config.fallbackToNpm = true; 37 | } 38 | 39 | if (config.packageHash === undefined) { 40 | config.packageHash = {}; 41 | } 42 | 43 | if (config.installDiff === undefined) { 44 | config.installDiff = true; 45 | } 46 | 47 | if (!(config.installDiff) && config.useGitHistory) { 48 | return Promise.reject(new InvalidUseGitHistoryError( 49 | 'Setting both \'installDiff\' and \'useGitHistory\' doesn\'t make any sense' 50 | )); 51 | } 52 | 53 | if (config.useGitHistory) { 54 | if (!_.isNumber(config.useGitHistory.depth)) { 55 | return Promise.reject(new InvalidUseGitHistoryError( 56 | '\'useGitHistory\' should be used with \'depth\' option' 57 | )); 58 | } 59 | 60 | if (config.useGitHistory.depth < 1) { 61 | return Promise.reject(new InvalidUseGitHistoryError( 62 | '\'useGitHistory.depth\' should be a positive number' 63 | )); 64 | } 65 | } 66 | 67 | if (typeof config.npmVersion === 'string') { 68 | const npmVersion = config.npmVersion; 69 | validationPromises.push( 70 | npmWrapper.version() 71 | .then(version => { 72 | if (!semver.satisfies(version, npmVersion)) { 73 | throw new InvalidNpmVersionError(npmVersion, version); 74 | } 75 | }) 76 | ); 77 | } 78 | 79 | if (config.dedupe === undefined) { 80 | config.dedupe = false; 81 | } 82 | 83 | if (config.clearSharedCache === undefined) { 84 | config.clearSharedCache = false; 85 | } 86 | 87 | if (config.veendorVersion !== undefined) { 88 | if (!semver.satisfies(global.VEENDOR_VERSION, config.veendorVersion)) { 89 | return Promise.reject(new InvalidVeendorVersionError(config.veendorVersion)); 90 | } 91 | } 92 | 93 | return Promise.all(validationPromises).then(() => config as Config); 94 | } 95 | 96 | function validateBackend(backendConfig: InputPartialBackendConfig, position: number) { 97 | if (!(typeof backendConfig.alias === 'string' && backendConfig.alias.length > 0)) { 98 | return Promise.reject(new EmptyBackendAliasError(position)); 99 | } 100 | 101 | if (typeof backendConfig.backend === 'string') { 102 | backendConfig.backend = require(`./backends/${backendConfig.backend}`); 103 | } else if (!(backendConfig.backend instanceof Object)) { 104 | return Promise.reject(new InvalidBackendError(backendConfig.alias, 'backend')); 105 | } 106 | 107 | if (typeof backendConfig.backend.pull !== 'function') { 108 | return Promise.reject(new InvalidBackendError(backendConfig.alias, 'pull')); 109 | } 110 | 111 | if (typeof backendConfig.backend.push !== 'function') { 112 | return Promise.reject(new InvalidBackendError(backendConfig.alias, 'push')); 113 | } 114 | 115 | if (typeof backendConfig.backend.validateOptions !== 'function') { 116 | return Promise.reject(new InvalidBackendError(backendConfig.alias, 'validateOptions')); 117 | } 118 | 119 | if (backendConfig.push === undefined) { 120 | 
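// default is pull-only: pushing to a backend must be enabled explicitly in its config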
backendConfig.push = false; 121 | } 122 | 123 | if (typeof backendConfig.push !== 'boolean') { 124 | return Promise.reject(new InvalidBackendOptionError(backendConfig.alias, 'push')); 125 | } 126 | 127 | if (backendConfig.pushMayFail === undefined) { 128 | backendConfig.pushMayFail = false; 129 | } 130 | 131 | if (typeof backendConfig.pushMayFail !== 'boolean') { 132 | return Promise.reject(new InvalidBackendOptionError(backendConfig.alias, 'pushMayFail')); 133 | } 134 | 135 | let validationResult; 136 | 137 | try { 138 | validationResult = backendConfig.backend.validateOptions(backendConfig.options); 139 | } catch (e) { 140 | return Promise.reject(e); 141 | } 142 | 143 | if (validationResult instanceof Promise) { 144 | return validationResult; 145 | } 146 | 147 | return Promise.resolve(); 148 | } 149 | 150 | export class EmptyBackendsPropertyError extends errors.VeendorError { 151 | constructor() { 152 | super('no backends found in config'); 153 | } 154 | } 155 | 156 | export class InvalidBackendError extends errors.VeendorError { 157 | constructor(alias: string, field: string) { 158 | super(`backend '${alias}' lacks or has an invalid '${field}' field`); 159 | } 160 | } 161 | 162 | export class InvalidBackendOptionError extends errors.VeendorError { 163 | constructor(alias: string, field: string) { 164 | super(`backend's '${alias}' '${field}' option is invalid`); 165 | } 166 | } 167 | 168 | export class EmptyBackendAliasError extends errors.VeendorError { 169 | constructor(position: number) { 170 | super(`backend at position '${position}' lacks or has invalid 'alias' field`); 171 | } 172 | } 173 | 174 | export class InvalidNpmVersionError extends errors.VeendorError { 175 | constructor(expected: string, actual: string) { 176 | super(`npm version '${actual}' does not comply with '${expected}' constraint`); 177 | } 178 | } 179 | 180 | export class InvalidVeendorVersionError extends errors.VeendorError { 181 | constructor(expected: string) { 182 | super(`veendor version '${global.VEENDOR_VERSION}' does not comply with '${expected}' constraint`); 183 | } 184 | } 185 | 186 | export class AliasesNotUniqueError extends errors.VeendorError {} 187 | export class InvalidUseGitHistoryError extends errors.VeendorError {} 188 | -------------------------------------------------------------------------------- /src/serviceTypes.ts: -------------------------------------------------------------------------------- 1 | 2 | export type JSONValue = string | number | boolean | JSONObject | JSONArray; 3 | 4 | export interface JSONObject { 5 | [x: string]: JSONValue; 6 | } 7 | 8 | export interface JSONArray extends Array<JSONValue> {} 9 | 10 | export type StringMap = {[key: string]: string}; 11 | -------------------------------------------------------------------------------- /src/test/integration/bundles/7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz: -------------------------------------------------------------------------------- 1 | version https://git-lfs.github.com/spec/v1 2 | oid sha256:bd2433a1d7e2bd6ffa77e2ff629ba37fb60d214f657adf910a1237c4d902b332 3 | size 5644 4 | -------------------------------------------------------------------------------- /src/test/integration/integration.test.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | import {after, before, describe, it} from 'mocha'; 4 | import chai from 'chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import fsExtra from 'fs-extra'; 7 | import _ from 'lodash'; 8 | import crypto from 
'crypto'; 9 | import os from 'os'; 10 | import path from 'path'; 11 | import AWS from 'aws-sdk'; 12 | import S3rver from 's3rver'; 13 | 14 | import {CommandError} from '@/lib/commandWrappers/helpers'; 15 | import * as helpers from '@/lib/commandWrappers/helpers'; 16 | 17 | chai.use(chaiAsPromised); 18 | 19 | /** 20 | * This is just for test runner. 21 | * Test cases are written in bash. 22 | */ 23 | 24 | const TEST_SCRIPT = 'dist/test/integration/runTest.sh'; 25 | const PREPARE_NVM_SCRIPT = 'dist/test/integration/prepareNvm.sh'; 26 | const s3Dir = path.resolve(process.cwd(), 'tmp', 'test', 'integration', 's3rver'); 27 | 28 | const NODE_VERSIONS = [{ 29 | nodeVersion: 'v6.13.0', 30 | npmVersions: ['v3.10.10', 'v5.6.0'], 31 | }, { 32 | nodeVersion: 'v8.9.4', 33 | npmVersions: ['v5.10.0', 'v6.4.0'], 34 | }, { 35 | nodeVersion: 'v10.9.0', 36 | npmVersions: ['v6.4.0'], 37 | }, { 38 | nodeVersion: 'v12.14.1', 39 | npmVersions: ['v6.13.4'], 40 | }]; 41 | 42 | let s3rverInstance: S3rver; 43 | 44 | describe('veendor', function () { 45 | before(function (done) { 46 | this.timeout(120000); 47 | const nvmDir = path.resolve(process.cwd(), 'nvm'); 48 | 49 | const resultArgs = ['-x', PREPARE_NVM_SCRIPT]; 50 | 51 | for (const nodeVersion of NODE_VERSIONS) { 52 | for (const npmVersion of nodeVersion.npmVersions) { 53 | resultArgs.push(nodeVersion.nodeVersion); 54 | resultArgs.push(npmVersion); 55 | } 56 | } 57 | 58 | helpers 59 | .getOutput('bash', resultArgs, {timeoutDuration: 120000}) 60 | .then(() => { 61 | done(); 62 | }, error => { 63 | if (error.output) { 64 | const outPath = path.resolve(nvmDir, 'output.txt'); 65 | fsExtra.ensureDirSync(nvmDir); 66 | fsExtra.writeFileSync(outPath, error.output); 67 | error.message += `. Output saved to ${outPath}`; 68 | } 69 | done(error); 70 | }); 71 | }); 72 | 73 | before(function (done) { 74 | this.timeout(15000); 75 | 76 | fsExtra.ensureDirSync(s3Dir); 77 | 78 | s3rverInstance = new S3rver({ 79 | port: 14569, 80 | silent: true, 81 | directory: s3Dir, 82 | }).run(err => { 83 | if (err) { 84 | return done(err); 85 | } 86 | 87 | new AWS.S3({ 88 | endpoint: `http://localhost:14569`, 89 | accessKeyId: '123', 90 | secretAccessKey: 'abc', 91 | sslEnabled: false, 92 | s3ForcePathStyle: true, 93 | }); 94 | 95 | done(); 96 | }); 97 | }); 98 | 99 | after(done => { 100 | s3rverInstance.close(done); 101 | }); 102 | 103 | describe('install', function () { 104 | this.timeout(40000); 105 | 106 | it('should pull node_modules from git repo', () => { 107 | return runBashTest('gitPull'); 108 | }); 109 | 110 | it('should push archive to git repo', () => { 111 | return runBashTest('gitPush'); 112 | }); 113 | 114 | it('should pull node_modules from local directory', () => { 115 | return runBashTest('localPull'); 116 | }); 117 | 118 | it('should copy archive to local directory', () => { 119 | return runBashTest('localPush'); 120 | }); 121 | 122 | it('should copy archive to local directory when used with lockfile', () => { 123 | return runBashTest('localPushWithPackageLock'); 124 | }); 125 | 126 | it('should pull node_modules from http server', () => { 127 | return runBashTest('httpPull'); 128 | }); 129 | 130 | it('should not alter package.json upon install', () => { 131 | return runBashTest('noSave'); 132 | }); 133 | 134 | describe('s3', () => { 135 | beforeEach(() => { 136 | return fsExtra.emptyDir(path.join(s3Dir, 'testbucket')); 137 | }); 138 | 139 | it('should pull node_modules from s3 server', () => { 140 | return runBashTest('s3Pull'); 141 | }); 142 | 143 | it('should push node_modules to 
s3 server', () => { 144 | return runBashTest('s3Push'); 145 | }); 146 | }); 147 | }); 148 | 149 | describe('calc', function () { 150 | this.timeout(20000); 151 | 152 | it('should return hash on package.json', () => { 153 | return runBashTest('calcHashPlain'); 154 | }); 155 | 156 | it('should return hash on package.json + package-lock.json', () => { 157 | return runBashTest('calcHashWithPackageLock'); 158 | }); 159 | 160 | it('should return hash on package.json + npm-shrinkwrap.json', () => { 161 | return runBashTest('calcHashWithShrinkWrap'); 162 | }); 163 | 164 | xit('should return hash on package.json + yarn.lock', () => { 165 | return runBashTest('calcHashWithYarnLock'); 166 | }); 167 | }); 168 | }); 169 | 170 | function runBashTest(testCase: string) { 171 | const testPromises: Promise<void>[] = []; 172 | const remainingVersions = _.cloneDeep(NODE_VERSIONS); 173 | 174 | while (remainingVersions.length !== 0) { 175 | const nodeVersion = remainingVersions[0].nodeVersion; 176 | const npmVersion = remainingVersions[0].npmVersions[0]; 177 | 178 | if (remainingVersions[0].npmVersions.length === 1) { 179 | remainingVersions.shift(); 180 | } else { 181 | remainingVersions[0].npmVersions.shift(); 182 | } 183 | 184 | testPromises.push(executeBashTest(testCase, nodeVersion, npmVersion)); 185 | } 186 | return Promise.all(testPromises); 187 | } 188 | 189 | function executeBashTest(testCase: string, nodeVersion: string, npmVersion: string) { 190 | return new Promise<void>((resolve, reject) => { 191 | const testDir = path.resolve( 192 | process.cwd(), 'tmp', 'test', 'integration', testCase, `${nodeVersion}-${npmVersion}` 193 | ); 194 | 195 | const tmpDir = os.tmpdir(); 196 | const cwdHash = crypto.createHash('sha1'); 197 | cwdHash.update(testDir); 198 | const cacheDir = path.resolve(tmpDir, `veendor-${cwdHash.digest('hex')}`); 199 | 200 | return helpers 201 | .getOutput( 202 | 'bash', 203 | [TEST_SCRIPT, testCase, testDir, cacheDir, nodeVersion, npmVersion], 204 | {timeoutDuration: 40000} 205 | ).then(() => { 206 | resolve(); 207 | }, (error: CommandError) => { 208 | if (error.output) { 209 | const outPath = path.resolve(testDir, 'output.txt'); 210 | fsExtra.ensureDirSync(testDir); 211 | fsExtra.writeFileSync(outPath, error.output); 212 | error.message += `. Output saved to ${outPath}`; 213 | } 214 | reject(error); 215 | }); 216 | }); 217 | 218 | } 219 | -------------------------------------------------------------------------------- /src/test/integration/prepareNvm.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | export NVM_DIR="$(pwd)/nvm" 4 | 5 | mkdir -p "${NVM_DIR}" 6 | 7 | if [ ! -f "${NVM_DIR}/nvm.sh" ]; then 8 | curl "https://raw.githubusercontent.com/creationix/nvm/master/nvm.sh" > "${NVM_DIR}/nvm.sh"; 9 | fi 10 | 11 | export npm_config_prefix= 12 | source "${NVM_DIR}/nvm.sh" 13 | 14 | while [ $# -gt 0 ]; do 15 | node_version="$1"; shift 16 | npm_version="$1"; shift 17 | if nvm use "${node_version}-${npm_version}"; then 18 | : 19 | else 20 | nvm install "${node_version}" 21 | nvm use "${node_version}" 22 | npm install -g "npm@${npm_version}" 23 | mv "${NVM_DIR}/versions/node/${node_version}" "${NVM_DIR}/versions/node/${node_version}-${npm_version}" 24 | nvm alias default "${node_version}-${npm_version}" # nvm will use first installed version as default. 
25 | # After moving it, default version will be lost 26 | # forcing it to remain 27 | fi 28 | done 29 | -------------------------------------------------------------------------------- /src/test/integration/runTest.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -ex 4 | 5 | create_empty_repo () { 6 | git init --bare "$1" 7 | git clone "$1" repolocal 8 | cd repolocal 9 | git checkout -b master 10 | git commit --allow-empty -m "Initial commit" 11 | git push -u 12 | cd - 13 | rm -rf repolocal 14 | } 15 | 16 | rootdir="$(pwd)/dist" 17 | testdir="$(pwd)/dist/test" 18 | dirname="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 19 | testcase="$1" 20 | tmpdir="$2" 21 | cachedir="$3" 22 | node_version="$4" 23 | npm_version="$5" 24 | 25 | export npm_config_prefix= 26 | export NVM_DIR="$(pwd)/nvm" 27 | 28 | rm -rf "${tmpdir}" 29 | mkdir -p "${tmpdir}" 30 | 31 | rm -rf "${cachedir}" 32 | mkdir -p "${cachedir}" 33 | 34 | echo "source nvm.sh" 35 | set +x 36 | source "${NVM_DIR}/nvm.sh" 37 | set -x 38 | nvm use "${node_version}-${npm_version}" 39 | 40 | cp "$dirname/testCases/$testcase/package.json" "$tmpdir" 41 | cp "$dirname/testCases/$testcase/.veendor.js" "$tmpdir" 42 | 43 | if [[ -f "$dirname/testCases/$testcase/package-lock.json" ]]; then 44 | cp "$dirname/testCases/$testcase/package-lock.json" "$tmpdir" 45 | fi 46 | 47 | if [[ -f "$dirname/testCases/$testcase/npm-shrinkwrap.json" ]]; then 48 | cp "$dirname/testCases/$testcase/npm-shrinkwrap.json" "$tmpdir" 49 | fi 50 | 51 | cd "$tmpdir" 52 | 53 | source "$dirname/testCases/$testcase/testcase.sh" 54 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashPlain/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local-git', 7 | push: true, 8 | backend: 'git-lfs', 9 | options: { 10 | repo: `${process.env.TEST_REPO_DIR}` 11 | } 12 | } 13 | ], 14 | packageHash: { 15 | suffix: 'test' 16 | } 17 | }; 18 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashPlain/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "^1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashPlain/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -x 4 | 5 | expected_hash="83e0f500934b0f43f73cf05f3ef3e9d78228a70d-test" 6 | calc="$(node "$rootdir/bin/veendor.js" calc --debug)" 7 | 8 | if [[ "$calc" != "$expected_hash" ]]; then 9 | echo "veendor calc returned unexpected result: $calc" 10 | echo "expected: $expected_hash" 11 | exit 1; 12 | fi 13 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashWithPackageLock/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local-git', 7 | push: true, 8 | 
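// note: for the 'veendor calc' testcases the backend below is never contacted — per
// src/lib/pkgjson.ts, the hash depends only on the dependencies, the lockfile, and packageHash.suffix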
backend: 'git-lfs', 9 | options: { 10 | repo: `${process.env.TEST_REPO_DIR}` 11 | } 12 | } 13 | ], 14 | packageHash: { 15 | suffix: 'test' 16 | } 17 | }; 18 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashWithPackageLock/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "^1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashWithPackageLock/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -x 4 | 5 | expected_hash="dea50c9ec128b9868367a5668dea082b906d3116-test" 6 | calc="$(node "$rootdir/bin/veendor.js" calc --debug)" 7 | 8 | if [[ "$calc" != "$expected_hash" ]]; then 9 | echo "veendor calc returned unexpected result: $calc" 10 | echo "expected: $expected_hash" 11 | exit 1; 12 | fi 13 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashWithShrinkWrap/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local-git', 7 | push: true, 8 | backend: 'git-lfs', 9 | options: { 10 | repo: `${process.env.TEST_REPO_DIR}` 11 | } 12 | } 13 | ], 14 | packageHash: { 15 | suffix: 'test' 16 | } 17 | }; 18 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashWithShrinkWrap/npm-shrinkwrap.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "deep-object-diff": { 8 | "version": "1.1.0", 9 | "resolved": "https://registry.npmjs.org/deep-object-diff/-/deep-object-diff-1.1.0.tgz", 10 | "integrity": "sha512-b+QLs5vHgS+IoSNcUE4n9HP2NwcHj7aqnJWsjPtuG75Rh5TOaGt0OjAYInh77d5T16V5cRDC+Pw/6ZZZiETBGw==" 11 | } 12 | } 13 | } 14 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashWithShrinkWrap/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "^1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/calcHashWithShrinkWrap/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -x 4 | 5 | expected_hash="dea50c9ec128b9868367a5668dea082b906d3116-test" 6 | calc="$(node "$rootdir/bin/veendor.js" calc --debug)" 7 | 8 | if [[ "$calc" != "$expected_hash" ]]; then 9 | echo "veendor calc returned unexpected result: $calc" 10 | echo "expected: $expected_hash" 11 | exit 1; 12 | fi 13 | 
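# Note: the expected hash above is identical to the calcHashWithPackageLock case — calcHash()
# in src/lib/pkgjson.ts feeds the deep-sorted lockfile contents into the same sha1, so lockfiles
# describing the same dependency tree yield equal hashes; the '-test' suffix comes from
# packageHash.suffix in this testcase's .veendor.js.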
-------------------------------------------------------------------------------- /src/test/integration/testCases/gitPull/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local-git', 7 | push: true, 8 | backend: 'git-lfs', 9 | options: { 10 | repo: `${process.env.TEST_REPO_DIR}` 11 | } 12 | } 13 | ] 14 | }; 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/gitPull/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/gitPull/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -x 4 | 5 | create_empty_repo "$(pwd)/repo" 6 | export TEST_REPO_DIR="$(pwd)/repo" 7 | 8 | git clone "$(pwd)/repo" repolocal 9 | cd repolocal 10 | cp "$testdir/integration/bundles/7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz" . 11 | git add 7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz 12 | git commit -m "7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz" 13 | git tag veendor-7d0db335c82dfd9aa2b96dabc485b89ebaa1496f 14 | git push origin veendor-7d0db335c82dfd9aa2b96dabc485b89ebaa1496f 15 | cd - 16 | rm -rf repolocal 17 | 18 | node "$rootdir/bin/veendor.js" install -vvv --debug 19 | 20 | if [[ "$(cat node_modules/proof.txt)" != "this was pulled from archive" ]]; then 21 | cat node_modules/proof.txt 22 | echo "gitPull failed; node_modules/proof.txt is not there" 23 | exit 1; 24 | fi 25 | -------------------------------------------------------------------------------- /src/test/integration/testCases/gitPush/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local-git', 7 | push: true, 8 | backend: 'git-lfs', 9 | options: { 10 | repo: `${process.env.TEST_REPO_DIR}` 11 | } 12 | } 13 | ] 14 | }; 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/gitPush/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/gitPush/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | create_empty_repo "$(pwd)/repo" 4 | export TEST_REPO_DIR="$(pwd)/repo" 5 | 6 | echo -e "\nRUNNING TEST\n" 7 | 8 | node "$rootdir/bin/veendor.js" install -vvv --debug 9 | 10 | git clone "$(pwd)/repo" repolocal 11 | cd repolocal 12 | 13 | npm_ver="$(npm --version)" 14 | 15 | git checkout veendor-7d0db335c82dfd9aa2b96dabc485b89ebaa1496f 16 | tar -xf 
7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz 17 | 18 | if [[ ! -f "node_modules/deep-object-diff/package.json" ]]; then 19 | echo "gitPush failed; node_modules/deep-object-diff/package.json is not there" 20 | exit 1; 21 | fi 22 | 23 | -------------------------------------------------------------------------------- /src/test/integration/testCases/httpPull/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'http', 7 | backend: 'http', 8 | options: { 9 | resolveUrl(hash) { 10 | return `https://s3.us-east-2.amazonaws.com/mcornholio-s3/${hash}.tar.gz`; 11 | }, 12 | strict: true, 13 | } 14 | } 15 | ] 16 | }; 17 | -------------------------------------------------------------------------------- /src/test/integration/testCases/httpPull/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/httpPull/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | mkdir "$(pwd)/local" 4 | 5 | node "$rootdir/bin/veendor.js" install -vvv --debug 6 | 7 | if [[ "$(cat node_modules/proof.txt)" != "this was pulled from archive" ]]; then 8 | cat node_modules/proof.txt 9 | echo "httpPull failed; node_modules/proof.txt is not there" 10 | exit 1; 11 | fi 12 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPull/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local', 7 | push: true, 8 | backend: 'local', 9 | options: { 10 | directory: `${process.env.TEST_DIR}` 11 | } 12 | } 13 | ] 14 | }; 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPull/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPull/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | mkdir "$(pwd)/local" 4 | export TEST_DIR="$(pwd)/local" 5 | 6 | cp "$testdir/integration/bundles/7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz" local/ 7 | 8 | node "$rootdir/bin/veendor.js" install -vvv --debug 9 | 10 | if [[ "$(cat node_modules/proof.txt)" != "this was pulled from archive" ]]; then 11 | cat node_modules/proof.txt 12 | echo "localPull failed; node_modules/proof.txt is not there" 13 | exit 1; 14 | fi 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPush/.veendor.js: 
-------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local', 7 | push: true, 8 | backend: 'local', 9 | options: { 10 | directory: `${process.env.TEST_DIR}` 11 | } 12 | } 13 | ] 14 | }; 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPush/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPush/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | mkdir "$(pwd)/local" 4 | export TEST_DIR="$(pwd)/local" 5 | 6 | echo -e "\nRUNNING TEST\n" 7 | 8 | node "$rootdir/bin/veendor.js" install -vvv --debug 9 | 10 | cd "$(pwd)/local" 11 | tar -xf 7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz 12 | 13 | if [[ ! -f "node_modules/deep-object-diff/package.json" ]]; then 14 | echo "localPush failed; node_modules/deep-object-diff/package.json is not there" 15 | exit 1; 16 | fi 17 | 18 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPushWithPackageLock/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local', 7 | push: true, 8 | backend: 'local', 9 | options: { 10 | directory: `${process.env.TEST_DIR}` 11 | } 12 | } 13 | ] 14 | }; 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPushWithPackageLock/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/localPushWithPackageLock/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | mkdir "$(pwd)/local" 4 | export TEST_DIR="$(pwd)/local" 5 | 6 | echo -e "\nRUNNING TEST\n" 7 | 8 | node "$rootdir/bin/veendor.js" install -vvv --debug 9 | 10 | cd "$(pwd)/local" 11 | tar -xf 1722dc5c5ee28cf0bcdc5ac1da82e0608b655f88.tar.gz 12 | 13 | if [[ ! 
-f "node_modules/deep-object-diff/package.json" ]]; then 14 | echo "gitPush failed; node_modules/deep-object-diff/package.json is not there" 15 | exit 1; 16 | fi 17 | 18 | -------------------------------------------------------------------------------- /src/test/integration/testCases/noSave/.npmrc: -------------------------------------------------------------------------------- 1 | save=true 2 | -------------------------------------------------------------------------------- /src/test/integration/testCases/noSave/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 'local', 7 | push: true, 8 | backend: 'local', 9 | options: { 10 | directory: `${process.env.TEST_DIR}` 11 | } 12 | } 13 | ], 14 | useGitHistory: { 15 | depth: 15, 16 | }, 17 | }; 18 | -------------------------------------------------------------------------------- /src/test/integration/testCases/noSave/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/noSave/package2.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "^1.0.4", 13 | "repeat-string:": "^1.6.0" 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /src/test/integration/testCases/noSave/package3.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "^1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/noSave/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -x 4 | 5 | create_empty_repo "$(pwd)/repo" 6 | git clone "$(pwd)/repo" repolocal 7 | cd repolocal 8 | 9 | export TEST_DIR="$tmpdir/local" 10 | mkdir -p $TEST_DIR 11 | 12 | find "$testdir/integration/testCases/noSave" -type f -maxdepth 1 -print0 | xargs -0 -I{} -n1 cp "{}" . 13 | 14 | run_iteration() { 15 | cp package.json package.json.bak 16 | 17 | git add -A 18 | git commit -m "1" 19 | 20 | node "$rootdir/bin/veendor.js" install -vvv --debug 21 | 22 | if ! 
diff -q package.json package.json.bak; then 23 | echo "package.json changed and it shouldn't have" 24 | exit 1; 25 | fi 26 | } 27 | 28 | run_iteration 29 | 30 | cp package2.json package.json 31 | 32 | run_iteration 33 | 34 | cp package3.json package.json 35 | 36 | run_iteration 37 | -------------------------------------------------------------------------------- /src/test/integration/testCases/s3Pull/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 's3', 7 | backend: 's3', 8 | options: { 9 | bucket: 'testbucket', 10 | s3Options: { 11 | endpoint: 'localhost:14569', 12 | sslEnabled: false, 13 | s3ForcePathStyle: true, 14 | }, 15 | } 16 | } 17 | ] 18 | }; 19 | -------------------------------------------------------------------------------- /src/test/integration/testCases/s3Pull/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/s3Pull/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | mkdir "$(pwd)/local" 4 | 5 | echo -e "\nRUNNING TEST\n" 6 | 7 | curl -X PUT -T "$testdir/integration/bundles/7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz" \ 8 | -H 'Host: testbucket.s3.amazonaws.com' \ 9 | http://localhost:14569/7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz 10 | 11 | node "$rootdir/bin/veendor.js" install -vvv --debug 12 | 13 | if [[ "$(cat node_modules/proof.txt)" != "this was pulled from archive" ]]; then 14 | cat node_modules/proof.txt 15 | echo "s3Pull failed; node_modules/proof.txt is not there" 16 | exit 1; 17 | fi 18 | -------------------------------------------------------------------------------- /src/test/integration/testCases/s3Push/.veendor.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | 3 | module.exports = { 4 | backends: [ 5 | { 6 | alias: 's3', 7 | backend: 's3', 8 | options: { 9 | bucket: 'testbucket', 10 | s3Options: { 11 | endpoint: 'localhost:14569', 12 | sslEnabled: false, 13 | s3ForcePathStyle: true, 14 | }, 15 | }, 16 | push: true, 17 | } 18 | ] 19 | }; 20 | -------------------------------------------------------------------------------- /src/test/integration/testCases/s3Push/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "gp", 3 | "version": "1.0.0", 4 | "description": "", 5 | "main": "index.js", 6 | "scripts": { 7 | "test": "echo \"Error: no test specified\" && exit 1" 8 | }, 9 | "author": "", 10 | "license": "ISC", 11 | "dependencies": { 12 | "deep-object-diff": "1.0.4" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /src/test/integration/testCases/s3Push/testcase.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | mkdir "$(pwd)/local" 4 | 5 | echo -e "\nRUNNING TEST\n" 6 | 7 | node "$rootdir/bin/veendor.js" install -vvv --debug 8 | 9 | curl 
http://localhost:14569/7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz \ 10 | -H 'Host: testbucket.s3.amazonaws.com' \ 11 | -o 7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz 12 | 13 | tar -xf 7d0db335c82dfd9aa2b96dabc485b89ebaa1496f.tar.gz 14 | 15 | if [[ ! -f "node_modules/deep-object-diff/package.json" ]]; then 16 | echo "s3Push failed; node_modules/deep-object-diff/package.json is not there" 17 | exit 1; 18 | fi 19 | 20 | -------------------------------------------------------------------------------- /src/test/setup.ts: -------------------------------------------------------------------------------- 1 | import tracer from 'tracer'; 2 | 3 | import {setLogger} from '@/lib/util/logger'; 4 | import {blockAllProgress} from '@/lib/util/progress'; 5 | 6 | blockAllProgress(true); 7 | const logLevel = process.env.DEBUG ? 1 : 6; 8 | setLogger(tracer.console({level: logLevel})); 9 | -------------------------------------------------------------------------------- /src/test/unit/backends/git-lfs.test.ts: -------------------------------------------------------------------------------- 1 | import {describe, it, beforeEach, afterEach} from 'mocha'; 2 | import chai from 'chai'; 3 | import chaiAsPromised from 'chai-as-promised'; 4 | import sinon from 'sinon'; 5 | import mockfs from 'mock-fs'; 6 | import path from 'path'; 7 | 8 | import * as errors from '@/lib/errors'; 9 | import * as tarWrapper from '@/lib/commandWrappers/tarWrapper'; 10 | import * as gitWrapper from '@/lib/commandWrappers/gitWrapper'; 11 | import * as gitLfs from '@/lib/backends/git-lfs'; 12 | import {GitLfsOptions} from '@/lib/backends/git-lfs'; 13 | 14 | const assert = chai.assert; 15 | chai.use(chaiAsPromised); 16 | 17 | 18 | let fakeRepo: string; 19 | let sandbox: sinon.SinonSandbox; 20 | let fakeHash: string; 21 | let defaultOptions: GitLfsOptions; 22 | 23 | describe('git-lfs', () => { 24 | beforeEach(() => { 25 | mockfs({ 26 | '.veendor': { 27 | 'git-lfs.0': {} 28 | }, 29 | }); 30 | 31 | fakeRepo = 'git://fakehub.com/test/test.git'; 32 | fakeHash = '1234567890deadbeef1234567890'; 33 | sandbox = sinon.sandbox.create(); 34 | 35 | defaultOptions = { 36 | repo: fakeRepo, 37 | compression: 'gzip', 38 | defaultBranch: 'defaultBranchaster', 39 | checkLfsAvailability: false, 40 | }; 41 | 42 | gitLfs.setRemoteFreshness(false); 43 | 44 | sandbox.stub(gitWrapper, 'clone').resolves(); 45 | sandbox.stub(gitWrapper, 'fetch').resolves(); 46 | sandbox.stub(gitWrapper, 'checkout').resolves(); 47 | sandbox.stub(gitWrapper, 'add').resolves(); 48 | sandbox.stub(gitWrapper, 'commit').resolves(); 49 | sandbox.stub(gitWrapper, 'tag').resolves(); 50 | sandbox.stub(gitWrapper, 'push').resolves(); 51 | sandbox.stub(gitWrapper, 'isGitRepo').resolves(true); 52 | sandbox.stub(gitWrapper, 'resetToRemote').resolves(); 53 | sandbox.stub(gitWrapper, 'isGitLfsAvailable').resolves(); 54 | sandbox.stub(gitWrapper, 'lfsPull').resolves(); 55 | sandbox.stub(tarWrapper, 'createArchive').resolves(); 56 | sandbox.stub(tarWrapper, 'extractArchive').resolves(); 57 | }); 58 | 59 | afterEach(() => { 60 | mockfs.restore(); 61 | sandbox.restore(); 62 | }); 63 | 64 | describe('.pull', () => { 65 | it('clones repo to cache directory if isn\'t already there', async () => { 66 | // @ts-ignore 67 | gitWrapper.isGitRepo.restore(); gitWrapper.clone.restore(); 68 | const mock = sandbox.mock(gitWrapper); 69 | mock.expects('isGitRepo').resolves(false); 70 | mock.expects('clone').withArgs(fakeRepo, sinon.match('.veendor/git-lfs.0/repo')).resolves(''); 71 | 72 | await 
gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 73 | 74 | mock.verify(); 75 | }); 76 | 77 | it('runs `fetch` if repo already exists', async () => { 78 | // @ts-ignore 79 | gitWrapper.fetch.restore(); 80 | const mock = sandbox.mock(gitWrapper); 81 | mock.expects('fetch').withArgs(sinon.match('.veendor/git-lfs.0/repo')).resolves(''); 82 | 83 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 84 | mock.verify(); 85 | }); 86 | 87 | it('checks out tag by passed hash', async () => { 88 | // @ts-ignore 89 | gitWrapper.checkout.restore(); 90 | const mock = sandbox.mock(gitWrapper); 91 | mock.expects('checkout').withArgs(sinon.match('.veendor/git-lfs.0/repo'), 'veendor-' + fakeHash); 92 | 93 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0').catch(() => {}); 94 | mock.verify(); 95 | }); 96 | 97 | it('rejects with BundleNotFoundError if tag not found', () => { 98 | // @ts-ignore 99 | gitWrapper.checkout.restore(); 100 | sandbox.stub(gitWrapper, 'checkout').rejects(new Error); 101 | 102 | return assert.isRejected( 103 | gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'), errors.BundleNotFoundError 104 | ); 105 | }); 106 | 107 | it('does not run tar if tag not found', async () => { 108 | // @ts-ignore 109 | gitWrapper.checkout.restore(); tarWrapper.extractArchive.restore(); 110 | sandbox.stub(gitWrapper, 'checkout').rejects(new Error); 111 | 112 | const mock = sandbox.mock(tarWrapper); 113 | mock.expects('extractArchive').never(); 114 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0').catch(() => {}); 115 | 116 | mock.verify(); 117 | }); 118 | 119 | it('unpacks the archive to $(pwd)', async () => { 120 | // @ts-ignore 121 | tarWrapper.extractArchive.restore(); 122 | const mock = sandbox.mock(tarWrapper); 123 | mock.expects('extractArchive').withArgs(sinon.match(`.veendor/git-lfs.0/repo/${fakeHash}.tar.gz`)); 124 | 125 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 126 | mock.verify(); 127 | }); 128 | 129 | it('should run git fetch only once in a run', async () => { 130 | // @ts-ignore 131 | gitWrapper.fetch.restore(); 132 | const mock = sandbox.mock(gitWrapper); 133 | mock.expects('fetch').once().resolves(''); 134 | 135 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 136 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 137 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 138 | 139 | mock.verify(); 140 | }); 141 | 142 | it('should run git lfs pull if git lfs is available', async () => { 143 | // @ts-ignore 144 | gitWrapper.lfsPull.restore(); 145 | const mock = sandbox.mock(gitWrapper); 146 | mock.expects('lfsPull').withArgs(sinon.match('.veendor/git-lfs.0/repo')).resolves(''); 147 | 148 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 149 | mock.verify(); 150 | }); 151 | 152 | it('should not run git lfs pull if git lfs is not available', async () => { 153 | // @ts-ignore 154 | gitWrapper.isGitLfsAvailable.restore(); gitWrapper.lfsPull.restore(); 155 | const mock = sandbox.mock(gitWrapper); 156 | mock.expects('isGitLfsAvailable').rejects(new gitWrapper.GitLfsNotAvailableError); 157 | mock.expects('lfsPull').never(); 158 | 159 | await gitLfs.pull(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 160 | mock.verify(); 161 | }); 162 | }); 163 | 164 | describe('.push', () => { 165 | it('clones repo to cache directory if isn\'t already there', async () => { 166 | // @ts-ignore 167 | gitWrapper.clone.restore(); 168 | const mock = 
sandbox.mock(gitWrapper); 169 | mock.expects('clone').withArgs(fakeRepo, sinon.match('.veendor/git-lfs.0/repo')).resolves(''); 170 | 171 | await gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 172 | mock.verify(); 173 | }); 174 | 175 | it('runs `fetch` if repo already exists', async () => { 176 | mockfs({ 177 | '.veendor': { 178 | 'git-lfs.0': { 179 | repo: { 180 | '.git': {} 181 | } 182 | } 183 | }, 184 | }); 185 | 186 | // @ts-ignore 187 | gitWrapper.fetch.restore(); 188 | const mock = sandbox.mock(gitWrapper); 189 | mock.expects('fetch').withArgs(sinon.match('.veendor/git-lfs.0/repo')).resolves(''); 190 | 191 | await gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 192 | mock.verify(); 193 | }); 194 | 195 | it('checks out default branch', async () => { 196 | // @ts-ignore 197 | gitWrapper.checkout.restore(); 198 | const mock = sandbox.mock(gitWrapper); 199 | mock.expects('checkout') 200 | .withArgs(sinon.match('.veendor/git-lfs.0/repo'), defaultOptions.defaultBranch).resolves(''); 201 | 202 | await gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 203 | mock.verify(); 204 | }); 205 | 206 | it('resets branch to remote state', async () => { 207 | // @ts-ignore 208 | gitWrapper.resetToRemote.restore(); 209 | const mock = sandbox.mock(gitWrapper); 210 | mock.expects('resetToRemote') 211 | .withArgs(sinon.match('.veendor/git-lfs.0/repo'), defaultOptions.defaultBranch).resolves(''); 212 | 213 | await gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 214 | mock.verify(); 215 | }); 216 | 217 | it('archives node_modules', async () => { 218 | // @ts-ignore 219 | tarWrapper.createArchive.restore(); 220 | const mock = sandbox.mock(tarWrapper); 221 | mock.expects('createArchive') 222 | .withArgs( 223 | sinon.match(`.veendor/git-lfs.0/repo/${fakeHash}`), 224 | [path.resolve(process.cwd(), 'node_modules')], 225 | defaultOptions.compression 226 | ).resolves(''); 227 | 228 | await gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 229 | mock.verify(); 230 | }); 231 | 232 | it('adds archive to staging', async () => { 233 | // @ts-ignore 234 | gitWrapper.add.restore(); 235 | const mock = sandbox.mock(gitWrapper); 236 | mock.expects('add') 237 | .withArgs( 238 | sinon.match('.veendor/git-lfs.0/repo'), 239 | [sinon.match(`.veendor/git-lfs.0/repo/${fakeHash}.tar.gz`)] 240 | ).resolves(''); 241 | 242 | await gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 243 | mock.verify(); 244 | }); 245 | 246 | it('creates commit', async () => { 247 | // @ts-ignore 248 | gitWrapper.commit.restore(); 249 | const mock = sandbox.mock(gitWrapper); 250 | mock.expects('commit') 251 | .withArgs(sinon.match('.veendor/git-lfs.0/repo'), sinon.match.any).resolves(''); 252 | 253 | await gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 254 | mock.verify(); 255 | }); 256 | 257 | it('creates tag with hash name', async () => { 258 | // @ts-ignore 259 | gitWrapper.tag.restore(); 260 | const mock = sandbox.mock(gitWrapper); 261 | mock.expects('tag') 262 | .withArgs(sinon.match('.veendor/git-lfs.0/repo'), `veendor-${fakeHash}`).resolves(''); 263 | 264 | await gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 265 | mock.verify(); 266 | }); 267 | 268 | it('pushes tag', async () => { 269 | // @ts-ignore 270 | gitWrapper.push.restore(); 271 | const mock = sandbox.mock(gitWrapper); 272 | mock.expects('push') 273 | .withArgs(sinon.match('.veendor/git-lfs.0/repo'), `veendor-${fakeHash}`) 274 | .resolves(''); 275 | 276 | await gitLfs.push(fakeHash, 
defaultOptions, '.veendor/git-lfs.0'); 277 | mock.verify(); 278 | }); 279 | 280 | it('throws BundleAlreadyExistsError if git tag rejected with RefAlreadyExistsError', () => { 281 | // @ts-ignore 282 | gitWrapper.tag.restore(); 283 | sandbox.stub(gitWrapper, 'tag').rejects(new gitWrapper.RefAlreadyExistsError); 284 | 285 | const result = gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 286 | 287 | return assert.isRejected(result, errors.BundleAlreadyExistsError); 288 | }); 289 | 290 | it('throws BundleAlreadyExistsError if git push rejected with RefAlreadyExistsError', () => { 291 | // @ts-ignore 292 | gitWrapper.push.restore(); 293 | sandbox.stub(gitWrapper, 'push').rejects(new gitWrapper.RefAlreadyExistsError); 294 | 295 | const result = gitLfs.push(fakeHash, defaultOptions, '.veendor/git-lfs.0'); 296 | 297 | return assert.isRejected(result, errors.BundleAlreadyExistsError); 298 | }); 299 | }); 300 | 301 | describe('.validateOptions', () => { 302 | it('throws error if `repo` hasn\'t been passed', () => { 303 | delete defaultOptions.repo; 304 | 305 | return assert.isRejected(gitLfs.validateOptions(defaultOptions), errors.InvalidOptionsError); 306 | }); 307 | 308 | it('checks valid compression', () => { 309 | // @ts-ignore 310 | defaultOptions.compression = 'lsda'; 311 | 312 | return assert.isRejected(gitLfs.validateOptions(defaultOptions), errors.InvalidOptionsError); 313 | }); 314 | 315 | it('sets default compression type to `gzip`', async () => { 316 | delete defaultOptions.compression; 317 | 318 | await gitLfs.validateOptions(defaultOptions); 319 | assert.equal(defaultOptions.compression, 'gzip'); 320 | }); 321 | 322 | it('sets default default branch to `master`', async () => { 323 | delete defaultOptions.defaultBranch; 324 | 325 | await gitLfs.validateOptions(defaultOptions); 326 | assert.equal(defaultOptions.defaultBranch, 'master'); 327 | }); 328 | 329 | it('checks if checkLfsAvailability is boolean', () => { 330 | // @ts-ignore 331 | defaultOptions.checkLfsAvailability = 'test'; 332 | 333 | return assert.isRejected(gitLfs.validateOptions(defaultOptions), errors.InvalidOptionsError); 334 | }); 335 | 336 | it('sets default checkLfsAvailability to `false`', async () => { 337 | await gitLfs.validateOptions(defaultOptions); 338 | assert.equal(defaultOptions.checkLfsAvailability, false); 339 | }); 340 | 341 | it('rejects with `GitLfsNotAvailableError` if git lfs is not available ' + 342 | 'and `checkLfsAvailability` was set to \'true\'', () => { 343 | defaultOptions.checkLfsAvailability = true; 344 | 345 | // @ts-ignore 346 | gitWrapper.isGitLfsAvailable.restore(); 347 | sandbox.stub(gitWrapper, 'isGitLfsAvailable').rejects(new gitWrapper.GitLfsNotAvailableError); 348 | 349 | return assert.isRejected(gitLfs.validateOptions(defaultOptions), gitWrapper.GitLfsNotAvailableError); 350 | }) 351 | }) 352 | }); 353 | -------------------------------------------------------------------------------- /src/test/unit/backends/http.test.ts: -------------------------------------------------------------------------------- 1 | import {afterEach, beforeEach, describe, it} from 'mocha'; 2 | import chai from 'chai'; 3 | import chaiAsPromised from 'chai-as-promised'; 4 | import sinon from 'sinon'; 5 | import mockfs from 'mock-fs'; 6 | import fs from 'fs-extra'; 7 | import nock from 'nock'; 8 | import * as httpBackend from '@/lib/backends/http'; 9 | import * as tarWrapper from '@/lib/commandWrappers/tarWrapper'; 10 | import * as errors from '@/lib/errors'; 11 | import { 12 | 
makeFakeBackendToolsProvider, 13 | SuccessfulStream, 14 | FailingStream, 15 | fakeExtractArchiveFromStream, 16 | } from '../helpers'; 17 | import {HttpOptions} from '@/lib/backends/http'; 18 | 19 | const assert = chai.assert; 20 | chai.use(chaiAsPromised); 21 | 22 | let sandbox: sinon.SinonSandbox; 23 | let fakeHash: string; 24 | let defaultOptions: HttpOptions; 25 | let mockfsConfig; 26 | 27 | 28 | describe('http backend', () => { 29 | beforeEach(() => { 30 | fakeHash = '1234567890deadbeef1234567890'; 31 | 32 | mockfsConfig = { 33 | '.veendor': { 34 | 'http': {} 35 | }, 36 | }; 37 | 38 | mockfs(mockfsConfig); 39 | 40 | sandbox = sinon.sandbox.create(); 41 | 42 | sandbox 43 | .stub(tarWrapper, 'createArchive') 44 | .callsFake((outPath: string, _paths: string[], _compression: string) => { 45 | fs.writeFileSync(outPath, ''); 46 | return Promise.resolve(''); 47 | }); 48 | 49 | sandbox 50 | .stub(tarWrapper, 'extractArchiveFromStream') 51 | .callsFake(fakeExtractArchiveFromStream); 52 | 53 | defaultOptions = { 54 | resolveUrl: bundleId => `http://testhost.wat/${bundleId}.tar.gz`, 55 | compression: 'gzip', 56 | strict: false, 57 | }; 58 | 59 | if (!nock.isActive()) { 60 | nock.activate(); 61 | } 62 | 63 | nock.disableNetConnect(); 64 | }); 65 | 66 | afterEach(() => { 67 | mockfs.restore(); 68 | sandbox.restore(); 69 | nock.restore(); 70 | }); 71 | 72 | describe('pull', () => { 73 | it('should call `resolveUrl` function', async () => { 74 | nock('http://testhost.wat') 75 | .get(`/${fakeHash}.tar.gz`) 76 | .reply(200, 'wertyuiopasdfghj', {'Content-Type': 'application/x-gzip'}); 77 | const mock = sandbox.mock(defaultOptions); 78 | 79 | mock.expects('resolveUrl').withArgs(fakeHash).callThrough(); 80 | 81 | await httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 82 | mock.verify(); 83 | }); 84 | 85 | it('should call http.get with result of `resolveUrl`', async () => { 86 | const scope = nock('http://testhost.wat') 87 | .get(`/${fakeHash}.tar.gz`) 88 | .reply(200, 'wertyuiopasdfghj', {'Content-Type': 'application/x-gzip'}); 89 | 90 | await httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 91 | scope.done(); 92 | }); 93 | 94 | it('should use https if `resolveUrl` returns https-url', async () => { 95 | defaultOptions.resolveUrl = bundleId => `https://testhost.wat/${bundleId}.tar.gz`; 96 | 97 | const scope = nock('https://testhost.wat') 98 | .get(`/${fakeHash}.tar.gz`) 99 | .reply(200, 'wertyuiopasdfghj', {'Content-Type': 'application/x-gzip'}); 100 | 101 | await httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 102 | scope.done(); 103 | }); 104 | 105 | it('should call http.get with fulfillment of promise returned by `resolveUrl`', async () => { 106 | defaultOptions.resolveUrl = bundleId => Promise.resolve(`http://testhost.wat/${bundleId}.tar.gz`); 107 | 108 | const scope = nock('http://testhost.wat') 109 | .get(`/${fakeHash}.tar.gz`) 110 | .reply(200, 'wertyuiopasdfghj', {'Content-Type': 'application/x-gzip'}); 111 | 112 | await httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 113 | scope.done(); 114 | }); 115 | 116 | it('should reject with InvalidProtocolError if resolved url is not http/https', () => { 117 | defaultOptions.resolveUrl = bundleId => `ftp://testhost.wat/${bundleId}.tar.gz`; 118 | 119 | const result = httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 120 | return 
assert.isRejected(result, httpBackend.InvalidProtocolError); 121 | }); 122 | 123 | it('should pipe response stream to tar', () => { 124 | const bundleStream = new SuccessfulStream(); 125 | 126 | // @ts-ignore 127 | tarWrapper.extractArchiveFromStream.restore(); 128 | const tarWrapperMock = sandbox.mock(tarWrapper); 129 | tarWrapperMock.expects('extractArchiveFromStream') 130 | .callsFake(stream => fakeExtractArchiveFromStream(stream) 131 | .then(result => { 132 | assert.equal(result, ('wertyuiopasdfghjk').repeat(5)); 133 | })); 134 | 135 | nock('http://testhost.wat') 136 | .get(`/${fakeHash}.tar.gz`) 137 | .reply(200, bundleStream, {'Content-Type': 'application/x-gzip'}); 138 | 139 | return httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()) 140 | .then(() => tarWrapperMock.verify()); 141 | }); 142 | 143 | it('should reject with BundleNotFoundError on 404', () => { 144 | nock('http://testhost.wat') 145 | .get(`/${fakeHash}.tar.gz`) 146 | .reply(404); 147 | 148 | const result = httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 149 | return assert.isRejected(result, errors.BundleNotFoundError); 150 | }); 151 | 152 | it('should reject with BundleNotFoundError on non-200 if not in strict mode', () => { 153 | nock('http://testhost.wat') 154 | .get(`/${fakeHash}.tar.gz`) 155 | .reply(502); 156 | 157 | const result = httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 158 | return assert.isRejected(result, errors.BundleNotFoundError); 159 | }); 160 | 161 | it('should reject with InvalidStatusCodeError on non-200 if in strict mode', () => { 162 | defaultOptions.strict = true; 163 | nock('http://testhost.wat') 164 | .get(`/${fakeHash}.tar.gz`) 165 | .reply(502); 166 | 167 | const result = httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 168 | return assert.isRejected(result, httpBackend.InvalidStatusCodeError); 169 | }); 170 | 171 | it('should reject with BundleNotFoundError on stream fail if not in strict mode', () => { 172 | nock('http://testhost.wat') 173 | .get(`/${fakeHash}.tar.gz`) 174 | .reply(200, new FailingStream(), {'Content-Type': 'application/x-gzip'}); 175 | 176 | const result = httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()); 177 | return assert.isRejected(result, errors.BundleNotFoundError); 178 | }); 179 | 180 | it('should reject with BundleDownloadError on stream fail if in strict mode', () => { 181 | defaultOptions.strict = true; 182 | nock('http://testhost.wat') 183 | .get(`/${fakeHash}.tar.gz`) 184 | .reply(200, new FailingStream(), {'Content-Type': 'application/x-gzip'}); 185 | 186 | return assert.isRejected( 187 | httpBackend.pull(fakeHash, defaultOptions, '.veendor/http', makeFakeBackendToolsProvider()), 188 | httpBackend.BundleDownloadError 189 | ); 190 | }); 191 | }); 192 | 193 | describe('validateOptions', () => { 194 | it('checks valid compression', () => { 195 | // @ts-ignore 196 | defaultOptions.compression = 'lsda'; 197 | 198 | assert.throws(() => { 199 | httpBackend.validateOptions(defaultOptions); 200 | }, errors.InvalidOptionsError) 201 | }); 202 | 203 | it('sets default compression type to `gzip`', () => { 204 | delete defaultOptions.compression; 205 | 206 | httpBackend.validateOptions(defaultOptions); 207 | 208 | assert.equal(defaultOptions.compression, 'gzip'); 209 | }); 210 | 211 | it('sets strict option to `false`', () => { 212 | delete 
defaultOptions.strict; 213 | 214 | httpBackend.validateOptions(defaultOptions); 215 | 216 | assert.equal(defaultOptions.strict, false); 217 | }); 218 | 219 | it('should throw InvalidOptionsError if resolveUrl option is not provided', () => { 220 | delete defaultOptions.resolveUrl; 221 | assert.throws(() => { 222 | httpBackend.validateOptions(defaultOptions); 223 | }, errors.InvalidOptionsError) 224 | }); 225 | }); 226 | }); 227 | -------------------------------------------------------------------------------- /src/test/unit/backends/local.test.ts: -------------------------------------------------------------------------------- 1 | import {afterEach, beforeEach, describe, it} from 'mocha'; 2 | 3 | 4 | import chai from 'chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import sinon from 'sinon'; 7 | import mockfs from 'mock-fs'; 8 | import * as local from '@/lib/backends/local'; 9 | import * as tarWrapper from '@/lib/commandWrappers/tarWrapper'; 10 | import * as errors from '@/lib/errors'; 11 | 12 | const assert = chai.assert; 13 | chai.use(chaiAsPromised); 14 | let sandbox: sinon.SinonSandbox; 15 | let fakeHash: string; 16 | let defaultOptions: local.LocalOptions; 17 | let mockfsConfig: {[key: string]: {} | string}; 18 | 19 | describe('local', () => { 20 | beforeEach(() => { 21 | fakeHash = '1234567890deadbeef1234567890'; 22 | 23 | mockfsConfig = { 24 | '.veendor': { 25 | 'local': {} 26 | }, 27 | 'target': {} 28 | }; 29 | 30 | mockfsConfig[`target/${fakeHash}.tar.gz`] = 'somestuff'; 31 | 32 | mockfs(mockfsConfig); 33 | 34 | sandbox = sinon.sandbox.create(); 35 | sandbox.stub(tarWrapper, 'createArchive').resolves(); 36 | sandbox.stub(tarWrapper, 'extractArchive').resolves(); 37 | 38 | defaultOptions = { 39 | directory: 'target', 40 | compression: 'gzip' 41 | }; 42 | }); 43 | 44 | afterEach(() => { 45 | mockfs.restore(); 46 | sandbox.restore(); 47 | }); 48 | 49 | describe('pull', () => { 50 | it('should unpack archive to pwd', async () => { 51 | // @ts-ignore 52 | tarWrapper.extractArchive.restore(); 53 | const mock = sinon.mock(tarWrapper); 54 | mock.expects('extractArchive').withArgs(sinon.match(`target/${fakeHash}.tar.gz`)).resolves(''); 55 | 56 | await local.pull(fakeHash, defaultOptions); 57 | mock.verify(); 58 | }); 59 | 60 | it('should respect desired compression', async () => { 61 | defaultOptions.compression = 'xz'; 62 | mockfsConfig[`target/${fakeHash}.tar.xz`] = 'somestuff'; 63 | mockfs(mockfsConfig); 64 | 65 | // @ts-ignore 66 | tarWrapper.extractArchive.restore(); 67 | const mock = sinon.mock(tarWrapper); 68 | mock.expects('extractArchive').withArgs(sinon.match(`target/${fakeHash}.tar.xz`)).resolves(''); 69 | 70 | await local.pull(fakeHash, defaultOptions); 71 | mock.verify(); 72 | }); 73 | 74 | it('should not call tar if archive is not in target directory', async () => { 75 | delete mockfsConfig[`target/${fakeHash}.tar.gz`]; 76 | mockfs(mockfsConfig); 77 | 78 | // @ts-ignore 79 | tarWrapper.extractArchive.restore(); 80 | const mock = sinon.mock(tarWrapper); 81 | mock.expects('extractArchive').never(); 82 | 83 | await local.pull(fakeHash, defaultOptions).catch(() => {}); 84 | mock.verify(); 85 | }); 86 | 87 | it('should reject with \'BundleNotFoundError\' if archive is not in target directory', () => { 88 | delete mockfsConfig[`target/${fakeHash}.tar.gz`]; 89 | mockfs(mockfsConfig); 90 | 91 | return assert.isRejected(local.pull(fakeHash, defaultOptions), errors.BundleNotFoundError); 92 | }); 93 | }); 94 | 95 | describe('push', () => { 96 | it('should pack node_modules 
to target directory', async () => { 97 | delete mockfsConfig[`target/${fakeHash}.tar.gz`]; 98 | mockfs(mockfsConfig); 99 | 100 | // @ts-ignore 101 | tarWrapper.createArchive.restore(); 102 | const mock = sinon.mock(tarWrapper); 103 | 104 | mock.expects('createArchive').withArgs( 105 | sinon.match(`target/${fakeHash}.tar.gz`), 106 | [sinon.match('node_modules')], 107 | defaultOptions.compression 108 | ); 109 | 110 | await local.push(fakeHash, defaultOptions); 111 | mock.verify(); 112 | }); 113 | 114 | it('should reject with BundleAlreadyExistsError if bundle with that name already in directory', () => { 115 | return assert.isRejected(local.push(fakeHash, defaultOptions), errors.BundleAlreadyExistsError); 116 | }); 117 | }); 118 | 119 | describe('validateOptions', () => { 120 | it('checks valid compression', () => { 121 | // @ts-ignore 122 | defaultOptions.compression = 'lsda'; 123 | 124 | assert.throws(() => { 125 | local.validateOptions(defaultOptions); 126 | }, errors.InvalidOptionsError) 127 | }); 128 | 129 | it('sets default compression type to `gzip`', () => { 130 | delete defaultOptions.compression; 131 | 132 | local.validateOptions(defaultOptions); 133 | 134 | assert.equal(defaultOptions.compression, 'gzip'); 135 | }); 136 | 137 | it('should throw InvalidOptionsError if target directory doesn\'t exist', () => { 138 | delete mockfsConfig.target; 139 | delete mockfsConfig[`target/${fakeHash}.tar.gz`]; 140 | mockfs(mockfsConfig); 141 | 142 | assert.throws(() => { 143 | local.validateOptions(defaultOptions); 144 | }, errors.InvalidOptionsError) 145 | }); 146 | }); 147 | }); 148 | -------------------------------------------------------------------------------- /src/test/unit/backends/s3.test.ts: -------------------------------------------------------------------------------- 1 | import {afterEach, beforeEach, describe, it} from 'mocha'; 2 | 3 | 4 | import chai from 'chai'; 5 | import chaiAsPromised from 'chai-as-promised'; 6 | import sinon from 'sinon'; 7 | import mockfs from 'mock-fs'; 8 | import nock from 'nock'; 9 | import fs from 'fs'; 10 | import AWS from 'aws-sdk'; 11 | import {Stream} from 'stream'; 12 | 13 | 14 | import * as s3Backend from '@/lib/backends/s3'; 15 | import * as tarWrapper from '@/lib/commandWrappers/tarWrapper'; 16 | import * as errors from '@/lib/errors'; 17 | import { 18 | AnError, 19 | AWSError, 20 | SuccessfulStream, 21 | FailingStream, 22 | DevNullStream, 23 | fakeExtractArchiveFromStream, 24 | fakeCreateStreamArchive, 25 | makeFakeBackendToolsProvider 26 | } from '../helpers'; 27 | import {S3Options} from '@/lib/backends/s3'; 28 | 29 | const assert = chai.assert; 30 | chai.use(chaiAsPromised); 31 | 32 | let sandbox: sinon.SinonSandbox; 33 | let fakeHash: string; 34 | let defaultOptions: S3Options; 35 | let mockfsConfig; 36 | let bundleStream: NodeJS.ReadableStream; 37 | let fakeS3: AWS.S3; 38 | let fakeS3UploadError: Error | null; 39 | let fakeS3HeadResultPromise: Promise<{}> | null; 40 | let tarWrapperCreateArchiveStub: sinon.SinonStubbedMember<typeof tarWrapper.createStreamArchive>; 41 | let tarWrapperExtractArchiveFromStreamStub: sinon.SinonStubbedMember<typeof tarWrapper.extractArchiveFromStream>; 42 | 43 | 44 | describe('s3 backend', () => { 45 | before(() => { 46 | // AWS uses dynamic require's, so we'll populate require cache to be able to use mockfs later 47 | fs.readdirSync('node_modules/aws-sdk/apis') 48 | .filter(file => file.endsWith('.json')) 49 | .map(file => require(`aws-sdk/apis/${file}`)); 50 | }); 51 | 52 | beforeEach(() => { 53 | fakeHash = '1234567890deadbeef1234567890'; 54 | 55 | mockfsConfig = { 56 | '.veendor': { 57 | 
's3': {} 58 | }, 59 | }; 60 | 61 | mockfs(mockfsConfig); 62 | 63 | sandbox = sinon.sandbox.create(); 64 | bundleStream = new SuccessfulStream(); 65 | 66 | fakeS3UploadError = null; 67 | fakeS3HeadResultPromise = Promise.resolve({ 68 | AcceptRanges: 'bytes', 69 | LastModified: new Date(), 70 | ContentLength: 5552, 71 | ETag: '"751d74b0c8051a560243092d2d5a53df"', 72 | ContentType: 'application/octet-stream', 73 | Metadata: {}, 74 | }); 75 | 76 | 77 | fakeS3 = { 78 | // @ts-ignore 79 | getObject() { 80 | return { 81 | createReadStream() { 82 | return bundleStream; 83 | } 84 | }; 85 | }, 86 | // @ts-ignore 87 | upload(params: {Body: NodeJS.ReadableStream}) { 88 | params.Body.pipe(new DevNullStream()); 89 | 90 | return { 91 | promise() { 92 | return new Promise((resolve, reject) => { 93 | if (fakeS3UploadError === null) { 94 | params.Body.on('end', () => resolve()); 95 | } else { 96 | params.Body.on('end', () => reject(fakeS3UploadError)); 97 | } 98 | }); 99 | } 100 | } 101 | }, 102 | // @ts-ignore 103 | headObject() { 104 | return { 105 | promise() { 106 | if (fakeS3HeadResultPromise === null) { 107 | return Promise.reject(new AWSError('some error', 'NotFound', 404)); 108 | } else { 109 | return fakeS3HeadResultPromise; 110 | } 111 | } 112 | } 113 | } 114 | }; 115 | 116 | tarWrapperCreateArchiveStub = sandbox.stub(tarWrapper, 'createStreamArchive') 117 | .callsFake(fakeCreateStreamArchive); 118 | 119 | tarWrapperExtractArchiveFromStreamStub = sandbox.stub(tarWrapper, 'extractArchiveFromStream') 120 | .callsFake(fakeExtractArchiveFromStream); 121 | 122 | defaultOptions = { 123 | s3Options: { 124 | endpoint: 'http://localhost:12345' 125 | }, 126 | bucket: 'mybucket', 127 | objectAcl: 'authenticated-read', 128 | compression: 'gzip', 129 | __s3: fakeS3, 130 | }; 131 | 132 | if (!nock.isActive()) { 133 | nock.activate(); 134 | } 135 | 136 | nock.disableNetConnect(); 137 | }); 138 | 139 | afterEach(() => { 140 | mockfs.restore(); 141 | sandbox.restore(); 142 | nock.restore(); 143 | }); 144 | 145 | describe('pull', () => { 146 | it('calls s3.getObject with bucket name and hash + compression as key', () => { 147 | const s3Mock = sandbox.mock(defaultOptions.__s3); 148 | 149 | s3Mock.expects('getObject').withArgs({ 150 | Bucket: 'mybucket', 151 | Key: `${fakeHash}.tar.gz`, 152 | }).callThrough(); 153 | 154 | return s3Backend.pull(fakeHash, defaultOptions, '.veendor/s3', makeFakeBackendToolsProvider()) 155 | .then(() => s3Mock.verify()); 156 | }); 157 | 158 | it('should pipe response stream to tar', () => { 159 | tarWrapperCreateArchiveStub.restore(); 160 | tarWrapperExtractArchiveFromStreamStub.restore(); 161 | const tarWrapperMock = sandbox.mock(tarWrapper); 162 | tarWrapperMock.expects('extractArchiveFromStream') 163 | .withArgs(sinon.match.instanceOf(Stream)) 164 | .callsFake(fakeExtractArchiveFromStream); 165 | 166 | return s3Backend.pull(fakeHash, defaultOptions, '.veendor/s3', makeFakeBackendToolsProvider()) 167 | .then(() => tarWrapperMock.verify()); 168 | }); 169 | 170 | it('should reject with BundleDownloadError if stream fails', () => { 171 | bundleStream = new FailingStream(); 172 | 173 | return assert.isRejected( 174 | s3Backend.pull(fakeHash, defaultOptions, '.veendor/s3', makeFakeBackendToolsProvider()), 175 | s3Backend.BundleDownloadError 176 | ); 177 | }); 178 | 179 | it('should reject with BundleNotFoundError if stream fails with NoSuchKey', () => { 180 | bundleStream = new FailingStream(new AWSError('The specified key does not exist.', 'NoSuchKey', 404)); 181 | 182 | return 
assert.isRejected( 183 | s3Backend.pull(fakeHash, defaultOptions, '.veendor/s3', makeFakeBackendToolsProvider()), 184 | errors.BundleNotFoundError 185 | ); 186 | }); 187 | }); 188 | 189 | describe('push', () => { 190 | it('should call s3.headObject', () => { 191 | fakeS3HeadResultPromise = Promise.resolve({ 192 | AcceptRanges: 'bytes', 193 | LastModified: new Date(), 194 | ContentLength: 5552, 195 | ETag: '"751d74b0c8051a560243092d2d5a53df"', 196 | ContentType: 'application/octet-stream', 197 | Metadata: {}, 198 | }); 199 | 200 | const s3Mock = sandbox.mock(defaultOptions.__s3); 201 | 202 | s3Mock.expects('headObject').withArgs({ 203 | Bucket: 'mybucket', 204 | Key: `${fakeHash}.tar.gz`, 205 | }).callThrough(); 206 | 207 | s3Mock.expects('upload').never(); 208 | 209 | return s3Backend 210 | .push(fakeHash, defaultOptions, '.veendor/s3', makeFakeBackendToolsProvider()) 211 | .catch(() => s3Mock.verify()); 212 | }); 213 | 214 | it('should reject with BundleAlreadyExistsError if object exists', () => { 215 | fakeS3HeadResultPromise = Promise.resolve({ 216 | AcceptRanges: 'bytes', 217 | LastModified: new Date(), 218 | ContentLength: 5552, 219 | ETag: '"751d74b0c8051a560243092d2d5a53df"', 220 | ContentType: 'application/octet-stream', 221 | Metadata: {}, 222 | }); 223 | 224 | return assert.isRejected( 225 | s3Backend.push(fakeHash, defaultOptions, '.veendor/s3', makeFakeBackendToolsProvider()), 226 | errors.BundleAlreadyExistsError 227 | ); 228 | }); 229 | 230 | it('should create streamArchive and call s3.upload with it', () => { 231 | fakeS3HeadResultPromise = null; 232 | const controlToken = {}; 233 | fakeCreateStreamArchive(['node_modules'], 'gzip', {controlToken}); 234 | 235 | const s3Mock = sandbox.mock(defaultOptions.__s3); 236 | 237 | s3Mock.expects('upload').withArgs({ 238 | Bucket: 'mybucket', 239 | Key: `${fakeHash}.tar.gz`, 240 | ACL: defaultOptions.objectAcl, 241 | Body: sinon.match.instanceOf(Stream), 242 | }).callThrough(); 243 | 244 | return s3Backend 245 | .push(fakeHash, defaultOptions, '.veendor/s3', makeFakeBackendToolsProvider()) 246 | .then(() => s3Mock.verify()); 247 | }); 248 | 249 | it('should reject with BundleUploadError if s3 upload fails', () => { 250 | fakeS3HeadResultPromise = null; 251 | fakeS3UploadError = new AnError('wat'); 252 | 253 | return assert.isRejected( 254 | s3Backend.push(fakeHash, defaultOptions, '.veendor/s3', makeFakeBackendToolsProvider()), 255 | s3Backend.BundleUploadError 256 | ); 257 | }); 258 | }); 259 | 260 | describe('validateOptions', () => { 261 | beforeEach(() => { 262 | delete defaultOptions.__s3; 263 | }); 264 | 265 | it('checks valid compression', () => { 266 | // @ts-ignore 267 | defaultOptions.compression = 'lsda'; 268 | 269 | assert.throws(() => { 270 | s3Backend.validateOptions(defaultOptions); 271 | }, errors.InvalidOptionsError) 272 | }); 273 | 274 | it('checks valid bucket name', () => { 275 | delete defaultOptions.bucket; 276 | 277 | assert.throws(() => { 278 | s3Backend.validateOptions(defaultOptions); 279 | }, errors.InvalidOptionsError) 280 | }); 281 | 282 | it('sets default compression type to `gzip`', () => { 283 | delete defaultOptions.compression; 284 | 285 | s3Backend.validateOptions(defaultOptions); 286 | 287 | assert.equal(defaultOptions.compression, 'gzip'); 288 | }); 289 | 290 | it('sets default objectACL to `public-read`', () => { 291 | delete defaultOptions.objectAcl; 292 | 293 | s3Backend.validateOptions(defaultOptions); 294 | 295 | assert.equal(defaultOptions.objectAcl, 'public-read'); 296 | }); 297 | 298 | 
it('creates s3Options object, if not passed', () => { 299 | delete defaultOptions.s3Options; 300 | 301 | s3Backend.validateOptions(defaultOptions); 302 | 303 | assert.isObject(defaultOptions.s3Options); 304 | }); 305 | 306 | it('creates AWS instance with passed AWS options and fixed API version', () => { 307 | const awsMock = sandbox.mock(AWS); 308 | // @ts-ignore 309 | defaultOptions.s3Options = {foo: 'bar'}; 310 | 311 | awsMock.expects('S3').withArgs({ 312 | foo: 'bar', 313 | apiVersion: '2006-03-01', 314 | }).returns(fakeS3); 315 | 316 | s3Backend.validateOptions(defaultOptions); 317 | 318 | awsMock.verify(); 319 | assert.equal(defaultOptions.__s3, fakeS3); 320 | }); 321 | }); 322 | }); 323 | -------------------------------------------------------------------------------- /src/test/unit/deepSortedJson.test.ts: -------------------------------------------------------------------------------- 1 | const {transform} = require('@/lib/deepSortedJson'); 2 | const assert = require('chai').assert; 3 | 4 | describe('transform', () => { 5 | it('should return concatenated strings for all keys and add = for values', () => { 6 | const result = transform({ 7 | a: { 8 | b: { 9 | c: { 10 | d: 'e' 11 | } 12 | }, 13 | f: { 14 | g: 'h' 15 | } 16 | } 17 | }); 18 | 19 | assert.deepEqual(result, [ 20 | 'a.b.c.d=e', 21 | 'a.f.g=h', 22 | ]); 23 | }); 24 | 25 | it('should return sorted array', () => { 26 | const result = transform({ 27 | a: { 28 | f: { 29 | c: { 30 | d: 'e' 31 | } 32 | }, 33 | b: { 34 | g: 'h' 35 | } 36 | } 37 | }); 38 | 39 | assert.deepEqual(result, [ 40 | 'a.b.g=h', 41 | 'a.f.c.d=e', 42 | ]); 43 | }); 44 | 45 | it('should not add = for empty objects', () => { 46 | const result = transform({ 47 | a: { 48 | f: { 49 | c: { 50 | d: {} 51 | } 52 | }, 53 | b: { 54 | g: 'h' 55 | } 56 | } 57 | }); 58 | 59 | assert.deepEqual(result, [ 60 | 'a.b.g=h', 61 | 'a.f.c.d', 62 | ]); 63 | }); 64 | 65 | it('should add [index] for array contents', () => { 66 | const result = transform({ 67 | a: { 68 | f: [ 69 | 'foo', 70 | 'bar', 71 | 'baz', 72 | ], 73 | } 74 | }); 75 | 76 | assert.deepEqual(result, [ 77 | 'a.f[0]=foo', 78 | 'a.f[1]=bar', 79 | 'a.f[2]=baz', 80 | ]); 81 | }); 82 | }); 83 | -------------------------------------------------------------------------------- /src/test/unit/helpers.ts: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | import fsExtra from 'fs-extra'; 3 | import path from 'path'; 4 | import stream from 'stream'; 5 | import sinon from 'sinon'; 6 | import nock from 'nock'; 7 | 8 | import {getLogger} from '@/lib/util/logger'; 9 | import * as wrapperHelpers from '@/lib/commandWrappers/helpers'; 10 | import {ProgressStream} from '@/lib/util/progress'; 11 | import {BackendConfig} from '@/types'; 12 | import {Compression} from '@/lib/commandWrappers/tarWrapper'; 13 | 14 | 15 | export function fakeBackendConfig(alias: string): BackendConfig { 16 | return { 17 | alias, 18 | options: {}, 19 | backend: { 20 | pull: () => createNodeModules(), 21 | push: () => Promise.resolve(), 22 | validateOptions: () => Promise.resolve(), 23 | } 24 | } 25 | } 26 | 27 | export function createNodeModules(): Promise<void> { 28 | return fsExtra.ensureDir(path.join(process.cwd(), 'node_modules')) 29 | .then(() => fsExtra.writeFile( 30 | path.join(process.cwd(), 'node_modules', 'foobar'), 31 | 'deadbeef' 32 | )); 33 | } 34 | 35 | export function notifyAssert(assertion: () => void, done: (e?: Error) => void) { 36 | try { 37 | assertion(); 38 | done(); 39 | } catch (e) { 
done(e); 41 | } 42 | } 43 | 44 | export function checkMockResult(mocks: sinon.SinonMock[], done: (e?: Error) => void, error?: Error) { 45 | if (error && error.name === 'ExpectationError') { 46 | return done(error); 47 | } 48 | 49 | try { 50 | mocks.map(mock => mock.verify()); 51 | } catch (error) { 52 | return done(error); 53 | } 54 | 55 | done(); 56 | } 57 | 58 | export function checkNock(scopes: nock.Scope[], done: (err?: Error) => void) { 59 | try { 60 | scopes.map(scope => scope.done()); 61 | } catch (error) { 62 | return done(error); 63 | } 64 | 65 | done(); 66 | } 67 | 68 | 69 | // This class is used as `generic error`, 70 | // if we want to test error propagation 71 | export class AnError extends Error {} 72 | 73 | // Simulate AWSError; mirrors the shape of errors thrown by the AWS SDK 74 | export class AWSError extends Error { 75 | statusCode?: number; 76 | code: string; 77 | constructor(message: string, code: string, statusCode?: number) { 78 | super(); 79 | this.message = message; 80 | this.statusCode = statusCode; 81 | this.code = code; 82 | this.name = code; 83 | } 84 | } 85 | 86 | // A stream to simulate download error 87 | export class FailingStream extends stream.Readable { 88 | turn: number; 89 | failError: Error; 90 | constructor(failError = new AnError('read error')) { 91 | super(); 92 | this.turn = 0; 93 | this.failError = failError; 94 | } 95 | _read() { 96 | if (this.turn < 5) { 97 | this.turn++; 98 | setImmediate(() => { 99 | this.push('wertyuiopasdfghjk'); 100 | }); 101 | } else { 102 | this.emit('error', this.failError); 103 | this.push(null); 104 | } 105 | } 106 | } 107 | 108 | export class SuccessfulStream extends stream.Readable { 109 | turn: number; 110 | 111 | constructor() { 112 | super(); 113 | this.turn = 0; 114 | } 115 | 116 | _read() { 117 | if (this.turn < 5) { 118 | this.turn++; 119 | setImmediate(() => { 120 | this.push('wertyuiopasdfghjk'); 121 | }); 122 | } else { 123 | this.push(null); 124 | } 125 | } 126 | } 127 | 128 | export class DevNullStream extends stream.Writable { 129 | _write(_chunk: any, _encoding: string, callback: () => void) { 130 | callback(); 131 | } 132 | } 133 | 134 | export function fakeExtractArchiveFromStream(stream: NodeJS.ReadableStream): Promise<string> { 135 | const allchunks: string[] = []; 136 | let interval: NodeJS.Timeout; 137 | return new Promise<string>(resolve => { 138 | stream.read(); 139 | interval = setInterval(() => { 140 | const chunk = stream.read(); 141 | if (chunk === null) { 142 | clearInterval(interval); 143 | return resolve(allchunks.join('')); 144 | } else { 145 | allchunks.push(chunk.toString()); 146 | } 147 | }, 10); 148 | }); 149 | } 150 | 151 | export function fakeCreateStreamArchive(_inputPaths: string[], _compressionType: Compression, _params: {}): { 152 | stream: NodeJS.ReadableStream, promise: Promise<string> 153 | } { 154 | return { 155 | stream: new SuccessfulStream(), 156 | promise: Promise.resolve(''), 157 | }; 158 | } 159 | 160 | export function mockGetOutput(sandbox: sinon.SinonSandbox) { 161 | sandbox.stub(wrapperHelpers, 'getOutput').callsFake((executable, args) => { 162 | const commandName = `[${executable} ${args.join(' ')}]`; 163 | 164 | console.error(`${commandName} is being executed! 
Looks like someone doesn't mock the env properly`); 165 | return Promise.reject(new Error('waaat')); 166 | }); 167 | } 168 | 169 | export function makeFakeBackendToolsProvider() { 170 | return { 171 | getLogger() { 172 | return getLogger(); 173 | }, 174 | 175 | getProgressStream(label?: string, total?: number) { 176 | return new ProgressStream({}, label || 'wat', {}, total); 177 | }, 178 | }; 179 | } 180 | -------------------------------------------------------------------------------- /src/test/unit/install/helpers.test.ts: -------------------------------------------------------------------------------- 1 | import {afterEach, beforeEach, describe, it} from 'mocha'; 2 | 3 | import chai from 'chai'; 4 | import chaiAsPromised from 'chai-as-promised'; 5 | import sinon from 'sinon'; 6 | import mockfs from 'mock-fs'; 7 | import fsExtra from 'fs-extra'; 8 | import path from 'path'; 9 | import os from 'os'; 10 | import crypto from 'crypto'; 11 | import * as helpers from '../helpers'; 12 | import {createCleanCacheDir} from '@/lib/install/helpers'; 13 | import {BackendConfig} from '@/types'; 14 | 15 | const assert = chai.assert; 16 | chai.use(chaiAsPromised); 17 | 18 | const FAKE_HASH = '1234567890deadbeef1234567890'; 19 | 20 | let sandbox: sinon.SinonSandbox; 21 | let fakeBackend: BackendConfig; 22 | let fakeSha1; 23 | 24 | describe('createCleanCacheDir', () => { 25 | beforeEach(() => { 26 | sandbox = sinon.sandbox.create(); 27 | mockfs({}); 28 | fakeBackend = helpers.fakeBackendConfig('fakeBackends[0]'); 29 | 30 | fakeSha1 = { 31 | update: () => {}, 32 | digest: () => FAKE_HASH, 33 | }; 34 | 35 | // @ts-ignore 36 | sandbox.stub(crypto, 'createHash').returns(fakeSha1); 37 | }); 38 | 39 | afterEach(() => { 40 | mockfs.restore(); 41 | sandbox.restore(); 42 | }); 43 | 44 | it('creates new cache dir', () => { 45 | return createCleanCacheDir(fakeBackend).then(dir => { 46 | assert(fsExtra.statSync(dir).isDirectory()); 47 | }); 48 | }); 49 | 50 | it('cleans cache directory if one already exists', async () => { 51 | let dir = await createCleanCacheDir(fakeBackend); 52 | await fsExtra.writeFile(path.join(dir, 'foo'), 'bar'); 53 | dir = await createCleanCacheDir(fakeBackend); 54 | 55 | return assert.throws( 56 | () => fsExtra.statSync((path.join(dir, 'foo'))), 57 | 'ENOENT' 58 | ); 59 | }); 60 | 61 | it('doesn\'t clean cache directory if backend has keepCache == true option', async () => { 62 | fakeBackend.backend.keepCache = true; 63 | 64 | 65 | let dir = await createCleanCacheDir(fakeBackend); 66 | await fsExtra.writeFile(path.join(dir, 'foo'), 'bar'); 67 | dir = await createCleanCacheDir(fakeBackend); 68 | 69 | assert.equal(fsExtra.readFileSync(path.join(dir, 'foo')).toString(), 'bar'); 70 | }); 71 | 72 | it('creates cache directory in os.tmpdir() if it can', () => { 73 | const tmpDir = os.tmpdir(); 74 | 75 | return createCleanCacheDir(fakeBackend).then(dir => assert.match(dir, new RegExp(`^${tmpDir}`))); 76 | }); 77 | 78 | it('contains hash of process.cwd() in tmpdir name', () => { 79 | return createCleanCacheDir(fakeBackend).then(dir => assert.include(dir, FAKE_HASH)); 80 | }); 81 | }); 82 | -------------------------------------------------------------------------------- /src/test/unit/install/pushBackends.test.ts: -------------------------------------------------------------------------------- 1 | import {afterEach, beforeEach, describe, it} from 'mocha'; 2 | import chai from 'chai'; 3 | import chaiAsPromised from 'chai-as-promised'; 4 | import sinon from 'sinon'; 5 | import mockfs from 'mock-fs'; 6 | 
import fsExtra from 'fs-extra'; 7 | 8 | import {pushBackends} from '@/lib/install/pushBackends'; 9 | import * as errors from '@/lib/errors'; 10 | 11 | import * as helpers from '../helpers'; 12 | import {BackendConfig, PkgJson} from '@/types'; 13 | 14 | const assert = chai.assert; 15 | chai.use(chaiAsPromised); 16 | 17 | let PKGJSON: PkgJson; 18 | 19 | describe('pushBackends', function () { 20 | let sandbox: sinon.SinonSandbox; 21 | let fakeBackends: BackendConfig[]; 22 | const fakeSha1 = '1234567890deadbeef1234567890'; 23 | 24 | beforeEach(function () { 25 | sandbox = sinon.sandbox.create(); 26 | fakeBackends = [helpers.fakeBackendConfig('fakeBackends[0]'), helpers.fakeBackendConfig('fakeBackends[1]')]; 27 | fakeBackends[0].backend.pull = () => Promise.reject(new errors.BundleNotFoundError); 28 | fakeBackends[0].push = true; 29 | 30 | PKGJSON = { 31 | dependencies: { 32 | foo: '2.2.8', 33 | c: '2.2.9' 34 | }, 35 | devDependencies: { 36 | baz: '6.6.6' 37 | } 38 | }; 39 | 40 | }); 41 | 42 | afterEach(function () { 43 | sandbox.restore(); 44 | }); 45 | 46 | it('failing to push on backends with pushMayFail === true should be ignored', () => { 47 | fakeBackends[0].backend.push = () => Promise.reject(new helpers.AnError()); 48 | fakeBackends[0].pushMayFail = true; 49 | 50 | return assert.isFulfilled(pushBackends(fakeBackends, fakeSha1)); 51 | }); 52 | 53 | it('failing to push on backends without pushMayFail === true should reject install', () => { 54 | fakeBackends[0].backend.push = () => Promise.reject(new helpers.AnError()); 55 | 56 | return assert.isRejected(pushBackends(fakeBackends, fakeSha1), helpers.AnError); 57 | }); 58 | 59 | it('should not clear node_modules/.cache if `clearSharedCache` is not passed', async () => { 60 | mockfs({ 61 | 'package.json': JSON.stringify(PKGJSON), 62 | 'node_modules': { 63 | 'left-pad': { 64 | 'package.json': '{"a": "b"}', 65 | }, 66 | '.cache': { 67 | 'some': 'garbage', 68 | } 69 | } 70 | }); 71 | 72 | fakeBackends[0].backend.push = () => fsExtra 73 | .stat('node_modules/.cache') 74 | .then( 75 | () => assert(true, 'cache is not cleared before push'), 76 | () => assert(false, 'cache is cleared before push'), 77 | ); 78 | 79 | await pushBackends(fakeBackends, fakeSha1); 80 | }); 81 | it('should clear node_modules/.cache if `clearSharedCache` is passed', async () => { 82 | mockfs({ 83 | 'package.json': JSON.stringify(PKGJSON), 84 | 'node_modules': { 85 | 'left-pad': { 86 | 'package.json': '{"a": "b"}', 87 | }, 88 | '.cache': { 89 | 'some': 'garbage', 90 | } 91 | } 92 | }); 93 | 94 | fakeBackends[0].backend.push = () => fsExtra 95 | .stat('node_modules/.cache') 96 | .then( 97 | () => assert(false, 'cache is not cleared before push'), 98 | () => assert(true, 'cache is cleared before push'), 99 | ); 100 | 101 | await pushBackends(fakeBackends, fakeSha1, false, true); 102 | }); 103 | }); 104 | -------------------------------------------------------------------------------- /src/test/unit/pkgjson.test.ts: -------------------------------------------------------------------------------- 1 | import {afterEach, beforeEach, describe, it} from 'mocha'; 2 | 3 | import chai from 'chai'; 4 | import sinon from 'sinon'; 5 | import crypto from 'crypto'; 6 | 7 | import {calcHash} from '@/lib/pkgjson'; 8 | import * as deepSortedJson from '@/lib/deepSortedJson'; 9 | 10 | const {assert} = chai; 11 | 12 | describe('pkgjson', () => { 13 | describe('#calcHash', () => { 14 | const PKGJSON_CONTENTS = { 15 | dependencies: { 16 | a: '666', 17 | b: '^228' 18 | }, 19 | 
devDependencies: { 20 | c: '1.4.88', 21 | d: '^0.0.1' 22 | }, 23 | otherField: { 24 | field: 'value' 25 | } 26 | }; 27 | 28 | const LOCKFILE_CONTENTS = { 29 | name: 'wat', 30 | dependencies: { 31 | a: {version: '666'}, 32 | b: {version: '^228'}, 33 | c: {version: '1.4.88'}, 34 | d: {version: '^0.0.1'}, 35 | }, 36 | otherField: { 37 | field: 'value', 38 | } 39 | }; 40 | 41 | const FAKE_HASH = '1234567890deadbeef1234567890'; 42 | 43 | let fakeSha1: { 44 | update(): void, 45 | digest(): string, 46 | }; 47 | let sandbox: sinon.SinonSandbox; 48 | 49 | beforeEach(function () { 50 | sandbox = sinon.sandbox.create(); 51 | 52 | fakeSha1 = { 53 | update: () => {}, 54 | digest: () => FAKE_HASH 55 | }; 56 | 57 | // @ts-ignore 58 | sandbox.stub(crypto, 'createHash').callsFake(() => fakeSha1); 59 | 60 | sandbox.stub(deepSortedJson, 'transform') 61 | .callsFake(data => { 62 | if (data === LOCKFILE_CONTENTS) { 63 | return ['lockfile.b.c=d']; 64 | } 65 | 66 | return ['a.b.c=d']; 67 | }); 68 | }); 69 | 70 | afterEach(() => { 71 | sandbox.restore(); 72 | }); 73 | 74 | it('should create SHA1 hash', () => { 75 | calcHash(PKGJSON_CONTENTS); 76 | // @ts-ignore ts-sinon does a very bad job here 77 | assert(crypto.createHash.calledWith('sha1'), 'crypto.createHash(\'sha1\') hasn\'t been called'); 78 | }); 79 | 80 | it('should call deepSortedJson with deps and dev-deps from pkgjson', () => { 81 | // @ts-ignore 82 | deepSortedJson.transform.restore(); 83 | const mock = sandbox.mock(deepSortedJson); 84 | mock.expects('transform') 85 | .withArgs({ 86 | dependencies: PKGJSON_CONTENTS.dependencies, 87 | devDependencies: PKGJSON_CONTENTS.devDependencies, 88 | }) 89 | .returns(['a.b.c=d']); 90 | 91 | calcHash(PKGJSON_CONTENTS); 92 | 93 | mock.verify(); 94 | }); 95 | 96 | it('should call deepSortedJson with lockfile contents', () => { 97 | // @ts-ignore 98 | deepSortedJson.transform.restore(); 99 | const mock = sandbox.mock(deepSortedJson); 100 | 101 | mock.expects('transform') 102 | .withArgs({ 103 | dependencies: PKGJSON_CONTENTS.dependencies, 104 | devDependencies: PKGJSON_CONTENTS.devDependencies, 105 | }) 106 | .returns(['a.b.c=d']); 107 | 108 | mock.expects('transform') 109 | .withArgs(LOCKFILE_CONTENTS) 110 | .returns(['lockfile.b.c=d']); 111 | 112 | calcHash(PKGJSON_CONTENTS, LOCKFILE_CONTENTS); 113 | 114 | mock.verify(); 115 | }); 116 | 117 | it('should call SHA1 update with results of deepSortedJson', () => { 118 | const mock = sandbox.mock(fakeSha1) 119 | .expects('update') 120 | .withArgs( 121 | 'a.b.c=d\n' + 122 | 'lockfile.b.c=d' 123 | ); 124 | 125 | calcHash(PKGJSON_CONTENTS, LOCKFILE_CONTENTS); 126 | 127 | mock.verify(); 128 | }); 129 | 130 | it('should return result of SHA1 digest', function () { 131 | const result = calcHash(PKGJSON_CONTENTS); 132 | 133 | assert.equal(result, FAKE_HASH); 134 | }); 135 | 136 | it('should add string suffixes', () => { 137 | const result = calcHash(PKGJSON_CONTENTS, null, {suffix: 'test'}); 138 | 139 | assert.equal(result, FAKE_HASH + '-test'); 140 | }); 141 | 142 | it('should add function suffixes', () => { 143 | const result = calcHash(PKGJSON_CONTENTS, null, {suffix: () => 'test'}); 144 | 145 | assert.equal(result, FAKE_HASH + '-test'); 146 | }); 147 | }); 148 | }); 149 | -------------------------------------------------------------------------------- /src/test/unit/validateConfig.test.ts: -------------------------------------------------------------------------------- 1 | import {afterEach, beforeEach, describe, it} from 'mocha'; 2 | import sinon from 'sinon'; 3 | import
chaiAsPromised from 'chai-as-promised'; 4 | import chai from 'chai'; 5 | 6 | import {invariant} from '@/types'; 7 | 8 | import { 9 | AliasesNotUniqueError, 10 | EmptyBackendAliasError, 11 | InvalidBackendOptionError, 12 | PartialConfig 13 | } from '@/lib/validateConfig'; 14 | import * as npmWrapper from '@/lib/commandWrappers/npmWrapper'; 15 | 16 | import validateConfig, { 17 | EmptyBackendsPropertyError, 18 | InvalidVeendorVersionError, 19 | InvalidNpmVersionError, 20 | InvalidBackendError, 21 | InvalidUseGitHistoryError, 22 | } from '@/lib/validateConfig'; 23 | import * as helpers from './helpers'; 24 | 25 | 26 | const assert = chai.assert; 27 | chai.use(chaiAsPromised); 28 | let config: PartialConfig = { 29 | backends: [helpers.fakeBackendConfig('first'),helpers.fakeBackendConfig('second')] 30 | }; 31 | let sandbox: sinon.SinonSandbox; 32 | 33 | describe('validateConfig', function () { 34 | beforeEach(() => { 35 | config = { 36 | backends: [helpers.fakeBackendConfig('first'),helpers.fakeBackendConfig('second')] 37 | }; 38 | 39 | sandbox = sinon.sandbox.create(); 40 | 41 | const veendorVersion = require('../../../package.json').version; 42 | global.VEENDOR_VERSION = veendorVersion; 43 | }); 44 | 45 | afterEach(() => { 46 | sandbox.restore(); 47 | }); 48 | 49 | it('should reject with EmptyBackendsPropertyError if config does not contain \'backends\' section', () => { 50 | delete config.backends; 51 | 52 | return assert.isRejected(validateConfig(config), EmptyBackendsPropertyError); 53 | }); 54 | 55 | it('should throw error if \'backends\' section is empty', () => { 56 | config.backends = []; 57 | 58 | return assert.isRejected(validateConfig(config), EmptyBackendsPropertyError); 59 | }); 60 | 61 | it('should check whether backends have pull functions', () => { 62 | invariant(config.backends); 63 | delete config.backends[0].backend.pull; 64 | 65 | return assert.isRejected(validateConfig(config), InvalidBackendError); 66 | }); 67 | 68 | it('should check whether backends have push functions', () => { 69 | invariant(config.backends); 70 | delete config.backends[0].backend.push; 71 | 72 | return assert.isRejected(validateConfig(config), InvalidBackendError); 73 | }); 74 | 75 | it('should check whether backends have validateOptions functions', () => { 76 | invariant(config.backends); 77 | delete config.backends[0].backend.validateOptions; 78 | 79 | return assert.isRejected(validateConfig(config), InvalidBackendError); 80 | }); 81 | 82 | it('should check whether backends have aliases', () => { 83 | invariant(config.backends); 84 | delete config.backends[0].alias; 85 | 86 | return assert.isRejected(validateConfig(config), EmptyBackendAliasError); 87 | }); 88 | 89 | it('should check whether backend\'s push options are boolean[0]', () => { 90 | invariant(config.backends); 91 | config.backends[0].push = 'test'; 92 | 93 | return assert.isRejected(validateConfig(config), InvalidBackendOptionError); 94 | }); 95 | 96 | it('should check whether backend\'s push options are boolean[1]', () => { 97 | invariant(config.backends); 98 | config.backends[0].push = 1; 99 | 100 | return assert.isRejected(validateConfig(config), InvalidBackendOptionError); 101 | }); 102 | 103 | it('should check whether backend\'s push options are boolean[2]', () => { 104 | invariant(config.backends); 105 | config.backends[0].push = () => {}; 106 | 107 | return assert.isRejected(validateConfig(config), InvalidBackendOptionError); 108 | }); 109 | 110 | it('sets backend\'s push options to false', () => { 111 | 
invariant(config.backends); 112 | config.backends[0].push = true; 113 | return validateConfig(config).then(() => { 114 | assert(config.backends![0].push === true, 'defined option should stay'); 115 | assert(config.backends![1].push === false, 'config.backends[1].push should be `false`'); 116 | }); 117 | }); 118 | 119 | it('should check whether backend\'s pushMayFail options are boolean[0]', () => { 120 | invariant(config.backends); 121 | config.backends[0].pushMayFail = 'test'; 122 | 123 | return assert.isRejected(validateConfig(config), InvalidBackendOptionError); 124 | }); 125 | 126 | it('should check whether backend\'s pushMayFail options are boolean[1]', () => { 127 | invariant(config.backends); 128 | config.backends[0].pushMayFail = 1; 129 | 130 | return assert.isRejected(validateConfig(config), InvalidBackendOptionError); 131 | }); 132 | 133 | it('should check whether backend\'s pushMayFail options are boolean[2]', () => { 134 | invariant(config.backends); 135 | config.backends[0].pushMayFail = () => {}; 136 | 137 | return assert.isRejected(validateConfig(config), InvalidBackendOptionError); 138 | }); 139 | 140 | it('sets backend\'s pushMayFail options to false', done => { 141 | invariant(config.backends); 142 | 143 | config.backends[0].pushMayFail = true; 144 | const checkResult = () => helpers.notifyAssert(() => { 145 | invariant(config.backends); 146 | 147 | assert(config.backends[0].pushMayFail === true, 'defined option should stay'); 148 | assert(config.backends[1].pushMayFail === false, 'config.backends[1].pushMayFail should be `false`'); 149 | }, done); 150 | 151 | validateConfig(config).then(checkResult, checkResult); 152 | }); 153 | 154 | it('should check whether backends aliases are unique', () => { 155 | invariant(config.backends); 156 | config.backends[0].alias = config.backends[1].alias; 157 | 158 | return assert.isRejected(validateConfig(config), AliasesNotUniqueError); 159 | }); 160 | 161 | it('should call backend\'s validateOptions function', done => { 162 | invariant(config.backends); 163 | const backend0Mock = sinon.mock(config.backends[0].backend) 164 | .expects('validateOptions') 165 | .withArgs(sinon.match.same(config.backends[0].options)); 166 | 167 | const backend1Mock = sinon.mock(config.backends[1].backend) 168 | .expects('validateOptions') 169 | .withArgs(sinon.match.same(config.backends[1].options)); 170 | 171 | const checkResult = () => helpers.notifyAssert(() => { 172 | backend0Mock.verify(); 173 | backend1Mock.verify(); 174 | }, done); 175 | 176 | validateConfig(config).then(checkResult, checkResult); 177 | }); 178 | 179 | it('should reject if backend\'s validateOptions throws', () => { 180 | invariant(config.backends); 181 | sinon.stub(config.backends[0].backend, 'validateOptions').throws(new helpers.AnError()); 182 | 183 | return assert.isRejected(validateConfig(config), helpers.AnError); 184 | }); 185 | 186 | it('sets fallbackToNpm to true', done => { 187 | const checkResult = () => helpers.notifyAssert(() => { 188 | assert(config.fallbackToNpm === true); 189 | }, done); 190 | 191 | validateConfig(config).then(checkResult, checkResult); 192 | }); 193 | 194 | it('sets installDiff to true', done => { 195 | const checkResult = () => helpers.notifyAssert(() => { 196 | assert(config.installDiff === true); 197 | }, done); 198 | 199 | validateConfig(config).then(checkResult, checkResult); 200 | }); 201 | 202 | it('sets packageHash to {}', done => { 203 | const checkResult = () => helpers.notifyAssert(() => { 204 | assert.isObject(config.packageHash); 205 | }, done); 206 | 207 |
validateConfig(config).then(checkResult, checkResult); 208 | }); 209 | 210 | it('should throw error if useGitHistory is set and installDiff is false', () => { 211 | config.useGitHistory = {depth: 5}; 212 | config.installDiff = false; 213 | 214 | return assert.isRejected(validateConfig(config), InvalidUseGitHistoryError); 215 | }); 216 | 217 | it('should throw error if useGitHistory is set without depth option', () => { 218 | // @ts-ignore 219 | config.useGitHistory = {}; 220 | 221 | return assert.isRejected(validateConfig(config), InvalidUseGitHistoryError); 222 | }); 223 | 224 | it('should throw error if useGitHistory.depth is zero or below zero', () => { 225 | config.useGitHistory = {depth: 0}; 226 | 227 | const zeroDepth = assert.isRejected(validateConfig(config), InvalidUseGitHistoryError); 228 | 229 | config.useGitHistory = {depth: -2}; 230 | 231 | const negativeDepth = assert.isRejected(validateConfig(config), InvalidUseGitHistoryError); 232 | 233 | return Promise.all([zeroDepth, negativeDepth]); 234 | }); 235 | 236 | it('should resolve backend from string to module', async () => { 237 | invariant(config.backends); 238 | config.backends[0].backend = 'local'; 239 | config.backends[0].options = {directory: '.'}; 240 | 241 | await validateConfig(config); 242 | 243 | invariant(config.backends); 244 | assert.equal(config.backends[0].backend, require('@/lib/backends/local')); 245 | }); 246 | 247 | it('should throw if backend property is not defined', () => { 248 | // @ts-ignore 249 | config.backends[0].backend = undefined; 250 | 251 | return assert.isRejected(validateConfig(config), InvalidBackendError); 252 | }); 253 | 254 | it('should throw InvalidNpmVersionError if npmVersion returns incompatible version', () => { 255 | sandbox.stub(npmWrapper, 'version').returns(Promise.resolve('5.4.3')); 256 | 257 | config.npmVersion = '>6.6.6'; 258 | 259 | return assert.isRejected(validateConfig(config), InvalidNpmVersionError); 260 | }); 261 | 262 | it('should resolve, if npm version check passes', () => { 263 | sandbox.stub(npmWrapper, 'version').returns(Promise.resolve('5.4.3')); 264 | 265 | config.npmVersion = '5.x.x'; 266 | 267 | return assert.isFulfilled(validateConfig(config)); 268 | }); 269 | 270 | it('should throw InvalidVeendorVersionError if veendor does not comply with veendorVersion constraint', () => { 271 | global.VEENDOR_VERSION = '2.0.0'; 272 | config.veendorVersion = '>2.1.0'; 273 | 274 | return assert.isRejected(validateConfig(config), InvalidVeendorVersionError); 275 | }); 276 | 277 | it('should resolve, if veendor version is compatible', () => { 278 | global.VEENDOR_VERSION = '2.0.0'; 279 | config.veendorVersion = '^2'; 280 | 281 | return assert.isFulfilled(validateConfig(config)); 282 | }); 283 | 284 | it('should set default `dedupe` value', async () => { 285 | const res = await validateConfig(config); 286 | return assert.equal(res.dedupe, false); 287 | }); 288 | 289 | it('should set default `clearSharedCache` value', async () => { 290 | const res = await validateConfig(config); 291 | return assert.equal(res.clearSharedCache, false); 292 | }); 293 | }); 294 | -------------------------------------------------------------------------------- /src/types.ts: -------------------------------------------------------------------------------- 1 | import {StringMap} from '@/serviceTypes'; 2 | import {Tracer} from 'tracer'; 3 | import {ProgressStream} from '@/lib/util/progress'; 4 | 5 | export type BackendConfig = { 6 | backend: Backend, 7 | alias: string, 8 | push?: boolean, 9 | pushMayFail?: boolean, 10 | options: BackendOptions, 11 |
} 12 | 13 | export type Backend = { 14 | pull: (hash: string, options: BackendOptions, cacheDir: string, toolsProvider: BackendToolsProvider) => Promise<void>, 15 | push: (hash: string, options: BackendOptions, cacheDir: string, toolsProvider: BackendToolsProvider) => Promise<void>, 16 | validateOptions: (options: BackendOptions) => Promise<void>, 17 | keepCache?: boolean, 18 | } 19 | 20 | export type BackendToolsProvider = { 21 | getLogger: () => Tracer.Logger, 22 | getProgressStream: (label?: string, total?: number) => ProgressStream, 23 | } 24 | 25 | export type BackendOptions = object; 26 | 27 | export enum BackendCalls {pull, push, validateOptions} 28 | 29 | export type Config = { 30 | installDiff: boolean, 31 | fallbackToNpm: boolean, 32 | packageHash?: PackageHashOptions, 33 | useGitHistory?: { 34 | depth: number, 35 | }, 36 | backends: BackendConfig[], 37 | veendorVersion?: string, 38 | npmVersion?: string, 39 | dedupe?: boolean, 40 | clearSharedCache?: boolean, 41 | } 42 | 43 | export type ConfigWithHistory = Config & { 44 | useGitHistory: { 45 | depth: number, 46 | }, 47 | }; 48 | 49 | export function configHasHistory(config: Config): config is ConfigWithHistory { 50 | return typeof config.useGitHistory === 'object' && config.useGitHistory.depth > 0; 51 | } 52 | 53 | export type PkgJson = { 54 | dependencies: StringMap, 55 | devDependencies: StringMap, 56 | } 57 | 58 | export type PackageHashOptions = { 59 | suffix?: (() => string) | string 60 | } 61 | 62 | export function invariant(value: unknown, message = ''): asserts value { 63 | if (!Boolean(value)) { 64 | throw new Error(`This cannot happen ${message}`); 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target": "ES6", 4 | "resolveJsonModule": true, 5 | "module": "commonjs", 6 | "allowJs": true, 7 | "checkJs": true, 8 | "outDir": "./dist", 9 | "rootDir": "./src", 10 | "strict": true, 11 | "noImplicitAny": true, 12 | "strictNullChecks": true, 13 | "strictFunctionTypes": true, 14 | "strictPropertyInitialization": true, 15 | "noImplicitThis": true, 16 | "alwaysStrict": true, 17 | "noUnusedLocals": true, 18 | "noUnusedParameters": true, 19 | "noImplicitReturns": true, 20 | "moduleResolution": "node", 21 | "baseUrl": ".", 22 | "paths": { 23 | "@/*": [ 24 | "src/*" 25 | ] 26 | }, 27 | "esModuleInterop": true 28 | }, 29 | "exclude": [ 30 | "node_modules", 31 | "dist", 32 | "nvm", 33 | "tmp" 34 | ] 35 | } 36 | --------------------------------------------------------------------------------
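
A note on the `Backend` contract from src/types.ts above: pull, push, and validateOptions (plus an optional keepCache flag) are all a storage backend has to provide, which is what lets veendor cache bundles in arbitrary storage beyond the bundled git-lfs/http/local/s3 backends. Below is a minimal sketch of a hypothetical directory-copying backend written against that contract. The `DirectoryOptions` shape, the `<hash>.tar.gz` bundle naming, and the omitted pack/unpack steps are illustrative assumptions, not veendor's actual implementation; see src/lib/backends/local.ts for the real local backend.

    import path from 'path';
    import fsExtra from 'fs-extra';
    import {Backend, BackendOptions, BackendToolsProvider} from '@/types';
    import {BundleNotFoundError} from '@/lib/errors';

    // Hypothetical options shape for this example backend.
    type DirectoryOptions = {directory: string};

    const directoryBackend: Backend = {
        async validateOptions(options: BackendOptions): Promise<void> {
            // validateConfig calls this for every configured backend;
            // throwing here rejects the whole config.
            if (typeof (options as DirectoryOptions).directory !== 'string') {
                throw new Error('`directory` option is required and should be a string');
            }
        },

        async pull(hash: string, options: BackendOptions, cacheDir: string, tools: BackendToolsProvider): Promise<void> {
            const bundle = path.join((options as DirectoryOptions).directory, `${hash}.tar.gz`);

            // A missing bundle is signalled with BundleNotFoundError, the same way
            // the stubbed pull in pushBackends.test.ts above does it.
            if (!(await fsExtra.pathExists(bundle))) {
                throw new BundleNotFoundError();
            }

            tools.getLogger().info(`copying ${bundle} into ${cacheDir}`);
            await fsExtra.copy(bundle, path.join(cacheDir, `${hash}.tar.gz`));
            // A real backend would also unpack the bundle into node_modules here.
        },

        async push(hash: string, options: BackendOptions, cacheDir: string, tools: BackendToolsProvider): Promise<void> {
            const target = path.join((options as DirectoryOptions).directory, `${hash}.tar.gz`);

            // A real backend would pack node_modules into cacheDir first.
            tools.getLogger().info(`publishing bundle to ${target}`);
            await fsExtra.copy(path.join(cacheDir, `${hash}.tar.gz`), target);
        },
    };

    export default directoryBackend;

Judging by the checks in validateConfig.test.ts above, such an object can be passed directly as the `backend` field of a backends entry in .veendor.js (validateConfig only verifies that pull, push, and validateOptions exist, and resolves string backend names to the bundled modules).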