├── .babelrc ├── .eslintignore ├── .eslintrc ├── .gitignore ├── .npmignore ├── .travis.yml ├── CHANGELOG.md ├── LICENSE ├── README.md ├── askpass.sh ├── package.json ├── src ├── callbacks │ ├── apply.js │ ├── check.js │ └── initialize.js ├── commands │ ├── checkout.js │ ├── clone.js │ ├── fetch.js │ ├── lfsCommands.js │ ├── ls.js │ ├── pointer.js │ ├── prune.js │ ├── pull.js │ ├── push.js │ ├── track.js │ ├── untrack.js │ └── version.js ├── constants.js ├── helpers.js ├── index.js ├── initialize.js ├── register.js ├── unregister.js └── utils │ ├── GitAskPass.js │ ├── authService.js │ ├── checkDependencies.js │ ├── execHelper.js │ ├── generateResponse.js │ ├── shellOptions.js │ └── spawnHelper.js ├── test ├── home │ └── .gitconfig ├── mock-creds ├── runner.js ├── server │ ├── server.crt │ ├── server.js │ ├── server.key │ └── start.sh ├── tests │ ├── callbacks │ │ └── apply.spec.js │ ├── commands │ │ ├── clone.spec.js │ │ ├── ls.spec.js │ │ ├── pointer.spec.js │ │ ├── prune.spec.js │ │ ├── push.spec.js │ │ ├── track.spec.js │ │ ├── untrack.spec.js │ │ └── version.spec.js │ ├── index.spec.js │ ├── initialize.spec.js │ ├── register.spec.js │ └── utils │ │ ├── checkDependencies.spec.js │ │ ├── execHelper.spec.js │ │ ├── generateResponse.spec.js │ │ └── spawnHelper.spec.js └── utils.js └── yarn.lock /.babelrc: -------------------------------------------------------------------------------- 1 | { 2 | "plugins": ["@babel/plugin-transform-modules-commonjs"] 3 | } 4 | -------------------------------------------------------------------------------- /.eslintignore: -------------------------------------------------------------------------------- 1 | build/* -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "es6": true, 4 | "mocha": true, 5 | "node": true 6 | }, 7 | "extends": "airbnb-base", 8 | "plugins": [ 9 | "chai-friendly" 10 | ], 11 | "rules": { 12 | "chai-friendly/no-unused-expressions": 2, 13 | "comma-dangle": 0, 14 | "consistent-return": 0, 15 | "implicit-arrow-linebreak": 0, 16 | "import/prefer-default-export": 0, 17 | "linebreak-style": 0, 18 | "no-bitwise": 0, 19 | "no-param-reassign": 0, 20 | "no-unnamed-functions": 0, 21 | "no-unused-expressions": 0, 22 | "prefer-promise-reject-errors": 0, 23 | 24 | // this is for the tests, don't worrry about it for now 25 | "import/no-unresolved": 1, 26 | "import/extensions": 1 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | 6 | # Runtime data 7 | pids 8 | *.pid 9 | *.seed 10 | 11 | # Directory for instrumented libs generated by jscoverage/JSCover 12 | lib-cov 13 | 14 | # Coverage directory used by tools like istanbul 15 | coverage 16 | .nyc_output 17 | 18 | # node-waf configuration 19 | .lock-wscript 20 | 21 | # Build directory 22 | build 23 | 24 | # Dependency directory 25 | # https://docs.npmjs.com/misc/faq#should-i-check-my-node-modules-folder-into-git 26 | node_modules 27 | 28 | # Optional npm cache directory 29 | .npm 30 | 31 | # Optional REPL history 32 | .node_repl_history 33 | 34 | # Editor Preferences 35 | .dir-locals.el 36 | .idea 37 | .vscode 38 | 39 | # Generated test files 40 | test/repos 41 | test/server/lfs-test-server 42 | 43 | \.DS_Store 44 | 
-------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | # Test directories 2 | test 3 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | sudo: true 2 | dist: trusty 3 | 4 | language: node_js 5 | 6 | node_js: 7 | - "6" 8 | 9 | os: 10 | - osx 11 | - linux 12 | 13 | addons: 14 | addons: 15 | apt: 16 | packages: 17 | - build-essential 18 | - git 19 | - libgnome-keyring-dev 20 | - libxkbfile-dev 21 | - libxss-dev 22 | 23 | before_install: 24 | - git clean -xdff 25 | - npm i -g yarn@0.24.5 26 | - if [ $TRAVIS_OS_NAME == "linux" ]; then 27 | export CXX="g++-4.9" CC="gcc-4.9" DISPLAY=:99.0; 28 | sh -e /etc/init.d/xvfb start; 29 | sleep 3; 30 | fi 31 | 32 | env: 33 | - GOPATH=$TRAVIS_BUILD_DIR/test/server CC=clang CXX=clang++ npm_config_clang=1 34 | 35 | before_install: 36 | - cd .. && git clone -b fix/asyncify_methods https://github.com/cjhoward92/nodegit.git 37 | - cd nodegit && npm i && yarn link && cd ../nodegit-lfs 38 | - yarn link nodegit 39 | 40 | install: 41 | - yarn 42 | 43 | script: 44 | - yarn test -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | # Changelog for v1.0.0-alpha.6 2 | - No changes 3 | 4 | # Changelog for v1.0.0-alpha.5 5 | - Increase child_process exec's `maxBuffer` when executing `git lfs ls-files` 6 | 7 | # Changelog for v1.0.0-alpha.4 8 | - Added debug functionality to track performance of calls to filter. 9 | 10 | # Changelog for v1.0.0-alpha.3 11 | - init function now takes override paths for askpass.sh and GitAskPass.js for those in an asar 12 | 13 | # Changelog for v1.0.0-alpha.1 14 | - Changed credentials system for LFS to use GIT_ASKPASS 15 | - major change to credentials callback, now implemented as a channel. 16 | - Messages (all messages are of { type, credsRequestId } format): 17 | - CREDS_REQUESTED 18 | - CREDS_SUCCEEDED 19 | - Expects { username, password } as reply 20 | - CREDS_FAILED 21 | - CREDS_SPAWN_FAILED 22 | 23 | # Changelog for v0.2.0 24 | - Requires nodegit v0.25.0-alpha.9 and later because of change in NodeGit API 25 | 26 | # Changelog for v0.1.0 27 | 28 | - Removed the `repoHasLfsBin` helper 29 | 30 | # Changelog for v0.0.19 31 | 32 | - Update `node-pty` to fix the high sierra issue 33 | 34 | # Changelog for v0.0.18 35 | 36 | - `spawnHelper` now destroys sockets manually 37 | 38 | # Changelog for v0.0.17 39 | 40 | - `spawnHelper` now uses the `-i` flag with `node-pty` to allow for ssh agents to be recognized. 41 | - `spawnHelper` ignores the `EIO` error from `node-pty` 42 | 43 | # Changelog for v0.0.16 44 | 45 | - `dependencyCheck` now checks git and lfs even if one of them fails, and returns the parsed version number as well. 46 | 47 | # Changelog for v0.0.15 48 | 49 | - Linter errors... 
50 | 51 | # Changelog for v0.0.14 52 | 53 | - Updated the `check` callback for filters to use the filter API properly to find the `gitattributes` associated with a file 54 | - Updated the `commands/ls` command to sort by files and not sha to avoid sha conflicts 55 | 56 | # Changelog for v0.0.13 57 | 58 | - Updated the `check` callback for filters to use the NodeGit API to find the `gitattributes` associated with a file 59 | 60 | # Changelog for v0.0.12 61 | 62 | - Updated the install routine to no longer create a `.gitattributes` for the repo. 63 | 64 | # Changelog for v0.0.11 65 | 66 | - Updated the `spawnHelper` to successfully kill processes. It was leaving rogue processes around before. 67 | 68 | # Changelog for v0.0.10 69 | 70 | - Added the credentials callback to `git lfs checkout` in case it invokes the smudge filters manually 71 | - Fixed `helpers.verifyOutput` to actually check for errors and ssh permission errors 72 | - Removed dead code 73 | - Refactored the `spawnHelper` credentials routine to do less work and just shell out to the parent process with the potential prompt results 74 | 75 | # Changelog for v0.0.9 76 | 77 | - Changed `version` to write errors to `stderr` only and not `raw` 78 | - Changed `checkDependencies` to use the correct response object on errors 79 | - Updated the version regexes 80 | 81 | # Changelog for v0.0.8 82 | 83 | - Fixed adding `/usr/local/bin` to exec path when it does not exist on `darwin` or `linux` as it was exploding in some situations and returning false negatives 84 | 85 | # Changelog for v0.0.7 86 | 87 | - Updated `/src/commands/fetch.js` to properly return error output when parsing fails 88 | - Updated `/src/commands/pull.js` to properly return error output when parsing fails 89 | - Removed the `tests` directory from the `eslint` command and added `eslint-full` to be able to lint tests 90 | 91 | # Changelog for v0.0.6 92 | 93 | - Added `/usr/local/bin` to exec path when it does not exist on `darwin` or `linux` 94 | - Added error handling in `checkDependencies` so we get nicer output when version checks fail or the binaries do not exist 95 | 96 | # Changelog for v0.0.5 97 | 98 | - Changed the `node-pty` dependency again to be a different forked version found [here](https://github.com/implausible/node-pty) 99 | 100 | # Changelog for v0.0.4 101 | 102 | - Changed the `node-pty` dependency to be a forked version found [here](https://github.com/implausible/node-pty) 103 | 104 | # Changelog for v0.0.3 105 | 106 | - Changed the username/password prompt to have a `needsUsername` param instead of `sshOnly` as `needsUsername` is more correct. 107 | 108 | # Changelog for v0.0.2 109 | 110 | - Altered the `hasLfsFilters` to be `repoHasLfs` as it now checks for filters in the `.gitattributes` file or for the `.git/lfs` file in the working directory of the current repo. 
111 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright 2017 Axosoft, LLC 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 4 | 5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 6 | 7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # NodeGit LFS 2 | 3 | NodeGit LFS is an extension library that augments [NodeGit](http://www.nodegit.org/) with the ability to use [git lfs](https://git-lfs.github.com/) via the command line. 4 | 5 | To use this package, you must have [git](https://git-scm.com/) and [git lfs](https://git-lfs.github.com/) installed. 6 | 7 | This project is new and still highly volatile; subsequent releases may include breaking changes for the foreseeable future. 8 | 9 | ## How to get started 10 | 11 | `NodeGit` is a peer dependency of the `NodeGit LFS` package, so make sure to install `NodeGit` before `NodeGit LFS`. 12 | 13 | To install `NodeGit` run: 14 | 15 | `yarn add nodegit` 16 | 17 | To install `NodeGit LFS` run: 18 | 19 | `yarn add nodegit-lfs` 20 | 21 | Once installed, you only need to bootstrap `NodeGit LFS` once: 22 | 23 | ```javascript 24 | const nodegit = require('nodegit'); 25 | const addLfs = require('nodegit-lfs'); 26 | 27 | // Call the function returned from nodegit-lfs with nodegit as a parameter 28 | // and you are good to go!
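// addLfs also accepts an optional second options argument (see src/index.js) for
// overriding nodeBinaryPath, gitAskPassPath, gitAskPassClientPath and
// defaultShellOptions (useful when the package is bundled inside an asar).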
29 | addLfs(nodegit); 30 | 31 | // After nodegit has been augmented you can use LFS via the LFS object 32 | nodegit.LFS.register() 33 | .then(() => { 34 | console.log('The LFS filter has been registered!'); 35 | }); 36 | ``` 37 | 38 | ## Building from source 39 | 40 | Clone the repo: 41 | 42 | `git clone https://github.com/axosoft/nodegit-lfs` 43 | 44 | Once cloned: 45 | 46 | ``` 47 | cd nodegit-lfs 48 | yarn 49 | ``` -------------------------------------------------------------------------------- /askpass.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | "$NODEGIT_LFS_NODE_PATH" "$NODEGIT_LFS_ASKPASS_PATH" $@ 4 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "nodegit-large-file-storage", 3 | "version": "1.0.0-alpha.6", 4 | "description": "A wrapper for NodeGit to facilitate LFS Support", 5 | "main": "build/src/index.js", 6 | "scripts": { 7 | "build": "yarn lint && yarn compile", 8 | "compile": "babel src -d build/src", 9 | "lint": "eslint src", 10 | "lint-full": "eslint src test/tests", 11 | "prepublish": "yarn lint && yarn compile", 12 | "test": "nyc mocha --require @babel/register test/runner \"test/**/*.spec.js\"" 13 | }, 14 | "repository": "git@github.com:Axosoft/nodegit-lfs.git", 15 | "author": "Axosoft, LLC", 16 | "license": "ISC", 17 | "keywords": [ 18 | "node", 19 | "git", 20 | "lfs", 21 | "nodegit" 22 | ], 23 | "files": [ 24 | "build/src", 25 | "askpass.sh" 26 | ], 27 | "bugs": { 28 | "url": "https://github.com/Axosoft/nodegit-lfs/issues" 29 | }, 30 | "homepage": "https://github.com/Axosoft/nodegit-lfs#readme", 31 | "devDependencies": { 32 | "@babel/cli": "^7.8.3", 33 | "@babel/core": "^7.8.3", 34 | "@babel/plugin-transform-modules-commonjs": "^7.8.3", 35 | "@babel/register": "^7.8.3", 36 | "eslint": "^6.8.0", 37 | "eslint-config-airbnb-base": "^14.0.0", 38 | "eslint-plugin-chai-friendly": "^0.5.0", 39 | "eslint-plugin-import": "^2.20.0", 40 | "eslint-plugin-mocha": "^6.2.2", 41 | "chai": "^4.2.0", 42 | "jsdoc-to-markdown": "^5.0.3", 43 | "mocha": "^7.0.0", 44 | "nyc": "^15.0.0", 45 | "prompt": "^1.0.0" 46 | }, 47 | "dependencies": { 48 | "default-shell": "^1.0.1", 49 | "fs-extra": "^8.1.0", 50 | "ignore": "^3.3.3", 51 | "lodash": "^4.17.15", 52 | "promisify-node": "^0.4.0", 53 | "ramda": "^0.24.1", 54 | "uuid": "^3.3.2" 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /src/callbacks/apply.js: -------------------------------------------------------------------------------- 1 | import fse from 'fs-extra'; 2 | import path from 'path'; 3 | import { Error } from '../constants'; 4 | import spawn from '../utils/spawnHelper'; 5 | import exec from '../utils/execHelper'; 6 | 7 | const IS_WINDOWS = process.platform === 'win32'; 8 | const ticks = IS_WINDOWS ? 
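// Wrap paths in double quotes on Windows and single quotes elsewhere so the
// path handed to `git lfs clean`/`git lfs smudge` survives shell interpretation.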
'"' : '\''; 9 | 10 | export const LFS_DEBUG = (() => { 11 | let shouldRecordEvents = false; 12 | let events = []; 13 | return { 14 | clearEvents: () => { events = []; }, 15 | dumpEvents: (dumpPath) => fse.writeJsonSync(dumpPath, events, { spaces: 2 }), 16 | recordEvent: (event, eventData) => { 17 | if (shouldRecordEvents) { 18 | events.push({ 19 | event, 20 | data: eventData 21 | }); 22 | } 23 | }, 24 | retrieveEvents: () => [...events], 25 | toggle: () => { 26 | shouldRecordEvents = !shouldRecordEvents; 27 | } 28 | }; 29 | })(); 30 | 31 | export default (credentialsCallback) => { 32 | const clean = (to, from, source) => source.repo() 33 | .then((repo) => { 34 | const workdir = repo.workdir(); 35 | const filePath = path.join(workdir, source.path()); 36 | const command = `git lfs clean ${ticks}${source.path()}${ticks}`; 37 | 38 | return fse.readFile(filePath) 39 | .then((buf) => exec(command, buf, { cwd: workdir })); 40 | }) 41 | .then(({ stdout }) => { 42 | const sha = Buffer.from(stdout); 43 | return to.set(sha, sha.length); 44 | }) 45 | .then(() => Error.CODE.OK); 46 | 47 | const smudge = (to, from, source) => source.repo() 48 | .then((repo) => { 49 | const workdir = repo.workdir(); 50 | const parts = source.path().split('/'); 51 | const filepath = parts[parts.length - 1]; 52 | const ptr = from.ptr(); 53 | 54 | const promise = spawn( 55 | `git lfs smudge ${ticks}${filepath}${ticks}`, 56 | ptr, 57 | { cwd: workdir }, 58 | credentialsCallback, 59 | workdir 60 | ); 61 | 62 | return promise; 63 | }) 64 | .then(({ stdout }) => to.set(stdout, stdout.length)) 65 | .then(() => Error.CODE.OK); 66 | 67 | return (to, from, source) => { 68 | const mode = source.mode(); 69 | 70 | const runNextFilter = () => { 71 | const event = mode === 1 ? 'clean' : 'smudge'; 72 | const startTime = Date.now(); 73 | return Promise.resolve() 74 | .then(() => { 75 | if (mode === 1) { 76 | return clean(to, from, source); 77 | } 78 | return smudge(to, from, source); 79 | }) 80 | .then( 81 | () => Error.CODE.OK, 82 | () => Error.CODE.PASSTHROUGH 83 | ).finally(() => { 84 | const endTime = Date.now(); 85 | const deltaTime = endTime - startTime; 86 | LFS_DEBUG.recordEvent(event, { 87 | source: source.path(), 88 | duration: deltaTime 89 | }); 90 | }); 91 | }; 92 | 93 | return runNextFilter(); 94 | }; 95 | }; 96 | -------------------------------------------------------------------------------- /src/callbacks/check.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { Error } from '../constants'; 3 | 4 | export default (src, attr) => (attr && R.equals('lfs', attr.toLowerCase()) ? 
Error.CODE.OK : Error.CODE.PASSTHROUGH); 5 | -------------------------------------------------------------------------------- /src/callbacks/initialize.js: -------------------------------------------------------------------------------- 1 | import { Error } from '../constants'; 2 | 3 | export default () => Error.CODE.OK; 4 | -------------------------------------------------------------------------------- /src/commands/checkout.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import { 4 | regex, 5 | BAD_CORE_RESPONSE, 6 | BAD_REGEX_PARSE_RESULT, 7 | } from '../constants'; 8 | 9 | import generateResponse from '../utils/generateResponse'; 10 | import { 11 | regexResult, 12 | verifyOutput, 13 | errorCatchHandler 14 | } from '../helpers'; 15 | 16 | const isValidLine = (str) => str !== ''; 17 | 18 | const generateCheckoutStats = (raw) => { 19 | if (raw && typeof raw === 'string') { 20 | const stats = {}; 21 | const outputLines = raw.split('Git LFS:'); 22 | const filteredLines = R.filter(isValidLine, outputLines); 23 | const statLine = filteredLines.pop(); 24 | 25 | const byteResults = regexResult(statLine, regex.TOTAL_BYTES); 26 | 27 | stats.total_bytes_checked_out = byteResults !== null 28 | ? byteResults[0].trim() : BAD_REGEX_PARSE_RESULT; 29 | 30 | stats.total_bytes = byteResults !== null 31 | ? byteResults[1].trim() : BAD_REGEX_PARSE_RESULT; 32 | 33 | const fileResults = regexResult(statLine, regex.TOTAL_FILES); 34 | 35 | stats.total_files_checked_out = fileResults !== null 36 | ? fileResults[0].trim() : BAD_REGEX_PARSE_RESULT; 37 | 38 | const skippedByteResults = regexResult(statLine, regex.SKIPPED_BYTES); 39 | 40 | stats.total_bytes_skipped = skippedByteResults !== null 41 | ? skippedByteResults[0].trim() : BAD_REGEX_PARSE_RESULT; 42 | 43 | const skippedFileResults = regexResult(statLine, regex.SKIPPED_FILES); 44 | 45 | stats.total_files_skipped = skippedFileResults !== null 46 | ? 
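// BAD_REGEX_PARSE_RESULT is a sentinel marking stats the regexes could not
// recover from the raw `git lfs checkout` output.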
skippedFileResults[0].trim() : BAD_REGEX_PARSE_RESULT; 47 | 48 | verifyOutput(stats, raw); 49 | 50 | if (statLine.includes('error:')) { 51 | stats.checkout_error = statLine.split('error:')[1].trim(); 52 | } 53 | 54 | return stats; 55 | } 56 | return {}; 57 | }; 58 | 59 | function checkout(repo, options) { 60 | const response = generateResponse(); 61 | const repoPath = repo.workdir(); 62 | 63 | const { 64 | callback, 65 | shellOptions 66 | } = (options || {}); 67 | 68 | return core.checkout('', R.mergeDeepRight(shellOptions, { cwd: repoPath }), repoPath, callback) 69 | .then(({ stdout }) => { 70 | response.raw = stdout; 71 | response.checkout = generateCheckoutStats(stdout); 72 | 73 | if (response.checkout.checkout_error) { 74 | response.success = false; 75 | response.stderr = response.checkout.checkout_error; 76 | response.errno = BAD_CORE_RESPONSE; 77 | } 78 | 79 | return response; 80 | }, errorCatchHandler(response)); 81 | } 82 | 83 | export default checkout; 84 | -------------------------------------------------------------------------------- /src/commands/clone.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import { 4 | regex, 5 | BAD_REGEX_PARSE_RESULT, 6 | } from '../constants'; 7 | import generateResponse from '../utils/generateResponse'; 8 | import { 9 | regexResult, 10 | errorCatchHandler, 11 | verifyOutput 12 | } from '../helpers'; 13 | 14 | const isValidLine = (str) => str !== ''; 15 | 16 | const generateCloneStats = (raw) => { 17 | if (raw && typeof raw === 'string') { 18 | const stats = {}; 19 | const outputLines = raw.split('Git LFS:'); 20 | const filteredLines = R.filter(isValidLine, outputLines); 21 | const statLine = filteredLines.pop(); 22 | 23 | const byteResults = regexResult(statLine, regex.TOTAL_BYTES); 24 | 25 | stats.total_bytes_cloned = byteResults !== null 26 | ? byteResults[0].trim() : BAD_REGEX_PARSE_RESULT; 27 | 28 | stats.total_bytes = byteResults !== null 29 | ? byteResults[1].trim() : BAD_REGEX_PARSE_RESULT; 30 | 31 | const fileResults = regexResult(statLine, regex.TOTAL_FILES); 32 | 33 | stats.total_files_cloned = fileResults !== null 34 | ? fileResults[0].trim() : BAD_REGEX_PARSE_RESULT; 35 | 36 | const skippedByteResults = regexResult(statLine, regex.SKIPPED_BYTES); 37 | 38 | stats.total_bytes_skipped = skippedByteResults !== null 39 | ? skippedByteResults[0].trim() : BAD_REGEX_PARSE_RESULT; 40 | 41 | const skippedFileResults = regexResult(statLine, regex.SKIPPED_FILES); 42 | 43 | stats.total_files_skipped = skippedFileResults !== null 44 | ? skippedFileResults[0].trim() : BAD_REGEX_PARSE_RESULT; 45 | 46 | verifyOutput(stats, raw); 47 | 48 | if (statLine.includes('error:')) { 49 | stats.clone_error = statLine.split('error:')[1].trim(); 50 | } 51 | 52 | return stats; 53 | } 54 | return {}; 55 | }; 56 | 57 | function clone(url, cwd, options) { 58 | if (!url || !cwd) { 59 | throw new Error('A valid URL and working directory are required'); 60 | } 61 | 62 | const { 63 | branch, 64 | callback, 65 | shellOptions 66 | } = (options || {}); 67 | const args = branch ? 
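// Only pass `-b <branch>` through to `git lfs clone` when a branch was requested.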
`-b ${branch}` : ''; 68 | 69 | const response = generateResponse(); 70 | return core.clone(`${url} ${args}`, R.mergeDeepRight(shellOptions, { cwd }), url, callback) 71 | .then(({ stdout }) => { 72 | response.raw = stdout; 73 | response.clone = generateCloneStats(stdout); 74 | return response; 75 | }, errorCatchHandler(response)); 76 | } 77 | 78 | export default clone; 79 | -------------------------------------------------------------------------------- /src/commands/fetch.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import { 4 | regex, 5 | BAD_REGEX_PARSE_RESULT, 6 | BAD_CORE_RESPONSE, 7 | } from '../constants'; 8 | import generateResponse from '../utils/generateResponse'; 9 | import { 10 | regexResult, 11 | errorCatchHandler, 12 | verifyOutput 13 | } from '../helpers'; 14 | 15 | const isValidLine = (str) => str !== ''; 16 | 17 | const generateFetchStats = (raw) => { 18 | if (raw && typeof raw === 'string') { 19 | const stats = {}; 20 | const outputLines = raw.split('Git LFS:'); 21 | const filteredLines = R.filter(isValidLine, outputLines); 22 | const statLine = filteredLines.pop(); 23 | 24 | const byteResults = regexResult(statLine, regex.TOTAL_BYTES); 25 | 26 | stats.total_bytes_fetched = byteResults !== null 27 | ? byteResults[0].trim() : BAD_REGEX_PARSE_RESULT; 28 | 29 | stats.total_bytes = byteResults !== null 30 | ? byteResults[1].trim() : BAD_REGEX_PARSE_RESULT; 31 | 32 | const fileResults = regexResult(statLine, regex.TOTAL_FILES); 33 | 34 | stats.total_files_fetched = fileResults !== null 35 | ? fileResults[0].trim() : BAD_REGEX_PARSE_RESULT; 36 | 37 | const skippedByteResults = regexResult(statLine, regex.SKIPPED_BYTES); 38 | 39 | stats.total_bytes_skipped = skippedByteResults !== null 40 | ? skippedByteResults[0].trim() : BAD_REGEX_PARSE_RESULT; 41 | 42 | const skippedFileResults = regexResult(statLine, regex.SKIPPED_FILES); 43 | 44 | stats.total_files_skipped = skippedFileResults !== null 45 | ? 
skippedFileResults[0].trim() : BAD_REGEX_PARSE_RESULT; 46 | 47 | verifyOutput(stats, raw); 48 | 49 | if (statLine.includes('error:')) { 50 | stats.fetch_error = statLine.split('error:')[1].trim(); 51 | } 52 | 53 | return stats; 54 | } 55 | return {}; 56 | }; 57 | 58 | function fetch(repo, options) { 59 | const response = generateResponse(); 60 | const repoPath = repo.workdir(); 61 | 62 | const args = []; 63 | const { 64 | remoteName, 65 | branchName, 66 | callback, 67 | shellOptions 68 | } = (options || {}); 69 | 70 | if (remoteName) { 71 | args.push(remoteName); 72 | } 73 | 74 | if (branchName) { 75 | args.push(branchName); 76 | } 77 | 78 | const argsString = R.join(' ', args); 79 | return core.fetch( 80 | argsString, 81 | R.mergeDeepRight(shellOptions, { cwd: repoPath, shell: true }), 82 | repoPath, 83 | callback 84 | ) 85 | .then(({ stdout }) => { 86 | response.raw = stdout; 87 | response.fetch = generateFetchStats(stdout); 88 | 89 | if (response.fetch.fetch_error) { 90 | response.success = false; 91 | response.stderr = response.fetch.fetch_error; 92 | response.errno = BAD_CORE_RESPONSE; 93 | } 94 | 95 | return response; 96 | }, errorCatchHandler(response)); 97 | } 98 | 99 | export default fetch; 100 | -------------------------------------------------------------------------------- /src/commands/lfsCommands.js: -------------------------------------------------------------------------------- 1 | import spawn from '../utils/spawnHelper'; 2 | import exec from '../utils/execHelper'; 3 | 4 | export const core = { 5 | checkout: (args = '', options, repoPath, callback) => spawn(`git lfs checkout ${args}`, null, options, callback, repoPath), 6 | clone: (args = '', options, repoUrl, callback) => spawn(`git lfs clone ${args}`, null, options, callback, repoUrl), 7 | fetch: (args = '', options, repoPath, callback) => spawn(`git lfs fetch ${args}`, null, options, callback, repoPath), 8 | fsck: (options) => exec('git lfs fsck', null, options), 9 | git: (args = '', options) => exec(`git ${args}`, null, options), 10 | install: (args = '', options) => exec(`git lfs install ${args}`, null, options), 11 | logs: (args = '', options) => exec(`git lfs logs ${args}`, null, options), 12 | ls: (args = '', options) => exec(`git lfs ls-files ${args}`, null, options), 13 | pointer: (args = '', options) => exec(`git lfs pointer ${args}`, null, options), 14 | prune: (args = '', options) => exec(`git lfs prune ${args}`, null, options), 15 | pull: (args = '', options, repoPath, callback) => spawn(`git lfs pull ${args}`, null, options, callback, repoPath), 16 | push: (args = '', options, repoPath, callback) => spawn(`git lfs push ${args}`, null, options, callback, repoPath), 17 | status: (args = '', options) => exec(`git lfs status ${args}`, null, options), 18 | track: (args = '', options) => exec(`git lfs track ${args}`, null, options), 19 | untrack: (args = '', options) => exec(`git lfs untrack ${args}`, null, options), 20 | update: (args = '', options) => exec(`git lfs update ${args}`, null, options), 21 | version: (options) => exec('git lfs version', null, options), 22 | }; 23 | -------------------------------------------------------------------------------- /src/commands/ls.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import { 4 | BAD_CORE_RESPONSE, 5 | } from '../constants'; 6 | import generateResponse from '../utils/generateResponse'; 7 | 8 | const isValidFileOutput = (str) => str.includes('*') || 
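// `git lfs ls-files` separates the OID from the file name with `*` (object present
// locally) or `-` (pointer only), so keep any line containing either marker.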
str.includes('-'); 9 | 10 | const reduceResults = (acc, value) => { 11 | const separatorRegex = /[*-]/; 12 | const match = value.match(separatorRegex); 13 | if (!match || !match[0]) { 14 | return acc; 15 | } 16 | 17 | const shaAndFileName = value.split(match[0]); 18 | acc[shaAndFileName[1].trim()] = shaAndFileName[0].trim(); 19 | 20 | return acc; 21 | }; 22 | 23 | const extractFileNames = (raw) => { 24 | const output = (raw || ''); 25 | 26 | const outputLines = output.toString().split('\n'); 27 | const filteredLines = R.filter(isValidFileOutput, outputLines); 28 | // creating the object in which sha's point to the file name 29 | return R.reduce(reduceResults, {}, filteredLines); 30 | }; 31 | 32 | const buildArgs = (options) => { 33 | const opts = (options || {}); 34 | const args = []; 35 | 36 | // returns the full length OID with the file 37 | if (opts.long) { 38 | args.push('--long'); 39 | } 40 | 41 | // this should probably be last? 42 | if (opts.commitSha && opts.commitSha > '') { 43 | args.push(options.commitSha); 44 | } 45 | 46 | return R.join(' ', args); 47 | }; 48 | 49 | const ls = (repo, options) => { 50 | const response = generateResponse(); 51 | const repoPath = repo.workdir(); 52 | const args = buildArgs(options); 53 | const { shellOptions } = (options || {}); 54 | 55 | return core.ls( 56 | args, 57 | R.mergeDeepRight(shellOptions, { cwd: repoPath, maxBuffer: 4194304 }) 58 | ) 59 | .then(({ stdout, stderr }) => { 60 | response.raw = stdout; 61 | 62 | if (stderr.length > 0) { 63 | response.stderr = stderr; 64 | response.success = false; 65 | response.errno = BAD_CORE_RESPONSE; 66 | } 67 | 68 | response.files = extractFileNames(stdout); 69 | return response; 70 | }); 71 | }; 72 | 73 | export default ls; 74 | -------------------------------------------------------------------------------- /src/commands/pointer.js: -------------------------------------------------------------------------------- 1 | import { core } from './lfsCommands'; 2 | import { 3 | BAD_CORE_RESPONSE, 4 | } from '../constants'; 5 | import generateResponse from '../utils/generateResponse'; 6 | 7 | const pointer = (repo, filePath, pointerPath) => { 8 | let args = ''; 9 | if (filePath) { 10 | args += `--file=${filePath} `; 11 | } 12 | if (pointerPath) { 13 | args += `--file=${pointerPath} `; 14 | } 15 | 16 | const response = generateResponse(); 17 | const repoPath = repo.workdir(); 18 | 19 | return core.pointer(args, { cwd: repoPath }) 20 | .then(({ stdout, stderr }) => { 21 | response.raw = stdout; 22 | 23 | if (stderr) { 24 | response.success = false; 25 | response.errno = BAD_CORE_RESPONSE; 26 | response.stderr = stderr; 27 | return response; 28 | } 29 | 30 | response.buffer = Buffer.from(stdout); 31 | return response; 32 | }); 33 | }; 34 | 35 | export default pointer; 36 | -------------------------------------------------------------------------------- /src/commands/prune.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import generateResponse from '../utils/generateResponse'; 4 | import { BAD_CORE_RESPONSE } from '../constants'; 5 | 6 | const prune = (repo, options) => { 7 | const response = generateResponse(); 8 | const repoPath = repo.workdir(); 9 | 10 | const { 11 | callback, 12 | shellOptions 13 | } = (options || {}); 14 | 15 | return core.prune('', R.mergeDeepRight(shellOptions, { cwd: repoPath }), callback) 16 | .then(({ stdout, stderr }) => { 17 | response.raw = stdout; 18 | 19 | if (stderr) { 20 
| response.errno = BAD_CORE_RESPONSE; 21 | response.stderr = stderr; 22 | response.success = false; 23 | return response; 24 | } 25 | 26 | return response; 27 | }); 28 | }; 29 | 30 | export default prune; 31 | -------------------------------------------------------------------------------- /src/commands/pull.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import { 4 | regex, 5 | BAD_REGEX_PARSE_RESULT, 6 | BAD_CORE_RESPONSE, 7 | } from '../constants'; 8 | import generateResponse from '../utils/generateResponse'; 9 | import { 10 | regexResult, 11 | verifyOutput, 12 | errorCatchHandler 13 | } from '../helpers'; 14 | 15 | const isValidLine = (str) => str !== ''; 16 | 17 | const generatePullStats = (raw) => { 18 | if (raw && typeof raw === 'string') { 19 | const stats = {}; 20 | const outputLines = raw.split('Git LFS:'); 21 | const filteredLines = R.filter(isValidLine, outputLines); 22 | const statLine = filteredLines.pop(); 23 | 24 | const byteResults = regexResult(statLine, regex.TOTAL_BYTES); 25 | 26 | stats.total_bytes_pulled = byteResults !== null 27 | ? byteResults[0].trim() : BAD_REGEX_PARSE_RESULT; 28 | 29 | stats.total_bytes = byteResults !== null 30 | ? byteResults[1].trim() : BAD_REGEX_PARSE_RESULT; 31 | 32 | const fileResults = regexResult(statLine, regex.TOTAL_FILES); 33 | 34 | stats.total_files_pulled = fileResults !== null 35 | ? fileResults[0].trim() : BAD_REGEX_PARSE_RESULT; 36 | 37 | const skippedByteResults = regexResult(statLine, regex.SKIPPED_BYTES); 38 | 39 | stats.total_bytes_skipped = skippedByteResults !== null 40 | ? skippedByteResults[0].trim() : BAD_REGEX_PARSE_RESULT; 41 | 42 | const skippedFileResults = regexResult(statLine, regex.SKIPPED_FILES); 43 | 44 | stats.total_files_skipped = skippedFileResults !== null 45 | ? 
skippedFileResults[0].trim() : BAD_REGEX_PARSE_RESULT; 46 | 47 | verifyOutput(stats, raw); 48 | 49 | if (statLine.includes('error:')) { 50 | stats.pull_error = statLine.split('error:')[1].trim(); 51 | } 52 | 53 | return stats; 54 | } 55 | return {}; 56 | }; 57 | 58 | function pull(repo, options) { 59 | const response = generateResponse(); 60 | const repoPath = repo.workdir(); 61 | 62 | const args = []; 63 | const { 64 | remoteName, 65 | branchName, 66 | callback, 67 | shellOptions 68 | } = (options || {}); 69 | 70 | if (remoteName) { 71 | args.push(remoteName); 72 | } 73 | if (branchName) { 74 | args.push(branchName); 75 | } 76 | const argsString = R.join(' ', args); 77 | 78 | return core.pull( 79 | argsString, 80 | R.mergeDeepRight(shellOptions, { cwd: repoPath, shell: true }), 81 | repoPath, 82 | callback 83 | ) 84 | .then(({ stdout }) => { 85 | response.raw = stdout; 86 | response.pull = generatePullStats(stdout); 87 | 88 | if (response.pull.pull_error) { 89 | response.success = false; 90 | response.stderr = response.pull.pull_error; 91 | response.errno = BAD_CORE_RESPONSE; 92 | } 93 | 94 | return response; 95 | }, errorCatchHandler(response)); 96 | } 97 | 98 | export default pull; 99 | -------------------------------------------------------------------------------- /src/commands/push.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import { 4 | regex, 5 | BAD_REGEX_PARSE_RESULT, 6 | } from '../constants'; 7 | import generateResponse from '../utils/generateResponse'; 8 | import { 9 | regexResult, 10 | errorCatchHandler, 11 | verifyOutput 12 | } from '../helpers'; 13 | 14 | /** 15 | * Note to future maintainers, I do not like this; at all. But at the moment this is the 16 | * best we got, inorder to parse the result from git core. Any slight change to the LFS output 17 | * in subsequent versions of CORE for LFS, will surely break this. Until we migrate off 18 | * git core dependency, we will have to regex the output. Godspeed. 19 | */ 20 | 21 | const isValidLine = (str) => str !== ''; 22 | 23 | const generatePushStats = (raw) => { 24 | if (!raw || typeof raw !== 'string') { 25 | return { 26 | error: 'invalid output', 27 | }; 28 | } 29 | const stats = {}; 30 | 31 | const outputLines = raw.split('Git LFS:'); 32 | const filteredLines = R.filter(isValidLine, outputLines); 33 | const statLine = filteredLines.pop(); 34 | 35 | const byteResults = regexResult(statLine, regex.TOTAL_BYTES); 36 | 37 | stats.total_bytes_transferred = byteResults !== null 38 | ? byteResults[0].trim() 39 | : BAD_REGEX_PARSE_RESULT; 40 | 41 | stats.total_bytes = byteResults !== null 42 | ? byteResults[1].trim() 43 | : BAD_REGEX_PARSE_RESULT; 44 | 45 | const fileResults = regexResult(statLine, regex.TOTAL_FILES); 46 | 47 | stats.total_files_transferred = fileResults !== null 48 | ? fileResults[0].trim() 49 | : BAD_REGEX_PARSE_RESULT; 50 | 51 | const skippedByteResults = regexResult(statLine, regex.SKIPPED_BYTES); 52 | 53 | stats.total_bytes_skipped = skippedByteResults !== null 54 | ? skippedByteResults[0].trim() 55 | : BAD_REGEX_PARSE_RESULT; 56 | 57 | const skippedFileResults = regexResult(statLine, regex.SKIPPED_FILES); 58 | 59 | stats.total_files_skipped = skippedFileResults !== null 60 | ? 
skippedFileResults[0].trim() 61 | : BAD_REGEX_PARSE_RESULT; 62 | 63 | verifyOutput(stats, raw); 64 | 65 | if (statLine.includes('error:')) { 66 | stats.error = statLine.split('error:')[1].trim(); 67 | } 68 | 69 | return stats; 70 | }; 71 | 72 | function push(repo, options) { 73 | const response = generateResponse(); 74 | const repoPath = repo.workdir(); 75 | 76 | const { 77 | remoteName, 78 | branchName, 79 | callback, 80 | shellOptions 81 | } = (options || {}); 82 | 83 | let branch = branchName; 84 | let remote = remoteName; 85 | let getRemoteAndBranchPromise = Promise.resolve(); 86 | 87 | if (!remote || !branch) { 88 | let remoteRef; 89 | getRemoteAndBranchPromise = repo.getCurrentBranch() 90 | .then((Reference) => { 91 | const promises = []; 92 | promises.push(this.NodeGit.Branch.upstream(Reference)); 93 | promises.push(this.NodeGit.Branch.name(Reference)); 94 | return Promise.all(promises); 95 | }) 96 | .then((results) => { 97 | ([remoteRef] = results); 98 | branch = branch || results[1]; 99 | return this.NodeGit.Branch.remoteName(repo, remoteRef.name()); 100 | }) 101 | .then((name) => { 102 | remote = remote || name; 103 | return Promise.resolve(); 104 | }); 105 | } 106 | 107 | return getRemoteAndBranchPromise 108 | .then(() => core.push(`${remote} ${branch}`, R.mergeDeepRight(shellOptions, { cwd: repoPath }), repoPath, callback)) 109 | .then(({ stdout }) => { 110 | response.raw = stdout; 111 | response.push = generatePushStats(stdout); 112 | return response; 113 | }, errorCatchHandler(response)); 114 | } 115 | 116 | export default push; 117 | -------------------------------------------------------------------------------- /src/commands/track.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import generateResponse from '../utils/generateResponse'; 4 | import { 5 | regex as Regex, 6 | BAD_CORE_RESPONSE, 7 | } from '../constants'; 8 | 9 | const isString = (str) => typeof str === 'string'; 10 | const ticks = process.platform === 'win32' ? '"' : "'"; 11 | 12 | const extractGlobs = (input, regex) => { 13 | const matches = input.match(regex); 14 | if (!matches || R.isEmpty(matches)) { return []; } 15 | return matches; 16 | }; 17 | 18 | const track = (repo, globs) => { 19 | if (!globs) { return; } 20 | 21 | const filteredGlobs = R.pipe( 22 | R.filter(isString), 23 | R.map((g) => `${ticks}${g}${ticks}`) 24 | )(globs); 25 | const response = generateResponse(); 26 | const repoPath = repo.workdir(); 27 | 28 | return core.track(R.join(' ', filteredGlobs), { cwd: repoPath }) 29 | .then(({ stdout, stderr }) => { 30 | response.raw = stdout; 31 | 32 | if (stderr) { 33 | response.success = false; 34 | response.errno = BAD_CORE_RESPONSE; 35 | response.stderr = stderr; 36 | return response; 37 | } 38 | 39 | response.new_globs = extractGlobs(stdout, Regex.TRACK); 40 | return response; 41 | }); 42 | }; 43 | 44 | export default track; 45 | -------------------------------------------------------------------------------- /src/commands/untrack.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import { core } from './lfsCommands'; 3 | import generateResponse from '../utils/generateResponse'; 4 | import { 5 | regex as Regex, 6 | BAD_CORE_RESPONSE, 7 | } from '../constants'; 8 | 9 | const isString = (str) => typeof str === 'string'; 10 | const ticks = process.platform === 'win32' ? 
'"' : "'"; 11 | 12 | const extractGlobs = (input, regex) => { 13 | const matches = input.match(regex); 14 | if (!matches || R.isEmpty(matches)) { return []; } 15 | return matches; 16 | }; 17 | 18 | const untrack = (repo, globs) => { 19 | if (!globs) { return; } 20 | 21 | const filteredGlobs = R.pipe( 22 | R.filter(isString), 23 | R.map((g) => `${ticks}${g}${ticks}`) 24 | )(globs); 25 | const response = generateResponse(); 26 | const repoPath = repo.workdir(); 27 | 28 | return core.untrack(R.join(' ', filteredGlobs), { cwd: repoPath }) 29 | .then(({ stdout, stderr }) => { 30 | response.raw = stdout; 31 | 32 | if (stderr) { 33 | response.success = false; 34 | response.errno = BAD_CORE_RESPONSE; 35 | response.stderr = stderr; 36 | return response; 37 | } 38 | 39 | response.untracked_globs = extractGlobs(stdout, Regex.TRACK); 40 | return response; 41 | }); 42 | }; 43 | 44 | export default untrack; 45 | -------------------------------------------------------------------------------- /src/commands/version.js: -------------------------------------------------------------------------------- 1 | import { core } from './lfsCommands'; 2 | import { parseVersion } from '../utils/checkDependencies'; 3 | import { 4 | regex, 5 | BAD_CORE_RESPONSE, 6 | } from '../constants'; 7 | import generateResponse from '../utils/generateResponse'; 8 | 9 | const version = () => { 10 | const response = generateResponse(); 11 | return core.version() 12 | .then(({ stdout, stderr }) => { 13 | response.raw = stdout; 14 | 15 | if (stderr) { 16 | response.stderr = stderr; 17 | response.success = false; 18 | response.errno = BAD_CORE_RESPONSE; 19 | } else { 20 | response.version = parseVersion(stdout, regex.LFS); 21 | } 22 | 23 | return response; 24 | }); 25 | }; 26 | 27 | export default version; 28 | -------------------------------------------------------------------------------- /src/constants.js: -------------------------------------------------------------------------------- 1 | export const LFS_ATTRIBUTE = 'filter=lfs diff=lfs merge=lfs'; 2 | 3 | export const LFS_FILTER_NAME = 'nodegit_lfs'; 4 | 5 | export const regex = { 6 | LFS: /(?:git-lfs\/\s*)?(\d+)(?:.(\d+))?(?:.(\d+))?.*/, 7 | GIT: /(?:git\s+version\s+)(\d+)\.(\d+)\.(\d+)/, 8 | VERSION: /(\d+\.){2}\d+/, 9 | TRACK: /([a-zA-Z*.]+(?="))/g, 10 | SKIPPED_BYTES: /[\d]+\s+B\s+(?=skipped)/g, 11 | SKIPPED_FILES: /[\d]\s+(?=skipped)/g, 12 | TOTAL_BYTES: /[\d]+\s+B/g, 13 | TOTAL_FILES: /[\d]\s+(?=files)/g, 14 | USERNAME: /username/g, 15 | PASSWORD: /password/g, 16 | PASSPHRASE: /passphrase/g, 17 | PERMISSION_DENIED: /permission\s+denied\s+\(.+\)\./g, 18 | CREDENTIALS_NOT_FOUND: /could not read Username for '(.+)': terminal prompts disabled/, 19 | CREDENTIALS_ERROR: /(Git credentials)|(Bad credentials)/ 20 | }; 21 | 22 | export const promptTypes = { 23 | USERNAME: 'username', 24 | PASSWORD: 'password', 25 | PASSPHRASE: 'passphrase', 26 | }; 27 | 28 | export const BAD_VERSION = '0'; 29 | export const BAD_CORE_RESPONSE = '-1'; 30 | export const BAD_REGEX_PARSE_RESULT = '-2'; 31 | 32 | export const minimumVersions = { 33 | GIT: '1.8.5', 34 | LFS: '2.0.0', 35 | }; 36 | 37 | // Copied from NodeGit for now... 
eventually we will find a way to change that 38 | export const Error = { 39 | CODE: { 40 | OK: 0, 41 | PASSTHROUGH: -30, 42 | } 43 | }; 44 | -------------------------------------------------------------------------------- /src/helpers.js: -------------------------------------------------------------------------------- 1 | import fse from 'fs-extra'; 2 | import path from 'path'; 3 | import R from 'ramda'; 4 | 5 | import { 6 | LFS_ATTRIBUTE, 7 | BAD_CORE_RESPONSE, 8 | BAD_REGEX_PARSE_RESULT, 9 | regex, 10 | } from './constants'; 11 | 12 | export const getGitattributesPathFromRepo = (repo) => path.join(repo.workdir(), '.gitattributes'); 13 | 14 | export const loadGitattributeFiltersFromRepo = (repo) => { 15 | const gitattrPath = getGitattributesPathFromRepo(repo); 16 | return fse.pathExists(gitattrPath) 17 | .then((exists) => { 18 | if (!exists) { 19 | return ''; 20 | } 21 | 22 | return fse.readFile(gitattrPath, 'utf8'); 23 | }) 24 | .then((fileContents) => { 25 | const attributes = fileContents.split('\n'); 26 | const lfsFilters = R.reduce((acc, line) => { 27 | if (!R.contains(LFS_ATTRIBUTE, line)) { 28 | return acc; 29 | } 30 | 31 | const start = line.indexOf(LFS_ATTRIBUTE); 32 | const filter = line.substring(0, start); 33 | acc.push(filter.trim()); 34 | 35 | return acc; 36 | }, [], attributes); 37 | 38 | return lfsFilters; 39 | }); 40 | }; 41 | 42 | export const repoHasLfsFilters = (repo) => loadGitattributeFiltersFromRepo(repo) 43 | .then((filters) => filters.length > 0) 44 | .catch(() => false); 45 | 46 | export const repoHasLfs = repoHasLfsFilters; 47 | 48 | export const regexResult = (input, regularExpression) => input.match(regularExpression); 49 | 50 | export const verifyOutput = (stats, raw) => { 51 | // Check to see if it was a permissions error 52 | const wasPermissionDenied = raw.trim().toLowerCase().match(regex.PERMISSION_DENIED); 53 | if (wasPermissionDenied) { 54 | const e = new Error(wasPermissionDenied[0]); 55 | e.errno = BAD_CORE_RESPONSE; 56 | throw e; 57 | } 58 | 59 | // We need to handle this manually because LFS isn't returning stderr 60 | const props = R.values(stats); 61 | const allErrored = R.pipe( 62 | R.filter(R.equals(BAD_REGEX_PARSE_RESULT)), 63 | R.sum, 64 | R.equals(props.length) 65 | )(props); 66 | 67 | // We have all errors 68 | if (allErrored) { 69 | const e = new Error(raw); 70 | e.errno = BAD_CORE_RESPONSE; 71 | throw e; 72 | } 73 | }; 74 | 75 | export const errorCatchHandler = (response) => (err) => { 76 | // This is a manually detected error we get from LFS 77 | if (err.errno === BAD_CORE_RESPONSE) { 78 | response.stderr = response.raw; 79 | response.errno = BAD_CORE_RESPONSE; 80 | response.success = false; 81 | return response; 82 | } 83 | 84 | throw err; 85 | }; 86 | -------------------------------------------------------------------------------- /src/index.js: -------------------------------------------------------------------------------- 1 | import initialize from './initialize'; 2 | import register from './register'; 3 | import unregister from './unregister'; 4 | import { core } from './commands/lfsCommands'; 5 | import { 6 | loadGitattributeFiltersFromRepo, 7 | repoHasLfs 8 | } from './helpers'; 9 | import checkout from './commands/checkout'; 10 | import push from './commands/push'; 11 | import track from './commands/track'; 12 | import untrack from './commands/untrack'; 13 | import version from './commands/version'; 14 | import fetch from './commands/fetch'; 15 | import prune from './commands/prune'; 16 | import list from './commands/ls'; 17 | 
import testPointer from './commands/pointer'; 18 | import pull from './commands/pull'; 19 | import clone from './commands/clone'; 20 | import { 21 | setNodeBinaryPath, 22 | setGitAskPassPath, 23 | setGitAskPassClientPath 24 | } from './utils/authService'; 25 | import { dependencyCheck } from './utils/checkDependencies'; 26 | import { setDefaultShellOptions } from './utils/shellOptions'; 27 | 28 | function LFS(nodegit) { 29 | this.NodeGit = nodegit; 30 | } 31 | 32 | LFS.prototype = { 33 | core, 34 | checkout, 35 | clone, 36 | dependencyCheck, 37 | fetch, 38 | filters: loadGitattributeFiltersFromRepo, 39 | repoHasLfs, 40 | initialize, 41 | list, 42 | register, 43 | testPointer, 44 | track, 45 | prune, 46 | pull, 47 | push, 48 | version, 49 | unregister, 50 | untrack, 51 | }; 52 | 53 | module.exports = ( 54 | nodegit, 55 | { 56 | nodeBinaryPath = process.execPath, 57 | defaultShellOptions = null, 58 | // when built, askpass.sh will be two directories above index.js 59 | gitAskPassPath = require.resolve('../../askpass.sh'), 60 | gitAskPassClientPath = require.resolve('./utils/GitAskPass') 61 | } = {} 62 | ) => { 63 | const _NodeGit = nodegit; // eslint-disable-line no-underscore-dangle 64 | 65 | Object.getPrototypeOf(_NodeGit).LFS = new LFS(_NodeGit); 66 | 67 | module.exports = _NodeGit; 68 | 69 | if (nodeBinaryPath) { 70 | setNodeBinaryPath(nodeBinaryPath); 71 | } 72 | 73 | if (gitAskPassPath) { 74 | setGitAskPassPath(gitAskPassPath); 75 | } 76 | 77 | if (gitAskPassClientPath) { 78 | setGitAskPassClientPath(gitAskPassClientPath); 79 | } 80 | 81 | if (defaultShellOptions) { 82 | setDefaultShellOptions(defaultShellOptions); 83 | } 84 | 85 | return _NodeGit; 86 | }; 87 | -------------------------------------------------------------------------------- /src/initialize.js: -------------------------------------------------------------------------------- 1 | import fse from 'fs-extra'; 2 | import path from 'path'; 3 | import R from 'ramda'; 4 | import { core } from './commands/lfsCommands'; 5 | 6 | const builldArgs = (options) => { 7 | const opts = (options || {}); 8 | const args = []; 9 | if (opts.local) { 10 | args.push('--local'); 11 | } 12 | return R.join(' ', args); 13 | }; 14 | 15 | const initialize = (repo, options) => { 16 | const workdir = repo.workdir(); 17 | const lfsDir = path.join(workdir, '.git', 'lfs'); 18 | 19 | return fse.pathExists(lfsDir) 20 | .then((exists) => { 21 | if (exists) { 22 | return Promise.resolve(); 23 | } 24 | return core.install(builldArgs(options), { cwd: workdir }); 25 | }); 26 | }; 27 | 28 | export default initialize; 29 | -------------------------------------------------------------------------------- /src/register.js: -------------------------------------------------------------------------------- 1 | import buildApply from './callbacks/apply'; 2 | import check from './callbacks/check'; 3 | import initialize from './callbacks/initialize'; 4 | import { LFS_FILTER_NAME } from './constants'; 5 | 6 | /** 7 | * Wrapper to register nodegit LFS filter and append status to LFS in nodegit 8 | * @return Promise 9 | */ 10 | function register(credentialsCallback) { 11 | const filter = { 12 | apply: buildApply(credentialsCallback), 13 | check, 14 | initialize, 15 | attributes: 'filter=lfs merge=lfs diff=lfs', 16 | }; 17 | 18 | return this.NodeGit.FilterRegistry.register(LFS_FILTER_NAME, filter, 0); 19 | } 20 | 21 | export default register; 22 | -------------------------------------------------------------------------------- /src/unregister.js: 
-------------------------------------------------------------------------------- 1 | import { LFS_FILTER_NAME } from './constants'; 2 | 3 | /** 4 | * Wrapper to unregister nodegit LFS filter and append status to NodeGit module 5 | * @return Promise 6 | */ 7 | function unregister() { 8 | return this.NodeGit.FilterRegistry.unregister(LFS_FILTER_NAME); 9 | } 10 | 11 | export default unregister; 12 | -------------------------------------------------------------------------------- /src/utils/GitAskPass.js: -------------------------------------------------------------------------------- 1 | const net = require('net'); 2 | 3 | const { 4 | NODEGIT_LFS_ASKPASS_STATE, 5 | NODEGIT_LFS_ASKPASS_PORT 6 | } = process.env; 7 | 8 | const request = { 9 | credsRequestId: NODEGIT_LFS_ASKPASS_STATE, 10 | property: null 11 | }; 12 | 13 | if (process.argv[2] === 'Username') { 14 | request.property = 'username'; 15 | } else if (process.argv[2] === 'Password') { 16 | request.property = 'password'; 17 | } 18 | 19 | if (!request.property) { 20 | process.exit(1); 21 | } 22 | 23 | const client = net.createConnection( 24 | { 25 | port: NODEGIT_LFS_ASKPASS_PORT, 26 | host: 'localhost' 27 | }, 28 | () => { 29 | client.pipe(process.stdout); 30 | client.write(JSON.stringify(request)); 31 | } 32 | ); 33 | -------------------------------------------------------------------------------- /src/utils/authService.js: -------------------------------------------------------------------------------- 1 | import net from 'net'; 2 | import R from 'ramda'; 3 | import uuid from 'uuid'; 4 | 5 | let tcpServer = null; 6 | 7 | const usernameAndPasswordByRequestId = {}; 8 | 9 | export const createCredRequestId = () => uuid.v4(); 10 | 11 | export const getUsernameAndPassword = (credsRequestId) => 12 | usernameAndPasswordByRequestId[credsRequestId]; 13 | 14 | export const storeUsernameAndPassword = (credsRequestId, username, password) => { 15 | usernameAndPasswordByRequestId[credsRequestId] = { username, password }; 16 | }; 17 | 18 | export const clearUsernameAndPassword = (credsRequestId) => { 19 | delete usernameAndPasswordByRequestId[credsRequestId]; 20 | }; 21 | 22 | const socketListener = (socket) => { 23 | socket.on('data', (data) => { 24 | const input = data.toString(); 25 | const { credsRequestId, property } = JSON.parse(input) || {}; 26 | 27 | if (!credsRequestId || !property || !R.contains(property, ['username', 'password'])) { 28 | socket.end(); 29 | throw new Error('Malformed request'); 30 | } 31 | 32 | const credentials = getUsernameAndPassword(credsRequestId); 33 | if (!credentials) { 34 | socket.end(); 35 | throw new Error('No matching credentials for credsRequestId'); 36 | } 37 | 38 | socket.end(credentials[property]); 39 | }); 40 | }; 41 | 42 | export const ensureAuthServer = () => new Promise((resolve, reject) => { 43 | if (tcpServer) { 44 | resolve(); 45 | return; 46 | } 47 | 48 | tcpServer = net.createServer(socketListener); 49 | tcpServer.on('error', reject); 50 | tcpServer.listen({ port: 0, host: 'localhost' }, resolve); 51 | }); 52 | 53 | export const getAuthServerPort = () => ( 54 | tcpServer 55 | ? 
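// The server binds to port 0 so the OS picks a free port; report whichever
// port was actually assigned once the server exists.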
tcpServer.address().port 56 | : undefined 57 | ); 58 | 59 | let nodeBinaryPath = ''; 60 | 61 | export const setNodeBinaryPath = (binaryPath) => { 62 | nodeBinaryPath = binaryPath; 63 | }; 64 | 65 | export const getNodeBinaryPath = () => nodeBinaryPath; 66 | 67 | let gitAskPassPath = null; 68 | let gitAskPassClientPath = null; 69 | 70 | export const getGitAskPassPath = () => gitAskPassPath; 71 | 72 | export const setGitAskPassPath = (path) => { 73 | gitAskPassPath = path; 74 | }; 75 | 76 | export const getGitAskPassClientPath = () => gitAskPassClientPath; 77 | 78 | export const setGitAskPassClientPath = (path) => { 79 | gitAskPassClientPath = path; 80 | }; 81 | -------------------------------------------------------------------------------- /src/utils/checkDependencies.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | import generateResponse from './generateResponse'; 3 | import { core } from '../commands/lfsCommands'; 4 | 5 | import { 6 | regex as versionRegexes, 7 | minimumVersions, 8 | BAD_VERSION 9 | } from '../constants'; 10 | 11 | /** 12 | * @function normalizeVersion 13 | * @param Array versionArray array of version number eg: ['1', '8', '3'] => 1.8.3 14 | * @return Number normalized version number 15 | */ 16 | const normalizeVersion = (versionArray) => { 17 | if (!versionArray || versionArray.length === 0) { 18 | return BAD_VERSION; 19 | } 20 | return R.join('.', versionArray); 21 | }; 22 | 23 | export const parseVersion = (input, regex) => { 24 | if (!input) { 25 | return BAD_VERSION; 26 | } 27 | 28 | const matches = input.match(regex); 29 | if (!matches || R.isEmpty(matches)) { 30 | return BAD_VERSION; 31 | } 32 | 33 | const numericVersionNumbers = R.filter((match) => !Number.isNaN(match), matches); 34 | if (numericVersionNumbers.length > 0) { 35 | return normalizeVersion(numericVersionNumbers); 36 | } 37 | return matches[1]; 38 | }; 39 | 40 | export const isAtleastGitVersion = (gitInput) => 41 | parseVersion(gitInput, versionRegexes.GIT) >= minimumVersions.GIT; 42 | 43 | export const isAtleastLfsVersion = (lfsInput) => 44 | parseVersion(lfsInput, versionRegexes.LFS) >= minimumVersions.LFS; 45 | 46 | const setLfsFailed = (response) => { 47 | response.success = false; 48 | response.errno = BAD_VERSION; 49 | response.lfs_meets_version = false; 50 | response.lfs_exists = false; 51 | response.lfs_raw = null; 52 | response.lfs_version = null; 53 | }; 54 | 55 | const setGitFailed = (response) => { 56 | response.success = false; 57 | response.errno = BAD_VERSION; 58 | response.git_meets_version = false; 59 | response.git_exists = false; 60 | response.git_raw = null; 61 | response.git_version = null; 62 | }; 63 | 64 | export const dependencyCheck = () => { 65 | const response = generateResponse(); 66 | return core.git('--version') 67 | .then(({ stdout, stderr }) => { 68 | if (stderr) { 69 | setGitFailed(response); 70 | } else { 71 | response.git_version = parseVersion(stdout, versionRegexes.GIT); 72 | response.git_meets_version = isAtleastGitVersion(stdout); 73 | response.git_exists = response.git_version !== BAD_VERSION; 74 | response.git_raw = stdout; 75 | } 76 | }) 77 | .catch(() => { 78 | setGitFailed(response); 79 | }) 80 | .then(() => core.git('lfs version')) 81 | .then(({ stdout, stderr }) => { 82 | if (stderr) { 83 | setLfsFailed(response); 84 | } else { 85 | response.lfs_version = parseVersion(stdout, versionRegexes.LFS); 86 | response.lfs_meets_version = isAtleastLfsVersion(stdout); 87 | response.lfs_exists = 
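// A parsed version equal to BAD_VERSION means the LFS binary was missing or unparsable.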
response.lfs_version !== BAD_VERSION; 88 | response.lfs_raw = stdout; 89 | } 90 | 91 | return response; 92 | }) 93 | .catch(() => { 94 | setLfsFailed(response); 95 | return response; 96 | }); 97 | }; 98 | -------------------------------------------------------------------------------- /src/utils/execHelper.js: -------------------------------------------------------------------------------- 1 | import child from 'child_process'; 2 | import R from 'ramda'; 3 | 4 | import { combineShellOptions } from './shellOptions'; 5 | 6 | const exec = (command, input, opts = {}) => new Promise( 7 | (resolve, reject) => { 8 | const options = combineShellOptions(opts); 9 | if (process.platform !== 'win32' && !R.contains('/usr/local/bin', options.env.PATH)) { 10 | options.env.PATH = `${options.env.PATH}${':/usr/local/bin'}`; 11 | } 12 | 13 | const proc = child.exec(command, options, (err, stdout, stderr) => { 14 | if (err) { 15 | reject(err); 16 | } else { 17 | resolve({ proc, stdout, stderr }); 18 | } 19 | }); 20 | 21 | if (input) { 22 | proc.stdin.write(input); 23 | proc.stdin.end(); 24 | } 25 | } 26 | ); 27 | 28 | export default exec; 29 | -------------------------------------------------------------------------------- /src/utils/generateResponse.js: -------------------------------------------------------------------------------- 1 | import { Error } from '../constants'; 2 | 3 | const generateResponse = () => ({ 4 | success: true, 5 | errno: Error.CODE.OK, 6 | raw: '', 7 | stderr: '', 8 | }); 9 | 10 | export default generateResponse; 11 | -------------------------------------------------------------------------------- /src/utils/shellOptions.js: -------------------------------------------------------------------------------- 1 | import R from 'ramda'; 2 | 3 | let defaultShellOptions = { env: R.clone(process.env) }; 4 | 5 | export const setDefaultShellOptions = (options) => { 6 | defaultShellOptions = R.mergeDeepRight( 7 | { env: R.clone(process.env) }, 8 | R.clone(options) 9 | ); 10 | }; 11 | 12 | export const combineShellOptions = (options, requiredOptions) => R.mergeDeepRight( 13 | R.mergeDeepRight(defaultShellOptions, options || {}), 14 | requiredOptions || {} 15 | ); 16 | -------------------------------------------------------------------------------- /src/utils/spawnHelper.js: -------------------------------------------------------------------------------- 1 | import { spawn as nodeSpawn } from 'child_process'; 2 | import R from 'ramda'; 3 | import { 4 | createCredRequestId, 5 | clearUsernameAndPassword, 6 | ensureAuthServer, 7 | getAuthServerPort, 8 | getNodeBinaryPath, 9 | storeUsernameAndPassword, 10 | getGitAskPassPath, 11 | getGitAskPassClientPath 12 | } from './authService'; 13 | import { 14 | regex 15 | } from '../constants'; 16 | import { combineShellOptions } from './shellOptions'; 17 | 18 | const parseUrlFromErrorMessage = (errorMessage) => { 19 | let url = null; 20 | const matches = regex.CREDENTIALS_NOT_FOUND.exec(errorMessage); 21 | if (matches && matches.length > 1) { 22 | ([url] = matches); 23 | } 24 | return url; 25 | }; 26 | 27 | const spawnCommand = (command, opts, stdin = '') => new Promise((resolve, reject) => { 28 | const [cmd, ...args] = command.trim().split(' '); 29 | const childProcess = nodeSpawn(cmd, args, R.mergeDeepRight(opts, { stdio: 'pipe' })); 30 | 31 | const stdoutData = []; 32 | const stderrData = []; 33 | 34 | const makeDataAccumulator = (accumulator) => (data) => { 35 | accumulator.push(data); 36 | }; 37 | 38 | childProcess.stdout.on('data', 
makeDataAccumulator(stdoutData)); 39 | childProcess.stderr.on('data', makeDataAccumulator(stderrData)); 40 | childProcess.on('error', () => { 41 | reject({ status: null, stdout: '', stderr: '' }); 42 | }); 43 | childProcess.on('close', (status) => { 44 | const stdout = Buffer.concat(stdoutData); 45 | const stderr = Buffer.concat(stderrData); 46 | resolve({ status, stdout, stderr }); 47 | }); 48 | 49 | childProcess.stdin.write(stdin); 50 | childProcess.stdin.end(); 51 | }); 52 | 53 | const spawn = async (command, stdin, opts = {}, credentialsCallback, repoPath = null) => { 54 | const resolvedStdin = stdin || ''; 55 | const resolvedRepoPath = repoPath || R.path('cwd', opts); 56 | const noAuthResult = await spawnCommand( 57 | command, 58 | combineShellOptions( 59 | opts, 60 | { 61 | env: { 62 | GIT_TERMINAL_PROMPT: 0 63 | } 64 | } 65 | ), 66 | resolvedStdin 67 | ); 68 | if (noAuthResult.status === 0) { 69 | // then we're done, return the data 70 | return { stdout: noAuthResult.stdout }; 71 | } 72 | 73 | const errorMessage = noAuthResult.stderr.toString(); 74 | if (!regex.CREDENTIALS_ERROR.test(errorMessage)) { 75 | throw new Error(errorMessage); 76 | } 77 | 78 | await ensureAuthServer(); 79 | const url = parseUrlFromErrorMessage(errorMessage); 80 | const credRequestId = createCredRequestId(resolvedRepoPath); 81 | const tryCredentialsUntilCanceled = async () => { 82 | const { username, password } = await credentialsCallback({ 83 | type: 'CREDS_REQUESTED', 84 | credRequestId, 85 | repoPath: resolvedRepoPath, 86 | url 87 | }); 88 | storeUsernameAndPassword(credRequestId, username, password); 89 | try { 90 | const authResult = await spawnCommand( 91 | command, 92 | combineShellOptions( 93 | opts, 94 | { 95 | env: { 96 | GIT_TERMINAL_PROMPT: 0, 97 | GIT_ASKPASS: getGitAskPassPath(), 98 | NODEGIT_LFS_ASKPASS_STATE: credRequestId, 99 | NODEGIT_LFS_ASKPASS_PORT: getAuthServerPort(), 100 | NODEGIT_LFS_ASKPASS_PATH: getGitAskPassClientPath(), 101 | NODEGIT_LFS_NODE_PATH: getNodeBinaryPath() 102 | } 103 | } 104 | ), 105 | resolvedStdin 106 | ); 107 | if (authResult.status === 0) { 108 | await credentialsCallback({ 109 | type: 'CREDS_SUCCEEDED', 110 | credRequestId, 111 | repoPath: resolvedRepoPath, 112 | verifiedCredentials: { username, password }, 113 | url 114 | }); 115 | clearUsernameAndPassword(credRequestId); 116 | return { stdout: authResult.stdout }; 117 | } 118 | 119 | const stderr = authResult.stderr.toString(); 120 | if (regex.CREDENTIALS_ERROR.test(stderr)) { 121 | const authError = new Error('Auth error'); 122 | authError.isAuthError = true; 123 | throw authError; 124 | } 125 | 126 | throw new Error(stderr); 127 | } catch (e) { 128 | if (e.isAuthError) { 129 | clearUsernameAndPassword(credRequestId); 130 | await credentialsCallback({ 131 | type: 'CREDS_FAILED', 132 | credRequestId, 133 | repoPath: resolvedRepoPath, 134 | url 135 | }); 136 | return tryCredentialsUntilCanceled(); 137 | } 138 | 139 | throw e; 140 | } 141 | }; 142 | 143 | try { 144 | return await tryCredentialsUntilCanceled(); 145 | } catch (e) { 146 | await credentialsCallback({ 147 | type: 'CREDS_SPAWN_FAILED', 148 | error: e, 149 | credRequestId, 150 | repoPath 151 | }); 152 | throw e; 153 | } 154 | }; 155 | 156 | export default spawn; 157 | -------------------------------------------------------------------------------- /test/home/.gitconfig: -------------------------------------------------------------------------------- 1 | [user] 2 | name = John Doe 3 | email = johndoe@example.com 
-------------------------------------------------------------------------------- /test/mock-creds: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env -S-P/usr/local/bin:/usr/bin:${PATH} node 2 | const { exec: nodeExec } = require('child_process'); 3 | 4 | const exec = (...args) => new Promise((resolve, reject) => { 5 | nodeExec(...args, (err, stdout) => { 6 | if (err) { 7 | reject(err); 8 | } 9 | 10 | resolve(stdout); 11 | }); 12 | }); 13 | 14 | const askpassProgram = process.env.GIT_ASKPASS; 15 | if (!askpassProgram) { 16 | process.stderr.write('Git credentials for \'github.smashmouth.com/All-Star.git\' not found'); 17 | process.exit(1); 18 | } 19 | 20 | (async () => { 21 | const username = await exec(`${askpassProgram} Username for 'github.smashmouth.com':`); 22 | const password = await exec(`${askpassProgram} Password for 'github.smashmouth.com':`); 23 | if (username !== 'foo' || password !== 'bar') { 24 | process.exit(1); 25 | } 26 | 27 | process.stdout.write('Great success!'); 28 | })(); 29 | 30 | -------------------------------------------------------------------------------- /test/runner.js: -------------------------------------------------------------------------------- 1 | const fse = require('fs-extra'); 2 | const path = require('path'); 3 | const NodeGit = require('nodegit'); 4 | const LFS = require('../build/src'); 5 | 6 | const exec = require('../build/src/utils/execHelper').default; 7 | const git = require('../build/src/commands/lfsCommands').core.git; 8 | 9 | const testLFSServer = require('./server/server'); 10 | 11 | const testReposPath = path.join(__dirname, 'repos'); 12 | const lfsRepoPath = path.join(testReposPath, 'lfs-test-repository'); 13 | const emptyRepoPath = path.join(testReposPath, 'empty'); 14 | 15 | before(function () { // eslint-disable-line prefer-arrow-callback 16 | this.timeout(30000); 17 | 18 | const testRepoUrl = 'https://github.com/jgrosso/nodegit-lfs-test-repo'; 19 | return testLFSServer.start() 20 | .then(() => fse.remove(testReposPath)) 21 | .then(() => fse.mkdir(testReposPath)) 22 | .then(() => fse.mkdir(lfsRepoPath)) 23 | .then(() => fse.mkdir(emptyRepoPath)) 24 | .then(() => git(`init ${emptyRepoPath}`)) 25 | .then(() => git(`clone ${testRepoUrl} ${lfsRepoPath}`, { 26 | env: { 27 | GIT_SSL_NO_VERIFY: 1 28 | } 29 | })) 30 | .then(() => fse.appendFile( 31 | path.join(lfsRepoPath, '.git', 'config'), 32 | `[http] 33 | sslverify = false` 34 | )) 35 | .catch((err) => { throw new Error(err); }); 36 | }); 37 | 38 | beforeEach(() => { 39 | return exec('git clean -xdf && git reset --hard', null, { cwd: lfsRepoPath }) 40 | .then(() => exec('git clean -xdff', null, { cwd: emptyRepoPath })); 41 | }); 42 | 43 | after(() => { 44 | testLFSServer.stop(); 45 | }); 46 | 47 | afterEach(() => { 48 | const NodeGitLFS = LFS(NodeGit); 49 | return NodeGitLFS.LFS.unregister() 50 | .catch((error) => { 51 | // -3 implies LFS filter was not registered 52 | if (error.errno !== -3) { throw error; } 53 | }); 54 | }); 55 | -------------------------------------------------------------------------------- /test/server/server.crt: -------------------------------------------------------------------------------- 1 | -----BEGIN CERTIFICATE----- 2 | MIIETzCCAzegAwIBAgIJANo7OLiG67Y5MA0GCSqGSIb3DQEBCwUAMF8xCzAJBgNV 3 | BAYTAlVTMRAwDgYDVQQIDAdBcml6b25hMRMwEQYDVQQHDApTY290dHNkYWxlMRUw 4 | EwYDVQQKDAxBeG9zb2Z0LCBMTEMxEjAQBgNVBAMMCTEyNy4wLjAuMTAgFw0xNzA4 5 | MTEyMjI1MThaGA8yMDg1MDgyOTIyMjUxOFowXzELMAkGA1UEBhMCVVMxEDAOBgNV 6 | 
BAgMB0FyaXpvbmExEzARBgNVBAcMClNjb3R0c2RhbGUxFTATBgNVBAoMDEF4b3Nv 7 | ZnQsIExMQzESMBAGA1UEAwwJMTI3LjAuMC4xMIIBIjANBgkqhkiG9w0BAQEFAAOC 8 | AQ8AMIIBCgKCAQEA2zRDbgu4qKQcbZuYeqOMTCJUhtd/r9LoYsyxFngNnfy0fHXj 9 | tG68q0KNFLAdYJk2LxXC/oevNpr4zoI5Mo1NOfPQUc+s+1V9vS6roz35Ism2Exg9 10 | 87XZ8LlFumUvtw4hYbO8lkbaTQzX+i12U/oHwD/vH9EJgM3MfnjqHUQ+8BlGiUGD 11 | Qvy5Ek6W+cUQ+8VCHsdyOKyJPmJqdIg+jVzP9sUXYMPBdQaaJl9Nz8LYqlLGboV3 12 | sSz6PNO5nyr0BOimHJD3rh8qTurHWPB+2IKFzsnU/p2mpPUnp5eexwU1IDjkPVcI 13 | 4xn0ljPWawY+Z/Anwv4aqym2RHPcyEotgF+T4QIDAQABo4IBCjCCAQYwHQYDVR0O 14 | BBYEFH4yAfmhI4zuYVNYqX+rHmFPYsetMIGRBgNVHSMEgYkwgYaAFH4yAfmhI4zu 15 | YVNYqX+rHmFPYsetoWOkYTBfMQswCQYDVQQGEwJVUzEQMA4GA1UECAwHQXJpem9u 16 | YTETMBEGA1UEBwwKU2NvdHRzZGFsZTEVMBMGA1UECgwMQXhvc29mdCwgTExDMRIw 17 | EAYDVQQDDAkxMjcuMC4wLjGCCQDaOzi4huu2OTAMBgNVHRMEBTADAQH/MAsGA1Ud 18 | DwQEAwIC/DAaBgNVHREEEzARgglsb2NhbGhvc3SHBH8AAAEwGgYDVR0SBBMwEYIJ 19 | bG9jYWxob3N0hwR/AAABMA0GCSqGSIb3DQEBCwUAA4IBAQDHajjppRQdqB4/3+hL 20 | rv54Asfb0T6JCxxppYRn4/TBDng255/rhL9tjK1DpSAeL/TylOy+uCybnPsMlaws 21 | SaVJf9OnuqByxAQkCbb6Ktn5avSMQFy4IJEEaE1DkxcWKYP5Fx8lokoCjWe/aGqX 22 | 7BVPop5NPuu+YIFkLViLuwOu51nL6fMYcrF3cHAAptc+abAUX9htzzdwEKg5wOV/ 23 | sb66owiSbzewt/11+eAgwBZidg0n89l3X8wP1d/cOxeIB4bwM31//55sie01T4bl 24 | QpA2GouflJLFecpwbpfb5r+NN8NPT8DIgkx+BFfaVW1NkqLfUlIdXRC6l4p7bYw4 25 | XujT 26 | -----END CERTIFICATE----- 27 | -------------------------------------------------------------------------------- /test/server/server.js: -------------------------------------------------------------------------------- 1 | const fse = require('fs-extra'); 2 | const { spawn } = require('child_process'); 3 | 4 | let serverPid = null; 5 | 6 | const getWin32BashCommand = () => { 7 | let shPath = 'C:\\Program Files\\Git\\bin\\sh.exe'; 8 | if (fse.pathExistsSync(shPath)) { 9 | return `"${shPath}" `; 10 | } 11 | 12 | shPath = 'C:\\Program Files (x86)\\Git\\bin\\sh.exe'; 13 | if (!fse.pathExistsSync(shPath)) { 14 | throw new Error('Cannot find git-bash. Please install it in the Program Files directory'); 15 | } 16 | 17 | return `"${shPath}" `; 18 | }; 19 | 20 | module.exports = { 21 | start() { 22 | if (serverPid) { 23 | throw new Error('LFS test server has already been started!'); 24 | } 25 | 26 | return new Promise((resolve, reject) => { 27 | const cmdRunner = process.platform === 'win32' 28 | ? 
getWin32BashCommand() 29 | : './'; 30 | const server = spawn(`${cmdRunner}start.sh`, { 31 | cwd: __dirname, 32 | shell: true 33 | }); 34 | server.stdout.on('data', (data) => { 35 | // Store outputted server PID 36 | const pid = data.toString().match(/pid=(\d+)/); 37 | if (pid) { 38 | serverPid = parseInt(pid[1], 10); 39 | return resolve(); 40 | } 41 | 42 | // Handle Go errors 43 | const err = data.toString().match(/ err=(.*)/); 44 | if (err) { 45 | throw new Error(err[1]); 46 | } 47 | }); 48 | server.stderr.on('data', (err) => { 49 | throw new Error(err.toString()); 50 | }); 51 | }); 52 | }, 53 | 54 | stop() { 55 | if (!serverPid) { 56 | throw new Error("LFS test server hasn't been started!"); 57 | } 58 | 59 | process.kill(serverPid, 'SIGKILL'); 60 | } 61 | }; 62 | -------------------------------------------------------------------------------- /test/server/server.key: -------------------------------------------------------------------------------- 1 | -----BEGIN RSA PRIVATE KEY----- 2 | MIIEpAIBAAKCAQEA2zRDbgu4qKQcbZuYeqOMTCJUhtd/r9LoYsyxFngNnfy0fHXj 3 | tG68q0KNFLAdYJk2LxXC/oevNpr4zoI5Mo1NOfPQUc+s+1V9vS6roz35Ism2Exg9 4 | 87XZ8LlFumUvtw4hYbO8lkbaTQzX+i12U/oHwD/vH9EJgM3MfnjqHUQ+8BlGiUGD 5 | Qvy5Ek6W+cUQ+8VCHsdyOKyJPmJqdIg+jVzP9sUXYMPBdQaaJl9Nz8LYqlLGboV3 6 | sSz6PNO5nyr0BOimHJD3rh8qTurHWPB+2IKFzsnU/p2mpPUnp5eexwU1IDjkPVcI 7 | 4xn0ljPWawY+Z/Anwv4aqym2RHPcyEotgF+T4QIDAQABAoIBAQDKa5XE/k/0cy/J 8 | S8Dfpz18ZMtXhZLk40czTUBOtmgkt2JX+Glgik3xfFBZhVRrirANhPP/JX74Vt1h 9 | ncGB3BgvZuW9VqCvqz+kvn9DDIyOwmQ/d8hl0E1ETWKFgMEyMntDBL4mq6EVO8IJ 10 | xYeafq4/qwp/gAJdji3DsJbIlefkwExVgsxktnMAO9tmZSrRqxaLzSqPit96aF4/ 11 | qcFPgN5uu7SuyFmuBZSoA8iDZ9AvWI1cj07XVs7iOGIP4oTwFt+CxcE3KpgaR6px 12 | xaos5H1Jmy9PNMcIFy8Cwknq68l3+quK4EfL6CvYXCUDVS4ik2/KiVxSaAWDM88I 13 | tvLvvi4NAoGBAPmkOPUWQaYkA4T84RX3hYcYtc8RjLdJKLO2JNGjImmb5pEwnjwb 14 | zsgoLGIkBsxmI16lIZ+WFNXCzildg6JnmDUnDKtT2Sj17kZZmqk6HQ3hpLpuDNlL 15 | ZyfhUenq3WiodWkAXRtjnOkWuFlR8gJU6k4H4thGL8Ew9Wl8mJG48fJzAoGBAODJ 16 | k0+M7ov2jzW7HnGKMMN+J01fUavoPWDoJ/jKMFL1H2QAtMQoyoD3sgcmC7xfSdPk 17 | 7vYSvLZxm2Mr0dgyMDbHsyAQZnP6nlkItWv1IIWND5xxfbpMUV+MQ5SC/sLWQSRf 18 | CrW3FrZCewE+WF7qDEQXkAf9olk7EKDM9fSQvcdbAoGBAMRQKV5Kp0uQjJf7CMJu 19 | iLnJa8evhFnqFTDlrqSQxhIIwFMyyWx5ba4nMWfn7tS0CWJ3Ny65Jh9Hdzo+SLLh 20 | BmfeEm90m5RDeaIm+qHTil+NLy5QNph2XGOXJARwZrbTqqjs2UsdlZoS/Nt4vnio 21 | 8Cx2tSPgZdNlvce7bHtD1jgTAoGAGE7UBkM4PJxyCh5tAVwvP8SxnVVUcKGRmZwh 22 | Tm8W4LGvLlJBOgErPN0MPGep1zn6stZL3QJTs+r/Ia9IZveubQ4VWb9MRPhyDTC6 23 | 5azBUo/Zg1JmhTHhjtKNME8mG42tx7AbaWEeLKFtycmjJshqgSaHJcYQu1N+zdS8 24 | ODE6xIcCgYB9wn9DP4UwOg4SS5N0YbMTNgZFB8bZ+r71cPu6IHwYawdZA201XD8Y 25 | ik0qd1aUY7aT60d3J1+AJDHuuthAWVl8DRQSZYViGKDC4aJJn7h2NWL25KzvKqmq 26 | 4VwoRi1FWD+TZz2Kyr+nSbrbIRX593vNBqXMzo0pbHP54MB4AnADxg== 27 | -----END RSA PRIVATE KEY----- 28 | -------------------------------------------------------------------------------- /test/server/start.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | set -eu 3 | set -o pipefail 4 | 5 | # Download LFS test server 6 | GOPATH=`pwd`/lfs-test-server go get github.com/github/lfs-test-server 7 | 8 | # Configure LFS test server 9 | LFS_LISTEN=tcp://:9001 10 | LFS_HOST=127.0.0.1:9001 11 | LFS_ADMINUSER=admin 12 | LFS_ADMINPASS=admin 13 | LFS_CERT=`pwd`/server.crt 14 | LFS_KEY=`pwd`/server.key 15 | LFS_SCHEME=https 16 | export LFS_LISTEN LFS_HOST LFS_ADMINUSER LFS_ADMINPASS LFS_CERT LFS_KEY LFS_SCHEME 17 | 18 | # Run LFS test server 19 | cd lfs-test-server # `cd` into the `lfs-test-server` directory so that server-created 
files don't pollute `test/server` 20 | ./bin/lfs-test-server& 21 | echo "pid=$!" # Echo the server's PID so that it can be manually killed 22 | -------------------------------------------------------------------------------- /test/tests/callbacks/apply.spec.js: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | import exec from '../../../build/src/utils/execHelper'; 6 | import track from '../../../build/src/commands/track'; 7 | 8 | const commitFile = (repo, fileName, commitMessage) => { 9 | let index; 10 | let treeOid; 11 | let parent; 12 | 13 | return repo.refreshIndex() 14 | .then((indexResult) => { 15 | index = indexResult; 16 | }) 17 | .then(() => index.addByPath(fileName)) 18 | .then(() => index.write()) 19 | .then(() => index.writeTree()) 20 | .then((oidResult) => { 21 | treeOid = oidResult; 22 | return NodeGit.Reference.nameToId(repo, 'HEAD'); 23 | }) 24 | .then((head) => repo.getCommit(head)) 25 | .then((parentResult) => { 26 | parent = parentResult; 27 | return NodeGit.Signature.default(repo); 28 | }) 29 | .then((signatures) => 30 | repo.createCommit( 31 | 'HEAD', 32 | signatures, 33 | signatures, 34 | commitMessage, 35 | treeOid, 36 | [parent] 37 | )); 38 | }; 39 | 40 | describe('Apply', () => { 41 | it('Clean', () => { 42 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 43 | const NodeGitLFS = LFS(NodeGit); 44 | let repository; 45 | 46 | return NodeGitLFS.Repository.open(workdirPath) 47 | .then((repo) => { 48 | repository = repo; 49 | return track(repo, ['*.md']); 50 | }) 51 | .then(() => NodeGitLFS.LFS.register()) 52 | .then(() => exec('base64 /dev/urandom | head -c 20 > big_file_test.md', null, { cwd: workdirPath })) 53 | .then(() => commitFile(repository, 'big_file_test.md', 'LFS Clean Test')) 54 | .then(() => todo()); 55 | }); 56 | 57 | it('Smudge', () => { 58 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 59 | const NodeGitLFS = LFS(NodeGit); 60 | let repository; 61 | 62 | return NodeGitLFS.Repository.open(workdirPath) 63 | .then((repo) => { 64 | repository = repo; 65 | return repo; 66 | }) 67 | .then(() => NodeGitLFS.LFS.register()) 68 | .then(() => exec('base64 /dev/urandom | head -c 20 > big_file_test.txt', null, { cwd: workdirPath })) 69 | .then(() => { 70 | const opts = { 71 | checkoutStrategy: NodeGit.Checkout.STRATEGY.FORCE, 72 | }; 73 | return NodeGit.Checkout.head(repository, opts); 74 | }) 75 | .then(() => todo()); 76 | }); 77 | }); 78 | -------------------------------------------------------------------------------- /test/tests/commands/clone.spec.js: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | 6 | describe('Clone', () => { 7 | it('should generate clone response', () => { 8 | const emptyRepoPath = path.resolve(__dirname, '..', '..', 'repos', 'empty'); 9 | const NodeGitLFS = LFS(NodeGit); 10 | const url = 'https://github.com/mohseenrm/nodegit-lfs-test-repo'; 11 | 12 | return NodeGitLFS.LFS.clone(url, emptyRepoPath, { branch: 'test' }) 13 | .then(() => todo()); 14 | }); 15 | }); 16 | 
-------------------------------------------------------------------------------- /test/tests/commands/ls.spec.js: -------------------------------------------------------------------------------- 1 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 2 | import path from 'path'; 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | import ls from '../../../build/src/commands/ls'; 6 | 7 | describe('ls-files', () => { 8 | it('does generate ls response', () => { 9 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 10 | const NodeGitLFS = LFS(NodeGit); 11 | 12 | return NodeGitLFS.Repository.open(workdirPath) 13 | .then((repo) => ls(repo, { long: true })) 14 | .then(() => todo()); 15 | }); 16 | }); 17 | -------------------------------------------------------------------------------- /test/tests/commands/pointer.spec.js: -------------------------------------------------------------------------------- 1 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 2 | import path from 'path'; 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | import pointer from '../../../build/src/commands/pointer'; 6 | 7 | describe('Pointer', () => { 8 | it('does generate pointer response', () => { 9 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 10 | const NodeGitLFS = LFS(NodeGit); 11 | 12 | const packageJson = path.join(workdirPath, 'package.json'); 13 | 14 | return NodeGitLFS.Repository.open(workdirPath) 15 | .then((repo) => pointer(repo, packageJson)) 16 | .then(() => todo()); 17 | }); 18 | }); 19 | -------------------------------------------------------------------------------- /test/tests/commands/prune.spec.js: -------------------------------------------------------------------------------- 1 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 2 | import path from 'path'; 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | import prune from '../../../build/src/commands/prune'; 6 | 7 | describe('Prune', () => { 8 | it('does generate prune response', () => { 9 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 10 | const NodeGitLFS = LFS(NodeGit); 11 | 12 | return NodeGitLFS.Repository.open(workdirPath) 13 | .then((repo) => prune(repo)) 14 | .then(() => todo()); 15 | }); 16 | }); 17 | -------------------------------------------------------------------------------- /test/tests/commands/push.spec.js: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | import exec from '../../../build/src/utils/execHelper'; 6 | 7 | describe('Push', () => { 8 | it('should generate push response', () => { 9 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 10 | const NodeGitLFS = LFS(NodeGit); 11 | 12 | return exec('base64 /dev/urandom | head -c 20 > test_file.txt', null, { cwd: workdirPath }) 13 | .then(() => exec('git add test_file.txt', null, { cwd: workdirPath })) 14 | .then(() => exec('git commit -m "LFS: push unit test"', null, { cwd: workdirPath })) 15 | .then(() => NodeGitLFS.Repository.open(workdirPath)) 16 | .then((repo) => NodeGitLFS.LFS.push(repo, 'origin', 'test')) 17 | 
.then(() => todo()); 18 | }); 19 | }).timeout(5000); 20 | -------------------------------------------------------------------------------- /test/tests/commands/track.spec.js: -------------------------------------------------------------------------------- 1 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 2 | import path from 'path'; 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | import track from '../../../build/src/commands/track'; 6 | 7 | describe('Track', () => { 8 | it('does generate track response', () => { 9 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 10 | const NodeGitLFS = LFS(NodeGit); 11 | 12 | return NodeGitLFS.Repository.open(workdirPath) 13 | .then((repo) => track(repo, ['*.png', '*.dmg'])) 14 | .then(() => todo()); 15 | }); 16 | }); 17 | -------------------------------------------------------------------------------- /test/tests/commands/untrack.spec.js: -------------------------------------------------------------------------------- 1 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 2 | import path from 'path'; 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | import track from '../../../build/src/commands/track'; 6 | import untrack from '../../../build/src/commands/untrack'; 7 | 8 | describe('Untrack', () => { 9 | it('does generate untrack response', () => { 10 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 11 | const NodeGitLFS = LFS(NodeGit); 12 | let repository; 13 | return NodeGitLFS.Repository.open(workdirPath) 14 | .then((repo) => { 15 | repository = repo; 16 | return track(repo, ['*.png', '*.dmg', '*.txt', '*.a']); 17 | }) 18 | .then(() => untrack(repository, ['*.dmg', '*.a'])) 19 | .then(() => todo()); 20 | }); 21 | }); 22 | -------------------------------------------------------------------------------- /test/tests/commands/version.spec.js: -------------------------------------------------------------------------------- 1 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 2 | import path from 'path'; 3 | import { todo } from '../../utils'; 4 | import LFS from '../../../build/src'; 5 | import version from '../../../build/src/commands/version'; 6 | 7 | describe('Version', () => { 8 | it('does provide version number', () => { 9 | const workdirPath = path.resolve(__dirname, '..', '..', 'repos', 'lfs-test-repository'); 10 | const NodeGitLFS = LFS(NodeGit); 11 | 12 | return NodeGitLFS.Repository.open(workdirPath) 13 | .then((repo) => version(repo)) 14 | .then(() => todo()); 15 | }); 16 | }); 17 | -------------------------------------------------------------------------------- /test/tests/index.spec.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 3 | import LFS from '../../build/src'; 4 | 5 | describe('LFS', () => { 6 | it('LFS exists', () => { 7 | const NodeGitLFS = LFS(NodeGit); 8 | expect(NodeGitLFS).to.have.property('LFS'); 9 | }); 10 | 11 | it('initialize exists', () => { 12 | const NodeGitLFS = LFS(NodeGit); 13 | expect(NodeGitLFS.LFS).to.have.property('initialize'); 14 | }); 15 | 16 | it('register exists', () => { 17 | const NodeGitLFS = LFS(NodeGit); 18 | expect(NodeGitLFS.LFS).to.have.property('register'); 19 | expect(NodeGitLFS.LFS.register).to.be.a('function'); 20 | 
}); 21 | 22 | it('unregister exists', () => { 23 | const NodeGitLFS = LFS(NodeGit); 24 | expect(NodeGitLFS.LFS).to.have.property('unregister'); 25 | expect(NodeGitLFS.LFS.unregister).to.be.a('function'); 26 | }); 27 | }); 28 | -------------------------------------------------------------------------------- /test/tests/initialize.spec.js: -------------------------------------------------------------------------------- 1 | import path from 'path'; 2 | import fs from 'fs'; 3 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 4 | import { expect } from 'chai'; 5 | import LFS from '../../build/src'; 6 | 7 | describe('Initialize', () => { 8 | it('initialize is a promise', () => { 9 | const NodeGitLFS = LFS(NodeGit); 10 | const workdirPath = path.resolve(__dirname, '..', 'repos', 'lfs-test-repository'); 11 | 12 | return NodeGitLFS.Repository.open(workdirPath) 13 | .then((repo) => { 14 | const init = NodeGitLFS.LFS.initialize(repo); 15 | expect(init).to.be.a('promise'); 16 | }); 17 | }); 18 | 19 | it('creates .gitattributes for empty repo', () => { 20 | const NodeGitLFS = LFS(NodeGit); 21 | const emptydirPath = path.resolve(__dirname, '..', 'repos', 'empty'); 22 | expect(fs.existsSync(path.join(emptydirPath, '.gitattributes'))).to.be.false; 23 | 24 | return NodeGitLFS.Repository.open(emptydirPath) 25 | .then((repo) => NodeGitLFS.LFS.initialize(repo)) 26 | .then(() => { 27 | expect(fs.existsSync(path.join(emptydirPath, '.gitattributes'))).to.be.true; 28 | }); 29 | }); 30 | }); 31 | -------------------------------------------------------------------------------- /test/tests/register.spec.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import NodeGit from 'nodegit'; // eslint-disable-line import/no-unresolved 3 | import LFS from '../../build/src'; 4 | 5 | describe('Register:', () => { 6 | it('has register callback', () => { 7 | const NodeGitLFS = LFS(NodeGit); 8 | return NodeGitLFS.LFS.register() 9 | .then((result) => { 10 | expect(result).to.be.a('number'); 11 | expect(result).to.equal(0); 12 | }); 13 | }); 14 | 15 | it('cannot re-register LFS filter twice', () => { 16 | const NodeGitLFS = LFS(NodeGit); 17 | return NodeGitLFS.LFS.register() 18 | .then((result) => { 19 | expect(result).to.be.a('number'); 20 | expect(result).to.equal(0); 21 | }) 22 | .then(() => NodeGitLFS.LFS.register()) 23 | .then(() => expect.fail('Failed to re-register')) 24 | .catch((err) => { 25 | expect(err.errno).to.be.a('number'); 26 | expect(err.errno).to.equal(-4); 27 | }); 28 | }); 29 | }); 30 | -------------------------------------------------------------------------------- /test/tests/utils/checkDependencies.spec.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import { todo } from '../../utils'; 3 | import * as checker from '../../../build/src/utils/checkDependencies'; 4 | import { regex, BAD_VERSION } from '../../../build/src/constants'; 5 | import { core } from '../../../build/src/commands/lfsCommands'; 6 | 7 | describe('Dependency Helpers', () => { 8 | describe('parseVersion', () => { 9 | it('parses the LFS version', () => { 10 | const versionText = 'git-lfs/2.1.0 (GitHub; windows 386; go 1.8.1; git bd2c9987)'; 11 | const version = checker.parseVersion(versionText, regex.LFS); 12 | expect(version).to.equal('2.1.0'); 13 | }); 14 | 15 | it('parses the Git version', () => { 16 | const versionText = 'git version 2.13.0.windows.1'; 17 | const
version = checker.parseVersion(versionText, regex.GIT); 18 | expect(version).to.equal('2.13.0'); 19 | }); 20 | 21 | it('returns null when the regex matches nothing', () => { 22 | const versionText = 'dsadsadas'; 23 | const version = checker.parseVersion(versionText, regex.GIT); 24 | expect(version).to.equal(BAD_VERSION); 25 | }); 26 | 27 | it('returns null when the regex does not match numbers', () => { 28 | const versionText = 'dsadsadas'; 29 | const version = checker.parseVersion(versionText, /w+/); 30 | expect(version).to.equal(BAD_VERSION); 31 | }); 32 | }); 33 | 34 | describe('isAtleastGitVersion', () => { 35 | it('is at least version 2.0.0', () => { 36 | expect(checker.isAtleastGitVersion('git version 2.13.0.windows.1')).to.be.true; 37 | }); 38 | 39 | it('is not at least version 2.0.0', () => { 40 | expect(checker.isAtleastGitVersion('git version 1.13.0.windows.1')).to.be.false; 41 | }); 42 | }); 43 | 44 | describe('isAtleastLfsVersion', () => { 45 | it('is at least version 2.0.0', () => { 46 | expect(checker.isAtleastLfsVersion('git-lfs/2.1.1')).to.be.true; 47 | }); 48 | 49 | it('is not at least version 2.0.0', () => { 50 | expect(checker.isAtleastLfsVersion('git-lfs/1.1.1')).to.be.false; 51 | }); 52 | }); 53 | 54 | describe('checkDependencies', () => { 55 | it('check git version number is at least the minimum version', () => 56 | core.git('--version') 57 | .then(({ stdout }) => checker.isAtleastGitVersion(stdout)) 58 | .then((result) => expect(result).to.equal(true)) 59 | .catch(() => expect.fail('should not have done this'))); 60 | 61 | it('check LFS version number is at least the minimum version', () => 62 | core.git('lfs version') 63 | .then(({ stdout }) => checker.isAtleastLfsVersion(stdout)) 64 | .then((result) => expect(result).to.equal(true)) 65 | .catch(() => expect.fail('should not have done this'))); 66 | }); 67 | 68 | describe('Dependency Check', () => { 69 | it('generate valid response', () => 70 | checker.dependencyCheck() 71 | .then(() => todo())); 72 | }); 73 | }); 74 | -------------------------------------------------------------------------------- /test/tests/utils/execHelper.spec.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | import exec from '../../../build/src/utils/execHelper'; 3 | 4 | describe('exec', () => { 5 | it('returns a promise', () => { 6 | const result = exec('woo', null, {}).catch(() => {}); 7 | expect(result).to.be.a('Promise'); 8 | }); 9 | }); 10 | -------------------------------------------------------------------------------- /test/tests/utils/generateResponse.spec.js: -------------------------------------------------------------------------------- 1 | import { todo } from '../../utils'; 2 | 3 | describe('generateResponse', () => { 4 | it('does generate response', () => { 5 | todo(); 6 | }); 7 | }); 8 | -------------------------------------------------------------------------------- /test/tests/utils/spawnHelper.spec.js: -------------------------------------------------------------------------------- 1 | import { 2 | expect 3 | } from 'chai'; 4 | import path from 'path'; 5 | import R from 'ramda'; 6 | 7 | import { todo } from '../../utils'; 8 | import spawn from '../../../build/src/utils/spawnHelper'; 9 | 10 | describe('spawn', () => { 11 | it('mimics child_process.spawn when no process credentials needed', () => 12 | spawn('ls') 13 | .then(() => todo())); 14 | 15 | it('can take a callback', () => { 16 | const callback = (message) => R.cond([ 17 | [R.propEq('type', 
'CREDS_REQUESTED'), () => ({ username: 'foo', password: 'bar' })], 18 | [R.propEq('type', 'CREDS_SUCCEEDED'), () => {}], 19 | [R.propEq('type', 'CREDS_FAILED'), () => {}] 20 | ])(message); 21 | 22 | return spawn('./mock-creds', null, { cwd: path.resolve(__dirname, '..', '..') }, callback) 23 | .then((output) => { 24 | expect(output.stdout.toString()).to.equal('Great success!'); 25 | }); 26 | }); 27 | }); 28 | -------------------------------------------------------------------------------- /test/utils.js: -------------------------------------------------------------------------------- 1 | import { expect } from 'chai'; 2 | 3 | export const fail = (msg) => { 4 | expect.fail(true, true, msg); 5 | }; 6 | 7 | export const todo = () => { 8 | fail('TODO'); 9 | }; 10 | --------------------------------------------------------------------------------