├── .envrc ├── .gitattributes ├── .gitignore ├── .hindent.yaml ├── .travis.yml ├── LICENSE ├── PUBLISHING.md ├── README.md ├── Setup.hs ├── app └── Main.hs ├── nix ├── hindent-imposter.nix ├── inputs.nix └── pkgs.nix ├── package.yaml ├── package └── npm │ ├── .eslintrc.json │ ├── .gitignore │ ├── README.md │ ├── bin │ └── jetpack │ ├── index.js │ ├── package-lock.json │ └── package.json ├── resources └── hmr.js ├── script ├── format.sh └── install-stack.sh ├── shell.nix ├── src ├── Alternative │ └── IO.hs ├── Builder.hs ├── Cleaner.hs ├── CliArguments.hs ├── Compile.hs ├── ConcatModule.hs ├── Config.hs ├── Dependencies.hs ├── DependencyTree.hs ├── EntryPoints.hs ├── Error.hs ├── HotReload.hs ├── HotReload │ └── Server.hs ├── Init.hs ├── Logger.hs ├── Message.hs ├── Notify.hs ├── Parser │ ├── Ast.hs │ ├── Comment.hs │ ├── JetpackVersion.hs │ ├── PackageJson.hs │ └── Require.hs ├── Resolver.hs ├── Safe │ └── IO.hs ├── ToolPaths.hs ├── Utils │ ├── Files.hs │ ├── Parser.hs │ ├── Text.hs │ └── Tree.hs ├── Version.hs └── Watcher.hs ├── stack.yaml └── test ├── ConcatModuleSpec.hs ├── ConfigSpec.hs ├── DependenciesSpec.hs ├── Helper └── Property.hs ├── Parser ├── CommentSpec.hs └── RequireSpec.hs ├── Spec.hs └── fixtures ├── basics ├── .gitignore ├── modules │ ├── test.js │ └── test_no_parse.js ├── node_modules │ ├── clipboard │ │ └── index.js │ └── lodash │ │ ├── index.js │ │ ├── lodash.dist.js │ │ ├── lodash.js │ │ └── node_modules │ │ └── debug.js ├── sources │ ├── index.coffee │ └── no_parse_index.js └── tmp │ ├── .gitkeep │ └── deps.json ├── concat ├── js │ └── .gitkeep └── tmp │ ├── test___fixtures___concat___modules___Page___Foo.js.js │ └── test___fixtures___concat___sources___Page___Moo.js.js ├── failing ├── .gitignore ├── modules │ └── test.js ├── node_modules │ └── lodash │ │ └── lodash.dist.js ├── sources │ └── index.kaffe └── tmp │ ├── .gitkeep │ └── deps.json └── jetpack.json /.envrc: -------------------------------------------------------------------------------- 1 | ssh-add -K &> /dev/null || true 2 | 3 | my_ssh_auth_sock=$SSH_AUTH_SOCK 4 | 5 | 6 | # Usage: use nix_shell 7 | # 8 | # Works like use_nix, except that it's only rebuilt if the shell.nix or shell.nix file changes. 9 | # This avoids scenarios where the nix-channel is being updated and all the projects now need to be re-built. 10 | # 11 | # To force the reload the derivation, run `touch shell.nix` 12 | use_nix_shell() { 13 | local nixcache=".direnv.cache" 14 | local tempfile= 15 | 16 | if [[ ! 
-e "$nixcache" ]] || [[ ".envrc" -nt "$nixcache" ]] || [[ "shell.nix" -nt "$nixcache" ]] || [[ "shell.nix" -nt "$nixcache" ]]; then 17 | tempfile="$(mktemp "$nixcache.XXXXXX")" 18 | nix-shell --show-trace --run 'direnv dump' > "$tempfile" && mv "$tempfile" "$nixcache" 19 | rm -f "$tempfile" 20 | tempfile= 21 | fi 22 | direnv_load cat "$nixcache" 23 | for f in $(git ls-files '*.nix'); do 24 | watch_file $f 25 | done 26 | } 27 | 28 | # Configure a custom nix Cache 29 | export NIX_CONF_DIR="$(pwd)" 30 | 31 | use_nix_shell 32 | 33 | export SSH_AUTH_SOCK=$my_ssh_auth_sock 34 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | binaries/*.tgz filter=lfs diff=lfs merge=lfs -text 2 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.hi 2 | *.o 3 | *.swp 4 | *.tag 5 | *~ 6 | *_flymake.hs 7 | .hsenv 8 | .stack-work/ 9 | /.cabal-sandbox/ 10 | TAGS 11 | cabal-dev/ 12 | cabal.sandbox.config 13 | dist/ 14 | tags 15 | /_release/ 16 | .vagrant/ 17 | *.imports 18 | /.idea/ 19 | /*.iml 20 | /src/highlight.js 21 | /src/style.css 22 | /_site/ 23 | /.dir-locals.el 24 | /.git/ 25 | elm-stuff 26 | dist 27 | cabal-dev 28 | .cabal-sandbox/ 29 | cabal.sandbox.config 30 | .DS_Store 31 | *~ 32 | test/index.html 33 | *.cabal 34 | binaries 35 | .direnv.cache 36 | -------------------------------------------------------------------------------- /.hindent.yaml: -------------------------------------------------------------------------------- 1 | indent-size: 2 2 | line-length: 80 3 | force-trailing-newline: true 4 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | addons: 2 | apt: 3 | packages: 4 | - libgmp-dev 5 | before_install: 6 | - sh script/install-stack.sh 7 | cache: 8 | directories: 9 | - "${HOME}/.local/bin" 10 | - "${HOME}/.stack" 11 | - ".stack-work" 12 | env: 13 | global: 14 | - CASHER_TIME_OUT=1000 15 | - BUILD="jetpack-${TRAVIS_OS_NAME}.tar.gz" 16 | language: nix 17 | os: 18 | - linux 19 | - osx 20 | sudo: false 21 | script: 22 | - stack setup 23 | - stack test 24 | - stack build --ghc-options -O2 25 | - tar zcvf "${BUILD}" -C "$(stack path --local-install-root)/bin" jetpack 26 | - nix-shell --command script/format.sh 27 | deploy: 28 | provider: releases 29 | api_key: 30 | secure: Uuk1YmJNwJ3DPMzT3pAjEHx6Gn5cR8bR09hKQmt/FihlasHpIfiNHEn72LMQsntOQoNNjN+VYaKpIsfZ2w8TNKVn648SHfb0zSEZ+XjhPWX6EQtN1DZd4LyChgSyHllZuNERmbaL7+NWuM9w1ItKzWVZ6ZU7Mj3jKIGkhWrCkroPKG9Z3zXNgdtT+LGbHVSBdVyQhEQTFYxeHSAONQXB/UqZYMJxzzdr8cjtfJkBN7O8lP/8AhSj0ofcJEINTiM/T9UcpxGtLe8CV2ZJdc8DNN/wejcqAj9FqHwSR6By3SwrRLBJIFtKkz1zQTTOc8jUGU2PiINiHnaYtCN4ciXwH+QZyGIGJJ1vORv3vUo1HWqENmFaurlgQOVNOc+MQleTTnxr4VmrUkMyAjb8siKlhPfkbtcDFq/fd/0rheuEY6pfCy8+yyIdRet9p4nvPyZ6LS+rGasiCCVrf5d493G0DR+8/FzGANrwWYJztizzUvhjU4mqhyY+BEsGzTiHjhxlnmRQZKrHO2+fQQZNJ+eyBZXW+w6fx6Q50yi8xhm7LHVMkUrhf3/LHeIcEECsvPBNB6uGdvfPKIq+8iVaEvy/I/VQC+npq7JPOdeBDlCy6yf7AMu04K9ok1VWY4SokZDzqTQtXXwQzxsT9Z/M7wEHgZS8ZSf42KZmb5eKj3evNVU= 31 | file: "${BUILD}" 32 | skip_cleanup: true 33 | on: 34 | repo: NoRedInk/jetpack 35 | tags: true 36 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 
1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2018, NoRedInk 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | 31 | Jetpack uses code from klazuka/elm-hot which has this license: 32 | Copyright 2018 Keith Lazuka 33 | 34 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: 35 | 36 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. 37 | 38 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
39 | 40 | --------------------------------------- 41 | 42 | Based on fluxxu/elm-hot-loader (https://github.com/fluxxu/elm-hot-loader) 43 | 44 | The MIT License (MIT) 45 | 46 | Copyright (c) 2016 Flux Xu 47 | 48 | Permission is hereby granted, free of charge, to any person obtaining a copy 49 | of this software and associated documentation files (the "Software"), to deal 50 | in the Software without restriction, including without limitation the rights 51 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 52 | copies of the Software, and to permit persons to whom the Software is 53 | furnished to do so, subject to the following conditions: 54 | 55 | The above copyright notice and this permission notice shall be included in all 56 | copies or substantial portions of the Software. 57 | 58 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 59 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 60 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 61 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 62 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 63 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 64 | SOFTWARE. 65 | -------------------------------------------------------------------------------- /PUBLISHING.md: -------------------------------------------------------------------------------- 1 | # How to publish releases of jetpack (this is based on @avh4's great work in elm-format) 2 | 3 | 4 | ## Preparation 5 | 6 | 1. Bump version in package.yaml, package/npm/package.json, and package-lock.json 7 | 2. Commit the changes: "Bump version to *new version*" 8 | 3. Create a tag for the new version. `git tag -s <version> -m <version>` 9 | 4. Push the tag. `git push && git push --tags` 10 | 5. Wait for [CI to successfully build the tag](https://travis-ci.org/NoRedInk/jetpack/builds); this will create a new GitHub release. 11 | 12 | 13 | ## Publishing 14 | 15 | 1. Check the [release page](https://github.com/NoRedInk/jetpack/releases) on GitHub to confirm that both the OSX and Linux binaries are there. 16 | 2. Write the release notes on GitHub. 17 | 3. Publish the new npm version: 18 | ``` 19 | cd package/npm 20 | npm publish --access=public 21 | ``` 22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # jetpack [![Build Status](https://travis-ci.org/NoRedInk/jetpack.svg?branch=master)](https://travis-ci.org/NoRedInk/jetpack) 2 | 3 | > **NOTE:** Jetpack is an internal NoRedInk build tool that we decided to open source in hopes that it might be useful to others. It has never been our goal to develop a fully featured front-end build system for other use cases; it currently compiles only Elm and CoffeeScript because that's all our build needs. We're open to potentially accepting outside contributions to support other use cases, but please understand that this remains an internal tool first, and we can't make any promises that anything will be accepted. Thanks for understanding, and by all means don't feel bad about forking! ❤️ 4 | 5 | ## Motivation 6 | 7 | Webpack didn't work out for us; it did more than we needed, and builds were taking too long. 8 | Jetpack is focused on compilation speed. It only compiles the stuff we need and creates the concatenated output js.
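To make that concrete, here is a minimal, hypothetical sketch of what a concatenated bundle with function-wrapped modules can look like. The `define` and `requireModule` helpers and the module names below are illustrative only and are not jetpack's actual generated code:

```js
// Hypothetical sketch (not jetpack's real output): a concatenated bundle in
// which every compiled module is wrapped in a function and resolved through a
// small registry, so that `require` calls find the right exports.
var modules = {};

function define(name, factory) {
  modules[name] = { factory: factory, exports: null };
}

function requireModule(name) {
  var entry = modules[name];
  if (entry.exports === null) {
    entry.exports = {}; // cache before running the factory
    entry.factory(requireModule, entry.exports);
  }
  return entry.exports;
}

// Each compiled source file becomes one wrapped module.
define("modules/test.js", function (require, exports) {
  exports.greet = function () { return "hello"; };
});

define("sources/index.js", function (require, exports) {
  var test = require("modules/test.js");
  console.log(test.greet());
});

// The entry point kicks off the whole dependency tree.
requireModule("sources/index.js");
```

Running the sketch with node prints "hello"; the real bundles are generated by the code under `src/` (see `ConcatModule.hs`).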
9 | 10 | Given an entry point, it follows all `require` statements and creates a dependency tree. It then builds all files using the appropriate compiler, concatenates the compiled js, and wraps modules in functions so that the requires know how to get the right functions. 11 | 12 | ## Configuration 13 | 14 | ```jsonc 15 | { 16 | "entry_points": "./FOLDER/TO/THE/ENTRY_POINTS", 17 | "modules_directories": [ // folders where jetpack should try to find modules 18 | "./node_modules", 19 | "./vendor/assets/javascripts/FOO" 20 | ], 21 | "source_directory": "./ui/src", // jetpack will try to resolve modules in source_directory before checking in modules_directories 22 | "elm_root_directory": "./ui", // where your elm-package.json is 23 | "temp_directory": "./.jetpack/build_artifacts", // jetpack's build_artifacts will be here 24 | "log_directory": "./.jetpack/logs", // jetpack will log stuff in here 25 | "output_js_directory": "assets/javascripts", // js will be written to this folder 26 | "elm_bin_path": "./node_modules/.bin/elm-make", // path to elm binary 27 | "coffee_path": "./node_modules/.bin/coffee", // path to coffee 28 | "no_parse": ["./node_modules/clipboard/dist/clipboard.js"] // files that shouldn't be parsed 29 | } 30 | ``` 31 | 32 | ## Development 33 | 34 | ```bash 35 | direnv allow 36 | stack setup 37 | stack build 38 | stack test 39 | ``` 40 | 41 | Run `./script/format.sh` to format everything correctly. 42 | -------------------------------------------------------------------------------- /Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | 3 | main = defaultMain 4 | -------------------------------------------------------------------------------- /app/Main.hs: -------------------------------------------------------------------------------- 1 | module Main where 2 | 3 | import qualified Builder 4 | import qualified Cleaner 5 | import CliArguments (Args (..), RunMode (..), readArguments) 6 | import qualified Config 7 | import Control.Monad (void, when) 8 | import Data.Foldable (traverse_) 9 | import qualified Data.Text as T 10 | import qualified Data.Text.IO as TIO 11 | import qualified HotReload.Server 12 | import qualified Message 13 | import qualified Parser.JetpackVersion as JetpackVersion 14 | import qualified Version 15 | import qualified Watcher 16 | 17 | main :: IO () 18 | main = 19 | -- SETUP 20 | do 21 | maybeVersion <- JetpackVersion.load 22 | case Version.check maybeVersion of 23 | Just err -> Message.warning err 24 | Nothing -> return () 25 | config <- Config.readConfig 26 | args@Args {clean, runMode} <- readArguments 27 | when clean (Cleaner.clean config) 28 | case runMode of 29 | Version -> TIO.putStrLn Version.print 30 | Watch -> Watcher.watch config args Builder.DontHotReload 31 | HotReloading -> HotReload.Server.start config args Builder.HotReload 32 | RunOnce -> void $ Builder.build config args Builder.DontHotReload 33 | -------------------------------------------------------------------------------- /nix/hindent-imposter.nix: -------------------------------------------------------------------------------- 1 | { pkgs, ormolu }: 2 | 3 | # Ormolu is very new and doesn't have support in many editors yet. By exposing 4 | # it under an alias `hindent` we can trick editors into running Ormolu. 
5 | pkgs.writeShellScriptBin "hindent" '' 6 | #!/usr/bin/env bash 7 | mode="inplace" 8 | file=() 9 | args=() 10 | 11 | while [[ $# -gt 0 ]] 12 | do 13 | key="$1" 14 | 15 | case $key in 16 | -h|--help) 17 | echo "Hi! I'm Ormolu, a fancy new Haskell code formatter." 18 | echo "Not a lot of editors support me yet, so I'm pretending to be hindent." 19 | echo "Editors can call me like they would hindent, but secretly I will format the code they pass me." 20 | echo "If you're not an editor, you should probably call me directly. Run 'ormolu --help' to learn more!" 21 | exit 0; 22 | ;; 23 | --validate) 24 | mode="check" 25 | shift 26 | ;; 27 | --version) # same in ormolu and hindent 28 | args+=("$1") 29 | shift 30 | ;; 31 | --line-length|--indent-size|--tab-size|--style|-X) 32 | # not supported by ormolu 33 | shift 34 | shift 35 | ;; 36 | --no-force-newline|--sort-imports|--no-sort-imports) 37 | # not supported by ormolu 38 | shift 39 | ;; 40 | *) 41 | file+=("$1") 42 | shift 43 | ;; 44 | esac 45 | done 46 | 47 | # get input from stdin 48 | if [ ''${#file[@]} -eq 0 ]; then 49 | tmpfile=$(mktemp) 50 | cat - > $tmpfile 51 | file=("$tmpfile") 52 | mode="stdout" 53 | fi 54 | 55 | ormolu --mode $mode "''${file[@]}" 56 | '' 57 | -------------------------------------------------------------------------------- /nix/inputs.nix: -------------------------------------------------------------------------------- 1 | let 2 | pkgs = (import ./pkgs.nix).pkgs; 3 | 4 | ormoluSrc = pkgs.fetchFromGitHub { 5 | owner = "tweag"; 6 | repo = "ormolu"; 7 | rev = "d61b9101ca7e1bd9edbc7767eae69e9c7732a0a8"; 8 | sha256 = "1hkrwm1bsriw6pwvml4wlbpfnsvciipdlp2qy8xmmb1wbx0ssiy5"; 9 | }; 10 | 11 | ormolu = import ormoluSrc; 12 | 13 | hindent-imposter = import ./hindent-imposter.nix { pkgs = pkgs; ormolu = ormolu.ormolu; }; 14 | in 15 | [ 16 | pkgs.gcc 17 | pkgs.libiconv 18 | pkgs.ncurses 19 | pkgs.nodejs-10_x 20 | pkgs.packer 21 | pkgs.pcre 22 | pkgs.pkgconfig 23 | pkgs.stack 24 | pkgs.zlib 25 | hindent-imposter 26 | ormolu.ormolu 27 | ] ++ 28 | (if pkgs.stdenv.system == "x86_64-darwin" then 29 | [ 30 | # MacOS dependency of http-client-tls haskell lib, used by content_creation binary. 31 | pkgs.darwin.apple_sdk.frameworks.Cocoa 32 | pkgs.darwin.apple_sdk.frameworks.CoreServices 33 | ] 34 | else []) 35 | -------------------------------------------------------------------------------- /nix/pkgs.nix: -------------------------------------------------------------------------------- 1 | let 2 | fetch = { rev, sha256 }: 3 | builtins.fetchTarball { 4 | url = "https://github.com/NixOS/nixpkgs/archive/${rev}.tar.gz"; 5 | sha256 = sha256; 6 | }; 7 | 8 | 9 | config = { 10 | packageOverrides = pkgs: rec { 11 | haskellPackages = pkgs.haskellPackages.override { 12 | overrides = haskellPackagesNew: haskellPackagesOld: rec { 13 | # Example of an override: 14 | # dotenv = haskellPackagesNew.callPackage ./dotenv.nix { }; 15 | }; 16 | }; 17 | }; 18 | }; 19 | 20 | pkgsPath = fetch { 21 | # This comes from https://nixos.org/channels/ 22 | # 23 | # Pick a release (e.g. nixpkgs-18.09-darwin) and then open the `git-revision` 24 | # file. It will contain a rev like this. 
25 | # 26 | # Example: https://releases.nixos.org/nixpkgs/18.09-darwin/nixpkgs-darwin-18.09pre154040.58fbebc4ea5/git-revision 27 | rev = "9ec7625cee5365c741dee7b45a19aff5d5d56205"; 28 | 29 | # Generate this sha using the following command: 30 | # 31 | # $ nix-prefetch-url --unpack --type sha256 https://github.com/NixOS/nixpkgs/archive/.tar.gz 32 | sha256 = "0rh26fhdvnp9ssk8g63ysyzigw9zg43k9bd2fzrvhrk75sav723h"; 33 | }; 34 | in { 35 | pkgs = import pkgsPath { config = config; }; 36 | } 37 | -------------------------------------------------------------------------------- /package.yaml: -------------------------------------------------------------------------------- 1 | name: jetpack 2 | version: 3.0.0-rc1 3 | category: Web 4 | author: NoRedink 5 | maintainer: example@example.com 6 | copyright: 2017 NoRedink 7 | license: BSD3 8 | github: noredink/jetpack 9 | extra-source-files: 10 | - README.md 11 | data-files: 12 | - resources/hmr.js 13 | default-extensions: 14 | - DeriveFunctor 15 | - DeriveGeneric 16 | - DuplicateRecordFields 17 | - NamedFieldPuns 18 | - OverloadedStrings 19 | - PackageImports 20 | dependencies: 21 | - file-embed == 0.0.11 22 | - websockets == 0.12.5.3 23 | - concurrent-output == 1.10.9 24 | - lens == 4.17.1 25 | - Glob == 0.9.3 26 | - MissingH == 1.4.1.0 27 | - aeson == 1.4.2.0 28 | - base >= 4.7 && < 5 29 | - bytestring == 0.10.8.2 30 | - clock == 0.7.2 31 | - containers == 0.6.0.1 32 | - directory == 1.3.3.0 33 | - filepath == 1.4.2.1 34 | - formatting == 6.3.7 35 | - fsnotify == 0.3.0.1 36 | - lifted-async == 0.10.0.4 37 | - mtl == 2.2.2 38 | - optparse-applicative == 0.14.3.0 39 | - parsec == 3.1.13.0 40 | - process == 1.6.5.0 41 | - rainbow == 0.30.0.2 42 | - regex-compat == 0.95.1 43 | - safe == 0.3.17 44 | - safe-exceptions == 0.1.7.0 45 | - semver == 0.3.4 46 | - terminal-size == 0.3.2.1 47 | - text == 1.2.3.1 48 | - time == 1.8.0.2 49 | - transformers == 0.5.6.2 50 | - unix == 2.7.2.2 51 | library: 52 | source-dirs: 53 | - src 54 | ghc-options: 55 | - -Wall 56 | - -Werror 57 | - -fno-warn-name-shadowing 58 | other-modules: 59 | - Paths_jetpack 60 | - Utils.Files 61 | - Utils.Parser 62 | - Utils.Text 63 | - Utils.Tree 64 | executables: 65 | jetpack: 66 | main: Main.hs 67 | source-dirs: 68 | - app 69 | ghc-options: 70 | - -threaded 71 | - -rtsopts 72 | - -with-rtsopts=-N 73 | dependencies: 74 | - jetpack 75 | tests: 76 | jetpack-test: 77 | main: Spec.hs 78 | source-dirs: test 79 | ghc-options: 80 | - -threaded 81 | - -rtsopts 82 | - -with-rtsopts=-N 83 | dependencies: 84 | - doctest == 0.16.0.1 85 | - jetpack 86 | - tasty == 1.2 87 | - tasty-hunit == 0.10.0.1 88 | - tasty-quickcheck == 0.10.1 89 | -------------------------------------------------------------------------------- /package/npm/.eslintrc.json: -------------------------------------------------------------------------------- 1 | { 2 | "env": { 3 | "node": true, 4 | "es6": true 5 | }, 6 | "extends": "eslint:recommended", 7 | "rules": { 8 | "no-console": "off" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /package/npm/.gitignore: -------------------------------------------------------------------------------- 1 | /node_modules/ 2 | /unpacked_bin/ 3 | -------------------------------------------------------------------------------- /package/npm/README.md: -------------------------------------------------------------------------------- 1 | 2 | See [https://github.com/noredink/jetpack](https://github.com/noredink/jetpack) 3 | 
-------------------------------------------------------------------------------- /package/npm/bin/jetpack: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | var path = require("path"); 3 | var spawn = require("child_process").spawn; 4 | var fs = require("fs"); 5 | var packageInfo = require(path.join(__dirname, "..", "package.json")); 6 | var package = require(path.join(__dirname, "..", packageInfo.main)); 7 | 8 | var os = process.env.BINWRAP_PLATFORM || process.platform; 9 | var arch = process.env.BINWRAP_ARCH || process.arch; 10 | 11 | var requested = os + "-" + arch; 12 | var current = process.platform + "-" + process.arch; 13 | if (requested !== current ) { 14 | console.error("WARNING: Using binaries for the requested platform (" + requested + ") instead of for the actual platform (" + current + ").") 15 | } 16 | 17 | var binExt = ""; 18 | if (os == "win32") { 19 | binExt = ".exe"; 20 | } 21 | 22 | var unpackedBinPath = path.join(__dirname, "..", "unpacked_bin"); 23 | var binPath = path.join(unpackedBinPath, "jetpack" + binExt); 24 | 25 | function execBin() { 26 | spawn( 27 | binPath, 28 | process.argv.slice(2), 29 | {stdio: 'inherit'} 30 | ).on('exit', process.exit); 31 | } 32 | 33 | if (fs.existsSync(binPath)) { 34 | execBin(); 35 | } else { 36 | console.error("INFO: Running " + path.basename(__filename) + " for the first time; downloading the actual binary"); 37 | package.install(unpackedBinPath, os, arch).then(function(result) { 38 | execBin(); 39 | }, function(err) { 40 | console.log("ERR", err); 41 | process.exit(1); 42 | }); 43 | } 44 | -------------------------------------------------------------------------------- /package/npm/index.js: -------------------------------------------------------------------------------- 1 | var binwrap = require("binwrap"); 2 | var path = require("path"); 3 | 4 | var packageInfo = require(path.join(__dirname, "package.json")); 5 | var root = 6 | "https://github.com/NoRedInk/jetpack/releases/download/" + 7 | packageInfo.version + 8 | "/jetpack"; 9 | 10 | module.exports = binwrap({ 11 | dirname: __dirname, 12 | binaries: ["jetpack"], 13 | urls: { 14 | "darwin-x64": root + "-osx.tar.gz", 15 | "linux-x64": root + "-linux.tar.gz" 16 | } 17 | }); 18 | -------------------------------------------------------------------------------- /package/npm/package-lock.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@noredink/jetpack", 3 | "version": "3.0.0-rc1", 4 | "lockfileVersion": 1, 5 | "requires": true, 6 | "dependencies": { 7 | "ajv": { 8 | "version": "6.10.0", 9 | "resolved": "https://registry.npmjs.org/ajv/-/ajv-6.10.0.tgz", 10 | "integrity": "sha512-nffhOpkymDECQyR0mnsUtoCE8RlX38G0rYP+wgLWFyZuUyuuojSSvi/+euOiQBIn63whYwYVIIH1TvE3tu4OEg==", 11 | "requires": { 12 | "fast-deep-equal": "^2.0.1", 13 | "fast-json-stable-stringify": "^2.0.0", 14 | "json-schema-traverse": "^0.4.1", 15 | "uri-js": "^4.2.2" 16 | } 17 | }, 18 | "asn1": { 19 | "version": "0.2.4", 20 | "resolved": "https://registry.npmjs.org/asn1/-/asn1-0.2.4.tgz", 21 | "integrity": "sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg==", 22 | "requires": { 23 | "safer-buffer": "~2.1.0" 24 | } 25 | }, 26 | "assert-plus": { 27 | "version": "1.0.0", 28 | "resolved": "https://registry.npmjs.org/assert-plus/-/assert-plus-1.0.0.tgz", 29 | "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=" 30 | }, 31 | "asynckit": { 32 | "version": "0.4.0", 33 | 
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", 34 | "integrity": "sha1-x57Zf380y48robyXkLzDZkdLS3k=" 35 | }, 36 | "aws-sign2": { 37 | "version": "0.7.0", 38 | "resolved": "https://registry.npmjs.org/aws-sign2/-/aws-sign2-0.7.0.tgz", 39 | "integrity": "sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg=" 40 | }, 41 | "aws4": { 42 | "version": "1.8.0", 43 | "resolved": "https://registry.npmjs.org/aws4/-/aws4-1.8.0.tgz", 44 | "integrity": "sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==" 45 | }, 46 | "bcrypt-pbkdf": { 47 | "version": "1.0.2", 48 | "resolved": "https://registry.npmjs.org/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz", 49 | "integrity": "sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4=", 50 | "requires": { 51 | "tweetnacl": "^0.14.3" 52 | } 53 | }, 54 | "binary": { 55 | "version": "0.3.0", 56 | "resolved": "https://registry.npmjs.org/binary/-/binary-0.3.0.tgz", 57 | "integrity": "sha1-n2BVO8XOjDOG87VTz/R0Yq3sqnk=", 58 | "requires": { 59 | "buffers": "~0.1.1", 60 | "chainsaw": "~0.1.0" 61 | } 62 | }, 63 | "binwrap": { 64 | "version": "0.2.1", 65 | "resolved": "https://registry.npmjs.org/binwrap/-/binwrap-0.2.1.tgz", 66 | "integrity": "sha512-kILc2+zMfFEv66/NLfO2GIpmWRPE8hL68fv+o5A94OlN9AIIG4zernpgn9bpPAImb5t4QwFxnqAGSyP1+tGKrA==", 67 | "requires": { 68 | "mustache": "^2.3.0", 69 | "request": "^2.87.0", 70 | "request-promise": "^4.2.0", 71 | "tar": "^4.4.8", 72 | "unzip-stream": "^0.3.0" 73 | } 74 | }, 75 | "bluebird": { 76 | "version": "3.5.5", 77 | "resolved": "https://registry.npmjs.org/bluebird/-/bluebird-3.5.5.tgz", 78 | "integrity": "sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w==" 79 | }, 80 | "buffers": { 81 | "version": "0.1.1", 82 | "resolved": "https://registry.npmjs.org/buffers/-/buffers-0.1.1.tgz", 83 | "integrity": "sha1-skV5w77U1tOWru5tmorn9Ugqt7s=" 84 | }, 85 | "caseless": { 86 | "version": "0.12.0", 87 | "resolved": "https://registry.npmjs.org/caseless/-/caseless-0.12.0.tgz", 88 | "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=" 89 | }, 90 | "chainsaw": { 91 | "version": "0.1.0", 92 | "resolved": "https://registry.npmjs.org/chainsaw/-/chainsaw-0.1.0.tgz", 93 | "integrity": "sha1-XqtQsor+WAdNDVgpE4iCi15fvJg=", 94 | "requires": { 95 | "traverse": ">=0.3.0 <0.4" 96 | } 97 | }, 98 | "chownr": { 99 | "version": "1.1.1", 100 | "resolved": "https://registry.npmjs.org/chownr/-/chownr-1.1.1.tgz", 101 | "integrity": "sha512-j38EvO5+LHX84jlo6h4UzmOwi0UgW61WRyPtJz4qaadK5eY3BTS5TY/S1Stc3Uk2lIM6TPevAlULiEJwie860g==" 102 | }, 103 | "combined-stream": { 104 | "version": "1.0.8", 105 | "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", 106 | "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", 107 | "requires": { 108 | "delayed-stream": "~1.0.0" 109 | } 110 | }, 111 | "core-util-is": { 112 | "version": "1.0.2", 113 | "resolved": "https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.2.tgz", 114 | "integrity": "sha1-tf1UIgqivFq1eqtxQMlAdUUDwac=" 115 | }, 116 | "dashdash": { 117 | "version": "1.14.1", 118 | "resolved": "https://registry.npmjs.org/dashdash/-/dashdash-1.14.1.tgz", 119 | "integrity": "sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA=", 120 | "requires": { 121 | "assert-plus": "^1.0.0" 122 | } 123 | }, 124 | "delayed-stream": { 125 | "version": "1.0.0", 126 | "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", 127 | "integrity": "sha1-3zrhmayt+31ECqrgsp4icrJOxhk=" 128 | }, 129 
| "ecc-jsbn": { 130 | "version": "0.1.2", 131 | "resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz", 132 | "integrity": "sha1-OoOpBOVDUyh4dMVkt1SThoSamMk=", 133 | "requires": { 134 | "jsbn": "~0.1.0", 135 | "safer-buffer": "^2.1.0" 136 | } 137 | }, 138 | "extend": { 139 | "version": "3.0.2", 140 | "resolved": "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz", 141 | "integrity": "sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g==" 142 | }, 143 | "extsprintf": { 144 | "version": "1.3.0", 145 | "resolved": "https://registry.npmjs.org/extsprintf/-/extsprintf-1.3.0.tgz", 146 | "integrity": "sha1-lpGEQOMEGnpBT4xS48V06zw+HgU=" 147 | }, 148 | "fast-deep-equal": { 149 | "version": "2.0.1", 150 | "resolved": "https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-2.0.1.tgz", 151 | "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=" 152 | }, 153 | "fast-json-stable-stringify": { 154 | "version": "2.0.0", 155 | "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", 156 | "integrity": "sha1-1RQsDK7msRifh9OnYREGT4bIu/I=" 157 | }, 158 | "forever-agent": { 159 | "version": "0.6.1", 160 | "resolved": "https://registry.npmjs.org/forever-agent/-/forever-agent-0.6.1.tgz", 161 | "integrity": "sha1-+8cfDEGt6zf5bFd60e1C2P2sypE=" 162 | }, 163 | "form-data": { 164 | "version": "2.3.3", 165 | "resolved": "https://registry.npmjs.org/form-data/-/form-data-2.3.3.tgz", 166 | "integrity": "sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ==", 167 | "requires": { 168 | "asynckit": "^0.4.0", 169 | "combined-stream": "^1.0.6", 170 | "mime-types": "^2.1.12" 171 | } 172 | }, 173 | "fs-minipass": { 174 | "version": "1.2.6", 175 | "resolved": "https://registry.npmjs.org/fs-minipass/-/fs-minipass-1.2.6.tgz", 176 | "integrity": "sha512-crhvyXcMejjv3Z5d2Fa9sf5xLYVCF5O1c71QxbVnbLsmYMBEvDAftewesN/HhY03YRoA7zOMxjNGrF5svGaaeQ==", 177 | "requires": { 178 | "minipass": "^2.2.1" 179 | } 180 | }, 181 | "getpass": { 182 | "version": "0.1.7", 183 | "resolved": "https://registry.npmjs.org/getpass/-/getpass-0.1.7.tgz", 184 | "integrity": "sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo=", 185 | "requires": { 186 | "assert-plus": "^1.0.0" 187 | } 188 | }, 189 | "har-schema": { 190 | "version": "2.0.0", 191 | "resolved": "https://registry.npmjs.org/har-schema/-/har-schema-2.0.0.tgz", 192 | "integrity": "sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI=" 193 | }, 194 | "har-validator": { 195 | "version": "5.1.3", 196 | "resolved": "https://registry.npmjs.org/har-validator/-/har-validator-5.1.3.tgz", 197 | "integrity": "sha512-sNvOCzEQNr/qrvJgc3UG/kD4QtlHycrzwS+6mfTrrSq97BvaYcPZZI1ZSqGSPR73Cxn4LKTD4PttRwfU7jWq5g==", 198 | "requires": { 199 | "ajv": "^6.5.5", 200 | "har-schema": "^2.0.0" 201 | } 202 | }, 203 | "http-signature": { 204 | "version": "1.2.0", 205 | "resolved": "https://registry.npmjs.org/http-signature/-/http-signature-1.2.0.tgz", 206 | "integrity": "sha1-muzZJRFHcvPZW2WmCruPfBj7rOE=", 207 | "requires": { 208 | "assert-plus": "^1.0.0", 209 | "jsprim": "^1.2.2", 210 | "sshpk": "^1.7.0" 211 | } 212 | }, 213 | "is-typedarray": { 214 | "version": "1.0.0", 215 | "resolved": "https://registry.npmjs.org/is-typedarray/-/is-typedarray-1.0.0.tgz", 216 | "integrity": "sha1-5HnICFjfDBsR3dppQPlgEfzaSpo=" 217 | }, 218 | "isstream": { 219 | "version": "0.1.2", 220 | "resolved": "https://registry.npmjs.org/isstream/-/isstream-0.1.2.tgz", 221 | "integrity": "sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo=" 222 | }, 223 | "jsbn": 
{ 224 | "version": "0.1.1", 225 | "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", 226 | "integrity": "sha1-peZUwuWi3rXyAdls77yoDA7y9RM=" 227 | }, 228 | "json-schema": { 229 | "version": "0.2.3", 230 | "resolved": "https://registry.npmjs.org/json-schema/-/json-schema-0.2.3.tgz", 231 | "integrity": "sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM=" 232 | }, 233 | "json-schema-traverse": { 234 | "version": "0.4.1", 235 | "resolved": "https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz", 236 | "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==" 237 | }, 238 | "json-stringify-safe": { 239 | "version": "5.0.1", 240 | "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", 241 | "integrity": "sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus=" 242 | }, 243 | "jsprim": { 244 | "version": "1.4.1", 245 | "resolved": "https://registry.npmjs.org/jsprim/-/jsprim-1.4.1.tgz", 246 | "integrity": "sha1-MT5mvB5cwG5Di8G3SZwuXFastqI=", 247 | "requires": { 248 | "assert-plus": "1.0.0", 249 | "extsprintf": "1.3.0", 250 | "json-schema": "0.2.3", 251 | "verror": "1.10.0" 252 | } 253 | }, 254 | "lodash": { 255 | "version": "4.17.15", 256 | "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz", 257 | "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==" 258 | }, 259 | "mime-db": { 260 | "version": "1.40.0", 261 | "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.40.0.tgz", 262 | "integrity": "sha512-jYdeOMPy9vnxEqFRRo6ZvTZ8d9oPb+k18PKoYNYUe2stVEBPPwsln/qWzdbmaIvnhZ9v2P+CuecK+fpUfsV2mA==" 263 | }, 264 | "mime-types": { 265 | "version": "2.1.24", 266 | "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.24.tgz", 267 | "integrity": "sha512-WaFHS3MCl5fapm3oLxU4eYDw77IQM2ACcxQ9RIxfaC3ooc6PFuBMGZZsYpvoXS5D5QTWPieo1jjLdAm3TBP3cQ==", 268 | "requires": { 269 | "mime-db": "1.40.0" 270 | } 271 | }, 272 | "minimist": { 273 | "version": "0.0.8", 274 | "resolved": "https://registry.npmjs.org/minimist/-/minimist-0.0.8.tgz", 275 | "integrity": "sha1-hX/Kv8M5fSYluCKCYuhqp6ARsF0=" 276 | }, 277 | "minipass": { 278 | "version": "2.3.5", 279 | "resolved": "https://registry.npmjs.org/minipass/-/minipass-2.3.5.tgz", 280 | "integrity": "sha512-Gi1W4k059gyRbyVUZQ4mEqLm0YIUiGYfvxhF6SIlk3ui1WVxMTGfGdQ2SInh3PDrRTVvPKgULkpJtT4RH10+VA==", 281 | "requires": { 282 | "safe-buffer": "^5.1.2", 283 | "yallist": "^3.0.0" 284 | } 285 | }, 286 | "minizlib": { 287 | "version": "1.2.1", 288 | "resolved": "https://registry.npmjs.org/minizlib/-/minizlib-1.2.1.tgz", 289 | "integrity": "sha512-7+4oTUOWKg7AuL3vloEWekXY2/D20cevzsrNT2kGWm+39J9hGTCBv8VI5Pm5lXZ/o3/mdR4f8rflAPhnQb8mPA==", 290 | "requires": { 291 | "minipass": "^2.2.1" 292 | } 293 | }, 294 | "mkdirp": { 295 | "version": "0.5.1", 296 | "resolved": "https://registry.npmjs.org/mkdirp/-/mkdirp-0.5.1.tgz", 297 | "integrity": "sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=", 298 | "requires": { 299 | "minimist": "0.0.8" 300 | } 301 | }, 302 | "mustache": { 303 | "version": "2.3.2", 304 | "resolved": "https://registry.npmjs.org/mustache/-/mustache-2.3.2.tgz", 305 | "integrity": "sha512-KpMNwdQsYz3O/SBS1qJ/o3sqUJ5wSb8gb0pul8CO0S56b9Y2ALm8zCfsjPXsqGFfoNBkDwZuZIAjhsZI03gYVQ==" 306 | }, 307 | "oauth-sign": { 308 | "version": "0.9.0", 309 | "resolved": "https://registry.npmjs.org/oauth-sign/-/oauth-sign-0.9.0.tgz", 310 | "integrity": 
"sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ==" 311 | }, 312 | "performance-now": { 313 | "version": "2.1.0", 314 | "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", 315 | "integrity": "sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns=" 316 | }, 317 | "psl": { 318 | "version": "1.1.32", 319 | "resolved": "https://registry.npmjs.org/psl/-/psl-1.1.32.tgz", 320 | "integrity": "sha512-MHACAkHpihU/REGGPLj4sEfc/XKW2bheigvHO1dUqjaKigMp1C8+WLQYRGgeKFMsw5PMfegZcaN8IDXK/cD0+g==" 321 | }, 322 | "punycode": { 323 | "version": "2.1.1", 324 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-2.1.1.tgz", 325 | "integrity": "sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==" 326 | }, 327 | "qs": { 328 | "version": "6.5.2", 329 | "resolved": "https://registry.npmjs.org/qs/-/qs-6.5.2.tgz", 330 | "integrity": "sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA==" 331 | }, 332 | "request": { 333 | "version": "2.88.0", 334 | "resolved": "https://registry.npmjs.org/request/-/request-2.88.0.tgz", 335 | "integrity": "sha512-NAqBSrijGLZdM0WZNsInLJpkJokL72XYjUpnB0iwsRgxh7dB6COrHnTBNwN0E+lHDAJzu7kLAkDeY08z2/A0hg==", 336 | "requires": { 337 | "aws-sign2": "~0.7.0", 338 | "aws4": "^1.8.0", 339 | "caseless": "~0.12.0", 340 | "combined-stream": "~1.0.6", 341 | "extend": "~3.0.2", 342 | "forever-agent": "~0.6.1", 343 | "form-data": "~2.3.2", 344 | "har-validator": "~5.1.0", 345 | "http-signature": "~1.2.0", 346 | "is-typedarray": "~1.0.0", 347 | "isstream": "~0.1.2", 348 | "json-stringify-safe": "~5.0.1", 349 | "mime-types": "~2.1.19", 350 | "oauth-sign": "~0.9.0", 351 | "performance-now": "^2.1.0", 352 | "qs": "~6.5.2", 353 | "safe-buffer": "^5.1.2", 354 | "tough-cookie": "~2.4.3", 355 | "tunnel-agent": "^0.6.0", 356 | "uuid": "^3.3.2" 357 | } 358 | }, 359 | "request-promise": { 360 | "version": "4.2.4", 361 | "resolved": "https://registry.npmjs.org/request-promise/-/request-promise-4.2.4.tgz", 362 | "integrity": "sha512-8wgMrvE546PzbR5WbYxUQogUnUDfM0S7QIFZMID+J73vdFARkFy+HElj4T+MWYhpXwlLp0EQ8Zoj8xUA0he4Vg==", 363 | "requires": { 364 | "bluebird": "^3.5.0", 365 | "request-promise-core": "1.1.2", 366 | "stealthy-require": "^1.1.1", 367 | "tough-cookie": "^2.3.3" 368 | } 369 | }, 370 | "request-promise-core": { 371 | "version": "1.1.2", 372 | "resolved": "https://registry.npmjs.org/request-promise-core/-/request-promise-core-1.1.2.tgz", 373 | "integrity": "sha512-UHYyq1MO8GsefGEt7EprS8UrXsm1TxEvFUX1IMTuSLU2Rh7fTIdFtl8xD7JiEYiWU2dl+NYAjCTksTehQUxPag==", 374 | "requires": { 375 | "lodash": "^4.17.11" 376 | } 377 | }, 378 | "safe-buffer": { 379 | "version": "5.1.2", 380 | "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", 381 | "integrity": "sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==" 382 | }, 383 | "safer-buffer": { 384 | "version": "2.1.2", 385 | "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", 386 | "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==" 387 | }, 388 | "sshpk": { 389 | "version": "1.16.1", 390 | "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", 391 | "integrity": "sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg==", 392 | "requires": { 393 | "asn1": "~0.2.3", 394 | "assert-plus": "^1.0.0", 395 | "bcrypt-pbkdf": 
"^1.0.0", 396 | "dashdash": "^1.12.0", 397 | "ecc-jsbn": "~0.1.1", 398 | "getpass": "^0.1.1", 399 | "jsbn": "~0.1.0", 400 | "safer-buffer": "^2.0.2", 401 | "tweetnacl": "~0.14.0" 402 | } 403 | }, 404 | "stealthy-require": { 405 | "version": "1.1.1", 406 | "resolved": "https://registry.npmjs.org/stealthy-require/-/stealthy-require-1.1.1.tgz", 407 | "integrity": "sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks=" 408 | }, 409 | "tar": { 410 | "version": "4.4.8", 411 | "resolved": "https://registry.npmjs.org/tar/-/tar-4.4.8.tgz", 412 | "integrity": "sha512-LzHF64s5chPQQS0IYBn9IN5h3i98c12bo4NCO7e0sGM2llXQ3p2FGC5sdENN4cTW48O915Sh+x+EXx7XW96xYQ==", 413 | "requires": { 414 | "chownr": "^1.1.1", 415 | "fs-minipass": "^1.2.5", 416 | "minipass": "^2.3.4", 417 | "minizlib": "^1.1.1", 418 | "mkdirp": "^0.5.0", 419 | "safe-buffer": "^5.1.2", 420 | "yallist": "^3.0.2" 421 | } 422 | }, 423 | "tough-cookie": { 424 | "version": "2.4.3", 425 | "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-2.4.3.tgz", 426 | "integrity": "sha512-Q5srk/4vDM54WJsJio3XNn6K2sCG+CQ8G5Wz6bZhRZoAe/+TxjWB/GlFAnYEbkYVlON9FMk/fE3h2RLpPXo4lQ==", 427 | "requires": { 428 | "psl": "^1.1.24", 429 | "punycode": "^1.4.1" 430 | }, 431 | "dependencies": { 432 | "punycode": { 433 | "version": "1.4.1", 434 | "resolved": "https://registry.npmjs.org/punycode/-/punycode-1.4.1.tgz", 435 | "integrity": "sha1-wNWmOycYgArY4esPpSachN1BhF4=" 436 | } 437 | } 438 | }, 439 | "traverse": { 440 | "version": "0.3.9", 441 | "resolved": "https://registry.npmjs.org/traverse/-/traverse-0.3.9.tgz", 442 | "integrity": "sha1-cXuPIgzAu3tE5AUUwisui7xw2Lk=" 443 | }, 444 | "tunnel-agent": { 445 | "version": "0.6.0", 446 | "resolved": "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz", 447 | "integrity": "sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0=", 448 | "requires": { 449 | "safe-buffer": "^5.0.1" 450 | } 451 | }, 452 | "tweetnacl": { 453 | "version": "0.14.5", 454 | "resolved": "https://registry.npmjs.org/tweetnacl/-/tweetnacl-0.14.5.tgz", 455 | "integrity": "sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q=" 456 | }, 457 | "unzip-stream": { 458 | "version": "0.3.0", 459 | "resolved": "https://registry.npmjs.org/unzip-stream/-/unzip-stream-0.3.0.tgz", 460 | "integrity": "sha512-NG1h/MdGIX3HzyqMjyj1laBCmlPYhcO4xEy7gEqqzGiSLw7XqDQCnY4nYSn5XSaH8mQ6TFkaujrO8d/PIZN85A==", 461 | "requires": { 462 | "binary": "^0.3.0", 463 | "mkdirp": "^0.5.1" 464 | } 465 | }, 466 | "uri-js": { 467 | "version": "4.2.2", 468 | "resolved": "https://registry.npmjs.org/uri-js/-/uri-js-4.2.2.tgz", 469 | "integrity": "sha512-KY9Frmirql91X2Qgjry0Wd4Y+YTdrdZheS8TFwvkbLWf/G5KNJDCh6pKL5OZctEW4+0Baa5idK2ZQuELRwPznQ==", 470 | "requires": { 471 | "punycode": "^2.1.0" 472 | } 473 | }, 474 | "uuid": { 475 | "version": "3.3.2", 476 | "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.2.tgz", 477 | "integrity": "sha512-yXJmeNaw3DnnKAOKJE51sL/ZaYfWJRl1pK9dr19YFCu0ObS231AB1/LbqTKRAQ5kw8A90rA6fr4riOUpTZvQZA==" 478 | }, 479 | "verror": { 480 | "version": "1.10.0", 481 | "resolved": "https://registry.npmjs.org/verror/-/verror-1.10.0.tgz", 482 | "integrity": "sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA=", 483 | "requires": { 484 | "assert-plus": "^1.0.0", 485 | "core-util-is": "1.0.2", 486 | "extsprintf": "^1.2.0" 487 | } 488 | }, 489 | "yallist": { 490 | "version": "3.0.3", 491 | "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.0.3.tgz", 492 | "integrity": "sha512-S+Zk8DEWE6oKpV+vI3qWkaK+jSbIK86pCwe2IF/xwIpQ8jEuxpw9NyaGjmp9+BoJv5FV2piqCDcoCtStppiq2A==" 493 | } 494 | } 495 | } 496 | 
-------------------------------------------------------------------------------- /package/npm/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "@noredink/jetpack", 3 | "version": "3.0.0-rc1", 4 | "description": "Install jetpack", 5 | "preferGlobal": true, 6 | "engines": { 7 | "node": ">=5.2.0" 8 | }, 9 | "main": "index.js", 10 | "scripts": { 11 | "install": "npx binwrap-install", 12 | "prepare": "npx binwrap-prepare", 13 | "test": "npx binwrap-test", 14 | "prepublish": "npm test" 15 | }, 16 | "repository": { 17 | "type": "git", 18 | "url": "git+https://github.com/NoRedInk/jetpack.git" 19 | }, 20 | "keywords": [ 21 | "jetpack" 22 | ], 23 | "author": "Christoph Hermann", 24 | "license": "BSD-3-Clause", 25 | "bugs": { 26 | "url": "https://github.com/NoRedInk/jetpack/issues" 27 | }, 28 | "files": [ 29 | "index.js", 30 | "bin", 31 | "bin/jetpack" 32 | ], 33 | "homepage": "https://github.com/NoRedInk/jetpack", 34 | "bin": { 35 | "jetpack": "bin/jetpack" 36 | }, 37 | "dependencies": { 38 | "binwrap": "0.2.1" 39 | }, 40 | "devDependencies": {} 41 | } 42 | -------------------------------------------------------------------------------- /resources/hmr.js: -------------------------------------------------------------------------------- 1 | //////////////////// HMR BEGIN //////////////////// 2 | 3 | /* 4 | MIT License http://www.opensource.org/licenses/mit-license.php 5 | Original Author: Flux Xu @fluxxu 6 | */ 7 | 8 | /* 9 | A note about the environment that this code runs in... 10 | 11 | assumed globals: 12 | - `module` (from Node.js module system and webpack) 13 | 14 | assumed in scope after injection into the Elm IIFE: 15 | - `scope` (has an 'Elm' property which contains the public Elm API) 16 | - various functions defined by Elm which we have to hook such as `_Platform_initialize` and `_Scheduler_binding` 17 | */ 18 | 19 | if (moduleHot.hot) { 20 | (function () { 21 | "use strict"; 22 | 23 | //polyfill for IE: https://github.com/fluxxu/elm-hot-loader/issues/16 24 | if (typeof Object.assign != 'function') { 25 | Object.assign = function (target) { 26 | 'use strict'; 27 | if (target == null) { 28 | throw new TypeError('Cannot convert undefined or null to object'); 29 | } 30 | 31 | target = Object(target); 32 | for (var index = 1; index < arguments.length; index++) { 33 | var source = arguments[index]; 34 | if (source != null) { 35 | for (var key in source) { 36 | if (Object.prototype.hasOwnProperty.call(source, key)) { 37 | target[key] = source[key]; 38 | } 39 | } 40 | } 41 | } 42 | return target; 43 | }; 44 | } 45 | 46 | var instances = moduleHot.hot.data 47 | ? moduleHot.hot.data.instances || {} 48 | : {}; 49 | var uid = moduleHot.hot.data 50 | ? moduleHot.hot.data.uid || 0 51 | : 0; 52 | 53 | if (Object.keys(instances).length === 0) { 54 | console.log("[elm-hot] Enabled"); 55 | } 56 | 57 | var cancellers = []; 58 | 59 | // These 2 variables act as dynamically-scoped variables which are set only when the 60 | // Elm module's hooked init function is called. 
61 | var initializingInstance = null; 62 | var swappingInstance = null; 63 | 64 | moduleHot.hot.accept(); 65 | moduleHot.hot.dispose(function (data) { 66 | data.instances = instances; 67 | data.uid = uid; 68 | 69 | // Cleanup pending async tasks 70 | 71 | // First, make sure that no new tasks can be started until we finish replacing the code 72 | _Scheduler_binding = function () { 73 | return _Scheduler_fail(new Error('[elm-hot] Inactive Elm instance.')) 74 | }; 75 | 76 | // Second, kill pending tasks belonging to the old instance 77 | if (cancellers.length) { 78 | console.log('[elm-hot] Killing ' + cancellers.length + ' running processes...'); 79 | try { 80 | cancellers.forEach(function (cancel) { 81 | cancel(); 82 | }); 83 | } catch (e) { 84 | console.warn('[elm-hot] Kill process error: ' + e.message); 85 | } 86 | } 87 | }); 88 | 89 | function getId() { 90 | return ++uid; 91 | } 92 | 93 | function findPublicModules(parent, path) { 94 | var modules = []; 95 | for (var key in parent) { 96 | var child = parent[key]; 97 | var currentPath = path ? path + '.' + key : key; 98 | if ('init' in child) { 99 | modules.push({ 100 | path: currentPath, 101 | module: child 102 | }); 103 | } else { 104 | modules = modules.concat(findPublicModules(child, currentPath)); 105 | } 106 | } 107 | return modules; 108 | } 109 | 110 | function registerInstance(domNode, flags, path, portSubscribes, portSends) { 111 | var id = getId(); 112 | 113 | var instance = { 114 | id: id, 115 | path: path, 116 | domNode: domNode, 117 | flags: flags, 118 | portSubscribes: portSubscribes, 119 | portSends: portSends, 120 | navKeyPath: null, // array of JS property names by which the Browser.Navigation.Key can be found in the model 121 | lastState: null // last Elm app state (root model) 122 | }; 123 | 124 | return instances[id] = instance 125 | } 126 | 127 | function isFullscreenApp() { 128 | // Returns true if the Elm app will take over the entire DOM body. 129 | return typeof elm$browser$Browser$application !== 'undefined' 130 | || typeof elm$browser$Browser$document !== 'undefined'; 131 | } 132 | 133 | function wrapDomNode(node) { 134 | // When embedding an Elm app into a specific DOM node, Elm will replace the provided 135 | // DOM node with the Elm app's content. When the Elm app is compiled normally, the 136 | // original DOM node is reused (its attributes and content changes, but the object 137 | // in memory remains the same). But when compiled using `--debug`, Elm will completely 138 | // destroy the original DOM node and instead replace it with 2 brand new nodes: one 139 | // for your Elm app's content and the other for the Elm debugger UI. In this case, 140 | // if you held a reference to the DOM node provided for embedding, it would be orphaned 141 | // after Elm module initialization. 142 | // 143 | // So in order to make both cases consistent and isolate us from changes in how Elm 144 | // does this, we will insert a dummy node to wrap the node for embedding and hold 145 | // a reference to the dummy node. 146 | // 147 | // We will also put a tag on the dummy node so that the Elm developer knows who went 148 | // behind their back and rudely put stuff in their DOM. 
149 | var dummyNode = document.createElement("div"); 150 | dummyNode.setAttribute("data-elm-hot", "true"); 151 | var parentNode = node.parentNode; 152 | parentNode.replaceChild(dummyNode, node); 153 | dummyNode.appendChild(node); 154 | return dummyNode; 155 | } 156 | 157 | function wrapPublicModule(path, module) { 158 | var originalInit = module.init; 159 | if (originalInit) { 160 | module.init = function (args) { 161 | var elm; 162 | var portSubscribes = {}; 163 | var portSends = {}; 164 | var domNode = null; 165 | var flags = null; 166 | if (typeof args !== 'undefined') { 167 | // normal case 168 | domNode = args['node'] && !isFullscreenApp() 169 | ? wrapDomNode(args['node']) 170 | : document.body; 171 | flags = args['flags']; 172 | } else { 173 | // rare case: Elm allows init to be called without any arguments at all 174 | domNode = document.body; 175 | flags = undefined 176 | } 177 | initializingInstance = registerInstance(domNode, flags, path, portSubscribes, portSends); 178 | elm = originalInit(args); 179 | wrapPorts(elm, portSubscribes, portSends); 180 | initializingInstance = null; 181 | return elm; 182 | }; 183 | } else { 184 | console.error("Could not find a public module to wrap at path " + path) 185 | } 186 | } 187 | 188 | function swap(Elm, instance) { 189 | console.log('[elm-hot] Hot-swapping module: ' + instance.path); 190 | 191 | swappingInstance = instance; 192 | 193 | // remove from the DOM everything that had been created by the old Elm app 194 | var containerNode = instance.domNode; 195 | while (containerNode.lastChild) { 196 | containerNode.removeChild(containerNode.lastChild); 197 | } 198 | 199 | var m = getAt(instance.path.split('.'), Elm); 200 | var elm; 201 | if (m) { 202 | // prepare to initialize the new Elm module 203 | var args = {flags: instance.flags}; 204 | if (containerNode === document.body) { 205 | // fullscreen case: no additional args needed 206 | } else { 207 | // embed case: provide a new node for Elm to use 208 | var nodeForEmbed = document.createElement("div"); 209 | containerNode.appendChild(nodeForEmbed); 210 | args['node'] = nodeForEmbed; 211 | } 212 | 213 | elm = m.init(args); 214 | 215 | Object.keys(instance.portSubscribes).forEach(function (portName) { 216 | if (portName in elm.ports && 'subscribe' in elm.ports[portName]) { 217 | var handlers = instance.portSubscribes[portName]; 218 | if (!handlers.length) { 219 | return; 220 | } 221 | console.log('[elm-hot] Reconnect ' + handlers.length + ' handler(s) to port \'' 222 | + portName + '\' (' + instance.path + ').'); 223 | handlers.forEach(function (handler) { 224 | elm.ports[portName].subscribe(handler); 225 | }); 226 | } else { 227 | delete instance.portSubscribes[portName]; 228 | console.log('[elm-hot] Port was removed: ' + portName); 229 | } 230 | }); 231 | 232 | Object.keys(instance.portSends).forEach(function (portName) { 233 | if (portName in elm.ports && 'send' in elm.ports[portName]) { 234 | console.log('[elm-hot] Replace old port send with the new send'); 235 | instance.portSends[portName] = elm.ports[portName].send; 236 | } else { 237 | delete instance.portSends[portName]; 238 | console.log('[elm-hot] Port was removed: ' + portName); 239 | } 240 | }); 241 | } else { 242 | console.log('[elm-hot] Module was removed: ' + instance.path); 243 | } 244 | 245 | swappingInstance = null; 246 | } 247 | 248 | function wrapPorts(elm, portSubscribes, portSends) { 249 | var portNames = Object.keys(elm.ports || {}); 250 | //hook ports 251 | if (portNames.length) { 252 | // hook outgoing ports 253 | 
portNames 254 | .filter(function (name) { 255 | return 'subscribe' in elm.ports[name]; 256 | }) 257 | .forEach(function (portName) { 258 | var port = elm.ports[portName]; 259 | var subscribe = port.subscribe; 260 | var unsubscribe = port.unsubscribe; 261 | elm.ports[portName] = Object.assign(port, { 262 | subscribe: function (handler) { 263 | console.log('[elm-hot] ports.' + portName + '.subscribe called.'); 264 | if (!portSubscribes[portName]) { 265 | portSubscribes[portName] = [handler]; 266 | } else { 267 | //TODO handle subscribing to single handler more than once? 268 | portSubscribes[portName].push(handler); 269 | } 270 | return subscribe.call(port, handler); 271 | }, 272 | unsubscribe: function (handler) { 273 | console.log('[elm-hot] ports.' + portName + '.unsubscribe called.'); 274 | var list = portSubscribes[portName]; 275 | if (list && list.indexOf(handler) !== -1) { 276 | list.splice(list.lastIndexOf(handler), 1); 277 | } else { 278 | console.warn('[elm-hot] ports.' + portName + '.unsubscribe: handler not subscribed'); 279 | } 280 | return unsubscribe.call(port, handler); 281 | } 282 | }); 283 | }); 284 | 285 | // hook incoming ports 286 | portNames 287 | .filter(function (name) { 288 | return 'send' in elm.ports[name]; 289 | }) 290 | .forEach(function (portName) { 291 | var port = elm.ports[portName]; 292 | portSends[portName] = port.send; 293 | elm.ports[portName] = Object.assign(port, { 294 | send: function (val) { 295 | return portSends[portName].call(port, val); 296 | } 297 | }); 298 | }); 299 | } 300 | return portSubscribes; 301 | } 302 | 303 | /* 304 | Breadth-first search for a `Browser.Navigation.Key` in the user's app model. 305 | Returns the key and keypath or null if not found. 306 | */ 307 | function findNavKey(rootModel) { 308 | var queue = []; 309 | if (isDebuggerModel(rootModel)) { 310 | /* 311 | Extract the user's app model from the Elm Debugger's model. The Elm debugger 312 | can hold multiple references to the user's model (e.g. in its "history"). So 313 | we must be careful to only search within the "state" part of the Debugger. 314 | */ 315 | queue.push({value: rootModel['state'], keypath: ['state']}); 316 | } else { 317 | queue.push({value: rootModel, keypath: []}); 318 | } 319 | 320 | while (queue.length !== 0) { 321 | var item = queue.shift(); 322 | 323 | // The nav key is identified by a runtime tag added by the elm-hot injector. 324 | if (item.value.hasOwnProperty("elm-hot-nav-key")) { 325 | // found it! 326 | return item; 327 | } 328 | 329 | if (typeof item.value !== "object") { 330 | continue; 331 | } 332 | 333 | for (var propName in item.value) { 334 | if (!item.value.hasOwnProperty(propName)) continue; 335 | var newKeypath = item.keypath.slice(); 336 | newKeypath.push(propName); 337 | queue.push({value: item.value[propName], keypath: newKeypath}) 338 | } 339 | } 340 | 341 | return null; 342 | } 343 | 344 | 345 | function isDebuggerModel(model) { 346 | return model && model.hasOwnProperty("expando") && model.hasOwnProperty("state"); 347 | } 348 | 349 | function getAt(keyPath, obj) { 350 | return keyPath.reduce(function (xs, x) { 351 | return (xs && xs[x]) ? 
xs[x] : null 352 | }, obj) 353 | } 354 | 355 | function removeNavKeyListeners(navKey) { 356 | window.removeEventListener('popstate', navKey.value); 357 | window.navigator.userAgent.indexOf('Trident') < 0 || window.removeEventListener('hashchange', navKey.value); 358 | } 359 | 360 | // hook program creation 361 | var initialize = _Platform_initialize; 362 | _Platform_initialize = function (flagDecoder, args, init, update, subscriptions, stepperBuilder) { 363 | var instance = initializingInstance || swappingInstance; 364 | var tryFirstRender = !!swappingInstance; 365 | 366 | var hookedInit = function (args) { 367 | var initialStateTuple = init(args); 368 | if (swappingInstance) { 369 | var oldModel = swappingInstance.lastState; 370 | var newModel = initialStateTuple.a; 371 | 372 | if (typeof elm$browser$Browser$application !== 'undefined') { 373 | // attempt to find the Browser.Navigation.Key in the newly-constructed model 374 | // and bring it along with the rest of the old data. 375 | var newKeyLoc = findNavKey(newModel); 376 | var error = null; 377 | if (newKeyLoc === null) { 378 | error = "could not find Browser.Navigation.Key in the new app model"; 379 | } else if (instance.navKeyPath === null) { 380 | error = "could not find Browser.Navigation.Key in the old app model."; 381 | } else if (newKeyLoc.keypath.toString() !== instance.navKeyPath.toString()) { 382 | error = "the location of the Browser.Navigation.Key in the model has changed."; 383 | } else { 384 | var oldNavKey = getAt(instance.navKeyPath, oldModel); 385 | if (oldNavKey === null) { 386 | error = "keypath " + instance.navKeyPath + " is invalid. Please report a bug." 387 | } else { 388 | // remove event listeners attached to the old nav key 389 | removeNavKeyListeners(oldNavKey); 390 | 391 | // insert the new nav key into the old model in the exact same location 392 | var parentKeyPath = newKeyLoc.keypath.slice(0, -1); 393 | var lastSegment = newKeyLoc.keypath.slice(-1)[0]; 394 | var oldParent = getAt(parentKeyPath, oldModel); 395 | oldParent[lastSegment] = newKeyLoc.value; 396 | } 397 | } 398 | 399 | if (error !== null) { 400 | console.error("[elm-hot] Hot-swapping " + instance.path + " not possible: " + error); 401 | oldModel = newModel; 402 | } 403 | } 404 | 405 | // the heart of the app state hot-swap 406 | initialStateTuple.a = oldModel; 407 | 408 | // ignore any Cmds returned by the init during hot-swap 409 | initialStateTuple.b = elm$core$Platform$Cmd$none; 410 | } else { 411 | // capture the initial state for later 412 | initializingInstance.lastState = initialStateTuple.a; 413 | 414 | // capture Browser.application's navigation key for later 415 | if (typeof elm$browser$Browser$application !== 'undefined') { 416 | var navKeyLoc = findNavKey(initializingInstance.lastState); 417 | if (!navKeyLoc) { 418 | console.error("[elm-hot] Hot-swapping disabled for " + instance.path 419 | + ": could not find Browser.Navigation.Key in your model."); 420 | instance.navKeyPath = null; 421 | } else { 422 | instance.navKeyPath = navKeyLoc.keypath; 423 | } 424 | } 425 | } 426 | 427 | return initialStateTuple 428 | }; 429 | 430 | var hookedStepperBuilder = function (sendToApp, model) { 431 | var result; 432 | // first render may fail if shape of model changed too much 433 | if (tryFirstRender) { 434 | tryFirstRender = false; 435 | try { 436 | result = stepperBuilder(sendToApp, model) 437 | } catch (e) { 438 | throw new Error('[elm-hot] Hot-swapping ' + instance.path + 439 | ' is not possible, please reload page. 
Error: ' + e.message) 440 | } 441 | } else { 442 | result = stepperBuilder(sendToApp, model) 443 | } 444 | 445 | return function (nextModel, isSync) { 446 | if (instance) { 447 | // capture the state after every step so that later we can restore from it during a hot-swap 448 | instance.lastState = nextModel 449 | } 450 | return result(nextModel, isSync) 451 | } 452 | }; 453 | 454 | return initialize(flagDecoder, args, hookedInit, update, subscriptions, hookedStepperBuilder) 455 | }; 456 | 457 | // hook process creation 458 | var originalBinding = _Scheduler_binding; 459 | _Scheduler_binding = function (originalCallback) { 460 | return originalBinding(function () { 461 | // start the scheduled process, which may return a cancellation function. 462 | var cancel = originalCallback.apply(this, arguments); 463 | if (cancel) { 464 | cancellers.push(cancel); 465 | return function () { 466 | cancellers.splice(cancellers.indexOf(cancel), 1); 467 | return cancel(); 468 | }; 469 | } 470 | return cancel; 471 | }); 472 | }; 473 | 474 | scope['_elm_hot_loader_init'] = function (Elm) { 475 | // swap instances 476 | var removedInstances = []; 477 | for (var id in instances) { 478 | var instance = instances[id]; 479 | if (instance.domNode.parentNode) { 480 | swap(Elm, instance); 481 | } else { 482 | removedInstances.push(id); 483 | } 484 | } 485 | 486 | removedInstances.forEach(function (id) { 487 | delete instance[id]; 488 | }); 489 | 490 | // wrap all public modules 491 | var publicModules = findPublicModules(Elm); 492 | publicModules.forEach(function (m) { 493 | wrapPublicModule(m.path, m.module); 494 | }); 495 | } 496 | })(); 497 | 498 | scope['_elm_hot_loader_init'](scope['Elm']); 499 | } 500 | //////////////////// HMR END //////////////////// 501 | -------------------------------------------------------------------------------- /script/format.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -euo pipefail 4 | IFS=$'\n\t' 5 | 6 | DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" 7 | cd "${DIR}/.." 8 | # End Boilerplate 9 | 10 | # Get a list of all haskell files and pass them through ormolu. 11 | git ls-files '*.hs' -z | xargs -0 -I {} ormolu --mode inplace {}; 12 | 13 | # Expect no changes in the working directory. Complain if there are any. 14 | git diff --exit-code; 15 | -------------------------------------------------------------------------------- /script/install-stack.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | set -o errexit 4 | set -o nounset 5 | set -o verbose 6 | 7 | if [ -f "${HOME}/.local/bin/stack" ] 8 | then 9 | echo 'Stack is already installed.' 10 | else 11 | echo "Installing Stack for ${TRAVIS_OS_NAME}..." 12 | URL="https://www.stackage.org/stack/${TRAVIS_OS_NAME}-x86_64" 13 | curl --location "${URL}" > stack.tar.gz 14 | gunzip stack.tar.gz 15 | tar -x -f stack.tar --strip-components 1 16 | mkdir -p "${HOME}/.local/bin" 17 | mv stack "${HOME}/.local/bin/" 18 | rm stack.tar 19 | fi 20 | 21 | stack --version 22 | -------------------------------------------------------------------------------- /shell.nix: -------------------------------------------------------------------------------- 1 | { pkgs ? (import ./nix/pkgs.nix).pkgs, ghc ? 
pkgs.ghc }: 2 | pkgs.haskell.lib.buildStackProject { 3 | inherit ghc; 4 | name = "jetpack"; 5 | 6 | buildInputs = import ./nix/inputs.nix; 7 | } 8 | -------------------------------------------------------------------------------- /src/Alternative/IO.hs: -------------------------------------------------------------------------------- 1 | module Alternative.IO 2 | ( AlternativeIO 3 | , tryNext 4 | , lift 5 | ) 6 | where 7 | 8 | import Control.Monad.Except (ExceptT) 9 | import Control.Monad.Except (lift, throwError) 10 | 11 | type AlternativeIO = ExceptT () IO 12 | 13 | tryNext :: AlternativeIO a 14 | tryNext = throwError () 15 | -------------------------------------------------------------------------------- /src/Builder.hs: -------------------------------------------------------------------------------- 1 | module Builder 2 | ( build 3 | , HotReload (HotReload, DontHotReload) 4 | ) 5 | where 6 | 7 | import CliArguments (Args (..)) 8 | import qualified Compile 9 | import ConcatModule 10 | import Config (Config (Config)) 11 | import qualified Config 12 | import qualified Control.Concurrent 13 | import qualified Control.Concurrent.Async.Lifted as Concurrent 14 | import qualified Control.Exception.Safe as ES 15 | import Control.Lens.Indexed as Indexed hiding ((<.>)) 16 | import Control.Monad ((<=<)) 17 | import qualified Data.Aeson as Aeson 18 | import Data.Foldable (traverse_) 19 | import qualified Data.List as L 20 | import qualified Data.List.Utils as LU 21 | import Data.Semigroup ((<>)) 22 | import qualified Data.Text as T 23 | import qualified Data.Tree as Tree 24 | import qualified Dependencies 25 | import qualified DependencyTree 26 | import qualified EntryPoints 27 | import qualified HotReload 28 | import qualified Init 29 | import qualified Logger 30 | import qualified Message 31 | import qualified Parser.Ast as Ast 32 | import qualified Safe.IO 33 | import qualified System.Console.Regions as CR 34 | import qualified System.Directory as Dir 35 | import qualified System.Exit 36 | import System.FilePath ((<.>), ()) 37 | import qualified System.FilePath as FP 38 | import qualified System.FilePath.Glob as Glob 39 | import qualified ToolPaths 40 | 41 | data HotReload 42 | = HotReload 43 | | DontHotReload 44 | 45 | build :: Config -> Args -> HotReload -> IO (Maybe [Compile.Result]) 46 | build config args hotReloading = do 47 | result <- ES.tryAny $ buildHelp config args hotReloading 48 | printResult $ snd <$> result 49 | return $ either (const Nothing) Just $ fst <$> result 50 | 51 | printResult :: Either ES.SomeException [FilePath] -> IO () 52 | printResult result = 53 | case result of 54 | Right entryPoints -> do 55 | _ <- Message.list $ T.pack <$> entryPoints 56 | Message.success $ T.pack "Succeeded" 57 | Left err -> do 58 | _ <- putStrLn $ show err 59 | _ <- Message.error $ T.pack "Failed!" 
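{- Note: a failed build prints the exception and the red "Failed!" banner, then
   exits with a non-zero status so scripts and CI invoking jetpack can detect
   the broken build. -}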
60 | System.Exit.exitFailure 61 | 62 | buildHelp 63 | :: Config.Config -> Args -> HotReload -> IO ([Compile.Result], [FilePath]) 64 | buildHelp 65 | config@Config 66 | { Config.tempDir 67 | , Config.logDir 68 | , Config.elmRoot 69 | , Config.outputDir 70 | , Config.elmPath 71 | , Config.coffeePath 72 | , Config.entryPoints 73 | } 74 | args 75 | hotReloading = 76 | CR.displayConsoleRegions $ do 77 | toolPaths <- Init.setup tempDir logDir outputDir elmPath coffeePath 78 | traverse_ (Logger.clearLog logDir) Logger.allLogs 79 | checkElmStuffConsistency logDir elmRoot 80 | entryPoints <- EntryPoints.find args entryPoints 81 | -- GETTING DEPENDENCY TREE 82 | deps <- 83 | withSpinner $ \subRegion endSpinner -> do 84 | _ <- 85 | CR.setConsoleRegion subRegion $ 86 | T.pack " Finding dependencies for entrypoints." 87 | cache <- DependencyTree.readTreeCache tempDir 88 | deps <- 89 | Concurrent.mapConcurrently 90 | (DependencyTree.build config cache) 91 | entryPoints 92 | DependencyTree.writeTreeCache tempDir deps 93 | endSpinner "All dependencies found." 94 | pure deps 95 | -- COMPILATION 96 | let modules = LU.uniq $ concatMap Tree.flatten deps 97 | let Compile.Groupped {elm, js, coffee} = Compile.group modules 98 | result <- 99 | mconcat . mconcat . (\(a, b) -> [a, b]) <$> 100 | Concurrent.concurrently 101 | ( traverse 102 | (maybeInjectHotReload hotReloading <=< compile args config toolPaths) 103 | [(Ast.Elm, elm)] 104 | ) 105 | ( Concurrent.mapConcurrently 106 | (parallelCompile args config toolPaths) 107 | [(Ast.Js, js), (Ast.Coffee, coffee)] 108 | ) 109 | logCompileResults logDir result 110 | withSpinner $ \subRegion endSpinner -> do 111 | CR.setConsoleRegion subRegion $ T.pack " Writing modules." 112 | modules <- 113 | Concurrent.mapConcurrently 114 | ( \dep -> do 115 | (outPath, content) <- 116 | case hotReloading of 117 | DontHotReload -> ConcatModule.wrap config dep 118 | HotReload -> 119 | HotReload.wrap (Config.hotReloadingPort config) <$> 120 | ConcatModule.wrap config dep 121 | Safe.IO.writeFile outPath content 122 | return (dep, outPath) 123 | ) 124 | deps 125 | createdModulesJson tempDir (fmap snd modules) 126 | endSpinner "Modules written." 127 | traverse_ (Compile.printTime args) result 128 | -- RETURN WARNINGS IF ANY 129 | return (result, entryPoints) 130 | 131 | compile 132 | :: (Show a, TraversableWithIndex a t) 133 | => Args 134 | -> Config 135 | -> ToolPaths.ToolPaths 136 | -> (Ast.SourceType, t Dependencies.Dependency) 137 | -> IO (t Compile.Result) 138 | compile args config toolPaths (sourceType, modules) = 139 | withSpinner $ \subRegion endSpinner -> do 140 | _ <- 141 | CR.setConsoleRegion subRegion $ 142 | " " <> 143 | show sourceType <> 144 | " (0/" <> 145 | show (length modules) <> 146 | ") " 147 | CR.withConsoleRegion (CR.InLine subRegion) $ \region -> do 148 | result <- 149 | Indexed.itraverse 150 | ( \index m -> do 151 | r <- Compile.compile region args config toolPaths m 152 | CR.setConsoleRegion subRegion $ 153 | " " <> 154 | show sourceType <> 155 | " (" <> 156 | show index <> 157 | "/" <> 158 | show (length modules) <> 159 | ") " 160 | pure r 161 | ) 162 | modules 163 | endSpinner $ T.pack $ "Compiling " <> show sourceType <> " successful." 
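{- Note: unlike 'parallelCompile' below, this variant walks its group
   sequentially ('itraverse') and shows an (n/total) counter; 'buildHelp' sends
   the Elm group through it while Js and Coffee modules use 'parallelCompile',
   presumably because concurrent `elm make` runs would contend for the same
   elm-stuff. -}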
164 | pure result 165 | 166 | parallelCompile 167 | :: (Show a, TraversableWithIndex a t) 168 | => Args 169 | -> Config 170 | -> ToolPaths.ToolPaths 171 | -> (Ast.SourceType, t Dependencies.Dependency) 172 | -> IO (t Compile.Result) 173 | parallelCompile args config toolPaths (sourceType, modules) = 174 | withSpinner $ \subRegion endSpinner -> do 175 | _ <- CR.setConsoleRegion subRegion $ " " <> show sourceType <> " " 176 | CR.withConsoleRegion (CR.InLine subRegion) $ \region -> do 177 | result <- 178 | Concurrent.mapConcurrently 179 | ( \m -> do 180 | r <- Compile.compile region args config toolPaths m 181 | pure r 182 | ) 183 | modules 184 | endSpinner $ T.pack $ "Compiling " <> show sourceType <> " successful." 185 | pure result 186 | 187 | logCompileResults :: Traversable t => Config.LogDir -> t Compile.Result -> IO () 188 | logCompileResults logDir result = do 189 | _ <- 190 | traverse (Logger.appendLog logDir Logger.compileLog . T.pack . show) result 191 | traverse_ 192 | ( \Compile.Result {compiledFile, duration} -> 193 | Logger.appendLog logDir Logger.compileTime $ 194 | T.pack compiledFile <> 195 | ": " <> 196 | T.pack (show duration) <> 197 | "\n" 198 | ) 199 | result 200 | 201 | withSpinner :: (CR.ConsoleRegion -> (T.Text -> IO ()) -> IO a) -> IO a 202 | withSpinner go = 203 | let spin spinnerRegion counter = do 204 | Control.Concurrent.threadDelay 100000 205 | CR.setConsoleRegion spinnerRegion $ symbol counter 206 | spin spinnerRegion ((counter + 1) `mod` 8) 207 | in CR.withConsoleRegion CR.Linear $ \parentRegion -> do 208 | CR.withConsoleRegion (CR.InLine parentRegion) $ \spinnerRegion -> do 209 | CR.appendConsoleRegion spinnerRegion $ T.pack "\\" 210 | threadId <- Control.Concurrent.forkIO $ spin spinnerRegion 0 211 | result <- 212 | CR.withConsoleRegion (CR.InLine parentRegion) $ \subRegion -> do 213 | go subRegion $ \message -> do 214 | Control.Concurrent.killThread threadId 215 | CR.finishConsoleRegion parentRegion (message <> " ✔") 216 | pure result 217 | 218 | symbol :: Int -> T.Text 219 | symbol 0 = "⣾" 220 | symbol 1 = "⣽" 221 | symbol 2 = "⣻" 222 | symbol 3 = "⢿" 223 | symbol 4 = "⡿" 224 | symbol 5 = "⣟" 225 | symbol 6 = "⣯" 226 | symbol _ = "⣷" 227 | 228 | checkElmStuffConsistency :: Config.LogDir -> Config.ElmRoot -> IO () 229 | checkElmStuffConsistency logDir elmRoot = do 230 | files <- 231 | mconcat . 232 | filter ((/=) 2 . length) . 233 | L.groupBy sameModule . 234 | L.sortBy sortModules <$> 235 | Glob.glob (Config.unElmRoot elmRoot "elm-stuff/0.19.0/*.elm[io]") 236 | Logger.appendLog logDir Logger.consistencyLog . mconcat $ 237 | L.intersperse "\n" $ 238 | fmap T.pack files 239 | traverse_ Dir.removeFile files 240 | 241 | sameModule :: FilePath -> FilePath -> Bool 242 | sameModule a b = FP.dropExtension a == FP.dropExtension b 243 | 244 | sortModules :: FilePath -> FilePath -> Ordering 245 | sortModules a b = compare (FP.dropExtension a) (FP.dropExtension b) 246 | 247 | createdModulesJson :: Config.TempDir -> [FilePath] -> IO () 248 | createdModulesJson tempDir paths = do 249 | let encodedPaths = Aeson.encode paths 250 | let jsonPath = Config.unTempDir tempDir "modules" <.> "json" 251 | _ <- Safe.IO.writeFileByteString jsonPath encodedPaths 252 | return () 253 | 254 | maybeInjectHotReload 255 | :: (Show a, TraversableWithIndex a t) 256 | => HotReload 257 | -> t Compile.Result 258 | -> IO (t Compile.Result) 259 | maybeInjectHotReload mode result = 260 | case mode of 261 | DontHotReload -> pure result 262 | HotReload -> do 263 | traverse_ (HotReload.inject . 
Compile.outputFile) result 264 | return result 265 | -------------------------------------------------------------------------------- /src/Cleaner.hs: -------------------------------------------------------------------------------- 1 | module Cleaner where 2 | 3 | import Config (Config (Config)) 4 | import qualified Config 5 | import Data.Semigroup ((<>)) 6 | import qualified Data.Text as T 7 | import qualified Message 8 | import System.Directory (removeDirectoryRecursive) 9 | import System.FilePath (()) 10 | 11 | clean :: Config -> IO () 12 | clean Config {Config.elmRoot, Config.tempDir} = do 13 | let elmStuff = 14 | (Config.unElmRoot elmRoot "elm-stuff" "build-artifacts") 15 | removeDirectoryRecursive elmStuff 16 | removeDirectoryRecursive $ Config.unTempDir tempDir 17 | Message.warning 18 | ( "Removed " <> T.pack elmStuff <> " and " <> 19 | T.pack (Config.unTempDir tempDir) 20 | ) 21 | -------------------------------------------------------------------------------- /src/CliArguments.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ApplicativeDo #-} 2 | 3 | module CliArguments 4 | ( Args (..) 5 | , RunMode (..) 6 | , CompileMode (..) 7 | , readArguments 8 | ) 9 | where 10 | 11 | import Data.Semigroup ((<>)) 12 | import Options.Applicative 13 | import System.FilePath () 14 | 15 | data Args 16 | = Args 17 | { entryPointGlob :: [String] 18 | , configPath :: Maybe FilePath 19 | , compileMode :: CompileMode 20 | , time :: Bool 21 | , clean :: Bool 22 | , runMode :: RunMode 23 | } 24 | 25 | data CompileMode 26 | = Normal 27 | | Debug 28 | | Optimize 29 | 30 | data RunMode 31 | = RunOnce 32 | | Watch 33 | | HotReloading 34 | | Version 35 | 36 | readArguments :: IO Args 37 | readArguments = 38 | execParser (info (parser <**> helper) $ fullDesc <> progDesc "🚀 📦") 39 | 40 | parser :: Parser Args 41 | parser = do 42 | entryPointGlob <- many (strArgument (help "Entry points to compile.")) 43 | configPath <- 44 | option 45 | auto 46 | ( long "config" <> short 'c' <> value Nothing <> 47 | help "Path to config file." 
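{- Hypothetical invocations accepted by this parser (binary name assumed to be
   `jetpack`):

     jetpack                    -- build every entry point once
     jetpack ui/src/index.js    -- build only the given entry point glob
     jetpack --watch --time     -- rebuild on changes, printing compile times
     jetpack --hot              -- watch and hot reload over a websocket
-}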
48 | ) 49 | debug <- 50 | switch (long "debug" <> short 'd' <> help "Run jetpack in debug mode.") 51 | optimize <- 52 | switch 53 | (long "optimize" <> short 'O' <> help "Compile Elm in optimized mode.") 54 | version <- 55 | switch 56 | (long "version" <> short 'v' <> help "display the version of jetpack.") 57 | time <- switch (long "time" <> short 't' <> help "display compile times.") 58 | watch <- switch (long "watch" <> short 'w' <> help "watch for changes.") 59 | hotReloading <- 60 | switch 61 | (long "hot" <> help "watch for changes and hot reload in the browser.") 62 | clean <- 63 | switch 64 | (long "clean" <> short 'c' <> help "Cleans elm-stuff and removes .jetpack") 65 | return 66 | Args 67 | { entryPointGlob = entryPointGlob 68 | , configPath = configPath 69 | , compileMode = if debug 70 | then Debug 71 | else 72 | if optimize 73 | then Optimize 74 | else Normal 75 | , time = time 76 | , clean = clean 77 | , runMode = if version 78 | then Version 79 | else 80 | if hotReloading 81 | then HotReloading 82 | else 83 | if watch 84 | then Watch 85 | else RunOnce 86 | } 87 | -------------------------------------------------------------------------------- /src/Compile.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | 3 | {-| 4 | -} 5 | module Compile where 6 | 7 | import CliArguments (Args (..), CompileMode (..)) 8 | import Config (Config (Config)) 9 | import qualified Config 10 | import Control.Exception.Safe (Exception) 11 | import qualified Control.Exception.Safe as ES 12 | import Control.Monad (when) 13 | import qualified Data.ByteString as BS 14 | import qualified Data.ByteString.Char8 as BSC 15 | import Data.Semigroup ((<>)) 16 | import qualified Data.Text as T 17 | import qualified Data.Text.IO as TIO 18 | import Data.Time.Clock (UTCTime, getCurrentTime) 19 | import Data.Typeable (Typeable) 20 | import Dependencies (Dependency (..)) 21 | import Formatting (sformat) 22 | import Formatting.Clock (timeSpecs) 23 | import GHC.IO.Handle 24 | import Parser.Ast as Ast 25 | import System.Clock 26 | ( Clock (Monotonic) 27 | , TimeSpec 28 | , diffTimeSpec 29 | , getTime 30 | , toNanoSecs 31 | ) 32 | import qualified System.Console.Regions as CR 33 | import System.Directory (copyFile) 34 | import System.Exit 35 | import System.FilePath (()) 36 | import System.IO (utf8) 37 | import System.Process 38 | import ToolPaths 39 | import Utils.Files (pathToFileName) 40 | 41 | data Result 42 | = Result 43 | { duration :: Duration 44 | , compiledAt :: UTCTime 45 | , command :: T.Text 46 | , stdout :: Maybe T.Text 47 | , compiledFile :: FilePath 48 | , outputFile :: FilePath 49 | } 50 | deriving (Show) 51 | 52 | elmFiles :: [Compile.Result] -> [FilePath] 53 | elmFiles [] = [] 54 | elmFiles (Result {outputFile} : rest) 55 | | T.isSuffixOf ".elm.js" (T.pack outputFile) = outputFile : elmFiles rest 56 | | otherwise = elmFiles rest 57 | 58 | printTime :: Args -> Compile.Result -> IO () 59 | printTime Args {time} Compile.Result {compiledFile, duration} = 60 | when time $ 61 | TIO.putStrLn $ 62 | T.pack compiledFile <> 63 | ": " <> 64 | formatDuration duration 65 | 66 | formatDuration :: Duration -> T.Text 67 | formatDuration (Duration start end) = sformat timeSpecs start end 68 | 69 | data Duration 70 | = Duration 71 | { start :: TimeSpec 72 | , end :: TimeSpec 73 | } 74 | 75 | instance Show Duration where 76 | 77 | show (Duration start end) = 78 | show (div (toNanoSecs (diffTimeSpec end start)) 1000000) 79 | 80 | compile 81 | :: 
CR.ConsoleRegion -> Args -> Config -> ToolPaths -> Dependency -> IO Result 82 | compile 83 | region 84 | args 85 | config@Config {Config.tempDir} 86 | toolPaths 87 | Dependency 88 | { fileType 89 | , filePath 90 | } = 91 | runCompiler 92 | region 93 | args 94 | config 95 | fileType 96 | toolPaths 97 | Arguments 98 | { input = filePath 99 | , output = buildArtifactPath tempDir fileType filePath 100 | } 101 | 102 | data Arguments 103 | = Arguments 104 | { input :: FilePath 105 | , output :: FilePath 106 | } 107 | 108 | runCompiler 109 | :: CR.ConsoleRegion 110 | -> Args 111 | -> Config 112 | -> Ast.SourceType 113 | -> ToolPaths 114 | -> Arguments 115 | -> IO Result 116 | runCompiler region args config fileType ToolPaths {elm, coffee} arguments = 117 | case fileType of 118 | Ast.Elm -> elmCompiler elm region args config arguments 119 | Ast.Js -> jsCompiler region arguments 120 | Ast.Coffee -> coffeeCompiler coffee region arguments 121 | 122 | data Groupped 123 | = Groupped 124 | { elm :: [Dependency] 125 | , coffee :: [Dependency] 126 | , js :: [Dependency] 127 | } 128 | 129 | group :: [Dependency] -> Groupped 130 | group = 131 | foldl 132 | ( \Groupped {elm, js, coffee} dep -> 133 | case Dependencies.fileType dep of 134 | Ast.Elm -> Groupped {elm = dep : elm, js, coffee} 135 | Ast.Js -> Groupped {js = dep : js, elm, coffee} 136 | Ast.Coffee -> Groupped {coffee = dep : coffee, elm, js} 137 | ) 138 | Groupped {elm = [], js = [], coffee = []} 139 | 140 | buildArtifactPath :: Config.TempDir -> Ast.SourceType -> FilePath -> String 141 | buildArtifactPath tempDir fileType inputPath = 142 | Config.unTempDir tempDir pathToFileName inputPath extension 143 | where 144 | extension = 145 | case fileType of 146 | Ast.Elm -> "js" 147 | Ast.Js -> "js" 148 | Ast.Coffee -> "js" 149 | 150 | --------------- 151 | -- COMPILERS -- 152 | --------------- 153 | elmCompiler 154 | :: Config.ElmPath 155 | -> CR.ConsoleRegion 156 | -> Args 157 | -> Config 158 | -> Arguments 159 | -> IO Result 160 | elmCompiler elm region args Config {elmRoot} Arguments {input, output} = do 161 | let Args {compileMode} = args 162 | let modeFlag = 163 | case compileMode of 164 | Debug -> " --debug" 165 | Optimize -> " --optimize" 166 | Normal -> "" 167 | let cmd = 168 | T.pack (Config.unElmPath elm) <> " " <> "make" <> " " <> "../" <> 169 | T.pack input <> 170 | " --output " <> 171 | "../" <> 172 | T.pack output <> 173 | T.pack modeFlag 174 | runCmd region input output cmd $ Just $ Config.unElmRoot elmRoot 175 | 176 | coffeeCompiler 177 | :: Config.CoffeePath -> CR.ConsoleRegion -> Arguments -> IO Result 178 | coffeeCompiler coffee region Arguments {input, output} = do 179 | let cmd = 180 | T.pack (Config.unCoffeePath coffee) <> " -p " <> T.pack input <> " > " <> 181 | T.pack output 182 | runCmd region input output cmd Nothing 183 | 184 | {-| The js compiler will basically only copy the file into the tmp dir. 
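For example (hypothetical path, default temp_directory): a required
`modules/foo.js` would simply be copied to something like
`./.jetpack/build_artifacts/modules___foo.js.js`, the name produced by
`buildArtifactPath`/`pathToFileName` above, where path separators become `___`.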
185 | -} 186 | jsCompiler :: CR.ConsoleRegion -> Arguments -> IO Result 187 | jsCompiler region Arguments {input, output} = do 188 | start <- getTime Monotonic 189 | _ <- copyFile input output 190 | CR.setConsoleRegion region $ T.pack input <> " -> " <> T.pack output 191 | currentTime <- getCurrentTime 192 | end <- getTime Monotonic 193 | return 194 | Result 195 | { duration = Duration start end 196 | , compiledAt = currentTime 197 | , command = T.unwords ["moved", T.pack input, "=>", T.pack output] 198 | , stdout = Nothing 199 | , compiledFile = input 200 | , outputFile = output 201 | } 202 | 203 | runCmd 204 | :: CR.ConsoleRegion 205 | -> FilePath 206 | -> FilePath 207 | -> T.Text 208 | -> Maybe String 209 | -> IO Result 210 | runCmd region input output cmd maybeCwd = do 211 | CR.setConsoleRegion region $ T.pack input 212 | start <- getTime Monotonic 213 | (ec, errContent, content) <- runAndWaitForProcess (T.unpack cmd) maybeCwd 214 | end <- getTime Monotonic 215 | case ec of 216 | ExitSuccess -> do 217 | CR.setConsoleRegion region $ T.pack "Compiled: " <> T.pack input 218 | currentTime <- getCurrentTime 219 | return 220 | Result 221 | { duration = Duration start end 222 | , compiledAt = currentTime 223 | , command = cmd 224 | , stdout = Just $ T.pack content 225 | , compiledFile = input 226 | , outputFile = output 227 | } 228 | ExitFailure _ -> 229 | ES.throwM $ CompileError cmd (T.pack (content <> errContent)) 230 | 231 | runAndWaitForProcess :: String -> Maybe String -> IO (ExitCode, String, String) 232 | runAndWaitForProcess cmd maybeCwd = do 233 | (_, Just out, Just err, ph) <- 234 | createProcess 235 | (proc "bash" ["-c", cmd]) 236 | { std_out = CreatePipe 237 | , std_err = CreatePipe 238 | , cwd = maybeCwd 239 | } 240 | hSetEncoding out utf8 241 | hSetEncoding err utf8 242 | gatherOutput ph err out 243 | 244 | -- https://passingcuriosity.com/2015/haskell-reading-process-safe-deadlock/ 245 | gatherOutput 246 | :: ProcessHandle -> Handle -> Handle -> IO (ExitCode, String, String) 247 | gatherOutput ph h1 h2 = work mempty mempty 248 | where 249 | work acc1 acc2 = 250 | -- Read any outstanding input. 251 | do 252 | bs1 <- BS.hGetNonBlocking h1 (64 * 1024) 253 | let acc1' = acc1 <> bs1 254 | bs2 <- BS.hGetNonBlocking h2 (64 * 1024) 255 | let acc2' = acc2 <> bs2 256 | -- Check on the process. 257 | s <- getProcessExitCode ph 258 | -- Exit or loop. 259 | case s of 260 | Nothing -> work acc1' acc2' 261 | Just ec -> 262 | -- Get any last bit written between the read and the status 263 | -- check. 264 | do 265 | last1 <- BS.hGetContents h1 266 | last2 <- BS.hGetContents h2 267 | pure $ (ec, BSC.unpack $ acc1' <> last1, BSC.unpack $ acc2' <> last2) 268 | 269 | data Error 270 | = CompileError 271 | T.Text 272 | T.Text 273 | deriving (Typeable, Exception) 274 | 275 | instance Show Error where 276 | 277 | show (CompileError cmd msg) = 278 | T.unpack $ T.unlines ["Command:", "", " $ " <> cmd, "", msg] 279 | -------------------------------------------------------------------------------- /src/ConcatModule.hs: -------------------------------------------------------------------------------- 1 | {-| Concat all modules required in an entrypoint into one file. 
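Each module's compiled output is wrapped in a named function taking
(module, exports), its `require(...)` calls are rewritten to
`jetpackRequire(...)` (see 'replaceRequire'), and all wrappers are concatenated
with the small runtime emitted by 'addBoilerplate', which finally invokes the
entry point.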
2 | -} 3 | module ConcatModule 4 | ( wrap 5 | , wrapModule 6 | , replaceRequire 7 | ) 8 | where 9 | 10 | import qualified Config 11 | import Config (Config (Config)) 12 | import Data.Char (isSpace) 13 | import Data.Foldable (all) 14 | import qualified Data.List.Utils as LU 15 | import Data.Semigroup ((<>)) 16 | import qualified Data.Text as T 17 | import qualified Data.Tree as Tree 18 | import Dependencies (Dependency (..), DependencyTree) 19 | import qualified Parser.Ast as Ast 20 | import System.Directory (createDirectoryIfMissing) 21 | import System.FilePath as FP 22 | import Text.Regex (mkRegex, subRegex) 23 | import qualified Utils.Files as F 24 | import qualified Utils.Tree as UT 25 | 26 | wrap :: Config -> DependencyTree -> IO (FilePath, T.Text) 27 | wrap Config {Config.outputDir, Config.entryPoints, Config.tempDir} dep = do 28 | module' <- traverse (withContent tempDir) $ uniqNodes dep 29 | let wrapped = fmap wrapDependency module' 30 | out <- 31 | writeJsModule outputDir entryPoints wrapped $ 32 | Dependencies.filePath $ 33 | Tree.rootLabel dep 34 | return out 35 | 36 | uniqNodes :: DependencyTree -> [(Dependency, [Dependency])] 37 | uniqNodes = LU.uniq . UT.nodesWithChildren 38 | 39 | data Module 40 | = Module 41 | { filePath :: FilePath 42 | , dependencies :: [Dependency] 43 | , content :: T.Text 44 | } 45 | 46 | withContent :: Config.TempDir -> (Dependency, [Dependency]) -> IO Module 47 | withContent tempDir (Dependency {filePath, fileType}, dependencies) = do 48 | let name = F.pathToFileName filePath "js" 49 | rawContent <- fmap T.pack $ readFile $ Config.unTempDir tempDir name 50 | let content = 51 | case fileType of 52 | Ast.Elm -> ensureElmIife rawContent 53 | _ -> rawContent 54 | return Module {filePath, dependencies, content} 55 | 56 | ensureElmIife :: T.Text -> T.Text 57 | ensureElmIife input = 58 | "(function() {\n\n" <> input <> 59 | "\n\nwindow.Elm = this.Elm;\n\n}.call(exports))" 60 | 61 | wrapDependency :: Module -> T.Text 62 | wrapDependency Module {filePath, dependencies, content} = 63 | wrapModule filePath (foldr replaceRequire content dependencies) 64 | 65 | replaceRequire :: Dependency -> T.Text -> T.Text 66 | replaceRequire Dependency {requiredAs, filePath} body = 67 | T.pack $ subRegex requireRegex (T.unpack body) (T.unpack jetpackRequire) 68 | where 69 | fnName = pathToFunctionName filePath "js" 70 | requireRegex = 71 | mkRegex $ "require\\([ \t]*['\"]" <> requiredAs <> "['\"][ \t]*\\)" 72 | jetpackRequire = "jetpackRequire(" <> fnName <> ", \"" <> fnName <> "\")" 73 | 74 | writeJsModule 75 | :: Config.OutputDir 76 | -> Config.EntryPoints 77 | -> [T.Text] 78 | -> FilePath 79 | -> IO (FilePath, T.Text) 80 | writeJsModule outputDir entryPoints fns rootFilePath = do 81 | let out = 82 | Config.unOutputDir outputDir 83 | FP.makeRelative (Config.unEntryPoints entryPoints) rootFilePath 84 | let rootName = pathToFunctionName rootFilePath "js" 85 | createDirectoryIfMissing True $ FP.takeDirectory out 86 | let wrapped = addBoilerplate rootName fns 87 | return (out, wrapped) 88 | 89 | addBoilerplate :: T.Text -> [T.Text] -> T.Text 90 | addBoilerplate root fns = 91 | T.unlines 92 | [ "(function() {" 93 | , "var jetpackCache = {};" 94 | , "function jetpackRequire(fn, fnName) {" 95 | , " var e = {};" 96 | , " var m = { exports : e };" 97 | , " if (typeof fn !== \"function\") {" 98 | , " console.error(\"Required function isn't a jetpack module.\", fn)" 99 | , " return;" 100 | , " }" 101 | , " if (jetpackCache[fnName]) {" 102 | , " return jetpackCache[fnName];" 103 | , " }" 
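{- Note on the two cache writes below: the cache is seeded with the still-empty
   `m.exports` before `fn` runs, so a circular `jetpackRequire` during
   evaluation sees the partially built module instead of recursing forever; it
   is then overwritten after `fn` returns in case the module reassigned
   `module.exports`, mirroring CommonJS semantics. -}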
104 | , " jetpackCache[fnName] = m.exports;" 105 | , " fn(m, e); " 106 | , " jetpackCache[fnName] = m.exports;" 107 | , " return m.exports;" 108 | , "}" 109 | , T.concat fns 110 | , T.concat ["jetpackRequire(", root, ", \"", root, "\");"] -- calling the entry point 111 | , "})();" 112 | ] 113 | 114 | {-| Wraps a module in a function and injects require, module, exports. 115 | >>> wrapModule "foo" "console.log(42);" 116 | "/* START: foo */\nfunction foo_js(module, exports) {\nconsole.log(42);\n} /* END: foo */\n" 117 | -} 118 | wrapModule :: FilePath -> T.Text -> T.Text 119 | wrapModule path body = 120 | T.concat 121 | [ "/* START: " 122 | , filePath 123 | , " */" 124 | , if all isSpace $ T.unpack body 125 | then " console.warn(\"" <> filePath <> ": is an empty module!\");" 126 | else "" 127 | , "\n" 128 | , T.concat ["function ", fnName, "(module, exports) {\n"] 129 | , body 130 | , "\n} /* END: " 131 | , filePath 132 | , " */" 133 | , "\n" 134 | ] 135 | where 136 | fnName = pathToFunctionName path "js" 137 | filePath = T.replace "___" "/" $ T.pack path 138 | 139 | pathToFunctionName :: FilePath -> String -> T.Text 140 | pathToFunctionName filePath = 141 | T.replace "@" "_" . T.replace "." "_" . T.pack . F.pathToFileName filePath 142 | -------------------------------------------------------------------------------- /src/Config.hs: -------------------------------------------------------------------------------- 1 | module Config 2 | ( Config (..) 3 | , readConfig 4 | , load 5 | , ElmPath (ElmPath) 6 | , unElmPath 7 | , CoffeePath (CoffeePath) 8 | , unCoffeePath 9 | , EntryPoints (EntryPoints) 10 | , unEntryPoints 11 | , ModulesDir (ModulesDir) 12 | , unModulesDir 13 | , SourceDir (SourceDir) 14 | , unSourceDir 15 | , ElmRoot (ElmRoot) 16 | , unElmRoot 17 | , TempDir (TempDir) 18 | , unTempDir 19 | , LogDir (LogDir) 20 | , unLogDir 21 | , OutputDir (OutputDir) 22 | , unOutputDir 23 | , NoParse (NoParse) 24 | , unNoParse 25 | , WatchFileExt (WatchFileExt) 26 | , unWatchFileExt 27 | , WatchIgnorePatterns (WatchIgnorePatterns) 28 | , unWatchIgnorePatterns 29 | , HotReloadingPort (HotReloadingPort) 30 | , unHotReloadingPort 31 | ) 32 | where 33 | 34 | import qualified Data.Aeson as Aeson 35 | import Data.Aeson ((.!=), (.:), (.:?)) 36 | import Data.Aeson.Types (typeMismatch) 37 | import qualified Data.ByteString.Lazy as BL 38 | import Data.Semigroup ((<>)) 39 | import qualified Data.Text as T 40 | import Message 41 | import qualified System.Directory as Dir 42 | import System.Exit 43 | import System.FilePath (()) 44 | 45 | data Config 46 | = Config 47 | { entryPoints :: EntryPoints 48 | , modulesDirs :: [ModulesDir] 49 | , sourceDir :: SourceDir 50 | , elmRoot :: ElmRoot 51 | , tempDir :: TempDir 52 | , logDir :: LogDir 53 | , outputDir :: OutputDir 54 | , elmPath :: Maybe ElmPath 55 | , coffeePath :: Maybe CoffeePath 56 | , noParse :: [NoParse] 57 | , watchFileExt :: [WatchFileExt] 58 | , watchIgnorePatterns :: [WatchIgnorePatterns] 59 | , hotReloadingPort :: HotReloadingPort 60 | } 61 | deriving (Show, Eq) 62 | 63 | newtype ElmPath 64 | = ElmPath 65 | { unElmPath :: FilePath 66 | } 67 | deriving (Show, Eq) 68 | 69 | newtype CoffeePath 70 | = CoffeePath 71 | { unCoffeePath :: FilePath 72 | } 73 | deriving (Show, Eq) 74 | 75 | newtype EntryPoints 76 | = EntryPoints 77 | { unEntryPoints :: FilePath 78 | } 79 | deriving (Show, Eq) 80 | 81 | newtype ModulesDir 82 | = ModulesDir 83 | { unModulesDir :: FilePath 84 | } 85 | deriving (Show, Eq) 86 | 87 | newtype SourceDir 88 | = SourceDir 89 | { 
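{- For reference, the FromJSON instance further down accepts a jetpack.json
   roughly of this shape (all paths here are hypothetical):

     {
       "entry_points": "./ui/src",
       "modules_directories": ["./node_modules"],
       "source_directory": "./ui/src",
       "elm_root_directory": "./ui",
       "output_js_directory": "./public/js",
       "temp_directory": "./.jetpack/build_artifacts"
     }

   log_directory, elm_bin_path, coffee_path, no_parse, watch_file_extensions,
   watch_file_ignore_patterns and hot_reloading_port are optional or come with
   defaults. -}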
unSourceDir :: FilePath 90 | } 91 | deriving (Show, Eq) 92 | 93 | newtype ElmRoot 94 | = ElmRoot 95 | { unElmRoot :: FilePath 96 | } 97 | deriving (Show, Eq) 98 | 99 | newtype TempDir 100 | = TempDir 101 | { unTempDir :: FilePath 102 | } 103 | deriving (Show, Eq) 104 | 105 | newtype LogDir 106 | = LogDir 107 | { unLogDir :: FilePath 108 | } 109 | deriving (Show, Eq) 110 | 111 | newtype OutputDir 112 | = OutputDir 113 | { unOutputDir :: FilePath 114 | } 115 | deriving (Show, Eq) 116 | 117 | newtype NoParse 118 | = NoParse 119 | { unNoParse :: FilePath 120 | } 121 | deriving (Show, Eq) 122 | 123 | newtype WatchFileExt 124 | = WatchFileExt 125 | { unWatchFileExt :: T.Text 126 | } 127 | deriving (Show, Eq) 128 | 129 | newtype WatchIgnorePatterns 130 | = WatchIgnorePatterns 131 | { unWatchIgnorePatterns :: T.Text 132 | } 133 | deriving (Show, Eq) 134 | 135 | newtype HotReloadingPort 136 | = HotReloadingPort 137 | { unHotReloadingPort :: Int 138 | } 139 | deriving (Show, Eq) 140 | 141 | instance Aeson.FromJSON Config where 142 | 143 | parseJSON (Aeson.Object v) = 144 | Config <$> -- 145 | (EntryPoints <$> v .: "entry_points") <*> 146 | (fmap ModulesDir <$> v .: "modules_directories") <*> 147 | (SourceDir <$> v .: "source_directory") <*> 148 | (ElmRoot <$> v .: "elm_root_directory") <*> 149 | (TempDir <$> v .: "temp_directory" .!= "./.jetpack/build_artifacts") <*> 150 | (LogDir <$> v .:? "log_directory" .!= "./.jetpack/logs") <*> 151 | (OutputDir <$> v .: "output_js_directory") <*> 152 | (fmap ElmPath <$> v .:? "elm_bin_path") <*> 153 | (fmap CoffeePath <$> v .:? "coffee_path") <*> 154 | (fmap NoParse <$> v .:? "no_parse" .!= []) <*> 155 | ( fmap WatchFileExt <$> 156 | v .:? 157 | "watch_file_extensions" .!= 158 | [".elm", ".coffee", ".js", ".json"] 159 | ) <*> 160 | ( fmap WatchIgnorePatterns <$> 161 | v .:? 162 | "watch_file_ignore_patterns" .!= 163 | ["/[.]#[^/]*$", "/~[^/]*$"] 164 | ) <*> 165 | (HotReloadingPort <$> v .:? "hot_reloading_port" .!= 31337) 166 | parseJSON invalid = typeMismatch "Config" invalid 167 | 168 | readConfig :: IO Config 169 | readConfig = do 170 | cwd <- Dir.getCurrentDirectory 171 | load cwd 172 | 173 | {-| Loads configuration for jetpack from `jetpack.json`. 174 | -} 175 | load :: FilePath -> IO (Config) 176 | load root = do 177 | let path = root "jetpack.json" 178 | exists <- Dir.doesFileExist path 179 | if exists 180 | then 181 | do 182 | content <- BL.readFile path 183 | case Aeson.eitherDecode content of 184 | Right config -> return config 185 | Left err -> do 186 | _ <- 187 | Message.error $ 188 | T.unlines 189 | [ "Invalid jetpack.json: " <> T.pack path 190 | , "" 191 | , " " <> T.pack err 192 | , "" 193 | ] 194 | System.Exit.exitFailure 195 | else 196 | do 197 | _ <- 198 | Message.error $ 199 | T.unlines ["I didn't find a config for jetpack at " <> T.pack path] 200 | System.Exit.exitFailure 201 | -------------------------------------------------------------------------------- /src/Dependencies.hs: -------------------------------------------------------------------------------- 1 | module Dependencies 2 | ( Dependencies 3 | , Dependency (..) 
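{- A 'DependencyTree' is a rose tree ('Data.Tree') with one 'Dependency' per
   required module, rooted at an entry point; 'Dependencies' is the list of all
   such trees, which DependencyTree.writeTreeCache persists as deps.json. -}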
4 | , DependencyTree 5 | ) 6 | where 7 | 8 | import Data.Aeson as Aeson 9 | import Data.Time.Clock 10 | import qualified Data.Tree as Tree 11 | import GHC.Generics (Generic) 12 | import qualified Parser.Ast as Ast 13 | import System.FilePath () 14 | 15 | data Dependency 16 | = Dependency 17 | { fileType :: Ast.SourceType 18 | , requiredAs :: FilePath 19 | , filePath :: FilePath 20 | , lastModificationTime :: Maybe UTCTime 21 | } 22 | deriving (Eq, Generic) 23 | 24 | instance FromJSON Dependency 25 | 26 | instance ToJSON Dependency 27 | 28 | instance Show Dependency where 29 | 30 | show (Dependency t r p l) = 31 | "(Dependency: " ++ 32 | show r ++ 33 | " " ++ 34 | show t ++ 35 | " <" ++ 36 | show p ++ 37 | "> " ++ 38 | show l ++ 39 | ")" 40 | 41 | type DependencyTree = Tree.Tree Dependency 42 | 43 | type Dependencies = [DependencyTree] 44 | -------------------------------------------------------------------------------- /src/DependencyTree.hs: -------------------------------------------------------------------------------- 1 | {-| Finds all dependencies of a module. It creates a try like the following for each module. 2 | ``` 3 | (Dependency "./app/assets/modules/js/Super/foo_bar.js" Js ) 4 | | 5 | +- (Dependency "Page/Super/FooBar/Main.elm" Elm ) 6 | | 7 | `- (Dependency "Page/Super/FooBar/index.coffee" Coffee ) 8 | | 9 | `- (Dependency "lodash" Js) 10 | ``` 11 | 12 | 13 | Finding modules 14 | --------------- 15 | 16 | We are searching in the following folders. 17 | 18 | 1. relative to the file requiring the module 19 | 2. relative in node_modules 20 | 3. in `modules_directory` 21 | 4. in `source_directory` 22 | 5. in `{sourceDir}/../node_modules` 23 | 6. in `{root}/node_modules` 24 | 7. in `{root}/vendor/assets/components` 25 | 8. in `{root}/vendor/assets/javascripts` 26 | 9. woop woop! module not found 27 | 28 | In each directory we search for the following names. 29 | `{name}` is the string from the `require` statement 30 | 31 | 1. `{folder}/{name}` from `browser` field in `package.json` 32 | 2. `{folder}/{name}` from `main` field in `package.json` 33 | 3. `{folder}/{name}` 34 | 4. `{folder}/{name}.js` 35 | 5. `{folder}/{name}/index.js` 36 | 6. `{folder}/{name}/{name}` 37 | 7. `{folder}/{name}/{name}.js` 38 | 8. `{folder}/{name}` 39 | 9. `{folder}/{name}.coffee` 40 | 10. 
`{folder}/{name}/index.coffee` 41 | 42 | -} 43 | module DependencyTree 44 | ( build 45 | , readTreeCache 46 | , writeTreeCache 47 | ) 48 | where 49 | 50 | import Config (Config) 51 | import qualified Config 52 | import Control.Monad ((<=<)) 53 | import Data.Aeson as Aeson 54 | import qualified Data.ByteString.Lazy as BL 55 | import Data.Maybe as M 56 | import qualified Data.Text as T 57 | import Data.Time.Clock () 58 | import Data.Time.Clock.POSIX 59 | import qualified Data.Tree as Tree 60 | import Dependencies 61 | import qualified Parser.Ast as Ast 62 | import qualified Parser.Require 63 | import qualified Resolver 64 | import Safe 65 | import qualified Safe.IO 66 | import System.FilePath ((<.>), (), takeDirectory) 67 | import System.Posix.Files 68 | import Utils.Tree (searchNode) 69 | 70 | {-| Find all dependencies for the given entry points 71 | -} 72 | build :: Config -> Dependencies -> FilePath -> IO DependencyTree 73 | build config cache entryPoint = do 74 | dep <- toDependency (Config.entryPoints config) entryPoint 75 | tree <- buildTree config cache dep 76 | return tree 77 | 78 | buildTree :: Config -> Dependencies -> Dependency -> IO DependencyTree 79 | buildTree config cache = 80 | Tree.unfoldTreeM 81 | (resolveChildren config <=< findRequires cache (Config.noParse config)) <=< 82 | Resolver.resolve config Nothing 83 | 84 | readTreeCache :: Config.TempDir -> IO Dependencies 85 | readTreeCache tempDir = 86 | (fromMaybe [] . Aeson.decode) <$> 87 | BL.readFile (Config.unTempDir tempDir "deps" <.> "json") 88 | 89 | writeTreeCache :: Config.TempDir -> Dependencies -> IO () 90 | writeTreeCache tempDir = 91 | Safe.IO.writeFileByteString (Config.unTempDir tempDir "deps" <.> "json") . 92 | Aeson.encode 93 | 94 | toDependency :: Config.EntryPoints -> FilePath -> IO Dependency 95 | toDependency entryPoints path = do 96 | status <- getFileStatus $ Config.unEntryPoints entryPoints path 97 | let lastModificationTime = 98 | posixSecondsToUTCTime $ modificationTimeHiRes status 99 | return $ Dependency Ast.Js path path $ Just lastModificationTime 100 | 101 | requireToDep :: FilePath -> Ast.Require -> Dependency 102 | requireToDep path (Ast.Require t n) = Dependency t n path Nothing 103 | 104 | findRequires 105 | :: Dependencies 106 | -> [Config.NoParse] 107 | -> Dependency 108 | -> IO (Dependency, [Dependency]) 109 | findRequires cache noParse parent@Dependency {filePath, fileType} = 110 | if Config.NoParse filePath `elem` noParse 111 | then return (parent, []) 112 | else 113 | case fileType of 114 | Ast.Js -> parseModule cache parent Parser.Require.jsRequires 115 | Ast.Coffee -> parseModule cache parent Parser.Require.coffeeRequires 116 | Ast.Elm -> return (parent, []) 117 | 118 | findInCache :: Dependency -> Dependencies -> Maybe (Dependency, [Dependency]) 119 | findInCache dep = headMay . M.catMaybes . fmap (findInCache_ dep) 120 | 121 | findInCache_ :: Dependency -> DependencyTree -> Maybe (Dependency, [Dependency]) 122 | findInCache_ dep = fmap toTuple . searchNode ((==) dep . 
Tree.rootLabel) 123 | where 124 | toTuple Tree.Node {Tree.rootLabel, Tree.subForest} = 125 | (rootLabel, fmap Tree.rootLabel subForest) 126 | 127 | parseModule 128 | :: Dependencies 129 | -> Dependency 130 | -> (T.Text -> [Ast.Require]) 131 | -> IO (Dependency, [Dependency]) 132 | parseModule cache dep@Dependency {filePath} parser = 133 | case findInCache dep cache of 134 | Just cached -> return cached 135 | Nothing -> do 136 | content <- readFile filePath 137 | let requires = parser $ T.pack content 138 | let dependencies = fmap (requireToDep $ takeDirectory filePath) requires 139 | return (dep, dependencies) 140 | 141 | resolveChildren 142 | :: Config -> (Dependency, [Dependency]) -> IO (Dependency, [Dependency]) 143 | resolveChildren config (parent, children) = do 144 | resolved <- traverse (Resolver.resolve config (Just parent)) children 145 | return (parent, resolved) 146 | -------------------------------------------------------------------------------- /src/EntryPoints.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | 3 | {-| Finds all entrypoints. Find either uses a passed glob or **/*.* to search in the `entry_points`. 4 | -} 5 | module EntryPoints 6 | ( find 7 | ) 8 | where 9 | 10 | import CliArguments (Args (..)) 11 | import qualified Config 12 | import Control.Exception.Safe (Exception) 13 | import qualified Control.Exception.Safe as ES 14 | import Data.Semigroup ((<>)) 15 | import qualified Data.Text as T 16 | import Data.Typeable (Typeable) 17 | import System.FilePath 18 | ( () 19 | , makeRelative 20 | , normalise 21 | , takeDirectory 22 | ) 23 | import "Glob" System.FilePath.Glob (glob) 24 | 25 | find :: Args -> Config.EntryPoints -> IO [FilePath] 26 | find args entryPoints = do 27 | let entryPointsGlob = normalisedEntryPointsGlob entryPoints args 28 | paths <- findFilesIn entryPointsGlob 29 | case paths of 30 | [] -> ES.throwM $ NoModulesPresent (takeDirectory <$> entryPointsGlob) 31 | _ -> return $ makeRelative (Config.unEntryPoints entryPoints) <$> paths 32 | 33 | normalisedEntryPointsGlob :: Config.EntryPoints -> Args -> [FilePath] 34 | normalisedEntryPointsGlob entryPoints args = 35 | case entryPointGlob args of 36 | [] -> [Config.unEntryPoints entryPoints "**" "*.*"] 37 | entryPoints -> 38 | -- handle arguments with and without a leading "./" 39 | (\entry -> "." normalise entry) <$> entryPoints 40 | 41 | findFilesIn :: [FilePath] -> IO [FilePath] 42 | findFilesIn paths = concat <$> traverse glob paths 43 | 44 | data Error 45 | = NoModulesPresent [FilePath] 46 | deriving (Typeable, Exception) 47 | 48 | instance Show Error where 49 | 50 | show (NoModulesPresent paths) = 51 | T.unpack $ 52 | T.unlines 53 | [ "It seems to me that you either provided a wrong `entry_points` or you don't have any modules." 54 | , "" 55 | , "I didn't find anything in " <> T.pack (show paths) 56 | , "" 57 | ] 58 | -------------------------------------------------------------------------------- /src/Error.hs: -------------------------------------------------------------------------------- 1 | module Error 2 | ( Error (..) 
3 | , description 4 | ) 5 | where 6 | 7 | import Data.Semigroup ((<>)) 8 | import qualified Data.Text as T 9 | import System.FilePath () 10 | 11 | data Error 12 | = FileNotFound T.Text 13 | | JsonInvalid 14 | FilePath 15 | T.Text 16 | | NoModulesPresent [FilePath] 17 | | ModuleNotFound 18 | (Maybe FilePath) 19 | FilePath 20 | | BinNotFound T.Text 21 | | CompileError 22 | T.Text 23 | T.Text 24 | | HookFailed 25 | T.Text 26 | T.Text 27 | | ConfigInvalid 28 | FilePath 29 | T.Text 30 | | NoConfigFound FilePath 31 | deriving (Eq, Show) 32 | 33 | description :: Error -> T.Text 34 | description (FileNotFound file) = "Couldn't find file: " <> file 35 | description (JsonInvalid file err) = 36 | T.unlines ["Invalid json file: " <> T.pack file, "", " " <> err, ""] 37 | description (ConfigInvalid file err) = 38 | T.unlines ["Invalid jetpack.json: " <> T.pack file, "", " " <> err, ""] 39 | description (NoModulesPresent paths) = 40 | T.unlines 41 | [ "It seems to me that you either provided a wrong `entry_points` or you don't have any modules." 42 | , "" 43 | , "I didn't find anything in " <> T.pack (show paths) 44 | , "" 45 | ] 46 | description (ModuleNotFound (Just requiredIn) file) = 47 | T.unlines 48 | [ "" 49 | , "" 50 | , "I had troubles finding " <> T.pack file <> " required in " <> 51 | T.pack requiredIn <> 52 | "." 53 | ] 54 | description (ModuleNotFound Nothing file) = 55 | T.unlines 56 | ["", "", "I had troubles finding the entry point " <> T.pack file <> "."] 57 | description (BinNotFound bin) = 58 | T.unlines 59 | [ "I had troubles finding the " <> bin <> " command." 60 | , "" 61 | , "You might want to install it." 62 | ] 63 | description (CompileError cmd msg) = 64 | T.unlines ["Command:", "", " $ " <> cmd, "", msg] 65 | description (HookFailed msg hookScript) = 66 | T.unlines ["Hook:", "", " $ " <> hookScript, "", msg] 67 | description (NoConfigFound path) = 68 | T.unlines ["I didn't find a config for jetpack at " <> T.pack path] 69 | -------------------------------------------------------------------------------- /src/HotReload.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell #-} 2 | 3 | module HotReload 4 | ( inject 5 | , wrap 6 | ) 7 | where 8 | 9 | import qualified Config 10 | import qualified Data.FileEmbed 11 | import Data.Semigroup ((<>)) 12 | import qualified Data.Text as T 13 | -- This code is directly ported from https://github.com/klazuka/elm-hot/blob/master/src/inject.js 14 | import qualified Data.Text.Encoding as E 15 | import qualified Data.Text.IO as TIO 16 | import qualified Safe.IO 17 | import System.FilePath ((<.>), ()) 18 | import qualified Text.Parsec as P 19 | 20 | wrap :: Config.HotReloadingPort -> (a, T.Text) -> (a, T.Text) 21 | wrap hotReloadingPort (a, content) = 22 | ( a 23 | , T.unlines 24 | [ "// Expose the Webpack HMR API" 25 | , "var myDisposeCallback = function() {};" 26 | , "// simulate the HMR api exposed by webpack" 27 | , "var moduleHot = {" 28 | , " hot: {" 29 | , " accept: function () {" 30 | , " }," 31 | , " dispose: function (callback) {" 32 | , " myDisposeCallback = callback" 33 | , " }," 34 | , " data: null," 35 | , " apply: function () {" 36 | , " var newData = {};" 37 | , " myDisposeCallback(newData);" 38 | , " moduleHot.hot.data = newData" 39 | , " }" 40 | , " }" 41 | , "};" 42 | , content 43 | , "// Listen for data from the websocket. When we get a message, eval it." 
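{- Client side of the hot-reload protocol: HotReload.Server pushes freshly
   compiled Elm bundles over this websocket; on each message the shim saves HMR
   state via moduleHot.hot.apply(), drops window.Elm and re-evals the bundle,
   and the injected resources/hmr.js then swaps the running Elm instances. -}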
44 | , "var socketHotReloading = new WebSocket('ws://localhost:" <> 45 | T.pack (show $ Config.unHotReloadingPort hotReloadingPort) <> 46 | "');" 47 | , "socketHotReloading.onmessage = function(event) {" 48 | , " console.warn('Jetpack reloading...');" 49 | , " moduleHot.hot.apply();" 50 | , " delete window.Elm;" 51 | , " try {" 52 | , " eval(event.data);" 53 | , " } catch (e) {" 54 | , " console.warn('Jetpack reloading failed!');" 55 | , " console.error(e);" 56 | , " };" 57 | , " console.warn('Jetpack reloaded!');" 58 | , "};" 59 | ] 60 | ) 61 | 62 | inject :: FilePath -> IO () 63 | inject path = do 64 | originalElmCodeJS <- TIO.readFile path 65 | let hmrCode = $(Data.FileEmbed.embedFile $ "resources" "hmr" <.> "js") 66 | let fixedNavKey = fixNavigationKey originalElmCodeJS 67 | case P.parse platformExportParser "" fixedNavKey of 68 | Left err -> print err 69 | Right (before, after) -> do 70 | let modifiedCode = 71 | T.unlines [before, "\n", E.decodeUtf8 hmrCode, "\n", after] 72 | Safe.IO.writeFile path modifiedCode 73 | 74 | -- Attach a tag to Browser.Navigation.Key values. 75 | -- We will add a property to the key immediately after it's created so that we can find it. 76 | fixNavigationKey :: T.Text -> T.Text 77 | fixNavigationKey code = do 78 | if T.isInfixOf "elm$browser$Browser$application" code 79 | then 80 | let navKeyDefinition = 81 | "var key = function() { key.a(onUrlChange(_Browser_getUrl())); };" 82 | navKeyTag = "key['elm-hot-nav-key'] = true" 83 | modifiedCode = 84 | T.replace 85 | navKeyDefinition 86 | (navKeyDefinition <> "\n" <> navKeyTag) 87 | code 88 | in modifiedCode 89 | else code 90 | 91 | platformExportParser :: P.Parsec T.Text st (T.Text, T.Text) 92 | platformExportParser = do 93 | let untilString = P.manyTill P.anyChar . P.try . P.string 94 | platformExport = "_Platform_export(" 95 | this = "}(this));" 96 | before <- untilString platformExport 97 | platform <- untilString this 98 | return (T.pack $ before <> platformExport <> platform, T.pack this) 99 | -------------------------------------------------------------------------------- /src/HotReload/Server.hs: -------------------------------------------------------------------------------- 1 | module HotReload.Server 2 | ( start 3 | ) 4 | where 5 | 6 | import qualified Builder 7 | import CliArguments (Args (..)) 8 | import qualified Compile 9 | import qualified Config 10 | import Config (Config) 11 | import Data.Foldable (traverse_) 12 | import qualified Data.Text as T 13 | import qualified Network.WebSockets as WS 14 | import qualified Watcher 15 | 16 | start :: Config -> Args -> Builder.HotReload -> IO () 17 | start config args hotReloading = do 18 | putStrLn "Wait until the first build succeeded!" 19 | putStrLn "Refresh your browser as soon as the build was successful." 20 | _ <- Builder.build config args hotReloading 21 | WS.runServer 22 | "127.0.0.1" 23 | (Config.unHotReloadingPort $ Config.hotReloadingPort config) $ 24 | application config args hotReloading 25 | 26 | application :: Config -> Args -> Builder.HotReload -> WS.ServerApp 27 | application config args hotReloading pending = do 28 | putStrLn "Hot-reloading server is ready." 
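{- Each websocket client gets its own file watcher: whenever a relevant file
   changes we rebuild, and every compiled Elm bundle is pushed to the browser
   via 'reload' below. The server listens on 127.0.0.1 at hot_reloading_port
   (31337 by default). -}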
29 | conn <- WS.acceptRequest pending 30 | state <- 31 | Watcher.startWatcher 32 | config 33 | ( do 34 | maybeResult <- Builder.build config args hotReloading 35 | case maybeResult of 36 | Nothing -> pure () 37 | Just result -> traverse_ (reload conn) $ Compile.elmFiles result 38 | ) 39 | Watcher.listenToCommands state 40 | 41 | reload :: WS.Connection -> FilePath -> IO () 42 | reload conn filePath = do 43 | content <- readFile filePath 44 | WS.sendTextData conn $ 45 | T.unlines 46 | [ "jetpack___hot__reloading();" 47 | , "function jetpack___hot__reloading() {" 48 | , T.pack content 49 | , "}" 50 | ] 51 | -------------------------------------------------------------------------------- /src/Init.hs: -------------------------------------------------------------------------------- 1 | {-| Setup working dir for jetpack. 2 | -} 3 | module Init where 4 | 5 | import qualified Config 6 | import Data.Foldable (traverse_) 7 | import qualified Safe.IO 8 | import System.Directory (createDirectoryIfMissing, doesFileExist) 9 | import System.FilePath ((<.>), ()) 10 | import qualified ToolPaths 11 | 12 | setup 13 | :: Config.TempDir 14 | -> Config.LogDir 15 | -> Config.OutputDir 16 | -> Maybe Config.ElmPath 17 | -> Maybe Config.CoffeePath 18 | -> IO ToolPaths.ToolPaths 19 | setup tempDir logDir outputDir elmPath coffeePath = do 20 | requiredBins <- ToolPaths.find elmPath coffeePath 21 | traverse_ 22 | (createDirectoryIfMissing True) 23 | [ Config.unTempDir tempDir 24 | , Config.unLogDir logDir 25 | , Config.unOutputDir outputDir 26 | ] 27 | createDepsJsonIfMissing tempDir 28 | return requiredBins 29 | 30 | createDepsJsonIfMissing :: Config.TempDir -> IO () 31 | createDepsJsonIfMissing tempDir = do 32 | let depsJSONPath = Config.unTempDir tempDir "deps" <.> "json" 33 | exists <- doesFileExist depsJSONPath 34 | if exists 35 | then return () 36 | else Safe.IO.writeFile depsJSONPath "[]" 37 | -------------------------------------------------------------------------------- /src/Logger.hs: -------------------------------------------------------------------------------- 1 | module Logger 2 | ( clearLog 3 | , appendLog 4 | , compileLog 5 | , consistencyLog 6 | , compileTime 7 | , allLogs 8 | ) 9 | where 10 | 11 | import qualified Config 12 | import qualified Data.Text as T 13 | import qualified Data.Text.IO as TIO 14 | import qualified Safe.IO 15 | import System.FilePath ((<.>), ()) 16 | 17 | compileLog, compileTime, consistencyLog :: FilePath 18 | compileLog = "compile" <.> "log" 19 | 20 | compileTime = "compile" <.> "time" 21 | 22 | consistencyLog = "elm-stuff__consistency" <.> "log" 23 | 24 | allLogs :: [FilePath] 25 | allLogs = [compileTime, compileLog, consistencyLog] 26 | 27 | appendLog :: Config.LogDir -> FilePath -> T.Text -> IO () 28 | appendLog logDir fileName msg = 29 | TIO.appendFile (Config.unLogDir logDir fileName) msg 30 | 31 | clearLog :: Config.LogDir -> FilePath -> IO () 32 | clearLog logDir fileName = 33 | Safe.IO.writeFile (Config.unLogDir logDir fileName) "" 34 | -------------------------------------------------------------------------------- /src/Message.hs: -------------------------------------------------------------------------------- 1 | module Message 2 | ( warning 3 | , success 4 | , error 5 | , list 6 | ) 7 | where 8 | 9 | import Data.Foldable (traverse_) 10 | import Data.Monoid ((<>)) 11 | import qualified Data.Text as T 12 | import qualified Data.Text.IO as TIO 13 | import Rainbow 14 | ( Chunk 15 | , Radiant 16 | , back 17 | , black 18 | , brightGreen 19 | , brightRed 20 | , 
brightWhite 21 | , chunk 22 | , cyan 23 | , fore 24 | , green 25 | , only256 26 | , putChunkLn 27 | , red 28 | , white 29 | ) 30 | import Prelude hiding (error) 31 | 32 | success :: T.Text -> IO () 33 | success = block Theme {bg = green <> only256 brightGreen, fg = black} 34 | 35 | warning :: T.Text -> IO () 36 | warning = block Theme {bg = white <> only256 brightWhite, fg = black} 37 | 38 | error :: T.Text -> IO () 39 | error = block Theme {bg = red <> only256 brightRed, fg = black} 40 | 41 | list :: [T.Text] -> IO () 42 | list = traverse_ (putChunkLn . fore cyan . chunk . (<>) ("- ")) 43 | 44 | data Theme 45 | = Theme 46 | { bg :: Radiant 47 | , fg :: Radiant 48 | } 49 | 50 | block :: Theme -> T.Text -> IO () 51 | block Theme {bg, fg} = surroundedByNL . back bg . fore fg . chunk . spaced 52 | 53 | surroundedByNL :: Chunk T.Text -> IO () 54 | surroundedByNL msg = do 55 | _ <- TIO.putStrLn "" 56 | _ <- putChunkLn msg 57 | TIO.putStrLn "" 58 | 59 | spaced :: T.Text -> T.Text 60 | spaced text = " " <> text <> " " 61 | -------------------------------------------------------------------------------- /src/Notify.hs: -------------------------------------------------------------------------------- 1 | module Notify 2 | ( Config (..) 3 | , State 4 | , watch 5 | , buildNow 6 | , end 7 | ) 8 | where 9 | 10 | import Control.Concurrent 11 | import Data.Foldable (traverse_) 12 | import Data.Maybe (isJust) 13 | import qualified Data.Text as T 14 | import System.FSNotify 15 | import System.FilePath 16 | import System.Posix.Process 17 | import System.Posix.Signals 18 | import System.Posix.Types (ProcessID) 19 | import System.Process () 20 | import Text.Regex (Regex, matchRegex) 21 | 22 | {-| Internal state of the watcher. 23 | We keep track of running processes and the config. 24 | You might need this if you want to use `end` or `force`. 25 | -} 26 | data State 27 | = State 28 | { onChange :: IO () 29 | , mVar :: MVar (Maybe ProcessID) 30 | } 31 | 32 | {-| Configuration for a watcher. 33 | -} 34 | data Config 35 | = Config 36 | { pathToWatch :: FilePath -- Watch files recursivelly under this path. 37 | , relevantExtensions :: [T.Text] -- Which extensions do we care about? Empty list will accept all. 38 | , ignorePatterns :: [Regex] -- Which filename patterns do we want to ignore? Empty list will accept all. 39 | } 40 | 41 | watch :: Config -> IO () -> IO State 42 | watch config onChange = do 43 | mVar <- newMVar Nothing 44 | let state = State {onChange = onChange, mVar = mVar} 45 | _ <- forkIO (start mVar config onChange) 46 | pure state 47 | 48 | start :: MVar (Maybe ProcessID) -> Config -> IO () -> IO () 49 | start mVar Config {pathToWatch, relevantExtensions, ignorePatterns} onChange = do 50 | manager <- 51 | startManagerConf 52 | ( WatchConfig 53 | { confDebounce = Debounce 0.2 54 | , confUsePolling = False 55 | , confPollInterval = 10 ^ (6 :: Int) 56 | } 57 | ) 58 | _ <- 59 | watchTree 60 | manager 61 | pathToWatch 62 | (eventIsRelevant relevantExtensions ignorePatterns) 63 | (actOnEvent mVar onChange) 64 | pure () 65 | 66 | eventIsRelevant :: [T.Text] -> [Regex] -> Event -> Bool 67 | eventIsRelevant relevantExtensions ignorePatterns event = 68 | getExtensionFromEvent event `elem` relevantExtensions && 69 | getFilepathFromEvent event `matchesNone` 70 | ignorePatterns 71 | 72 | getExtensionFromEvent :: Event -> T.Text 73 | getExtensionFromEvent = T.pack . takeExtension . 
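{- takeExtension keeps the leading dot, which is why the default
   watch_file_extensions in Config are spelled ".elm", ".coffee", ".js" and
   ".json". -}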
getFilepathFromEvent 74 | 75 | actOnEvent :: MVar (Maybe ProcessID) -> IO () -> Event -> IO () 76 | actOnEvent mVar onChange _event = startProcess mVar onChange 77 | 78 | startProcess :: MVar (Maybe ProcessID) -> IO () -> IO () 79 | startProcess mVar cb = do 80 | stopProcess mVar 81 | processId <- forkProcess cb 82 | putMVar mVar (Just processId) 83 | 84 | stopProcess :: MVar (Maybe ProcessID) -> IO () 85 | stopProcess mVar = do 86 | runningProcess <- takeMVar mVar 87 | traverse_ (signalProcess softwareTermination) runningProcess 88 | traverse_ (getProcessStatus True False) runningProcess -- here be dragons, potentially 89 | 90 | buildNow :: State -> IO () 91 | buildNow State {mVar, onChange} = startProcess mVar onChange 92 | 93 | end :: State -> IO () 94 | end State {mVar} = stopProcess mVar 95 | 96 | getFilepathFromEvent :: Event -> FilePath 97 | getFilepathFromEvent (Added filepath _ _) = filepath 98 | getFilepathFromEvent (Modified filepath _ _) = filepath 99 | getFilepathFromEvent (Removed filepath _ _) = filepath 100 | getFilepathFromEvent (Unknown filepath _ _) = filepath 101 | 102 | matchesNone :: FilePath -> [Regex] -> Bool 103 | matchesNone filepath = not . any (isJust . flip matchRegex filepath) 104 | -------------------------------------------------------------------------------- /src/Parser/Ast.hs: -------------------------------------------------------------------------------- 1 | module Parser.Ast 2 | ( Require (..) 3 | , SourceType (..) 4 | ) 5 | where 6 | 7 | import Data.Aeson as Aeson 8 | import GHC.Generics (Generic) 9 | import System.FilePath () 10 | 11 | data Require 12 | = Require 13 | SourceType 14 | FilePath 15 | deriving (Eq) 16 | 17 | instance Show Require where 18 | 19 | show (Require t n) = "(Require " ++ show n ++ " " ++ show t ++ ")" 20 | 21 | data SourceType 22 | = Coffee 23 | | Js 24 | | Elm 25 | deriving (Show, Eq, Generic) 26 | 27 | instance FromJSON SourceType 28 | 29 | instance ToJSON SourceType 30 | -------------------------------------------------------------------------------- /src/Parser/Comment.hs: -------------------------------------------------------------------------------- 1 | {-| Parser for line and block comments in js or coffeescript. 2 | 3 | 4 | imports for doctests 5 | >>> import qualified Data.Text as T 6 | -} 7 | module Parser.Comment 8 | ( eatJsComments 9 | , eatCoffeeComments 10 | , eatElmComments 11 | ) 12 | where 13 | 14 | import Data.Functor (void) 15 | import qualified Data.List as L 16 | import qualified Data.Text as T 17 | import Text.Parsec 18 | 19 | {-| Removes block and line comments from text. 20 | >>> :{ 21 | eatElmComments $ 22 | T.unlines 23 | [ "import Page.Foo.Bar" 24 | , "-- import Maybe.Extra" 25 | , "type Msg = NoOp" 26 | , "{- some comment -}" 27 | , "foo = 42" 28 | ] 29 | :} 30 | "import Page.Foo.Bar\ntype Msg = NoOp\n\nfoo = 42\n" 31 | -} 32 | eatElmComments :: T.Text -> T.Text 33 | eatElmComments = eatComments elmBlockCommentParser elmLineCommentParser 34 | 35 | {-| Removes block and line comments from text. 36 | >>> :{ 37 | eatJsComments $ 38 | T.unlines 39 | [ "var x = require('x.js');" 40 | , "// var x = require('x.js');" 41 | , "x.foo(); /* comment */" 42 | ] 43 | :} 44 | "var x = require('x.js');\nx.foo(); \n" 45 | -} 46 | eatJsComments :: T.Text -> T.Text 47 | eatJsComments = eatComments jsBlockCommentParser jsLineCommentParser 48 | 49 | {-| Removes block and line comments from text. 
50 | >>> :{ 51 | eatCoffeeComments $ 52 | T.unlines 53 | [ "var x = require('x.js');" 54 | , "# var x = require('x.js');" 55 | , "x.foo();" 56 | , "###" 57 | , "ignore" 58 | , "###" 59 | ] 60 | :} 61 | "var x = require('x.js');\nx.foo();\n\n" 62 | -} 63 | eatCoffeeComments :: T.Text -> T.Text 64 | eatCoffeeComments = eatComments coffeeBlockCommentParser coffeeLineCommentParser 65 | 66 | eatComments :: Parsec T.Text () () -> Parsec T.Text () () -> T.Text -> T.Text 67 | eatComments blockParser lineParser str = 68 | case parse parser "Error" str of 69 | Right parsed -> parsed 70 | Left _ -> str 71 | where 72 | parser = eatCommentsParser (try blockParser <|> try lineParser) 73 | 74 | eatCommentsParser :: Parsec T.Text st () -> Parsec T.Text st T.Text 75 | eatCommentsParser parser = do 76 | optional parser 77 | xs <- sepBy (notCommentParser parser) parser 78 | optional parser 79 | return $ T.pack $ L.intercalate "" xs 80 | 81 | elmBlockCommentParser :: Parsec T.Text st () 82 | elmBlockCommentParser = 83 | string "{-" >> manyTill anyChar (try $ string "-}") >> return () 84 | 85 | elmLineCommentParser :: Parsec T.Text st () 86 | elmLineCommentParser = 87 | string "--" >> manyTill anyChar (void newline <|> eof) >> return () 88 | 89 | jsBlockCommentParser :: Parsec T.Text st () 90 | jsBlockCommentParser = 91 | string "/*" >> manyTill anyChar (try $ string "*/") >> return () 92 | 93 | jsLineCommentParser :: Parsec T.Text st () 94 | jsLineCommentParser = 95 | string "//" >> manyTill anyChar (void newline <|> eof) >> return () 96 | 97 | coffeeBlockCommentParser :: Parsec T.Text st () 98 | coffeeBlockCommentParser = 99 | string "###" >> manyTill anyChar (string "###") >> return () 100 | 101 | coffeeLineCommentParser :: Parsec T.Text st () 102 | coffeeLineCommentParser = 103 | string "#" >> manyTill anyChar (void newline <|> eof) >> return () 104 | 105 | notCommentParser :: Parsec T.Text st () -> Parsec T.Text st String 106 | notCommentParser parser = manyTill anyChar (lookAhead (parser <|> eof)) 107 | -------------------------------------------------------------------------------- /src/Parser/JetpackVersion.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | 3 | module Parser.JetpackVersion 4 | ( Version (..) 
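{- load (defined below) reads the project's package.json and expects the jetpack
   version to be pinned under devDependencies; a minimal example of the shape the
   FromJSON instance accepts (the version number is only illustrative):

     {
       "devDependencies": {
         "@noredink/jetpack": "1.2.3"
       }
     }
-}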
5 | , load 6 | ) 7 | where 8 | 9 | import Control.Exception.Safe (Exception) 10 | import qualified Control.Exception.Safe as ES 11 | import Control.Monad (fail) 12 | import Data.Aeson as Aeson 13 | import Data.Aeson.Types (Parser) 14 | import qualified Data.ByteString.Lazy as BL 15 | import qualified Data.SemVer as SemVer 16 | import Data.Semigroup ((<>)) 17 | import qualified Data.Text as T 18 | import Data.Typeable (Typeable) 19 | import GHC.Generics (Generic) 20 | import qualified System.Directory as Dir 21 | import System.FilePath ((), FilePath, dropFileName) 22 | 23 | data Version 24 | = Version 25 | { version :: SemVer.Version 26 | } 27 | deriving (Show, Eq, Generic) 28 | 29 | instance FromJSON Version where 30 | 31 | parseJSON = 32 | withObject "devDependencies" $ \v -> 33 | Version <$> 34 | (v .: "devDependencies" >>= (.: "@noredink/jetpack") >>= toSemVer) 35 | 36 | toSemVer :: T.Text -> Parser SemVer.Version 37 | toSemVer v = 38 | case SemVer.fromText v of 39 | Left err -> fail err 40 | Right semVer -> return semVer 41 | 42 | {-| Loads a package.json 43 | -} 44 | load :: IO Version 45 | load = do 46 | cwd <- Dir.getCurrentDirectory 47 | let path = cwd "package.json" 48 | content <- BL.readFile path 49 | case Aeson.eitherDecode content of 50 | Left err -> ES.throwM $ JsonInvalid path $ T.pack err 51 | Right json -> return json 52 | 53 | data Error 54 | = JsonInvalid 55 | FilePath 56 | T.Text 57 | deriving (Typeable, Exception) 58 | 59 | instance Show Error where 60 | 61 | show (JsonInvalid file err) = 62 | T.unpack $ 63 | T.unlines 64 | [ "I couldn't decode package.json in " <> (T.pack $ dropFileName file) 65 | , "" 66 | , " " <> err 67 | , "" 68 | ] 69 | -------------------------------------------------------------------------------- /src/Parser/PackageJson.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | 3 | module Parser.PackageJson 4 | ( load 5 | , PackageJson (..) 6 | ) 7 | where 8 | 9 | import Control.Exception.Safe (Exception) 10 | import qualified Control.Exception.Safe as ES 11 | import Data.Aeson as Aeson 12 | import qualified Data.ByteString.Lazy as BL 13 | import Data.Semigroup ((<>)) 14 | import qualified Data.Text as T 15 | import Data.Typeable (Typeable) 16 | import GHC.Generics (Generic) 17 | import System.FilePath (FilePath, dropFileName) 18 | 19 | data PackageJson 20 | = PackageJson 21 | { main :: Maybe FilePath 22 | , browser :: Maybe FilePath 23 | } 24 | deriving (Show, Eq, Generic) 25 | 26 | instance FromJSON PackageJson 27 | 28 | {-| Loads a package.json 29 | -} 30 | load :: FilePath -> IO PackageJson 31 | load path = do 32 | content <- BL.readFile path 33 | case Aeson.eitherDecode content of 34 | Left err -> ES.throwM $ JsonInvalid path $ T.pack err 35 | Right json -> return json 36 | 37 | data Error 38 | = JsonInvalid 39 | FilePath 40 | T.Text 41 | deriving (Typeable, Exception) 42 | 43 | instance Show Error where 44 | 45 | show (JsonInvalid file err) = 46 | T.unpack $ 47 | T.unlines 48 | [ "I couldn't decode package.json in " <> (T.pack $ dropFileName file) 49 | , "" 50 | , " " <> err 51 | , "" 52 | ] 53 | -------------------------------------------------------------------------------- /src/Parser/Require.hs: -------------------------------------------------------------------------------- 1 | {-| Parser for `require` in js and coffeescript. 2 | * It returns a list of `Require (Coffee|Js|Elm) fileName` 3 | * It ignores `require` in commments. 
4 | 5 | 6 | imports for doctests 7 | >>> import Parser.Ast 8 | >>> import qualified Data.Text as T 9 | -} 10 | module Parser.Require 11 | ( requires 12 | , jsRequires 13 | , coffeeRequires 14 | , require 15 | , getFileType 16 | ) 17 | where 18 | 19 | import qualified Data.Text as T 20 | import Parser.Ast as Ast 21 | import Parser.Comment as Comment 22 | import System.FilePath ((<.>), splitExtension) 23 | import Text.Parsec 24 | import qualified Utils.Parser as UP 25 | 26 | {-| returns all requires of a file 27 | >>> :{ 28 | requires Js $ 29 | T.unlines 30 | [ "var _ = require('lodash')" 31 | , "var Main = require('Foo.Bar.Main.elm')" 32 | , "" 33 | , "// var Main = require('Foo.Bar.Main.elm')" 34 | , "console.log('42'); /*" 35 | , "var Main = require('Foo.Bar.Main.elm')" 36 | , "*/" 37 | , "Main.embed(document.getElementById('host'), {})" 38 | , "function require(foo) {" 39 | , " console.log('local require')" 40 | , "}" 41 | ] 42 | :} 43 | [(Require "lodash" Js),(Require "Foo.Bar.Main.elm" Elm)] 44 | -} 45 | requires :: Ast.SourceType -> T.Text -> [Ast.Require] 46 | requires sourceType = concatMap require . T.lines . eatComments 47 | where 48 | eatComments = 49 | case sourceType of 50 | Ast.Js -> Comment.eatJsComments 51 | Ast.Coffee -> Comment.eatCoffeeComments 52 | _ -> id 53 | 54 | {-| Partially applied `requires` for js files. 55 | -} 56 | jsRequires :: T.Text -> [Ast.Require] 57 | jsRequires = requires Ast.Js 58 | 59 | {-| Partially applied `requires` for coffee files. 60 | -} 61 | coffeeRequires :: T.Text -> [Ast.Require] 62 | coffeeRequires = requires Ast.Coffee 63 | 64 | {-| Parses a require statement and returns the filename and the type base on the extensions. 65 | 66 | >>> require "require('lodash')" 67 | [(Require "lodash" Js)] 68 | 69 | >>> require "require('Main.elm')" 70 | [(Require "Main.elm" Elm)] 71 | 72 | >>> require "require('Main.elm';" 73 | [] 74 | -} 75 | require :: T.Text -> [Ast.Require] 76 | require content = 77 | case extractRequire content of 78 | Right rs -> 79 | fmap (\(path, ext) -> Ast.Require (getFileType ext) $ path <.> ext) rs 80 | Left _ -> [] 81 | 82 | {-| Converts a file extension into a union type. 83 | 84 | >>> getFileType ".coffee" 85 | Coffee 86 | 87 | Default for an empty extension or something unknown is js. 88 | This is because you might importe something like `require('MyModule.Foo')` 89 | >>> getFileType "" 90 | Js 91 | -} 92 | getFileType :: String -> Ast.SourceType 93 | getFileType ".coffee" = Coffee 94 | getFileType ".elm" = Elm 95 | getFileType ".js" = Js 96 | getFileType _ = Js 97 | 98 | {-| running the parser 99 | -} 100 | extractRequire :: T.Text -> Either ParseError [(FilePath, String)] 101 | extractRequire = parse (many $ try requireParser) "Error" 102 | 103 | requireParser :: Parsec T.Text u (FilePath, String) 104 | requireParser = do 105 | _ <- UP.eatTill requireKeyword 106 | _ <- requireKeyword 107 | _ <- spaces 108 | content <- choice [UP.betweenParens UP.stringContent, UP.stringContent] 109 | return $ splitExtension content 110 | 111 | requireKeyword :: Parsec T.Text u String 112 | requireKeyword = string "require" 113 | -------------------------------------------------------------------------------- /src/Resolver.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | 3 | {-| Resolves `requires`-statements. It tries to locate the module in the following directories. 4 | 5 | 1. relative to the file requiring the module 6 | 2. relative in node_modules 7 | 3. 
in `modules_directory` 8 | 4. in `source_directory` 9 | 7. in `{root}/vendor/assets/components` 10 | 8. in `{root}/vendor/assets/javascripts` 11 | 5. in `{source_directory}/../node_modules` 12 | 6. in `{root}/node_modules` 13 | 9. woop woop! module not found 14 | 15 | In each directory we search for the following names. 16 | `{name}` is the string from the `require` statement 17 | 18 | 1. `{folder}/{name}` from `browser` field in `package.json` 19 | 2. `{folder}/{name}` from `main` field in `package.json` 20 | 3. `{folder}/{name}` 21 | 4. `{folder}/{name}.js` 22 | 5. `{folder}/{name}/index.js` 23 | 6. `{folder}/{name}` 24 | 7. `{folder}/{name}.coffee` 25 | 8. `{folder}/{name}/index.coffee` 26 | 27 | -} 28 | module Resolver 29 | ( resolve 30 | ) 31 | where 32 | 33 | import Alternative.IO (AlternativeIO) 34 | import qualified Alternative.IO as AIO 35 | import Config (Config (Config)) 36 | import qualified Config 37 | import Control.Applicative ((<|>)) 38 | import Control.Exception.Safe (Exception) 39 | import qualified Control.Exception.Safe as ES 40 | import qualified Control.Monad.Except as ME 41 | import Data.Semigroup ((<>)) 42 | import qualified Data.Text as T 43 | import Data.Time.Clock.POSIX 44 | import Data.Typeable (Typeable) 45 | import Dependencies (Dependency (..)) 46 | import Parser.PackageJson as PackageJson 47 | import qualified Parser.Require 48 | import System.Directory (doesFileExist) 49 | import System.FilePath ((<.>), (), takeExtension) 50 | import System.Posix.Files 51 | 52 | resolve :: Config -> Maybe Dependency -> Dependency -> IO Dependency 53 | resolve Config {Config.modulesDirs, Config.entryPoints, Config.sourceDir} requiredIn dep = do 54 | result <- ME.runExceptT (resolveHelp modulesDirs entryPoints sourceDir dep) 55 | case result of 56 | Left _ -> 57 | ES.throwM $ ModuleNotFound (filePath <$> requiredIn) $ requiredAs dep 58 | Right dep -> return dep 59 | 60 | resolveHelp 61 | :: [Config.ModulesDir] 62 | -> Config.EntryPoints 63 | -> Config.SourceDir 64 | -> Dependency 65 | -> AlternativeIO Dependency 66 | resolveHelp modulesDirs entryPoints sourceDir dep = do 67 | resolved <- 68 | findRelative dep <|> findRelativeNodeModules dep <|> 69 | findInEntryPoints entryPoints dep <|> 70 | findInSources (Config.unSourceDir sourceDir) dep <|> 71 | findInModules modulesDirs dep 72 | updateDepTime $ updateDepType resolved 73 | 74 | findRelative :: Dependency -> AlternativeIO Dependency 75 | findRelative dep@Dependency {filePath, requiredAs} = 76 | tryToFind filePath requiredAs dep 77 | 78 | findRelativeNodeModules :: Dependency -> AlternativeIO Dependency 79 | findRelativeNodeModules dep@Dependency {filePath, requiredAs} = 80 | tryToFind (filePath "node_modules") requiredAs dep 81 | 82 | findInEntryPoints 83 | :: Config.EntryPoints -> Dependency -> AlternativeIO Dependency 84 | findInEntryPoints entryPoints dep@Dependency {requiredAs} = do 85 | tryToFind (Config.unEntryPoints entryPoints) requiredAs dep 86 | 87 | findInModules :: [Config.ModulesDir] -> Dependency -> AlternativeIO Dependency 88 | findInModules [] _parent = AIO.tryNext 89 | findInModules (x : xs) dep@Dependency {requiredAs} = 90 | tryToFind (Config.unModulesDir x) requiredAs dep <|> findInModules xs dep 91 | 92 | findInSources :: FilePath -> Dependency -> AlternativeIO Dependency 93 | findInSources sourceDir dep@Dependency {requiredAs} = do 94 | tryToFind sourceDir requiredAs dep 95 | 96 | tryToFind :: FilePath -> FilePath -> Dependency -> AlternativeIO Dependency 97 | tryToFind basePath fileName require = do 98 
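{- Sketch of the fallback chain built from the helpers below, for an invented
   extensionless require of "lodash" resolved against a node_modules base path:

     tryJs "node_modules" "lodash" dep
       -- package.json "browser"/"main" entry, then node_modules/lodash,
       -- node_modules/lodash.js, node_modules/lodash/index.js
       <|> tryCoffee "node_modules" "lodash" dep
       -- node_modules/lodash.coffee, node_modules/lodash/index.coffee

   Candidates that do not exist on disk fall through via AlternativeIO's (<|>);
   if every candidate misses, resolve reports ModuleNotFound for the dependency.
-}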
| let ext = takeExtension fileName 99 | case ext of 100 | ".js" -> tryJsWithExt basePath fileName require 101 | ".coffee" -> tryCoffeeWithExt basePath fileName require 102 | _ -> tryJs basePath fileName require <|> tryCoffee basePath fileName require 103 | 104 | tryJs :: FilePath -> FilePath -> Dependency -> AlternativeIO Dependency 105 | tryJs basePath fileName require = 106 | tryMainFromPackageJson basePath fileName require <|> 107 | moduleExistsInBase "" require <|> 108 | moduleExistsInBase fileName require <|> 109 | moduleExistsInBase (fileName <.> "js") require <|> 110 | moduleExistsInBase (fileName "index.js") require 111 | where 112 | moduleExistsInBase = moduleExists basePath 113 | 114 | tryJsWithExt :: FilePath -> FilePath -> Dependency -> AlternativeIO Dependency 115 | tryJsWithExt basePath fileName require = 116 | tryMainFromPackageJson basePath fileName require <|> 117 | moduleExistsInBase "" require <|> 118 | moduleExistsInBase fileName require 119 | where 120 | moduleExistsInBase = moduleExists basePath 121 | 122 | tryCoffee :: FilePath -> FilePath -> Dependency -> AlternativeIO Dependency 123 | tryCoffee basePath fileName require = 124 | moduleExistsInBase fileName require <|> 125 | moduleExistsInBase (fileName <.> "coffee") require <|> 126 | moduleExistsInBase (fileName "index.coffee") require 127 | where 128 | moduleExistsInBase = moduleExists basePath 129 | 130 | tryCoffeeWithExt 131 | :: FilePath -> FilePath -> Dependency -> AlternativeIO Dependency 132 | tryCoffeeWithExt basePath fileName require = 133 | moduleExistsInBase "" require <|> moduleExistsInBase fileName require 134 | where 135 | moduleExistsInBase = moduleExists basePath 136 | 137 | {-| check if we have a package.json. It contains information about the main file. 138 | -} 139 | tryMainFromPackageJson 140 | :: FilePath -> FilePath -> Dependency -> AlternativeIO Dependency 141 | tryMainFromPackageJson basePath fileName require = do 142 | let packageJsonPath = basePath fileName "package" <.> "json" 143 | exists <- AIO.lift (doesFileExist packageJsonPath) 144 | if exists 145 | then 146 | do 147 | PackageJson {main, browser} <- AIO.lift (PackageJson.load packageJsonPath) 148 | case browser <|> main of 149 | Just packageIndex -> 150 | moduleExists basePath (fileName packageIndex) require 151 | Nothing -> AIO.tryNext 152 | else AIO.tryNext 153 | 154 | moduleExists :: FilePath -> FilePath -> Dependency -> AlternativeIO Dependency 155 | moduleExists basePath path require = do 156 | let searchPath = basePath path 157 | exists <- AIO.lift (doesFileExist searchPath) 158 | if exists 159 | then return (require {filePath = searchPath}) 160 | else AIO.tryNext 161 | 162 | updateDepType :: Dependency -> Dependency 163 | updateDepType (Dependency _ r p l) = Dependency newType r p l 164 | where 165 | newType = Parser.Require.getFileType $ takeExtension p 166 | 167 | updateDepTime :: Dependency -> AlternativeIO Dependency 168 | updateDepTime (Dependency t r p _) = do 169 | status <- AIO.lift (getFileStatus p) 170 | let lastModificationTime = 171 | posixSecondsToUTCTime $ modificationTimeHiRes status 172 | return $ Dependency t r p $ Just lastModificationTime 173 | 174 | data Error 175 | = ModuleNotFound 176 | (Maybe FilePath) 177 | FilePath 178 | deriving (Typeable, Exception) 179 | 180 | instance Show Error where 181 | 182 | show (ModuleNotFound (Just requiredIn) file) = 183 | T.unpack $ 184 | T.unlines 185 | [ "" 186 | , "" 187 | , "I had troubles finding '" <> T.pack file <> "' required in '" <> 188 | T.pack requiredIn <> 189 
| "'." 190 | , "" 191 | , "Make sure that you spelled the name of the module correctly." 192 | , "You might also want to make sure that all dependencies are updated." 193 | ] 194 | show (ModuleNotFound Nothing file) = 195 | T.unpack $ 196 | T.unlines 197 | ["", "", "I had troubles finding the entry point " <> T.pack file <> "."] 198 | -------------------------------------------------------------------------------- /src/Safe/IO.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE ScopedTypeVariables #-} 2 | 3 | module Safe.IO 4 | ( writeFile 5 | , writeFileByteString 6 | ) 7 | where 8 | 9 | import qualified Control.Exception as Exception 10 | import qualified Data.ByteString.Lazy as BL 11 | import qualified Data.Text as T 12 | import qualified Data.Text.IO as TIO 13 | import qualified System.Directory as Dir 14 | import System.FilePath ((<.>)) 15 | import Prelude hiding (writeFile) 16 | 17 | writeFile :: FilePath -> T.Text -> IO () 18 | writeFile path content = do 19 | let writeThenMove tmp = do 20 | TIO.writeFile tmp content 21 | Dir.renameFile tmp path 22 | tmp = path <.> "tmp" 23 | writeThenMove tmp `Exception.finally` 24 | (Dir.removeFile tmp `Exception.catch` \(_ :: IOError) -> return ()) 25 | 26 | writeFileByteString :: FilePath -> BL.ByteString -> IO () 27 | writeFileByteString path content = do 28 | let writeThenMove tmp = do 29 | BL.writeFile tmp content 30 | Dir.renameFile tmp path 31 | tmp = path <.> "tmp" 32 | writeThenMove tmp `Exception.finally` 33 | (Dir.removeFile tmp `Exception.catch` \(_ :: IOError) -> return ()) 34 | -------------------------------------------------------------------------------- /src/ToolPaths.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveAnyClass #-} 2 | 3 | {-| This is used to check if the necessary tools for jetpack exist. 4 | -} 5 | module ToolPaths 6 | ( find 7 | , ToolPaths (..) 8 | ) 9 | where 10 | 11 | import qualified Config 12 | import Control.Exception.Safe (Exception) 13 | import qualified Control.Exception.Safe as ES 14 | import Data.Semigroup ((<>)) 15 | import qualified Data.Text as T 16 | import Data.Typeable (Typeable) 17 | import qualified System.Directory as Dir 18 | import System.FilePath (FilePath) 19 | 20 | data ToolPaths 21 | = ToolPaths 22 | { elm :: Config.ElmPath 23 | , coffee :: Config.CoffeePath 24 | } 25 | 26 | {-| Check if tool from config exists. It falls back to a globally installed bin. 
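For example (the path below is only illustrative, not a jetpack default):

> find (Just (Config.ElmPath "node_modules/.bin/elm")) Nothing

makes the given elm path absolute, falls back to a plain @coffee@ binary for the
missing CoffeePath, and throws 'BinNotFound' if either executable cannot be located.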
27 | -} 28 | find :: Maybe Config.ElmPath -> Maybe Config.CoffeePath -> IO ToolPaths 29 | find elmPath coffeePath = do 30 | elmPath' <- toAbsPathOrBin "elm" (Config.unElmPath <$> elmPath) 31 | _ <- binExists elmPath' 32 | let elm = Config.ElmPath elmPath' 33 | coffeePath' <- toAbsPathOrBin "coffee" (Config.unCoffeePath <$> coffeePath) 34 | _ <- binExists coffeePath' 35 | let coffee = Config.CoffeePath coffeePath' 36 | pure $ ToolPaths {elm, coffee} 37 | 38 | toAbsPathOrBin :: String -> Maybe FilePath -> IO FilePath 39 | toAbsPathOrBin _ (Just pathToBin) = Dir.makeAbsolute pathToBin 40 | toAbsPathOrBin defaultBin Nothing = return defaultBin 41 | 42 | binExists :: String -> IO () 43 | binExists bin = do 44 | exists <- Dir.findExecutable bin 45 | case exists of 46 | Just _ -> return () 47 | Nothing -> ES.throwM $ BinNotFound $ T.pack bin 48 | 49 | data Error 50 | = BinNotFound T.Text 51 | deriving (Typeable, Exception) 52 | 53 | instance Show Error where 54 | 55 | show (BinNotFound bin) = 56 | T.unpack $ 57 | T.unlines 58 | [ "I had troubles finding the " <> bin <> " command." 59 | , "" 60 | , "You might want to install it." 61 | ] 62 | -------------------------------------------------------------------------------- /src/Utils/Files.hs: -------------------------------------------------------------------------------- 1 | {-| Helpers for working with files/paths/dirs) 2 | -} 3 | module Utils.Files 4 | ( pathToFileName 5 | ) 6 | where 7 | 8 | import qualified Data.List as L 9 | import qualified Data.Text as T 10 | import System.FilePath ((<.>), splitDirectories) 11 | 12 | {-| Converts a path into a flat filename. 13 | >>> import System.FilePath ((</>), (<.>)) 14 | >>> pathToFileName ("." </> "foo" </> "bar" <.> "elm") "js" 15 | "foo___bar.elm.js" 16 | 17 | >>> pathToFileName ("." </> "bar" <.> "elm") "js" 18 | "bar.elm.js" 19 | -} 20 | pathToFileName :: FilePath -> String -> FilePath 21 | pathToFileName filePath extension = safeFileName filePath <.> extension 22 | 23 | safeFileName :: FilePath -> FilePath 24 | safeFileName = 25 | T.unpack . 26 | T.replace "-" "_" . 27 | T.concat . 28 | L.intersperse "___" . 29 | filter ((/=) ".") . 30 | fmap T.pack . 
31 | splitDirectories 32 | -------------------------------------------------------------------------------- /src/Utils/Parser.hs: -------------------------------------------------------------------------------- 1 | module Utils.Parser 2 | ( stringContent 3 | , betweenParens 4 | , eatTill 5 | ) 6 | where 7 | 8 | import qualified Data.Text as T 9 | import qualified Text.Parsec as P 10 | 11 | {-| imports for doctests 12 | >>> import qualified Text.Parsec as P 13 | >>> import qualified Data.Text as T 14 | -} 15 | {-| parses text between quotes or double qoutes 16 | >>> P.parse stringContent "invalid" "\"hello\"" 17 | Right "hello" 18 | 19 | >>> P.parse stringContent "invalid" "'hello'" 20 | Right "hello" 21 | 22 | >>> P.parse stringContent "invalid" "''" 23 | Left "invalid" (line 1, column 2): 24 | unexpected "'" 25 | 26 | >>> P.parse stringContent "invalid" "(foo)" 27 | Left "invalid" (line 1, column 1): 28 | unexpected "(" 29 | expecting "'" or "\"" 30 | -} 31 | stringContent :: P.Parsec T.Text u String 32 | stringContent = P.choice [quotes manyNotQuotes, doubleQuotes manyNotQuotes] 33 | 34 | {-| parses text between parens 35 | >>> P.parse (betweenParens stringContent) "invalid" "('hello')" 36 | Right "hello" 37 | 38 | >>> P.parse (betweenParens stringContent) "invalid" "\"foo\"" 39 | Left "invalid" (line 1, column 1): 40 | unexpected "\"" 41 | expecting "(" 42 | -} 43 | betweenParens :: P.Parsec T.Text u String -> P.Parsec T.Text u String 44 | betweenParens = P.between openAndSpaces spacesAndClose 45 | where 46 | openAndSpaces = (P.char '(') *> (P.skipMany P.space) 47 | spacesAndClose = (P.skipMany P.space) *> (P.char ')') 48 | 49 | manyNotQuotes :: P.Parsec T.Text u String 50 | manyNotQuotes = P.many1 $ P.noneOf "'\"" 51 | 52 | between 53 | :: P.Parsec T.Text u Char 54 | -> P.Parsec T.Text u String 55 | -> P.Parsec T.Text u String 56 | between c = P.between c c 57 | 58 | quotes :: P.Parsec T.Text u String -> P.Parsec T.Text u String 59 | quotes = between $ P.char '\'' 60 | 61 | doubleQuotes :: P.Parsec T.Text u String -> P.Parsec T.Text u String 62 | doubleQuotes = between $ P.char '"' 63 | 64 | {-| parses text between parens 65 | >>> P.parse (eatTill $ P.string "end") "invalid" "beginn foo end" 66 | Right "beginn foo " 67 | -} 68 | eatTill :: P.Parsec T.Text u String -> P.Parsec T.Text u String 69 | eatTill p = P.manyTill P.anyChar (P.lookAhead $ P.try p) 70 | -------------------------------------------------------------------------------- /src/Utils/Text.hs: -------------------------------------------------------------------------------- 1 | module Utils.Text where 2 | 3 | import Data.Text as T 4 | 5 | {-| Check if a text starts with a given prefix. 6 | 7 | >>> startsWith "--" "-- foo" 8 | True 9 | 10 | >>> startsWith "--" "-/ foo" 11 | False 12 | -} 13 | startsWith :: T.Text -> T.Text -> Bool 14 | startsWith start text = start == textStart 15 | where 16 | len = T.length start 17 | (textStart, _) = T.splitAt len text 18 | -------------------------------------------------------------------------------- /src/Utils/Tree.hs: -------------------------------------------------------------------------------- 1 | module Utils.Tree 2 | ( searchNode 3 | , foldTree_ 4 | , roots 5 | , nodesWithChildren 6 | ) 7 | where 8 | 9 | import Data.Tree 10 | 11 | {-| The first child that satisfies a predicate. 12 | >>> let myTree = Node 1 [ Node 11 [], Node 12 [ Node 21 [ Node 31 [] ] , Node 22 [] ] ] 13 | >>> searchNode ((==) 22 . 
rootLabel) myTree 14 | Just (Node {rootLabel = 22, subForest = []}) 15 | 16 | >>> searchNode ((==) 1 . rootLabel) myTree == Just myTree 17 | True 18 | 19 | >>> searchNode ((==) 23 . rootLabel) myTree 20 | Nothing 21 | 22 | >>> searchNode ((==) 12 . rootLabel) myTree 23 | Just (Node {rootLabel = 12, subForest = [Node {rootLabel = 21, subForest = [Node {rootLabel = 31, subForest = []}]},Node {rootLabel = 22, subForest = []}]}) 24 | -} 25 | searchNode :: (Tree a -> Bool) -> Tree a -> Maybe (Tree a) 26 | searchNode p = find . pure 27 | where 28 | find (x : _) 29 | | p x = Just x 30 | find (x : xs) = find (xs ++ subForest x) 31 | find [] = Nothing 32 | 33 | {-| Fold over a tree. 34 | >>> let myTree = Node 1 [ Node 11 [], Node 12 [ Node 21 [ Node 31 [] ] , Node 22 [] ] ] 35 | >>> foldTree_ (\a as -> show a : fmap show as) myTree 36 | ["1","11","12","11","12","21","22","21","31","31","22"] 37 | -} 38 | foldTree_ :: Monoid m => (a -> [a] -> m) -> Tree a -> m 39 | foldTree_ f (Node x ts) = mconcat $ f x (roots ts) : fmap (foldTree_ f) ts 40 | 41 | {-| Get all nodes and the children of each node. 42 | >>> let myTree = Node 1 [ Node 11 [], Node 12 [ Node 21 [ Node 31 [] ] , Node 22 [] ] ] 43 | >>> nodesWithChildren myTree 44 | [(1,[11,12]),(11,[]),(12,[21,22]),(21,[31]),(31,[]),(22,[])] 45 | -} 46 | nodesWithChildren :: Tree a -> [(a, [a])] 47 | nodesWithChildren = foldTree_ (\a as -> [(a, as)]) 48 | 49 | {-| Get all rootLabels of a Forest. 50 | >>> let myForest = [ Node 11 [], Node 12 [ Node 21 [ Node 31 [] ] , Node 22 [] ], Node 13 [] ] 51 | >>> roots myForest 52 | [11,12,13] 53 | -} 54 | roots :: Forest a -> [a] 55 | roots = fmap rootLabel 56 | -------------------------------------------------------------------------------- /src/Version.hs: -------------------------------------------------------------------------------- 1 | module Version 2 | ( print 3 | , check 4 | ) 5 | where 6 | 7 | import qualified Data.SemVer as SemVer 8 | import Data.Semigroup ((<>)) 9 | import qualified Data.Text as T 10 | import Data.Version (showVersion) 11 | import qualified Parser.JetpackVersion as JetpackVersion 12 | import Paths_jetpack (version) 13 | import Prelude hiding (print) 14 | 15 | print :: T.Text 16 | print = T.pack $ showVersion version 17 | 18 | check :: JetpackVersion.Version -> Maybe T.Text 19 | check JetpackVersion.Version {version} = 20 | case SemVer.fromText print of 21 | Left _ -> 22 | Just 23 | "The version defined in your jetpack.json seems to be incorrect. Check your package.json to find the correct version." 
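{- Sketch of the outcomes below, with the running binary at 1.0.0 (all version
   numbers invented):

     pinned 1.0.0  ->  Nothing
     pinned 1.1.0  ->  Just "Running jetpack@1.0.0 the config expects a newer version ..."
     pinned 0.9.0  ->  Just "Running jetpack@1.0.0 the config expects an older version ..."
-}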
24 | Right actual -> 25 | if version == actual 26 | then Nothing 27 | else 28 | if version > actual 29 | then 30 | Just 31 | ( "Running jetpack@" <> print <> 32 | " the config expects a newer version " <> 33 | SemVer.toText version 34 | ) 35 | else 36 | Just 37 | ( "Running jetpack@" <> print <> 38 | " the config expects an older version" <> 39 | SemVer.toText version 40 | ) 41 | -------------------------------------------------------------------------------- /src/Watcher.hs: -------------------------------------------------------------------------------- 1 | module Watcher 2 | ( watch 3 | , startWatcher 4 | , listenToCommands 5 | ) 6 | where 7 | 8 | import qualified Builder 9 | import CliArguments (Args (..)) 10 | import Config (Config (Config)) 11 | import qualified Config 12 | import Control.Monad (void) 13 | import Data.Semigroup ((<>)) 14 | import qualified Data.Text as T 15 | import qualified Data.Text.IO as TIO 16 | import qualified Notify 17 | import System.FilePath () 18 | import Text.Regex (mkRegex) 19 | 20 | watch :: Config -> Args -> Builder.HotReload -> IO () 21 | watch config args hotReloading = do 22 | putStrLn "Watching. Enter '?' to see the help." 23 | state <- startWatcher config (void $ Builder.build config args hotReloading) 24 | Notify.buildNow state 25 | listenToCommands state 26 | 27 | startWatcher :: Config -> IO () -> IO Notify.State 28 | startWatcher 29 | Config 30 | { Config.sourceDir 31 | , Config.watchFileExt 32 | , Config.watchIgnorePatterns 33 | } = 34 | Notify.watch 35 | Notify.Config 36 | { pathToWatch = Config.unSourceDir sourceDir 37 | , relevantExtensions = Config.unWatchFileExt <$> watchFileExt 38 | , ignorePatterns = mkRegex . T.unpack <$> Config.unWatchIgnorePatterns <$> 39 | watchIgnorePatterns 40 | } 41 | 42 | listenToCommands :: Notify.State -> IO () 43 | listenToCommands state = do 44 | value <- TIO.getLine 45 | case commandFromStr value of 46 | Just Rebuild -> do 47 | _ <- TIO.putStrLn "Forcing a rebuild..." 48 | _ <- Notify.buildNow state 49 | listenToCommands state 50 | Just Quit -> do 51 | Notify.end state 52 | putStrLn "Thanks for compiling with jetpack today. Have a great day!" 53 | Just Help -> do 54 | _ <- TIO.putStrLn "Help" 55 | _ <- TIO.putStrLn "====" 56 | _ <- TIO.putStrLn "" 57 | _ <- TIO.putStrLn "r: rebuild" 58 | _ <- TIO.putStrLn "q: quit" 59 | _ <- TIO.putStrLn "?: help" 60 | listenToCommands state 61 | Just (Unknown str) -> do 62 | TIO.putStrLn ("Unknown command \"" <> str <> "\"") 63 | listenToCommands state 64 | Nothing -> listenToCommands state 65 | 66 | data Command 67 | = Rebuild 68 | | Quit 69 | | Help 70 | | Unknown T.Text 71 | 72 | commandFromStr :: T.Text -> Maybe Command 73 | commandFromStr "r" = Just Rebuild 74 | commandFromStr "q" = Just Quit 75 | commandFromStr "?" = Just Help 76 | commandFromStr "" = Nothing 77 | commandFromStr char = Just (Unknown char) 78 | -------------------------------------------------------------------------------- /stack.yaml: -------------------------------------------------------------------------------- 1 | flags: {} 2 | extra-package-dbs: [] 3 | packages: 4 | - '.' 
5 | resolver: lts-13.30 6 | nix: 7 | enable: true 8 | shell-file: shell.nix 9 | -------------------------------------------------------------------------------- /test/ConcatModuleSpec.hs: -------------------------------------------------------------------------------- 1 | module ConcatModuleSpec where 2 | 3 | import ConcatModule 4 | import Config 5 | import Control.Monad.State (modify) 6 | import Data.Foldable 7 | import Data.Text as T 8 | import Data.Tree as Tree 9 | import Dependencies as D 10 | import Parser.Ast as Ast 11 | import System.Directory (removeFile) 12 | import System.FilePath ((<.>), ()) 13 | import Test.Tasty 14 | import Test.Tasty.HUnit 15 | 16 | mockModule :: T.Text 17 | mockModule = T.unlines ["var foo = require('foo.js');", "", "foo(42)"] 18 | 19 | wrappedModule :: T.Text 20 | wrappedModule = 21 | T.unlines 22 | [ "/* START: testFunction */" 23 | , "function testFunction_js(module, exports) {" 24 | , "var foo = require('foo.js');" 25 | , "" 26 | , "foo(42)" 27 | , "\n} /* END: testFunction */" 28 | ] 29 | 30 | mockDependencyTree :: D.DependencyTree 31 | mockDependencyTree = 32 | Tree.Node 33 | (dependency "index") 34 | [Tree.Node (dependency "main") [], Tree.Node (dependency "index") []] 35 | where 36 | dependency fileName = 37 | D.Dependency 38 | Ast.Js 39 | (fileName <.> "js") 40 | ("ui" "src" fileName <.> "js") 41 | Nothing 42 | 43 | mockConfig :: Config 44 | mockConfig = 45 | Config 46 | { entryPoints = EntryPoints ("." "test" "fixtures" "concat" "modules") 47 | , modulesDirs = [] 48 | , sourceDir = SourceDir ("." "test" "fixtures" "concat" "sources") 49 | , elmRoot = ElmRoot ("." "test" "fixtures" "concat" "sources") 50 | , tempDir = TempDir ("." "test" "fixtures" "concat" "tmp") 51 | , logDir = LogDir ("." "test" "fixtures" "concat" "logs") 52 | , outputDir = OutputDir ("." "test" "fixtures" "concat" "js") 53 | , elmPath = Nothing 54 | , coffeePath = Nothing 55 | , noParse = [] 56 | , watchFileExt = [] 57 | , watchIgnorePatterns = [] 58 | } 59 | 60 | mockDependency :: FilePath -> FilePath -> D.Dependency 61 | mockDependency f p = D.Dependency Ast.Js f p Nothing 62 | 63 | mockDependencies :: D.Dependencies 64 | mockDependencies = 65 | [ Tree.Node 66 | (dependency "modules" "Foo") 67 | [Tree.Node (dependency "sources" "Moo") []] 68 | ] 69 | where 70 | dependency location fileName = 71 | mockDependency 72 | ("." fileName) 73 | ( "." 
"test" "fixtures" "concat" location "Page" 74 | fileName <.> 75 | "js" 76 | ) 77 | 78 | expectedOutput :: [T.Text] 79 | expectedOutput = 80 | [ T.unlines $ 81 | [ "(function() {" 82 | , "var jetpackCache = {};" 83 | , "function jetpackRequire(fn, fnName) {" 84 | , " var e = {};" 85 | , " var m = { exports : e };" 86 | , " if (typeof fn !== \"function\") {" 87 | , " console.error(\"Required function isn't a jetpack module.\", fn)" 88 | , " return;" 89 | , " }" 90 | , " if (jetpackCache[fnName]) {" 91 | , " return jetpackCache[fnName];" 92 | , " }" 93 | , " jetpackCache[fnName] = m.exports;" 94 | , " fn(m, e); " 95 | , " jetpackCache[fnName] = m.exports;" 96 | , " return m.exports;" 97 | , "}" 98 | , "/* START: ./test/fixtures/concat/modules/Page/Foo.js */" 99 | , "function test___fixtures___concat___modules___Page___Foo_js_js(module, exports) {" 100 | , "var moo = jetpackRequire(test___fixtures___concat___sources___Page___Moo_js_js, \"test___fixtures___concat___sources___Page___Moo_js_js\");" 101 | , "moo(4, 2);" 102 | , "\n} /* END: ./test/fixtures/concat/modules/Page/Foo.js */" 103 | , "/* START: ./test/fixtures/concat/sources/Page/Moo.js */" 104 | , "function test___fixtures___concat___sources___Page___Moo_js_js(module, exports) {" 105 | , "module.exports = function(a, b) {" 106 | , " console.log(a + b + \"\");" 107 | , "};" 108 | , "\n} /* END: ./test/fixtures/concat/sources/Page/Moo.js */" 109 | , "" 110 | , "jetpackRequire(test___fixtures___concat___modules___Page___Foo_js_js, \"test___fixtures___concat___modules___Page___Foo_js_js\");" 111 | , "})();" 112 | ] 113 | ] 114 | 115 | suite :: TestTree 116 | suite = 117 | testGroup 118 | "ConcatModule" 119 | [ testCase "#wrapModule" $ do 120 | wrapModule "a___b.elm" "" @?= 121 | "/* START: a/b.elm */ console.warn(\"a/b.elm: is an empty module!\");\nfunction a___b_elm_js(module, exports) {\n\n} /* END: a/b.elm */\n" 122 | , testCase "#wrapModule wraps a module in a function" $ 123 | wrapModule "testFunction" mockModule @?= 124 | wrappedModule 125 | , testCase 126 | "#replaceRequire replaces require('string') with jetpackRequire(function, fnName)" $ 127 | replaceRequire 128 | (mockDependency "foo" $ "ui" "src" "foo") 129 | "var x = require('foo')" @?= 130 | "var x = jetpackRequire(ui___src___foo_js, \"ui___src___foo_js\")" 131 | , testCase 132 | "#replaceRequire replaces require(\"string\") with jetpackRequire(function, fnName)" $ 133 | replaceRequire 134 | (mockDependency "foo" $ "ui" "src" "foo") 135 | "var x = require(\"foo\")" @?= 136 | "var x = jetpackRequire(ui___src___foo_js, \"ui___src___foo_js\")" 137 | , testCase 138 | "#replaceRequire replaces require( 'string' ) with jetpackRequire(function, fnName)" $ 139 | replaceRequire 140 | (mockDependency "foo" $ "ui" "src" "foo") 141 | "var x = require( 'foo' )" @?= 142 | "var x = jetpackRequire(ui___src___foo_js, \"ui___src___foo_js\")" 143 | , testCase "#wrap" $ do 144 | (paths, actual) <- unzip <$> traverse (wrap mockConfig) mockDependencies 145 | paths @?= ["./test/fixtures/concat/js/Page/Foo.js"] 146 | actual @?= expectedOutput 147 | ] 148 | -------------------------------------------------------------------------------- /test/ConfigSpec.hs: -------------------------------------------------------------------------------- 1 | module ConfigSpec where 2 | 3 | import Config 4 | import System.FilePath (()) 5 | import Test.Tasty 6 | import Test.Tasty.HUnit 7 | 8 | suite :: TestTree 9 | suite = 10 | testGroup 11 | "Config" 12 | [ testCase "#load success" $ do 13 | config <- Config.load 
"./test/fixtures" 14 | config @=? 15 | ( Config.Config 16 | { entryPoints = EntryPoints $ "app" "modules" 17 | , modulesDirs = [] 18 | , sourceDir = SourceDir $ "app" "sources" 19 | , elmRoot = ElmRoot $ "app" "sources" 20 | , tempDir = TempDir $ "app" "tmp" 21 | , logDir = LogDir $ "app" "logs" 22 | , outputDir = OutputDir $ "app" "js" 23 | , elmPath = Nothing 24 | , coffeePath = Nothing 25 | , noParse = [ NoParse $ 26 | "." 27 | "node_modules" 28 | "clipboard" 29 | "clipboard.js" 30 | ] 31 | , watchFileExt = WatchFileExt <$> [".elm", ".coffee", ".js", ".json"] 32 | , watchIgnorePatterns = WatchIgnorePatterns <$> ["/[.]#[^/]*$", "/~[^/]*$"] 33 | , hotReloadingPort = Config.HotReloadingPort 31337 34 | } 35 | ) 36 | ] 37 | -------------------------------------------------------------------------------- /test/DependenciesSpec.hs: -------------------------------------------------------------------------------- 1 | module DependenciesSpec where 2 | 3 | import Config 4 | import qualified Control.Exception.Safe as ES 5 | import Control.Monad.Except (runExceptT) 6 | import Control.Monad.State (modify) 7 | import Data.List as L 8 | import Data.Tree as Tree 9 | import Dependencies 10 | import DependencyTree 11 | import Parser.Ast as Ast 12 | import System.FilePath ((<.>), ()) 13 | import Test.Tasty 14 | import Test.Tasty.HUnit 15 | 16 | basicsFixtures :: Config 17 | basicsFixtures = 18 | Config 19 | { entryPoints = EntryPoints ("." "test" "fixtures" "basics" "modules") 20 | , modulesDirs = [ ModulesDir 21 | ("." "test" "fixtures" "basics" "node_modules") 22 | ] 23 | , sourceDir = SourceDir ("." "test" "fixtures" "basics" "sources") 24 | , elmRoot = ElmRoot ("." "test" "fixtures" "basics" "sources") 25 | , tempDir = TempDir ("." "test" "fixtures" "basics" "tmp") 26 | , logDir = LogDir ("." "test" "fixtures" "basics" "logs") 27 | , outputDir = OutputDir ("." "test" "fixtures" "basics" "js") 28 | , elmPath = Nothing 29 | , coffeePath = Nothing 30 | , noParse = [ NoParse $ 31 | "." 32 | "test" 33 | "fixtures" 34 | "basics" 35 | "node_modules" 36 | "clipboard" 37 | "index.js" 38 | ] 39 | , watchFileExt = [] 40 | , watchIgnorePatterns = [] 41 | } 42 | 43 | failingFixtures :: Config 44 | failingFixtures = 45 | Config 46 | { entryPoints = EntryPoints ("." "test" "fixtures" "failing" "modules") 47 | , modulesDirs = [] 48 | , sourceDir = SourceDir ("." "test" "fixtures" "failing" "sources") 49 | , elmRoot = ElmRoot ("." "test" "fixtures" "failing" "sources") 50 | , tempDir = TempDir ("." "test" "fixtures" "failing" "tmp") 51 | , logDir = LogDir ("." "test" "fixtures" "failing" "logs") 52 | , outputDir = OutputDir ("." "test" "fixtures" "failing" "js") 53 | , elmPath = Nothing 54 | , coffeePath = Nothing 55 | , noParse = [] 56 | , watchFileExt = [] 57 | , watchIgnorePatterns = [] 58 | } 59 | 60 | suite :: TestTree 61 | suite = 62 | testGroup 63 | "Dependencies" 64 | [ testCase "#build success" $ do 65 | dep <- DependencyTree.build basicsFixtures [] ("test" <.> "js") 66 | (fmap dropLastMod $ Tree.flatten dep) @?= 67 | [ ( Ast.Js 68 | , "" "test.js" 69 | , "." "test" "fixtures" "basics" "modules" 70 | "test.js" 71 | ) 72 | , ( Ast.Coffee 73 | , "" "index" 74 | , "." "test" "fixtures" "basics" "sources" 75 | "index.coffee" 76 | ) 77 | , ( Ast.Js 78 | , "" "lodash" 79 | , "." "test" "fixtures" "basics" "node_modules" 80 | "lodash" 81 | "index.js" 82 | ) 83 | , ( Ast.Js 84 | , "." "lodash.dist.js" 85 | , "." "test" "fixtures" "basics" "node_modules" 86 | "lodash" 87 | "." 
88 | "lodash.dist.js" 89 | ) 90 | , ( Ast.Js 91 | , "." "lodash" 92 | , "." "test" "fixtures" "basics" "node_modules" 93 | "lodash" 94 | "." 95 | "." 96 | "lodash.js" 97 | ) 98 | , ( Ast.Js 99 | , "" "debug" 100 | , "." "test" "fixtures" "basics" "node_modules" 101 | "lodash" 102 | "." 103 | "node_modules" 104 | "debug.js" 105 | ) 106 | ] 107 | , testCase "#build no_parse" $ do 108 | dep <- DependencyTree.build basicsFixtures [] ("test_no_parse" <.> "js") 109 | (fmap dropLastMod $ Tree.flatten dep) @?= 110 | [ ( Ast.Js 111 | , "" "test_no_parse.js" 112 | , "." "test" "fixtures" "basics" "modules" 113 | "test_no_parse.js" 114 | ) 115 | , ( Ast.Js 116 | , "" "no_parse_index" 117 | , "." "test" "fixtures" "basics" "sources" 118 | "no_parse_index.js" 119 | ) 120 | , ( Ast.Js 121 | , "clipboard" 122 | , "." "test" "fixtures" "basics" "node_modules" 123 | "clipboard" 124 | "index.js" 125 | ) 126 | ] 127 | , testCase "#build failing" $ do 128 | result <- 129 | ES.tryAny $ DependencyTree.build failingFixtures [] ("test" <.> "js") 130 | case result of 131 | Left err -> 132 | show err @?= 133 | "\n\nI had troubles finding 'index' required in './test/fixtures/failing/modules/test.js'.\n\nMake sure that you spelled the name of the module correctly.\nYou might also want to make sure that all dependencies are updated.\n" 134 | Right _ -> assertFailure $ "This shouldn't pass" 135 | ] 136 | 137 | dropLastMod :: Dependency -> (Ast.SourceType, FilePath, FilePath) 138 | dropLastMod Dependency {fileType, requiredAs, filePath} = 139 | (fileType, requiredAs, filePath) 140 | -------------------------------------------------------------------------------- /test/Helper/Property.hs: -------------------------------------------------------------------------------- 1 | {-| Helpers for property based tests. 2 | -} 3 | module Helper.Property where 4 | 5 | import Data.Text as T 6 | import Test.Tasty.QuickCheck as QC 7 | 8 | newtype AlphaNum 9 | = AlphaNum T.Text 10 | deriving (Show) 11 | 12 | instance (QC.Arbitrary AlphaNum) where 13 | 14 | arbitrary = fmap (AlphaNum . T.pack) letterOrDigit 15 | 16 | newtype CodeNoComments 17 | = CodeNoComments T.Text 18 | deriving (Show) 19 | 20 | instance (QC.Arbitrary CodeNoComments) where 21 | 22 | arbitrary = fmap (CodeNoComments . 
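{- Rough shape of the strings these generators feed to the comment-parser
   properties (the concrete values are arbitrary examples, not fixed outputs):

     AlphaNum       ~ AlphaNum "fooBar_42"
     CodeNoComments ~ CodeNoComments "a+b;c*D%e2"

   code draws only from letters, digits, '_' and the symbols + ; * %, so the
   generated snippets can never contain //, /* */, #, ### or -- comment markers.
-}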
T.pack) code 23 | 24 | alphaFreqList :: [(Int, QC.Gen Char)] 25 | alphaFreqList = 26 | [ (26, QC.choose ('a', 'z')) 27 | , (26, QC.choose ('A', 'Z')) 28 | , (1, QC.elements ['_']) 29 | ] 30 | 31 | digitFreqList :: [(Int, QC.Gen Char)] 32 | digitFreqList = [(10, QC.choose ('0', '9'))] 33 | 34 | symbolsFreqList :: [(Int, QC.Gen Char)] 35 | symbolsFreqList = [(5, QC.elements ['+', ';', '*', '%'])] 36 | 37 | letter :: QC.Gen Char 38 | letter = QC.frequency alphaFreqList 39 | 40 | letterOrDigit :: QC.Gen String 41 | letterOrDigit = listOf1 $ QC.frequency $ alphaFreqList ++ digitFreqList 42 | 43 | code :: QC.Gen String 44 | code = 45 | listOf1 $ QC.frequency $ alphaFreqList ++ digitFreqList ++ symbolsFreqList 46 | -------------------------------------------------------------------------------- /test/Parser/CommentSpec.hs: -------------------------------------------------------------------------------- 1 | module Parser.CommentSpec where 2 | 3 | import Control.Monad.Except (runExceptT) 4 | import Data.Maybe as M 5 | import Data.Text as T 6 | import Helper.Property 7 | import qualified Parser.Comment 8 | import System.FilePath ((<.>), ()) 9 | import Test.Tasty 10 | import Test.Tasty.HUnit 11 | import Test.Tasty.QuickCheck 12 | 13 | suite :: TestTree 14 | suite = 15 | testGroup 16 | "Parser.Comment" 17 | [ testCase "parse no comments" $ 18 | Parser.Comment.eatJsComments "asdf" @?= 19 | "asdf" 20 | , testCase "parse block comments" $ 21 | Parser.Comment.eatJsComments "moo/*foo*/boo" @?= 22 | "mooboo" 23 | , testCase "parse block comments starting at the beginning" $ 24 | Parser.Comment.eatJsComments "/*foo*/boo" @?= 25 | "boo" 26 | , testCase "parse block comments" $ 27 | "a\n\nb c\n\n" @=? 28 | ( Parser.Comment.eatJsComments $ 29 | T.unlines ["a\n", "b /* ignore", "xxxxxx", " */ c\n"] 30 | ) 31 | , testCase "parse line comments" $ 32 | "a\n\nb c\n\n" @=? 33 | ( Parser.Comment.eatJsComments $ 34 | T.unlines ["a\n", "b // ignore", "//xxxxxx", " c\n"] 35 | ) 36 | , testCase "parse block and line comments" $ 37 | "a\n\nb c\n\nb \nOOOO\noooo\n\n" @=? 38 | ( Parser.Comment.eatJsComments $ 39 | T.unlines 40 | [ "a\n" 41 | , "b // ignore" 42 | , "//xxxxxx" 43 | , " c\n" 44 | , "b /* BLOCK" 45 | , "BLOCK" 46 | , "*/" 47 | , "OOOO" 48 | , "// ignore" 49 | , "oooo" 50 | , "/**" 51 | , " * IGNORE" 52 | , " */" 53 | ] 54 | ) 55 | , testCase "only comments" $ 56 | "" @=? 57 | (Parser.Comment.eatJsComments $ T.concat ["// foo", "/* asdf */"]) 58 | , testCase "only comments" $ 59 | "" @=? 60 | (Parser.Comment.eatJsComments $ T.concat ["/* foo */", "// asdf"]) 61 | , testCase "only comments" $ 62 | "" @=? 
63 | (Parser.Comment.eatJsComments $ T.concat ["// foo", "/* asdf */"]) 64 | ] 65 | 66 | properties :: TestTree 67 | properties = 68 | testGroup 69 | "Parser.Comment Properties" 70 | [ testProperty "#eatJsComments" $ \b1 b2 b3 -> 71 | ( Parser.Comment.eatJsComments $ 72 | Parser.Comment.eatJsComments $ 73 | codeWithJsComments b1 b2 b3 74 | ) == 75 | (Parser.Comment.eatJsComments $ codeWithJsComments b1 b2 b3) 76 | , testProperty "#eatJsComments" $ \b1 b2 b3 -> 77 | (Parser.Comment.eatJsComments $ codeWithJsComments b1 b2 b3) /= 78 | (codeWithJsComments b1 b2 b3) 79 | , testProperty "#eatJsComments" $ \b1 b2 b3 -> 80 | (Parser.Comment.eatJsComments $ codeWithJsComments b1 b2 b3) == 81 | code b1 b2 b3 82 | , testProperty "#eatCoffeeComments" $ \b1 b2 b3 -> 83 | ( Parser.Comment.eatCoffeeComments $ 84 | Parser.Comment.eatCoffeeComments $ 85 | codeWithCoffeeComments b1 b2 b3 86 | ) == 87 | (Parser.Comment.eatCoffeeComments $ codeWithCoffeeComments b1 b2 b3) 88 | , testProperty "#eatCoffeeComments" $ \b1 b2 b3 -> 89 | (Parser.Comment.eatCoffeeComments $ codeWithCoffeeComments b1 b2 b3) /= 90 | (codeWithCoffeeComments b1 b2 b3) 91 | , testProperty "#eatCoffeeComments" $ \b1 b2 b3 -> 92 | (Parser.Comment.eatCoffeeComments $ codeWithCoffeeComments b1 b2 b3) == 93 | code b1 b2 b3 94 | , testProperty "#eatElmComments" $ \b1 b2 b3 -> 95 | ( Parser.Comment.eatElmComments $ 96 | Parser.Comment.eatElmComments $ 97 | codeWithElmComments b1 b2 b3 98 | ) == 99 | (Parser.Comment.eatElmComments $ codeWithElmComments b1 b2 b3) 100 | , testProperty "#eatElmComments" $ \b1 b2 b3 -> 101 | (Parser.Comment.eatElmComments $ codeWithElmComments b1 b2 b3) /= 102 | (codeWithElmComments b1 b2 b3) 103 | , testProperty "#eatElmComments" $ \b1 b2 b3 -> 104 | (Parser.Comment.eatElmComments $ codeWithElmComments b1 b2 b3) == 105 | code b1 b2 b3 106 | ] 107 | where 108 | code (CodeNoComments b1) (CodeNoComments b2) (CodeNoComments b3) = 109 | T.unlines [b1, T.concat [b2, "\n"], b3] 110 | codeWithJsComments (CodeNoComments b1) (CodeNoComments b2) (CodeNoComments b3) = 111 | T.unlines [b1, "// hello", b2, "/*\n world \n*/", b3] 112 | codeWithCoffeeComments (CodeNoComments b1) (CodeNoComments b2) (CodeNoComments b3) = 113 | T.unlines [b1, "# hello", b2, "###\nworld\n###", b3] 114 | codeWithElmComments (CodeNoComments b1) (CodeNoComments b2) (CodeNoComments b3) = 115 | T.unlines [b1, "-- hello", b2, "{-\nworld\n-}", b3] 116 | -------------------------------------------------------------------------------- /test/Parser/RequireSpec.hs: -------------------------------------------------------------------------------- 1 | module Parser.RequireSpec where 2 | 3 | import Control.Monad.Except (runExceptT) 4 | import Data.Maybe as M 5 | import Data.Text as T 6 | import Helper.Property 7 | import Parser.Ast as Ast 8 | import qualified Parser.Require as Require 9 | import System.FilePath ((<.>), ()) 10 | import Test.Tasty 11 | import Test.Tasty.HUnit 12 | import Test.Tasty.QuickCheck 13 | 14 | assertRequire :: T.Text -> [Ast.Require] -> Assertion 15 | assertRequire content requires = 16 | case Require.require content of 17 | [] -> assertFailure "failed" 18 | rs -> rs @?= requires 19 | 20 | assertParsingFails :: T.Text -> Assertion 21 | assertParsingFails = assertBool "unexpected success" . (==) [] . 
Require.require 22 | 23 | suite :: TestTree 24 | suite = 25 | testGroup 26 | "Require" 27 | [ testCase ".elm" $ 28 | assertRequire "var x = require(\"x.elm\")" [Ast.Require Ast.Elm "x.elm"] 29 | , testCase ".elm with namespace" $ 30 | assertRequire 31 | "let foo = require(\"foo.elm\")" 32 | [Ast.Require Ast.Elm "foo.elm"] 33 | , testCase ".elm" $ 34 | assertRequire 35 | "require(\"foo.bar.elm\");" 36 | [Ast.Require Ast.Elm $ "foo" <.> "bar.elm"] 37 | , testCase ".coffee" $ 38 | assertRequire 39 | "require(\"foo.bar.coffee\")" 40 | [Ast.Require Ast.Coffee $ "foo" <.> "bar.coffee"] 41 | , testCase "no ext" $ 42 | assertRequire 43 | "require(\"foo.bar\")" 44 | [Ast.Require Ast.Js $ "foo" <.> "bar"] 45 | , testCase "with whitespaces" $ 46 | assertRequire "require( \t\"jquery\"\t )" [Ast.Require Ast.Js $ "jquery"] 47 | , testCase "js" $ 48 | assertRequire 49 | "require(\"foo.bar.js\")" 50 | [Ast.Require Ast.Js $ "foo" <.> "bar.js"] 51 | , testCase "js with ;" $ 52 | assertRequire 53 | "require(\"foo.bar.js\");" 54 | [Ast.Require Ast.Js $ "foo" <.> "bar.js"] 55 | , testCase "js" $ 56 | assertRequire 57 | "require(\"foo.bar.js\"), require(\"moo.bar.js\")" 58 | [ Ast.Require Ast.Js $ "foo" <.> "bar.js" 59 | , Ast.Require Ast.Js $ "moo" <.> "bar.js" 60 | ] 61 | , testCase "coffee" $ 62 | assertRequire 63 | "coffee = require \"foo.bar.js\"" 64 | [Ast.Require Ast.Js $ "foo" <.> "bar.js"] 65 | , testCase "js" $ 66 | assertRequire 67 | "require 'foo.bar.js' " 68 | [Ast.Require Ast.Js $ "foo" <.> "bar.js"] 69 | , testCase "node_module" $ 70 | assertRequire "require 'lodash'" [Ast.Require Ast.Js "lodash"] 71 | , testCase "multilines" $ 72 | assertRequire 73 | (T.unlines ["// test", "moo = require 'foo.bar.js'", "moo(42)"]) 74 | [Ast.Require Ast.Js $ "foo" <.> "bar.js"] 75 | , testCase "multilines" $ 76 | assertRequire 77 | (T.unlines ["// test", "var moo = require('foo.bar.js');", "moo(42)"]) 78 | [Ast.Require Ast.Js $ "foo" <.> "bar.js"] 79 | , testCase "fails" $ 80 | assertParsingFails $ 81 | T.unlines ["// test", "var moo = require('foo.bar.js';", "moo(42)"] 82 | ] 83 | 84 | properties :: TestTree 85 | properties = 86 | testGroup 87 | "Require Properties" 88 | [ testProperty "#require" $ \(AlphaNum name) (AlphaNum ext) -> 89 | case require name ext of 90 | [] -> False 91 | c -> 92 | c == 93 | [ ( Ast.Require (Require.getFileType $ T.unpack ext) $ 94 | T.unpack name <.> 95 | T.unpack ext 96 | ) 97 | ] 98 | ] 99 | where 100 | require name ext = 101 | Require.require $ T.concat ["\nrequire '", name, ".", ext, "'\n"] 102 | -------------------------------------------------------------------------------- /test/Spec.hs: -------------------------------------------------------------------------------- 1 | import ConcatModuleSpec 2 | import ConfigSpec 3 | import DependenciesSpec 4 | import Parser.CommentSpec 5 | import Parser.RequireSpec 6 | import System.FilePath.Glob (glob) 7 | import Test.DocTest 8 | import Test.Tasty 9 | import Test.Tasty.HUnit 10 | 11 | main :: IO () 12 | main = runDocTests >> runTests 13 | 14 | runTests :: IO () 15 | runTests = do 16 | defaultMain $ 17 | testGroup 18 | "jetpack" 19 | [ testGroup 20 | "suites" 21 | [ ConfigSpec.suite 22 | , ConcatModuleSpec.suite 23 | , DependenciesSpec.suite 24 | , Parser.RequireSpec.suite 25 | , Parser.CommentSpec.suite 26 | ] 27 | , testGroup 28 | "properties" 29 | [Parser.RequireSpec.properties, Parser.CommentSpec.properties] 30 | ] 31 | 32 | runDocTests :: IO () 33 | runDocTests = do 34 | tests <- glob "src/**/*.hs" 35 | doctest 36 | ( [ 
"-XDeriveFunctor" 37 | , "-XDeriveGeneric" 38 | , "-XDuplicateRecordFields" 39 | , "-XNamedFieldPuns" 40 | , "-XOverloadedStrings" 41 | , "-XPackageImports" 42 | ] ++ 43 | tests 44 | ) 45 | -------------------------------------------------------------------------------- /test/fixtures/basics/.gitignore: -------------------------------------------------------------------------------- 1 | tmp 2 | -------------------------------------------------------------------------------- /test/fixtures/basics/modules/test.js: -------------------------------------------------------------------------------- 1 | require("index"); 2 | -------------------------------------------------------------------------------- /test/fixtures/basics/modules/test_no_parse.js: -------------------------------------------------------------------------------- 1 | require("no_parse_index"); 2 | -------------------------------------------------------------------------------- /test/fixtures/basics/node_modules/clipboard/index.js: -------------------------------------------------------------------------------- 1 | // 2 | require('./clipboard-action'); 3 | -------------------------------------------------------------------------------- /test/fixtures/basics/node_modules/lodash/index.js: -------------------------------------------------------------------------------- 1 | require('./lodash.dist.js') 2 | -------------------------------------------------------------------------------- /test/fixtures/basics/node_modules/lodash/lodash.dist.js: -------------------------------------------------------------------------------- 1 | console.log('lodash.dist.js') 2 | require('./lodash') 3 | require('debug') 4 | -------------------------------------------------------------------------------- /test/fixtures/basics/node_modules/lodash/lodash.js: -------------------------------------------------------------------------------- 1 | console.log('lodash.js') 2 | -------------------------------------------------------------------------------- /test/fixtures/basics/node_modules/lodash/node_modules/debug.js: -------------------------------------------------------------------------------- 1 | // this is debug 2 | -------------------------------------------------------------------------------- /test/fixtures/basics/sources/index.coffee: -------------------------------------------------------------------------------- 1 | _ = require "lodash" 2 | # var Main = require("Main.elm") 3 | -------------------------------------------------------------------------------- /test/fixtures/basics/sources/no_parse_index.js: -------------------------------------------------------------------------------- 1 | var clipboard = require("clipboard"); 2 | -------------------------------------------------------------------------------- /test/fixtures/basics/tmp/.gitkeep: -------------------------------------------------------------------------------- 1 | deps.json 2 | -------------------------------------------------------------------------------- /test/fixtures/basics/tmp/deps.json: -------------------------------------------------------------------------------- 1 | 
1 | [[{"filePath":"./test/fixtures/basics/modules/test.js","requiredAs":"test.js","fileType":"Js","lastModificationTime":"2017-03-09T10:29:49Z"},[[{"filePath":"./test/fixtures/basics/sources/index.coffee","requiredAs":"index","fileType":"Coffee","lastModificationTime":"2017-03-27T12:09:54Z"},[[{"filePath":"./test/fixtures/basics/sources/../node_modules/lodash/index.js","requiredAs":"lodash","fileType":"Js","lastModificationTime":"2017-03-09T14:05:40Z"},[[{"filePath":"./test/fixtures/basics/sources/../node_modules/lodash/./lodash.dist.js","requiredAs":"./lodash.dist.js","fileType":"Js","lastModificationTime":"2017-03-09T14:12:11Z"},[[{"filePath":"./test/fixtures/basics/sources/../node_modules/lodash/././lodash.js","requiredAs":"./lodash","fileType":"Js","lastModificationTime":"2017-03-09T14:05:40Z"},[]],[{"filePath":"./test/fixtures/basics/sources/../node_modules/lodash/./node_modules/debug.js","requiredAs":"debug","fileType":"Js","lastModificationTime":"2017-03-09T14:11:49Z"},[]]]]]]]]]]]
--------------------------------------------------------------------------------
/test/fixtures/concat/js/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/NoRedInk/jetpack/721d12226b593c117cba26ceb7c463c7c3334b8b/test/fixtures/concat/js/.gitkeep
--------------------------------------------------------------------------------
/test/fixtures/concat/tmp/test___fixtures___concat___modules___Page___Foo.js.js:
--------------------------------------------------------------------------------
1 | var moo = require('./Moo');
2 | moo(4, 2);
3 | 
--------------------------------------------------------------------------------
/test/fixtures/concat/tmp/test___fixtures___concat___sources___Page___Moo.js.js:
--------------------------------------------------------------------------------
1 | module.exports = function(a, b) {
2 |   console.log(a + b + "");
3 | };
4 | 
--------------------------------------------------------------------------------
/test/fixtures/failing/.gitignore:
--------------------------------------------------------------------------------
1 | tmp
2 | 
--------------------------------------------------------------------------------
/test/fixtures/failing/modules/test.js:
--------------------------------------------------------------------------------
1 | require("index");
2 | 
--------------------------------------------------------------------------------
/test/fixtures/failing/node_modules/lodash/lodash.dist.js:
--------------------------------------------------------------------------------
1 | console.log('lodash')
2 | 
--------------------------------------------------------------------------------
/test/fixtures/failing/sources/index.kaffe:
--------------------------------------------------------------------------------
1 | _ = require "lodash/lodash.dist.js"
2 | # var Main = require("Main.elm")
3 | 
--------------------------------------------------------------------------------
/test/fixtures/failing/tmp/.gitkeep:
--------------------------------------------------------------------------------
1 | deps.json
2 | 
--------------------------------------------------------------------------------
/test/fixtures/failing/tmp/deps.json:
--------------------------------------------------------------------------------
1 | [[{"filePath":"./test/fixtures/failing/modules/test.js","requiredAs":"test.js","fileType":"Js","lastModificationTime":"2017-03-09T12:05:25Z"},[]]]
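
The tmp/deps.json fixtures above serialize each dependency tree as nested two-element arrays: every entry is [node, children], where the node object carries filePath, requiredAs, fileType and lastModificationTime, and children is again a list of entries. As a rough illustration of that shape only (DepsSketch, DepNode and DepEntry, and the String-typed fields, are assumed names for this sketch, not jetpack's actual Dependencies.hs or DependencyTree.hs), such a file could be decoded with Aeson like this:

{-# LANGUAGE DeriveGeneric #-}

-- Illustrative sketch only: the names and String-typed fields here are
-- hypothetical and are not taken from jetpack's own modules.
module DepsSketch where

import Data.Aeson
import qualified Data.ByteString.Lazy as BL
import Data.Foldable (toList)
import GHC.Generics (Generic)

-- One dependency record, mirroring the keys visible in tmp/deps.json.
data DepNode = DepNode
  { filePath :: FilePath
  , requiredAs :: String
  , fileType :: String
  , lastModificationTime :: String
  } deriving (Show, Generic)

-- Generic decoding works because the field names match the JSON keys.
instance FromJSON DepNode

-- Each tree entry is encoded as a two-element array: [node, children].
data DepEntry =
  DepEntry DepNode [DepEntry]
  deriving (Show)

instance FromJSON DepEntry where
  parseJSON =
    withArray "DepEntry" $ \arr ->
      case toList arr of
        [node, children] -> DepEntry <$> parseJSON node <*> parseJSON children
        _ -> fail "expected a [node, children] pair"

main :: IO ()
main = do
  raw <- BL.readFile "test/fixtures/basics/tmp/deps.json"
  print (eitherDecode raw :: Either String [DepEntry])

Run against the basics fixture, this should print the chain from modules/test.js down through index.coffee and lodash to lodash.js and debug.js; encoding the rose tree as [node, children] pairs keeps the file compact at the cost of a hand-written FromJSON instance.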
--------------------------------------------------------------------------------
/test/fixtures/jetpack.json:
--------------------------------------------------------------------------------
1 | {
2 |   "entry_points" : "app/modules",
3 |   "modules_directories" : [],
4 |   "source_directory" : "app/sources",
5 |   "elm_root_directory" : "app/sources",
6 |   "temp_directory" : "app/tmp",
7 |   "log_directory" : "app/logs",
8 |   "output_js_directory" : "app/js",
9 |   "no_parse": [
10 |     "./node_modules/clipboard/clipboard.js"
11 |   ]
12 | }
13 | 
--------------------------------------------------------------------------------
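
test/fixtures/jetpack.json lists the configuration keys a project can set. As a minimal sketch of reading such a file (not jetpack's actual Config.hs; ConfigSketch, loadConfig and the snake_case field names are assumptions chosen so Aeson's generic decoder matches the JSON keys verbatim), decoding could look like:

{-# LANGUAGE DeriveGeneric #-}

-- Illustrative sketch only: not jetpack's Config.hs. Field names are kept
-- snake_case so the derived FromJSON instance matches the JSON keys as-is.
module ConfigSketch where

import Data.Aeson
import qualified Data.ByteString.Lazy as BL
import GHC.Generics (Generic)

data ConfigSketch = ConfigSketch
  { entry_points :: FilePath
  , modules_directories :: [FilePath]
  , source_directory :: FilePath
  , elm_root_directory :: FilePath
  , temp_directory :: FilePath
  , log_directory :: FilePath
  , output_js_directory :: FilePath
  , no_parse :: [FilePath]
  } deriving (Show, Generic)

instance FromJSON ConfigSketch

-- Decode a jetpack.json; Left carries Aeson's parse error on failure.
loadConfig :: FilePath -> IO (Either String ConfigSketch)
loadConfig path = eitherDecode <$> BL.readFile path

For example, loadConfig "test/fixtures/jetpack.json" should yield a Right value whose no_parse list contains "./node_modules/clipboard/clipboard.js"; switching the record to camelCase field names would only require supplying a fieldLabelModifier via genericParseJSON instead of the derived default.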