├── .gitignore ├── .travis.yml ├── LICENSE ├── README.md ├── Setup.hs ├── TODO.smos ├── app └── Main.hs ├── doc ├── faq.md ├── getting-started.md ├── installation.md ├── language.md ├── pillars.md └── usage.md ├── examples ├── bash.sus ├── complex.sus ├── compressed.sus ├── env.sus ├── nested-example.sus ├── other-file-spark.sus ├── short.sus ├── test.sus ├── two-lines-card.sus └── uncompressed.sus ├── hooks.sus ├── scripts ├── cabal2nix ├── code_health.sh ├── hlint_health.sh ├── indentation.sh ├── install.sh ├── lib.sh ├── pre_commit_test.sh ├── pre_push_test.sh ├── refactor.sh ├── sanity.sh ├── test.sh └── trailing_whitespace_test.sh ├── src ├── Import.hs ├── SuperUserSpark.hs └── SuperUserSpark │ ├── Bake.hs │ ├── Bake │ ├── Internal.hs │ └── Types.hs │ ├── Check.hs │ ├── Check │ ├── Internal.hs │ └── Types.hs │ ├── Compiler.hs │ ├── Compiler │ ├── Internal.hs │ ├── Types.hs │ └── Utils.hs │ ├── Constants.hs │ ├── CoreTypes.hs │ ├── Deployer.hs │ ├── Deployer │ ├── Internal.hs │ └── Types.hs │ ├── Diagnose.hs │ ├── Diagnose │ ├── Internal.hs │ └── Types.hs │ ├── Language │ └── Types.hs │ ├── OptParse.hs │ ├── OptParse │ └── Types.hs │ ├── Parser.hs │ ├── Parser │ ├── Internal.hs │ └── Types.hs │ ├── PreCompiler.hs │ ├── PreCompiler │ └── Types.hs │ └── Utils.hs ├── stack.yaml ├── super-user-spark.cabal ├── test ├── MainTest.hs ├── SuperUserSpark │ ├── Bake │ │ └── Gen.hs │ ├── BakeSpec.hs │ ├── Check │ │ ├── Gen.hs │ │ └── TestUtils.hs │ ├── CheckSpec.hs │ ├── Compiler │ │ ├── Gen.hs │ │ └── TestUtils.hs │ ├── CompilerSpec.hs │ ├── Deployer │ │ └── Gen.hs │ ├── DeployerSpec.hs │ ├── Diagnose │ │ ├── Gen.hs │ │ └── TestUtils.hs │ ├── DiagnoseSpec.hs │ ├── EndToEnd │ │ └── RegressionSpec.hs │ ├── EndToEndSpec.hs │ ├── Language │ │ └── Gen.hs │ ├── OptParse │ │ └── Gen.hs │ ├── Parser │ │ ├── Gen.hs │ │ └── TestUtils.hs │ ├── ParserSpec.hs │ └── PreCompiler │ │ └── Gen.hs ├── TestImport.hs └── TestUtils.hs └── test_resources ├── end-to-end ├── bash.sus └── bash.sus.res ├── exact_compile_test_src ├── alternatives.sus ├── alternatives.sus.res ├── bash.sus ├── bash.sus.res ├── internal_sparkoff.sus ├── internal_sparkoff.sus.res ├── nesting.sus ├── nesting.sus.res ├── sub.sus ├── sub.sus.res └── sub │ ├── subfile.sus │ └── subfile.sus.res ├── hop_test ├── hop1dir │ ├── hop1.sus │ └── hop2dir │ │ ├── hop2.sus │ │ └── hop3dir │ │ └── hop3.sus └── root.sus ├── shouldCompile ├── bash.sus └── complex.sus ├── shouldNotParse ├── empty_file.sus └── missing_implementation.sus └── shouldParse ├── empty_card.sus ├── littered_with_comments.sus ├── short_syntax.sus └── with_quotes.sus /.gitignore: -------------------------------------------------------------------------------- 1 | # binary 2 | spark 3 | .stack-work 4 | stack.yaml.lock 5 | 6 | .HTF/ 7 | 8 | dist 9 | cabal-dev 10 | *.o 11 | *.tix 12 | *.hi 13 | *.chi 14 | *.chs.h 15 | *.dyn_o 16 | *.dyn_hi 17 | .virtualenv 18 | .hpc 19 | .hsenv 20 | .cabal-sandbox/ 21 | cabal.sandbox.config 22 | *.prof 23 | *.aux 24 | *.hp 25 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # Choose a lightweight base image; we provide our own build tools. 2 | language: c 3 | 4 | # Enable caching 5 | sudo: false 6 | 7 | 8 | # GHC depends on GMP. You can add other dependencies here as well. 9 | addons: 10 | apt: 11 | packages: 12 | - libgmp-dev 13 | 14 | 15 | # The different configurations we want to test. 
You could also do things like 16 | # change flags or use --stack-yaml to point to a different file. 17 | env: 18 | - ARGS="" 19 | 20 | matrix: 21 | allow_failures: 22 | - env: ARGS="--resolver nightly" 23 | 24 | # Caching so the next build will be fast too. 25 | cache: 26 | directories: 27 | - $HOME/.stack/ 28 | 29 | before_install: 30 | - mkdir -p ~/.local/bin 31 | - export PATH=~/.local/bin:$PATH 32 | - travis_retry curl -L https://www.stackage.org/stack/linux-x86_64 | tar xz --wildcards --strip-components=1 -C ~/.local/bin '*/stack' 33 | - chmod a+x ~/.local/bin/stack 34 | 35 | # This line does all of the work: installs GHC if necessary, builds the library, 36 | # executables, and test suites, and runs the test suites. --no-terminal works 37 | # around some quirks in Travis's terminal implementation. 38 | script: stack $ARGS --no-terminal --install-ghc test --test-arguments="--seed=42" --haddock 39 | 40 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2015 Tom Sydney Kerckhove 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Super User Spark 2 | [![Build Status](https://travis-ci.org/NorfairKing/super-user-spark.svg?branch=master)](https://travis-ci.org/NorfairKing/super-user-spark) 3 | 4 | A safe way to never worry about your beautifully configured system again. 5 | 6 | ## Example 7 | 8 | If your dotfiles repository looks like this... 9 | 10 | ``` 11 | dotfiles 12 | ├── bashrc 13 | ├── bash_aliases 14 | ├── bash_profile 15 | └── README 16 | ``` 17 | 18 | ... then you can now deploy those dotfiles with this `.sus` file using `spark`! 19 | 20 | ``` super-user-spark 21 | card bash { 22 | into ~ 23 | 24 | .bashrc 25 | .bash_aliases 26 | .bash_profile 27 | } 28 | ``` 29 | 30 | Find out more in the documentation below. 31 | 32 | ## Documentation 33 | Most of the documentation is in the `doc` directory.
34 | 35 | - [Installation](https://github.com/NorfairKing/super-user-spark/blob/master/doc/installation.md) 36 | - [Getting Started](https://github.com/NorfairKing/super-user-spark/blob/master/doc/getting-started.md) 37 | - [Usage](https://github.com/NorfairKing/super-user-spark/blob/master/doc/usage.md) 38 | - [Design](https://github.com/NorfairKing/super-user-spark/blob/master/doc/pillars.md) 39 | - [Language Specifications](https://github.com/NorfairKing/super-user-spark/blob/master/doc/language.md) 40 | - [FAQ](https://github.com/NorfairKing/super-user-spark/blob/master/doc/faq.md) 41 | 42 | ## SUS Depot Examples 43 | If you would like to have your name on this list, just send a pull request. 44 | 45 | - [NorfairKing](https://github.com/NorfairKing/sus-depot) 46 | - [plietar](https://github.com/plietar/dotfiles) 47 | - [mkirsche](https://github.com/mkirsche/sus-depot) 48 | - [badi](https://github.com/badi/dotfiles/blob/master/deploy.sus) 49 | - [tilal6991](https://github.com/tilal6991/.dotfiles) 50 | 51 | ## Contributing 52 | Before contributing, make sure you have installed the pre-commit tests: 53 | 54 | ``` 55 | spark deploy hooks.sus 56 | ``` 57 | 58 | ## Found a problem? 59 | 60 | Raise an issue or, even better, open a pull request with a failing test! -------------------------------------------------------------------------------- /Setup.hs: -------------------------------------------------------------------------------- 1 | import Distribution.Simple 2 | main = defaultMain 3 | -------------------------------------------------------------------------------- /TODO.smos: -------------------------------------------------------------------------------- 1 | - entry: Features 2 | forest: 3 | - header: A good story about storing secrets 4 | contents: The pipe deployment idea may be useful already there. 5 | - Refactor the parsing of 'Maybe DeploymentKind' to a separate type for that 6 | - Implement completions for the relevant arguments 7 | - Parse should be able to output the AST, and compile should be able to accept it. 8 | - Show all files in the depot that are not configured to be deployed 9 | - entry: Bugs 10 | forest: 11 | - It says deployment is possible in cases that deployment is not possible 12 | - the replace flag has a capital D 13 | - Figure out ahead of time that things aren't going to work if there are conflicting 14 | destinations 15 | - ! 'removeLink: does not exist bug' 16 | -------------------------------------------------------------------------------- /app/Main.hs: -------------------------------------------------------------------------------- 1 | module Main (main) where 2 | 3 | import SuperUserSpark 4 | 5 | main :: IO () 6 | main = spark 7 | -------------------------------------------------------------------------------- /doc/faq.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Frequently Asked Questions 3 | --- 4 | 5 | - When I enter `spark parse some-file`, I get the following error. What does it mean? 6 | ``` 7 | "some-file" (line ..., column ...): 8 | unexpected end of input 9 | expecting Comment 10 | ``` 11 | 12 | The first thing `spark` does when parsing a file is remove all the comments. 13 | If your file is so badly formatted that even this goes wrong, you're probably trying to parse the wrong file. 14 | 15 | - When I try to use the variable `HOST`, spark cannot resolve it even though it is there when I enter `echo $HOST`. 16 | 17 | This means the variable `HOST` has not been exported.
18 | Bash does not do this automatically. 19 | You can export `HOST` manually with `export HOST`. 20 | -------------------------------------------------------------------------------- /doc/getting-started.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Getting Started 3 | --- 4 | 5 | - [Getting started](http://cs-syd.eu/posts/2015-09-27-super-user-spark-getting-started) 6 | - [Outof and Alternatives](http://cs-syd.eu/posts/2015-10-04-super-user-spark-outof-and-alternatives) 7 | - [Blocks and Cards](http://cs-syd.eu/posts/2015-10-11-super-user-spark-blocks-and-cards) -------------------------------------------------------------------------------- /doc/installation.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Installation 3 | --- 4 | 5 | `super-user-spark` is on hackage! 6 | 7 | ## With Cabal 8 | 9 | ``` 10 | $ cabal install super-user-spark 11 | ``` 12 | 13 | ## With Stack 14 | 15 | ``` 16 | $ stack install super-user-spark 17 | ``` -------------------------------------------------------------------------------- /doc/language.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Language Specification 3 | --- 4 | 5 | ## The language 6 | ### Cards 7 | A card is a unit of control within spark. 8 | A card has a name and contains a number of statements. 9 | Cards should be declared inside a file with a `.sus` extension. 10 | 11 | ``` 12 | card <name> { 13 | <declarations> 14 | } 15 | ``` 16 | 17 | #### References 18 | A card can be referenced: 19 | 20 | - By name. 21 | 22 | ``` 23 | card <name> 24 | ``` 25 | 26 | - By file. 27 | 28 | ``` 29 | file <file> <name> 30 | ``` 31 | 32 | The `<name>` argument is optional; if it is not given, the reference leads to the first card in the file. 33 | 34 | ### Declarations 35 | #### File deployment 36 | This is the main operation. 37 | 38 | ``` 39 | <source> -> <destination> 40 | ``` 41 | 42 | This will deploy `<source>` to `<destination>`. 43 | 44 | There is also a shorthand syntax: 45 | 46 | ``` 47 | <name> 48 | ``` 49 | 50 | This deploys `<name>` to `<name>`. 51 | It works very well if the name of the source and destination files are the same. 52 | 53 | Because spark is mainly used to deploy *dot*files, there is an implicit shorthand if the destination starts with a dot: 54 | 55 | ``` 56 | .<name> 57 | ``` 58 | 59 | Along with the regular `.<name>` to `.<name>` deployment, this will result in an implicit alternative `<name>` to `.<name>` deployment if the first one doesn't work out. 60 | 61 | ##### Deployment Kind 62 | There are two deployment kinds but three ways of declaring a deployment. 63 | 64 | - Link: `l->` 65 | - Copy: `c->` 66 | - Unspecified: `->` 67 | 68 | An unspecified deployment defaults to a link, but can be specified otherwise both at the command line and in the card. 69 | 70 | #### Sparkoff 71 | A sparkoff is used to run spark on other cards from within a card. 72 | 73 | ``` 74 | spark <card reference> 75 | ``` 76 | 77 | If the referenced card is in a remote repository, spark will fetch the repository within the directory that spark is run in. 78 | 79 | #### Into directory 80 | After an `into` declaration, all deployment destinations will be prefixed by the argument path. 81 | 82 | ``` 83 | into <directory> 84 | ``` 85 | 86 | Successive `into` declarations will append to previous `into` declarations' directories (interspersed with `/`). 87 | 88 | #### Out of directory 89 | After an `outof` declaration, all deployment sources will be prefixed by the argument path.
90 | 91 | ``` 92 | outof <directory> 93 | ``` 94 | 95 | Successive `outof` declarations will append to previous `outof` declarations' directories (interspersed with `/`). 96 | 97 | #### Deployment Kind Override 98 | What happens to unspecified deployments can be specified using a deployment kind override declaration: 99 | 100 | ``` 101 | kind copy 102 | ``` 103 | 104 | or 105 | 106 | ``` 107 | kind link 108 | ``` 109 | 110 | #### Alternatives 111 | Alternative source directories can be specified. 112 | 113 | ``` 114 | alternatives <primary> <secondary> ... 115 | ``` 116 | 117 | Spark will look for the source in the primary directory, then the secondary, etc. 118 | This path is further prepended to the `outof` directory. 119 | 120 | ### Block 121 | 122 | A block is a part of the code between braces `{`, `}`. 123 | It is used to scope declarations: 124 | 125 | ``` 126 | { 127 | into a 128 | b 129 | } 130 | c 131 | ``` 132 | 133 | ... will result in ... 134 | 135 | ``` 136 | b -> a/b 137 | c -> c 138 | ``` 139 | 140 | 141 | ### Comments 142 | Line comments start with `#` and block comments are surrounded by `[[` and `]]`. -------------------------------------------------------------------------------- /doc/pillars.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: The general idea 3 | --- 4 | 5 | Configuring a system is hard work. 6 | You don't want to do it all over again when you reinstall a system. 7 | Moreover, you want to synchronise config files across systems. 8 | 9 | 10 | ## Design ideas 11 | ### Usage 12 | The most-used options should require the fewest characters to type. 13 | 14 | ### Spark cards 15 | Spark is configured using spark cards. 16 | These cards are written using a declarative language that describes the entire deployment. 17 | 18 | ### Configuration 19 | Everything about Spark is configurable. 20 | Every option can be controlled directly on the command-line *and* in the cards. 21 | You should be able to set up your cards once and deploy them many times. 22 | 23 | ### Grammar 24 | The grammar should be simple to use when you don't need many options, but clear when you do. 25 | -------------------------------------------------------------------------------- /doc/usage.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: Usage 3 | abbreviations: NYI="Not Yet Implemented" 4 | --- 5 | 6 | ## Commands 7 | 8 | ### Syntax checking 9 | A `.sus` file's syntax can be checked by entering the following command: 10 | 11 | ``` 12 | $ spark parse path/to/card.sus 13 | ``` 14 | 15 | `spark` will exit... 16 | 17 | - ... with exit code 0 to show that the file is syntactically correct. 18 | - ... with a non-zero exit code to show that the file contains a syntax error and give some indication as to where the error is. 19 | 20 | `spark` will try its best to present you with a good indication of what went wrong but as with all parsers, errors can still seem cryptic. 21 | 22 | Note that the parser will not follow `spark` declarations in the file. It's just a parser. 23 | 24 | ### Compiling 25 | Compile a spark card. 26 | This, unlike deployment, happens independently of the system that `spark` is being run on. 27 | 28 | ``` 29 | $ spark compile path/to/card.sus 30 | $ spark compile "path/to/card.sus card-name" 31 | ``` 32 | 33 | Note that the quotes are required if you specify a card name. 34 | This will compile the spark card to a list of deployments.
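For example, to compile the `bash` card from a hypothetical `dotfiles.sus` file and write the result (a JSON list of deployments, printed to stdout by default) to a file:

```
$ spark compile "dotfiles.sus bash" --output bash.dep
```

The file names here are only illustrative; any card reference of the forms shown above works.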
35 | 36 | #### Compiling options 37 | 38 | - `--output FILE`: output to `FILE` instead of stdout. 39 | - `--kind KIND`: Specify any unspecified deployments (`->`) to be `KIND` deployments (`c->` or `l->`). (Options for `KIND`: `copy`, `link`) 40 | - `--override KIND`: Override _all_ deployments to be `KIND` deployments. (Options for `KIND`: `copy`, `link`) 41 | 42 | 43 | ### Bake 44 | 45 | Bake a spark card. 46 | This turns raw compiled deployments into so-called baked deployments. 47 | 48 | ``` 49 | $ spark bake path/to/card.sus 50 | $ spark bake "path/to/card.sus card-name" 51 | ``` 52 | 53 | ### Diagnose 54 | 55 | Diagnose the current state of the system. 56 | This outputs the raw state of the system. 57 | You probably want to use `spark check` instead. 58 | 59 | ``` 60 | $ spark diagnose path/to/card.sus 61 | $ spark diagnose "path/to/card.sus card-name" 62 | ``` 63 | 64 | ### Check 65 | 66 | Assess the current state of the system. 67 | Look at what is done already and what needs to be done for a given card to be deployed. 68 | 69 | You can reference a starting card by file: 70 | 71 | ``` 72 | $ spark check path/to/card.sus 73 | $ spark check "path/to/card.sus card-name" 74 | ``` 75 | 76 | Note that the quotes are required if you specify a card name. 77 | 78 | You can also supply a compiled card: 79 | 80 | ``` 81 | $ spark check path/to/compiled/card 82 | ``` 83 | 84 | Don't use the `.sus` extension for compiled cards or `spark` will interpret them as uncompiled card files. 85 | 86 | ### Deployment 87 | Deployment doesn't require any special commands, just a card reference. 88 | 89 | You can reference a starting card by file: 90 | 91 | ``` 92 | $ spark deploy path/to/card.sus # By file 93 | $ spark deploy "path/to/card.sus card-name" # By file with card name 94 | ``` 95 | 96 | Note that the quotes are required if you specify a card name. 97 | 98 | You can also supply a compiled card: 99 | 100 | ``` 101 | $ spark deploy path/to/compiled/card 102 | ``` 103 | 104 | Don't use the `.sus` extension for compiled cards or `spark` will interpret them as uncompiled card files.
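As a sketch of a complete workflow with hypothetical file names, a card can be compiled once, then checked and deployed from its compiled form:

```
$ spark compile "dotfiles.sus bash" --output bash.dep
$ spark check bash.dep
$ spark deploy bash.dep
```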
105 | 106 | #### Deployment options 107 | 108 | - `--replace-files`: replace existing files at deploy destinations 109 | - `--replace-directories`: replace existing directories at deploy destinations 110 | - `--replace-links`: replace links at deploy destinations 111 | - `--replace-all`: equivalent to `--replace-files --replace-directories --replace-links` 112 | -------------------------------------------------------------------------------- /examples/bash.sus: -------------------------------------------------------------------------------- 1 | card bash { 2 | into ~ 3 | 4 | .bashrc 5 | .bash_login 6 | .bash_aliases 7 | } 8 | -------------------------------------------------------------------------------- /examples/complex.sus: -------------------------------------------------------------------------------- 1 | card main { 2 | spark card configs 3 | spark card poems 4 | } 5 | 6 | card configs { 7 | spark file bash.sus bash 8 | 9 | alternatives super-laptop shared 10 | into ~ 11 | 12 | { 13 | outof zsh 14 | 15 | .zshrc 16 | .zshenv 17 | .zlogin 18 | } 19 | 20 | vim -> .vim 21 | } 22 | 23 | card poems { 24 | outof poems 25 | into poems 26 | { 27 | kind copy 28 | 29 | "A windows file with spaces.txt" -> clean 30 | } 31 | { 32 | kind link 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /examples/compressed.sus: -------------------------------------------------------------------------------- 1 | cardcard-name{into~;outofdirectory;kindcopy;alternativesone two;"file-one"-> file-two;something c->"copied"} 2 | -------------------------------------------------------------------------------- /examples/env.sus: -------------------------------------------------------------------------------- 1 | card env_test { 2 | into $(HOME) 3 | 4 | test_file 5 | } 6 | -------------------------------------------------------------------------------- /examples/nested-example.sus: -------------------------------------------------------------------------------- 1 | card nested { 2 | into ~ 3 | { 4 | outof bash 5 | 6 | .bashrc 7 | .bash_aliases 8 | .bash_login 9 | } 10 | { 11 | outof zsh 12 | 13 | .zshrc 14 | .zlogin 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /examples/other-file-spark.sus: -------------------------------------------------------------------------------- 1 | card "other card spark" { 2 | spark file bash.sus 3 | } 4 | -------------------------------------------------------------------------------- /examples/short.sus: -------------------------------------------------------------------------------- 1 | card short { 2 | into ~ 3 | README.txt 4 | } 5 | -------------------------------------------------------------------------------- /examples/test.sus: -------------------------------------------------------------------------------- 1 | card test { 2 | outof files 3 | into ~ 4 | test.txt c-> test.txt 5 | } 6 | -------------------------------------------------------------------------------- /examples/two-lines-card.sus: -------------------------------------------------------------------------------- 1 | card two { 2 | zshrc -> .zshrc 3 | zlogin -> .zlogin 4 | } 5 | -------------------------------------------------------------------------------- /examples/uncompressed.sus: -------------------------------------------------------------------------------- 1 | card card-name { 2 | into ~ 3 | outof directory 4 | kind copy 5 | alternatives one two 6 | 7 | file-one -> file-two 8 | something c-> copied 9 | } 10 | 
-------------------------------------------------------------------------------- /hooks.sus: -------------------------------------------------------------------------------- 1 | card hooks { 2 | into .git/hooks 3 | outof scripts 4 | pre_commit_test.sh -> pre-commit 5 | pre_push_test.sh -> pre-push 6 | } 7 | -------------------------------------------------------------------------------- /scripts/cabal2nix: -------------------------------------------------------------------------------- 1 | #! /usr/bin/env nix-shell 2 | #! nix-shell -i bash -p cabal2nix 3 | 4 | usage() { 5 | cat <<EOF 14 | Requires that cabal2nix >= 1.10 is installed and that nix-shell is in 15 | PATH. 16 | 17 | Arguments: 18 | 19 | -o OUTPATH Path to the generated Nix-expression. 20 | -s Generate the expression for nix-shell usage. 21 | PACKAGEDIR Location of the directory containing the .cabal file. 22 | EOF 23 | } 24 | 25 | ################################################## input parameters 26 | outpath="" 27 | outpath_default="super-user-spark.nix" 28 | 29 | packagedir="" 30 | packagedir_default="$PWD" 31 | 32 | extra_args="" 33 | 34 | while getopts ":o:sh" flag; do 35 | case $flag in 36 | o) 37 | outpath="$OPTARG" 38 | ;; 39 | s) 40 | extra_args="$extra_args --shell" 41 | outpath_default="shell.nix" 42 | ;; 43 | h) 44 | usage 45 | exit 1 46 | ;; 47 | esac 48 | done 49 | 50 | shift $(expr $OPTIND - 1 ) 51 | packagedir="$1" 52 | 53 | 54 | ################################################## defaults 55 | test -z "$outpath" && outpath="$outpath_default" 56 | test -z "$packagedir" && packagedir="$packagedir_default" 57 | 58 | 59 | ################################################## run 60 | set -x 61 | cabal2nix --jailbreak $extra_args "$packagedir" | tee "$outpath" 62 | -------------------------------------------------------------------------------- /scripts/code_health.sh: -------------------------------------------------------------------------------- 1 | set -e # Abort on error 2 | 3 | ./scripts/trailing_whitespace_test.sh 4 | ./scripts/indentation.sh 5 | # ./scripts/hlint_health.sh 6 | ./scripts/sanity.sh -------------------------------------------------------------------------------- /scripts/hlint_health.sh: -------------------------------------------------------------------------------- 1 | source scripts/lib.sh 2 | h () { 3 | hlint src \ 4 | --ignore "Use hierarchical imports" \ 5 | --ignore "Use camelCase" \ 6 | --ignore "Redundant do" \ 7 | --ignore "Redundant $" 8 | } 9 | check "Hlint" h -------------------------------------------------------------------------------- /scripts/indentation.sh: -------------------------------------------------------------------------------- 1 | source scripts/lib.sh 2 | 3 | indentation () { 4 | local RESULT_FILE="/tmp/line_length" 5 | for f in $(find src -type f -name '*.hs') 6 | do 7 | # Indentation must be 4 spaces (or more), unless it's a 'where', which must be indented 2 spaces 8 | grep -P --line-number '^\s{1,3}(?!where)[^ ].*$' "$f" > "$RESULT_FILE" 9 | if [[ "$?" == "0" ]] 10 | then 11 | print_colored_text RED $f 12 | echo 13 | cat "$RESULT_FILE" 14 | print_colored_text RED "Incorrect whitespace: 4 spaces in general, no tabs. 
2 spaces before 'where'" 15 | echo 16 | return 1 17 | fi 18 | done 19 | } 20 | 21 | check "Indentation" indentation 22 | -------------------------------------------------------------------------------- /scripts/install.sh: -------------------------------------------------------------------------------- 1 | source scripts/lib.sh 2 | check "Install" stack install -------------------------------------------------------------------------------- /scripts/lib.sh: -------------------------------------------------------------------------------- 1 | ESC_SEQ="\x1b[" 2 | COL_RESET=$ESC_SEQ"39;49;00m" 3 | COL_RED=$ESC_SEQ"31;11m" 4 | COL_GREEN=$ESC_SEQ"32;11m" 5 | COL_YELLOW=$ESC_SEQ"33;11m" 6 | COL_BLUE=$ESC_SEQ"34;11m" 7 | COL_MAGENTA=$ESC_SEQ"35;11m" 8 | COL_CYAN=$ESC_SEQ"36;11m" 9 | print_colored_text () { 10 | local color="$1" 11 | local text="$2" 12 | local color_code="COL_$color" 13 | echo -n -e "${!color_code}$text$COL_RESET" 14 | } 15 | 16 | error () { 17 | local text="$1" 18 | print_colored_text RED "ERROR: $text\n" 19 | exit 1 20 | } 21 | 22 | warning () { 23 | local text="$1" 24 | print_colored_text YELLOW "WARNING: $text\n" 25 | } 26 | 27 | good () { 28 | local text="$1" 29 | print_colored_text GREEN "CHECK: $text\n" 30 | } 31 | 32 | check () { 33 | set +x 34 | name="$1" 35 | shift 36 | command="$*" 37 | echo -n "Check: ${name}... " 38 | OUT=/tmp/out.txt 39 | ERR=/tmp/err.txt 40 | $command > $OUT 2> $ERR 41 | if [[ "$?" == "0" ]]; then 42 | print_colored_text GREEN "Success\n" 43 | else 44 | print_colored_text RED "Failure\n" 45 | echo $command 46 | cat $OUT 47 | cat $ERR 48 | set -x 49 | return 1 50 | fi 51 | } 52 | 53 | promptY () { 54 | prompt="$1" 55 | print_colored_text BLUE "$prompt [Y/n] > " 56 | read answer 57 | if [[ "$answer" == "n" ]] 58 | then 59 | return 1 60 | else 61 | return 0 62 | fi 63 | } 64 | 65 | promptN () { 66 | prompt="$1" 67 | print_colored_text BLUE "$prompt [y/N] > " 68 | read answer 69 | if [[ "$answer" == "y" ]] 70 | then 71 | return 0 72 | else 73 | return 1 74 | fi 75 | } 76 | 77 | 78 | 79 | 80 | 81 | 82 | 83 | 84 | 85 | 86 | 87 | 88 | 89 | 90 | 91 | 92 | 93 | -------------------------------------------------------------------------------- /scripts/pre_commit_test.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Abort on error 4 | set -e 5 | 6 | ./scripts/code_health.sh 7 | ./scripts/test.sh 8 | -------------------------------------------------------------------------------- /scripts/pre_push_test.sh: -------------------------------------------------------------------------------- 1 | set -e # abort on error 2 | 3 | ./scripts/code_health.sh 4 | ./scripts/test.sh 5 | ./scripts/install.sh 6 | -------------------------------------------------------------------------------- /scripts/refactor.sh: -------------------------------------------------------------------------------- 1 | source scripts/lib.sh 2 | 3 | from="$1" 4 | to="$2" 5 | 6 | usage () { 7 | echo " 8 | refactor FROM TO 9 | " 10 | } 11 | 12 | if [[ "$from" == "" || "$to" == "" ]] 13 | then 14 | usage 15 | exit 0 16 | fi 17 | 18 | # Check if the string already exists somewhere 19 | grep --color=auto --line-number --word-regexp "$to" --recursive --include \*.hs 20 | if [[ "$?" == "0" ]] 21 | then 22 | warning "Destination already exists" 23 | if ! promptN "Go on?" 24 | then 25 | exit 1 26 | fi 27 | else 28 | good "Destination is not in use yet. 
29 | fi 30 | 31 | grep --color=auto --line-number --word-regexp "$from" --recursive --include \*.hs 32 | if ! promptY "Above are the matches for $from, continue?" 33 | then 34 | exit 1 35 | fi 36 | 37 | 38 | find src -type f -name "*.hs" -exec sed -i "s/\b$from\b/$to/g" {} \; 39 | 40 | if promptY "make a commit out of it?" 41 | then 42 | git add . 43 | git commit -m "Refactoring $from to $to" 44 | fi 45 | -------------------------------------------------------------------------------- /scripts/sanity.sh: -------------------------------------------------------------------------------- 1 | source scripts/lib.sh 2 | bld_ () { 3 | stack clean && stack build --pedantic 4 | } 5 | check "Pedantic checking" bld_ 6 | -------------------------------------------------------------------------------- /scripts/test.sh: -------------------------------------------------------------------------------- 1 | source scripts/lib.sh 2 | check "Test" make -B test 3 | -------------------------------------------------------------------------------- /scripts/trailing_whitespace_test.sh: -------------------------------------------------------------------------------- 1 | source scripts/lib.sh 2 | t () { 3 | TMP=/tmp/files.txt 4 | find src -type f -name "*.hs" -exec egrep -l " +$" {} \; > $TMP 5 | if [[ -s $TMP ]] 6 | then 7 | code="1" 8 | echo "These files contain trailing whitespace, please fix: " 9 | else 10 | code="0" 11 | fi 12 | 13 | while read f 14 | do 15 | echo $f 16 | done < $TMP 17 | 18 | return $code 19 | } 20 | check "Trailing whitespace" t 21 | -------------------------------------------------------------------------------- /src/Import.hs: -------------------------------------------------------------------------------- 1 | module Import 2 | ( module X 3 | ) where 4 | 5 | import Prelude as X hiding (appendFile) 6 | 7 | import Control.Arrow as X 8 | import Data.Maybe as X 9 | import GHC.Generics as X 10 | import System.Exit as X 11 | import Text.Read as X (readEither) 12 | 13 | import Path as X 14 | import Path.IO as X 15 | 16 | import Control.Monad as X 17 | import Control.Monad.Except as X 18 | import Control.Monad.IO.Class as X (MonadIO(..)) 19 | import Control.Monad.Identity as X 20 | import Control.Monad.Reader as X 21 | import Control.Monad.State as X 22 | import Control.Monad.Writer as X 23 | 24 | import Data.Validity as X 25 | import Data.Validity.Path as X () 26 | -------------------------------------------------------------------------------- /src/SuperUserSpark.hs: -------------------------------------------------------------------------------- 1 | module SuperUserSpark 2 | ( spark 3 | ) where 4 | 5 | import Import 6 | 7 | import SuperUserSpark.Bake 8 | import SuperUserSpark.Check 9 | import SuperUserSpark.Compiler 10 | import SuperUserSpark.Deployer 11 | import SuperUserSpark.Diagnose 12 | import SuperUserSpark.OptParse 13 | import SuperUserSpark.Parser 14 | 15 | spark :: IO () 16 | spark = getDispatch >>= dispatch 17 | 18 | dispatch :: Dispatch -> IO () 19 | dispatch (DispatchParse pas) = parseFromArgs pas 20 | dispatch (DispatchCompile cas) = compileFromArgs cas 21 | dispatch (DispatchBake bas) = bakeFromArgs bas 22 | dispatch (DispatchDiagnose bas) = diagnoseFromArgs bas 23 | dispatch (DispatchCheck cas) = checkFromArgs cas 24 | dispatch (DispatchDeploy das) = deployFromArgs das 25 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Bake.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE 
RecordWildCards #-} 2 | 3 | {- 4 | The responsibility of the baker is to turn raw deployments into baked 5 | deployments. This takes care of everything that couldn't happen during 6 | compilation yet. The differences between raw deployments and baked 7 | deployments are: 8 | - Baked deployments only deal with absolute filepaths so as to be 9 | working-directory-independent. 10 | - Baked deployments are aware of the kind of things the checker/deployer 11 | will be operating on (files versus directories). 12 | 13 | The baker is not responsible for checking any existences. 14 | -} 15 | module SuperUserSpark.Bake where 16 | 17 | import Import 18 | 19 | import qualified Data.Aeson.Encode.Pretty as JSON 20 | import qualified Data.ByteString.Lazy.Char8 as LB 21 | import System.Environment (getEnvironment) 22 | import System.FilePath (takeExtension) 23 | 24 | import SuperUserSpark.Bake.Internal 25 | import SuperUserSpark.Bake.Types 26 | import SuperUserSpark.Compiler 27 | import SuperUserSpark.Compiler.Types 28 | import SuperUserSpark.Language.Types 29 | import SuperUserSpark.OptParse.Types 30 | import SuperUserSpark.Utils 31 | 32 | bakeFromArgs :: BakeArgs -> IO () 33 | bakeFromArgs ba = do 34 | errOrAss <- bakeAssignment ba 35 | case errOrAss of 36 | Left be -> die $ unwords ["Failed to build bake assignment:", be] 37 | Right ass -> bake ass 38 | 39 | bakeAssignment :: BakeArgs -> IO (Either String BakeAssignment) 40 | bakeAssignment BakeArgs {..} = do 41 | errOrCardRef <- parseBakeCardReference bakeCardRef 42 | case errOrCardRef of 43 | Left err -> pure $ Left err 44 | Right cardRef -> 45 | BakeAssignment cardRef <$$> deriveBakeSettings cardRef bakeFlags 46 | 47 | parseBakeCardReference :: String -> IO (Either String BakeCardReference) 48 | parseBakeCardReference s = 49 | case words s of 50 | [fp] -> 51 | if takeExtension fp == ".sus" 52 | then BakeCardUncompiled <$$> parseStrongCardFileReference fp 53 | else BakeCardCompiled <$$> resolveFile'Either fp 54 | [f, c] -> 55 | BakeCardUncompiled <$$> 56 | ((\(StrongCardFileReference p _) -> 57 | StrongCardFileReference p (Just $ CardNameReference c)) <$$> 58 | parseStrongCardFileReference f) 59 | _ -> pure $ Left $ unwords ["Could not parse card reference from:", s] 60 | 61 | deriveBakeSettings :: BakeCardReference -> BakeFlags -> IO (Either String BakeSettings) 62 | deriveBakeSettings bcr BakeFlags {..} = 63 | BakeSettings (rootOf bcr) <$$> (Right <$> getEnvironment) <**> 64 | deriveCompileSettings bakeCompileFlags 65 | 66 | rootOf :: BakeCardReference -> Path Abs Dir 67 | rootOf bcr = 68 | parent $ 69 | case bcr of 70 | (BakeCardCompiled fp) -> fp 71 | (BakeCardUncompiled (StrongCardFileReference fp _)) -> fp 72 | 73 | bake :: BakeAssignment -> IO () 74 | bake BakeAssignment {..} = do 75 | errOrDone <- 76 | runReaderT (runExceptT $ bakeByCardRef bakeCardReference) bakeSettings 77 | case errOrDone of 78 | Left err -> die $ formatBakeError err 79 | Right () -> pure () 80 | 81 | formatBakeError :: BakeError -> String 82 | formatBakeError (BakeCompileError ce) = formatCompileError ce 83 | formatBakeError (BakeError s) = unwords ["Bake failed:", s] 84 | 85 | bakeByCardRef :: BakeCardReference -> SparkBaker () 86 | bakeByCardRef bakeCardReference = do 87 | deps <- compileBakeCardRef bakeCardReference 88 | bdeps <- bakeDeployments deps 89 | liftIO . 
LB.putStrLn $ JSON.encodePretty bdeps 90 | 91 | compileBakeCardRef :: BakeCardReference -> SparkBaker [RawDeployment] 92 | compileBakeCardRef (BakeCardCompiled fp) = bakerCompile $ inputCompiled fp 93 | compileBakeCardRef (BakeCardUncompiled bcf) = bakerCompile $ compileJob bcf 94 | 95 | bakerCompile :: ImpureCompiler a -> SparkBaker a 96 | bakerCompile = 97 | withExceptT BakeCompileError . mapExceptT (withReaderT bakeCompileSettings) 98 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Bake/Internal.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE RecordWildCards #-} 2 | 3 | module SuperUserSpark.Bake.Internal where 4 | 5 | import Import 6 | 7 | import Control.Exception (try) 8 | import System.FilePath (isAbsolute, replaceDirectory, takeDirectory) 9 | 10 | import SuperUserSpark.Bake.Types 11 | import SuperUserSpark.Compiler.Types 12 | 13 | bakeDeployments :: [RawDeployment] -> SparkBaker [BakedDeployment] 14 | bakeDeployments = mapM bakeDeployment 15 | 16 | bakeDeployment :: RawDeployment -> SparkBaker BakedDeployment 17 | bakeDeployment Deployment {..} = do 18 | d <- bakeDirections deploymentDirections 19 | pure Deployment {deploymentDirections = d, deploymentKind = deploymentKind} 20 | 21 | bakeDirections :: 22 | DeploymentDirections FilePath -> SparkBaker (DeploymentDirections AbsP) 23 | bakeDirections (Directions srcs dst) = 24 | Directions <$> mapM bakeFilePath srcs <*> bakeFilePath dst 25 | 26 | -- | Bake a single 'FilePath' 27 | -- 28 | -- The result should: 29 | -- 30 | -- * ... not contain any more variables. 31 | -- * ... not contain any reference to the home directory: @~@. 32 | -- * ... be absolute. 33 | bakeFilePath :: FilePath -> SparkBaker AbsP 34 | bakeFilePath fp = do 35 | env <- asks bakeEnvironment 36 | root <- asks bakeRoot 37 | case complete env fp of 38 | Left err -> throwError $ BakeError err 39 | Right cp -> if isAbsolute cp 40 | then case parseAbsFile cp of 41 | Left err -> throwError $ BakeError $ show err 42 | Right af -> pure $ AbsP af 43 | else do 44 | let dir = takeDirectory cp 45 | errOrAp <- 46 | liftIO $ 47 | try $ do 48 | d <- resolveFile root dir 49 | parseAbsFile $ replaceDirectory cp $ toFilePath d 50 | case errOrAp of 51 | Left err -> 52 | throwError $ 53 | BakeError $ show (err :: PathParseException) 54 | Right absp -> pure $ AbsP absp 55 | 56 | type Environment = [(String, String)] 57 | 58 | complete :: Environment -> FilePath -> Either String FilePath 59 | complete env fp = do 60 | let ids = parseId fp 61 | strs <- mapM (replaceId env) ids 62 | return $ concat strs 63 | 64 | parseId :: FilePath -> [ID] 65 | parseId fp = 66 | case fp of 67 | ('~':rest) -> Var "HOME" : go rest 68 | _ -> go fp 69 | where 70 | go :: FilePath -> [ID] 71 | go [] = [] 72 | go ('$':'(':rest) = Var id_ : go next 73 | where 74 | (id_, ')':next) = break (== ')') rest 75 | go (s:ss) = 76 | case go ss of 77 | Plain str:r -> Plain (s : str) : r 78 | r -> Plain [s] : r 79 | 80 | replaceId :: Environment -> ID -> Either String FilePath 81 | replaceId _ (Plain str) = return str 82 | replaceId e (Var str) = 83 | case lookup str e of 84 | Nothing -> 85 | Left $ 86 | unwords ["variable", str, "could not be resolved from environment."] 87 | Just fp -> Right fp 88 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Bake/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE 
OverloadedStrings #-} 2 | {-# LANGUAGE RecordWildCards #-} 3 | {-# LANGUAGE TemplateHaskell #-} 4 | {-# LANGUAGE DeriveGeneric #-} 5 | 6 | module SuperUserSpark.Bake.Types where 7 | 8 | import Import 9 | 10 | import Data.Aeson 11 | 12 | import SuperUserSpark.Compiler.Types 13 | 14 | data BakeAssignment = BakeAssignment 15 | { bakeCardReference :: BakeCardReference 16 | , bakeSettings :: BakeSettings 17 | } deriving (Show, Eq, Generic) 18 | 19 | instance Validity BakeAssignment 20 | 21 | data BakeCardReference 22 | = BakeCardCompiled (Path Abs File) 23 | | BakeCardUncompiled StrongCardFileReference 24 | deriving (Show, Eq, Generic) 25 | 26 | instance Validity BakeCardReference 27 | 28 | data BakeSettings = BakeSettings 29 | { bakeRoot :: Path Abs Dir 30 | , bakeEnvironment :: [(String, String)] 31 | , bakeCompileSettings :: CompileSettings 32 | } deriving (Show, Eq, Generic) 33 | 34 | instance Validity BakeSettings 35 | 36 | defaultBakeSettings :: BakeSettings 37 | defaultBakeSettings = 38 | BakeSettings 39 | { bakeRoot = $(mkAbsDir "/") 40 | , bakeEnvironment = [] 41 | , bakeCompileSettings = defaultCompileSettings 42 | } 43 | 44 | type SparkBaker = ExceptT BakeError (ReaderT BakeSettings IO) 45 | 46 | data BakeError 47 | = BakeCompileError CompileError 48 | | BakeError String 49 | deriving (Show, Eq, Generic) 50 | 51 | instance Validity BakeError 52 | 53 | type BakedDeployment = Deployment AbsP 54 | 55 | -- | An absolute path. 56 | -- 57 | -- This is kept as a 'Path Abs File' to avoid existential quantification, but 58 | -- that is an implementation detail and should not be exposed as functionality. 59 | newtype AbsP = AbsP 60 | { unAbsP :: Path Abs File 61 | } deriving (Show, Eq, Generic) 62 | 63 | instance Validity AbsP 64 | 65 | instance ToJSON AbsP where 66 | toJSON (AbsP p) = toJSON p 67 | 68 | instance FromJSON AbsP where 69 | parseJSON v = AbsP <$> parseJSON v 70 | 71 | toPath :: AbsP -> FilePath 72 | toPath = toFilePath . 
unAbsP 73 | 74 | data ID 75 | = Plain String 76 | | Var String 77 | deriving (Show, Eq, Generic) 78 | 79 | instance Validity ID 80 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Check.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE RecordWildCards #-} 2 | 3 | module SuperUserSpark.Check 4 | ( checkFromArgs 5 | , checkAssignment 6 | , deriveCheckSettings 7 | , check 8 | , formatCheckError 9 | , formatDeploymentChecks 10 | , checkDeployments 11 | ) where 12 | 13 | import Import hiding (check) 14 | 15 | import SuperUserSpark.Bake 16 | import SuperUserSpark.Bake.Internal 17 | import SuperUserSpark.Bake.Types 18 | import SuperUserSpark.Check.Internal 19 | import SuperUserSpark.Check.Types 20 | import SuperUserSpark.Diagnose 21 | import SuperUserSpark.Diagnose.Types 22 | import SuperUserSpark.OptParse.Types 23 | import SuperUserSpark.Utils 24 | 25 | checkFromArgs :: CheckArgs -> IO () 26 | checkFromArgs cas = do 27 | errOrAss <- checkAssignment cas 28 | case errOrAss of 29 | Left err -> die $ unwords ["Failed to build Check assignment:", err] 30 | Right ass -> check ass 31 | 32 | checkAssignment :: CheckArgs -> IO (Either String CheckAssignment) 33 | checkAssignment CheckArgs {..} = do 34 | errOrCardRef <- parseBakeCardReference checkArgCardRef 35 | case errOrCardRef of 36 | Left err -> pure $ Left err 37 | Right cardRef -> 38 | CheckAssignment cardRef <$$> deriveCheckSettings cardRef checkFlags 39 | 40 | deriveCheckSettings :: BakeCardReference 41 | -> CheckFlags 42 | -> IO (Either String CheckSettings) 43 | deriveCheckSettings bcr CheckFlags {..} = 44 | CheckSettings <$$> deriveDiagnoseSettings bcr checkDiagnoseFlags 45 | 46 | check :: CheckAssignment -> IO () 47 | check CheckAssignment {..} = do 48 | errOrDone <- 49 | runReaderT 50 | (runExceptT $ checkByCardRef checkCardReference) 51 | checkSettings 52 | case errOrDone of 53 | Left err -> die $ formatCheckError err 54 | Right () -> pure () 55 | 56 | formatCheckError :: CheckError -> String 57 | formatCheckError (CheckDiagnoseError ce) = formatDiagnoseError ce 58 | formatCheckError (CheckError s) = unwords ["Check failed:", s] 59 | 60 | checkByCardRef :: BakeCardReference -> SparkChecker () 61 | checkByCardRef checkCardReference = do 62 | ddeps <- 63 | checkerDiagnose $ 64 | diagnoserBake 65 | (compileBakeCardRef checkCardReference >>= bakeDeployments) >>= 66 | (liftIO . diagnoseDeployments) 67 | liftIO $ putStrLn $ formatDeploymentChecks $ zip ddeps $ checkDeployments ddeps 68 | 69 | checkerDiagnose :: SparkDiagnoser a -> SparkChecker a 70 | checkerDiagnose = 71 | withExceptT CheckDiagnoseError . 
72 | mapExceptT (withReaderT checkDiagnoseSettings) 73 | 74 | checkDeployments :: [DiagnosedDeployment] -> [DeploymentCheckResult] 75 | checkDeployments = map checkDeployment 76 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Check/Internal.hs: -------------------------------------------------------------------------------- 1 | module SuperUserSpark.Check.Internal where 2 | 3 | import Import 4 | 5 | import Data.Maybe (catMaybes) 6 | 7 | import SuperUserSpark.Bake.Types 8 | import SuperUserSpark.Check.Types 9 | import SuperUserSpark.Compiler.Types 10 | import SuperUserSpark.CoreTypes 11 | import SuperUserSpark.Diagnose.Types 12 | 13 | checkDeployment :: DiagnosedDeployment -> DeploymentCheckResult 14 | checkDeployment (Deployment (Directions [] (D dst _ _)) _) = 15 | ImpossibleDeployment 16 | [unwords ["No source for deployment with destination", toPath dst]] 17 | checkDeployment (Deployment (Directions srcs dst) kind) = 18 | bestResult $ map (\src -> checkSingle src dst kind) srcs 19 | 20 | bestResult :: [CheckResult] -> DeploymentCheckResult 21 | bestResult cs 22 | | all impossible cs = ImpossibleDeployment $ map (\(Impossible s) -> s) cs 23 | | otherwise 24 | -- Will not be empty as per line above 25 | = 26 | case head $ dropWhile impossible cs of 27 | AlreadyDone -> DeploymentDone 28 | Ready i -> ReadyToDeploy i 29 | Dirty s i c -> DirtySituation s i c 30 | Impossible _ -> error "Cannot be the case" 31 | 32 | impossible :: CheckResult -> Bool 33 | impossible (Impossible _) = True 34 | impossible _ = False 35 | 36 | impossibleDeployment :: DeploymentCheckResult -> Bool 37 | impossibleDeployment (ImpossibleDeployment _) = True 38 | impossibleDeployment _ = False 39 | 40 | dirtyDeployment :: DeploymentCheckResult -> Bool 41 | dirtyDeployment DirtySituation{} = True 42 | dirtyDeployment _ = False 43 | 44 | deploymentReadyToDeploy :: DeploymentCheckResult -> Bool 45 | deploymentReadyToDeploy (ReadyToDeploy _) = True 46 | deploymentReadyToDeploy _ = False 47 | 48 | deploymentIsDone :: DeploymentCheckResult -> Bool 49 | deploymentIsDone DeploymentDone = True 50 | deploymentIsDone _ = False 51 | 52 | -- | Check a single (@source@, @destination@, @kind@) triple. 53 | checkSingle :: DiagnosedFp -> DiagnosedFp -> DeploymentKind -> CheckResult 54 | checkSingle (D src srcd srch) (D dst dstd dsth) kind = 55 | let parseBoth cons p = 56 | case (p $ toPath src, p $ toPath dst) of 57 | (Left err1, Left err2) -> 58 | Impossible $ unwords [show err1, show err2] 59 | (Left err, _) -> Impossible $ show err 60 | (_, Left err) -> Impossible $ show err 61 | (Right s, Right d) -> Ready $ cons s d 62 | readyCopyFile = parseBoth CopyFile parseAbsFile 63 | readyCopyDir = parseBoth CopyDir parseAbsDir 64 | readyLinkFile = parseBoth LinkFile parseAbsFile 65 | readyLinkDir = parseBoth LinkDir parseAbsDir 66 | in case (srcd, dstd, kind) of 67 | (IsFile, Nonexistent, CopyDeployment) -> readyCopyFile 68 | (IsFile, Nonexistent, LinkDeployment) -> readyLinkFile 69 | (IsFile, IsFile, LinkDeployment) -> 70 | e 71 | readyLinkFile 72 | [ "Both the source:" 73 | , toPath src 74 | , "and the destination:" 75 | , toPath dst 76 | , "are files for a link deployment." 77 | ] 78 | (IsFile, IsFile, CopyDeployment) -> 79 | if srch == dsth 80 | then AlreadyDone 81 | else e 82 | readyCopyFile 83 | [ "Both the source:" 84 | , toPath src 85 | , "and the destination:" 86 | , toPath dst 87 | , "are files for a copy deployment, but they are not equal." 
88 | ] 89 | (IsFile, IsDirectory, LinkDeployment) -> 90 | e 91 | readyLinkFile 92 | [ "The source: " 93 | , toPath src 94 | , "is a file but the destination:" 95 | , toPath dst 96 | , "is a directory for a link deployment." 97 | ] 98 | (IsFile, IsDirectory, CopyDeployment) -> 99 | e 100 | readyCopyFile 101 | [ "The source: " 102 | , toPath src 103 | , "is a file but the destination:" 104 | , toPath dst 105 | , "is a directory for a copy deployment." 106 | ] 107 | (IsFile, IsLinkTo l, LinkDeployment) -> 108 | if l == src 109 | then AlreadyDone 110 | else e 111 | readyLinkFile 112 | [ "The source:" 113 | , toPath src 114 | , "is a file and the destination:" 115 | , toPath dst 116 | , "is a link for a link deployment but the destination does not point to the source. Instead it points to:" 117 | , toPath l ++ "." 118 | ] 119 | (IsFile, IsLinkTo _, CopyDeployment) -> 120 | e 121 | readyCopyFile 122 | [ "The source:" 123 | , toPath src 124 | , "is a file and the destination:" 125 | , toPath dst 126 | , "is a link for a copy deployment." 127 | ] 128 | (IsDirectory, Nonexistent, LinkDeployment) -> readyLinkDir 129 | (IsDirectory, Nonexistent, CopyDeployment) -> readyCopyDir 130 | (IsDirectory, IsFile, LinkDeployment) -> 131 | e 132 | readyLinkDir 133 | [ "The source:" 134 | , toPath src 135 | , "is a directory and the destination:" 136 | , toPath dst 137 | , "is a file for a link deployment" 138 | ] 139 | (IsDirectory, IsFile, CopyDeployment) -> 140 | e 141 | readyCopyDir 142 | [ "The source:" 143 | , toPath src 144 | , "is a directory and the destination:" 145 | , toPath dst 146 | , "is a file for a copy deployment" 147 | ] 148 | (IsDirectory, IsDirectory, LinkDeployment) -> 149 | e 150 | readyLinkDir 151 | [ "The source:" 152 | , toPath src 153 | , "and the destination:" 154 | , toPath dst 155 | , "are directories for a link deployment." 156 | ] 157 | (IsDirectory, IsDirectory, CopyDeployment) -> 158 | if srch == dsth 159 | then AlreadyDone 160 | else e 161 | readyCopyDir 162 | [ "The source:" 163 | , toPath src 164 | , "and destination:" 165 | , toPath dst 166 | , "are directories for a copy deployment, but they are not equal." 167 | ] 168 | (IsDirectory, IsLinkTo l, LinkDeployment) -> 169 | if l == src 170 | then AlreadyDone 171 | else e 172 | readyLinkDir 173 | [ "The source:" 174 | , toPath src 175 | , "is a directory and the destination:" 176 | , toPath dst 177 | , "is a link for a link deployment but the destination does not point to the source. Instead it points to:" 178 | , toPath l ++ "." 179 | ] 180 | (IsDirectory, IsLinkTo _, CopyDeployment) -> 181 | e 182 | readyCopyDir 183 | [ "The source:" 184 | , toPath src 185 | , "is a directory and the destination:" 186 | , toPath dst 187 | , "is a link for a copy deployment." 188 | ] 189 | (Nonexistent, _, _) -> 190 | i ["The source:", toPath src, "does not exist."] 191 | (IsLinkTo _, _, _) -> i ["The source:", toPath src, "is a link."] 192 | (IsWeird, IsWeird, _) -> 193 | i 194 | [ "Both the source:" 195 | , toPath src 196 | , "and the destination:" 197 | , toPath dst 198 | , "are weird." 199 | ] 200 | (IsWeird, _, _) -> i ["The source:", toPath src, "is weird."] 201 | (_, IsWeird, _) -> i ["The destination:", toPath dst, "is weird."] 202 | where 203 | i = Impossible . 
unlines 204 | e mins s = 205 | case mins of 206 | (Impossible _) -> mins 207 | (Ready ins) -> 208 | case dstd of 209 | IsFile -> Dirty (unlines s) ins $ CleanFile $ unAbsP dst 210 | IsLinkTo _ -> Dirty (unlines s) ins $ CleanLink $ unAbsP dst 211 | IsDirectory -> 212 | case parseAbsDir $ toPath dst of 213 | Left err -> Impossible $ show err -- Should not happen, but just in case. 214 | Right dir -> 215 | Dirty (unlines s) ins $ CleanDirectory dir 216 | _ -> Impossible "should not occur" 217 | _ -> Impossible "should not occur." 218 | 219 | formatDeploymentChecks :: [(DiagnosedDeployment, DeploymentCheckResult)] 220 | -> String 221 | formatDeploymentChecks dss = 222 | if null output 223 | then "Deployment is done already." 224 | else unlines output ++ 225 | if all (impossibleDeployment . snd) dss 226 | then "Deployment is impossible." 227 | else "Deployment is possible." 228 | where 229 | output = mapMaybe formatDeploymentCheck dss 230 | 231 | formatDeploymentCheck :: (DiagnosedDeployment, DeploymentCheckResult) 232 | -> Maybe String 233 | formatDeploymentCheck (_, ReadyToDeploy is) = 234 | Just $ "READY: " ++ formatInstruction is 235 | formatDeploymentCheck (_, DeploymentDone) = Nothing 236 | formatDeploymentCheck (d, ImpossibleDeployment ds) = 237 | Just $ 238 | concat 239 | [ "IMPOSSIBLE: " 240 | , toPath $ 241 | diagnosedFilePath $ directionDestination $ deploymentDirections d 242 | , " cannot be deployed:\n" 243 | , unlines ds 244 | , "\n" 245 | ] 246 | formatDeploymentCheck (d, DirtySituation str is c) = 247 | Just $ 248 | concat 249 | [ "DIRTY: " 250 | , toPath $ 251 | diagnosedFilePath $ directionDestination $ deploymentDirections d 252 | , "\n" 253 | , str 254 | , "planned: " 255 | , formatInstruction is 256 | , "\n" 257 | , "cleanup needed:\n" 258 | , formatCleanupInstruction c 259 | , "\n" 260 | ] 261 | 262 | formatInstruction :: Instruction -> String 263 | formatInstruction (CopyFile from to) = 264 | unwords [toFilePath from, "c->", toFilePath to] 265 | formatInstruction (CopyDir from to) = 266 | unwords [toFilePath from, "c->", toFilePath to] 267 | formatInstruction (LinkFile from to) = 268 | unwords [toFilePath from, "l->", toFilePath to] 269 | formatInstruction (LinkDir from to) = 270 | unwords [toFilePath from, "l->", toFilePath to] 271 | 272 | formatCleanupInstruction :: CleanupInstruction -> String 273 | formatCleanupInstruction (CleanFile fp) = "remove file " ++ toFilePath fp 274 | formatCleanupInstruction (CleanDirectory dir) = 275 | "remove directory " ++ toFilePath dir 276 | formatCleanupInstruction (CleanLink link) = "remove link " ++ toFilePath link 277 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Check/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | 3 | module SuperUserSpark.Check.Types where 4 | 5 | import Import 6 | 7 | import SuperUserSpark.Bake.Types 8 | import SuperUserSpark.Diagnose.Types 9 | 10 | data CheckAssignment = CheckAssignment 11 | { checkCardReference :: BakeCardReference 12 | , checkSettings :: CheckSettings 13 | } deriving (Show, Eq, Generic) 14 | 15 | instance Validity CheckAssignment 16 | 17 | newtype CheckSettings = CheckSettings 18 | { checkDiagnoseSettings :: DiagnoseSettings 19 | } deriving (Show, Eq, Generic) 20 | 21 | instance Validity CheckSettings 22 | 23 | defaultCheckSettings :: CheckSettings 24 | defaultCheckSettings = 25 | CheckSettings {checkDiagnoseSettings = defaultDiagnoseSettings} 26 | 27 | 
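-- | The monad in which checking happens: it can fail with a 'CheckError' and has read access to the 'CheckSettings', on top of 'IO'. 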
type SparkChecker = ExceptT CheckError (ReaderT CheckSettings IO) 28 | 29 | data CheckError 30 | = CheckDiagnoseError DiagnoseError 31 | | CheckError String 32 | deriving (Show, Eq, Generic) 33 | 34 | instance Validity CheckError 35 | 36 | data Instruction 37 | = CopyFile (Path Abs File) 38 | (Path Abs File) 39 | | CopyDir (Path Abs Dir) 40 | (Path Abs Dir) 41 | | LinkFile (Path Abs File) 42 | (Path Abs File) 43 | | LinkDir (Path Abs Dir) 44 | (Path Abs Dir) 45 | deriving (Show, Eq, Generic) 46 | 47 | instance Validity Instruction 48 | 49 | data CleanupInstruction 50 | = CleanFile (Path Abs File) 51 | | CleanDirectory (Path Abs Dir) 52 | | CleanLink (Path Abs File) 53 | deriving (Show, Eq, Generic) 54 | 55 | instance Validity CleanupInstruction 56 | 57 | data DeploymentCheckResult 58 | = DeploymentDone -- ^ Done already 59 | | ReadyToDeploy Instruction -- ^ Immediately possible 60 | | DirtySituation String 61 | Instruction 62 | CleanupInstruction -- ^ Possible after cleanup of destination 63 | | ImpossibleDeployment [String] -- ^ Entirely impossible 64 | deriving (Show, Eq, Generic) 65 | 66 | instance Validity DeploymentCheckResult 67 | 68 | data CheckResult 69 | = AlreadyDone -- ^ Done already 70 | | Ready Instruction -- ^ Immediately possible 71 | | Dirty String 72 | Instruction 73 | CleanupInstruction -- ^ Possible after cleanup 74 | | Impossible String -- ^ Entirely impossible 75 | deriving (Show, Eq, Generic) 76 | 77 | instance Validity CheckResult 78 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Compiler.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE RecordWildCards #-} 2 | {-# LANGUAGE TemplateHaskell #-} 3 | 4 | {- 5 | The Compiler is responsible for transforming an AST into a list of 6 | deployments. A deployment knows about the possible sources, the 7 | destination, and how to deploy a source to a destination. 8 | 9 | Everything that the compiler does needs to be independent of the host 10 | system because compilation could have happened independently of deployment. 
11 | 
12 | As such, raw deployments still contain references to variables such as:
13 | - Environment variables
14 | - The home directory: @~@
15 | -}
16 | module SuperUserSpark.Compiler where
17 | 
18 | import Import hiding ((</>))
19 | 
20 | import Control.Exception (try)
21 | import Data.Aeson (eitherDecode)
22 | import Data.Aeson.Encode.Pretty (encodePretty)
23 | import qualified Data.ByteString.Lazy.Char8 as LB
24 | import Data.List (find)
25 | 
26 | import SuperUserSpark.Compiler.Internal
27 | import SuperUserSpark.Compiler.Types
28 | import SuperUserSpark.CoreTypes
29 | import SuperUserSpark.Language.Types
30 | import SuperUserSpark.OptParse.Types
31 | import SuperUserSpark.Parser
32 | import SuperUserSpark.PreCompiler
33 | import SuperUserSpark.Utils
34 | 
35 | compileFromArgs :: CompileArgs -> IO ()
36 | compileFromArgs ca = do
37 |     errOrAss <- compileAssignment ca
38 |     case errOrAss of
39 |         Left ce -> die $ unwords ["Failed to build compile assignment:", ce]
40 |         Right ass -> compile ass
41 | 
42 | compileAssignment :: CompileArgs -> IO (Either String CompileAssignment)
43 | compileAssignment CompileArgs {..} =
44 |     CompileAssignment <$$> (parseStrongCardFileReference compileArgCardRef) <**>
45 |     (case compileArgOutput of
46 |          Nothing -> pure $ pure Nothing
47 |          Just f -> do
48 |              af <-
49 |                  left (show :: PathParseException -> String) <$>
50 |                  try (resolveFile' f)
51 |              pure $ Just <$> af) <**>
52 |     deriveCompileSettings compileFlags
53 | 
54 | resolveFile'Either :: FilePath -> IO (Either String (Path Abs File))
55 | resolveFile'Either fp = do
56 |     errOrSp <- try $ resolveFile' fp
57 |     pure $ left (show :: PathParseException -> String) $ errOrSp
58 | 
59 | parseStrongCardFileReference :: FilePath
60 |                              -> IO (Either String StrongCardFileReference)
61 | parseStrongCardFileReference fp =
62 |     (\sfp -> StrongCardFileReference sfp Nothing) <$$> resolveFile'Either fp
63 | 
64 | deriveCompileSettings :: CompileFlags -> IO (Either String CompileSettings)
65 | deriveCompileSettings CompileFlags {..} =
66 |     CompileSettings <$$>
67 |     (pure $
68 |      case compileDefaultKind of
69 |          Nothing -> Right LinkDeployment
70 |          Just s -> readEither s) <**>
71 |     (pure $
72 |      case compileKindOverride of
73 |          Nothing -> Right Nothing
74 |          Just s -> Just <$> readEither s)
75 | 
76 | compile :: CompileAssignment -> IO ()
77 | compile CompileAssignment {..} = do
78 |     errOrDone <-
79 |         runReaderT
80 |             (runExceptT $
81 |              compileJob compileCardReference >>= outputCompiled compileOutput)
82 |             compileSettings
83 |     case errOrDone of
84 |         Left ce -> die $ formatCompileError ce
85 |         Right () -> pure ()
86 | 
87 | formatCompileError :: CompileError -> String
88 | formatCompileError (CompileParseError s) = unlines ["Parse failed:", show s]
89 | formatCompileError (PreCompileErrors ss) =
90 |     unlines $ "Precompilation checks failed:" : map show ss
91 | formatCompileError (DuringCompilationError s) =
92 |     unlines ["Compilation failed:", s]
93 | 
94 | decideCardToCompile :: StrongCardFileReference
95 |                     -> [Card]
96 |                     -> Either CompileError Card
97 | decideCardToCompile (StrongCardFileReference fp mcn) scope =
98 |     case mcn of
99 |         Nothing ->
100 |             case scope of
101 |                 [] ->
102 |                     Left $
103 |                     DuringCompilationError $
104 |                     unwords
105 |                         [ "No cards found for compilation in file:"
106 |                         , toFilePath fp
107 |                         ]
108 |                 -- TODO more detailed error here
109 |                 (fst_:_) -> pure fst_
110 |         Just (CardNameReference name) -> do
111 |             case find (\c -> cardName c == name) scope of
112 |                 Nothing ->
113 |                     Left $
114 |                     DuringCompilationError $
115 |                     unwords
["Card", name, "not found for compilation."] -- TODO more detailed error here 116 | Just cu -> return cu 117 | 118 | throwEither :: Either CompileError a -> ImpureCompiler a 119 | throwEither (Left e) = throwError e 120 | throwEither (Right a) = pure a 121 | 122 | injectBase :: Maybe (Path Rel Dir) -> Card -> Card 123 | injectBase Nothing c = c 124 | injectBase (Just base) (Card name s) = 125 | Card name $ Block [OutofDir $ toFilePath base, s] 126 | 127 | compileJob :: StrongCardFileReference -> ImpureCompiler [RawDeployment] 128 | compileJob cr@(StrongCardFileReference root _) = 129 | compileJobWithRoot root Nothing cr 130 | 131 | compileJobWithRoot 132 | :: Path Abs File 133 | -> Maybe (Path Rel Dir) 134 | -> StrongCardFileReference 135 | -> ImpureCompiler [RawDeployment] 136 | compileJobWithRoot root base cfr@(StrongCardFileReference fp _) = do 137 | sf <- compilerParse fp 138 | unit <- throwEither $ decideCardToCompile cfr $ sparkFileCards sf 139 | -- Inject base outofDir 140 | let injected = injectBase base unit 141 | -- Precompile checks 142 | let pces = preCompileChecks injected 143 | when (not . null $ pces) $ throwError $ PreCompileErrors pces 144 | -- Compile this unit 145 | (deps, crfs) <- embedPureCompiler $ compileUnit injected 146 | -- Compile the rest of the units 147 | rcrfs <- mapM (resolveCardReferenceRelativeTo fp) crfs 148 | restDeps <- 149 | fmap concat $ 150 | forM rcrfs $ \rcrf -> 151 | case rcrf of 152 | (StrongCardFile cfr_@(StrongCardFileReference base2 _)) -> do 153 | let (newRoot, newBase) = 154 | case stripDir (parent root) (parent base2) of 155 | Nothing -> (base2, Nothing) 156 | Just d -> (root, Just d) 157 | compileJobWithRoot newRoot newBase cfr_ 158 | (StrongCardName cnr) -> 159 | compileJobWithRoot 160 | root 161 | base 162 | (StrongCardFileReference fp $ Just cnr) 163 | return $ deps ++ restDeps 164 | 165 | resolveCardReferenceRelativeTo :: Path Abs File 166 | -> CardReference 167 | -> ImpureCompiler StrongCardReference 168 | resolveCardReferenceRelativeTo fp (CardFile (CardFileReference cfp mcn)) = do 169 | nfp <- liftIO $ resolveFile (parent fp) cfp 170 | pure $ StrongCardFile $ StrongCardFileReference nfp mcn 171 | resolveCardReferenceRelativeTo _ (CardName cnr) = pure $ StrongCardName cnr 172 | 173 | compilerParse :: Path Abs File -> ImpureCompiler SparkFile 174 | compilerParse fp = do 175 | esf <- liftIO $ parseFile fp 176 | case esf of 177 | Left pe -> throwError $ CompileParseError pe 178 | Right sf_ -> pure sf_ 179 | 180 | embedPureCompiler :: PureCompiler a -> ImpureCompiler a 181 | embedPureCompiler = withExceptT id . mapExceptT (mapReaderT idToIO) 182 | where 183 | idToIO :: Identity a -> IO a 184 | idToIO = return . runIdentity 185 | 186 | outputCompiled :: Maybe (Path Abs File) -> [RawDeployment] -> ImpureCompiler () 187 | outputCompiled out deps = 188 | liftIO $ do 189 | let bs = encodePretty deps 190 | case out of 191 | Nothing -> LB.putStrLn bs 192 | Just fp -> LB.writeFile (toFilePath fp) bs 193 | 194 | inputCompiled :: Path Abs File -> ImpureCompiler [RawDeployment] 195 | inputCompiled fp = do 196 | bs <- liftIO . 
LB.readFile $ toFilePath fp
197 |     case eitherDecode bs of
198 |         Left err ->
199 |             throwError $
200 |             DuringCompilationError $
201 |             "Something went wrong while deserialising json data: " ++ err
202 |         Right ds -> return ds
203 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Compiler/Internal.hs:
--------------------------------------------------------------------------------
1 | module SuperUserSpark.Compiler.Internal where
2 | 
3 | import Import hiding ((</>))
4 | 
5 | import System.FilePath ((</>))
6 | 
7 | import SuperUserSpark.Compiler.Types
8 | import SuperUserSpark.Compiler.Utils
9 | import SuperUserSpark.Language.Types
10 | 
11 | compileUnit :: Card -> PureCompiler ([RawDeployment], [CardReference])
12 | compileUnit card =
13 |     execWriterT $ evalStateT (compileDecs [cardContent card]) initialState
14 | 
15 | compileDecs :: [Declaration] -> InternalCompiler ()
16 | compileDecs = mapM_ compileDec
17 | 
18 | compileDec :: Declaration -> InternalCompiler ()
19 | compileDec (Deploy src dst kind) = do
20 |     defaultKind <- asks compileDefaultKind
21 |     localOverride <- gets stateDeploymentKindLocalOverride
22 |     superOverride <- asks compileKindOverride
23 |     let resultKind = fromMaybe defaultKind
24 |             $ msum [superOverride, localOverride, kind]
25 |     outof <- gets stateOutofPrefix
26 |     into <- gets stateInto
27 |     let directions =
28 |             Directions
29 |             { directionSources = resolvePrefix $ outof ++ [sources src]
30 |             , directionDestination = into </> dst
31 |             }
32 |     addDeployment $ Deployment directions resultKind
33 | compileDec (SparkOff cr) = addCardRef cr
34 | compileDec (IntoDir dir) = do
35 |     ip <- gets stateInto
36 |     if null ip
37 |         then modify (\s -> s {stateInto = dir})
38 |         else modify (\s -> s {stateInto = ip </> dir})
39 | compileDec (OutofDir dir) = do
40 |     op <- gets stateOutofPrefix
41 |     modify (\s -> s {stateOutofPrefix = op ++ [Literal dir]})
42 | compileDec (DeployKindOverride kind) =
43 |     modify (\s -> s {stateDeploymentKindLocalOverride = Just kind})
44 | compileDec (Block ds) = do
45 |     before <- get
46 |     compileDecs ds
47 |     put before
48 | compileDec (Alternatives ds) = do
49 |     op <- gets stateOutofPrefix
50 |     modify (\s -> s {stateOutofPrefix = op ++ [Alts ds]})
51 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Compiler/Types.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | {-# LANGUAGE DeriveGeneric #-}
3 | 
4 | module SuperUserSpark.Compiler.Types where
5 | 
6 | import Import
7 | 
8 | import Data.Aeson
9 |        (FromJSON(..), ToJSON(..), object, (.:), (.=), withObject)
10 | 
11 | import SuperUserSpark.CoreTypes
12 | import SuperUserSpark.Language.Types
13 | import SuperUserSpark.Parser.Types
14 | import SuperUserSpark.PreCompiler.Types
15 | 
16 | data CompileAssignment = CompileAssignment
17 |     { compileCardReference :: StrongCardFileReference
18 |     , compileOutput :: Maybe (Path Abs File)
19 |     , compileSettings :: CompileSettings
20 |     } deriving (Show, Eq, Generic)
21 | 
22 | instance Validity CompileAssignment
23 | 
24 | data StrongCardFileReference =
25 |     StrongCardFileReference (Path Abs File)
26 |                             (Maybe CardNameReference)
27 |     deriving (Show, Eq, Generic)
28 | 
29 | instance Validity StrongCardFileReference
30 | 
31 | data StrongCardReference
32 |     = StrongCardFile StrongCardFileReference
33 |     | StrongCardName CardNameReference
34 |     deriving (Show, Eq, Generic)
35 | 
36 | instance Validity StrongCardReference
37 | 
38 | 
data CompileSettings = CompileSettings
39 |     { compileDefaultKind :: DeploymentKind
40 |     , compileKindOverride :: Maybe DeploymentKind
41 |     } deriving (Show, Eq, Generic)
42 | 
43 | instance Validity CompileSettings
44 | 
45 | defaultCompileSettings :: CompileSettings
46 | defaultCompileSettings =
47 |     CompileSettings
48 |     {compileDefaultKind = LinkDeployment, compileKindOverride = Nothing}
49 | 
50 | type RawDeployment = Deployment FilePath
51 | 
52 | data Deployment a = Deployment
53 |     { deploymentDirections :: DeploymentDirections a
54 |     , deploymentKind :: DeploymentKind
55 |     } deriving (Show, Eq, Generic)
56 | 
57 | instance Validity a =>
58 |          Validity (Deployment a)
59 | 
60 | instance FromJSON a =>
61 |          FromJSON (Deployment a) where
62 |     parseJSON =
63 |         withObject "Deployment" $ \o ->
64 |             Deployment <$> o .: "directions" <*> o .: "deployment kind"
65 | 
66 | instance ToJSON a =>
67 |          ToJSON (Deployment a) where
68 |     toJSON depl =
69 |         object
70 |             [ "directions" .= deploymentDirections depl
71 |             , "deployment kind" .= deploymentKind depl
72 |             ]
73 | 
74 | instance Functor Deployment where
75 |     fmap f (Deployment dis dk) = Deployment (fmap f dis) dk
76 | 
77 | data DeploymentDirections a = Directions
78 |     { directionSources :: [a]
79 |     , directionDestination :: a
80 |     } deriving (Show, Eq, Generic)
81 | 
82 | instance Validity a =>
83 |          Validity (DeploymentDirections a)
84 | 
85 | instance ToJSON a =>
86 |          ToJSON (DeploymentDirections a) where
87 |     toJSON (Directions srcs dst) =
88 |         object ["sources" .= srcs, "destination" .= dst]
89 | 
90 | instance FromJSON a =>
91 |          FromJSON (DeploymentDirections a) where
92 |     parseJSON =
93 |         withObject "Deployment Directions" $ \o ->
94 |             Directions <$> o .: "sources" <*> o .: "destination"
95 | 
96 | instance Functor DeploymentDirections where
97 |     fmap f (Directions srcs dst) = Directions (map f srcs) (f dst)
98 | 
99 | type CompilerPrefix = [PrefixPart]
100 | 
101 | data PrefixPart
102 |     = Literal String
103 |     | Alts [String]
104 |     deriving (Show, Eq, Generic)
105 | 
106 | instance Validity PrefixPart
107 | 
108 | data CompilerState = CompilerState
109 |     { stateDeploymentKindLocalOverride :: Maybe DeploymentKind
110 |     , stateInto :: Directory
111 |     , stateOutofPrefix :: CompilerPrefix
112 |     } deriving (Show, Eq, Generic)
113 | 
114 | instance Validity CompilerState
115 | 
116 | type ImpureCompiler = ExceptT CompileError (ReaderT CompileSettings IO)
117 | 
118 | type PureCompiler = ExceptT CompileError (ReaderT CompileSettings Identity)
119 | 
120 | type InternalCompiler = StateT CompilerState (WriterT ([RawDeployment], [CardReference]) PureCompiler)
121 | 
122 | data CompileError
123 |     = CompileParseError ParseError
124 |     | PreCompileErrors [PreCompileError]
125 |     | DuringCompilationError String
126 |     deriving (Show, Eq, Generic)
127 | 
128 | instance Validity CompileError
129 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Compiler/Utils.hs:
--------------------------------------------------------------------------------
1 | module SuperUserSpark.Compiler.Utils where
2 | 
3 | import Import hiding ((</>))
4 | 
5 | import SuperUserSpark.Compiler.Types
6 | import SuperUserSpark.Language.Types
7 | import System.FilePath ((</>))
8 | 
9 | initialState :: CompilerState
10 | initialState =
11 |     CompilerState
12 |     { stateDeploymentKindLocalOverride = Nothing
13 |     , stateInto = ""
14 |     , stateOutofPrefix = []
15 |     }
16 | 
17 | addDeployment :: RawDeployment -> InternalCompiler ()
18 | addDeployment d = tell ([d], [])
19 | 
20 | 
addCardRef :: CardReference -> InternalCompiler ()
21 | addCardRef c = tell ([], [c])
22 | 
23 | sources :: FilePath -> PrefixPart
24 | sources fp@('.':f) = Alts [fp, f]
25 | sources fp = Literal fp
26 | 
27 | resolvePrefix :: CompilerPrefix -> [FilePath]
28 | resolvePrefix [] = []
29 | resolvePrefix [Literal s] = [s]
30 | resolvePrefix [Alts ds] = ds
31 | resolvePrefix (Literal s:ps) = do
32 |     rest <- resolvePrefix ps
33 |     return $ s </> rest
34 | resolvePrefix (Alts as:ps) = do
35 |     a <- as
36 |     rest <- resolvePrefix ps
37 |     return $ a </> rest
38 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Constants.hs:
--------------------------------------------------------------------------------
1 | module SuperUserSpark.Constants where
2 | 
3 | import Import
4 | 
5 | keywordCard :: String
6 | keywordCard = "card"
7 | 
8 | keywordSpark :: String
9 | keywordSpark = "spark"
10 | 
11 | keywordFile :: String
12 | keywordFile = "file"
13 | 
14 | keywordInto :: String
15 | keywordInto = "into"
16 | 
17 | keywordOutof :: String
18 | keywordOutof = "outof"
19 | 
20 | keywordKindOverride :: String
21 | keywordKindOverride = "kind"
22 | 
23 | keywordLink :: String
24 | keywordLink = "link"
25 | 
26 | keywordCopy :: String
27 | keywordCopy = "copy"
28 | 
29 | keywordAlternatives :: String
30 | keywordAlternatives = "alternatives"
31 | 
32 | linkKindSymbol :: String
33 | linkKindSymbol = "l->"
34 | 
35 | copyKindSymbol :: String
36 | copyKindSymbol = "c->"
37 | 
38 | unspecifiedKindSymbol :: String
39 | unspecifiedKindSymbol = "->"
40 | 
41 | bracesChars :: [Char]
42 | bracesChars = ['{', '}']
43 | 
44 | linespaceChars :: [Char]
45 | linespaceChars = [' ', '\t']
46 | 
47 | endOfLineChars :: [Char]
48 | endOfLineChars = ['\n', '\r']
49 | 
50 | whitespaceChars :: [Char]
51 | whitespaceChars = linespaceChars ++ endOfLineChars
52 | 
53 | lineDelimiter :: String
54 | lineDelimiter = ";"
55 | 
56 | branchDelimiter :: String
57 | branchDelimiter = ":"
58 | 
59 | quotesChar :: Char
60 | quotesChar = '"'
61 | 
62 | lineCommentStr :: String
63 | lineCommentStr = "#"
64 | 
65 | blockCommentStrs :: (String, String)
66 | blockCommentStrs = ("[[", "]]")
67 | 
68 | sparkExtension :: String
69 | sparkExtension = "sus"
70 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/CoreTypes.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE OverloadedStrings #-}
2 | {-# LANGUAGE DeriveGeneric #-}
3 | 
4 | module SuperUserSpark.CoreTypes where
5 | 
6 | import Import
7 | 
8 | import Control.Monad (mzero)
9 | import Data.Aeson (FromJSON(..), ToJSON(..), Value(..))
10 | 
11 | type Directory = FilePath
12 | 
13 | -- | The kind of a deployment
14 | data DeploymentKind
15 |     = LinkDeployment
16 |     | CopyDeployment
17 |     deriving (Show, Eq, Generic)
18 | 
19 | instance Validity DeploymentKind
20 | 
21 | instance Read DeploymentKind where
22 |     readsPrec _ "link" = [(LinkDeployment, "")]
23 |     readsPrec _ "copy" = [(CopyDeployment, "")]
24 |     readsPrec _ _ = []
25 | 
26 | instance FromJSON DeploymentKind where
27 |     parseJSON (String "link") = return LinkDeployment
28 |     parseJSON (String "copy") = return CopyDeployment
29 |     parseJSON _ = mzero
30 | 
31 | instance ToJSON DeploymentKind where
32 |     toJSON LinkDeployment = String "link"
33 |     toJSON CopyDeployment = String "copy"
34 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Deployer.hs:
-------------------------------------------------------------------------------- 1 | {-# LANGUAGE RecordWildCards #-} 2 | 3 | module SuperUserSpark.Deployer where 4 | 5 | import Import 6 | 7 | import SuperUserSpark.Bake 8 | import SuperUserSpark.Bake.Internal 9 | import SuperUserSpark.Bake.Types 10 | import SuperUserSpark.Check 11 | import SuperUserSpark.Check.Internal 12 | import SuperUserSpark.Check.Types 13 | import SuperUserSpark.Deployer.Internal 14 | import SuperUserSpark.Deployer.Types 15 | import SuperUserSpark.Diagnose 16 | import SuperUserSpark.Diagnose.Types 17 | import SuperUserSpark.OptParse.Types 18 | import SuperUserSpark.Utils 19 | 20 | deployFromArgs :: DeployArgs -> IO () 21 | deployFromArgs das = do 22 | errOrAss <- deployAssignment das 23 | case errOrAss of 24 | Left err -> die $ unwords ["Failed to make Deployment assignment:", err] 25 | Right ass -> deploy ass 26 | 27 | deployAssignment :: DeployArgs -> IO (Either String DeployAssignment) 28 | deployAssignment DeployArgs {..} = do 29 | errOrCardRef <- parseBakeCardReference deployArgCardRef 30 | case errOrCardRef of 31 | Left err -> pure $ Left err 32 | Right cardRef -> 33 | DeployAssignment cardRef <$$> 34 | deriveDeploySettings cardRef deployFlags 35 | 36 | deriveDeploySettings :: BakeCardReference 37 | -> DeployFlags 38 | -> IO (Either String DeploySettings) 39 | deriveDeploySettings bcr DeployFlags {..} = do 40 | ecs <- deriveCheckSettings bcr deployCheckFlags 41 | pure $ do 42 | cs <- ecs 43 | pure 44 | DeploySettings 45 | { deploySetsReplaceLinks = 46 | deployFlagReplaceLinks || deployFlagReplaceAll 47 | , deploySetsReplaceFiles = 48 | deployFlagReplaceFiles || deployFlagReplaceAll 49 | , deploySetsReplaceDirectories = 50 | deployFlagReplaceDirectories || deployFlagReplaceAll 51 | , deployCheckSettings = cs 52 | } 53 | 54 | deploy :: DeployAssignment -> IO () 55 | deploy DeployAssignment {..} = do 56 | errOrDone <- 57 | runReaderT 58 | (runExceptT $ deployByCardRef deployCardReference) 59 | deploySettings 60 | case errOrDone of 61 | Left err -> die $ formatDeployError err 62 | Right () -> pure () 63 | 64 | formatDeployError :: DeployError -> String 65 | formatDeployError (DeployCheckError e) = formatCheckError e 66 | formatDeployError (DeployError s) = unwords ["Deployment failed:", s] 67 | 68 | deployByCardRef :: BakeCardReference -> SparkDeployer () 69 | deployByCardRef dcr = do 70 | deps <- deployerBake $ compileBakeCardRef dcr >>= bakeDeployments 71 | deployAbss deps 72 | 73 | deployerBake :: SparkBaker a -> SparkDeployer a 74 | deployerBake = 75 | withExceptT (DeployCheckError . CheckDiagnoseError . DiagnoseBakeError) . 76 | mapExceptT 77 | (withReaderT $ 78 | diagnoseBakeSettings . checkDiagnoseSettings . deployCheckSettings) 79 | 80 | deployAbss :: [BakedDeployment] -> SparkDeployer () 81 | deployAbss ds = do 82 | stage1 83 | stage2 84 | stage3 85 | where 86 | stage1 = do 87 | ddeps <- liftIO $ diagnoseDeployments ds 88 | let dcrs = checkDeployments ddeps 89 | -- Check for impossible deployments 90 | when (any impossibleDeployment dcrs) $ 91 | err (zip ddeps dcrs) "Deployment is impossible." 92 | -- Clean up the situation 93 | forM_ dcrs $ \d -> do 94 | case d of 95 | DirtySituation _ _ cis -> performClean cis 96 | _ -> return () 97 | stage2 98 | -- Check again 99 | = do 100 | ddeps2 <- liftIO $ diagnoseDeployments ds 101 | let dcrs2 = checkDeployments ddeps2 102 | -- Error if the cleaning is not done now. 
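        -- (Stage 1 only cleans up what the --replace-links, --replace-files
        -- and --replace-directories flags permit, so a dirty destination can
        -- legitimately survive to this point.)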
103 |         when (any (\d -> impossibleDeployment d || dirtyDeployment d) dcrs2) $
104 |             err (zip ddeps2 dcrs2) $
105 |             unlines
106 |                 [ "Situation was not entirely clean after attempted cleanup."
107 |                 , "Maybe you forgot to enable cleanups (--replace-all)?"
108 |                 ]
109 |         -- Perform deployments
110 |         liftIO $
111 |             mapM_ performDeployment $
112 |             map (\(ReadyToDeploy i) -> i) $ filter deploymentReadyToDeploy dcrs2
113 |     stage3
114 |     -- Check one last time.
115 |      = do
116 |         do ddeps3 <- liftIO $ diagnoseDeployments ds
117 |            let dcrsf3 = checkDeployments ddeps3
118 |            when (any (not . deploymentIsDone) dcrsf3) $ do
119 |                err
120 |                    (zip ddeps3 dcrsf3)
121 |                    "Something went wrong during deployment. It's not done yet."
122 |     err :: [(DiagnosedDeployment, DeploymentCheckResult)]
123 |         -> String
124 |         -> SparkDeployer ()
125 |     err dcrs_ text = do
126 |         liftIO $ putStrLn $ formatDeploymentChecks dcrs_
127 |         throwError $ DeployError text
128 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Deployer/Internal.hs:
--------------------------------------------------------------------------------
1 | module SuperUserSpark.Deployer.Internal where
2 | 
3 | import Import
4 | 
5 | import System.FilePath.Posix (dropTrailingPathSeparator)
6 | import System.Posix.Files (createSymbolicLink, removeLink)
7 | 
8 | import SuperUserSpark.Check.Types
9 | import SuperUserSpark.Deployer.Types
10 | import SuperUserSpark.Utils
11 | 
12 | performClean :: CleanupInstruction -> SparkDeployer ()
13 | performClean (CleanFile fp) = incase deploySetsReplaceFiles $ rmFile fp
14 | performClean (CleanDirectory fp) =
15 |     incase deploySetsReplaceDirectories $ rmDir fp
16 | performClean (CleanLink fp) = incase deploySetsReplaceLinks $ unlink fp
17 | 
18 | unlink :: Path Abs File -> SparkDeployer ()
19 | unlink = liftIO . removeLink . dropTrailingPathSeparator . toFilePath
20 | 
21 | rmFile :: Path Abs File -> SparkDeployer ()
22 | rmFile = liftIO . removeFile
23 | 
24 | rmDir :: Path Abs Dir -> SparkDeployer ()
25 | rmDir = liftIO . 
removeDirRecur 26 | 27 | performDeployment :: Instruction -> IO () 28 | performDeployment (CopyFile source destination) = 29 | performCopyFile source destination 30 | performDeployment (CopyDir source destination) = 31 | performCopyDir source destination 32 | performDeployment (LinkFile source destination) = 33 | performLinkFile source destination 34 | performDeployment (LinkDir source destination) = 35 | performLinkDir source destination 36 | 37 | performCopyFile :: Path Abs File -> Path Abs File -> IO () 38 | performCopyFile src dst = do 39 | ensureDir $ parent dst 40 | copyFile src dst 41 | 42 | performCopyDir :: Path Abs Dir -> Path Abs Dir -> IO () 43 | performCopyDir src dst = do 44 | ensureDir $ parent dst 45 | copyDirRecur src dst 46 | 47 | performLinkFile :: Path Abs File -> Path Abs File -> IO () 48 | performLinkFile src dst = do 49 | ensureDir $ parent dst 50 | createSymbolicLink (toFilePath src) (toFilePath dst) 51 | 52 | performLinkDir :: Path Abs Dir -> Path Abs Dir -> IO () 53 | performLinkDir src dst = do 54 | ensureDir $ parent dst 55 | createSymbolicLink 56 | (dropTrailingPathSeparator $ toFilePath src) 57 | (dropTrailingPathSeparator $ toFilePath dst) 58 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Deployer/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | 3 | module SuperUserSpark.Deployer.Types where 4 | 5 | import Import 6 | 7 | import SuperUserSpark.Bake.Types 8 | import SuperUserSpark.Check.Types 9 | import SuperUserSpark.CoreTypes 10 | 11 | data DeployAssignment = DeployAssignment 12 | { deployCardReference :: BakeCardReference 13 | , deploySettings :: DeploySettings 14 | } deriving (Show, Eq, Generic) 15 | 16 | instance Validity DeployAssignment 17 | 18 | data DeploySettings = DeploySettings 19 | { deploySetsReplaceLinks :: Bool 20 | , deploySetsReplaceFiles :: Bool 21 | , deploySetsReplaceDirectories :: Bool 22 | , deployCheckSettings :: CheckSettings 23 | } deriving (Show, Eq, Generic) 24 | 25 | instance Validity DeploySettings 26 | 27 | defaultDeploySettings :: DeploySettings 28 | defaultDeploySettings = 29 | DeploySettings 30 | { deploySetsReplaceLinks = False 31 | , deploySetsReplaceFiles = False 32 | , deploySetsReplaceDirectories = False 33 | , deployCheckSettings = defaultCheckSettings 34 | } 35 | 36 | type SparkDeployer = ExceptT DeployError (ReaderT DeploySettings IO) 37 | 38 | data DeployError 39 | = DeployCheckError CheckError 40 | | DeployError String 41 | deriving (Show, Eq, Generic) 42 | 43 | instance Validity DeployError 44 | 45 | data PreDeployment 46 | = Ready FilePath 47 | FilePath 48 | DeploymentKind 49 | | AlreadyDone 50 | | Error String 51 | deriving (Show, Eq, Generic) 52 | 53 | instance Validity PreDeployment 54 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Diagnose.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE RecordWildCards #-} 2 | 3 | {- 4 | The responsibility of the diagnoser is to turn baked deployments into 5 | diagnosed deployments. This is purely a read-only operation that looks 6 | at the current state of the file system. 
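
For instance, a destination that is currently a symbolic link would be
diagnosed roughly as follows when rendered to JSON (a sketch with made-up
paths; the hash field is omitted for links):

> { "path": "/home/user/.vimrc"
> , "diagnostics": { "kind": "link"
>                  , "link destination": "/home/user/dotfiles/vimrc" }
> }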
7 | -} 8 | module SuperUserSpark.Diagnose 9 | ( diagnoseFromArgs 10 | , diagnoseAssignment 11 | , deriveDiagnoseSettings 12 | , diagnose 13 | , formatDiagnoseError 14 | , diagnoserBake 15 | , diagnoseDeployments 16 | ) where 17 | 18 | import Import 19 | 20 | import qualified Data.Aeson.Encode.Pretty as JSON 21 | import qualified Data.ByteString.Lazy.Char8 as LB 22 | 23 | import SuperUserSpark.Bake 24 | import SuperUserSpark.Bake.Internal 25 | import SuperUserSpark.Bake.Types 26 | import SuperUserSpark.Diagnose.Internal 27 | import SuperUserSpark.Diagnose.Types 28 | import SuperUserSpark.OptParse.Types 29 | import SuperUserSpark.Utils 30 | 31 | diagnoseFromArgs :: DiagnoseArgs -> IO () 32 | diagnoseFromArgs cas = do 33 | errOrAss <- diagnoseAssignment cas 34 | case errOrAss of 35 | Left err -> die $ unwords ["Failed to build Diagnose assignment:", err] 36 | Right ass -> diagnose ass 37 | 38 | diagnoseAssignment :: DiagnoseArgs -> IO (Either String DiagnoseAssignment) 39 | diagnoseAssignment DiagnoseArgs {..} = do 40 | errOrCardRef <- parseBakeCardReference diagnoseArgCardRef 41 | case errOrCardRef of 42 | Left err -> pure $ Left err 43 | Right cardRef -> 44 | DiagnoseAssignment cardRef <$$> 45 | deriveDiagnoseSettings cardRef diagnoseFlags 46 | 47 | deriveDiagnoseSettings :: BakeCardReference 48 | -> DiagnoseFlags 49 | -> IO (Either String DiagnoseSettings) 50 | deriveDiagnoseSettings bcr DiagnoseFlags {..} = 51 | DiagnoseSettings <$$> deriveBakeSettings bcr diagnoseBakeFlags 52 | 53 | diagnose :: DiagnoseAssignment -> IO () 54 | diagnose DiagnoseAssignment {..} = do 55 | errOrDone <- 56 | runReaderT 57 | (runExceptT $ diagnoseByCardRef diagnoseCardReference) 58 | diagnoseSettings 59 | case errOrDone of 60 | Left err -> die $ formatDiagnoseError err 61 | Right () -> pure () 62 | 63 | formatDiagnoseError :: DiagnoseError -> String 64 | formatDiagnoseError (DiagnoseBakeError ce) = formatBakeError ce 65 | formatDiagnoseError (DiagnoseError s) = unwords ["Diagnose failed:", s] 66 | 67 | diagnoseByCardRef :: BakeCardReference -> SparkDiagnoser () 68 | diagnoseByCardRef checkCardReference = do 69 | deps <- 70 | diagnoserBake $ 71 | compileBakeCardRef checkCardReference >>= bakeDeployments 72 | ddeps <- liftIO $ diagnoseDeployments deps 73 | liftIO . LB.putStrLn $ JSON.encodePretty ddeps 74 | 75 | diagnoserBake :: SparkBaker a -> SparkDiagnoser a 76 | diagnoserBake = 77 | withExceptT DiagnoseBakeError . 
78 | mapExceptT (withReaderT diagnoseBakeSettings) 79 | 80 | diagnoseDeployments :: [BakedDeployment] -> IO [DiagnosedDeployment] 81 | diagnoseDeployments = mapM diagnoseDeployment 82 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Diagnose/Internal.hs: -------------------------------------------------------------------------------- 1 | module SuperUserSpark.Diagnose.Internal where 2 | 3 | import Import 4 | 5 | import qualified Data.ByteString as SB 6 | import Data.Hashable 7 | import System.Posix.Files 8 | (getSymbolicLinkStatus, isBlockDevice, isCharacterDevice, 9 | isDirectory, isNamedPipe, isRegularFile, isSocket, isSymbolicLink, 10 | readSymbolicLink) 11 | 12 | import SuperUserSpark.Bake.Types 13 | import SuperUserSpark.Compiler.Types 14 | import SuperUserSpark.Diagnose.Types 15 | 16 | diagnoseDeployment :: BakedDeployment -> IO DiagnosedDeployment 17 | diagnoseDeployment (Deployment bds kind) = do 18 | ddirs <- diagnoseDirs bds 19 | return $ Deployment ddirs kind 20 | 21 | diagnoseDirs :: DeploymentDirections AbsP 22 | -> IO (DeploymentDirections DiagnosedFp) 23 | diagnoseDirs (Directions srcs dst) = 24 | Directions <$> mapM diagnoseAbsP srcs <*> diagnoseAbsP dst 25 | 26 | diagnoseAbsP :: AbsP -> IO DiagnosedFp 27 | diagnoseAbsP fp = do 28 | d <- diagnoseFp fp 29 | hash_ <- hashFilePath fp 30 | return $ D fp d hash_ 31 | 32 | diagnoseFp :: AbsP -> IO Diagnostics 33 | diagnoseFp absp = do 34 | let fp = toPath absp 35 | ms <- forgivingAbsence $ getSymbolicLinkStatus fp 36 | case ms of 37 | Nothing -> pure Nonexistent 38 | Just s | isBlockDevice s || isCharacterDevice s || isSocket s || isNamedPipe s 39 | -> return IsWeird 40 | | isSymbolicLink s -> do 41 | point <- readSymbolicLink fp 42 | -- TODO check what happens with relative links. 43 | apoint <- AbsP <$> parseAbsFile point 44 | return $ IsLinkTo apoint 45 | | otherwise -> pure $ 46 | if isDirectory s 47 | then IsDirectory 48 | else if isRegularFile s 49 | then IsFile 50 | else IsWeird 51 | 52 | -- | Hash a filepath so that two filepaths with the same contents have the same hash 53 | hashFilePath :: AbsP -> IO HashDigest 54 | hashFilePath fp = do 55 | d <- diagnoseFp fp 56 | case d of 57 | IsFile -> hashFile fp 58 | IsDirectory -> hashDirectory fp 59 | IsLinkTo _ -> return $ HashDigest $ hash () 60 | IsWeird -> return $ HashDigest $ hash () 61 | Nonexistent -> return $ HashDigest $ hash () 62 | 63 | hashFile :: AbsP -> IO HashDigest 64 | hashFile fp = HashDigest . hash <$> SB.readFile (toPath fp) 65 | 66 | hashDirectory :: AbsP -> IO HashDigest 67 | hashDirectory fp = do 68 | tdir <- parseAbsDir (toPath fp) 69 | walkDirAccum Nothing writer_ tdir 70 | where 71 | writer_ _ _ files = do 72 | hashes <- mapM (hashFile . 
AbsP) files 73 | pure $ HashDigest $ hash hashes 74 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Diagnose/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE CPP #-} 2 | {-# LANGUAGE DeriveGeneric #-} 3 | {-# LANGUAGE OverloadedStrings #-} 4 | {-# LANGUAGE RecordWildCards #-} 5 | 6 | module SuperUserSpark.Diagnose.Types where 7 | 8 | import Import hiding ((<>)) 9 | 10 | import Data.Aeson 11 | import Data.Hashable 12 | import Text.Printf 13 | 14 | import SuperUserSpark.Bake.Types 15 | import SuperUserSpark.Compiler.Types 16 | 17 | #if __GLASGOW_HASKELL__ < 840 18 | import Data.Semigroup (Semigroup, (<>)) 19 | #endif 20 | 21 | data DiagnoseAssignment = DiagnoseAssignment 22 | { diagnoseCardReference :: BakeCardReference 23 | , diagnoseSettings :: DiagnoseSettings 24 | } deriving (Show, Eq, Generic) 25 | 26 | instance Validity DiagnoseAssignment 27 | 28 | newtype DiagnoseSettings = DiagnoseSettings 29 | { diagnoseBakeSettings :: BakeSettings 30 | } deriving (Show, Eq, Generic) 31 | 32 | instance Validity DiagnoseSettings 33 | 34 | defaultDiagnoseSettings :: DiagnoseSettings 35 | defaultDiagnoseSettings = 36 | DiagnoseSettings {diagnoseBakeSettings = defaultBakeSettings} 37 | 38 | type SparkDiagnoser = ExceptT DiagnoseError (ReaderT DiagnoseSettings IO) 39 | 40 | data DiagnoseError 41 | = DiagnoseBakeError BakeError 42 | | DiagnoseError String 43 | deriving (Show, Eq, Generic) 44 | 45 | instance Validity DiagnoseError 46 | 47 | newtype HashDigest = 48 | HashDigest Int 49 | deriving (Show, Eq, Generic) 50 | 51 | instance Validity HashDigest 52 | 53 | instance Semigroup HashDigest where 54 | HashDigest h1 <> HashDigest h2 = HashDigest $ h1 * 31 + h2 55 | 56 | instance Monoid HashDigest where 57 | mempty = HashDigest (hash ()) 58 | mappend = (<>) 59 | 60 | instance Hashable HashDigest 61 | 62 | instance ToJSON HashDigest where 63 | toJSON (HashDigest i) = toJSON (printf "%016x" i :: String) 64 | 65 | data Diagnostics 66 | = Nonexistent 67 | | IsFile 68 | | IsDirectory 69 | | IsLinkTo AbsP -- Could point to directory too. 
70 | | IsWeird 71 | deriving (Show, Eq, Generic) 72 | 73 | instance Validity Diagnostics 74 | 75 | instance ToJSON Diagnostics where 76 | toJSON Nonexistent = String "nonexistent" 77 | toJSON IsFile = String "file" 78 | toJSON IsDirectory = String "directory" 79 | toJSON (IsLinkTo ap) = 80 | object ["kind" .= String "link", "link destination" .= ap] 81 | toJSON IsWeird = String "weird" 82 | 83 | data DiagnosedFp = D 84 | { diagnosedFilePath :: AbsP 85 | , diagnosedDiagnostics :: Diagnostics 86 | , diagnosedHashDigest :: HashDigest 87 | } deriving (Show, Eq, Generic) 88 | 89 | instance Validity DiagnosedFp 90 | 91 | instance ToJSON DiagnosedFp where 92 | toJSON D {..} = 93 | object $ 94 | ["path" .= diagnosedFilePath, "diagnostics" .= diagnosedDiagnostics] ++ 95 | if diagnosedHashDigest == mempty 96 | then [] 97 | else ["hash" .= diagnosedHashDigest] 98 | 99 | type DiagnosedDeployment = Deployment DiagnosedFp 100 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Language/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | 3 | module SuperUserSpark.Language.Types where 4 | 5 | import Import 6 | 7 | import SuperUserSpark.CoreTypes 8 | 9 | -- * Cards 10 | type CardName = String 11 | 12 | type Source = FilePath 13 | 14 | type Destination = FilePath 15 | 16 | data Card = Card 17 | { cardName :: CardName 18 | , cardContent :: Declaration 19 | } deriving (Show, Eq, Generic) 20 | 21 | instance Validity Card 22 | 23 | -- ** Declarations 24 | -- | A declaration in a card 25 | data Declaration 26 | = SparkOff CardReference -- ^ Spark off another card 27 | | Deploy Source 28 | Destination 29 | (Maybe DeploymentKind) -- ^ Deploy from source to destination 30 | | IntoDir Directory -- ^ Deploy into a directory 31 | | OutofDir Directory -- ^ Deploy outof a directory 32 | | DeployKindOverride DeploymentKind -- ^ Override the deployment kind 33 | | Alternatives [Directory] -- ^ Provide a list of alternative sources 34 | | Block [Declaration] -- ^ A scoped block of declarations 35 | deriving (Show, Eq, Generic) 36 | 37 | instance Validity Declaration 38 | 39 | -- * Card references 40 | -- | Reference a card by name (inside a file) 41 | newtype CardNameReference = 42 | CardNameReference CardName 43 | deriving (Show, Eq, Generic) 44 | 45 | instance Validity CardNameReference 46 | 47 | -- | Reference a card by the file it is in and therein potentially by a name reference 48 | data CardFileReference = 49 | CardFileReference FilePath 50 | (Maybe CardNameReference) 51 | deriving (Show, Eq, Generic) 52 | 53 | instance Validity CardFileReference 54 | 55 | instance Read CardFileReference where 56 | readsPrec _ fp = 57 | case length (words fp) of 58 | 1 -> [(CardFileReference fp Nothing, "")] 59 | 2 -> 60 | let [f, c] = words fp 61 | in [(CardFileReference f (Just $ CardNameReference c), "")] 62 | _ -> [] 63 | 64 | -- | Union card reference 65 | data CardReference 66 | = CardFile CardFileReference 67 | | CardName CardNameReference 68 | deriving (Show, Eq, Generic) 69 | 70 | instance Validity CardReference 71 | 72 | -- * Card files 73 | data SparkFile = SparkFile 74 | { sparkFilePath :: Path Abs File 75 | , sparkFileCards :: [Card] 76 | } deriving (Show, Eq, Generic) 77 | 78 | instance Validity SparkFile 79 | -------------------------------------------------------------------------------- /src/SuperUserSpark/OptParse.hs: 
-------------------------------------------------------------------------------- 1 | module SuperUserSpark.OptParse 2 | ( getDispatch 3 | , Dispatch(..) 4 | ) where 5 | 6 | import Import 7 | 8 | import Options.Applicative.Types 9 | import Options.Applicative 10 | import System.Environment (getArgs) 11 | 12 | import SuperUserSpark.OptParse.Types 13 | 14 | getDispatch :: IO Dispatch 15 | getDispatch = do 16 | args <- getArgs 17 | let result = runOptionsParser args 18 | handleParseResult result 19 | 20 | runOptionsParser :: [String] -> ParserResult Dispatch 21 | runOptionsParser strs = execParserPure prefs_ optionsParser strs 22 | where 23 | prefs_ = 24 | defaultPrefs 25 | { prefMultiSuffix = "" 26 | , prefDisambiguate = True 27 | , prefShowHelpOnError = True 28 | , prefShowHelpOnEmpty = True 29 | } 30 | 31 | optionsParser :: ParserInfo Dispatch 32 | optionsParser = 33 | info 34 | (helper <*> parseDispatch) 35 | (fullDesc <> progDesc "Super User Spark, author: Tom Sydney Kerckhove") 36 | 37 | parseDispatch :: Parser Dispatch 38 | parseDispatch = 39 | hsubparser $ 40 | mconcat 41 | [ command "parse" parseParse 42 | , command "compile" parseCompile 43 | , command "bake" parseBake 44 | , command "diagnose" parseDiagnose 45 | , command "check" parseCheck 46 | , command "deploy" parseDeploy 47 | ] 48 | 49 | parseParse :: ParserInfo Dispatch 50 | parseParse = info parser modifier 51 | where 52 | parser = DispatchParse <$> parseParseArgs 53 | modifier = 54 | fullDesc <> 55 | progDesc "Parse a spark file and check for syntactic errors." 56 | 57 | parseParseArgs :: Parser ParseArgs 58 | parseParseArgs = 59 | ParseArgs <$> 60 | strArgument (mconcat [metavar "FILE", help "the file to parse"]) 61 | 62 | parseCompile :: ParserInfo Dispatch 63 | parseCompile = info parser modifier 64 | where 65 | parser = DispatchCompile <$> parseCompileArgs 66 | modifier = fullDesc <> progDesc "Compile a spark card." 67 | 68 | parseCompileArgs :: Parser CompileArgs 69 | parseCompileArgs = 70 | CompileArgs <$> 71 | strArgument (metavar "CARDREF" <> help "the card file to compile") <*> 72 | option 73 | (Just <$> str) 74 | (mconcat 75 | [ long "output" 76 | , short 'o' 77 | , value Nothing 78 | , metavar "FILE" 79 | , help "The output file for compilation" 80 | ]) <*> 81 | parseCompileFlags 82 | 83 | parseCompileFlags :: Parser CompileFlags 84 | parseCompileFlags = 85 | CompileFlags <$> 86 | option 87 | (Just <$> str) 88 | (mconcat 89 | [ long "kind" 90 | , short 'k' 91 | , value Nothing 92 | , metavar "KIND" 93 | , help 94 | "The kind specification for unspecified deployments (default: link)" 95 | ]) <*> 96 | option 97 | (Just <$> str) 98 | (mconcat 99 | [ long "override" 100 | , short 'O' 101 | , value Nothing 102 | , metavar "KIND" 103 | , help "Override every deployment to be of the given kind" 104 | ]) 105 | 106 | parseBake :: ParserInfo Dispatch 107 | parseBake = info parser modifier 108 | where 109 | parser = DispatchBake <$> parseBakeArgs 110 | modifier = fullDesc <> progDesc "Bake the raw deployment of a spark card." 
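-- A sketch of the resulting command line (the executable is installed as
-- super-user-spark; the card file name below is made up):
--
-- > super-user-spark parse dotfiles.sus
-- > super-user-spark compile dotfiles.sus -o dotfiles.json --kind link
-- > super-user-spark deploy dotfiles.sus --replace-all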
111 | 
112 | parseBakeArgs :: Parser BakeArgs
113 | parseBakeArgs =
114 |     BakeArgs <$> strArgument (metavar "CARDREF" <> help "the card to bake") <*>
115 |     parseBakeFlags
116 | 
117 | parseBakeFlags :: Parser BakeFlags
118 | parseBakeFlags = BakeFlags <$> parseCompileFlags
119 | 
120 | parseDiagnose :: ParserInfo Dispatch
121 | parseDiagnose = info parser modifier
122 |   where
123 |     parser = DispatchDiagnose <$> parseDiagnoseArgs
124 |     modifier =
125 |         fullDesc <> progDesc "Diagnose the baked deployment of a spark card."
126 | 
127 | parseDiagnoseArgs :: Parser DiagnoseArgs
128 | parseDiagnoseArgs =
129 |     DiagnoseArgs <$>
130 |     strArgument (metavar "CARDREF" <> help "the card to diagnose") <*>
131 |     parseDiagnoseFlags
132 | 
133 | parseDiagnoseFlags :: Parser DiagnoseFlags
134 | parseDiagnoseFlags = DiagnoseFlags <$> parseBakeFlags
135 | 
136 | parseCheck :: ParserInfo Dispatch
137 | parseCheck = info parser modifier
138 |   where
139 |     parser = DispatchCheck <$> parseCheckArgs
140 |     modifier = fullDesc <> progDesc "Check the deployment of a spark card."
141 | 
142 | parseCheckArgs :: Parser CheckArgs
143 | parseCheckArgs =
144 |     CheckArgs <$> strArgument (metavar "CARDREF" <> help "the card to check") <*>
145 |     parseCheckFlags
146 | 
147 | parseCheckFlags :: Parser CheckFlags
148 | parseCheckFlags = CheckFlags <$> parseDiagnoseFlags
149 | 
150 | parseDeploy :: ParserInfo Dispatch
151 | parseDeploy = info parser modifier
152 |   where
153 |     parser = DispatchDeploy <$> parseDeployArgs
154 |     modifier = fullDesc <> progDesc "Deploy a spark card."
155 | 
156 | parseDeployArgs :: Parser DeployArgs
157 | parseDeployArgs =
158 |     DeployArgs <$> strArgument (metavar "CARDREF" <> help "the card to deploy") <*>
159 |     parseDeployFlags
160 | 
161 | parseDeployFlags :: Parser DeployFlags
162 | parseDeployFlags =
163 |     DeployFlags <$>
164 |     switch
165 |         (mconcat
166 |              [ long "replace-links"
167 |              , help "Replace links at deploy destinations."
168 |              ]) <*>
169 |     switch
170 |         (mconcat
171 |              [ long "replace-files"
172 |              , help "Replace existing files at deploy destinations."
173 |              ]) <*>
174 |     switch
175 |         (mconcat
176 |              [ long "replace-directories"
177 |              , help "Replace existing directories at deploy destinations."
178 | ]) <*> 179 | switch 180 | (mconcat 181 | [ long "replace-all" 182 | , short 'r' 183 | , help 184 | "Equivalent to --replace-files --replace-directories --replace-links" 185 | ]) <*> 186 | parseCheckFlags 187 | -------------------------------------------------------------------------------- /src/SuperUserSpark/OptParse/Types.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE DeriveGeneric #-} 2 | 3 | module SuperUserSpark.OptParse.Types where 4 | 5 | import Import 6 | 7 | data Dispatch 8 | = DispatchParse ParseArgs 9 | | DispatchCompile CompileArgs 10 | | DispatchBake BakeArgs 11 | | DispatchDiagnose DiagnoseArgs 12 | | DispatchCheck CheckArgs 13 | | DispatchDeploy DeployArgs 14 | deriving (Show, Eq, Generic) 15 | 16 | instance Validity Dispatch 17 | 18 | newtype ParseArgs = ParseArgs 19 | { parseFilePath :: FilePath 20 | } deriving (Show, Eq, Generic) 21 | 22 | instance Validity ParseArgs 23 | 24 | data CompileArgs = CompileArgs 25 | { compileArgCardRef :: String 26 | , compileArgOutput :: Maybe FilePath 27 | , compileFlags :: CompileFlags 28 | } deriving (Show, Eq, Generic) 29 | 30 | instance Validity CompileArgs 31 | 32 | data CompileFlags = CompileFlags 33 | { compileDefaultKind :: Maybe String 34 | , compileKindOverride :: Maybe String 35 | } deriving (Show, Eq, Generic) 36 | 37 | instance Validity CompileFlags 38 | 39 | data BakeArgs = BakeArgs 40 | { bakeCardRef :: String 41 | , bakeFlags :: BakeFlags 42 | } deriving (Show, Eq, Generic) 43 | 44 | instance Validity BakeArgs 45 | 46 | newtype BakeFlags = BakeFlags 47 | { bakeCompileFlags :: CompileFlags 48 | } deriving (Show, Eq, Generic) 49 | 50 | instance Validity BakeFlags 51 | 52 | data DiagnoseArgs = DiagnoseArgs 53 | { diagnoseArgCardRef :: String 54 | , diagnoseFlags :: DiagnoseFlags 55 | } deriving (Show, Eq, Generic) 56 | 57 | instance Validity DiagnoseArgs 58 | 59 | newtype DiagnoseFlags = DiagnoseFlags 60 | { diagnoseBakeFlags :: BakeFlags 61 | } deriving (Show, Eq, Generic) 62 | 63 | instance Validity DiagnoseFlags 64 | 65 | data CheckArgs = CheckArgs 66 | { checkArgCardRef :: String 67 | , checkFlags :: CheckFlags 68 | } deriving (Show, Eq, Generic) 69 | 70 | instance Validity CheckArgs 71 | 72 | newtype CheckFlags = CheckFlags 73 | { checkDiagnoseFlags ::DiagnoseFlags 74 | } deriving (Show, Eq, Generic) 75 | 76 | instance Validity CheckFlags 77 | 78 | data DeployArgs = DeployArgs 79 | { deployArgCardRef :: String 80 | , deployFlags :: DeployFlags 81 | } deriving (Show, Eq, Generic) 82 | 83 | instance Validity DeployArgs 84 | 85 | data DeployFlags = DeployFlags 86 | { deployFlagReplaceLinks :: Bool 87 | , deployFlagReplaceFiles :: Bool 88 | , deployFlagReplaceDirectories :: Bool 89 | , deployFlagReplaceAll :: Bool 90 | , deployCheckFlags :: CheckFlags 91 | } deriving (Show, Eq, Generic) 92 | 93 | instance Validity DeployFlags 94 | -------------------------------------------------------------------------------- /src/SuperUserSpark/Parser.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE RecordWildCards #-} 2 | 3 | {- 4 | The Parser is responsible for transforming a 'String' into an AST. 
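
For instance, a card like

> card vim { into ~ ; .vimrc }

parses to the following AST (a sketch, not verbatim output):

> Card "vim" (Block [IntoDir "~", Deploy ".vimrc" ".vimrc" Nothing])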
5 | -}
6 | module SuperUserSpark.Parser where
7 | 
8 | import Import
9 | 
10 | import Control.Exception (try)
11 | 
12 | import SuperUserSpark.Language.Types
13 | import SuperUserSpark.OptParse.Types
14 | import SuperUserSpark.Parser.Internal
15 | import SuperUserSpark.Parser.Types
16 | import SuperUserSpark.Utils
17 | 
18 | parseFromArgs :: ParseArgs -> IO ()
19 | parseFromArgs pa = do
20 |     errOrAss <- parseAssignment pa
21 |     case errOrAss of
22 |         Left err -> die $ unwords ["Unable to build parse assignment:", err]
23 |         Right ass -> parse ass
24 | 
25 | parseAssignment :: ParseArgs -> IO (Either String ParseAssignment)
26 | parseAssignment ParseArgs {..} =
27 |     ParseAssignment <$$>
28 |     ((left (show :: PathParseException -> String)) <$>
29 |      try (resolveFile' parseFilePath))
30 | 
31 | parse :: ParseAssignment -> IO ()
32 | parse ParseAssignment {..} = do
33 |     errOrFile <- parseFile fileToParse
34 |     case errOrFile of
35 |         Left err -> die $ formatParseError err
36 |         Right _ -> pure ()
37 | 
38 | formatParseError :: ParseError -> String
39 | formatParseError (ParseError pe) = show pe
40 | 
41 | parseFile :: Path Abs File -> IO (Either ParseError SparkFile)
42 | parseFile file = (left ParseError . parseCardFile file) <$> readFile (toFilePath file)
43 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Parser/Internal.hs:
--------------------------------------------------------------------------------
1 | module SuperUserSpark.Parser.Internal where
2 | 
3 | import Import
4 | 
5 | import Data.List (isSuffixOf)
6 | import SuperUserSpark.Constants
7 | import SuperUserSpark.CoreTypes
8 | import SuperUserSpark.Language.Types
9 | import Text.Parsec
10 | import Text.Parsec.String
11 | 
12 | parseCardFile :: Path Abs File -> String -> Either ParseError SparkFile
13 | parseCardFile f s = SparkFile f <$> parseFromSource sparkFile f s
14 | 
15 | parseFromSource :: Parser a -> Path Abs File -> String -> Either ParseError a
16 | parseFromSource parser file = parse parser $ toFilePath file
17 | 
18 | --[ Language ]--
19 | sparkFile :: Parser [Card]
20 | sparkFile = do
21 |     clean <- eatComments
22 |     setInput clean
23 |     resetPosition
24 |     cards
25 | 
26 | cards :: Parser [Card]
27 | cards = card `sepEndBy1` whitespace
28 | 
29 | resetPosition :: Parser ()
30 | resetPosition = do
31 |     pos <- getPosition
32 |     setPosition $ setSourceColumn (setSourceLine pos 1) 1
33 | 
34 | card :: Parser Card
35 | card = do
36 |     whitespace
37 |     void $ string keywordCard
38 |     whitespace
39 |     name <- cardNameP
40 |     whitespace
41 |     b <- block
42 |     whitespace
43 |     return $ Card name b
44 | 
45 | declarations :: Parser [Declaration]
46 | declarations = inLineSpace declaration `sepEndBy` delim
47 | 
48 | declaration :: Parser Declaration
49 | declaration =
50 |     choice $
51 |     map
52 |         try
53 |         [ block
54 |         , alternatives
55 |         , sparkOff
56 |         , intoDir
57 |         , outOfDir
58 |         , deploymentKindOverride
59 |         , deployment
60 |         ]
61 | 
62 | block :: Parser Declaration
63 | block =
64 |     do ds <- inBraces $ inWhiteSpace declarations
65 |        return $ Block ds
66 |     <?> "block"
67 | 
68 | sparkOff :: Parser Declaration
69 | sparkOff =
70 |     do void $ string keywordSpark
71 |        linespace
72 |        ref <- cardReference
73 |        return $ SparkOff ref
74 |     <?> "sparkoff"
75 | 
76 | compilerCardReference :: Parser CardFileReference
77 | compilerCardReference = unprefixedCardFileReference
78 | 
79 | compiledCardReference :: Parser FilePath
80 | compiledCardReference = do
81 |     void $ string "compiled"
82 |     void linespace
83 |     filepath
84 | 
85 | 
cardReference :: Parser CardReference
86 | cardReference = try goName <|> try goFile <?> "card reference"
87 |   where
88 |     goName = CardName <$> cardNameReference
89 |     goFile = CardFile <$> cardFileReference
90 | 
91 | cardNameReference :: Parser CardNameReference
92 | cardNameReference =
93 |     do void $ string keywordCard
94 |        linespace
95 |        name <- cardNameP
96 |        return $ CardNameReference name
97 |     <?> "card name reference"
98 | 
99 | cardNameP :: Parser CardName
100 | cardNameP = identifier <?> "card name"
101 | 
102 | cardFileReference :: Parser CardFileReference
103 | cardFileReference = do
104 |     void $ string keywordFile
105 |     void linespace
106 |     unprefixedCardFileReference
107 | 
108 | unprefixedCardFileReference :: Parser CardFileReference
109 | unprefixedCardFileReference =
110 |     do fp <- filepath
111 |        linespace
112 |        mn <- optionMaybe $ try cardNameP
113 |        return $
114 |            case mn of
115 |                Nothing -> CardFileReference fp Nothing
116 |                Just cn -> CardFileReference fp (Just $ CardNameReference cn)
117 |     <?> "card file reference"
118 | 
119 | intoDir :: Parser Declaration
120 | intoDir =
121 |     do void $ string keywordInto
122 |        linespace
123 |        dir <- directory
124 |        return $ IntoDir dir
125 |     <?> "into directory declaration"
126 | 
127 | outOfDir :: Parser Declaration
128 | outOfDir =
129 |     do void $ string keywordOutof
130 |        linespace
131 |        dir <- directory
132 |        return $ OutofDir dir
133 |     <?> "outof directory declaration"
134 | 
135 | deploymentKindOverride :: Parser Declaration
136 | deploymentKindOverride =
137 |     do void $ string keywordKindOverride
138 |        linespace
139 |        kind <- try copy <|> link
140 |        return $ DeployKindOverride kind
141 |     <?> "deployment kind override"
142 |   where
143 |     copy = string keywordCopy >> return CopyDeployment
144 |     link = string keywordLink >> return LinkDeployment
145 | 
146 | shortDeployment :: Parser Declaration
147 | shortDeployment = do
148 |     source <- try directory <|> filepath
149 |     return $ Deploy source source Nothing
150 | 
151 | longDeployment :: Parser Declaration
152 | longDeployment = do
153 |     source <- filepath
154 |     linespace
155 |     kind <- deploymentKind
156 |     linespace
157 |     dest <- filepath
158 |     return $ Deploy source dest kind
159 | 
160 | deployment :: Parser Declaration
161 | deployment = try longDeployment <|> shortDeployment <?> "deployment"
162 | 
163 | deploymentKind :: Parser (Maybe DeploymentKind)
164 | deploymentKind = try link <|> try copy <|> def <?> "deployment kind"
165 |   where
166 |     link = string linkKindSymbol >> return (Just LinkDeployment)
167 |     copy = string copyKindSymbol >> return (Just CopyDeployment)
168 |     def = string unspecifiedKindSymbol >> return Nothing
169 | 
170 | alternatives :: Parser Declaration
171 | alternatives = do
172 |     void $ string keywordAlternatives
173 |     linespace
174 |     ds <- directory `sepBy1` linespace
175 |     return $ Alternatives ds
176 | 
177 | -- [ FilePaths ]--
178 | filepath :: Parser FilePath
179 | filepath = do
180 |     i <- identifier <?> "Filepath"
181 |     if "/" `isSuffixOf` i
182 |         then unexpected "slash at the end"
183 |         else return i
184 | 
185 | directory :: Parser Directory
186 | directory = filepath <?> "Directory"
187 | 
188 | --[ Comments ]--
189 | comment :: Parser String
190 | comment = try lineComment <|> try blockComment <?> "Comment"
191 | 
192 | lineComment :: Parser String
193 | lineComment =
194 |     (<?> "Line comment") $ do
195 |         void $ try $ string lineCommentStr
196 |         anyChar `manyTill` eol
197 | 
198 | blockComment :: Parser String
199 | blockComment =
200 |     (<?> "Block comment") $ do
201 |         void $ try $ string start
202 |         anyChar 
`manyTill` try (string stop)
203 |   where
204 |     (start, stop) = blockCommentStrs
205 | 
206 | notComment :: Parser String
207 | notComment = manyTill anyChar (lookAhead (void comment <|> eof))
208 | 
209 | eatComments :: Parser String
210 | eatComments = do
211 |     optional comment
212 |     xs <- notComment `sepBy` comment
213 |     optional comment
214 |     let withoutComments = concat xs
215 |     return withoutComments
216 | 
217 | --[ Identifiers ]--
218 | identifier :: Parser String
219 | identifier = try quotedIdentifier <|> plainIdentifier
220 | 
221 | plainIdentifier :: Parser String
222 | plainIdentifier =
223 |     many1 $
224 |     noneOf $ quotesChar : lineDelimiter ++ whitespaceChars ++ bracesChars
225 | 
226 | quotedIdentifier :: Parser String
227 | quotedIdentifier = inQuotes $ many $ noneOf $ quotesChar : endOfLineChars
228 | 
229 | --[ Delimiters ]--
230 | inBraces :: Parser a -> Parser a
231 | inBraces = between (char '{') (char '}')
232 | 
233 | inQuotes :: Parser a -> Parser a
234 | inQuotes = between (char quotesChar) (char quotesChar)
235 | 
236 | delim :: Parser ()
237 | delim = try (void $ string lineDelimiter) <|> go
238 |   where
239 |     go = do
240 |         eol
241 |         whitespace
242 | 
243 | --[ Whitespace ]--
244 | inLineSpace :: Parser a -> Parser a
245 | inLineSpace = between linespace linespace
246 | 
247 | inWhiteSpace :: Parser a -> Parser a
248 | inWhiteSpace = between whitespace whitespace
249 | 
250 | linespace :: Parser ()
251 | linespace = void $ many $ oneOf linespaceChars
252 | 
253 | whitespace :: Parser ()
254 | whitespace = void $ many $ oneOf whitespaceChars
255 | 
256 | eol :: Parser ()
257 | eol = void newline_
258 |   where
259 |     newline_ =
260 |         try (string "\r\n") <|> try (string "\n") <|>
261 |         string "\r" <?> "end of line"
262 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Parser/Types.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE DeriveGeneric #-}
2 | 
3 | module SuperUserSpark.Parser.Types where
4 | 
5 | import Import
6 | 
7 | import qualified Text.Parsec as Parsec
8 | 
9 | newtype ParseAssignment = ParseAssignment
10 |     { fileToParse :: Path Abs File
11 |     } deriving (Show, Eq, Generic)
12 | 
13 | instance Validity ParseAssignment
14 | 
15 | data ParseSettings =
16 |     ParseSettings
17 |     deriving (Show, Eq, Generic)
18 | 
19 | instance Validity ParseSettings
20 | 
21 | newtype ParseError =
22 |     ParseError Parsec.ParseError
23 |     deriving (Show, Eq, Generic)
24 | 
25 | instance Validity ParseError where
26 |     validate = trivialValidation
27 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/PreCompiler.hs:
--------------------------------------------------------------------------------
1 | module SuperUserSpark.PreCompiler where
2 | 
3 | import Import
4 | 
5 | import SuperUserSpark.Language.Types
6 | import SuperUserSpark.PreCompiler.Types
7 | import SuperUserSpark.Utils
8 | 
9 | preCompileChecks :: Card -> [PreCompileError]
10 | preCompileChecks c = runIdentity $ execWriterT $ cleanCard c
11 | 
12 | dirty :: String -> Precompiler ()
13 | dirty s = tell [PreCompileError $ "Precompilation check failed: " ++ s]
14 | 
15 | cleanCard :: Card -> Precompiler ()
16 | cleanCard (Card name d) = do
17 |     cleanCardName name
18 |     cleanDeclaration d
19 | 
20 | cleanDeclaration :: Declaration -> Precompiler ()
21 | cleanDeclaration (Deploy src dst _) = do
22 |     cleanFilePath src
23 |     cleanFilePath dst
24 | cleanDeclaration (SparkOff cr) = cleanCardReference cr
25 | 
cleanDeclaration (IntoDir dir) = cleanFilePath dir
26 | cleanDeclaration (OutofDir dir) = cleanFilePath dir
27 | cleanDeclaration (DeployKindOverride _) = return () -- Nothing can go wrong.
28 | cleanDeclaration (Alternatives fs) = mapM_ cleanFilePath fs
29 | cleanDeclaration (Block ds) = mapM_ cleanDeclaration ds
30 | 
31 | cleanCardReference :: CardReference -> Precompiler ()
32 | cleanCardReference (CardFile cfr) = cleanCardFileReference cfr
33 | cleanCardReference (CardName cnr) = cleanCardNameReference cnr
34 | 
35 | cleanCardFileReference :: CardFileReference -> Precompiler ()
36 | cleanCardFileReference (CardFileReference fp mcnr) = do
37 |     cleanFilePath fp
38 |     case mcnr of
39 |         Nothing -> return ()
40 |         Just cnr -> cleanCardNameReference cnr
41 | 
42 | cleanCardNameReference :: CardNameReference -> Precompiler ()
43 | cleanCardNameReference (CardNameReference cn) = cleanCardName cn
44 | 
45 | cleanCardName :: CardName -> Precompiler ()
46 | cleanCardName n
47 |     | containsNewline n =
48 |         dirty $ "Card name contains newline character(s): " ++ n
49 |     | otherwise = return ()
50 | 
51 | cleanFilePath :: FilePath -> Precompiler ()
52 | cleanFilePath [] = dirty "Empty filepath"
53 | cleanFilePath fp
54 |     | containsNewline fp =
55 |         dirty $ "Filepath contains newline character(s): " ++ fp
56 |     | containsMultipleConsequtiveSlashes fp =
57 |         dirty $ "Filepath contains multiple consecutive slashes: " ++ fp
58 |     | otherwise = return ()
59 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/PreCompiler/Types.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE DeriveGeneric #-}
2 | 
3 | module SuperUserSpark.PreCompiler.Types where
4 | 
5 | import Import
6 | 
7 | newtype PreCompileError =
8 |     PreCompileError String
9 |     deriving (Show, Eq, Generic)
10 | 
11 | instance Validity PreCompileError
12 | 
13 | type Precompiler = WriterT [PreCompileError] Identity
14 | 
--------------------------------------------------------------------------------
/src/SuperUserSpark/Utils.hs:
--------------------------------------------------------------------------------
1 | {-# LANGUAGE FlexibleContexts #-}
2 | {-# LANGUAGE MultiParamTypeClasses #-}
3 | 
4 | module SuperUserSpark.Utils where
5 | 
6 | import Import
7 | 
8 | import Data.List (isInfixOf)
9 | 
10 | incase
11 |     :: MonadReader c m
12 |     => (c -> Bool) -> m () -> m ()
13 | incase bf func = do
14 |     b <- asks bf
15 |     when b func
16 | 
17 | incaseElse
18 |     :: MonadReader c m
19 |     => (c -> Bool) -> m a -> m a -> m a
20 | incaseElse bf funcif funcelse = do
21 |     b <- asks bf
22 |     if b
23 |         then funcif
24 |         else funcelse
25 | 
26 | containsNewline :: String -> Bool
27 | containsNewline f = any (\c -> elem c f) ['\n', '\r']
28 | 
29 | containsMultipleConsequtiveSlashes :: String -> Bool
30 | containsMultipleConsequtiveSlashes = isInfixOf "//"
31 | 
32 | (&&&) :: (a -> Bool) -> (a -> Bool) -> a -> Bool
33 | (&&&) f g = \a -> f a && g a
34 | 
35 | (<$$>) :: (a -> b) -> IO (Either e a) -> IO (Either e b)
36 | (<$$>) f fa = do
37 |     a <- fa
38 |     pure $ f <$> a
39 | 
40 | (<**>) :: IO (Either e (a -> b)) -> IO (Either e a) -> IO (Either e b)
41 | (<**>) fa fb = do
42 |     e1 <- fa
43 |     e2 <- fb
44 |     pure $ e1 <*> e2
45 | 
--------------------------------------------------------------------------------
/stack.yaml:
--------------------------------------------------------------------------------
1 | resolver: lts-15.6
2 | packages:
3 | - '.'
4 | nix: 5 | path: [ "nixpkgs=https://github.com/NixOS/nixpkgs/archive/d0faafe335698e3561f222bb38cf3d765cb0c31a.tar.gz" ] 6 | add-gc-roots: true 7 | pure: true 8 | packages: 9 | - zlib 10 | -------------------------------------------------------------------------------- /super-user-spark.cabal: -------------------------------------------------------------------------------- 1 | name: super-user-spark 2 | version: 0.4.0.4 3 | license: MIT 4 | license-file: LICENSE 5 | description: Configure your dotfile deployment with a DSL. 6 | synopsis: Configure your dotfile deployment with a DSL. 7 | author: Tom Sydney Kerckhove 8 | maintainer: syd.kerckhove@gmail.com 9 | homepage: https://github.com/NorfairKing/super-user-spark 10 | category: System 11 | build-type: Simple 12 | extra-source-files: README.md 13 | cabal-version: >=1.10 14 | 15 | extra-source-files: 16 | test_resources/end-to-end/bash.sus 17 | test_resources/end-to-end/bash.sus.res 18 | test_resources/exact_compile_test_src/alternatives.sus 19 | test_resources/exact_compile_test_src/alternatives.sus.res 20 | test_resources/exact_compile_test_src/bash.sus 21 | test_resources/exact_compile_test_src/bash.sus.res 22 | test_resources/exact_compile_test_src/internal_sparkoff.sus 23 | test_resources/exact_compile_test_src/internal_sparkoff.sus.res 24 | test_resources/exact_compile_test_src/nesting.sus 25 | test_resources/exact_compile_test_src/nesting.sus.res 26 | test_resources/exact_compile_test_src/sub.sus 27 | test_resources/exact_compile_test_src/sub.sus.res 28 | test_resources/exact_compile_test_src/sub/subfile.sus 29 | test_resources/exact_compile_test_src/sub/subfile.sus.res 30 | test_resources/hop_test/hop1dir/hop1.sus 31 | test_resources/hop_test/hop1dir/hop2dir/hop2.sus 32 | test_resources/hop_test/hop1dir/hop2dir/hop3dir/hop3.sus 33 | test_resources/hop_test/root.sus 34 | test_resources/shouldCompile/bash.sus 35 | test_resources/shouldCompile/complex.sus 36 | test_resources/shouldNotParse/empty_file.sus 37 | test_resources/shouldNotParse/missing_implementation.sus 38 | test_resources/shouldParse/empty_card.sus 39 | test_resources/shouldParse/littered_with_comments.sus 40 | test_resources/shouldParse/short_syntax.sus 41 | test_resources/shouldParse/with_quotes.sus 42 | 43 | source-repository head 44 | type: git 45 | location: https://github.com/NorfairKing/super-user-spark 46 | 47 | library 48 | hs-source-dirs: src 49 | exposed-modules: 50 | SuperUserSpark 51 | , SuperUserSpark.Bake 52 | , SuperUserSpark.Bake.Internal 53 | , SuperUserSpark.Bake.Types 54 | , SuperUserSpark.Check 55 | , SuperUserSpark.Check.Internal 56 | , SuperUserSpark.Check.Types 57 | , SuperUserSpark.Compiler 58 | , SuperUserSpark.Compiler.Internal 59 | , SuperUserSpark.Compiler.Types 60 | , SuperUserSpark.Compiler.Utils 61 | , SuperUserSpark.Constants 62 | , SuperUserSpark.CoreTypes 63 | , SuperUserSpark.Deployer 64 | , SuperUserSpark.Deployer.Internal 65 | , SuperUserSpark.Deployer.Types 66 | , SuperUserSpark.Diagnose 67 | , SuperUserSpark.Diagnose.Internal 68 | , SuperUserSpark.Diagnose.Types 69 | , SuperUserSpark.Language.Types 70 | , SuperUserSpark.OptParse 71 | , SuperUserSpark.OptParse.Types 72 | , SuperUserSpark.Parser 73 | , SuperUserSpark.Parser.Internal 74 | , SuperUserSpark.Parser.Types 75 | , SuperUserSpark.PreCompiler 76 | , SuperUserSpark.PreCompiler.Types 77 | , SuperUserSpark.Utils 78 | other-modules: 79 | Import 80 | 81 | ghc-options: 82 | -Wall 83 | -fwarn-unused-imports 84 | -fwarn-incomplete-patterns 85 | -fno-warn-unused-do-bind 86 | 
-fno-warn-name-shadowing 87 | 88 | build-depends: 89 | base >= 4.9 && < 5 90 | , aeson 91 | , aeson-pretty 92 | , bytestring 93 | , directory 94 | , filepath 95 | , mtl 96 | , optparse-applicative 97 | , parsec 98 | , process 99 | , hashable 100 | , text 101 | , unix 102 | , validity >= 0.6.0.0 103 | , validity-path 104 | , path 105 | , path-io 106 | default-language: Haskell2010 107 | default-extensions: NoImplicitPrelude 108 | 109 | executable super-user-spark 110 | main-is: Main.hs 111 | hs-source-dirs: app 112 | ghc-options: -Wall 113 | -fwarn-unused-imports 114 | -fwarn-incomplete-patterns 115 | -fno-warn-unused-do-bind 116 | -fno-warn-name-shadowing 117 | -threaded -rtsopts -with-rtsopts=-N 118 | build-depends: 119 | base >= 4.9 && < 5 120 | , super-user-spark 121 | default-language: Haskell2010 122 | 123 | 124 | test-suite spark-tests 125 | type: exitcode-stdio-1.0 126 | main-is: MainTest.hs 127 | hs-source-dirs: test 128 | other-modules: 129 | SuperUserSpark.BakeSpec 130 | , SuperUserSpark.Bake.Gen 131 | , SuperUserSpark.Check.Gen 132 | , SuperUserSpark.Check.TestUtils 133 | , SuperUserSpark.CheckSpec 134 | , SuperUserSpark.Compiler.Gen 135 | , SuperUserSpark.Compiler.TestUtils 136 | , SuperUserSpark.CompilerSpec 137 | , SuperUserSpark.Deployer.Gen 138 | , SuperUserSpark.DeployerSpec 139 | , SuperUserSpark.Diagnose.Gen 140 | , SuperUserSpark.Diagnose.TestUtils 141 | , SuperUserSpark.DiagnoseSpec 142 | , SuperUserSpark.EndToEnd.RegressionSpec 143 | , SuperUserSpark.EndToEndSpec 144 | , SuperUserSpark.Language.Gen 145 | , SuperUserSpark.OptParse.Gen 146 | , SuperUserSpark.Parser.Gen 147 | , SuperUserSpark.Parser.TestUtils 148 | , SuperUserSpark.ParserSpec 149 | , SuperUserSpark.PreCompiler.Gen 150 | , TestImport 151 | , TestUtils 152 | build-depends: 153 | base >= 4.9 && < 5 154 | , super-user-spark 155 | , hspec 156 | , hspec-core 157 | , QuickCheck 158 | , aeson 159 | , aeson-pretty 160 | , bytestring 161 | , directory 162 | , filepath 163 | , mtl 164 | , optparse-applicative 165 | , parsec 166 | , process 167 | , text 168 | , transformers 169 | , unix 170 | , genvalidity 171 | , genvalidity-path 172 | , genvalidity-hspec 173 | , genvalidity-hspec-aeson 174 | , validity 175 | , validity-path 176 | , path 177 | , path-io 178 | , hashable 179 | default-language: Haskell2010 180 | default-extensions: NoImplicitPrelude 181 | 182 | -------------------------------------------------------------------------------- /test/MainTest.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -F -pgmF hspec-discover #-} 2 | 3 | module MainTest where 4 | 5 | import qualified Spec 6 | import Test.Hspec.Formatters 7 | import Test.Hspec.Runner 8 | 9 | main :: IO () 10 | main = hspecWith defaultConfig {configFormatter = Just progress} Spec.spec 11 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Bake/Gen.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wno-orphans #-} 2 | 3 | module SuperUserSpark.Bake.Gen where 4 | 5 | import TestImport 6 | 7 | import SuperUserSpark.Bake.Types 8 | import SuperUserSpark.Compiler.Gen () 9 | 10 | instance GenUnchecked BakeAssignment 11 | 12 | instance GenValid BakeAssignment where 13 | genValid = genValidStructurallyWithoutExtraChecking 14 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 15 | 16 | instance GenUnchecked BakeCardReference 17 | 18 | instance GenValid BakeCardReference where 19 | genValid 
= genValidStructurallyWithoutExtraChecking 20 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 21 | 22 | instance GenUnchecked BakeSettings 23 | 24 | instance GenValid BakeSettings where 25 | genValid = genValidStructurallyWithoutExtraChecking 26 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 27 | 28 | instance GenUnchecked AbsP 29 | 30 | instance GenValid AbsP where 31 | genValid = genValidStructurallyWithoutExtraChecking 32 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 33 | 34 | instance GenUnchecked ID 35 | 36 | instance GenValid ID where 37 | genValid = genValidStructurallyWithoutExtraChecking 38 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 39 | -------------------------------------------------------------------------------- /test/SuperUserSpark/BakeSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TypeApplications #-} 2 | {-# LANGUAGE TemplateHaskell #-} 3 | 4 | module SuperUserSpark.BakeSpec where 5 | 6 | import TestImport 7 | 8 | import Data.Either (isLeft) 9 | import Data.Maybe (isNothing) 10 | 11 | import qualified System.FilePath as FP 12 | import System.Posix.Files (createSymbolicLink) 13 | 14 | import SuperUserSpark.Bake 15 | import SuperUserSpark.Bake.Gen () 16 | import SuperUserSpark.Bake.Internal 17 | import SuperUserSpark.Bake.Types 18 | import SuperUserSpark.Compiler.Types 19 | import SuperUserSpark.OptParse.Gen () 20 | import SuperUserSpark.Parser.Gen 21 | 22 | spec :: Spec 23 | spec = do 24 | instanceSpec 25 | bakeSpec 26 | 27 | instanceSpec :: Spec 28 | instanceSpec = 29 | parallel $ do 30 | eqSpec @BakeAssignment 31 | genValidSpec @BakeAssignment 32 | eqSpec @BakeCardReference 33 | genValidSpec @BakeCardReference 34 | eqSpec @BakeSettings 35 | genValidSpec @BakeSettings 36 | eqSpec @BakedDeployment 37 | genValidSpec @BakedDeployment 38 | jsonSpecOnValid @BakedDeployment 39 | eqSpec @AbsP 40 | genValidSpec @AbsP 41 | eqSpec @(DeploymentDirections AbsP) 42 | genValidSpec @(DeploymentDirections AbsP) 43 | jsonSpecOnValid @(DeploymentDirections AbsP) 44 | eqSpec @ID 45 | genValidSpec @ID 46 | 47 | bakeSpec :: Spec 48 | bakeSpec = 49 | parallel $ do 50 | describe "bakeFilePath" $ do 51 | it "works for these unit test cases without variables" $ do 52 | let b root fp s = do 53 | absp <- AbsP <$> parseAbsFile s 54 | rp <- parseAbsDir root 55 | runReaderT 56 | (runExceptT (bakeFilePath fp)) 57 | defaultBakeSettings {bakeRoot = rp} `shouldReturn` 58 | Right absp 59 | b "/home/user/hello" "a/b/c" "/home/user/hello/a/b/c" 60 | b "/home/user/hello" "/home/user/.files/c" "/home/user/.files/c" 61 | it "works for a simple home-only variable situation" $ do 62 | forAll genValid $ \root -> do 63 | let b home fp s = do 64 | absp <- AbsP <$> parseAbsFile s 65 | runReaderT 66 | (runExceptT (bakeFilePath fp)) 67 | defaultBakeSettings 68 | { bakeRoot = root 69 | , bakeEnvironment = [("HOME", home)] 70 | } `shouldReturn` 71 | Right absp 72 | b "/home/user" "~/a/b/c" "/home/user/a/b/c" 73 | b "/home" "~/c" "/home/c" 74 | here <- runIO getCurrentDir 75 | let sandbox = here </> $(mkRelDir "test_sandbox") 76 | before_ (ensureDir sandbox) $ 77 | after_ (removeDirRecur sandbox) $ do 78 | it 79 | "does not follow toplevel links when the completed path is relative" $ do 80 | let file = $(mkRelFile "file") 81 | let from = $(mkRelDir "from") </> file 82 | let to = $(mkRelDir "to") </> file 83 | withCurrentDir sandbox $ do 84 | ensureDir $ parent $ sandbox </> from 85 | writeFile (toFilePath $ sandbox </> from)
"contents" 86 | ensureDir $ parent $ sandbox </> to 87 | createSymbolicLink 88 | (toFilePath $ sandbox </> from) 89 | (toFilePath $ sandbox </> to) 90 | let runBake f = 91 | runReaderT 92 | (runExceptT f) 93 | (defaultBakeSettings 94 | { bakeRoot = sandbox 95 | , bakeEnvironment = [] 96 | }) 97 | runBake (bakeFilePath (toFilePath to)) `shouldReturn` 98 | (Right $ AbsP $ sandbox </> to) 99 | runBake (bakeFilePath (toFilePath from)) `shouldReturn` 100 | (Right $ AbsP $ sandbox </> from) 101 | it 102 | "follows directory-level links when the completed path is relative" $ do 103 | let file = $(mkRelFile "file") 104 | let fromdir = $(mkRelDir "from") 105 | let from = fromdir </> file 106 | let todir = $(mkRelDir "to") 107 | let to = todir </> file 108 | withCurrentDir sandbox $ do 109 | ensureDir $ parent $ sandbox </> from 110 | writeFile 111 | (toFilePath $ sandbox </> from) 112 | "from contents" 113 | ensureDir $ parent $ sandbox </> todir 114 | createSymbolicLink 115 | (FP.dropTrailingPathSeparator $ 116 | toFilePath $ sandbox </> fromdir) 117 | (FP.dropTrailingPathSeparator $ 118 | toFilePath $ sandbox </> todir) 119 | writeFile 120 | (toFilePath $ sandbox </> to) 121 | "to contents" 122 | let runBake f = 123 | runReaderT 124 | (runExceptT f) 125 | (defaultBakeSettings 126 | { bakeRoot = sandbox 127 | , bakeEnvironment = [] 128 | }) 129 | runBake (bakeFilePath (toFilePath to)) `shouldReturn` 130 | (Right $ AbsP $ sandbox </> from) 131 | runBake (bakeFilePath (toFilePath from)) `shouldReturn` 132 | (Right $ AbsP $ sandbox </> from) 133 | describe "defaultBakeSettings" $ 134 | it "is valid" $ isValid defaultBakeSettings 135 | describe "complete" $ do 136 | it "only ever produces a valid filepath" $ validIfSucceeds2 complete 137 | it 138 | "replaces the home directory as specified for simple home directories and simple paths" $ do 139 | forAll arbitrary $ \env -> 140 | forAll generateWord $ \home -> 141 | forAll generateWord $ \fp -> 142 | complete (("HOME", home) : env) ("~" FP.</> fp) `shouldBe` 143 | Right (home FP.</> fp) 144 | describe "parseId" $ do 145 | it "only ever produces valid IDs" $ producesValid parseId 146 | it "Figures out the home directory in these cases" $ do 147 | parseId "~" `shouldBe` [Var "HOME"] 148 | parseId "~/ab" `shouldBe` [Var "HOME", Plain "/ab"] 149 | it "Works for these cases" $ do 150 | parseId "" `shouldBe` [] 151 | parseId "file" `shouldBe` [Plain "file"] 152 | parseId "something$(with)variable" `shouldBe` 153 | [Plain "something", Var "with", Plain "variable"] 154 | parseId "$(one)$(two)$(three)" `shouldBe` 155 | [Var "one", Var "two", Var "three"] 156 | describe "replaceId" $ do 157 | it "only ever produces valid FilePaths" $ validIfSucceeds2 replaceId 158 | it "leaves plain ID's unchanged in any environment" $ 159 | forAll arbitrary $ \env -> 160 | forAll arbitrary $ \s -> 161 | replaceId env (Plain s) `shouldBe` Right s 162 | it "returns Left if a variable is not in the environment" $ 163 | forAll arbitrary $ \var -> 164 | forAll (arbitrary `suchThat` (isNothing . lookup var)) $ \env -> 165 | replaceId env (Var var) `shouldSatisfy` isLeft 166 | it "replaces a variable if it's in the environment" $ 167 | forAll arbitrary $ \var -> 168 | forAll arbitrary $ \val -> 169 | forAll (arbitrary `suchThat` (isNothing . lookup var)) $ \env1 -> 170 | forAll 171 | (arbitrary `suchThat` (isNothing .
lookup var)) $ \env2 -> 172 | replaceId 173 | (env1 ++ [(var, val)] ++ env2) 174 | (Var var) `shouldBe` 175 | Right val 176 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Check/Gen.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wno-orphans #-} 2 | 3 | module SuperUserSpark.Check.Gen where 4 | 5 | import TestImport 6 | 7 | import SuperUserSpark.Bake.Gen () 8 | import SuperUserSpark.Check.Types 9 | import SuperUserSpark.Compiler.Gen () 10 | import SuperUserSpark.Diagnose.Gen () 11 | import SuperUserSpark.Language.Gen () 12 | 13 | instance GenUnchecked CheckAssignment 14 | 15 | instance GenValid CheckAssignment where 16 | genValid = genValidStructurallyWithoutExtraChecking 17 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 18 | 19 | instance GenUnchecked CheckSettings 20 | 21 | instance GenValid CheckSettings where 22 | genValid = genValidStructurallyWithoutExtraChecking 23 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 24 | 25 | instance GenUnchecked CheckResult 26 | 27 | instance GenValid CheckResult where 28 | genValid = genValidStructurallyWithoutExtraChecking 29 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 30 | 31 | instance GenUnchecked Instruction 32 | 33 | instance GenValid Instruction where 34 | genValid = genValidStructurallyWithoutExtraChecking 35 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 36 | 37 | instance GenUnchecked CleanupInstruction 38 | 39 | instance GenValid CleanupInstruction where 40 | genValid = genValidStructurallyWithoutExtraChecking 41 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 42 | 43 | instance GenUnchecked DeploymentCheckResult 44 | 45 | instance GenValid DeploymentCheckResult where 46 | genValid = genValidStructurallyWithoutExtraChecking 47 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 48 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Check/TestUtils.hs: -------------------------------------------------------------------------------- 1 | module SuperUserSpark.Check.TestUtils where 2 | 3 | import TestImport 4 | 5 | import SuperUserSpark.Bake.Gen () 6 | import SuperUserSpark.Bake.Types 7 | import SuperUserSpark.Check.Gen () 8 | import SuperUserSpark.Check.Internal 9 | import SuperUserSpark.Check.Types 10 | import SuperUserSpark.CoreTypes 11 | import SuperUserSpark.Diagnose.Types 12 | 13 | -- * Test utils for checkDeployment 14 | shouldBeImpossible' :: DiagnosedDeployment -> Expectation 15 | shouldBeImpossible' dd = checkDeployment dd `shouldSatisfy` impossibleDeployment 16 | 17 | shouldBeImpossibleDeployment :: [CheckResult] -> Expectation 18 | shouldBeImpossibleDeployment dd = 19 | bestResult dd `shouldSatisfy` impossibleDeployment 20 | 21 | -- * Test utils for checkSingle 22 | isDirty :: CheckResult -> Bool 23 | isDirty Dirty{} = True 24 | isDirty _ = False 25 | 26 | isReady :: CheckResult -> Bool 27 | isReady (Ready _) = True 28 | isReady _ = False 29 | 30 | isDone :: CheckResult -> Bool 31 | isDone AlreadyDone = True 32 | isDone _ = False 33 | 34 | isImpossible :: CheckResult -> Bool 35 | isImpossible (Impossible _) = True 36 | isImpossible _ = False 37 | 38 | shouldBeDirty 39 | :: DiagnosedFp 40 | -> DiagnosedFp 41 | -> DeploymentKind 42 | -> CleanupInstruction 43 | -> Expectation 44 | shouldBeDirty src dst kind eci = 45 | case checkSingle src dst kind of 46 | Dirty _ ins ci -> do 47 | ci 
`shouldBe` eci 48 | let tp = dropTrailingPathSeparator . toFilePath 49 | let checkCopyDeployment isrc idst expectation = do 50 | tp isrc `shouldBe` toPath (diagnosedFilePath src) 51 | tp idst `shouldBe` toPath (diagnosedFilePath dst) 52 | expectation `shouldBe` kind 53 | case ins of 54 | CopyFile isrc idst -> checkCopyDeployment isrc idst CopyDeployment 55 | CopyDir isrc idst -> checkCopyDeployment isrc idst CopyDeployment 56 | LinkFile isrc idst -> checkCopyDeployment isrc idst LinkDeployment 57 | LinkDir isrc idst -> checkCopyDeployment isrc idst LinkDeployment 58 | t -> 59 | expectationFailure $ 60 | unlines 61 | [ "checkSingle" 62 | , show src 63 | , show dst 64 | , show kind 65 | , "should be dirty but is" 66 | , show t 67 | ] 68 | 69 | shouldBeReady :: DiagnosedFp -> DiagnosedFp -> DeploymentKind -> Expectation 70 | shouldBeReady src dst kind = checkSingle src dst kind `shouldSatisfy` isReady 71 | 72 | shouldBeDone :: DiagnosedFp -> DiagnosedFp -> DeploymentKind -> Expectation 73 | shouldBeDone src dst kind = checkSingle src dst kind `shouldSatisfy` isDone 74 | 75 | shouldBeImpossible :: DiagnosedFp 76 | -> DiagnosedFp 77 | -> DeploymentKind 78 | -> Expectation 79 | shouldBeImpossible src dst kind = 80 | checkSingle src dst kind `shouldSatisfy` isImpossible 81 | 82 | validWith :: Diagnostics -> Gen DiagnosedFp 83 | validWith d = D <$> genValid <*> pure d <*> genValid 84 | -------------------------------------------------------------------------------- /test/SuperUserSpark/CheckSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TypeApplications #-} 2 | {-# LANGUAGE TemplateHaskell #-} 3 | 4 | module SuperUserSpark.CheckSpec where 5 | 6 | import TestImport 7 | 8 | import Control.Monad (forM_) 9 | import Data.Hashable 10 | import System.FilePath (dropTrailingPathSeparator) 11 | import System.Posix.Files 12 | 13 | import SuperUserSpark.Bake.Types 14 | import SuperUserSpark.Check 15 | import SuperUserSpark.Check.Gen () 16 | import SuperUserSpark.Check.Internal 17 | import SuperUserSpark.Check.TestUtils 18 | import SuperUserSpark.Check.Types 19 | import SuperUserSpark.Compiler.Types 20 | import SuperUserSpark.CoreTypes 21 | import SuperUserSpark.Diagnose.Internal 22 | import SuperUserSpark.Diagnose.TestUtils 23 | import SuperUserSpark.Diagnose.Types 24 | import SuperUserSpark.OptParse.Gen () 25 | import SuperUserSpark.Parser.Gen () 26 | import SuperUserSpark.Utils 27 | import TestUtils 28 | 29 | spec :: Spec 30 | spec = do 31 | instanceSpec 32 | checkSpec 33 | 34 | instanceSpec :: Spec 35 | instanceSpec = do 36 | eqSpec @CheckSettings 37 | genValidSpec @CheckSettings 38 | eqSpec @Instruction 39 | genValidSpec @Instruction 40 | eqSpec @CleanupInstruction 41 | genValidSpec @CleanupInstruction 42 | eqSpec @DeploymentCheckResult 43 | genValidSpec @DeploymentCheckResult 44 | eqSpec @CheckResult 45 | genValidSpec @CheckResult 46 | 47 | checkSpec :: Spec 48 | checkSpec = 49 | parallel $ do 50 | checkSingleSpec 51 | checkDeploymentSpec 52 | describe "formatDeploymentChecks" $ 53 | it "always produces valid strings" $ 54 | producesValid formatDeploymentChecks 55 | describe "formatDeploymentCheck" $ 56 | it "always produces valid strings" $ 57 | producesValid formatDeploymentCheck 58 | describe "formatInstruction" $ 59 | it "always produces valid strings" $ producesValid formatInstruction 60 | describe "formatCleanupInstruction" $ 61 | it "always produces valid strings" $ 62 | producesValid formatCleanupInstruction 63 | 64 | checkDeploymentSpec 
:: Spec 65 | checkDeploymentSpec = do 66 | describe "checkDeployment" $ do 67 | it "always produces valid check results" $ 68 | producesValidsOnValids checkDeployment 69 | it "says 'impossible' for deployments with an empty list of sources" $ do 70 | forAll genUnchecked $ \dst -> 71 | forAll genUnchecked $ \kind -> 72 | shouldBeImpossible' $ Deployment (Directions [] dst) kind 73 | it "says 'impossible' for deployments where all singles are impossible" $ do 74 | forAll 75 | (genValid `suchThat` 76 | (\(Deployment (Directions srcs dst) kind) -> 77 | all (\src -> isImpossible $ checkSingle src dst kind) srcs)) $ \dd -> 78 | shouldBeImpossible' dd 79 | it 80 | "gives the same result as bestResult (just with a better error for empty lists)" $ do 81 | forAll genValid $ \dd@(Deployment (Directions srcs dst) kind) -> 82 | case ( bestResult (map (\src -> checkSingle src dst kind) srcs) 83 | , checkDeployment dd) of 84 | (ImpossibleDeployment r1, ImpossibleDeployment r2) -> 85 | length r1 `shouldSatisfy` (<= (length r2)) 86 | (r1, r2) -> r1 `shouldBe` r2 87 | describe "bestResult" $ do 88 | it "always produces valid check results" $ 89 | producesValidsOnValids bestResult 90 | it "says 'impossible' if all checkresults are impossible" $ do 91 | forAll 92 | (genValid `suchThat` all isImpossible) 93 | shouldBeImpossibleDeployment 94 | it "says 'done' if the first non-impossible in 'done'" $ do 95 | forAll 96 | (genValid `suchThat` 97 | (any (not . isImpossible) &&& 98 | (isDone . head . dropWhile isImpossible))) $ \dd -> 99 | bestResult dd `shouldSatisfy` deploymentIsDone 100 | it "says 'dirty' if the first non-impossible in 'dirty'" $ do 101 | forAll 102 | (genValid `suchThat` 103 | (any (not . isImpossible) &&& 104 | (isDirty . head . dropWhile isImpossible))) $ \dd -> 105 | bestResult dd `shouldSatisfy` dirtyDeployment 106 | it "says 'ready' if the first non-impossible in 'ready'" $ do 107 | forAll 108 | (genValid `suchThat` 109 | (any (not . isImpossible) &&& 110 | (isReady . head . 
dropWhile isImpossible))) $ \dd -> 111 | bestResult dd `shouldSatisfy` deploymentReadyToDeploy 112 | 113 | checkSingleSpec :: Spec 114 | checkSingleSpec = 115 | describe "checkSingle" $ do 116 | it "always produces valid CheckResults" $ 117 | producesValidsOnValids3 checkSingle 118 | it "says 'impossible' if the source does not exist" $ do 119 | forAll (validWith Nonexistent) $ \src -> 120 | forAll genValid $ \dst -> 121 | forAll genValid $ \kind -> shouldBeImpossible src dst kind 122 | it 123 | "says 'ready' if the source is a file and the destination does not exist" $ do 124 | forAll (validWith IsFile) $ \src -> 125 | forAll (validWith Nonexistent) $ \dst -> 126 | forAll genValid $ \kind -> shouldBeReady src dst kind 127 | it 128 | "says 'dirty' if both the source and destination are files and it's a link deployment" $ do 129 | forAll (validWith IsFile) $ \src -> 130 | forAll (validWith IsFile) $ \dst -> 131 | shouldBeDirty src dst LinkDeployment $ 132 | CleanFile $ unAbsP $ diagnosedFilePath dst 133 | it 134 | "says 'done' if both the source and destination are files and it's a copy deployment and the files are equal" $ do 135 | forAll genValid $ \src -> 136 | forAll genValid $ \dst -> 137 | forAll genValid $ \h1 -> 138 | shouldBeDone 139 | (D src IsFile h1) 140 | (D dst IsFile h1) 141 | CopyDeployment 142 | it 143 | "says 'dirty' if both the source and destination are files and it's a copy deployment but the files are unequal" $ do 144 | forAll genValid $ \src -> 145 | forAll genValid $ \dst -> 146 | forAll genValid $ \h1 -> 147 | forAll (genValid `suchThat` (/= h1)) $ \h2 -> 148 | shouldBeDirty 149 | (D src IsFile h1) 150 | (D dst IsFile h2) 151 | CopyDeployment $ 152 | CleanFile $ unAbsP dst 153 | it 154 | "says 'dirty' if the source is a file and the destination is a directory" $ do 155 | forAll (validWith IsFile) $ \src -> 156 | forAll (validWith IsDirectory) $ \dst -> 157 | forAll genValid $ \kind -> do 158 | d <- parseAbsDir (toPath $ diagnosedFilePath dst) 159 | shouldBeDirty src dst kind $ CleanDirectory d 160 | it 161 | "says 'dirty' if the source is a file and the destination is a link for a link deployment but the destination doesn't point to the source" $ do 162 | forAll (validWith IsFile) $ \src@(D srcp _ _) -> 163 | forAll (genValid `suchThat` (/= srcp)) $ \l -> 164 | forAll (validWith $ IsLinkTo l) $ \dst -> 165 | shouldBeDirty src dst LinkDeployment $ 166 | CleanLink $ unAbsP $ diagnosedFilePath dst 167 | it 168 | "says 'done' if the source is a file and the destination is a link for a link deployment and the destination points to the source" $ do 169 | forAll (validWith IsFile) $ \src@(D srcp _ _) -> 170 | forAll (validWith $ IsLinkTo srcp) $ \dst -> 171 | shouldBeDone src dst LinkDeployment 172 | it 173 | "says 'dirty' if the source is a file and the destination is a link for a copy deployment" $ do 174 | forAll (validWith IsFile) $ \src -> 175 | forAll genValid $ \l -> 176 | forAll (validWith $ IsLinkTo l) $ \dst -> 177 | shouldBeDirty src dst CopyDeployment $ 178 | CleanLink $ unAbsP $ diagnosedFilePath dst 179 | it 180 | "says 'ready' if the source is a directory and the destination does not exist" $ do 181 | forAll (validWith IsDirectory) $ \src -> 182 | forAll (validWith Nonexistent) $ \dst -> 183 | forAll genValid $ \kind -> shouldBeReady src dst kind 184 | it 185 | "says 'dirty' if the source is a directory and the destination is a file" $ do 186 | forAll (validWith IsDirectory) $ \src -> 187 | forAll (validWith IsFile) $ \dst -> 188 | forAll genValid $ \kind 
-> 189 | shouldBeDirty src dst kind $ 190 | CleanFile $ unAbsP $ diagnosedFilePath dst 191 | it 192 | "says 'dirty' if both the source and destination are directories for a link deployment" $ do 193 | forAll (validWith IsDirectory) $ \src -> 194 | forAll (validWith IsDirectory) $ \dst -> do 195 | d <- parseAbsDir (toPath $ diagnosedFilePath dst) 196 | shouldBeDirty src dst LinkDeployment $ CleanDirectory d 197 | it 198 | "says 'done' if both the source and destination are directories and it's a copy deployment and the directories are equal" $ do 199 | forAll genValid $ \src -> 200 | forAll genValid $ \dst -> 201 | forAll genValid $ \h1 -> 202 | shouldBeDone 203 | (D src IsDirectory h1) 204 | (D dst IsDirectory h1) 205 | CopyDeployment 206 | it 207 | "says 'dirty' if both the source and destination are directories and it's a copy deployment but the directories are unequal" $ do 208 | forAll genValid $ \src -> 209 | forAll genValid $ \dst -> 210 | forAll genValid $ \h1 -> 211 | forAll (genValid `suchThat` (/= h1)) $ \h2 -> do 212 | d <- parseAbsDir $ toPath dst 213 | shouldBeDirty 214 | (D src IsDirectory h1) 215 | (D dst IsDirectory h2) 216 | CopyDeployment $ 217 | CleanDirectory d 218 | it 219 | "says 'dirty' if the source is a directory and the destination is a link for a link deployment but the destination doesn't point to the source" $ do 220 | forAll (validWith IsDirectory) $ \src@(D srcp _ _) -> 221 | forAll (genValid `suchThat` (/= srcp)) $ \l -> 222 | forAll (validWith $ IsLinkTo l) $ \dst -> 223 | shouldBeDirty src dst LinkDeployment $ 224 | CleanLink $ unAbsP $ diagnosedFilePath dst 225 | it 226 | "says 'done' if the source is a directory and the destination is a link for a link deployment and the destination points to the source" $ do 227 | forAll (validWith IsDirectory) $ \src@(D srcp _ _) -> 228 | forAll (validWith $ IsLinkTo srcp) $ \dst -> 229 | shouldBeDone src dst LinkDeployment 230 | it 231 | "says 'dirty' if the source is a directory and the destination is a link for a copy deployment" $ do 232 | forAll (validWith IsDirectory) $ \src -> 233 | forAll genValid $ \l -> 234 | forAll (validWith $ IsLinkTo l) $ \dst -> 235 | shouldBeDirty src dst CopyDeployment $ 236 | CleanLink $ unAbsP $ diagnosedFilePath dst 237 | it "says 'dirty' if the source is a link" $ do 238 | forAll genValid $ \l -> 239 | forAll (validWith $ IsLinkTo l) $ \src -> 240 | forAll genValid $ \dst -> 241 | forAll genValid $ \kind -> 242 | shouldBeImpossible src dst kind 243 | it "says 'dirty' for a weird source" $ do 244 | forAll (validWith IsWeird) $ \src -> 245 | forAll genValid $ \dst -> 246 | forAll genValid $ \kind -> shouldBeImpossible src dst kind 247 | it "says 'dirty' for a weird destination" $ do 248 | forAll genValid $ \src -> 249 | forAll (validWith IsWeird) $ \dst -> 250 | forAll genValid $ \kind -> shouldBeImpossible src dst kind 251 | it "works for these unit tests" $ do pending 252 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Compiler/Gen.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wno-orphans #-} 2 | 3 | module SuperUserSpark.Compiler.Gen where 4 | 5 | import TestImport 6 | 7 | import SuperUserSpark.Compiler.Types 8 | import SuperUserSpark.Language.Gen () 9 | import SuperUserSpark.Parser.Gen () 10 | import SuperUserSpark.PreCompiler.Gen () 11 | 12 | instance GenUnchecked CompileAssignment 13 | 14 | instance GenValid CompileAssignment where 15 | genValid = 
genValidStructurallyWithoutExtraChecking 16 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 17 | 18 | instance GenUnchecked StrongCardFileReference 19 | 20 | instance GenValid StrongCardFileReference where 21 | genValid = genValidStructurallyWithoutExtraChecking 22 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 23 | 24 | instance GenUnchecked CompileSettings 25 | 26 | instance GenValid CompileSettings where 27 | genValid = genValidStructurallyWithoutExtraChecking 28 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 29 | 30 | instance GenUnchecked a => GenUnchecked (Deployment a) where 31 | genUnchecked = Deployment <$> genUnchecked <*> genUnchecked 32 | 33 | instance GenValid a => GenValid (Deployment a) where 34 | genValid = genValidStructurallyWithoutExtraChecking 35 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 36 | 37 | instance GenUnchecked a => GenUnchecked (DeploymentDirections a) 38 | 39 | instance GenValid a => GenValid (DeploymentDirections a) where 40 | genValid = genValidStructurallyWithoutExtraChecking 41 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 42 | 43 | instance GenUnchecked PrefixPart 44 | 45 | instance GenValid PrefixPart where 46 | genValid = genValidStructurallyWithoutExtraChecking 47 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 48 | 49 | instance GenUnchecked CompilerState 50 | 51 | instance GenValid CompilerState where 52 | genValid = genValidStructurallyWithoutExtraChecking 53 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 54 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Compiler/TestUtils.hs: -------------------------------------------------------------------------------- 1 | module SuperUserSpark.Compiler.TestUtils where 2 | 3 | import TestImport 4 | 5 | import Data.Either (isLeft, isRight) 6 | 7 | import SuperUserSpark.Compiler.Internal 8 | import SuperUserSpark.Compiler.Types 9 | import SuperUserSpark.Language.Types 10 | import SuperUserSpark.PreCompiler 11 | import SuperUserSpark.PreCompiler.Types 12 | 13 | runPreCompiler :: Precompiler () -> [PreCompileError] 14 | runPreCompiler pc = runIdentity $ execWriterT pc 15 | 16 | cleanBy :: (a -> Precompiler ()) -> a -> Bool 17 | cleanBy func a = null $ runPreCompiler $ func a 18 | 19 | declarationClean :: Declaration -> IO () 20 | declarationClean d = d `shouldSatisfy` cleanBy cleanDeclaration 21 | 22 | declarationDirty :: Declaration -> IO () 23 | declarationDirty d = d `shouldNotSatisfy` cleanBy cleanDeclaration 24 | 25 | filePathDirty :: FilePath -> IO () 26 | filePathDirty fp = fp `shouldNotSatisfy` cleanBy cleanFilePath 27 | 28 | filePathClean :: FilePath -> IO () 29 | filePathClean fp = fp `shouldSatisfy` cleanBy cleanFilePath 30 | 31 | runPureCompiler :: CompileSettings -> PureCompiler a -> Either CompileError a 32 | runPureCompiler c func = runIdentity $ runReaderT (runExceptT func) c 33 | 34 | runInternalCompiler 35 | :: [Declaration] 36 | -> CompilerState 37 | -> CompileSettings 38 | -> Either CompileError (CompilerState, ([RawDeployment], [CardReference])) 39 | runInternalCompiler ds s c = 40 | runPureCompiler c $ runWriterT $ execStateT (compileDecs ds) s 41 | 42 | compileSingleDec 43 | :: Declaration 44 | -> CompilerState 45 | -> CompileSettings 46 | -> Either CompileError (CompilerState, ([RawDeployment], [CardReference])) 47 | compileSingleDec d = runInternalCompiler [d] 48 | 49 | compilationShouldSucceed :: [Declaration] 50 | -> CompilerState 51 
| -> CompileSettings 52 | -> IO () 53 | compilationShouldSucceed ds s c = 54 | runInternalCompiler ds s c `shouldSatisfy` isRight 55 | 56 | compilationShouldFail :: [Declaration] 57 | -> CompilerState 58 | -> CompileSettings 59 | -> IO () 60 | compilationShouldFail ds s c = runInternalCompiler ds s c `shouldSatisfy` isLeft 61 | 62 | singleShouldFail :: CompileSettings -> CompilerState -> Declaration -> IO () 63 | singleShouldFail c s d = compilationShouldFail [d] s c 64 | 65 | shouldCompileTo :: CompileSettings 66 | -> CompilerState 67 | -> [Declaration] 68 | -> [RawDeployment] 69 | -> IO () 70 | shouldCompileTo c s ds eds = do 71 | compilationShouldSucceed ds s c 72 | let Right (_, (ads, crs)) = runInternalCompiler ds s c 73 | ads `shouldBe` eds 74 | crs `shouldSatisfy` null 75 | 76 | singleShouldCompileTo :: CompileSettings 77 | -> CompilerState 78 | -> Declaration 79 | -> RawDeployment 80 | -> IO () 81 | singleShouldCompileTo c s d eds = shouldCompileTo c s [d] [eds] 82 | 83 | shouldResultInState :: CompileSettings 84 | -> CompilerState 85 | -> Declaration 86 | -> CompilerState 87 | -> IO () 88 | shouldResultInState c s d es = do 89 | compilationShouldSucceed [d] s c 90 | let Right (as, _) = runInternalCompiler [d] s c 91 | as `shouldBe` es 92 | 93 | -- Filepath utils 94 | containsNewlineCharacter :: String -> Bool 95 | containsNewlineCharacter f = any (`elem` f) ['\n', '\r'] 96 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Deployer/Gen.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wno-orphans #-} 2 | 3 | module SuperUserSpark.Deployer.Gen where 4 | 5 | import TestImport 6 | 7 | import SuperUserSpark.Check.Gen () 8 | import SuperUserSpark.Deployer.Types 9 | 10 | instance GenUnchecked DeployAssignment 11 | 12 | instance GenValid DeployAssignment where 13 | genValid = genValidStructurallyWithoutExtraChecking 14 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 15 | 16 | instance GenUnchecked DeploySettings 17 | 18 | instance GenValid DeploySettings where 19 | genValid = genValidStructurallyWithoutExtraChecking 20 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 21 | 22 | instance GenUnchecked PreDeployment 23 | 24 | instance GenValid PreDeployment where 25 | genValid = genValidStructurallyWithoutExtraChecking 26 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 27 | -------------------------------------------------------------------------------- /test/SuperUserSpark/DeployerSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TypeApplications #-} 2 | {-# LANGUAGE TemplateHaskell #-} 3 | 4 | module SuperUserSpark.DeployerSpec where 5 | 6 | import TestImport 7 | 8 | import System.Posix.Files 9 | 10 | import SuperUserSpark.Bake.Types 11 | import SuperUserSpark.Check.Internal 12 | import SuperUserSpark.Check.Types 13 | import SuperUserSpark.Deployer 14 | import SuperUserSpark.Deployer.Gen () 15 | import SuperUserSpark.Deployer.Internal 16 | import SuperUserSpark.Deployer.Types 17 | import SuperUserSpark.Diagnose.Internal 18 | import SuperUserSpark.Diagnose.Types 19 | import SuperUserSpark.OptParse.Gen () 20 | import SuperUserSpark.Parser.Gen () 21 | 22 | spec :: Spec 23 | spec = do 24 | instanceSpec 25 | deployerSpec 26 | cleanSpec 27 | deploymentSpec 28 | 29 | instanceSpec :: Spec 30 | instanceSpec = 31 | parallel $ do 32 | eqSpec @DeployAssignment 33 | genValidSpec @DeployAssignment 34 | 
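-- Descriptive note: eqSpec @T and genValidSpec @T come from genvalidity-hspec;
-- eqSpec checks that T's Eq instance is lawful, and genValidSpec checks that
-- T's GenValid generator only produces values satisfying T's Validity instance.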
eqSpec @DeploySettings 35 | genValidSpec @DeploySettings 36 | eqSpec @PreDeployment 37 | genValidSpec @PreDeployment 38 | 39 | deployerSpec :: Spec 40 | deployerSpec = 41 | parallel $ do 42 | describe "defaultDeploySettings" $ 43 | it "is valid" $ isValid defaultDeploySettings 44 | 45 | cleanSpec :: Spec 46 | cleanSpec = do 47 | here <- runIO getCurrentDir 48 | let sandbox = here </> $(mkRelDir "test_sandbox") 49 | let setup = ensureDir sandbox 50 | let teardown = removeDirRecur sandbox 51 | let clean :: DeploySettings -> CleanupInstruction -> IO () 52 | clean sets ci = 53 | runReaderT (runExceptT $ performClean ci) sets `shouldReturn` 54 | Right () 55 | beforeAll_ setup $ 56 | afterAll_ teardown $ do 57 | describe "performClean" $ do 58 | it "doesn't remove this file if that's not in the config" $ do 59 | let c = 60 | defaultDeploySettings 61 | {deploySetsReplaceFiles = False} 62 | withCurrentDir sandbox $ do 63 | let file = sandbox </> $(mkRelFile "test.txt") 64 | let fp = AbsP file 65 | writeFile (toFilePath file) "This is a test" 66 | diagnoseFp fp `shouldReturn` IsFile 67 | clean c $ CleanFile file 68 | diagnoseFp fp `shouldReturn` IsFile 69 | removeFile file 70 | diagnoseFp fp `shouldReturn` Nonexistent 71 | it "removes this file if that's in the config" $ do 72 | let c = 73 | defaultDeploySettings 74 | {deploySetsReplaceFiles = True} 75 | withCurrentDir sandbox $ do 76 | let file = sandbox </> $(mkRelFile "test.txt") 77 | let fp = AbsP file 78 | writeFile (toFilePath file) "This is a test" 79 | diagnoseFp fp `shouldReturn` IsFile 80 | clean c $ CleanFile file 81 | diagnoseFp fp `shouldReturn` Nonexistent 82 | it "doesn't remove this directory if that's not in the config" $ do 83 | let c = 84 | defaultDeploySettings 85 | {deploySetsReplaceDirectories = False} 86 | withCurrentDir sandbox $ do 87 | let dir = sandbox </> $(mkRelDir "testdirectory") 88 | let dirf = 89 | AbsP $ sandbox </> $(mkRelFile "testdirectory") 90 | ensureDir dir 91 | diagnoseFp dirf `shouldReturn` IsDirectory 92 | clean c $ CleanDirectory dir 93 | diagnoseFp dirf `shouldReturn` IsDirectory 94 | removeDirRecur dir 95 | diagnoseFp dirf `shouldReturn` Nonexistent 96 | it "removes this directory if that's in the config" $ do 97 | let c = 98 | defaultDeploySettings 99 | {deploySetsReplaceDirectories = True} 100 | withCurrentDir sandbox $ do 101 | let dir = sandbox </> $(mkRelDir "testdirectory") 102 | let dirf = 103 | AbsP $ sandbox </> $(mkRelFile "testdirectory") 104 | ensureDir dir 105 | diagnoseFp dirf `shouldReturn` IsDirectory 106 | clean c $ CleanDirectory dir 107 | diagnoseFp dirf `shouldReturn` Nonexistent 108 | it "doesn't remove this link if that's not in the config" $ do 109 | let c = 110 | defaultDeploySettings 111 | {deploySetsReplaceLinks = False} 112 | withCurrentDir sandbox $ do 113 | let link_ = sandbox </> $(mkRelFile "testlink") 114 | let link_' = AbsP link_ 115 | let file_ = sandbox </> $(mkRelFile "testfile") 116 | let file_' = AbsP file_ 117 | writeFile (toFilePath file_) "This is a test" 118 | createSymbolicLink (toFilePath file_) (toFilePath link_) 119 | diagnoseFp link_' `shouldReturn` IsLinkTo file_' 120 | clean c $ CleanLink link_ 121 | diagnoseFp link_' `shouldReturn` IsLinkTo file_' 122 | removeLink $ toFilePath link_ 123 | diagnoseFp link_' `shouldReturn` Nonexistent 124 | removeFile file_ 125 | diagnoseFp file_' `shouldReturn` Nonexistent 126 | it 127 | "removes this link with an existent source if that's in the config" $ do 128 | let c = 129 | defaultDeploySettings 130 | {deploySetsReplaceLinks = True} 131 |
withCurrentDir sandbox $ do 132 | let link_ = sandbox </> $(mkRelFile "testlink") 133 | let link_' = AbsP link_ 134 | let file_ = sandbox </> $(mkRelFile "testfile") 135 | let file_' = AbsP file_ 136 | writeFile (toFilePath file_) "This is a test" 137 | createSymbolicLink (toFilePath file_) (toFilePath link_) 138 | diagnoseFp link_' `shouldReturn` IsLinkTo file_' 139 | clean c $ CleanLink link_ 140 | diagnoseFp link_' `shouldReturn` Nonexistent 141 | diagnoseFp file_' `shouldReturn` IsFile 142 | removeFile file_ 143 | diagnoseFp file_' `shouldReturn` Nonexistent 144 | it 145 | "removes this link with a nonexistent source if that's in the config" $ do 146 | let c = 147 | defaultDeploySettings 148 | {deploySetsReplaceLinks = True} 149 | withCurrentDir sandbox $ do 150 | let link_ = sandbox </> $(mkRelFile "testlink") 151 | let link_' = AbsP link_ 152 | let file_ = sandbox </> $(mkRelFile "testfile") 153 | let file_' = AbsP file_ 154 | createSymbolicLink (toFilePath file_) (toFilePath link_) 155 | diagnoseFp link_' `shouldReturn` IsLinkTo file_' 156 | diagnoseFp file_' `shouldReturn` Nonexistent 157 | clean c $ CleanLink link_ 158 | diagnoseFp link_' `shouldReturn` Nonexistent 159 | diagnoseFp file_' `shouldReturn` Nonexistent 160 | 161 | deploymentSpec :: Spec 162 | deploymentSpec = do 163 | here <- runIO $ getCurrentDir 164 | let sandbox = here </> $(mkRelDir "test_sandbox") 165 | let setup = ensureDir sandbox 166 | let teardown = removeDirRecur sandbox 167 | beforeAll_ setup $ 168 | afterAll_ teardown $ do 169 | describe "performCopyFile" $ do 170 | it "successfully copies this file" $ do 171 | withCurrentDir sandbox $ do 172 | let src = sandbox </> $(mkRelFile "testfile") 173 | let src' = AbsP src 174 | let dst = sandbox </> $(mkRelFile "testcopy") 175 | let dst' = AbsP dst 176 | writeFile (toFilePath src) "This is a file."
177 | diagnoseFp src' `shouldReturn` IsFile 178 | diagnoseFp dst' `shouldReturn` Nonexistent 179 | -- Under test 180 | performCopyFile src dst 181 | diagnoseFp src' `shouldReturn` IsFile 182 | diagnoseFp dst' `shouldReturn` IsFile 183 | dsrc <- diagnoseAbsP src' 184 | ddst <- diagnoseAbsP dst' 185 | diagnosedHashDigest ddst `shouldBe` 186 | diagnosedHashDigest dsrc 187 | removeFile src 188 | removeFile dst 189 | diagnoseFp src' `shouldReturn` Nonexistent 190 | diagnoseFp dst' `shouldReturn` Nonexistent 191 | describe "performCopyDir" $ do 192 | it "successfully copies this directory" $ do 193 | withCurrentDir sandbox $ do 194 | let src = sandbox </> $(mkRelDir "testdir") 195 | let src' = AbsP $ sandbox </> $(mkRelFile "testdir") 196 | let dst = sandbox </> $(mkRelDir "testcopy") 197 | let dst' = AbsP $ sandbox </> $(mkRelFile "testcopy") 198 | ensureDir src 199 | diagnoseFp src' `shouldReturn` IsDirectory 200 | diagnoseFp dst' `shouldReturn` Nonexistent 201 | -- Under test 202 | performCopyDir src dst 203 | diagnoseFp src' `shouldReturn` IsDirectory 204 | diagnoseFp dst' `shouldReturn` IsDirectory 205 | dsrc <- diagnoseAbsP src' 206 | ddst <- diagnoseAbsP dst' 207 | diagnosedHashDigest ddst `shouldBe` 208 | diagnosedHashDigest dsrc 209 | removeDirRecur src 210 | removeDirRecur dst 211 | diagnoseFp src' `shouldReturn` Nonexistent 212 | diagnoseFp dst' `shouldReturn` Nonexistent 213 | describe "performLinkFile" $ do 214 | it "successfully links this file" $ do 215 | withCurrentDir sandbox $ do 216 | let src = sandbox </> $(mkRelFile "testfile") 217 | let src' = AbsP src 218 | let dst = sandbox </> $(mkRelFile "testlink") 219 | let dst' = AbsP dst 220 | diagnoseFp src' `shouldReturn` Nonexistent 221 | diagnoseFp dst' `shouldReturn` Nonexistent 222 | writeFile (toFilePath src) "This is a test."
223 | diagnoseFp src' `shouldReturn` IsFile 224 | diagnoseFp dst' `shouldReturn` Nonexistent 225 | -- Under test 226 | performLinkFile src dst 227 | diagnoseFp src' `shouldReturn` IsFile 228 | diagnoseFp dst' `shouldReturn` IsLinkTo src' 229 | removeFile src 230 | removeLink $ toFilePath dst 231 | diagnoseFp src' `shouldReturn` Nonexistent 232 | diagnoseFp dst' `shouldReturn` Nonexistent 233 | describe "performLinkDir" $ do 234 | it "successfully links this directory" $ do 235 | withCurrentDir sandbox $ do 236 | let src = sandbox </> $(mkRelDir "testdir") 237 | let src' = AbsP $ sandbox </> $(mkRelFile "testdir") 238 | let dst = sandbox </> $(mkRelDir "testlink") 239 | let dst' = AbsP $ sandbox </> $(mkRelFile "testlink") 240 | diagnoseFp src' `shouldReturn` Nonexistent 241 | diagnoseFp dst' `shouldReturn` Nonexistent 242 | ensureDir src 243 | diagnoseFp src' `shouldReturn` IsDirectory 244 | diagnoseFp dst' `shouldReturn` Nonexistent 245 | -- Under test 246 | performLinkDir src dst 247 | diagnoseFp src' `shouldReturn` IsDirectory 248 | diagnoseFp dst' `shouldReturn` IsLinkTo src' 249 | removeDirRecur src 250 | removeLink $ dropTrailingPathSeparator $ toFilePath dst 251 | diagnoseFp src' `shouldReturn` Nonexistent 252 | diagnoseFp dst' `shouldReturn` Nonexistent 253 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Diagnose/Gen.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wno-orphans #-} 2 | 3 | module SuperUserSpark.Diagnose.Gen where 4 | 5 | import TestImport 6 | 7 | import SuperUserSpark.Bake.Gen () 8 | import SuperUserSpark.Compiler.Gen () 9 | import SuperUserSpark.Diagnose.Types 10 | import SuperUserSpark.Language.Gen () 11 | 12 | instance GenUnchecked DiagnoseAssignment 13 | 14 | instance GenValid DiagnoseAssignment where 15 | genValid = genValidStructurallyWithoutExtraChecking 16 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 17 | 18 | instance GenUnchecked DiagnoseSettings 19 | 20 | instance GenValid DiagnoseSettings where 21 | genValid = genValidStructurallyWithoutExtraChecking 22 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 23 | 24 | instance GenUnchecked HashDigest 25 | 26 | instance GenValid HashDigest where 27 | genValid = genValidStructurallyWithoutExtraChecking 28 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 29 | 30 | instance GenUnchecked Diagnostics 31 | 32 | instance GenValid Diagnostics where 33 | genValid = genValidStructurallyWithoutExtraChecking 34 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 35 | 36 | instance GenUnchecked DiagnosedFp 37 | 38 | instance GenValid DiagnosedFp where 39 | genValid = genValidStructurallyWithoutExtraChecking 40 | shrinkValid = shrinkValidStructurallyWithoutExtraFiltering 41 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Diagnose/TestUtils.hs: -------------------------------------------------------------------------------- 1 | module SuperUserSpark.Diagnose.TestUtils where 2 | 3 | import TestImport 4 | 5 | import SuperUserSpark.Bake.Gen () 6 | import SuperUserSpark.Bake.Types 7 | import SuperUserSpark.Check.Gen () 8 | import SuperUserSpark.Check.Internal 9 | import SuperUserSpark.Check.Types 10 | import SuperUserSpark.CoreTypes 11 | import SuperUserSpark.Diagnose.Types 12 | 13 | 14 | -- TODO, the code should be able to handle 'genValid', but currently it can't.
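-- A rough usage sketch (illustrative only): these generators are meant for
-- properties that need paths guaranteed to live inside the test sandbox, e.g.
--
--   forAll (absFileIn sandbox) $ \(file, file') -> do
--       writeFile (toFilePath file) "This is a test"
--       diagnoseFp file' `shouldReturn` IsFile
--
-- which is exactly how DiagnoseSpec.hs below exercises diagnoseFp.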
15 | absPathIn :: Path Abs Dir -> Gen AbsP 16 | absPathIn sandbox = 17 | scale (+ 5) $ do 18 | fp <- genListOf $ elements $ ['a' .. 'z'] ++ ['A' .. 'Z'] 19 | case parseRelFile fp of 20 | Nothing -> absPathIn sandbox 21 | Just p -> pure $ AbsP $ sandbox </> p 22 | 23 | absFileIn :: Path Abs Dir -> Gen (Path Abs File, AbsP) 24 | absFileIn sandbox = do 25 | p <- absPathIn sandbox 26 | pure (unAbsP p, p) 27 | 28 | absDirIn :: Path Abs Dir -> Gen (Path Abs Dir, AbsP) 29 | absDirIn sandbox = do 30 | p <- absPathIn sandbox 31 | let u = toPath p 32 | case parseAbsDir u of 33 | Nothing -> absDirIn sandbox 34 | Just ad -> pure (ad, p) 35 | -------------------------------------------------------------------------------- /test/SuperUserSpark/DiagnoseSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TypeApplications #-} 2 | {-# LANGUAGE TemplateHaskell #-} 3 | 4 | module SuperUserSpark.DiagnoseSpec where 5 | 6 | import TestImport 7 | 8 | import Control.Monad (forM_) 9 | import Data.Hashable 10 | import System.FilePath (dropTrailingPathSeparator) 11 | import System.Posix.Files 12 | 13 | import SuperUserSpark.Bake.Types 14 | import SuperUserSpark.Check 15 | import SuperUserSpark.Check.Gen () 16 | import SuperUserSpark.Check.Internal 17 | import SuperUserSpark.Check.TestUtils 18 | import SuperUserSpark.Check.Types 19 | import SuperUserSpark.Compiler.Types 20 | import SuperUserSpark.CoreTypes 21 | import SuperUserSpark.Diagnose 22 | import SuperUserSpark.Diagnose.Gen () 23 | import SuperUserSpark.Diagnose.Internal 24 | import SuperUserSpark.Diagnose.TestUtils 25 | import SuperUserSpark.Diagnose.Types 26 | import SuperUserSpark.OptParse.Gen () 27 | import SuperUserSpark.Parser.Gen () 28 | import SuperUserSpark.Utils 29 | import TestUtils 30 | 31 | spec :: Spec 32 | spec = do 33 | instanceSpec 34 | diagnoseSpec 35 | hashSpec 36 | 37 | instanceSpec :: Spec 38 | instanceSpec = do 39 | eqSpec @CheckSettings 40 | genValidSpec @CheckSettings 41 | eqSpec @Diagnostics 42 | genValidSpec @Diagnostics 43 | eqSpec @DiagnosedFp 44 | genValidSpec @DiagnosedFp 45 | eqSpec @DiagnosedDeployment 46 | genValidSpec @DiagnosedDeployment 47 | 48 | diagnoseSpec :: Spec 49 | diagnoseSpec = do 50 | sandbox <- runIO $ resolveDir' "test_sandbox" 51 | let setup = ensureDir sandbox 52 | let teardown = removeDirRecur sandbox 53 | beforeAll_ setup $ 54 | afterAll_ teardown $ do 55 | describe "diagnoseDeployment" $ do 56 | it 57 | "retains the filepaths and deploymentkind that it diagnoses for valid filepaths" $ do 58 | forAll genValid $ \d@(Deployment (Directions srcs dst) kind) -> do 59 | (Deployment (Directions dsrcs ddst) dkind) <- 60 | diagnoseDeployment d 61 | map diagnosedFilePath dsrcs `shouldBe` srcs 62 | diagnosedFilePath ddst `shouldBe` dst 63 | dkind `shouldBe` kind 64 | pend 65 | describe "diagnose" $ do 66 | it "retains the filepath that it diagnoses for valid AbsPath's" $ do 67 | forAll genValid $ \fp -> do 68 | (D dfp _ _) <- diagnoseAbsP fp 69 | dfp `shouldBe` fp 70 | pend 71 | describe "diagnoseFp" $ do 72 | let expect s ls = do 73 | rs <- 74 | forM ls $ \(a, b) -> do 75 | r <- diagnoseFp a 76 | pure (a, r, b) 77 | unless (all (\(a, r, b) -> r == b) rs) $ 78 | let nice (a, r, b) = 79 | unlines 80 | [ unwords 81 | [ "Unexpected results at stage" 82 | , show s 83 | ] 84 | , unwords ["path:", show $ toPath a] 85 | , unwords 86 | [ "stripped:" 87 | , show $ 88 | (stripProperPrefix sandbox $ unAbsP a :: Maybe (Path Rel File)) 89 | ] 90 | , unwords ["expected:", show b] 91 |
, unwords ["real:", show r] 92 | ] 93 | in expectationFailure $ unlines $ map nice rs 94 | it "figures out this link that points to something that exists" $ do 95 | let file = sandbox $(mkRelFile "file") 96 | let file' = AbsP file 97 | let link = sandbox $(mkRelFile "link") 98 | let link' = AbsP link 99 | expect "before" [(file', Nonexistent), (link', Nonexistent)] 100 | writeFile (toFilePath file) "This is a test" 101 | expect 102 | "after file creation" 103 | [(file', IsFile), (link', Nonexistent)] 104 | createSymbolicLink (toFilePath file) (toFilePath link) 105 | expect 106 | "after link creation" 107 | [(file', IsFile), (link', IsLinkTo file')] 108 | removeLink $ toFilePath link 109 | expect 110 | "after link removal" 111 | [(file', IsFile), (link', Nonexistent)] 112 | removeFile file 113 | expect 114 | "after file removal" 115 | [(file', Nonexistent), (link', Nonexistent)] 116 | it 117 | "figures out this link that points to something that does not exist" $ do 118 | let file = sandbox $(mkRelFile "file") 119 | let file' = AbsP file 120 | let link = sandbox $(mkRelFile "link") 121 | let link' = AbsP link 122 | expect "before" [(file', Nonexistent), (link', Nonexistent)] 123 | createSymbolicLink (toFilePath file) (toFilePath link) 124 | expect 125 | "after link creation" 126 | [(file', Nonexistent), (link', IsLinkTo file')] 127 | removeLink $ toFilePath link 128 | expect "after" [(file', Nonexistent), (link', Nonexistent)] 129 | it 130 | "figures out that a thing in a nonexistent dir is nonexistent" $ do 131 | let file = sandbox $(mkRelFile "nonexistent/and/file") 132 | let file' = AbsP file 133 | diagnoseFp file' `shouldReturn` Nonexistent 134 | it "figures out a file" $ do 135 | forAll (absFileIn sandbox) $ \(file, file') -> do 136 | diagnoseFp file' `shouldReturn` Nonexistent 137 | ensureDir $ parent file 138 | writeFile (toFilePath file) "This is a test" 139 | diagnoseFp file' `shouldReturn` IsFile 140 | removeFile file 141 | diagnoseFp file' `shouldReturn` Nonexistent 142 | it "figures out a directory" $ do 143 | forAll (absDirIn sandbox) $ \(dir, dir') -> do 144 | diagnoseFp dir' `shouldReturn` Nonexistent 145 | ensureDir dir 146 | diagnoseFp dir' `shouldReturn` IsDirectory 147 | removeDirRecur dir 148 | diagnoseFp dir' `shouldReturn` Nonexistent 149 | it "figures out a symbolic link with an existent destination" $ do 150 | forAll (absFileIn sandbox) $ \f@(file, file') -> 151 | forAll (absFileIn sandbox `suchThat` (/= f)) $ \(link, link') -> do 152 | expect 153 | "before" 154 | [(file', Nonexistent), (link', Nonexistent)] 155 | writeFile (toFilePath file) "This is a test" 156 | expect 157 | "after file creation" 158 | [(file', IsFile), (link', Nonexistent)] 159 | createSymbolicLink 160 | (toFilePath file) 161 | (toFilePath link) 162 | expect 163 | "after link creation" 164 | [(file', IsFile), (link', IsLinkTo file')] 165 | removeLink $ toFilePath link 166 | expect 167 | "after link removal" 168 | [(file', IsFile), (link', Nonexistent)] 169 | removeFile file 170 | expect 171 | "after" 172 | [(file', Nonexistent), (link', Nonexistent)] 173 | it "figures out a symbolic link with a nonexistent destination" $ do 174 | forAll (absFileIn sandbox) $ \f@(file, file') -> 175 | forAll (absFileIn sandbox `suchThat` (/= f)) $ \(link, link') -> do 176 | expect 177 | "before" 178 | [(file', Nonexistent), (link', Nonexistent)] 179 | createSymbolicLink 180 | (toFilePath file) 181 | (toFilePath link) 182 | expect 183 | "after link creation" 184 | [(file', Nonexistent), (link', IsLinkTo file')] 185 
| removeLink $ toFilePath link 186 | expect 187 | "after" 188 | [(file', Nonexistent), (link', Nonexistent)] 189 | it "figures out that /dev/null is weird" $ do 190 | diagnoseFp (AbsP $(mkAbsFile "/dev/null")) `shouldReturn` 191 | IsWeird 192 | it "figures out that /dev/random is weird" $ do 193 | diagnoseFp (AbsP $(mkAbsFile "/dev/random")) `shouldReturn` 194 | IsWeird 195 | 196 | checkDeploymentSpec :: Spec 197 | checkDeploymentSpec = do 198 | describe "checkDeployment" $ do 199 | it "always produces valid check results" $ 200 | producesValidsOnValids checkDeployment 201 | it "says 'impossible' for deployments with an empty list of sources" $ do 202 | forAll genUnchecked $ \dst -> 203 | forAll genUnchecked $ \kind -> 204 | shouldBeImpossible' $ Deployment (Directions [] dst) kind 205 | it "says 'impossible' for deployments where all singles are impossible" $ do 206 | forAll 207 | (genValid `suchThat` 208 | (\(Deployment (Directions srcs dst) kind) -> 209 | all (\src -> isImpossible $ checkSingle src dst kind) srcs)) $ \dd -> 210 | shouldBeImpossible' dd 211 | it 212 | "gives the same result as bestResult (just with a better error for empty lists)" $ do 213 | forAll genValid $ \dd@(Deployment (Directions srcs dst) kind) -> 214 | case ( bestResult (map (\src -> checkSingle src dst kind) srcs) 215 | , checkDeployment dd) of 216 | (ImpossibleDeployment r1, ImpossibleDeployment r2) -> 217 | length r1 `shouldSatisfy` (<= (length r2)) 218 | (r1, r2) -> r1 `shouldBe` r2 219 | describe "bestResult" $ do 220 | it "always produces valid check results" $ 221 | producesValidsOnValids bestResult 222 | it "says 'impossible' if all checkresults are impossible" $ do 223 | forAll 224 | (genValid `suchThat` all isImpossible) 225 | shouldBeImpossibleDeployment 226 | it "says 'done' if the first non-impossible in 'done'" $ do 227 | forAll 228 | (genValid `suchThat` 229 | (any (not . isImpossible) &&& 230 | (isDone . head . dropWhile isImpossible))) $ \dd -> 231 | bestResult dd `shouldSatisfy` deploymentIsDone 232 | it "says 'dirty' if the first non-impossible in 'dirty'" $ do 233 | forAll 234 | (genValid `suchThat` 235 | (any (not . isImpossible) &&& 236 | (isDirty . head . dropWhile isImpossible))) $ \dd -> 237 | bestResult dd `shouldSatisfy` dirtyDeployment 238 | it "says 'ready' if the first non-impossible in 'ready'" $ do 239 | forAll 240 | (genValid `suchThat` 241 | (any (not . isImpossible) &&& 242 | (isReady . head . dropWhile isImpossible))) $ \dd -> 243 | bestResult dd `shouldSatisfy` deploymentReadyToDeploy 244 | 245 | hashSpec :: Spec 246 | hashSpec = do 247 | tooManyFilesTest 248 | 249 | tooManyFilesTest :: Spec 250 | tooManyFilesTest = do 251 | sandbox <- runIO $ resolveDir' "test_sandbox" 252 | let setup = ensureDir sandbox 253 | let teardown = removeDirRecur sandbox 254 | let aLot = 20000 :: Int 255 | let setupALotOfFiles = do 256 | forM_ [1 .. 
aLot] $ \i -> do 257 | f <- parseRelFile $ "file" ++ show i 258 | writeFile (toFilePath $ sandbox </> f) $ 259 | "This is file " ++ show i ++ ".\n" 260 | beforeAll_ setup $ 261 | afterAll_ teardown $ do 262 | describe "hashFilePath" $ do 263 | beforeAll_ setupALotOfFiles $ do 264 | it 265 | ("has no problem with hashing a directory of " ++ 266 | show aLot ++ " files") $ do 267 | sb <- resolveFile' "test_sandbox" 268 | let d = AbsP sb 269 | hashFilePath d `shouldNotReturn` HashDigest (hash ()) 270 | -------------------------------------------------------------------------------- /test/SuperUserSpark/EndToEnd/RegressionSpec.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell #-} 2 | {-# LANGUAGE LambdaCase #-} 3 | 4 | module SuperUserSpark.EndToEnd.RegressionSpec 5 | ( spec 6 | ) where 7 | 8 | import TestImport 9 | 10 | import System.Directory 11 | import System.Environment (withArgs) 12 | import System.Exit (ExitCode(ExitFailure)) 13 | import System.Posix.Files 14 | 15 | import SuperUserSpark 16 | import SuperUserSpark.Utils 17 | 18 | spec :: Spec 19 | spec = linkThenCopySpec 20 | 21 | linkThenCopySpec :: Spec 22 | linkThenCopySpec = do 23 | here <- runIO getCurrentDir 24 | let sandbox = here </> $(mkRelDir "test_sandbox") 25 | let setup = ensureDir sandbox 26 | let teardown = removeDirRecur sandbox 27 | beforeAll_ setup $ 28 | afterAll_ teardown $ 29 | describe "link then copy regression" $ do 30 | let runSpark args = do 31 | putStrLn . unwords $ "spark" : args 32 | withArgs args spark 33 | it 34 | "ensures that deploy fails when there is already a link that points to the file that is being copied." $ 35 | withCurrentDir sandbox $ do 36 | let cf = sandbox </> $(mkRelFile "cardfile.sus") 37 | let file = $(mkRelFile "file") 38 | let from = sandbox </> $(mkRelDir "from") </> file 39 | let to = sandbox </> $(mkRelDir "to") </> file 40 | -- Set up the file 41 | ensureDir (parent from) 42 | writeFile (toFilePath from) "contents" 43 | let setUpCardFile cf = do 44 | runSpark ["parse", toFilePath cf] 45 | runSpark ["compile", toFilePath cf] 46 | runSpark ["bake", toFilePath cf] 47 | runSpark ["check", toFilePath cf] 48 | writeFile (toFilePath from) "contents" 49 | -- Set up the first card file 50 | writeFile 51 | (toFilePath cf) 52 | "card link { kind link; into to; outof from; file }" 53 | setUpCardFile cf 54 | runSpark ["deploy", toFilePath cf] 55 | -- Set up the second card file 56 | writeFile 57 | (toFilePath cf) 58 | "card link { kind copy; into to; outof from; file }" 59 | setUpCardFile cf 60 | runSpark ["deploy", toFilePath cf] `shouldThrow` 61 | (\case 62 | ExitFailure _ -> True 63 | _ -> False) 64 | -------------------------------------------------------------------------------- /test/SuperUserSpark/EndToEndSpec.hs: -------------------------------------------------------------------------------- 1 | module SuperUserSpark.EndToEndSpec 2 | ( spec 3 | ) where 4 | 5 | import TestImport hiding ((</>), removeFile, copyFile) 6 | 7 | import qualified Prelude as P (writeFile, readFile) 8 | 9 | import SuperUserSpark 10 | import SuperUserSpark.Utils 11 | import System.Directory 12 | import System.Environment (withArgs) 13 | import System.Exit (ExitCode(ExitSuccess)) 14 | import System.FilePath.Posix ((</>)) 15 | import System.Posix.Files 16 | 17 | spec :: Spec 18 | spec = do 19 | helpTextSpec 20 | regularWorkflowSpec 21 | 22 | helpTextSpec :: Spec 23 | helpTextSpec = 24 | describe "help text test" $ do 25 | it "shows the help text without crashing" $ do 26 | withArgs 
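-- `withArgs` (from System.Environment) swaps in a fake argument list for
-- the duration of the action, so the call below runs `spark` as though it
-- had been invoked from the command line: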
["--help"] spark `shouldThrow` (== ExitSuccess) 27 | 28 | regularWorkflowSpec :: Spec 29 | regularWorkflowSpec = do 30 | here <- runIO getCurrentDirectory 31 | let sandbox = here "test_sandbox" 32 | let setup = createDirectoryIfMissing True sandbox 33 | let teardown = removeDirectoryRecursive sandbox 34 | let rsc = here "test_resources" "end-to-end" 35 | beforeAll_ setup $ 36 | afterAll_ teardown $ 37 | describe "standard bash card test" $ do 38 | let bashrsc = rsc "bash.sus" 39 | let bashrscres = rsc "bash.sus.res" 40 | let cardfile = sandbox "bash.sus" 41 | let up = do 42 | copyFile bashrsc cardfile 43 | withCurrentDirectory sandbox $ do 44 | createDirectoryIfMissing True "bash" 45 | withCurrentDirectory "bash" $ do 46 | P.writeFile "bash_aliases" "bash_aliases" 47 | P.writeFile "bashrc" "bashrc" 48 | P.writeFile "bash_profile" "bash_profile" 49 | let down = do 50 | removeFile cardfile 51 | withCurrentDirectory sandbox $ 52 | removeDirectoryRecursive "bash" 53 | beforeAll_ up $ 54 | afterAll_ down $ do 55 | it "parses correcty" $ 56 | withCurrentDirectory sandbox $ 57 | withArgs ["parse", cardfile] spark `shouldReturn` 58 | () 59 | it "compiles correctly" $ do 60 | let outfile = sandbox "bash.sus.res" 61 | withCurrentDirectory sandbox $ 62 | withArgs 63 | ["compile", cardfile, "--output", outfile] 64 | spark `shouldReturn` 65 | () 66 | actual <- P.readFile outfile 67 | expected <- P.readFile bashrscres 68 | unless (actual == expected) $ expectationFailure $ unlines ["Expected and actual differ:", expected, actual] 69 | it "checks without exceptions" $ 70 | withCurrentDirectory sandbox $ 71 | withArgs ["check", cardfile] spark `shouldReturn` 72 | () 73 | it "deploys correctly" $ 74 | withCurrentDirectory sandbox $ do 75 | withArgs ["deploy", cardfile] spark `shouldReturn` 76 | () 77 | let f1 = "subdir" ".bashrc" 78 | f2 = "subdir" ".bash_aliases" 79 | f3 = "subdir" ".bash_profile" 80 | P.readFile f1 `shouldReturn` "bashrc" 81 | P.readFile f2 `shouldReturn` "bash_aliases" 82 | P.readFile f3 `shouldReturn` "bash_profile" 83 | removeLink f1 84 | removeLink f2 85 | removeLink f3 86 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Language/Gen.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wno-orphans #-} 2 | 3 | module SuperUserSpark.Language.Gen where 4 | 5 | import TestImport 6 | 7 | import SuperUserSpark.CoreTypes 8 | import SuperUserSpark.Language.Types 9 | 10 | instance GenUnchecked SparkFile where 11 | genUnchecked = SparkFile <$> genUnchecked <*> genUnchecked 12 | 13 | instance GenValid SparkFile 14 | 15 | instance GenUnchecked Card where 16 | genUnchecked = Card <$> genUnchecked <*> genUnchecked 17 | 18 | instance GenValid Card 19 | 20 | instance GenUnchecked Declaration where 21 | genUnchecked = resize 5 $ sized go 22 | where 23 | go 0 = 24 | oneof 25 | [ SparkOff <$> genUnchecked 26 | , Deploy <$> genUnchecked <*> genUnchecked <*> genUnchecked 27 | , IntoDir <$> genUnchecked 28 | , OutofDir <$> genUnchecked 29 | , DeployKindOverride <$> genUnchecked 30 | , Alternatives <$> genUnchecked 31 | , Block <$> genUnchecked 32 | ] 33 | go n = 34 | oneof 35 | [ SparkOff <$> genUnchecked 36 | , Deploy <$> genUnchecked <*> genUnchecked <*> genUnchecked 37 | , IntoDir <$> genUnchecked 38 | , OutofDir <$> genUnchecked 39 | , DeployKindOverride <$> genUnchecked 40 | , Alternatives <$> genUnchecked 41 | , Block <$> listOf (go $ n - 1) 42 | ] 43 | 44 | instance GenValid Declaration 45 | 
46 | instance GenUnchecked DeploymentKind where 47 | genUnchecked = elements [LinkDeployment, CopyDeployment] 48 | 49 | instance GenValid DeploymentKind 50 | 51 | instance GenUnchecked CardNameReference where 52 | genUnchecked = CardNameReference <$> genUnchecked 53 | 54 | instance GenValid CardNameReference 55 | 56 | instance GenUnchecked CardFileReference where 57 | genUnchecked = CardFileReference <$> genUnchecked <*> genUnchecked 58 | 59 | instance GenValid CardFileReference 60 | 61 | instance GenUnchecked CardReference where 62 | genUnchecked = oneof [CardFile <$> genUnchecked, CardName <$> genUnchecked] 63 | 64 | instance GenValid CardReference 65 | -------------------------------------------------------------------------------- /test/SuperUserSpark/OptParse/Gen.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wno-orphans #-} 2 | 3 | module SuperUserSpark.OptParse.Gen where 4 | 5 | import TestImport 6 | 7 | import SuperUserSpark.OptParse.Types 8 | 9 | instance GenUnchecked Dispatch 10 | 11 | instance GenValid Dispatch 12 | 13 | instance GenUnchecked ParseArgs 14 | 15 | instance GenValid ParseArgs 16 | 17 | instance GenUnchecked CompileArgs 18 | 19 | instance GenValid CompileArgs 20 | 21 | instance GenUnchecked CompileFlags 22 | 23 | instance GenValid CompileFlags 24 | 25 | instance GenUnchecked BakeArgs 26 | 27 | instance GenValid BakeArgs 28 | 29 | instance GenUnchecked BakeFlags 30 | 31 | instance GenValid BakeFlags 32 | 33 | instance GenUnchecked CheckArgs 34 | 35 | instance GenValid CheckArgs 36 | 37 | instance GenUnchecked CheckFlags 38 | 39 | instance GenValid CheckFlags 40 | 41 | instance GenUnchecked DiagnoseArgs 42 | 43 | instance GenValid DiagnoseArgs 44 | 45 | instance GenUnchecked DiagnoseFlags 46 | 47 | instance GenValid DiagnoseFlags 48 | 49 | instance GenUnchecked DeployArgs 50 | 51 | instance GenValid DeployArgs 52 | 53 | instance GenUnchecked DeployFlags 54 | 55 | instance GenValid DeployFlags 56 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Parser/Gen.hs: -------------------------------------------------------------------------------- 1 | module SuperUserSpark.Parser.Gen where 2 | 3 | import TestImport 4 | 5 | import Data.List (isSuffixOf) 6 | 7 | generateNormalCharacter :: Gen Char 8 | generateNormalCharacter = 9 | elements $ ['a' .. 'z'] ++ ['A' .. 'Z'] ++ ['0' .. 
'9'] 10 | 11 | generateWord :: Gen String 12 | generateWord = listOf1 generateNormalCharacter 13 | 14 | generateTab :: Gen Char 15 | generateTab = return '\t' 16 | 17 | generateSpace :: Gen Char 18 | generateSpace = return ' ' 19 | 20 | generateLineFeed :: Gen Char 21 | generateLineFeed = return '\n' 22 | 23 | generateCarriageReturn :: Gen Char 24 | generateCarriageReturn = return '\r' 25 | 26 | generateLineSpace :: Gen String 27 | generateLineSpace = listOf $ oneof [generateTab, generateSpace] 28 | 29 | generateWhiteSpace :: Gen String 30 | generateWhiteSpace = 31 | listOf $ 32 | oneof [generateTab, generateSpace, generateLineFeed, generateCarriageReturn] 33 | 34 | generateWords :: Gen String 35 | generateWords = unwords <$> listOf1 generateWord 36 | 37 | generateEol :: Gen String 38 | generateEol = elements ["\n", "\r", "\r\n"] 39 | 40 | twice :: Gen a -> Gen (a, a) 41 | twice gen = (,) <$> gen <*> gen 42 | 43 | trice :: Gen a -> Gen (a, a, a) 44 | trice gen = (,,) <$> gen <*> gen <*> gen 45 | 46 | generateCardName :: Gen (String, String) 47 | generateCardName = oneof [generateQuotedIdentifier, generatePlainIdentifier] 48 | 49 | generateIdentifier :: Gen (String, String) 50 | generateIdentifier = oneof [generatePlainIdentifier, generateQuotedIdentifier] 51 | 52 | generateQuotedIdentifier :: Gen (String, String) 53 | generateQuotedIdentifier = do 54 | w <- generateWord 55 | return ("\"" ++ w ++ "\"", w) 56 | 57 | generatePlainIdentifier :: Gen (String, String) 58 | generatePlainIdentifier = do 59 | w <- generateWord 60 | return (w, w) 61 | 62 | generateFilePath :: Gen (FilePath, FilePath) 63 | generateFilePath = 64 | generateIdentifier `suchThat` (\(_, f) -> not $ "/" `isSuffixOf` f) 65 | 66 | generateDirectory :: Gen (FilePath, FilePath) 67 | generateDirectory = generateFilePath 68 | 69 | generateComment :: Gen (String, String) 70 | generateComment = oneof [generateLineComment, generateBlockComment] 71 | 72 | generateLineComment :: Gen (String, String) 73 | generateLineComment = do 74 | ws <- generateWords 75 | let ws' = "#" ++ ws ++ "\n" 76 | return (ws', ws) 77 | 78 | generateBlockComment :: Gen (String, String) 79 | generateBlockComment = do 80 | ws <- generateWords 81 | let ws' = "[[" ++ ws ++ "]]" 82 | return (ws', ws) 83 | 84 | generateDeploymentKindSymbol :: Gen String 85 | generateDeploymentKindSymbol = elements ["l->", "c->", "->"] 86 | -------------------------------------------------------------------------------- /test/SuperUserSpark/Parser/TestUtils.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell #-} 2 | 3 | module SuperUserSpark.Parser.TestUtils where 4 | 5 | import TestImport hiding (succeeds) 6 | 7 | import Data.Either (isRight) 8 | import SuperUserSpark.Parser.Internal 9 | import Text.Parsec 10 | import Text.Parsec.String 11 | 12 | shouldSucceed 13 | :: (Show a, Eq a) 14 | => Parser a -> String -> IO () 15 | shouldSucceed parser input = input `shouldSatisfy` succeeds parser 16 | 17 | shouldFail 18 | :: (Show a, Eq a) 19 | => Parser a -> String -> IO () 20 | shouldFail parser input = input `shouldNotSatisfy` succeeds parser 21 | 22 | succeeds :: Parser a -> String -> Bool 23 | succeeds parser = succeedsWithLeftover $ parser >> eof 24 | 25 | succeedsWithLeftover :: Parser a -> String -> Bool 26 | succeedsWithLeftover parser input = isRight $ parseWithoutSource parser input 27 | 28 | succeedsAnywhere :: Parser a -> String -> Bool 29 | succeedsAnywhere p s = any (succeedsWithLeftover p) $ tails s 30 | 
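-- The local `tails` below mirrors Data.List.tails, presumably redefined so
-- that this module needs nothing beyond what TestImport re-exports: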
where 31 | tails :: [a] -> [[a]] 32 | tails [] = [[]] 33 | tails ass@(_:as) = ass : tails as 34 | 35 | fails :: Parser a -> String -> Bool 36 | fails parser input = not $ succeeds parser input 37 | 38 | testInputSource :: Path Abs File 39 | testInputSource = $(mkAbsFile "/Test/input/file") 40 | 41 | parseShouldSucceedAs 42 | :: (Show a, Eq a) 43 | => Parser a -> String -> a -> IO () 44 | parseShouldSucceedAs parser input a = 45 | parseFromSource parser testInputSource input `shouldBe` Right a 46 | 47 | parseShouldBe 48 | :: (Show a, Eq a) 49 | => Parser a -> String -> Either ParseError a -> IO () 50 | parseShouldBe parser input result = 51 | parseFromSource parser testInputSource input `shouldBe` result 52 | 53 | parseWithoutSource :: Parser a -> String -> Either ParseError a 54 | parseWithoutSource parser = parseFromSource parser testInputSource 55 | -------------------------------------------------------------------------------- /test/SuperUserSpark/PreCompiler/Gen.hs: -------------------------------------------------------------------------------- 1 | {-# OPTIONS_GHC -Wno-orphans #-} 2 | 3 | module SuperUserSpark.PreCompiler.Gen where 4 | 5 | import TestImport 6 | 7 | import SuperUserSpark.Language.Gen () 8 | import SuperUserSpark.PreCompiler.Types 9 | 10 | instance GenUnchecked PreCompileError 11 | 12 | instance GenValid PreCompileError 13 | -------------------------------------------------------------------------------- /test/TestImport.hs: -------------------------------------------------------------------------------- 1 | module TestImport 2 | ( module X 3 | ) where 4 | 5 | import Prelude as X 6 | 7 | import Path as X 8 | import Path.IO as X 9 | 10 | import Control.Monad as X 11 | import Control.Monad.Except as X 12 | import Control.Monad.IO.Class as X (MonadIO(..)) 13 | import Control.Monad.Identity as X 14 | import Control.Monad.Reader as X 15 | import Control.Monad.State as X 16 | import Control.Monad.Writer as X 17 | 18 | import Debug.Trace as X 19 | 20 | import Test.Hspec as X 21 | import Test.QuickCheck as X 22 | 23 | import Data.GenValidity as X 24 | import Data.GenValidity.Path as X () 25 | import Data.Validity as X () 26 | import Data.Validity.Path as X () 27 | import Test.Validity as X 28 | import Test.Validity.Aeson as X 29 | 30 | import System.FilePath as X (dropTrailingPathSeparator) 31 | -------------------------------------------------------------------------------- /test/TestUtils.hs: -------------------------------------------------------------------------------- 1 | {-# LANGUAGE TemplateHaskell #-} 2 | 3 | module TestUtils where 4 | 5 | import TestImport 6 | 7 | shouldParseDir :: Path Rel Dir 8 | shouldParseDir = $(mkRelDir "shouldParse") 9 | 10 | shouldNotParseDir :: Path Rel Dir 11 | shouldNotParseDir = $(mkRelDir "shouldNotParse") 12 | 13 | shouldCompileDir :: Path Rel Dir 14 | shouldCompileDir = $(mkRelDir "shouldCompile") 15 | 16 | shouldNotCompileDir :: Path Rel Dir 17 | shouldNotCompileDir = $(mkRelDir "shouldNotCompile") 18 | 19 | concerningContents :: (Path Abs File -> String -> SpecWith a) 20 | -> (Path Abs File -> SpecWith a) 21 | concerningContents func file = (runIO . 
readFile $ toFilePath file) >>= func file 22 | 23 | forFileInDirss :: [Path Abs Dir] -> (Path Abs File -> SpecWith a) -> SpecWith a 24 | forFileInDirss [] _ = return () 25 | forFileInDirss dirs func = 26 | forM_ dirs $ \dir -> do 27 | exists <- runIO $ doesDirExist dir 28 | when exists $ do 29 | files <- runIO $ snd <$> listDirRecur dir 30 | forM_ files func 31 | 32 | pend :: SpecWith () 33 | pend = it "is still missing some tests" pending 34 | -------------------------------------------------------------------------------- /test_resources/end-to-end/bash.sus: -------------------------------------------------------------------------------- 1 | card bash { 2 | into subdir 3 | outof bash 4 | 5 | .bashrc 6 | .bash_aliases 7 | .bash_profile 8 | } 9 | -------------------------------------------------------------------------------- /test_resources/end-to-end/bash.sus.res: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "deployment kind": "link", 4 | "directions": { 5 | "destination": "subdir/.bashrc", 6 | "sources": [ 7 | "bash/.bashrc", 8 | "bash/bashrc" 9 | ] 10 | } 11 | }, 12 | { 13 | "deployment kind": "link", 14 | "directions": { 15 | "destination": "subdir/.bash_aliases", 16 | "sources": [ 17 | "bash/.bash_aliases", 18 | "bash/bash_aliases" 19 | ] 20 | } 21 | }, 22 | { 23 | "deployment kind": "link", 24 | "directions": { 25 | "destination": "subdir/.bash_profile", 26 | "sources": [ 27 | "bash/.bash_profile", 28 | "bash/bash_profile" 29 | ] 30 | } 31 | } 32 | ] -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/alternatives.sus: -------------------------------------------------------------------------------- 1 | card alternatives { 2 | alternatives 1 2 3 4 3 | alternatives a b c d 4 | 5 | file 6 | } 7 | -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/alternatives.sus.res: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "deployment kind": "link", 4 | "directions": { 5 | "destination": "file", 6 | "sources": [ 7 | "1/a/file", 8 | "1/b/file", 9 | "1/c/file", 10 | "1/d/file", 11 | "2/a/file", 12 | "2/b/file", 13 | "2/c/file", 14 | "2/d/file", 15 | "3/a/file", 16 | "3/b/file", 17 | "3/c/file", 18 | "3/d/file", 19 | "4/a/file", 20 | "4/b/file", 21 | "4/c/file", 22 | "4/d/file" 23 | ] 24 | } 25 | } 26 | ] -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/bash.sus: -------------------------------------------------------------------------------- 1 | card bash { 2 | into "~" 3 | 4 | .bashrc 5 | .bash_aliases 6 | .bash_profile 7 | } 8 | -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/bash.sus.res: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "deployment kind": "link", 4 | "directions": { 5 | "destination": "~/.bashrc", 6 | "sources": [ 7 | ".bashrc", 8 | "bashrc" 9 | ] 10 | } 11 | }, 12 | { 13 | "deployment kind": "link", 14 | "directions": { 15 | "destination": "~/.bash_aliases", 16 | "sources": [ 17 | ".bash_aliases", 18 | "bash_aliases" 19 | ] 20 | } 21 | }, 22 | { 23 | "deployment kind": "link", 24 | "directions": { 25 | "destination": "~/.bash_profile", 26 | "sources": [ 27 | ".bash_profile", 28 | "bash_profile" 29 | ] 30 | } 31 | } 32 | ] 
-------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/internal_sparkoff.sus: -------------------------------------------------------------------------------- 1 | card one { 2 | spark card two 3 | spark card five 4 | } 5 | 6 | card two { 7 | two 8 | spark card three 9 | spark card four 10 | } 11 | 12 | card three { 13 | three 14 | } 15 | 16 | card four { 17 | four 18 | } 19 | 20 | card five { 21 | five 22 | spark card six 23 | } 24 | 25 | card six { 26 | six 27 | } 28 | -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/internal_sparkoff.sus.res: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "deployment kind": "link", 4 | "directions": { 5 | "destination": "two", 6 | "sources": [ 7 | "two" 8 | ] 9 | } 10 | }, 11 | { 12 | "deployment kind": "link", 13 | "directions": { 14 | "destination": "three", 15 | "sources": [ 16 | "three" 17 | ] 18 | } 19 | }, 20 | { 21 | "deployment kind": "link", 22 | "directions": { 23 | "destination": "four", 24 | "sources": [ 25 | "four" 26 | ] 27 | } 28 | }, 29 | { 30 | "deployment kind": "link", 31 | "directions": { 32 | "destination": "five", 33 | "sources": [ 34 | "five" 35 | ] 36 | } 37 | }, 38 | { 39 | "deployment kind": "link", 40 | "directions": { 41 | "destination": "six", 42 | "sources": [ 43 | "six" 44 | ] 45 | } 46 | } 47 | ] -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/nesting.sus: -------------------------------------------------------------------------------- 1 | card complex { 2 | into ~ 3 | outof complex 4 | { 5 | into a 6 | outof x 7 | { 8 | into b 9 | outof y 10 | { 11 | into c 12 | outof z 13 | 14 | file 15 | } 16 | } 17 | } 18 | } 19 | -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/nesting.sus.res: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "deployment kind": "link", 4 | "directions": { 5 | "destination": "~/a/b/c/file", 6 | "sources": [ 7 | "complex/x/y/z/file" 8 | ] 9 | } 10 | } 11 | ] -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/sub.sus: -------------------------------------------------------------------------------- 1 | card sub { 2 | spark file sub/subfile.sus 3 | spark card other 4 | } 5 | 6 | card other { 7 | file.txt 8 | } 9 | -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/sub.sus.res: -------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "deployment kind": "link", 4 | "directions": { 5 | "destination": "thing.txt", 6 | "sources": [ 7 | "sub/thing.txt" 8 | ] 9 | } 10 | }, 11 | { 12 | "deployment kind": "link", 13 | "directions": { 14 | "destination": "file.txt", 15 | "sources": [ 16 | "file.txt" 17 | ] 18 | } 19 | } 20 | ] -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/sub/subfile.sus: -------------------------------------------------------------------------------- 1 | card subfile { 2 | thing.txt 3 | } 4 | -------------------------------------------------------------------------------- /test_resources/exact_compile_test_src/sub/subfile.sus.res: 
-------------------------------------------------------------------------------- 1 | [ 2 | { 3 | "deployment kind": "link", 4 | "directions": { 5 | "destination": "thing.txt", 6 | "sources": [ 7 | "thing.txt" 8 | ] 9 | } 10 | } 11 | ] -------------------------------------------------------------------------------- /test_resources/hop_test/hop1dir/hop1.sus: -------------------------------------------------------------------------------- 1 | card hop1 { 2 | into b 3 | outof x 4 | 5 | beta -> one 6 | spark file hop2dir/hop2.sus 7 | } 8 | -------------------------------------------------------------------------------- /test_resources/hop_test/hop1dir/hop2dir/hop2.sus: -------------------------------------------------------------------------------- 1 | card hop2 { 2 | into c 3 | outof y 4 | 5 | gamma -> two 6 | spark file hop3dir/hop3.sus 7 | } 8 | -------------------------------------------------------------------------------- /test_resources/hop_test/hop1dir/hop2dir/hop3dir/hop3.sus: -------------------------------------------------------------------------------- 1 | card hop3 { 2 | into d 3 | outof z 4 | 5 | delta -> three 6 | } 7 | -------------------------------------------------------------------------------- /test_resources/hop_test/root.sus: -------------------------------------------------------------------------------- 1 | card root { 2 | into a 3 | outof u 4 | 5 | alpha -> zero 6 | spark file hop1dir/hop1.sus 7 | } 8 | -------------------------------------------------------------------------------- /test_resources/shouldCompile/bash.sus: -------------------------------------------------------------------------------- 1 | card bash { 2 | into ~ 3 | 4 | .bashrc 5 | .bash_aliases 6 | .bash_profile 7 | .profile 8 | } 9 | -------------------------------------------------------------------------------- /test_resources/shouldCompile/complex.sus: -------------------------------------------------------------------------------- 1 | card main { 2 | spark card configs 3 | spark card poems 4 | } 5 | 6 | card configs { 7 | spark file bash.sus bash 8 | 9 | alternatives super-laptop shared 10 | into ~ 11 | 12 | { 13 | outof zsh 14 | 15 | .zshrc 16 | .zshenv 17 | .zlogin 18 | } 19 | 20 | vim -> .vim 21 | } 22 | 23 | card poems { 24 | outof poems 25 | into poems 26 | { 27 | kind copy 28 | 29 | "A windows file with spaces.txt" -> clean 30 | } 31 | { 32 | kind link 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /test_resources/shouldNotParse/empty_file.sus: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/NorfairKing/super-user-spark/3c286a52a0b81e083c0e6d39f6f2b02aa9bb9231/test_resources/shouldNotParse/empty_file.sus -------------------------------------------------------------------------------- /test_resources/shouldNotParse/missing_implementation.sus: -------------------------------------------------------------------------------- 1 | card "missing implementation" 2 | -------------------------------------------------------------------------------- /test_resources/shouldParse/empty_card.sus: -------------------------------------------------------------------------------- 1 | card "empty" {} 2 | -------------------------------------------------------------------------------- /test_resources/shouldParse/littered_with_comments.sus: -------------------------------------------------------------------------------- 1 | card [[muhaha]] bash #yadayada 2 | { # Wut 3 | into [[hehe]] ~ 4 | 
out[[xD]]of bash[[files!]] 5 | bashrc -[[wut]]> .bashrc 6 | } # These comments are annoying, right? 7 | -------------------------------------------------------------------------------- /test_resources/shouldParse/short_syntax.sus: -------------------------------------------------------------------------------- 1 | card "bash" { 2 | into ~ 3 | 4 | # Without the dot shortcut 5 | test.txt 6 | file 7 | 8 | # With the dot shortcut 9 | .bashrc 10 | .bash_aliases 11 | } 12 | -------------------------------------------------------------------------------- /test_resources/shouldParse/with_quotes.sus: -------------------------------------------------------------------------------- 1 | card name { 2 | "a"c->"x" 3 | "b"l->"y" 4 | "c"->"z" 5 | } 6 | --------------------------------------------------------------------------------