├── .gitignore ├── examples ├── swaysudo.Jannfile ├── cd.Jannfile ├── install.Jannfile ├── sway.Jannfile └── desktop.Jannfile ├── Cargo.toml ├── LICENSE ├── src ├── util.rs ├── main.rs ├── com.rs ├── deploy.rs ├── inter.rs ├── exec.rs ├── invoke.rs └── parse.rs ├── README.md └── Cargo.lock /.gitignore: -------------------------------------------------------------------------------- 1 | scratch/ 2 | target/ 3 | pipeline-main/ 4 | -------------------------------------------------------------------------------- /examples/swaysudo.Jannfile: -------------------------------------------------------------------------------- 1 | deploy { 2 | rofi/nasa.rasi => /usr/share/rofi/themes 3 | } 4 | 5 | main 6 | | deploy 7 | -------------------------------------------------------------------------------- /examples/cd.Jannfile: -------------------------------------------------------------------------------- 1 | test { 2 | $ mkdir bar 3 | bar/ -> { 4 | $ touch baz 5 | } 6 | $ touch spqr 7 | } 8 | 9 | main | test 10 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | name = "jann" 3 | version = "0.1.0" 4 | authors = ["Jack Byrne"] 5 | 6 | [dependencies] 7 | walkdir = "2" 8 | bitflags = "1.1.0" 9 | dirs = "2.0.2" 10 | regex = "1" 11 | -------------------------------------------------------------------------------- /examples/install.Jannfile: -------------------------------------------------------------------------------- 1 | // Jann can also be a neat way to install programs! 2 | // Use --enable switches to select a distro 3 | 4 | void { 5 | @command = "sudo xbps-install" 6 | @install = [sway, rofi, mako] 7 | } 8 | 9 | arch { 10 | @command = "sudo pacman -S" 11 | @install = [sway, rofi, mako, termite] 12 | } 13 | 14 | install { 15 | @install -> p { 16 | $ {{command}} {{p}} 17 | } 18 | } 19 | 20 | main 21 | : void 22 | : arch 23 | | install 24 | -------------------------------------------------------------------------------- /examples/sway.Jannfile: -------------------------------------------------------------------------------- 1 | // A Jannfile for a typical Sway based system 2 | // Actual configuration files not included in this example 3 | 4 | #sudo_include [swaysudo.Jannfile, privileged] 5 | 6 | deploy { 7 | rofi/config => ~/.config/rofi/ 8 | 9 | termite/config => ~/.config/termite 10 | 11 | sway/config => ~/.config/sway/ 12 | 13 | mako/config => ~/.config/mako 14 | 15 | waybar/ >> ~/.config/waybar 16 | 17 | misc/wallpaper.jpg >> ~/pic/wallpaper 18 | } 19 | 20 | refresh { 21 | $ swaymsg reload 22 | $ killall -USR1 termite 23 | } 24 | 25 | main 26 | | deploy 27 | | privileged 28 | | refresh 29 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Jack Byrne 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | 
copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /src/util.rs: -------------------------------------------------------------------------------- 1 | extern crate regex; 2 | 3 | use parse::Token; 4 | use std::process; 5 | 6 | pub struct Log<'src> { 7 | job : String, 8 | lines: &'src Vec, 9 | err_count: usize, 10 | } 11 | 12 | impl<'src> Log<'src> { 13 | pub fn new(job: String, lines: &'src Vec) -> Log<'src> { 14 | Log { 15 | job: job, 16 | lines: lines, 17 | err_count: 0, 18 | } 19 | } 20 | 21 | fn message(&self, lvl: &str, msg: &str, hint: &str, tok: &Token) { 22 | println!("{}: {}", lvl, msg); 23 | if tok.lno != 1 { 24 | let preln = self.lines.get(tok.lno - 2).unwrap(); 25 | if !preln.is_empty() { 26 | println!("{:>4} | {}", tok.lno - 1, preln); 27 | } 28 | } 29 | println!("{:>4} | {}", tok.lno, self.lines.get(tok.lno - 1).unwrap()); 30 | println!(" |{}{}", &" ".repeat(1 + tok.val.lptr), &"^".repeat((tok.val.rptr - tok.val.lptr) + 1)); 31 | println!("hint: {}\n", hint); 32 | } 33 | 34 | pub fn has_err(&self) -> bool { 35 | self.err_count > 0 36 | } 37 | 38 | pub fn conclude(&self) -> ! { 39 | if self.err_count == 0 { 40 | println!("\n[{}] success", self.job); 41 | process::exit(0); 42 | } 43 | else { 44 | println!("\n[{}] failed", self.job); 45 | process::exit(1); 46 | } 47 | } 48 | 49 | pub fn error(&mut self, msg: &str, hint: &str, tok: &Token) { 50 | self.message("error", msg, hint, tok); 51 | self.err_count += 1; 52 | } 53 | 54 | pub fn terminal(&mut self, msg: &str, hint: &str, tok: &Token) -> ! { 55 | self.message("error", msg, hint, tok); 56 | self.die(); 57 | } 58 | 59 | pub fn sys_terminal(&mut self, msg: &str) -> ! { 60 | println!("error: {}", msg); 61 | self.die(); 62 | } 63 | 64 | pub fn die(&mut self) -> ! 
{ 65 | self.err_count += 1; 66 | self.conclude() 67 | } 68 | } 69 | -------------------------------------------------------------------------------- /examples/desktop.Jannfile: -------------------------------------------------------------------------------- 1 | // A Jannfile to install a Wayland desktop from source 2 | // wlroots, sway, mako, and rofi are built and installed 3 | 4 | init_ubuntu { 5 | @install = "sudo apt install" 6 | [git, meson] -> p { 7 | $ {{install}} {{p}} 8 | } 9 | } 10 | 11 | init 12 | | init_ubuntu [ubuntu] 13 | 14 | wlr_ubuntu { 15 | ["libwayland-*", "wayland-protocols", "libegl1-mesa-dev", "libgles2-mesa-dev", 16 | "libgbm-dev", "libinput-dev", "libxkbcommon-dev", "libpixman-1-dev", 17 | "libxcb-xinput-dev", "libsystemd-dev"] -> p { 18 | $ {{install}} {{p}} 19 | } 20 | } 21 | 22 | 23 | wlr_build { 24 | $ git clone https://github.com/swaywm/wlroots 25 | wlroots/ -> { 26 | $ meson build 27 | $ ninja -C build 28 | $ sudo ninja -C build install 29 | } 30 | } 31 | 32 | wlroots 33 | | wlr_ubuntu [ubuntu] 34 | | wlr_build 35 | 36 | sway_ubuntu { 37 | ["libjson-c-dev", "libcairo2-dev", "libpango1.0-dev", "libelogind-dev", "libgdk-pixbuf2.0-dev"] -> p { 38 | $ {{install}} {{p}} 39 | } 40 | } 41 | 42 | sway_build { 43 | $ git clone https://github.com/swaywm/sway 44 | sway/ -> { 45 | $ PKG_CONFIG_PATH=/usr/local/lib64/ meson build 46 | $ ninja -C build 47 | $ sudo ninja -C build install 48 | } 49 | } 50 | 51 | sway 52 | | sway_ubuntu [ubuntu] 53 | | sway_build 54 | 55 | rofi_ubuntu { 56 | ["libgdk-pixbuf2.0-dev", "librsvg2-dev", "libxcb-util-dev", "libxcb-xkb-dev", 57 | "libxkbcommon-x11-dev", "libxcb-ewmh-dev", "libxcb-icccm4-dev", "libxcb-xrm-dev", 58 | "libxcb-xinerama0-dev", "libstartup-notification0-dev", "flex", "bison"] -> p { 59 | $ {{install}} {{p}} 60 | } 61 | } 62 | 63 | rofi_build { 64 | $ git clone https://github.com/davatorium/rofi 65 | rofi/ -> { 66 | $ git submodule update --init 67 | $ meson setup build 68 | $ ninja -C build 69 | $ sudo ninja -C build install 70 | } 71 | } 72 | 73 | rofi 74 | | rofi_ubuntu [ubuntu] 75 | | rofi_build 76 | 77 | mako { 78 | $ git clone https://github.com/emersion/mako 79 | mako/ -> { 80 | $ meson build 81 | $ ninja -C build 82 | $ sudo ninja -C build install 83 | } 84 | } 85 | 86 | main 87 | | init 88 | | wlroots 89 | | sway 90 | | rofi 91 | | mako 92 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | use std::io; 2 | use std::io::{BufRead, BufReader}; 3 | use std::path::PathBuf; 4 | use std::env; 5 | use std::process; 6 | use std::fs::File; 7 | 8 | mod com; 9 | mod parse; 10 | mod util; 11 | mod invoke; 12 | mod exec; 13 | mod inter; 14 | mod deploy; 15 | 16 | fn main() { 17 | /* Parse command line arguments */ 18 | 19 | let command = com::Command::new(); 20 | 21 | let (lines, switches, job) = match command { 22 | com::Command::HELP { code } => { 23 | println!("jann - Configuration deployment utility for *nix"); 24 | process::exit(code); 25 | }, 26 | com::Command::VERSION { code } => { 27 | println!("jann v0.1.0"); 28 | process::exit(code); 29 | }, 30 | com::Command::DO_STDIN { switches } => { 31 | let stdin = io::stdin(); 32 | let lines: Vec = stdin.lock().lines().map(|l| l.unwrap()).collect(); 33 | (lines, switches, String::from("stdin")) 34 | }, 35 | com::Command::DO_FILE { switches, file: path } => { 36 | let file = File::open(&path); 37 | let file = file.unwrap_or_else( |_| { 38 | 
println!("error: no such file {}", path); 39 | process::exit(66); 40 | }); 41 | let reader = BufReader::new(file); 42 | let lines: Vec = reader.lines().map(|l| l.unwrap()).collect(); 43 | (lines, switches, path) 44 | }, 45 | }; 46 | 47 | // println!("Switches: {:?}", switches); 48 | 49 | 50 | let mut log = util::Log::new(job, &lines); 51 | 52 | /* Tokenise input data */ 53 | 54 | let mut toks = vec![]; 55 | 56 | let mut id: usize = 1; 57 | let mut lno: usize = 1; 58 | for index in 0..(lines.len()) { 59 | toks.extend(parse::tokenise(&mut log, lno, &mut id, &lines[index])); 60 | lno += 1; 61 | } 62 | if log.has_err() { 63 | log.conclude(); 64 | } 65 | 66 | //println!("{:#?}", &toks); 67 | 68 | /* Create parse tree for input data */ 69 | 70 | let tree = parse::parse(&mut log, &toks); 71 | if log.has_err() { 72 | log.conclude(); 73 | } 74 | 75 | //tree.print_tree(); 76 | 77 | /* Get entry-point */ 78 | 79 | let mut pl_name = String::from("main"); 80 | let mut opts = deploy::DepOpt { OW_FF: true, OW_DD: true, OW_FD: false, OW_DF: true, INTER: true}; 81 | for (com, refs) in &switches { 82 | match (*com).as_ref() { 83 | "execute" => { 84 | if let Some(com::Reference::PIPELINE(pl)) = refs.get(0) { 85 | pl_name = pl.to_string(); 86 | } 87 | }, 88 | "allow" => { 89 | for r in refs { 90 | if let com::Reference::FLAG(r) = r { 91 | match r.to_lowercase().as_ref() { 92 | "ff" => {opts.OW_FF = true;}, 93 | "dd" => {opts.OW_DD = true;}, 94 | "fd" => {opts.OW_FD = true;}, 95 | "df" => {opts.OW_DF = true;}, 96 | "inter" => {opts.OW_DF = true;}, 97 | _ => (), 98 | } 99 | } 100 | } 101 | }, 102 | "forbid" => { 103 | for r in refs { 104 | if let com::Reference::FLAG(r) = r { 105 | match r.to_lowercase().as_ref() { 106 | "ff" => {opts.OW_FF = false;}, 107 | "dd" => {opts.OW_DD = false;}, 108 | "fd" => {opts.OW_FD = false;}, 109 | "df" => {opts.OW_DF = false;}, 110 | "inter" => {opts.OW_DF = false;}, 111 | _ => (), 112 | } 113 | } 114 | } 115 | }, 116 | _ => (), 117 | } 118 | } 119 | 120 | /* Execute parsed Jannfile */ 121 | 122 | let art = inter::Artifact::new(&toks, &tree); 123 | let cwd = env::current_dir().expect("Could not get cwd"); 124 | // use ./deploy as execution directory for now 125 | let edir = cwd.join("deploy"); 126 | let inv = invoke::Invocation { 127 | root: cwd, 128 | edir, 129 | opts, 130 | pl_name, 131 | art: art, 132 | switches: switches, 133 | }; 134 | inv.invoke(&mut log); 135 | 136 | log.conclude(); 137 | } 138 | 139 | -------------------------------------------------------------------------------- /src/com.rs: -------------------------------------------------------------------------------- 1 | use std::env; 2 | use util; 3 | use std::collections::HashMap; 4 | use std::process; 5 | 6 | #[derive(Debug, Clone)] 7 | pub enum Reference { 8 | // --enable, --disable 9 | TAG(String), 10 | STAGE(String), 11 | PL_TAG(String, String), 12 | PL_STAGE(String, String), 13 | ALL, 14 | 15 | // --execute 16 | PIPELINE(String), 17 | 18 | // --allow, --forbid 19 | FLAG(String), 20 | } 21 | 22 | pub type Switches = Vec<(String, Vec)>; 23 | 24 | pub enum Command { 25 | VERSION { code: i32 }, 26 | HELP { code: i32 }, 27 | DO_STDIN { switches: Switches }, 28 | DO_FILE { switches: Switches, file: String }, 29 | } 30 | 31 | fn is_verb(s: &str) -> bool { 32 | match s { 33 | "execute" | "allow" | "forbid" | "enable" | "disable" => true, 34 | _ => false, 35 | } 36 | } 37 | 38 | fn parse_switches(args : env::Args) -> Result { 39 | let mut switches = Switches::new(); 40 | let mut cur_verb : Option = None; 41 | let mut 
cur_args = vec![]; 42 | for mut arg in args { 43 | if arg.starts_with("--") { 44 | if let Some(ref verb) = cur_verb { 45 | switches.push((verb.to_string(), cur_args)); 46 | cur_args = vec![]; 47 | } 48 | let cv = arg.split_off(2).to_string(); 49 | if !is_verb(&cv) { 50 | return Err(Command::HELP { code: 64 }); 51 | } 52 | cur_verb = Some(cv); 53 | } 54 | else { 55 | match cur_verb { 56 | None => { 57 | println!("Expected a verb (such as --enable) in the position of the argument {}", arg); 58 | process::exit(1); 59 | }, 60 | Some(ref verb) => { 61 | if verb == "execute" { 62 | cur_args.push(Reference::PIPELINE(arg)); 63 | } 64 | else if verb == "allow" || verb == "forbid" { 65 | cur_args.push(Reference::FLAG(arg)) 66 | } 67 | else if verb == "enable" || verb == "disable" { 68 | if arg == "*" { 69 | cur_args.push(Reference::ALL); 70 | } 71 | if arg.starts_with("%") { 72 | cur_args.push(Reference::TAG(arg.split_off(1).to_string())); 73 | } 74 | else { 75 | let parts = arg.split(".").collect::>(); 76 | if parts.len() == 1 { 77 | cur_args.push(Reference::STAGE(arg)); 78 | } 79 | else { 80 | if parts[1].starts_with("%") { 81 | cur_args.push(Reference::PL_TAG(parts[0].to_string(), 82 | parts[1].to_string() 83 | .split_off(1) 84 | .to_string())); 85 | } 86 | else { 87 | cur_args.push(Reference::PL_STAGE(parts[0].to_string(), 88 | parts[1].to_string())); 89 | } 90 | } 91 | } 92 | } 93 | else { 94 | return Err(Command::HELP { code: 64 }); 95 | } 96 | } 97 | } 98 | } 99 | } 100 | 101 | if let Some(verb) = cur_verb { 102 | switches.push((verb, cur_args)); 103 | } 104 | 105 | Ok(switches) 106 | } 107 | 108 | impl Command { 109 | pub fn new() -> Command { 110 | let mut args = std::env::args(); 111 | let jann_bin = args.next(); 112 | match args.next() { 113 | Some(ref arg) => { 114 | match arg.as_ref() { 115 | "--version" => { return Command::VERSION { code: 0 }; }, 116 | "--help" => { return Command::HELP { code: 64 }; }, 117 | _ => (), 118 | } 119 | 120 | match parse_switches(args) { 121 | Ok(sw) => { 122 | if arg == "--" { 123 | return Command::DO_STDIN { switches: sw }; 124 | } 125 | else { 126 | return Command::DO_FILE { switches: sw, file: arg.clone() }; 127 | } 128 | }, 129 | Err(com) => { 130 | println!("Invalid command"); 131 | return com; 132 | } 133 | } 134 | }, 135 | None => Command::HELP { code: 64 }, 136 | } 137 | } 138 | } 139 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## jann 2 | 3 | `jann` is a configuration deployment utility for \*nix operating systems. 4 | 5 | *Disclaimer: `jann` is alpha software. I implore you not to test it on any system that matters. `jann` is a penknife not a butter knife, and can overwrite important data with ease.* 6 | 7 | The idea behind `jann` is a simple one - you put all your configuration files in one directory tree, then write a manifest called a `Jannfile` to specify where in your filesystem those configuration files should be copied to. Note the difference in philosophy to the traditional technique of using `GNU stow`, which relies on softlinks. 8 | 9 | A typical `jann` bundle might look something like this: 10 | 11 | ``` 12 | awesome_config/ 13 | Jannfile 14 | .bashrc 15 | .vimrc 16 | .vim/ 17 | ... 
18 | sway/ 19 | config 20 | status_config.toml 21 | wallpaper.png 22 | ``` 23 | 24 | The simple, readable `Jannfile` would then be as follows: 25 | 26 | ``` 27 | console { 28 | .bashrc => ~/ 29 | .vimrc => ~/ 30 | .vim => ~/ 31 | } 32 | 33 | graphical { 34 | sway/config => ~/.config/sway/ 35 | sway/status_config.toml => ~/.config/sway/ 36 | wallpaper.png >> ~/pic/wallpaper 37 | } 38 | 39 | main 40 | | console 41 | | graphical 42 | ``` 43 | 44 | The deployment may be completed in one simple command `jann Jannfile`. 45 | 46 | Note the two key structures here, **blocks** - named sequences of instructions surrounded by curly braces, and **pipelines** - named sequences of blocks (or other pipelines) to be run consecutively. The default pipeline is `main` - a different entry point can be specified with `--execute <pipeline>`. 47 | 48 | Note also the two different types of arrows used to represent two different types of copy operations - insertion copies (`=>`), where the left path is copied into the right path, and 'splatting' copies (`>>`), where the left path is copied directly onto the right path. 49 | 50 | Something important to note here is that when a directory is copied on top of another folder in `jann`, the original folder is completely deleted. This is a deliberate choice, but one which I realise goes against the behaviour of traditional tools and as such could catch the unwary user out. You can prevent any directories being overwritten with the switch `--forbid DD FD` - more on that later. 51 | 52 | This brief example does not cover much of `jann`'s functionality. Here are some examples of other features of `jann`. 53 | 54 | **Variables** 55 | 56 | `jann` supports local (scoped) and global variables, which can be interpolated into strings and commands. 57 | 58 | ``` 59 | foo { 60 | @glob = "Hello" 61 | loc = "Hiya" 62 | } 63 | 64 | bar { 65 | // Will echo Hello 66 | $ echo {{glob}} 67 | 68 | // Will also echo Hello 69 | loc2 = @glob 70 | $ echo {{loc2}} 71 | 72 | // Not gonna work! 73 | $ echo {{loc}} 74 | } 75 | 76 | main 77 | | foo 78 | | bar 79 | ``` 80 | 81 | **Command Execution** 82 | 83 | As indicated in the previous example, it is possible to run arbitrary shell commands. 84 | 85 | ``` 86 | shell_out { 87 | msg = "Yo!" 88 | $ echo {{msg}} 89 | } 90 | ``` 91 | 92 | **Maps** 93 | 94 | Maps allow the same instructions to be performed on a range of values. 95 | 96 | ``` 97 | colours { 98 | ["redfile", "bluefile", "greenfile"] -> c { 99 | "{{c}}" => ~/colourfiles/ 100 | } 101 | } 102 | ``` 103 | 104 | **Enabling and Disabling** 105 | 106 | Stages within pipelines can be, by default, enabled or disabled. Enabled stages are marked with a pipe '`|`', while disabled ones are marked with a colon '`:`'. This is clearer in an example: 107 | 108 | ``` 109 | my_pipeline 110 | | fiddle <-- enabled 111 | : lacquer <-- disabled 112 | | spin <-- enabled 113 | : incinerate <-- disabled 114 | ``` 115 | 116 | The default enable and disable states can be modified with the command line switches `--enable` and `--disable`. These switches take the following arguments: 117 | 118 | * `"*"` - Apply to every stage of every pipeline 119 | * `%foo` - Apply to every stage tagged `foo` 120 | * `bar` - Apply to every instance of the stage `bar` 121 | * `spqr.%foo` - Apply to every stage tagged `foo` in the pipeline `spqr` 122 | * `spqr.bar` - Apply to the stage `bar` in the pipeline `spqr` 123 | 124 | This might lead you to the natural question - what is a tag? Good question! Tags can be applied to pipeline stages like so: 125 | 126 | ``` 127 | spqr 128 | | pillage [important, destructive] 129 | | frolic 130 | | encamp [important] 131 | | barrage [destructive] 132 | ``` 133 | 134 | In this example, `important` and `destructive` are tags. 135 |
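For instance, the reference forms above can be combined with these switches on the command line. A sketch only - `my_pipeline` and `spqr` are the made-up pipelines from the examples in this section:

```
# Skip every stage tagged 'destructive', wherever it appears
jann Jannfile --disable %destructive

# Also run the normally-disabled 'lacquer' stage of my_pipeline
jann Jannfile --enable my_pipeline.lacquer
```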
136 | **Options** 137 | 138 | `jann` features a fine-grained options system which allows control over the extent to which your filesystem can be modified. 139 | 140 | It features the following flags: 141 | 142 | * FF - Files can be overwritten by Files 143 | * DD - Directories can be overwritten by Directories 144 | * DF - Directories can be overwritten by Files 145 | * FD - Files can be overwritten by Directories 146 | * INTER - Intermediate directories can be created to complete a copy 147 | 148 | These flags can be turned on and off with the `--allow` and `--forbid` switches. For example: 149 | 150 | jann Jannfile --allow FF DD --forbid DF FD INTER 151 | 152 | These chosen options propagate to any auxiliary Jannfiles included with directives (see below). 153 | 154 | **Includes** 155 | 156 | It is possible to bring references to other Jannfiles into the namespace. This may be desirable for the sake of modularity, or to allow certain instructions to run as root. 157 | 158 | Here are some examples of how these directives can be used: 159 | 160 | ``` 161 | // Bring the spqr pipeline of other.Jannfile into the namespace 162 | # include other.Jannfile::spqr 163 | // Bring the main (default) pipeline of priv.Jannfile into the namespace as 'elevated' 164 | # sudo_include [priv.Jannfile, elevated] 165 | 166 | main 167 | | spqr 168 | | elevated 169 | ``` 170 |
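One last note, drawn from `src/deploy.rs` below: the crate reads a compile-time `JANN_MOSTLY_HARMLESS` flag via `option_env!`, and a binary built with that variable set to `1` only prints copy operations instead of performing them (shell commands still run). A rough sketch of building such a dry-run binary, assuming a standard `cargo` checkout:

```
# The flag is read at compile time, so set it for the build, not the run
JANN_MOSTLY_HARMLESS=1 cargo build
./target/debug/jann Jannfile --forbid DD FD
```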
-------------------------------------------------------------------------------- /src/deploy.rs: -------------------------------------------------------------------------------- 1 | extern crate walkdir; 2 | 3 | use std::path::{Path, PathBuf, Component}; 4 | use std::fs; 5 | use std::convert; 6 | 7 | // Deploy Options 8 | // - Whether to overwrite {Files, Dirs} w/ {Files, Dirs} 9 | // - Whether to create INTERmediary directories 10 | #[derive(Clone, Copy, Debug)] 11 | pub struct DepOpt { 12 | pub OW_FF: bool, 13 | pub OW_DD: bool, 14 | pub OW_FD: bool, 15 | pub OW_DF: bool, 16 | pub INTER: bool, 17 | } 18 | 19 | impl DepOpt { 20 | // check - Determine if an overwrite may take place based on these options 21 | fn check(&self, src_ent: &Entity, dst_ent: &Entity) -> bool { 22 | match *src_ent { 23 | Entity::FILE => { 24 | match *dst_ent { 25 | Entity::FILE => { 26 | self.OW_FF 27 | }, 28 | Entity::DIR => { 29 | self.OW_DF 30 | } 31 | } 32 | }, 33 | 34 | Entity::DIR => { 35 | match *dst_ent { 36 | Entity::FILE => { 37 | self.OW_FD 38 | }, 39 | Entity::DIR => { 40 | self.OW_DD 41 | } 42 | } 43 | } 44 | } 45 | } 46 | 47 | pub fn dump(&self) -> Vec<&'static str> { 48 | let mut allows = vec!["--allow"]; 49 | let mut forbids = vec!["--forbid"]; 50 | if self.OW_FF { allows.push("ff"); } else { forbids.push("ff"); } 51 | if self.OW_DD { allows.push("dd"); } else { forbids.push("dd"); } 52 | if self.OW_DF { allows.push("df"); } else { forbids.push("df"); } 53 | if self.OW_FD { allows.push("fd"); } else { forbids.push("fd"); } 54 | if self.INTER { allows.push("inter"); } else { forbids.push("inter"); } 55 | 56 | allows.extend(forbids); 57 | allows 58 | } 59 | } 60 | 61 | #[derive(PartialEq, Debug)] 62 | pub enum Entity { 63 | FILE, 64 | DIR, 65 | } 66 | 67 | // EndPtr - Points to last extant entity in a path 68 | #[derive(Debug)] 69 | struct EndPtr { 70 | ptr: usize, 71 | entity: Entity, 72 | full: bool, 73 | } 74 | 75 | // scout - Determine EndPtr for Destination path 76 | fn scout(dst_cmps: &Vec<Component>) -> EndPtr { 77 | let mut scout_path = PathBuf::new(); 78 | let mut entity = Entity::DIR; 79 | for (i, cmp) in dst_cmps.iter().enumerate() { 80 | scout_path.push(cmp); 81 | if scout_path.is_file() { 82 | entity = Entity::FILE; 83 | } 84 | else if scout_path.is_dir() { 85 | entity = Entity::DIR; 86 | } 87 | else { 88 | return EndPtr { 89 | ptr: i, 90 | entity, 91 | full: false, 92 | }; 93 | } 94 | } 95 | EndPtr { 96 | ptr: dst_cmps.len(), 97 | entity, 98 | full: true, 99 | } 100 | } 101 | 102 | #[derive(Debug)] 103 | pub struct DeployError { 104 | pub source : String, 105 | pub message: String, 106 | } 107 | 108 | impl DeployError { 109 | fn locked(source: &'static str, message: &'static str) -> DeployError { 110 | DeployError { source: String::from(source), message: String::from(message) } 111 | } 112 | } 113 | 114 | impl convert::From<walkdir::Error> for DeployError { 115 | fn from(err: walkdir::Error) -> Self { 116 | DeployError {source : "WalkDir".to_owned(), 117 | message: format!("{}", err)} 118 | } 119 | } 120 | 121 | impl convert::From<std::io::Error> for DeployError { 122 | fn from(err: std::io::Error) -> Self { 123 | DeployError {source : "IO".to_owned(), 124 | message: format!("{}", err)} 125 | } 126 | } 127 | 128 | fn copy_dir<P: AsRef<Path>, Q: AsRef<Path>>(src: P, dst: Q) -> Result<(), DeployError> { 129 | let src_path = src.as_ref(); 130 | let dst_path = dst.as_ref(); 131 | 132 | for entry in walkdir::WalkDir::new(src_path) { 133 | let entry = entry?; 134 | let path = entry.path(); 135 | let linked = dst_path.join(path.strip_prefix(src_path).unwrap()); 136 | if path.is_file() { 137 | fs::copy(path, &linked)?; 138 | } 139 | else { 140 | fs::create_dir_all(&linked)?; 141 | } 142 | } 143 | Ok(()) 144 | } 145 | 146 | 147 | // Deploy from source to destination based on options 148 | pub fn deploy(src: PathBuf, src_ent: Entity, dst: PathBuf, opt: DepOpt) -> Result<(), DeployError> { 149 | if option_env!("JANN_MOSTLY_HARMLESS") == Some("1") { 150 | println!("{:?} => {:?}\n...as {:?}\n...
with {:?}", src, dst, src_ent, opt); 151 | return Ok(()); 152 | } 153 | let src_cmps: Vec = src.components().collect(); 154 | let dst_cmps: Vec = dst.components().collect(); 155 | let dst_ptr = scout(&dst_cmps); 156 | 157 | if dst_ptr.full { 158 | let viable = opt.check(&src_ent, &dst_ptr.entity); 159 | if viable { 160 | match &dst_ptr.entity { 161 | Entity::FILE => { 162 | fs::remove_file(&dst)?; //.expect("Could not remove destination file"); 163 | }, 164 | Entity::DIR => { 165 | fs::remove_dir_all(&dst)?; //.expect("Could not remove destination directory"); 166 | } 167 | } 168 | 169 | match &src_ent { 170 | Entity::FILE => { 171 | fs::copy(&src, &dst)?; 172 | }, 173 | Entity::DIR => { 174 | copy_dir(&src, &dst)?; 175 | } 176 | } 177 | } 178 | else { 179 | return Err(DeployError::locked("Deploy", "Options disallow this copy")); 180 | } 181 | } 182 | else { 183 | if dst_ptr.entity == Entity::FILE { 184 | if !opt.OW_FD { return Err(DeployError::locked("Deploy", "Options disallow overwriting files with directories.")) } 185 | 186 | let mut ow_path = PathBuf::new(); 187 | for c in dst_cmps.iter().take(dst_ptr.ptr) { 188 | ow_path.push(c); 189 | } 190 | if ow_path.is_file() { // should always be true 191 | fs::remove_file(&ow_path)?; //.expect("Could not remove clashing file"); 192 | } 193 | else { 194 | unreachable!(); 195 | } 196 | } 197 | let parent = dst.parent().unwrap(); 198 | if !parent.is_dir() { 199 | if opt.INTER { 200 | fs::create_dir_all(&parent)?; 201 | } 202 | else { 203 | return Err(DeployError::locked("Deploy", "Options disallow creating intermediate directories")); 204 | } 205 | } 206 | match &src_ent { 207 | Entity::FILE => { 208 | fs::copy(&src, &dst)?; 209 | }, 210 | Entity::DIR => { 211 | fs::create_dir(&dst)?; 212 | copy_dir(&src, &dst)?; 213 | } 214 | } 215 | } 216 | Ok(()) 217 | } 218 | -------------------------------------------------------------------------------- /src/inter.rs: -------------------------------------------------------------------------------- 1 | use std::collections::HashMap; 2 | use parse::{ParseTree, ParseTreeNode, PTNodeType, Token, TokenType}; 3 | use util; 4 | 5 | #[derive(Clone, Debug)] 6 | pub enum Value<'src> { 7 | List(Vec>), 8 | Str(String), 9 | Name(&'src str), 10 | JName(&'src str), 11 | } 12 | 13 | 14 | #[derive(Debug)] 15 | pub struct Symbols<'src> { 16 | pub names: HashMap<&'src str, Value<'src>>, 17 | pub jnames: HashMap<&'src str, Value<'src>>, 18 | pub blocks: HashMap<&'src str, usize>, 19 | pub includes: HashMap, 20 | } 21 | 22 | impl<'src> Symbols<'src> { 23 | pub fn new() -> Symbols<'src> { 24 | Symbols { 25 | names : HashMap::new(), 26 | jnames: HashMap::new(), 27 | blocks: HashMap::new(), 28 | includes: HashMap::new(), 29 | } 30 | } 31 | } 32 | 33 | #[derive(Debug)] 34 | pub struct Artifact<'src> { 35 | pub toks: &'src Vec>, 36 | pub tree: &'src ParseTree, 37 | } 38 | 39 | #[derive(Debug)] 40 | pub struct LinkNode<'int, 'src: 'int> { 41 | pub int: &'int Artifact<'src>, 42 | pub tok: &'src Token<'src>, 43 | pub ptn: &'src ParseTreeNode, 44 | } 45 | 46 | impl<'int, 'src: 'int> LinkNode<'int, 'src> { 47 | pub fn children(&self) -> Vec> { 48 | let mut in_children = vec![]; 49 | for cid in &self.ptn.children { 50 | let child = &self.int.tree.get_node(*cid); 51 | let tok = &self.int.toks[child.tok_id - 1]; 52 | in_children.push(LinkNode { int: &self.int, tok, ptn: child }); 53 | } 54 | in_children 55 | } 56 | 57 | pub fn is_type(&self, nt: &PTNodeType) -> bool { 58 | self.ptn.nt == *nt 59 | } 60 | 61 | pub fn expect_type(&self, nt: 
&PTNodeType) { 62 | if !self.is_type(nt) { 63 | panic!("Expected {:?} type!", nt); 64 | } 65 | } 66 | 67 | pub fn token_value(&self) -> &'src str { 68 | self.tok.val.slice() 69 | } 70 | } 71 | 72 | //opts: deploy::DepOpt { OW_FF: true, OW_DD: true, OW_FD: false, OW_DF: true, INTER: true } 73 | 74 | impl<'int, 'src: 'int> Artifact<'src> { 75 | pub fn new(toks: &'src Vec>, tree: &'src ParseTree) -> Artifact<'src> { 76 | Artifact { toks, tree } 77 | } 78 | 79 | pub fn root(&'int self) -> LinkNode<'int, 'src> { 80 | if self.tree.is_empty() { 81 | panic!("Parse Tree is empty"); 82 | } 83 | let ptn = &self.tree.get_node(0); 84 | /* Token value should never be read, so just point to arbritrary token */ 85 | LinkNode { int: &self, tok: &(self.toks[0]), ptn: ptn } 86 | } 87 | 88 | pub fn node(&'int self, n: usize) -> LinkNode<'int, 'src> { 89 | if n == 0 { 90 | return self.root(); 91 | } 92 | let ptn = &self.tree.get_node(n); 93 | LinkNode { int: &self, tok: &(self.toks[ptn.tok_id - 1]), ptn: ptn } 94 | } 95 | } 96 | 97 | pub fn check_name(name: &str) -> bool { 98 | let re = regex::Regex::new(r"^[a-zA-Z0-9_]*$").unwrap(); 99 | re.is_match(name) 100 | } 101 | 102 | // Substitute variable names from the symbol table 103 | // Used for shell command statements and also other value strings 104 | 105 | pub fn interpolate<'inv, 'src: 'inv>(log: &mut util::Log<'src>, 106 | symbols: &Symbols<'src>, 107 | base: &'inv str, 108 | node: &LinkNode<'inv, 'src>) -> String { 109 | // A mini enumeration of parsing states 110 | let NONE = 0; 111 | let LBRACE = 1; 112 | let RBRACE = 2; 113 | let WITHIN = 3; 114 | // Expecting Escape 115 | let mut esc = false; 116 | // Expected State 117 | let mut ex = NONE; 118 | 119 | // The final string is built into outstr 120 | let mut outstr: String = "".to_string(); 121 | 122 | // Name stores interpolation variables as they are parsed 123 | let mut name: String = "".to_string(); 124 | 125 | // We parse on a char-by-char basis 126 | for c in base.chars() { 127 | if ex == RBRACE { 128 | if c != '}' { 129 | log.terminal("Expected right brace", "Missing right brace", &node.tok); 130 | } 131 | ex = NONE; 132 | continue; 133 | } 134 | 135 | if ex == WITHIN { 136 | if c == '}' { 137 | let val = symbols.names.get(name.trim()).unwrap_or_else( || { 138 | symbols.jnames.get(name.trim()).unwrap_or_else( || { 139 | log.terminal(&format!("No such variable {}", name), 140 | "Ensure interpolation uses extant, in-scope variables", &node.tok); 141 | }) 142 | }); 143 | if let Value::Str(ref v) = val { 144 | outstr.push_str(v); 145 | } 146 | else { 147 | log.terminal("Only strings can be interpolated into commands", 148 | &format!("Change the type of variable {}", name.trim()), &node.tok); 149 | } 150 | name = "".to_string(); 151 | ex = RBRACE; 152 | continue; 153 | } 154 | name.push(c); 155 | continue; 156 | } 157 | 158 | if ex == LBRACE { 159 | if c == '{' { 160 | ex = WITHIN; 161 | continue; 162 | } 163 | else { 164 | ex = NONE; 165 | outstr.push_str("{"); 166 | } 167 | 168 | } 169 | 170 | if c == '\\' && !esc { 171 | esc = true; 172 | continue; 173 | } 174 | if c == '{' && !esc { 175 | ex = LBRACE; 176 | continue; 177 | } 178 | 179 | if esc { esc = false; } 180 | 181 | outstr.push(c); 182 | } 183 | 184 | if ex != NONE { 185 | log.terminal("Bad interpolation syntax", "Make sure all braces are matched", &node.tok); 186 | } 187 | 188 | outstr 189 | } 190 | 191 | pub fn load_value<'old, 'src: 'old>(symbols: &Symbols<'src>, 192 | node : &LinkNode<'old, 'src> ) -> Value<'src> { 193 | match 
node.ptn.nt { 194 | PTNodeType::NAME => { 195 | let name = node.tok.val.slice(); 196 | if let Some(val) = symbols.names.get(name) { 197 | (*val).clone() 198 | } 199 | else { 200 | Value::Str(name.to_string()) 201 | } 202 | }, 203 | PTNodeType::JNAME => { 204 | let jname = node.tok.val.slice(); 205 | if let Some(val) = symbols.jnames.get(jname) { 206 | (*val).clone() 207 | } 208 | else { 209 | panic!("Undefined JNAME"); 210 | } 211 | }, 212 | PTNodeType::LIST => { 213 | let mut vals = vec![]; 214 | for elem in node.children() { 215 | vals.push(load_value(symbols, &elem)); 216 | } 217 | Value::List(vals) 218 | }, 219 | _ => { panic!(format!("Bad Value {:?}", node.ptn.nt)); } 220 | } 221 | } 222 | 223 | -------------------------------------------------------------------------------- /src/exec.rs: -------------------------------------------------------------------------------- 1 | extern crate walkdir; 2 | extern crate dirs; 3 | 4 | use parse::PTNodeType; 5 | use std::process::Command; 6 | use std::path::{Path, PathBuf, Component}; 7 | use std::env; 8 | use std::fs; 9 | 10 | use deploy; 11 | use invoke; 12 | use inter; 13 | use util; 14 | use parse; 15 | 16 | fn component_string(c: &Component) -> String { 17 | c.as_os_str().to_string_lossy().to_string() 18 | } 19 | 20 | fn command<'inv, 'src: 'inv>(inv: &invoke::Invocation<'src>, 21 | symbols: &mut inter::Symbols<'src>, 22 | log: &mut util::Log<'src>, 23 | node: &inter::LinkNode<'inv, 'src>){ 24 | let shell = { 25 | if let Some(inter::Value::Str(s)) = symbols.jnames.get("shell") { 26 | s.to_owned() 27 | } 28 | else { 29 | "/bin/sh".to_owned() 30 | } 31 | }; 32 | 33 | let outcom = inter::interpolate(log, symbols, node.token_value(), node); 34 | println!(">>> {}", outcom); 35 | 36 | let mut proc = Command::new(&shell) 37 | .arg("-c") 38 | .arg(outcom) 39 | .spawn() 40 | .expect("failed to execute process"); 41 | 42 | if !proc.wait().expect("failed to wait on process").success() { println!("Command ended with non-zero status") } 43 | } 44 | 45 | fn execute_stmts<'inv, 'src: 'inv>(inv: &invoke::Invocation<'src>, 46 | symbols: &mut inter::Symbols<'src>, 47 | log: &mut util::Log<'src>, 48 | stmts: Vec<&inter::LinkNode<'inv, 'src>>) { 49 | let mut scope_names : Vec<&'src str> = vec![]; 50 | for node in stmts { 51 | match node.ptn.nt { 52 | PTNodeType::ASSIGN => { 53 | let rval = inter::load_value(symbols, &node.children()[1]); 54 | let lval = &node.children()[0]; 55 | if inter::check_name(lval.token_value()) { 56 | if lval.is_type(&PTNodeType::NAME) { 57 | scope_names.push(lval.token_value()); 58 | symbols.names.insert(lval.token_value(), rval); 59 | } 60 | else if lval.is_type(&PTNodeType::JNAME) { 61 | symbols.jnames.insert(lval.token_value(), rval); 62 | } 63 | } 64 | else { 65 | log.terminal("Invalid variable name", "Make this a valid name", &lval.tok); 66 | } 67 | }, 68 | PTNodeType::COMMAND => { 69 | command(inv, symbols, log, node); 70 | }, 71 | PTNodeType::COPY | PTNodeType::INSERT => { 72 | let deploy_children = &node.children(); 73 | let src_buf = PathBuf::from(inter::interpolate(log, 74 | symbols, 75 | &deploy_children[0].token_value(), 76 | &deploy_children[0])); 77 | 78 | let comps: Vec = src_buf.components().collect(); 79 | 80 | if comps.len() == 0 { 81 | log.terminal("Source path is empty (this should not be allowed by the parser)", 82 | "Put a path here and then please file a bug report!", 83 | &deploy_children[0].tok); 84 | } 85 | 86 | if !comps.iter().all(|&c| match c { Component::Normal(_) => true, _ => false }) { 87 | 
log.terminal("Invalid source path", 88 | "Remove any expansions and ensure path is relative to Jannfile", 89 | &deploy_children[0].tok); 90 | } 91 | 92 | let full_src = inv.root.join(&src_buf); 93 | 94 | if !full_src.exists() { 95 | log.terminal(&format!("No entity at source path: {:?}", full_src), 96 | "Make this a valid path", &deploy_children[0].tok); 97 | } 98 | 99 | let mut dst_buf = PathBuf::from(inter::interpolate(log, 100 | symbols, 101 | &deploy_children[1].token_value(), 102 | &deploy_children[1])); 103 | 104 | let dst_cpy = dst_buf.clone(); 105 | let dst_comps: Vec = dst_cpy.components().collect(); 106 | 107 | if dst_comps.len() == 0 { 108 | log.terminal("Destination path is empty (this should not be allowed by the parser)", 109 | "Put a path here and then please file a bug report!", 110 | &deploy_children[1].tok); 111 | 112 | } 113 | 114 | dst_buf = if let Ok(dst_tail) = dst_buf.strip_prefix("~") { 115 | dirs::home_dir().unwrap_or_else( || { 116 | log.sys_terminal("Could not find home directory"); 117 | }).join(dst_tail) 118 | } 119 | else { 120 | dst_buf 121 | }; 122 | 123 | if !dst_buf.components().all(|c| match c { 124 | Component::CurDir | Component::ParentDir => false, 125 | _ => true, 126 | }) { 127 | log.terminal(&format!("Invalid destination path {:?}", dst_buf), 128 | "Ensure path is absolute", 129 | &deploy_children[1].tok); 130 | } 131 | 132 | if node.is_type(&PTNodeType::INSERT) { 133 | let entity = if let Some(parent) = src_buf.parent() { 134 | src_buf.strip_prefix(parent).unwrap() 135 | } 136 | else { 137 | &src_buf 138 | }; 139 | dst_buf = PathBuf::from("/").join(dst_buf.join(entity)); 140 | } 141 | 142 | if let Err(result) = { 143 | if full_src.is_file() { 144 | deploy::deploy(full_src, deploy::Entity::FILE, dst_buf, inv.opts) 145 | } 146 | else { 147 | deploy::deploy(full_src, deploy::Entity::DIR, dst_buf, inv.opts) 148 | } 149 | } { 150 | log.terminal(&format!("Deployment error: [{}] {}", &result.source, &result.message), 151 | "Modify this line appropriately", &node.tok); 152 | } 153 | 154 | }, 155 | PTNodeType::BLOCK => { execute_block(inv, symbols, log, node); }, 156 | _ => { continue; }, 157 | } 158 | } 159 | for name in scope_names.iter() { 160 | symbols.names.remove(name); 161 | } 162 | } 163 | 164 | pub fn execute_block<'inv, 'src: 'inv>(inv: &invoke::Invocation<'src>, 165 | symbols: &mut inter::Symbols<'src>, 166 | log: &mut util::Log<'src>, 167 | node: &inter::LinkNode<'inv, 'src>) { 168 | let mut block_children = node.children(); 169 | let tag = &block_children[0]; 170 | 171 | match tag.ptn.nt { 172 | PTNodeType::NAME => { 173 | if inter::check_name(tag.token_value()) { 174 | execute_stmts(inv, symbols, log, block_children.iter().skip(1).collect()); 175 | } 176 | else { 177 | log.terminal("Invalid Block Name", "Choose a valid name for this block", &tag.tok); 178 | } 179 | }, 180 | PTNodeType::MAP => { 181 | let map = &block_children[0]; 182 | let map_children = map.children(); 183 | if let inter::Value::List(vlist) = inter::load_value(symbols, &map_children[0]) { 184 | let name = map_children[1].token_value(); 185 | if inter::check_name(&name) { 186 | for elem in vlist { 187 | let elem = if let inter::Value::Str(ref s) = elem { 188 | inter::Value::Str(inter::interpolate(log, symbols, s, map)) 189 | } else { 190 | elem 191 | }; 192 | symbols.names.insert(&name, elem); 193 | execute_stmts(inv, symbols, log, block_children.iter().skip(1).collect()); 194 | } 195 | symbols.names.remove(name); 196 | } 197 | else { 198 | log.terminal("Invalid Map Variable 
Name", 199 | "Choose a valid name for this variable", &map_children[1].tok); 200 | } 201 | } 202 | else { 203 | log.terminal("Left side of Map must be a list", 204 | "Replace this value with a list", &map_children[0].tok); 205 | } 206 | let name = &node.children()[1]; 207 | 208 | }, 209 | PTNodeType::CD => { 210 | let cd = &block_children[0]; 211 | let pval = &cd.children()[0]; 212 | if let inter::Value::Str(path) = inter::load_value(symbols, pval) { 213 | let path = inter::interpolate(log, symbols, &path, pval); 214 | let cur = env::current_dir().unwrap(); 215 | let path = cur.join(path); 216 | if path.is_dir() { 217 | if let Ok(_) = env::set_current_dir(path) { 218 | execute_stmts(inv, symbols, log, block_children.iter().skip(1).collect()); 219 | } 220 | else { 221 | log.terminal("Could not set working directory", "Make this an accessible directory", pval.tok); 222 | } 223 | } 224 | else { 225 | log.terminal("Could not set working directory", "Make this an extant directory", pval.tok); 226 | } 227 | env::set_current_dir(cur); 228 | } 229 | } 230 | _ => { log.terminal("Invalid Block Tag", "Replace this with a name or a mapping", &tag.tok); }, 231 | } 232 | } 233 | -------------------------------------------------------------------------------- /Cargo.lock: -------------------------------------------------------------------------------- 1 | # This file is automatically @generated by Cargo. 2 | # It is not intended for manual editing. 3 | [[package]] 4 | name = "aho-corasick" 5 | version = "0.7.6" 6 | source = "registry+https://github.com/rust-lang/crates.io-index" 7 | dependencies = [ 8 | "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", 9 | ] 10 | 11 | [[package]] 12 | name = "argon2rs" 13 | version = "0.2.5" 14 | source = "registry+https://github.com/rust-lang/crates.io-index" 15 | dependencies = [ 16 | "blake2-rfc 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)", 17 | "scoped_threadpool 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", 18 | ] 19 | 20 | [[package]] 21 | name = "arrayvec" 22 | version = "0.4.11" 23 | source = "registry+https://github.com/rust-lang/crates.io-index" 24 | dependencies = [ 25 | "nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)", 26 | ] 27 | 28 | [[package]] 29 | name = "backtrace" 30 | version = "0.3.34" 31 | source = "registry+https://github.com/rust-lang/crates.io-index" 32 | dependencies = [ 33 | "backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)", 34 | "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", 35 | "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", 36 | "rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)", 37 | ] 38 | 39 | [[package]] 40 | name = "backtrace-sys" 41 | version = "0.1.31" 42 | source = "registry+https://github.com/rust-lang/crates.io-index" 43 | dependencies = [ 44 | "cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)", 45 | "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", 46 | ] 47 | 48 | [[package]] 49 | name = "bitflags" 50 | version = "1.1.0" 51 | source = "registry+https://github.com/rust-lang/crates.io-index" 52 | 53 | [[package]] 54 | name = "blake2-rfc" 55 | version = "0.2.18" 56 | source = "registry+https://github.com/rust-lang/crates.io-index" 57 | dependencies = [ 58 | "arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)", 59 | "constant_time_eq 0.1.3 
(registry+https://github.com/rust-lang/crates.io-index)", 60 | ] 61 | 62 | [[package]] 63 | name = "cc" 64 | version = "1.0.38" 65 | source = "registry+https://github.com/rust-lang/crates.io-index" 66 | 67 | [[package]] 68 | name = "cfg-if" 69 | version = "0.1.9" 70 | source = "registry+https://github.com/rust-lang/crates.io-index" 71 | 72 | [[package]] 73 | name = "cloudabi" 74 | version = "0.0.3" 75 | source = "registry+https://github.com/rust-lang/crates.io-index" 76 | dependencies = [ 77 | "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", 78 | ] 79 | 80 | [[package]] 81 | name = "constant_time_eq" 82 | version = "0.1.3" 83 | source = "registry+https://github.com/rust-lang/crates.io-index" 84 | 85 | [[package]] 86 | name = "dirs" 87 | version = "2.0.2" 88 | source = "registry+https://github.com/rust-lang/crates.io-index" 89 | dependencies = [ 90 | "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", 91 | "dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)", 92 | ] 93 | 94 | [[package]] 95 | name = "dirs-sys" 96 | version = "0.3.4" 97 | source = "registry+https://github.com/rust-lang/crates.io-index" 98 | dependencies = [ 99 | "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", 100 | "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", 101 | "redox_users 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)", 102 | "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", 103 | ] 104 | 105 | [[package]] 106 | name = "failure" 107 | version = "0.1.5" 108 | source = "registry+https://github.com/rust-lang/crates.io-index" 109 | dependencies = [ 110 | "backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)", 111 | "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", 112 | ] 113 | 114 | [[package]] 115 | name = "failure_derive" 116 | version = "0.1.5" 117 | source = "registry+https://github.com/rust-lang/crates.io-index" 118 | dependencies = [ 119 | "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", 120 | "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", 121 | "syn 0.15.42 (registry+https://github.com/rust-lang/crates.io-index)", 122 | "synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)", 123 | ] 124 | 125 | [[package]] 126 | name = "fuchsia-cprng" 127 | version = "0.1.1" 128 | source = "registry+https://github.com/rust-lang/crates.io-index" 129 | 130 | [[package]] 131 | name = "jann" 132 | version = "0.1.0" 133 | dependencies = [ 134 | "bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)", 135 | "dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)", 136 | "regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)", 137 | "walkdir 2.1.4 (registry+https://github.com/rust-lang/crates.io-index)", 138 | ] 139 | 140 | [[package]] 141 | name = "lazy_static" 142 | version = "1.4.0" 143 | source = "registry+https://github.com/rust-lang/crates.io-index" 144 | 145 | [[package]] 146 | name = "libc" 147 | version = "0.2.60" 148 | source = "registry+https://github.com/rust-lang/crates.io-index" 149 | 150 | [[package]] 151 | name = "memchr" 152 | version = "2.2.1" 153 | source = "registry+https://github.com/rust-lang/crates.io-index" 154 | 155 | [[package]] 156 | name = "nodrop" 157 | version = "0.1.13" 158 | source = "registry+https://github.com/rust-lang/crates.io-index" 159 | 160 | [[package]] 161 | name = "proc-macro2" 
162 | version = "0.4.30" 163 | source = "registry+https://github.com/rust-lang/crates.io-index" 164 | dependencies = [ 165 | "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", 166 | ] 167 | 168 | [[package]] 169 | name = "quote" 170 | version = "0.6.13" 171 | source = "registry+https://github.com/rust-lang/crates.io-index" 172 | dependencies = [ 173 | "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", 174 | ] 175 | 176 | [[package]] 177 | name = "rand_core" 178 | version = "0.3.1" 179 | source = "registry+https://github.com/rust-lang/crates.io-index" 180 | dependencies = [ 181 | "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 182 | ] 183 | 184 | [[package]] 185 | name = "rand_core" 186 | version = "0.4.0" 187 | source = "registry+https://github.com/rust-lang/crates.io-index" 188 | 189 | [[package]] 190 | name = "rand_os" 191 | version = "0.1.3" 192 | source = "registry+https://github.com/rust-lang/crates.io-index" 193 | dependencies = [ 194 | "cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", 195 | "fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", 196 | "libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)", 197 | "rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 198 | "rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 199 | "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", 200 | ] 201 | 202 | [[package]] 203 | name = "rdrand" 204 | version = "0.4.0" 205 | source = "registry+https://github.com/rust-lang/crates.io-index" 206 | dependencies = [ 207 | "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", 208 | ] 209 | 210 | [[package]] 211 | name = "redox_syscall" 212 | version = "0.1.56" 213 | source = "registry+https://github.com/rust-lang/crates.io-index" 214 | 215 | [[package]] 216 | name = "redox_users" 217 | version = "0.3.0" 218 | source = "registry+https://github.com/rust-lang/crates.io-index" 219 | dependencies = [ 220 | "argon2rs 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", 221 | "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", 222 | "rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)", 223 | "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", 224 | ] 225 | 226 | [[package]] 227 | name = "regex" 228 | version = "1.3.1" 229 | source = "registry+https://github.com/rust-lang/crates.io-index" 230 | dependencies = [ 231 | "aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)", 232 | "memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)", 233 | "regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)", 234 | "thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", 235 | ] 236 | 237 | [[package]] 238 | name = "regex-syntax" 239 | version = "0.6.12" 240 | source = "registry+https://github.com/rust-lang/crates.io-index" 241 | 242 | [[package]] 243 | name = "rustc-demangle" 244 | version = "0.1.15" 245 | source = "registry+https://github.com/rust-lang/crates.io-index" 246 | 247 | [[package]] 248 | name = "same-file" 249 | version = "1.0.2" 250 | source = "registry+https://github.com/rust-lang/crates.io-index" 251 | dependencies = [ 252 | "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", 253 | ] 254 | 255 | [[package]] 256 | name = "scoped_threadpool" 257 | version = "0.1.9" 
258 | source = "registry+https://github.com/rust-lang/crates.io-index" 259 | 260 | [[package]] 261 | name = "syn" 262 | version = "0.15.42" 263 | source = "registry+https://github.com/rust-lang/crates.io-index" 264 | dependencies = [ 265 | "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", 266 | "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", 267 | "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", 268 | ] 269 | 270 | [[package]] 271 | name = "synstructure" 272 | version = "0.10.2" 273 | source = "registry+https://github.com/rust-lang/crates.io-index" 274 | dependencies = [ 275 | "proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)", 276 | "quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)", 277 | "syn 0.15.42 (registry+https://github.com/rust-lang/crates.io-index)", 278 | "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)", 279 | ] 280 | 281 | [[package]] 282 | name = "thread_local" 283 | version = "0.3.6" 284 | source = "registry+https://github.com/rust-lang/crates.io-index" 285 | dependencies = [ 286 | "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 287 | ] 288 | 289 | [[package]] 290 | name = "unicode-xid" 291 | version = "0.1.0" 292 | source = "registry+https://github.com/rust-lang/crates.io-index" 293 | 294 | [[package]] 295 | name = "walkdir" 296 | version = "2.1.4" 297 | source = "registry+https://github.com/rust-lang/crates.io-index" 298 | dependencies = [ 299 | "same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", 300 | "winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", 301 | ] 302 | 303 | [[package]] 304 | name = "winapi" 305 | version = "0.3.5" 306 | source = "registry+https://github.com/rust-lang/crates.io-index" 307 | dependencies = [ 308 | "winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 309 | "winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)", 310 | ] 311 | 312 | [[package]] 313 | name = "winapi-i686-pc-windows-gnu" 314 | version = "0.4.0" 315 | source = "registry+https://github.com/rust-lang/crates.io-index" 316 | 317 | [[package]] 318 | name = "winapi-x86_64-pc-windows-gnu" 319 | version = "0.4.0" 320 | source = "registry+https://github.com/rust-lang/crates.io-index" 321 | 322 | [metadata] 323 | "checksum aho-corasick 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "58fb5e95d83b38284460a5fda7d6470aa0b8844d283a0b614b8535e880800d2d" 324 | "checksum argon2rs 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "3f67b0b6a86dae6e67ff4ca2b6201396074996379fba2b92ff649126f37cb392" 325 | "checksum arrayvec 0.4.11 (registry+https://github.com/rust-lang/crates.io-index)" = "b8d73f9beda665eaa98ab9e4f7442bd4e7de6652587de55b2525e52e29c1b0ba" 326 | "checksum backtrace 0.3.34 (registry+https://github.com/rust-lang/crates.io-index)" = "b5164d292487f037ece34ec0de2fcede2faa162f085dd96d2385ab81b12765ba" 327 | "checksum backtrace-sys 0.1.31 (registry+https://github.com/rust-lang/crates.io-index)" = "82a830b4ef2d1124a711c71d263c5abdc710ef8e907bd508c88be475cebc422b" 328 | "checksum bitflags 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3d155346769a6855b86399e9bc3814ab343cd3d62c7e985113d46a0ec3c281fd" 329 | "checksum blake2-rfc 0.2.18 (registry+https://github.com/rust-lang/crates.io-index)" = 
"5d6d530bdd2d52966a6d03b7a964add7ae1a288d25214066fd4b600f0f796400" 330 | "checksum cc 1.0.38 (registry+https://github.com/rust-lang/crates.io-index)" = "ce400c638d48ee0e9ab75aef7997609ec57367ccfe1463f21bf53c3eca67bf46" 331 | "checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33" 332 | "checksum cloudabi 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" 333 | "checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e" 334 | "checksum dirs 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "13aea89a5c93364a98e9b37b2fa237effbb694d5cfe01c5b70941f7eb087d5e3" 335 | "checksum dirs-sys 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "afa0b23de8fd801745c471deffa6e12d248f962c9fd4b4c33787b055599bde7b" 336 | "checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2" 337 | "checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1" 338 | "checksum fuchsia-cprng 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "a06f77d526c1a601b7c4cdd98f54b5eaabffc14d5f2f0296febdc7f357c6d3ba" 339 | "checksum lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e2abad23fbc42b3700f2f279844dc832adb2b2eb069b2df918f455c4e18cc646" 340 | "checksum libc 0.2.60 (registry+https://github.com/rust-lang/crates.io-index)" = "d44e80633f007889c7eff624b709ab43c92d708caad982295768a7b13ca3b5eb" 341 | "checksum memchr 2.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "88579771288728879b57485cc7d6b07d648c9f0141eb955f8ab7f9d45394468e" 342 | "checksum nodrop 0.1.13 (registry+https://github.com/rust-lang/crates.io-index)" = "2f9667ddcc6cc8a43afc9b7917599d7216aa09c463919ea32c59ed6cac8bc945" 343 | "checksum proc-macro2 0.4.30 (registry+https://github.com/rust-lang/crates.io-index)" = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759" 344 | "checksum quote 0.6.13 (registry+https://github.com/rust-lang/crates.io-index)" = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1" 345 | "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" 346 | "checksum rand_core 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d0e7a549d590831370895ab7ba4ea0c1b6b011d106b5ff2da6eee112615e6dc0" 347 | "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" 348 | "checksum rdrand 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "678054eb77286b51581ba43620cc911abf02758c91f93f479767aed0f90458b2" 349 | "checksum redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)" = "2439c63f3f6139d1b57529d16bc3b8bb855230c8efcc5d3a896c8bea7c3b1e84" 350 | "checksum redox_users 0.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3fe5204c3a17e97dde73f285d49be585df59ed84b50a872baf416e73b62c3828" 351 | "checksum regex 1.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "dc220bd33bdce8f093101afe22a037b8eb0e5af33592e6a9caafff0d4cb81cbd" 352 | "checksum 
regex-syntax 0.6.12 (registry+https://github.com/rust-lang/crates.io-index)" = "11a7e20d1cce64ef2fed88b66d347f88bd9babb82845b2b858f3edbf59a4f716" 353 | "checksum rustc-demangle 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7f4dccf6f4891ebcc0c39f9b6eb1a83b9bf5d747cb439ec6fba4f3b977038af" 354 | "checksum same-file 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "cfb6eded0b06a0b512c8ddbcf04089138c9b4362c2f696f3c3d76039d68f3637" 355 | "checksum scoped_threadpool 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "1d51f5df5af43ab3f1360b429fa5e0152ac5ce8c0bd6485cae490332e96846a8" 356 | "checksum syn 0.15.42 (registry+https://github.com/rust-lang/crates.io-index)" = "eadc09306ca51a40555dd6fc2b415538e9e18bc9f870e47b1a524a79fe2dcf5e" 357 | "checksum synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f" 358 | "checksum thread_local 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "c6b53e329000edc2b34dbe8545fd20e55a333362d0a321909685a19bd28c3f1b" 359 | "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc" 360 | "checksum walkdir 2.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "63636bd0eb3d00ccb8b9036381b526efac53caf112b7783b730ab3f8e44da369" 361 | "checksum winapi 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "773ef9dcc5f24b7d850d0ff101e542ff24c3b090a9768e03ff889fdef41f00fd" 362 | "checksum winapi-i686-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" 363 | "checksum winapi-x86_64-pc-windows-gnu 0.4.0 (registry+https://github.com/rust-lang/crates.io-index)" = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" 364 | -------------------------------------------------------------------------------- /src/invoke.rs: -------------------------------------------------------------------------------- 1 | use parse::{ParseTree, ParseTreeNode, PTNodeType, Token, TokenType}; 2 | use com; 3 | use inter; 4 | use exec; 5 | use deploy; 6 | use util; 7 | 8 | use std::fs; 9 | use std::env; 10 | use std::path::PathBuf; 11 | use std::collections::HashMap; 12 | use std::process::Command; 13 | 14 | #[derive(Debug)] 15 | enum RunState { 16 | NOTRUN, 17 | DONE , 18 | } 19 | 20 | #[derive(Debug)] 21 | struct PipelineStage<'src> { 22 | name: &'src str, 23 | tags: Vec<&'src str>, 24 | enabled: bool, 25 | state: RunState, 26 | pl_ptr: Option 27 | } 28 | 29 | // A Pipeline is a sequence of executable stages 30 | 31 | #[derive(Debug)] 32 | struct Pipeline<'src> { 33 | name : &'src str, 34 | stages: Vec>, 35 | } 36 | 37 | impl<'inv, 'src: 'inv> Pipeline<'src> { 38 | 39 | // Execute a Pipeline 40 | fn execute(flow: &mut Workflow, 41 | pl_self: usize, 42 | inv: &Invocation<'src>, 43 | symbols: &mut inter::Symbols<'src>, 44 | log: &mut util::Log<'src>, 45 | tab: usize, 46 | ) { 47 | 48 | // Tabbing allows better logging of nested Pipelines 49 | let tabs = "\t".repeat(tab); 50 | println!("[Execute] {}{}", tabs, &flow.lines[pl_self].name); 51 | 52 | // Iterate through own stages 53 | for st_index in 0..flow.lines[pl_self].stages.len() { 54 | 55 | // Bypass disabled stages 56 | if !flow.lines[pl_self].stages[st_index].enabled { 57 | println!("[ Ignore] {} : {}", tabs, flow.lines[pl_self].stages[st_index].name); 58 | continue; 59 | } 60 
| 61 | // If it's a pointer to another Pipeline, execute that 62 | // Note that we increment the tab count 63 | if let Some(ptr) = flow.lines[pl_self].stages[st_index].pl_ptr { 64 | println!("[Running] {} | {}", tabs, flow.lines[pl_self].stages[st_index].name); 65 | Pipeline::execute(flow, ptr, inv, symbols, log, tab + 1); 66 | } 67 | else { 68 | // If it's not a Pipeline it's either a block or an external reference 69 | let name = &flow.lines[pl_self].stages[st_index].name; 70 | 71 | // Checking the run-state prevents a block from being run twice 72 | match flow.lines[pl_self].stages[st_index].state { 73 | RunState::NOTRUN => { 74 | // It's not been run before so we execute it 75 | println!("[Execute] {} | {}", tabs, name); 76 | 77 | // If it's a block, we execute it 78 | if let Some(block_id) = symbols.blocks.get(name) { 79 | let mut node: inter::LinkNode = inv.art.node(*block_id); 80 | exec::execute_block(inv, symbols, log, &node); 81 | } 82 | 83 | // Otherwise, it might be an 'include' - a reference to an external file 84 | else if let Some((file, entry, sudo)) = symbols.includes.get(*name) { 85 | 86 | // We try and build the path to the other Jannfile 87 | let jannfile = inv.root.join(file).into_os_string().into_string(); 88 | let jannfile = jannfile.unwrap_or_else( |_| { 89 | log.sys_terminal(&format!("Unable to handle file path {}", file)); 90 | }); 91 | 92 | // We also try and get a path to our own binary 93 | let binary = env::current_exe().unwrap_or_else( |_| { 94 | log.sys_terminal(&format!("Unable to get jann binary path")); 95 | }).into_os_string().into_string().unwrap_or_else( |_| { 96 | log.sys_terminal(&format!("Unable to handle binary path")); 97 | }); 98 | 99 | // Now we can create a new jann process to run the included file 100 | // Note that the included file recieves no state 101 | 102 | let optstr = inv.opts.dump(); 103 | 104 | let incl_msg = format!("********** Include: {}::{} *********", &file, &entry); 105 | println!("\n{}", incl_msg); 106 | 107 | let mut proc = if *sudo { 108 | Command::new("sudo") 109 | .current_dir(&inv.root) 110 | .arg(binary) 111 | .arg(jannfile) 112 | .args(optstr) 113 | .arg("--execute") 114 | .arg(entry) 115 | .spawn() 116 | .expect("Failed to run included Jannfile") 117 | } 118 | else { 119 | Command::new(binary) 120 | .current_dir(&inv.root) 121 | .arg(file) 122 | .args(optstr) 123 | .arg("--execute") 124 | .arg(entry) 125 | .spawn() 126 | .expect("Failed to run included Jannfile") 127 | }; 128 | 129 | if !proc.wait().expect("Failed to wait on Jann").success() { 130 | println!("{}", "*".repeat(incl_msg.len())); 131 | log.die(); 132 | }; 133 | 134 | println!("{}\n", "*".repeat(incl_msg.len())); 135 | } 136 | 137 | // There's nothing to run with this name 138 | else { 139 | log.sys_terminal(&format!("No such block or pipeline {}", name)); 140 | } 141 | }, 142 | RunState::DONE => { 143 | println!("[ Done] {} * {}", tabs, name); 144 | }, 145 | } 146 | } 147 | flow.lines[pl_self].stages[st_index].state = RunState::DONE; 148 | } 149 | } 150 | } 151 | 152 | // A workflow is a set of indexed Pipelines 153 | 154 | struct Workflow<'src> { 155 | lines: Vec>, 156 | index : HashMap<&'src str, usize>, 157 | } 158 | 159 | impl<'inv, 'src: 'inv> Workflow<'src> { 160 | fn new() -> Workflow<'src> { 161 | Workflow { lines: vec![], index: HashMap::new() } 162 | } 163 | 164 | fn execute(&mut self, inv: &Invocation<'src>, symbols: &mut inter::Symbols<'src>, log: &mut util::Log<'src>) { 165 | let mut main_line = 
self.index.get(inv.pl_name.as_str()).unwrap_or_else( | | { 166 | log.sys_terminal("No such pipeline exists."); 167 | }); 168 | Pipeline::execute(self, *main_line, inv, symbols, log, 0); 169 | } 170 | } 171 | 172 | // Encapsulates all the data pertaining to an invocation of a Jannfile 173 | pub struct Invocation<'src> { 174 | pub root : PathBuf, 175 | pub edir : PathBuf, 176 | pub opts : deploy::DepOpt, 177 | pub pl_name : String, 178 | pub art : inter::Artifact<'src>, 179 | pub switches: com::Switches, 180 | } 181 | 182 | 183 | impl<'inv, 'src: 'inv> Invocation<'src> { 184 | pub fn invoke(self, log: &'inv mut util::Log<'src>) { 185 | 186 | // Set up directories 187 | 188 | let cwd = env::current_dir().unwrap_or_else( | _ | { 189 | log.sys_terminal("Could not get cwd."); 190 | }); 191 | 192 | if !self.edir.exists() { 193 | fs::create_dir_all(&self.edir).unwrap_or_else( | _ | { 194 | log.sys_terminal("Unable to create execution dir"); 195 | }); 196 | } 197 | 198 | env::set_current_dir(&self.edir).unwrap_or_else( | _ | { 199 | log.sys_terminal( 200 | &format!("Could not change working directory to {:?}.", &self.edir) 201 | ); 202 | }); 203 | 204 | // Create Symbol table and Workflow 205 | 206 | let mut symbols = inter::Symbols::new(); 207 | 208 | // Pre-populate symbol table with relevant directory path 209 | 210 | symbols.jnames.insert("BUNDLE", inter::Value::Str(self.root.clone() 211 | .into_os_string() 212 | .into_string() 213 | .unwrap_or_else( | _ | { 214 | log.sys_terminal("Unable to handle bundle path"); 215 | }))); 216 | 217 | let root = self.art.root(); 218 | let mut flow = Workflow::new(); 219 | 220 | // Utility function to parse a reference to an external pipeline 221 | fn parse_extern(node: &inter::LinkNode, log: &mut util::Log) -> Option<(String, String)> { 222 | let parts = node.token_value().split("::").collect::>(); 223 | match parts.len() { 224 | 1 => Some((parts[0].to_string(), "main".to_string())), 225 | 2 => Some((parts[0].to_string(), parts[1].to_string())), 226 | _ => { log.error("Bad directive", "Too many '::'", node.tok); None }, 227 | } 228 | } 229 | 230 | // Populate the symbol table and build the workflow by walking 231 | // through the top level nodes of the parse trees 232 | 233 | for child in root.children() { 234 | 235 | // First we check if the child is a directive we need to parse 236 | 237 | if child.is_type(&PTNodeType::DIRECTIVE) { 238 | let verb = &child.children()[0]; 239 | let data = &child.children()[1]; 240 | if !verb.is_type(&PTNodeType::NAME) { 241 | log.error("Invalid directive verb", "This needs to be a name", verb.tok); 242 | continue; 243 | } 244 | match verb.token_value() { 245 | v @ "include" | v @ "sudo_include" => { 246 | let (file, entry, symbol) = match data.ptn.nt { 247 | PTNodeType::NAME => { 248 | let (file, entry) = match parse_extern(&data, log) { Some(t) => t, None => { continue; } }; 249 | (file, entry.clone(), entry) 250 | }, 251 | PTNodeType::LIST => { 252 | if data.children().len() != 2 { 253 | log.error("Bad list argument to include directive", 254 | "Should be two values here", data.tok); 255 | } 256 | if !data.children().iter().all(|c| c.is_type(&PTNodeType::NAME)) { 257 | log.error("Bad value in list argument for include directive", 258 | "These values need all be names", data.tok); 259 | } 260 | if let Some((file, entry)) = parse_extern(&data.children()[0], log) { 261 | (file, entry, data.children()[1].token_value().to_string()) 262 | } 263 | else { 264 | continue; 265 | } 266 | }, 267 | _ => { 268 | unimplemented!(); 269 | } 
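// `file` is the included Jannfile, `entry` the pipeline to run inside it,
// and `symbol` the stage name the include is registered under just below;
// the extra boolean recorded with them marks whether the include has to be
// re-invoked under sudo (checked above when the stage actually executes).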
270 | }; 271 | symbols.includes.insert(symbol, (file, entry, v == "sudo_include")); 272 | }, 273 | _ => {}, 274 | } 275 | continue; 276 | } 277 | 278 | // It's not a directive, it must be a pipeline or block 279 | 280 | let tag = &child.children()[0]; 281 | if tag.is_type(&PTNodeType::NAME) { 282 | symbols.blocks.insert(tag.token_value(), child.ptn.id); 283 | } 284 | 285 | // Blocks just need to be added to the Symbol table, but 286 | // pipelines need to be incorporated into the Workflow. 287 | 288 | if child.is_type(&PTNodeType::PIPELINE) { 289 | let pl_children = child.children(); 290 | 291 | let pl_name = &pl_children[0].token_value(); 292 | 293 | if !inter::check_name(pl_name) { 294 | log.error("Invalid pipeline name", "Make this a valid pipeline name", &pl_children[0].tok); 295 | continue; 296 | } 297 | 298 | let pl_list = &pl_children[1]; 299 | let mut stages = vec![]; 300 | 301 | for stage in pl_list.children() { 302 | if !stage.is_type(&PTNodeType::NAME) { 303 | log.error("Invalid stage name", "Make this a name", stage.tok); 304 | continue; 305 | } 306 | if !inter::check_name(stage.token_value()) { 307 | log.error("Invalid stage name", "Make this a valid stage name", stage.tok); 308 | continue; 309 | } 310 | 311 | let mut enabled = false; 312 | let mut tags = vec![]; 313 | for child in stage.children() { 314 | if child.is_type(&PTNodeType::FLAG) { 315 | enabled = true; 316 | } 317 | else if child.is_type(&PTNodeType::LIST) { 318 | for tag in child.children() { 319 | match tag.ptn.nt { 320 | PTNodeType::NAME => { tags.push(tag.token_value()) }, 321 | _ => { log.error("Invalid tag", "Make this a valid tag name", tag.tok) }, 322 | } 323 | } 324 | } 325 | } 326 | 327 | stages.push(PipelineStage { 328 | name: stage.token_value(), 329 | tags: tags, 330 | enabled: enabled, 331 | state: RunState::NOTRUN, 332 | pl_ptr: None, 333 | }); 334 | } 335 | flow.index.insert(pl_name, flow.lines.len()); 336 | flow.lines.push(Pipeline { name: pl_name, stages }); 337 | } 338 | } 339 | 340 | // We now determine which stages are enabled and which are disabled 341 | 342 | let mut enable_set : Vec<(com::Reference, bool)> = vec![]; 343 | 344 | // Build a list of all command line selectors 345 | 346 | for (com, refs) in &self.switches { 347 | match com.as_ref() { 348 | "enable" => refs.iter().for_each(|r| enable_set.push((r.clone(), true))), 349 | "disable" => refs.iter().for_each(|r| enable_set.push((r.clone(), false))), 350 | _ => (), 351 | } 352 | } 353 | 354 | // Iterate through selectors and enable and disable stages as appropriate 355 | 356 | for (r, val) in enable_set { 357 | match r { 358 | // --enable %foo 359 | com::Reference::TAG(ref rtag) => { 360 | for pl in &mut flow.lines { 361 | for stage in &mut pl.stages { 362 | if stage.tags.contains(&rtag.as_str()) { 363 | stage.enabled = val; 364 | } 365 | } 366 | } 367 | }, 368 | // --enable bar 369 | com::Reference::STAGE(ref rstage) => { 370 | for pl in &mut flow.lines { 371 | for stage in &mut pl.stages { 372 | if *rstage == stage.name { 373 | stage.enabled = val; 374 | } 375 | } 376 | } 377 | }, 378 | // --enable spqr.%foo 379 | com::Reference::PL_TAG(ref pl, ref rtag) => { 380 | if let Some(pl_ind) = flow.index.get(pl.as_str()) { 381 | for stage in &mut flow.lines[*pl_ind].stages { 382 | if stage.tags.contains(&rtag.as_str()) { 383 | stage.enabled = val; 384 | } 385 | } 386 | } 387 | }, 388 | // --enable spqr.bar 389 | com::Reference::PL_STAGE(ref pl, ref rstage) => { 390 | if let Some(pl_ind) = flow.index.get(pl.as_str()) { 391 | for stage in 
&mut flow.lines[*pl_ind].stages { 392 | if stage.name == *rstage { 393 | stage.enabled = val; 394 | } 395 | } 396 | } 397 | }, 398 | // --enable "*" 399 | com::Reference::ALL => { 400 | for pl in &mut flow.lines { 401 | for stage in &mut pl.stages { 402 | stage.enabled = val; 403 | } 404 | } 405 | }, 406 | _ => unreachable!(), 407 | } 408 | } 409 | 410 | // Link references between pipelines 411 | for pl in &mut flow.lines { 412 | for stage in &mut pl.stages { 413 | if let Some(nxt_pl) = flow.index.get(&stage.name) { 414 | (*stage).pl_ptr = Some(*nxt_pl); 415 | } 416 | } 417 | } 418 | 419 | // Light the blue touch-paper! 420 | flow.execute(&self, &mut symbols, log); 421 | 422 | env::set_current_dir(&cwd).unwrap_or_else( | _ | { 423 | log.sys_terminal("Could not change CWD!"); 424 | }); 425 | } 426 | } 427 | -------------------------------------------------------------------------------- /src/parse.rs: -------------------------------------------------------------------------------- 1 | use std::str; 2 | use std::fmt; 3 | use util; 4 | 5 | // A span represents a slice into the input string 6 | // Typically, a token will encapsulate a Span 7 | 8 | #[derive(Copy, Clone)] 9 | pub struct Span<'src> { 10 | pub src: &'src str, 11 | pub lptr: usize, 12 | pub rptr: usize, 13 | } 14 | 15 | impl<'src> Span<'src> { 16 | fn single(src: &'src str, ptr: usize) -> Span { 17 | Span {src, lptr: ptr, rptr: ptr} 18 | } 19 | 20 | fn begin(src: &'src str, lptr: usize) -> Span { 21 | Span {src, lptr, rptr: 2_000_000_000} 22 | } 23 | 24 | fn conclude(&mut self, rptr: usize) { 25 | self.rptr = rptr; 26 | } 27 | 28 | fn conclude_prev(&mut self, rptr_plus: usize) { 29 | self.rptr = rptr_plus - 1; 30 | } 31 | 32 | fn shrink(&mut self, n: usize) { 33 | self.lptr += n; 34 | self.rptr -= n; 35 | } 36 | 37 | pub fn slice(&self) -> &'src str { 38 | str::from_utf8(&self.src.as_bytes()[self.lptr..(self.rptr + 1)]).unwrap() 39 | } 40 | } 41 | 42 | impl<'src> fmt::Debug for Span<'src> { 43 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 44 | write!(f, "{}", self.slice()) 45 | } 46 | } 47 | 48 | #[derive(Copy, Clone, Debug)] 49 | pub enum TokenType { 50 | STRING , // raw or "quoted" 51 | COMMAND, // git clone 52 | 53 | LBRACE, // { 54 | RBRACE, // } 55 | LBRACK, // [ 56 | RBRACK, // ] 57 | ARROW , // -> 58 | AT , // @ 59 | EQUALS, // = 60 | DARROW, // => 61 | AARROW, // >> 62 | COMMA , // , 63 | PIPE , // | 64 | COLON , // : 65 | HASH , // # 66 | ERR , 67 | } 68 | 69 | #[derive(Copy, Clone)] 70 | pub struct Token<'src> { 71 | id: usize, 72 | pub lno: usize, 73 | pub tt: TokenType, 74 | pub val: Span<'src>, 75 | } 76 | 77 | impl<'src> fmt::Debug for Token<'src> { 78 | fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { 79 | write!(f, "({} | {:?} | {:?})", self.id, self.tt, self.val) 80 | } 81 | } 82 | 83 | enum Within { 84 | NONE , 85 | QSTRING , 86 | BSTRING , 87 | COMSTART, 88 | COMMAND , 89 | ARROW , 90 | DARROW , 91 | AARROW , 92 | } 93 | 94 | fn breaking(c: char) -> bool { 95 | if c.is_alphanumeric() { 96 | return false; 97 | } 98 | 99 | if c.is_whitespace() { 100 | return true; 101 | } 102 | 103 | ['{','}','[',']','$','@','-','>','=',',','!','|','#'].iter().find(|b| **b == c).is_some() 104 | } 105 | 106 | /* tokenise() takes input strings and processes them into strings of tokens */ 107 | 108 | pub fn tokenise<'src>(log: &mut util::Log, lno: usize, init_id: &mut usize, input: &'src str) -> Vec> { 109 | 110 | if input.trim_start().starts_with("//") { 111 | return vec![]; 112 | } 113 | 114 | let mut id = *init_id; 
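// State for the single pass over this line's characters: `within` records
// what construct the cursor is currently inside (quoted or bare string,
// command, or one of the arrow forms), `esc` tracks a pending backslash
// escape, and `span` accumulates the slice of `input` that will become the
// next token. `idx` is advanced manually so a state can re-examine the
// current character via `continue 'tok` without consuming it.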
115 | let mut within: Within = Within::NONE; 116 | let mut esc: bool = false; 117 | let mut span: Span = Span::single(input, 0); 118 | let mut toks = vec![]; 119 | 120 | let ci : Vec<(usize, char)> = input.char_indices().collect(); 121 | let inlen = ci.len(); 122 | let mut idx: usize = 0; 123 | 124 | 'tok: while let Some((i, c)) = ci.get(idx) { 125 | let i = *i; let c = *c; 126 | match within { 127 | Within::NONE => { 128 | let stt = match c { 129 | '{' => Some(TokenType::LBRACE), 130 | '}' => Some(TokenType::RBRACE), 131 | '[' => Some(TokenType::LBRACK), 132 | ']' => Some(TokenType::RBRACK), 133 | '@' => Some(TokenType::AT ), 134 | ',' => Some(TokenType::COMMA ), 135 | '|' => Some(TokenType::PIPE ), 136 | ':' => Some(TokenType::COLON ), 137 | '#' => Some(TokenType::HASH ), 138 | _ => None, 139 | }; 140 | 141 | if let Some(stt) = stt { 142 | toks.push(Token { id, lno, tt: stt, val: Span::single(input, i) } ); 143 | id += 1 144 | } 145 | else { 146 | span = Span::begin(input, i); 147 | within = match c { 148 | '-' => Within::ARROW, 149 | '=' => Within::DARROW, 150 | '>' => Within::AARROW, 151 | '"' => Within::QSTRING, 152 | '$' => Within::COMSTART, 153 | c if !breaking(c) => Within::BSTRING, 154 | _ => Within::NONE, 155 | } 156 | } 157 | }, 158 | 159 | Within::QSTRING => { 160 | if c == '"' && !esc { 161 | span.conclude(i); 162 | span.shrink(1); 163 | toks.push(Token { id, lno, tt: TokenType::STRING, val: span }); 164 | id += 1; span = Span::single(input, 0); 165 | within = Within::NONE; 166 | } 167 | }, 168 | 169 | Within::BSTRING => { 170 | if breaking(c) && !esc { 171 | span.conclude_prev(i); 172 | toks.push(Token { id, lno, tt: TokenType::STRING, val: span } ); 173 | id += 1; span = Span::single(input, 0); 174 | within = Within::NONE; 175 | continue 'tok; 176 | } 177 | if c == '\\' && !esc { 178 | esc = true; 179 | } 180 | else if esc { 181 | esc = false; 182 | } 183 | 184 | }, 185 | 186 | Within::COMSTART => { 187 | if !c.is_whitespace() { 188 | span = Span::begin(input, i); 189 | within = Within::COMMAND; 190 | continue 'tok; 191 | } 192 | }, 193 | 194 | Within::COMMAND => { 195 | // Within a command 196 | }, 197 | 198 | arr @ Within::ARROW | arr @ Within::AARROW => { 199 | if c == '>' { 200 | span.conclude(i); 201 | toks.push(Token { id, lno, tt: match arr { Within::ARROW => TokenType::ARROW, Within::AARROW => TokenType::AARROW, _ => unreachable!() }, val: span } ); 202 | id += 1; span = Span::single(input, 0); 203 | within = Within::NONE; 204 | } 205 | else { 206 | span.conclude(i - 1); 207 | toks.push(Token { id, lno, tt: TokenType::ERR, val: span } ); 208 | log.error("Headless Arrow", "Add a '>' character", &toks.last().unwrap()); 209 | *init_id = id + 1; 210 | return toks; 211 | } 212 | }, 213 | 214 | Within::DARROW => { 215 | if c == '>' { 216 | span.conclude(i); 217 | toks.push(Token { id, lno, tt: TokenType::DARROW, val: span } ); 218 | id += 1; span = Span::single(input, 0); 219 | within = Within::NONE; 220 | } 221 | else { 222 | span.conclude(i - 1); 223 | toks.push(Token { id, lno, tt: TokenType::EQUALS, val: span } ); 224 | id += 1; span = Span::single(input, 0); 225 | within = Within::NONE; 226 | continue 'tok; 227 | } 228 | }, 229 | } 230 | idx += 1; 231 | 232 | if idx >= inlen { 233 | match within { 234 | Within::NONE => {}, 235 | Within::BSTRING => { 236 | span.conclude(i); 237 | toks.push(Token { id, lno, tt: TokenType::STRING, val: span } ); 238 | id += 1; 239 | }, 240 | Within::COMMAND => { 241 | span.conclude(i); 242 | toks.push(Token { id, lno, tt: 
TokenType::COMMAND, val: span } ); 243 | id += 1; 244 | }, 245 | _ => { 246 | span.conclude(i); 247 | toks.push(Token { id, lno, tt: TokenType::ERR, val: span } ); 248 | log.error("Unexpected EOF", "Close this construct", &toks.last().unwrap()); 249 | *init_id = id + 1; 250 | return toks; 251 | } 252 | } 253 | break; 254 | } 255 | } 256 | *init_id = id; 257 | toks 258 | } 259 | 260 | /* Having tokenised the input Jannfile, we can build a Parse Tree */ 261 | 262 | #[derive(PartialEq, Debug)] 263 | pub enum PTNodeType { 264 | ROOT , 265 | BLOCK , // name { } 266 | MAP , // [a, b, c] -> d { } 267 | CD , // path -> { } 268 | ASSIGN , // foo = bar 269 | COMMAND, // $ echo foo 270 | DIRECTIVE, // # include bar::spqr 271 | JNAME , // @connaught 272 | NAME , // Any variable or string 273 | LIST , // [foo, bar, baz] 274 | INSERT , // src => dst 275 | COPY , // src >> dst 276 | PIPELINE, // pl | stage1 : stage2 | stage3 277 | FLAG , // 'Virtual node', denotes if stage is enabled 278 | } 279 | 280 | #[derive(Debug)] 281 | pub struct ParseTreeNode { 282 | pub id: usize, 283 | pub parent: Option, 284 | pub children: Vec, 285 | pub nt: PTNodeType, 286 | pub tok_id: usize, 287 | } 288 | 289 | impl ParseTreeNode { 290 | fn rprint(&self, tree: &ParseTree, n: usize) { 291 | println!("{}{:?}: {:?} [{:?}]", "\t".repeat(n), self.id, self.nt, self.tok_id); 292 | for child in &self.children { 293 | tree.nodes[*child].rprint(&tree, n + 1); 294 | } 295 | } 296 | } 297 | 298 | #[derive(Debug)] 299 | pub struct ParseTree { 300 | nodes: Vec, 301 | } 302 | 303 | impl ParseTree { 304 | fn new() -> ParseTree { 305 | let root = ParseTreeNode { 306 | id: 0, 307 | parent: None, 308 | children: vec![], 309 | nt: PTNodeType::ROOT, 310 | tok_id: 0 311 | }; 312 | ParseTree { nodes: vec![root] } 313 | } 314 | 315 | fn add_node(&mut self, mut node: ParseTreeNode) -> usize { 316 | node.id = self.nodes.len(); 317 | let nid = node.id; 318 | if let Some(parent) = node.parent { 319 | self.nodes[parent].children.push(nid); 320 | } 321 | self.nodes.push(node); 322 | nid 323 | } 324 | 325 | fn bind_child(&mut self, parent: usize, child: usize) { 326 | self.nodes[parent].children.push(child); 327 | self.nodes[child].parent = Some(parent); 328 | } 329 | 330 | pub fn print_tree(&self) { 331 | self.nodes[0].rprint(&self, 0); 332 | } 333 | 334 | pub fn get_node(&self, id: usize) -> &ParseTreeNode { 335 | self.nodes.get(id).unwrap() 336 | } 337 | 338 | pub fn is_empty(&self) -> bool { 339 | self.nodes.len() == 1 340 | } 341 | } 342 | 343 | 344 | struct Parser<'log, 'src: 'log> { 345 | toks : &'src Vec>, 346 | backptr: usize, 347 | foreptr: usize, 348 | tree : ParseTree, 349 | log : &'log mut util::Log<'src>, 350 | } 351 | 352 | impl<'log, 'src> Parser<'log, 'src> { 353 | fn new(log: &'log mut util::Log<'src>, toks: &'src Vec) -> Parser<'log, 'src> { 354 | Parser { toks, backptr: 0, foreptr: 0, tree: ParseTree::new(), log } 355 | } 356 | 357 | fn tok(&self) -> &Token<'src> { 358 | self.toks.get(self.backptr).unwrap() 359 | } 360 | 361 | fn tok_id(&self) -> usize { 362 | self.toks.get(self.backptr).unwrap().id 363 | } 364 | 365 | fn has_cur(&self) -> bool { 366 | self.backptr < self.toks.len() 367 | } 368 | 369 | fn has_next(&self) -> bool { 370 | (self.backptr + 1) < self.toks.len() 371 | } 372 | 373 | fn retreat(&mut self) -> usize { 374 | self.backptr -= 1; 375 | self.backptr 376 | } 377 | 378 | fn step(&mut self) -> usize { 379 | self.backptr += 1; 380 | self.backptr 381 | } 382 | 383 | fn step_or_err(&mut self, msg: &str, hint: &str) -> 
Option { 384 | if !self.has_next() { 385 | self.terminal(msg, hint); 386 | None 387 | } 388 | else { 389 | Some(self.step()) 390 | } 391 | } 392 | 393 | fn peek(&mut self, n: usize) -> &Token<'src> { 394 | self.foreptr = self.backptr + n; 395 | self.toks.get(self.foreptr).unwrap() 396 | } 397 | 398 | fn orphan(&mut self, nt: PTNodeType, tok_id: usize) -> usize { 399 | let node = ParseTreeNode { 400 | id: 0, /* assigned by ParseTree */ 401 | parent: None, 402 | children: vec![], 403 | nt: nt, 404 | tok_id: tok_id, 405 | }; 406 | 407 | self.tree.add_node(node) 408 | } 409 | 410 | fn node(&mut self, parent: usize, nt: PTNodeType, tok_id: usize) -> usize { 411 | let node = ParseTreeNode { 412 | id: 0, /* assigned by ParseTree */ 413 | parent: Some(parent), 414 | children: vec![], 415 | nt: nt, 416 | tok_id: tok_id, 417 | }; 418 | 419 | self.tree.add_node(node) 420 | } 421 | 422 | fn error(&mut self, msg: &str, hint: &str) { 423 | let cur_tok = &self.tok().clone(); 424 | self.log.error(msg, hint, cur_tok); 425 | } 426 | 427 | fn terminal(&mut self, msg: &str, hint: &str) { 428 | let cur_tok = &self.tok().clone(); 429 | self.log.terminal(msg, hint, cur_tok); 430 | } 431 | 432 | } 433 | 434 | // parse_val - either a String (Name), JName, or List 435 | 436 | fn parse_val(parser: &mut Parser) -> Option { 437 | let cur_tt = parser.tok().tt; 438 | let tok_id = parser.tok_id(); 439 | match cur_tt { 440 | TokenType::STRING => { 441 | parser.step(); 442 | Some(parser.orphan(PTNodeType::NAME, tok_id)) 443 | }, 444 | TokenType::AT => { 445 | parser.step_or_err("Bare '@'", "Cannot conclude here")?; 446 | let name_tt = parser.tok().tt; 447 | let name_id = parser.tok_id(); 448 | match name_tt { 449 | TokenType::STRING => { 450 | parser.step(); 451 | Some(parser.orphan(PTNodeType::JNAME, name_id)) 452 | }, 453 | _ => { 454 | parser.error("Name must follow '@'", "Change this value to a name"); 455 | None 456 | }, 457 | } 458 | }, 459 | TokenType::LBRACK => { 460 | let list = parser.orphan(PTNodeType::LIST, tok_id); 461 | parser.step_or_err("Bare Left Bracket", "Cannot conclude here")?; 462 | loop { 463 | match parser.tok().tt { 464 | TokenType::RBRACK => { 465 | parser.step(); 466 | break Some(list); 467 | }, 468 | _ => { 469 | let elem = parse_val(parser)?; 470 | parser.tree.bind_child(list, elem); 471 | }, 472 | } 473 | 474 | match parser.tok().tt { 475 | TokenType::COMMA => { 476 | parser.step_or_err("Bare Comma", "Cannot conclude here")?; 477 | }, 478 | TokenType::RBRACK => { 479 | parser.step(); 480 | break Some(list); 481 | } 482 | _ => { 483 | parser.error("Malformed List", "Add a comma or bracket before here"); 484 | break None; 485 | }, 486 | } 487 | } 488 | }, 489 | _ => { parser.error("Expected value", "Add a value before here"); None }, 490 | } 491 | } 492 | 493 | // Basic recovery routine, just keep going til we find a right brace 494 | 495 | fn recover_block(parser: &mut Parser) { 496 | loop { 497 | if parser.step_or_err("Unclosed Brace", "Add a brace after here").is_none() { 498 | return; 499 | } 500 | 501 | match parser.tok().tt { 502 | TokenType::RBRACE => { 503 | parser.step(); 504 | return; 505 | }, 506 | _ => {}, 507 | } 508 | } 509 | } 510 | 511 | fn parse_block(parser: &mut Parser, tag: usize) -> Option { 512 | let block_id = parser.tok_id(); 513 | let block = parser.orphan(PTNodeType::BLOCK, block_id); 514 | parser.tree.bind_child(block, tag); 515 | parser.step_or_err("Unclosed Brace", "Add a brace after here")?; 516 | loop { 517 | if let Some(sub_stmt) = parse_stmt(parser) { 518 | if 
sub_stmt == 0 { 519 | break; 520 | } 521 | else { 522 | parser.tree.bind_child(block, sub_stmt); 523 | } 524 | } 525 | else { 526 | recover_block(parser); 527 | break; 528 | } 529 | } 530 | Some(block) 531 | } 532 | 533 | // parse_val_stmt - Parse statement with the structure ... 534 | 535 | fn parse_val_stmt(parser: &mut Parser) -> Option { 536 | let val = parse_val(parser)?; 537 | 538 | if !parser.has_cur() { 539 | parser.retreat(); 540 | parser.error("Bare Value", "Cannot conclude here"); 541 | return None; 542 | } 543 | 544 | let cur_tt = parser.tok().tt; 545 | let tok_id = parser.tok_id(); 546 | 547 | match cur_tt { 548 | TokenType::EQUALS => { 549 | let stmt = parser.orphan(PTNodeType::ASSIGN, tok_id); 550 | parser.tree.bind_child(stmt, val); 551 | parser.step_or_err("Bare Equals", "Cannot conclude here")?; 552 | let rval = parse_val(parser)?; 553 | parser.tree.bind_child(stmt, rval); 554 | Some(stmt) 555 | }, 556 | TokenType::AARROW => { 557 | let stmt = parser.orphan(PTNodeType::COPY, tok_id); 558 | parser.tree.bind_child(stmt, val); 559 | parser.step_or_err("Bare Copy Arrow", "Cannot conclude here")?; 560 | let rval = parse_val(parser)?; 561 | parser.tree.bind_child(stmt, rval); 562 | Some(stmt) 563 | }, 564 | TokenType::DARROW => { 565 | let stmt = parser.orphan(PTNodeType::INSERT, tok_id); 566 | parser.tree.bind_child(stmt, val); 567 | parser.step_or_err("Bare Insertion Arrow", "Cannot conclude here")?; 568 | let rval = parse_val(parser)?; 569 | parser.tree.bind_child(stmt, rval); 570 | Some(stmt) 571 | }, 572 | TokenType::PIPE | TokenType::COLON => { 573 | let mut enabled = match cur_tt { TokenType::PIPE => true, 574 | TokenType::COLON => false, 575 | _ => unreachable!() }; 576 | let mut bar_tok_id = tok_id; 577 | let stmt = parser.orphan(PTNodeType::PIPELINE, tok_id); 578 | parser.tree.bind_child(stmt, val); 579 | parser.step_or_err("Bare pipeline symbol", "Cannot conclude here")?; 580 | let stages = parser.orphan(PTNodeType::LIST, tok_id); 581 | loop { 582 | let stage = parse_val(parser)?; 583 | parser.tree.bind_child(stages, stage); 584 | if enabled { 585 | let stage_enabled = parser.orphan(PTNodeType::FLAG, bar_tok_id); 586 | parser.tree.bind_child(stage, stage_enabled); 587 | } 588 | if !parser.has_cur() { break; } 589 | match parser.tok().tt { 590 | TokenType::LBRACK => { 591 | let tags = parse_val(parser)?; 592 | parser.tree.bind_child(stage, tags); 593 | }, 594 | _ => (), 595 | } 596 | 597 | bar_tok_id = parser.tok_id(); 598 | match parser.tok().tt { 599 | TokenType::PIPE => { 600 | enabled = true; 601 | parser.step_or_err("Bare enabled pipe", "Cannot conclude here")?; 602 | }, 603 | TokenType::COLON => { 604 | enabled = false; 605 | parser.step_or_err("Bare disabled pipe", "Cannot conclude here")?; 606 | }, 607 | _ => { break; } 608 | } 609 | } 610 | parser.tree.bind_child(stmt, stages); 611 | Some(stmt) 612 | }, 613 | TokenType::ARROW => { 614 | parser.step_or_err("Bare arrow", "Cannot conclude here")?; 615 | 616 | match parser.tok().tt { 617 | TokenType::LBRACE => { 618 | let cd = parser.orphan(PTNodeType::CD, tok_id); 619 | parser.tree.bind_child(cd, val); 620 | return Some(parse_block(parser, cd)?); 621 | }, 622 | _ => (), 623 | } 624 | 625 | let map = parser.orphan(PTNodeType::MAP, tok_id); 626 | parser.tree.bind_child(map, val); 627 | 628 | 629 | let rval = parse_val(parser)?; 630 | parser.tree.bind_child(map, rval); 631 | 632 | if !parser.has_cur() { 633 | parser.retreat(); 634 | parser.error("Expected block", "Add a block after here"); 635 | return None; 636 | 
} 637 | 638 | match parser.tok().tt { 639 | TokenType::LBRACE => { 640 | Some(parse_block(parser, map)?) 641 | }, 642 | _ => { 643 | parser.error("Expected block", "Add a brace before here"); 644 | None 645 | }, 646 | } 647 | }, 648 | TokenType::LBRACE => { 649 | Some(parse_block(parser, val)?) 650 | }, 651 | _ => { parser.error("Malformed statement", "This token is invalid in this position"); None }, 652 | } 653 | } 654 | 655 | // parse_stmt - Parse statements of the form ... 656 | 657 | fn parse_stmt(parser: &mut Parser) -> Option { 658 | if !parser.has_cur() { 659 | parser.retreat(); 660 | return None; 661 | } 662 | 663 | let cur_tt = parser.tok().tt; 664 | let tok_id = parser.tok_id(); 665 | match cur_tt { 666 | TokenType::COMMAND => { 667 | let stmt = parser.orphan(PTNodeType::COMMAND, tok_id); 668 | parser.step(); 669 | Some(stmt) 670 | }, 671 | TokenType::HASH => { 672 | let stmt = parser.orphan(PTNodeType::DIRECTIVE, tok_id); 673 | parser.step(); 674 | let verb = parse_val(parser)?; 675 | parser.tree.bind_child(stmt, verb); 676 | let data = parse_val(parser)?; 677 | parser.tree.bind_child(stmt, data); 678 | Some(stmt) 679 | } 680 | TokenType::RBRACE => { parser.step(); Some(0) }, 681 | _ => { parse_val_stmt(parser) }, 682 | } 683 | } 684 | 685 | fn parse_file(parser: &mut Parser) { 686 | if parser.has_cur() { 687 | loop { 688 | if let Some(stmt) = parse_stmt(parser) { 689 | parser.tree.bind_child(0, stmt); 690 | if !parser.has_next() { 691 | break; 692 | } 693 | } 694 | else { 695 | break; 696 | } 697 | } 698 | } 699 | } 700 | pub fn parse<'log, 'src: 'log>(log: &'log mut util::Log<'src>, toks: &'src Vec>) -> ParseTree { 701 | let mut parser = Parser::new(log, toks); 702 | parse_file(&mut parser); 703 | parser.tree 704 | } 705 | --------------------------------------------------------------------------------
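The crate's src/main.rs is not reproduced in this part of the listing, so the driver below is only a sketch of how the front end in src/parse.rs appears to be driven, judging from the signatures above: tokenise() lexes one physical line at a time (lno is 1-based and init_id threads a token-id counter across lines), the per-line token vectors are concatenated, and parse() turns the combined stream into a ParseTree. The file name "example.Jannfile", the job label, the module layout and the error handling are illustrative assumptions; only the tokenise, parse, ParseTree and util::Log signatures are taken from the sources.

```rust
// Hypothetical driver, not taken from the crate's main.rs; only the
// tokenise()/parse()/ParseTree/Log signatures come from src/parse.rs and
// src/util.rs. Assumes those files are present as the `parse` and `util`
// modules (2015-edition paths, matching the `use parse;`/`use util;` style
// used throughout the sources).
use std::fs;

mod parse;
mod util;

fn main() {
    // Placeholder path, purely for illustration.
    let src = fs::read_to_string("example.Jannfile").expect("cannot read Jannfile");
    let lines: Vec<String> = src.lines().map(String::from).collect();

    // Log borrows the source lines so diagnostics can quote them.
    let mut log = util::Log::new("example".to_string(), &lines);

    // tokenise() is line-oriented: lno is 1-based for diagnostics and
    // init_id threads a token-id counter through successive calls.
    let mut init_id = 0usize;
    let mut toks = Vec::new();
    for (n, line) in lines.iter().enumerate() {
        toks.extend(parse::tokenise(&mut log, n + 1, &mut init_id, line));
    }

    // Build and dump the parse tree, then exit with a success/failure status.
    let tree = parse::parse(&mut log, &toks);
    if !tree.is_empty() {
        tree.print_tree();
    }
    log.conclude();
}
```

A real invocation would additionally build the com::Switches, deploy::DepOpt and inter::Artifact values that invoke::Invocation expects; those are constructed elsewhere in the crate and are outside the scope of this sketch.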