├── .gitignore ├── AUTHORS.txt ├── Cargo.toml ├── LICENSE.txt ├── Makefile ├── README.md └── src ├── format.rs ├── main.rs ├── test.rs ├── token.rs └── transform.rs /.gitignore: -------------------------------------------------------------------------------- 1 | *~ 2 | \#*\# 3 | \#* 4 | .#* 5 | *.x86 6 | *.llvm 7 | *.out 8 | *.boot 9 | *.rustc 10 | *.cmx 11 | *.dll 12 | *.def 13 | *.exe 14 | *.a 15 | *.rlib 16 | *.so 17 | *.dylib 18 | *.orig 19 | *.cmo 20 | *.cmi 21 | *.dSYM 22 | *.d 23 | *.o 24 | *.ll 25 | *.bc 26 | *.aux 27 | *.cp 28 | *.fn 29 | *.ky 30 | *.cps 31 | *.log 32 | *.pdf 33 | *.epub 34 | *.html 35 | *.pg 36 | *.toc 37 | *.tp 38 | *.vr 39 | *.patch 40 | *.diff 41 | *.rej 42 | *.swp 43 | *.swo 44 | *.tmp 45 | *.pyc 46 | *.elc 47 | .hg/ 48 | .hgignore 49 | .cproject 50 | .project 51 | .valgrindrc 52 | lexer.ml 53 | TAGS 54 | TAGS.emacs 55 | TAGS.vim 56 | version.ml 57 | version.texi 58 | /Makefile 59 | config.mk 60 | /rt/ 61 | /rustllvm/ 62 | /test/ 63 | /build/ 64 | /inst/ 65 | /mingw-build/ 66 | src/.DS_Store 67 | /tmp/ 68 | /stage0/ 69 | /dl/ 70 | /stage1/ 71 | /stage2/ 72 | /stage3/ 73 | *.bz2 74 | /doc/html 75 | /doc/latex 76 | /doc/std 77 | /doc/extra 78 | /nd/ 79 | /llvm/ 80 | version.md 81 | *.tex 82 | keywords.md 83 | x86_64-apple-darwin/ 84 | x86_64-unknown-linux-gnu/ 85 | i686-unknown-linux-gnu/ 86 | tmp.*.rs 87 | config.stamp 88 | .DS_Store 89 | src/etc/dl 90 | .settings/ 91 | build/ 92 | i686-pc-mingw32/ 93 | .rust 94 | .emacs.desktop 95 | /lib 96 | /build 97 | /bin 98 | /doc 99 | /target -------------------------------------------------------------------------------- /AUTHORS.txt: -------------------------------------------------------------------------------- 1 | Patrick Walton 2 | Jeff Olson 3 | Ramakrishnan Muthukrishnan 4 | -------------------------------------------------------------------------------- /Cargo.toml: -------------------------------------------------------------------------------- 1 | [package] 2 | 3 | name = "rustfmt" 4 | version = "0.0.1" 5 | readme = "README.md" 6 | authors = ["Patrick Walton ", "Jeff Olson "] 7 | tags = ["util"] 8 | 9 | [[bin]] 10 | 11 | name = "rustfmt" 12 | path = "src/main.rs" 13 | 14 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014 Mozilla Foundation 2 | 3 | Permission is hereby granted, free of charge, to any person obtaining a copy 4 | of this software and associated documentation files (the "Software"), to deal 5 | in the Software without restriction, including without limitation the rights 6 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 7 | copies of the Software, and to permit persons to whom the Software is 8 | furnished to do so, subject to the following conditions: 9 | 10 | The above copyright notice and this permission notice shall be included in 11 | all copies or substantial portions of the Software. 12 | 13 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 14 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 15 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 16 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 17 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 18 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN 19 | THE SOFTWARE. 
20 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | # Rust-Empty: An Makefile to get started with Rust 2 | # https://github.com/bvssvni/rust-empty 3 | # 4 | # The MIT License (MIT) 5 | # 6 | # Copyright (c) 2014 Sven Nilsen 7 | # 8 | # Permission is hereby granted, free of charge, to any person obtaining a copy of 9 | # this software and associated documentation files (the "Software"), to deal in 10 | # the Software without restriction, including without limitation the rights to 11 | # use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of 12 | # the Software, and to permit persons to whom the Software is furnished to do so, 13 | # subject to the following conditions: 14 | # 15 | # The above copyright notice and this permission notice shall be included in all 16 | # copies or substantial portions of the Software. 17 | # 18 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS 20 | # FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR 21 | # COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER 22 | # IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 23 | # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 24 | 25 | SHELL := /bin/bash 26 | 27 | # The default make command. 28 | # Change this to 'lib' if you are building a library. 29 | DEFAULT = exe 30 | # The entry file of library source. 31 | # Change this to support multi-crate source structure. 32 | # For advanced usage, you can rename the file 'rust-empty.mk' 33 | # and call it with 'make -f rust-empty.mk ' from your Makefile. 34 | LIB_ENTRY_FILE = src/main.rs 35 | # The entry file of executable source. 36 | EXE_ENTRY_FILE = src/main.rs 37 | 38 | EXAMPLE_FILES = examples/*.rs 39 | SOURCE_FILES = $(shell test -e src/ && find src -type f) 40 | 41 | COMPILER = rustc 42 | 43 | # For release: 44 | # COMPILER_FLAGS = -O 45 | # For debugging: 46 | COMPILER_FLAGS = -g -D warnings 47 | 48 | RUSTDOC = rustdoc 49 | 50 | # Extracts target from rustc. 51 | TARGET = $(shell rustc --version verbose 2> /dev/null | awk "/host:/ { print \$$2 }") 52 | # TARGET = x86_64-unknown-linux-gnu 53 | # TARGET = x86_64-apple-darwin 54 | 55 | TARGET_LIB_DIR = target/deps/ 56 | 57 | # Ask 'rustc' the file name of the library and use a dummy name if the source has not been created yet. 58 | # The dummy file name is used to trigger the creation of the source first time. 59 | # Next time 'rustc' will return the right file name. 60 | RLIB_FILE = $(shell (rustc --crate-type=rlib --crate-file-name "$(LIB_ENTRY_FILE)" 2> /dev/null) || (echo "dummy.rlib")) 61 | # You can't have quotes around paths because 'make' doesn't see it exists. 62 | RLIB = target/$(RLIB_FILE) 63 | DYLIB_FILE = $(shell (rustc --crate-type=dylib --crate-file-name "$(LIB_ENTRY_FILE)" 2> /dev/null) || (echo "dummy.dylib")) 64 | DYLIB = target/$(DYLIB_FILE) 65 | 66 | # Use 'VERBOSE=1' to echo all commands, for example 'make help VERBOSE=1'. 
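# How the switch works: the ifdef below sets Q to '@' (make's "do not echo
# this command" prefix) unless VERBOSE is defined. A recipe line such as
#     $(Q)mkdir -p bin
# then runs silently by default, while 'make VERBOSE=1' leaves Q empty so the
# command itself is printed before it runs.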
67 | ifdef VERBOSE 68 | Q := 69 | else 70 | Q := @ 71 | endif 72 | 73 | all: $(DEFAULT) 74 | 75 | help: 76 | $(Q)echo "--- rust-empty (0.6 003)" 77 | $(Q)echo "make run - Runs executable" 78 | $(Q)echo "make exe - Builds main executable" 79 | $(Q)echo "make lib - Both static and dynamic library" 80 | $(Q)echo "make rlib - Static library" 81 | $(Q)echo "make dylib - Dynamic library" 82 | $(Q)echo "make test - Tests library internally and externally" 83 | $(Q)echo "make test-internal - Tests library internally" 84 | $(Q)echo "make test-external - Tests library externally" 85 | $(Q)echo "make bench - Benchmarks library internally and externally" 86 | $(Q)echo "make bench-internal - Benchmarks library internally" 87 | $(Q)echo "make bench-external - Benchmarks library externally" 88 | $(Q)echo "make doc - Builds documentation for library" 89 | $(Q)echo "make git-ignore - Setup files to be ignored by Git" 90 | $(Q)echo "make examples - Builds examples" 91 | $(Q)echo "make cargo-lite-exe - Setup executable package" 92 | $(Q)echo "make cargo-lite-lib - Setup library package" 93 | $(Q)echo "make cargo-exe - Setup executable package" 94 | $(Q)echo "make cargo-lib - Setup library package" 95 | $(Q)echo "make rust-ci-lib - Setup Travis CI Rust library" 96 | $(Q)echo "make rust-ci-exe - Setup Travis CI Rust executable" 97 | $(Q)echo "make rusti - Setup 'rusti.sh' for interactive Rust" 98 | $(Q)echo "make watch - Setup 'watch.sh' for compilation on save" 99 | $(Q)echo "make loc - Count lines of code in src folder" 100 | $(Q)echo "make nightly-install - Installs Rust nightly build" 101 | $(Q)echo "make nightly-uninstall - Uninstalls Rust nightly build" 102 | $(Q)echo "make clean - Deletes binaries and documentation." 103 | $(Q)echo "make clear-project - WARNING: Deletes project files except 'Makefile'" 104 | $(Q)echo "make clear-git - WARNING: Deletes Git setup" 105 | $(Q)echo "make symlink-build - Creates a script for building dependencies" 106 | $(Q)echo "make symlink-info - Symlinked libraries dependency info" 107 | $(Q)echo "make target-dir - Creates directory for current target" 108 | 109 | .PHONY: \ 110 | bench \ 111 | bench-internal \ 112 | bench-external \ 113 | cargo-lib \ 114 | cargo-exe \ 115 | cargo-lite-lib \ 116 | cargo-lite-exe \ 117 | clean \ 118 | clear-git \ 119 | clear-project \ 120 | loc \ 121 | nightly-install \ 122 | nightly-uninstall \ 123 | run \ 124 | rusti \ 125 | rust-ci-lib \ 126 | rust-ci-exe \ 127 | symlink-build \ 128 | symlink-info \ 129 | target-dir \ 130 | test \ 131 | test-internal \ 132 | test-external \ 133 | watch 134 | 135 | nightly-install: 136 | $(Q)cd ~ \ 137 | && curl -s http://www.rust-lang.org/rustup.sh > rustup.sh \ 138 | && ( \ 139 | echo "Rust install-script stored as '~/rustup.sh'" ; \ 140 | read -p "Do you want to install? [y/n]:" -n 1 -r ; \ 141 | echo "" ; \ 142 | if [[ $$REPLY =~ ^[Yy]$$ ]] ; \ 143 | then \ 144 | cat rustup.sh | sudo sh ; \ 145 | fi \ 146 | ) 147 | 148 | nightly-uninstall: 149 | $(Q)cd ~ \ 150 | && curl -s http://www.rust-lang.org/rustup.sh > rustup.sh \ 151 | && ( \ 152 | echo "Rust install-script stored as '~/rustup.sh'" ; \ 153 | read -p "Do you want to uninstall? 
[y/n]:" -n 1 -r ; \ 154 | echo "" ; \ 155 | if [[ $$REPLY =~ ^[Yy]$$ ]] ; \ 156 | then \ 157 | cat rustup.sh | sudo sh -s -- --uninstall ; \ 158 | fi \ 159 | ) 160 | 161 | cargo-lite-exe: $(EXE_ENTRY_FILE) 162 | $(Q)( \ 163 | test -e cargo-lite.conf \ 164 | && echo "--- The file 'cargo-lite.conf' already exists" \ 165 | ) \ 166 | || \ 167 | ( \ 168 | echo -e "deps = [\n]\n\n[build]\ncrate_root = \"$(EXE_ENTRY_FILE)\"\nrustc_args = []\n" > cargo-lite.conf \ 169 | && echo "--- Created 'cargo-lite.conf' for executable" \ 170 | && cat cargo-lite.conf \ 171 | ) 172 | 173 | cargo-lite-lib: $(LIB_ENTRY_FILE) 174 | $(Q)( \ 175 | test -e cargo-lite.conf \ 176 | && echo "--- The file 'cargo-lite.conf' already exists" \ 177 | ) \ 178 | || \ 179 | ( \ 180 | echo -e "deps = [\n]\n\n[build]\ncrate_root = \"$(LIB_ENTRY_FILE)\"\ncrate_type = \"library\"\nrustc_args = []\n" > cargo-lite.conf \ 181 | && echo "--- Created 'cargo-lite.conf' for library" \ 182 | && cat cargo-lite.conf \ 183 | ) 184 | 185 | cargo-exe: $(EXE_ENTRY_FILE) 186 | $(Q)( \ 187 | test -e Cargo.toml \ 188 | && echo "--- The file 'Cargo.toml' already exists" \ 189 | ) \ 190 | || \ 191 | ( \ 192 | name=$${PWD##/*/} ; \ 193 | readme=$$((test -e README.md && echo -e "readme = \"README.md\"") || ("")) ; \ 194 | echo -e "[package]\n\nname = \"$$name\"\nversion = \"0.0.0\"\n$$readme\nauthors = [\"Your Name \"]\ntags = []\n\n[[bin]]\n\nname = \"$$name\"\npath = \"$(EXE_ENTRY_FILE)\"\n" > Cargo.toml \ 195 | && echo "--- Created 'Cargo.toml' for executable" \ 196 | && cat Cargo.toml \ 197 | ) 198 | 199 | cargo-lib: $(LIB_ENTRY_FILE) 200 | $(Q)( \ 201 | test -e Cargo.toml \ 202 | && echo "--- The file 'Cargo.toml' already exists" \ 203 | ) \ 204 | || \ 205 | ( \ 206 | name=$${PWD##/*/} ; \ 207 | readme=$$((test -e README.md && echo -e "readme = \"README.md\"") || ("")) ; \ 208 | echo -e "[package]\n\nname = \"$$name\"\nversion = \"0.0.0\"\n$$readme\nauthors = [\"Your Name \"]\ntags = []\n\n[[lib]]\n\nname = \"$$name\"\npath = \"$(LIB_ENTRY_FILE)\"\n" > Cargo.toml \ 209 | && echo "--- Created 'Cargo.toml' for executable" \ 210 | && cat Cargo.toml \ 211 | ) 212 | 213 | rust-ci-lib: $(LIB_ENTRY_FILE) 214 | $(Q)( \ 215 | test -e .travis.yml \ 216 | && echo "--- The file '.travis.yml' already exists" \ 217 | ) \ 218 | || \ 219 | ( \ 220 | echo -e "install:\n - wget http://static.rust-lang.org/dist/rust-nightly-x86_64-unknown-linux-gnu.tar.gz -O - | sudo tar zxf - --strip-components 1 -C /usr/local\nscript:\n - make lib\n" > .travis.yml \ 221 | && echo "--- Created '.travis.yml' for library" \ 222 | && cat .travis.yml \ 223 | ) 224 | 225 | rust-ci-exe: $(EXE_ENTRY_FILE) 226 | $(Q)( \ 227 | test -e .travis.yml \ 228 | && echo "--- The file '.travis.yml' already exists" \ 229 | ) \ 230 | || \ 231 | ( \ 232 | echo -e "install:\n - wget http://static.rust-lang.org/dist/rust-nightly-x86_64-unknown-linux-gnu.tar.gz -O - | sudo tar zxf - --strip-components 1 -C /usr/local\nscript:\n - make exe\n" > .travis.yml \ 233 | && echo "--- Created '.travis.yml' for executable" \ 234 | && cat .travis.yml \ 235 | ) 236 | 237 | doc: $(SOURCE_FILES) | src/ 238 | $(Q)$(RUSTDOC) $(LIB_ENTRY_FILE) -L "$(TARGET_LIB_DIR)" \ 239 | && echo "--- Built documentation" 240 | 241 | run: exe 242 | $(Q)cd bin/ \ 243 | && ./main 244 | 245 | target-dir: $(TARGET_LIB_DIR) 246 | 247 | exe: bin/main | $(TARGET_LIB_DIR) 248 | 249 | bin/main: $(SOURCE_FILES) | bin/ $(EXE_ENTRY_FILE) 250 | $(Q)$(COMPILER) --target "$(TARGET)" $(COMPILER_FLAGS) $(EXE_ENTRY_FILE) -o bin/main -L 
"$(TARGET_LIB_DIR)" -L "target" \ 251 | && echo "--- Built executable" \ 252 | && echo "--- Type 'make run' to run executable" 253 | 254 | test: test-internal 255 | $(Q)echo "--- Internal tests succeeded" 256 | 257 | test-external: bin/test-external 258 | $(Q)cd "bin/" \ 259 | && ./test-external 260 | 261 | bin/test-external: $(SOURCE_FILES) | rlib bin/ src/test.rs 262 | $(Q)$(COMPILER) --target "$(TARGET)" $(COMPILER_FLAGS) --test src/test.rs -o bin/test-external -L "$(TARGET_LIB_DIR)" -L "target" \ 263 | && echo "--- Built external test runner" 264 | 265 | test-internal: bin/test-internal 266 | $(Q)cd "bin/" \ 267 | && ./test-internal 268 | 269 | bin/test-internal: $(SOURCE_FILES) | rlib src/ bin/ 270 | $(Q)$(COMPILER) --target "$(TARGET)" $(COMPILER_FLAGS) --test $(LIB_ENTRY_FILE) -o bin/test-internal -L "$(TARGET_LIB_DIR)" -L "target" \ 271 | && echo "--- Built internal test runner" 272 | 273 | bench: bench-internal bench-external 274 | 275 | bench-external: test-external 276 | $(Q)bin/test-external --bench 277 | 278 | bench-internal: test-internal 279 | $(Q)bin/test-internal --bench 280 | 281 | lib: rlib dylib 282 | $(Q)echo "--- Type 'make test' to test library" 283 | 284 | rlib: $(RLIB) 285 | 286 | $(RLIB): $(SOURCE_FILES) | $(LIB_ENTRY_FILE) $(TARGET_LIB_DIR) 287 | $(Q)$(COMPILER) --target "$(TARGET)" $(COMPILER_FLAGS) --crate-type=rlib $(LIB_ENTRY_FILE) -L "$(TARGET_LIB_DIR)" --out-dir "target" \ 288 | && echo "--- Built rlib" 289 | 290 | dylib: $(DYLIB) 291 | 292 | $(DYLIB): $(SOURCE_FILES) | $(LIB_ENTRY_FILE) $(TARGET_LIB_DIR) 293 | $(Q)$(COMPILER) --target "$(TARGET)" $(COMPILER_FLAGS) --crate-type=dylib $(LIB_ENTRY_FILE) -L "$(TARGET_LIB_DIR)" --out-dir "target/" \ 294 | && echo "--- Built dylib" 295 | 296 | bin/: 297 | $(Q)mkdir -p bin 298 | 299 | $(TARGET_LIB_DIR): 300 | $(Q)mkdir -p $(TARGET_LIB_DIR) 301 | 302 | src/: 303 | $(Q)mkdir -p src 304 | 305 | examples-dir: 306 | $(Q)test -e examples \ 307 | || \ 308 | ( \ 309 | mkdir examples \ 310 | && echo -e "fn main() {\n\tprintln!(\"Hello!\");\n}\n" > examples/hello.rs \ 311 | && echo "--- Created examples folder" \ 312 | ) 313 | 314 | rust-dir: 315 | $(Q)mkdir -p .rust 316 | 317 | git-ignore: 318 | $(Q)( \ 319 | test -e .gitignore \ 320 | && echo "--- The file '.gitignore' already exists" \ 321 | ) \ 322 | || \ 323 | ( \ 324 | echo -e ".DS_Store\n*~\n*#\n*.o\n*.so\n*.swp\n*.dylib\n*.dSYM\n*.dll\n*.rlib\n*.dummy\n*.exe\n*-test\n/bin/main\n/bin/test-internal\n/bin/test-external\n/doc/\n/target/\n/build/\n/.rust/\nrusti.sh\nwatch.sh\n/examples/**\n!/examples/*.rs\n!/examples/assets/" > .gitignore \ 325 | && echo "--- Created '.gitignore' for git" \ 326 | && cat .gitignore \ 327 | ) 328 | 329 | examples: $(EXAMPLE_FILES) 330 | 331 | $(EXAMPLE_FILES): lib examples-dir 332 | $(Q)$(COMPILER) --target "$(TARGET)" $(COMPILER_FLAGS) $@ -L "$(TARGET_LIB_DIR)" -L "target" --out-dir examples/ \ 333 | && echo "--- Built '$@' (make $@)" 334 | 335 | $(EXE_ENTRY_FILE): | src/ 336 | $(Q)test -e $(EXE_ENTRY_FILE) \ 337 | || \ 338 | ( \ 339 | echo -e "fn main() {\n\tprintln!(\"Hello world!\");\n}" > $(EXE_ENTRY_FILE) \ 340 | ) 341 | 342 | src/test.rs: | src/ 343 | $(Q)test -e src/test.rs \ 344 | || \ 345 | ( \ 346 | touch src/test.rs \ 347 | ) 348 | 349 | $(LIB_ENTRY_FILE): | src/ 350 | $(Q)test -e $(LIB_ENTRY_FILE) \ 351 | || \ 352 | ( \ 353 | echo -e "#![crate_id = \"\"]\n#![deny(missing_doc)]\n\n//! 
Documentation goes here.\n" > $(LIB_ENTRY_FILE) \ 354 | ) 355 | 356 | clean: 357 | $(Q)rm -f "$(RLIB)" 358 | $(Q)rm -f "$(DYLIB)" 359 | $(Q)rm -rf "doc/" 360 | $(Q)rm -f "bin/main" 361 | $(Q)rm -f "bin/test-internal" 362 | $(Q)rm -f "bin/test-external" 363 | $(Q)echo "--- Deleted binaries and documentation" 364 | 365 | clear-project: 366 | $(Q)rm -f ".symlink-info" 367 | $(Q)rm -f "cargo-lite.conf" 368 | $(Q)rm -f "Cargo.toml" 369 | $(Q)rm -f ".travis.yml" 370 | $(Q)rm -f "rusti.sh" 371 | $(Q)rm -f "watch.sh" 372 | $(Q)rm -rf "target/" 373 | $(Q)rm -rf "src/" 374 | $(Q)rm -rf "bin/" 375 | $(Q)rm -rf "examples/" 376 | $(Q)rm -rf "doc/" 377 | $(Q)echo "--- Removed all source files, binaries and documentation" \ 378 | && echo "--- Content in project folder" \ 379 | && ls -a 380 | 381 | clear-git: 382 | $(Q)rm -f ".gitignore" 383 | $(Q)rm -rf ".git" 384 | $(Q)echo "--- Removed Git" \ 385 | && echo "--- Content in project folder" \ 386 | && ls -a 387 | 388 | # borrowed from http://stackoverflow.com/q/649246/1256624 389 | define RUSTI_SCRIPT 390 | #!/bin/bash 391 | 392 | #written by mcpherrin 393 | 394 | while true; do 395 | echo -n "> " 396 | read line 397 | TMP="`mktemp r.XXXXXX`" 398 | $(COMPILER) - -o $$TMP -L "$(TARGET_LIB_DIR)" < rusti.sh \ 447 | && chmod +x rusti.sh \ 448 | && echo "--- Created 'rusti.sh'" \ 449 | && echo "--- Type './rusti.sh' to start interactive Rust" \ 450 | ) 451 | 452 | # borrowed from http://stackoverflow.com/q/649246/1256624 453 | define WATCH_SCRIPT 454 | #!/bin/bash 455 | 456 | #written by zzmp 457 | 458 | # This script will recompile a rust project using `make` 459 | # every time something in the specified directory changes. 460 | 461 | # Watch files in infinite loop 462 | watch () { 463 | UNAME=$$(uname) 464 | if [ -e "$$2" ]; then 465 | echo "Watching files in $$2.." 466 | CTIME=$$(date "+%s") 467 | while :; do 468 | sleep 1 469 | for f in `find $$2 -type f -name "*.rs"`; do 470 | if [[ $$UNAME == "Darwin" ]]; then 471 | st_mtime=$$(stat -f "%m" "$$f") 472 | elif [[ $$UNAME == "FreeBSD" ]]; then 473 | st_mtime=$$(stat -f "%m" "$$f") 474 | else 475 | st_mtime=$$(stat -c "%Y" "$$f") 476 | fi 477 | if [ $$st_mtime -gt $$CTIME ]; then 478 | CTIME=$$(date "+%s") 479 | echo "~~~ Rebuilding" 480 | $$1 481 | if [ ! $$? -eq 0 ]; then 482 | echo "" 483 | fi 484 | fi 485 | done 486 | done 487 | else 488 | echo "$$2 is not a valid directory" 489 | fi 490 | } 491 | 492 | # Capture user input with defaults 493 | CMD=$${1:-make} 494 | DIR=$${2:-src} 495 | 496 | if [ $${CMD:0:2} = '-h' ]; then 497 | echo ' 498 | This script will recompile a rust project using `make` 499 | every time something in the specified directory changes. 
500 | 501 | Use: ./watch.sh [CMD] [DIR] 502 | Example: ./watch.sh "make run" src 503 | 504 | CMD: Command to execute 505 | Complex commands may be passed as strings 506 | `make` by default 507 | DIR: Directory to watch 508 | src by default 509 | 510 | If DIR is supplied, CMD must be as well.\n' 511 | else 512 | watch "$$CMD" "$$DIR" 513 | fi 514 | 515 | endef 516 | export WATCH_SCRIPT 517 | 518 | watch: $(TARGET_LIB_DIR) 519 | $(Q)( \ 520 | test -e watch.sh \ 521 | && echo "--- The file 'watch.sh' already exists" \ 522 | ) \ 523 | || \ 524 | ( \ 525 | echo -e "$$WATCH_SCRIPT" > watch.sh \ 526 | && chmod +x watch.sh \ 527 | && echo "--- Created 'watch.sh'" \ 528 | && echo "--- Type './watch.sh' to start compilation on save" \ 529 | && echo "--- Type './watch.sh -h' for more options" \ 530 | ) 531 | 532 | # borrowed from http://stackoverflow.com/q/649246/1256624 533 | define SYMLINK_BUILD_SCRIPT 534 | #!/bin/bash 535 | # written by bvssvni 536 | # Modify the setting to do conditional compilation. 537 | # For example "--cfg my_feature" 538 | SETTINGS="" 539 | # ================================================ 540 | 541 | MAKE=make 542 | if [ "$$OS" == "Windows_NT" ]; then 543 | MAKE=mingw32-make 544 | fi 545 | 546 | # Checks if an item exists in an array. 547 | # Copied from http://stackoverflow.com/questions/3685970/check-if-an-array-contains-a-value 548 | function contains() { 549 | local n=$$# 550 | local value=$${!n} 551 | for ((i=1;i < $$#;i++)) { 552 | if [ "$${!i}" == "$${value}" ]; then 553 | echo "y" 554 | return 0 555 | fi 556 | } 557 | echo "n" 558 | return 1 559 | } 560 | 561 | # This is a counter used to insert dependencies. 562 | # It is global because we need an array of all the 563 | # visited dependencies. 564 | i=0 565 | function build_deps { 566 | local current=$$(pwd) 567 | for symlib in $$(find $(TARGET_LIB_DIR) -type l) ; do 568 | cd $$current 569 | echo $$symlib 570 | local original_file=$$(readlink $$symlib) 571 | local original_dir=$$(dirname $$original_file) 572 | cd $$original_dir 573 | 574 | # Go to the git root directory. 575 | local current_git_dir=$$(git rev-parse --show-toplevel) 576 | echo "--- Parent $$current" 577 | echo "--- Child $$current_git_dir" 578 | cd $$current_git_dir 579 | 580 | # Skip building if it is already built. 581 | if [ $$(contains "$${git_dir[@]}" $$current_git_dir) == "y" ]; then 582 | echo "--- Visited $$current_git_dir" 583 | continue 584 | fi 585 | 586 | # Remember git directory to not build it twice 587 | git_dir[i]=$$current_git_dir 588 | let i+=1 589 | 590 | # Visit the symlinks and build the dependencies 591 | build_deps 592 | 593 | # First check for a 'build.sh' script with default settings. 594 | # Check for additional 'rust-empty.mk' file. \ 595 | # Compile with the settings flags. \ 596 | # If no other options, build with make. 597 | ( \ 598 | test -e build.sh \ 599 | && ./build.sh \ 600 | ) \ 601 | || \ 602 | ( \ 603 | test -e rust-empty.mk \ 604 | && $$MAKE -f rust-empty.mk clean \ 605 | && $$MAKE -f rust-empty.mk \ 606 | ) \ 607 | || \ 608 | ( \ 609 | echo "--- Building $$current_git_dir" \ 610 | && $$MAKE clean \ 611 | && $$MAKE \ 612 | ) 613 | done 614 | cd $$current 615 | } 616 | 617 | # Mark main project as visited to avoid infinite loop. 
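# (The contains() helper above answers via echo: 'contains "a" "b" "b"' prints
#  "y" and 'contains "a" "b" "c"' prints "n". Recording the main project here
#  makes build_deps treat it as already visited, so a symlink chain that leads
#  back to it is skipped rather than rebuilt forever.)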
618 | git_dir[i]=$$(pwd) 619 | let i+=1 620 | if [ "$$1" == "deps" ]; then 621 | build_deps 622 | fi 623 | 624 | echo "--- Building $$(pwd)" 625 | ( \ 626 | test -e rust-empty.mk \ 627 | && $$MAKE -f rust-empty.mk clean \ 628 | && $$MAKE -f rust-empty.mk COMPILER_FLAGS+="$$SETTINGS" \ 629 | ) \ 630 | || \ 631 | ( \ 632 | $$MAKE clean 633 | $$MAKE COMPILER_FLAGS+="$$SETTINGS" 634 | ) 635 | 636 | endef 637 | export SYMLINK_BUILD_SCRIPT 638 | 639 | symlink-build: 640 | $(Q)( \ 641 | test -e build.sh \ 642 | && echo "--- The file 'build.sh' already exists" \ 643 | ) \ 644 | || \ 645 | ( \ 646 | echo -e "$$SYMLINK_BUILD_SCRIPT" > build.sh \ 647 | && chmod +x build.sh \ 648 | && echo "--- Created 'build.sh'" \ 649 | && echo "--- Type './build.sh deps' to build everything" \ 650 | ) 651 | 652 | loc: 653 | $(Q)echo "--- Counting lines of .rs files in 'src' (LOC):" \ 654 | && find src/ -type f -name "*.rs" -exec cat {} \; | wc -l 655 | 656 | # Finds the original locations of symlinked libraries and 657 | # prints the commit hash with remote branches containing that commit. 658 | symlink-info: 659 | $(Q) current=$$(pwd) ; \ 660 | for symlib in $$(find $(TARGET_LIB_DIR) -type l) ; do \ 661 | cd $$current ; \ 662 | echo $$symlib ; \ 663 | original_file=$$(readlink $$symlib) ; \ 664 | original_dir=$$(dirname $$original_file) ; \ 665 | cd $$original_dir ; \ 666 | commit=$$(git rev-parse HEAD) ; \ 667 | echo $$commit ; \ 668 | echo "origin:" ; \ 669 | git config --get remote.origin.url ; \ 670 | echo "upstream:" ; \ 671 | git config --get remote.upstream.url ; \ 672 | echo "available in remote branches:" ; \ 673 | git branch -r --contains $$commit ; \ 674 | echo "" ; \ 675 | done \ 676 | > .symlink-info \ 677 | && cd $$current \ 678 | && echo "--- Created '.symlink-info'" \ 679 | && cat .symlink-info 680 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ### Important notice 2 | 3 | This project is deprecated in favor of nrc's `rustfmt` available here: 4 | 5 | https://github.com/rust-lang-nursery/rustfmt 6 | 7 | ## `rustfmt` -- code formatting Rust 8 | 9 | `rustfmt` is distributed under the terms of the "MIT License." See `LICENSE.txt` in the root of this repository for more information. 10 | 11 | ### Building `rustfmt` 12 | 13 | With a recent build of `rustc` and `cargo` installed: 14 | 15 | ~~~~ 16 | cargo build 17 | ~~~~ 18 | 19 | Failing a cargo install, the project can be built with: 20 | 21 | ~~~~ 22 | make 23 | ~~~~ 24 | 25 | or 26 | 27 | ~~~~ 28 | rustc src/main.rs 29 | ~~~~ 30 | 31 | This will give you a `rustfmt` or `main` binary that behaves as detailed below. 
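For example, to format a file by piping it through the built binary (the path below is an assumption — substitute wherever your build placed `rustfmt` or `main`):

~~~~
./target/rustfmt < src/main.rs > formatted.rs
~~~~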
32 | 33 | ### Functionality 34 | 35 | `rustfmt` currently: 36 | 37 | * Reads from `stdin` 38 | * Lexes the string to ensure correctness 39 | * Prints the lexed code to `stdout` in an idiomatically arranged format 40 | 41 | `rustfmt` does *not*: 42 | 43 | * Preserve comments 44 | * Lots of other stuff 45 | -------------------------------------------------------------------------------- /src/format.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2014 Mozilla Foundation 2 | // Permission is hereby granted, free of charge, to any person obtaining 3 | // a copy of this software and associated documentation files (the 4 | // "Software"), to deal in the Software without restriction, including 5 | // without limitation the rights to use, copy, modify, merge, publish, 6 | // distribute, sublicense, and/or sell 7 | // copies of the Software, and to permit persons to whom the Software is 8 | // furnished to do so, subject to the following conditions: 9 | // 10 | // * The above copyright notice and this permission notice shall be 11 | // included in all copies or substantial portions of the Software. 12 | // 13 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 14 | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 15 | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 16 | // IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 17 | // CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 18 | // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 19 | // SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 20 | 21 | // src/transform.rs 22 | 23 | use std::io::Writer; 24 | 25 | use syntax::parse::lexer::{TokenAndSpan, Reader}; 26 | use syntax::parse::token::Token; 27 | use syntax::parse::token::keywords; 28 | use syntax::parse::token; 29 | 30 | use token::{Comment, LexerVal, TransformedToken, BlankLine}; 31 | 32 | macro_rules! 
try_io( 33 | ($e:expr) => (match $e { 34 | Ok(_) => {}, 35 | Err(err) => return Err( 36 | format!("Err in Formatter: {}: '{}' details: {}", err.to_string(), err.desc, err.detail)) 37 | }) 38 | ) 39 | 40 | static TAB_WIDTH: i32 = 4; 41 | 42 | pub type FormatterResult = Result; 43 | 44 | enum ProductionToParse { 45 | MatchProduction, 46 | UseProduction, 47 | BracesProduction, 48 | ParenthesesProduction, 49 | AttributeProduction 50 | } 51 | 52 | pub struct LineToken { 53 | tok: TransformedToken, 54 | x_pos: i32, 55 | } 56 | 57 | impl LineToken { 58 | pub fn new(tok: TransformedToken) -> LineToken { 59 | LineToken { 60 | tok: tok, 61 | x_pos: 0, 62 | } 63 | } 64 | 65 | pub fn is_token(&self, token: &token::Token) -> bool { 66 | match &self.tok { 67 | &LexerVal(ref t) => &t.tok == token, 68 | _ => false 69 | } 70 | } 71 | pub fn is_blank_line(&self) -> bool { 72 | match &self.tok { 73 | &BlankLine => true, 74 | _ => false 75 | } 76 | } 77 | 78 | fn whitespace_needed_after(&self, next: &LineToken) -> bool { 79 | let (curr_tok, _curr_comment_ends_line) = match &self.tok { 80 | &LexerVal(ref token_and_span) => (token_and_span.tok.clone(), false), 81 | &Comment(_, _, ends_line) => (token::COMMENT, ends_line), 82 | _ => (token::WS, false) 83 | }; 84 | let (next_tok, _next_comment_ends_line) = match &next.tok { 85 | &LexerVal(ref token_and_span) => (token_and_span.tok.clone(), false), 86 | &Comment(_, _, ends_line) => (token::COMMENT, ends_line), 87 | _ => (token::WS, false) 88 | }; 89 | match (&curr_tok, &next_tok) { 90 | (&token::IDENT(..), &token::IDENT(..)) => true, 91 | (&token::IDENT(..), &token::NOT) 92 | if !token::is_any_keyword(&curr_tok) => { 93 | // Macros. 94 | false 95 | } 96 | 97 | (&token::IDENT(..), _) if 98 | token::is_keyword(keywords::If, &curr_tok) || 99 | token::is_keyword(keywords::As, &curr_tok) || 100 | token::is_keyword(keywords::Match, &curr_tok) => { 101 | true 102 | } 103 | (_, &token::IDENT(..)) 104 | if token::is_keyword(keywords::If, &next_tok) => { 105 | true 106 | } 107 | 108 | (&token::COLON, _) => true, 109 | (&token::COMMA, _) => true, 110 | (&token::EQ, _) | (_, &token::EQ) => true, 111 | (&token::LT, _) | (_, &token::LT) => true, 112 | (&token::LE, _) | (_, &token::LE) => true, 113 | (&token::EQEQ, _) | (_, &token::EQEQ) => true, 114 | (&token::NE, _) | (_, &token::NE) => true, 115 | (&token::GE, _) | (_, &token::GE) => true, 116 | (&token::GT, _) | (_, &token::GT) => true, 117 | (&token::ANDAND, _) | (_, &token::ANDAND) => true, 118 | (&token::OROR, _) | (_, &token::OROR) => true, 119 | (&token::TILDE, _) | (_, &token::TILDE) => true, 120 | 121 | (&token::LPAREN, _) => false, 122 | (_, &token::RPAREN) => false, 123 | (&token::BINOP(token::AND), _) => false, 124 | 125 | (&token::BINOP(_), _) | (_, &token::BINOP(_)) => true, 126 | (&token::BINOPEQ(_), _) | (_, &token::BINOPEQ(_)) => true, 127 | 128 | (&token::MOD_SEP, _) | (_, &token::MOD_SEP) => false, 129 | 130 | (&token::RARROW, _) | (_, &token::RARROW) => true, 131 | (&token::FAT_ARROW, _) | (_, &token::FAT_ARROW) => true, 132 | (&token::LBRACE, _) | (_, &token::LBRACE) => true, 133 | (&token::RBRACE, _) | (_, &token::RBRACE) => true, 134 | (&token::SEMI, _) | (_, &token::COMMENT) => true, 135 | (&token::COMMENT, _) => true, 136 | _ => false, 137 | } 138 | } 139 | 140 | fn length(&self) -> i32 { 141 | match &self.tok { 142 | &LexerVal(ref token_and_span) => 143 | token::to_string(&token_and_span.tok).len() as i32, 144 | _ => 0 145 | } 146 | } 147 | 148 | fn preindentation(&self) -> i32 { 149 | match &self.tok 
{ 150 | &LexerVal(ref token_and_span) => { 151 | match &token_and_span.tok { 152 | &token::RBRACE => -TAB_WIDTH, 153 | _ => 0, 154 | } 155 | }, 156 | _ => 0 157 | } 158 | } 159 | } 160 | 161 | struct LogicalLine { 162 | tokens: Vec, 163 | } 164 | 165 | impl LogicalLine { 166 | fn new() -> LogicalLine { 167 | LogicalLine { 168 | tokens: Vec::new(), 169 | } 170 | } 171 | 172 | fn layout(&mut self, mut x_pos: i32) { 173 | if self.tokens.len() == 0 { 174 | return 175 | } 176 | 177 | for i in range(0, self.tokens.len()) { 178 | self.tokens.get_mut(i).x_pos = x_pos; 179 | x_pos += self.tokens[i].length(); 180 | 181 | if i < self.tokens.len() - 1 && 182 | self.tokens[i].whitespace_needed_after(&self.tokens[i + 1]) { 183 | x_pos += 1; 184 | } 185 | } 186 | } 187 | 188 | fn whitespace_after(&self, index: uint) -> i32 { 189 | if self.tokens.len() <= 1 || index >= self.tokens.len() - 1 { 190 | return 0 191 | } 192 | 193 | self.tokens[index + 1].x_pos - (self.tokens[index].x_pos + 194 | self.tokens[index].length()) 195 | } 196 | 197 | fn postindentation(&self) -> i32 { 198 | match self.tokens.as_slice().last() { 199 | None => 0, 200 | Some(line_token) => { 201 | match &line_token.tok { 202 | &LexerVal(ref token_and_span) => { 203 | match token_and_span.tok { 204 | token::LBRACE => TAB_WIDTH, 205 | _ => 0, 206 | } 207 | }, 208 | _ => 0 209 | } 210 | } 211 | } 212 | } 213 | } 214 | 215 | pub struct Formatter<'a> { 216 | input_tokens: &'a [TransformedToken], 217 | curr_idx: uint, 218 | indent: i32, 219 | logical_line: LogicalLine, 220 | last_token: Token, 221 | second_previous_token: Token, 222 | newline_after_comma: bool, 223 | newline_after_brace: bool, 224 | in_attribute: bool, 225 | output: &'a mut Writer 226 | } 227 | 228 | impl<'a> Formatter<'a> { 229 | pub fn new<'a>(input_tokens: &'a [TransformedToken], output: &'a mut Writer) -> Formatter<'a> { 230 | Formatter { 231 | input_tokens: input_tokens, 232 | curr_idx: 0, 233 | indent: 0, 234 | logical_line: LogicalLine::new(), 235 | last_token: token::SEMI, 236 | second_previous_token: token::SEMI, 237 | newline_after_comma: false, 238 | newline_after_brace: true, 239 | in_attribute: false, 240 | output: output 241 | } 242 | } 243 | 244 | pub fn process(mut self) { 245 | loop { 246 | match self.next_token() { 247 | Ok(true) => { 248 | match self.parse_production() { 249 | Err(e) => fail!(e), 250 | _ => {} 251 | } 252 | }, 253 | Ok(false) => break, 254 | Err(e) => fail!(e) 255 | } 256 | } 257 | } 258 | 259 | fn curr_tok(&'a self) -> &'a TransformedToken { 260 | &self.input_tokens[self.curr_idx] 261 | } 262 | 263 | fn is_eof(&'a self) -> bool { 264 | self.input_tokens.len() == self.curr_idx 265 | } 266 | 267 | fn token_ends_logical_line(&self, line_token: &LineToken) -> bool { 268 | match &line_token.tok { 269 | &LexerVal(ref token_and_span) => { 270 | match token_and_span.tok { 271 | token::SEMI => { 272 | match self.curr_tok() { 273 | &Comment(_, starts_line, _) => starts_line, 274 | _ => true 275 | } 276 | }, 277 | token::RBRACE => { 278 | match self.curr_tok() { 279 | &LexerVal(TokenAndSpan { tok: token::COMMA, sp: _ }) => { 280 | false 281 | } 282 | _ => true 283 | } 284 | }, 285 | token::COMMA => self.newline_after_comma, 286 | token::LBRACE => { 287 | match self.curr_tok() { 288 | &LexerVal(ref t) => { 289 | if t.tok == token::RBRACE { 290 | false 291 | } else { 292 | self.newline_after_brace 293 | } 294 | } 295 | _ => self.newline_after_brace 296 | } 297 | }, 298 | token::DOC_COMMENT(_) => true, 299 | token::RBRACKET => self.in_attribute, 300 | _ 
=> false, 301 | } 302 | }, 303 | &BlankLine => true, 304 | &Comment(_, _, ends_line) => ends_line 305 | } 306 | } 307 | 308 | fn token_starts_logical_line(&self, line_token: &LineToken) -> bool { 309 | match &line_token.tok { 310 | &LexerVal(ref token_and_span) => { 311 | match token_and_span.tok { 312 | token::RBRACE => { 313 | match (&self.second_previous_token, &self.last_token) { 314 | (&token::FAT_ARROW, &token::LBRACE) => false, 315 | _ => self.newline_after_brace 316 | } 317 | }, 318 | _ => false 319 | } 320 | }, 321 | &BlankLine => true, 322 | &Comment(_, starts_line, _) => starts_line, 323 | } 324 | } 325 | 326 | fn parse_tokens_up_to(&mut self, pred: |&token::Token| -> bool) -> FormatterResult { 327 | while try!(self.next_token()) { 328 | if pred(&self.last_token) { 329 | return Ok(true); 330 | } 331 | } 332 | return Ok(false); 333 | } 334 | 335 | fn parse_productions_up_to(&mut self, pred: |&token::Token| -> bool) -> FormatterResult { 336 | while try!(self.next_token()) { 337 | if pred(&self.last_token) { 338 | return Ok(true); 339 | } 340 | try!(self.parse_production()); 341 | } 342 | return Ok(false); 343 | } 344 | 345 | fn parse_match(&mut self) -> FormatterResult { 346 | // We've already parsed the keyword. Parse until we find a `{`. 347 | if !try!(self.parse_tokens_up_to(|token| *token == token::LBRACE)) { 348 | return Ok(false); 349 | } 350 | 351 | let old_newline_after_comma_setting = self.newline_after_comma; 352 | self.newline_after_comma = true; 353 | 354 | if !try!(self.parse_productions_up_to(|token| *token == token::RBRACE)) { 355 | return Ok(false); 356 | } 357 | 358 | self.newline_after_comma = old_newline_after_comma_setting; 359 | return Ok(true); 360 | } 361 | 362 | fn parse_use(&mut self) -> FormatterResult { 363 | let old_newline_after_brace_setting = self.newline_after_brace; 364 | self.newline_after_brace = false; 365 | 366 | // We've already parsed the keyword. Parse until we find a `{`. 367 | if !try!(self.parse_tokens_up_to(|token| *token == token::LBRACE || *token == token::SEMI)) { 368 | return Ok(false); 369 | } 370 | 371 | if self.last_token == token::LBRACE { 372 | let old_newline_after_comma_setting = self.newline_after_comma; 373 | self.newline_after_comma = false; 374 | 375 | if !try!(self.parse_productions_up_to(|token| *token == token::RBRACE)) { 376 | return Ok(false); 377 | } 378 | 379 | self.newline_after_comma = old_newline_after_comma_setting; 380 | } 381 | 382 | self.newline_after_brace = old_newline_after_brace_setting; 383 | return Ok(true); 384 | } 385 | 386 | fn parse_braces(&mut self) -> FormatterResult { 387 | let old_newline_after_comma_setting = self.newline_after_comma; 388 | self.newline_after_comma = true; 389 | // We've already parsed the '{'. Parse until we find a '}'. 390 | let result = try!(self.parse_productions_up_to(|token| *token == token::RBRACE)); 391 | 392 | self.newline_after_comma = old_newline_after_comma_setting; 393 | return Ok(result); 394 | } 395 | 396 | fn parse_parentheses(&mut self) -> FormatterResult { 397 | let old_newline_after_comma_setting = self.newline_after_comma; 398 | self.newline_after_comma = false; 399 | 400 | // We've already parsed the '('. Parse until we find a ')'. 401 | let result = try!(self.parse_productions_up_to(|token| *token == token::RPAREN)); 402 | 403 | self.newline_after_comma = old_newline_after_comma_setting; 404 | return Ok(result); 405 | } 406 | 407 | fn parse_attribute(&mut self) -> FormatterResult { 408 | // Parse until we find a ']'. 
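        // Note: unlike `newline_after_comma` in the productions above,
        // `in_attribute` is not saved and restored here; it is reset in
        // `flush_line()` once the attribute's logical line (ended by the
        // closing `]`, see `token_ends_logical_line`) has been emitted.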
409 | self.in_attribute = true; 410 | let result = try!(self.parse_productions_up_to(|token| *token == token::RBRACKET)); 411 | return Ok(result); 412 | } 413 | 414 | pub fn parse_production(&mut self) -> FormatterResult { 415 | let production_to_parse; 416 | // TRANSFORM 417 | match self.last_token { 418 | token::IDENT(..) if token::is_keyword(keywords::Match, &self.last_token) => { 419 | production_to_parse = MatchProduction; 420 | } 421 | token::IDENT(..) if token::is_keyword(keywords::Use, &self.last_token) => { 422 | production_to_parse = UseProduction; 423 | } 424 | token::LBRACE => production_to_parse = BracesProduction, 425 | token::LPAREN => production_to_parse = ParenthesesProduction, 426 | token::POUND => production_to_parse = AttributeProduction, 427 | _ => return Ok(true), 428 | } 429 | 430 | match production_to_parse { 431 | MatchProduction => return self.parse_match(), 432 | UseProduction => return self.parse_use(), 433 | BracesProduction => return self.parse_braces(), 434 | ParenthesesProduction => return self.parse_parentheses(), 435 | AttributeProduction => return self.parse_attribute() 436 | } 437 | } 438 | 439 | pub fn next_token(&mut self) -> FormatterResult { 440 | use syntax::parse::lexer::Reader; 441 | loop { 442 | if self.is_eof() { 443 | return Ok(false); 444 | } 445 | 446 | let current_line_token = LineToken::new(self.curr_tok().clone()); 447 | 448 | if self.token_starts_logical_line(¤t_line_token) && self.logical_line.tokens.len() > 0 { 449 | try!(self.flush_line()); 450 | continue; 451 | } 452 | 453 | if self.logical_line.tokens.len() == 0 { 454 | self.indent += current_line_token.preindentation(); 455 | } 456 | 457 | let curr_tok_copy = self.curr_tok().clone(); 458 | self.curr_idx += 1; 459 | let token_ends_logical_line = self.token_ends_logical_line(¤t_line_token); 460 | self.second_previous_token = self.last_token.clone(); 461 | self.last_token = match curr_tok_copy { 462 | LexerVal(token_and_span) => token_and_span.tok, 463 | BlankLine => token::WS, 464 | Comment(_, _, _) => token::COMMENT 465 | }; 466 | self.logical_line.tokens.push(current_line_token); 467 | if token_ends_logical_line { 468 | try!(self.flush_line()); 469 | } 470 | 471 | return Ok(true); 472 | } 473 | } 474 | 475 | fn flush_line(&mut self) -> FormatterResult<()> { 476 | self.in_attribute = false; 477 | self.logical_line.layout(self.indent); 478 | 479 | if !self.logical_line.tokens[0].is_blank_line() { 480 | for _ in range(0, self.indent) { 481 | try_io!(self.output.write_str(" ")); 482 | } 483 | } 484 | for i in range(0, self.logical_line.tokens.len()) { 485 | match &self.logical_line.tokens[i] { 486 | &LineToken{ tok: LexerVal(ref token_and_span), x_pos: _ } => { 487 | let curr_tok = &token_and_span.tok; 488 | try_io!(self.output.write_str(format!("{}", token::to_string(curr_tok)).as_slice())); 489 | 490 | // collapse empty blocks in match arms 491 | if (curr_tok == &token::LBRACE && i != self.logical_line.tokens.len() - 1) && 492 | self.logical_line.tokens[i+1].is_token(&token::RBRACE) { 493 | continue; 494 | } 495 | // no whitespace after right-brackets, before comma in match arm 496 | if (curr_tok == &token::RBRACE && i != self.logical_line.tokens.len() - 1) && 497 | self.logical_line.tokens[i+1].is_token(&token::COMMA) { 498 | continue; 499 | } 500 | for _ in range(0, self.logical_line.whitespace_after(i)) { 501 | try_io!(self.output.write_str(" ")); 502 | } 503 | }, 504 | &LineToken{ tok: Comment(ref comment_str, _, _), x_pos: _ } => { 505 | 
try_io!(self.output.write_str(comment_str.as_slice())); 506 | for _ in range(0, self.logical_line.whitespace_after(i)) { 507 | try_io!(self.output.write_str(" ")); 508 | } 509 | } 510 | _ => {} 511 | } 512 | } 513 | try_io!(self.output.write_line("")); 514 | 515 | self.indent += self.logical_line.postindentation(); 516 | self.logical_line = LogicalLine::new(); 517 | Ok(()) 518 | } 519 | } 520 | -------------------------------------------------------------------------------- /src/main.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2014 Mozilla Foundation 2 | // Permission is hereby granted, free of charge, to any person obtaining 3 | // a copy of this software and associated documentation files (the 4 | // "Software"), to deal in the Software without restriction, including 5 | // without limitation the rights to use, copy, modify, merge, publish, 6 | // distribute, sublicense, and/or sell 7 | // copies of the Software, and to permit persons to whom the Software is 8 | // furnished to do so, subject to the following conditions: 9 | // 10 | // * The above copyright notice and this permission notice shall be 11 | // included in all copies or substantial portions of the Software. 12 | // 13 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 14 | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 15 | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 16 | // IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 17 | // CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 18 | // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 19 | // SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 20 | 21 | // src/main.rs 22 | 23 | #![crate_name="rustfmt"] 24 | #![desc = "Rust code formatter"] 25 | #![license = "MIT"] 26 | #![feature(macro_rules)] 27 | 28 | extern crate syntax; 29 | 30 | use std::io; 31 | use std::str; 32 | use syntax::parse::lexer; 33 | use syntax::parse; 34 | 35 | use transform::transform_tokens; 36 | use format::Formatter; 37 | use token::extract_tokens; 38 | 39 | mod transform; 40 | mod format; 41 | mod token; 42 | #[cfg(test)] 43 | mod test; 44 | 45 | /// The Main Function 46 | pub fn main() { 47 | let source = io::stdin().read_to_end().unwrap(); 48 | let source = str::from_utf8(source.as_slice()).unwrap(); 49 | 50 | // nothing special 51 | let session = parse::new_parse_sess(); 52 | let filemap = parse::string_to_filemap(&session, source.to_string(), "".to_string()); 53 | let mut lexer = lexer::StringReader::new(&session.span_diagnostic, filemap); 54 | let mut stdout = io::stdio::stdout(); 55 | { 56 | let all_tokens = extract_tokens(&mut lexer); 57 | match transform_tokens(all_tokens.as_slice(), &session.span_diagnostic) { 58 | Ok(out_tokens) => { 59 | let formatter = Formatter::new(out_tokens.as_slice(), &mut stdout); 60 | formatter.process(); 61 | }, 62 | Err(e) => fail!("Error in transformer: {}", e) 63 | } 64 | } 65 | } 66 | -------------------------------------------------------------------------------- /src/test.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2014 Mozilla Foundation 2 | // Permission is hereby granted, free of charge, to any person obtaining 3 | // a copy of this software and associated documentation files (the 4 | // "Software"), to deal in the Software without restriction, including 5 | // without limitation the rights to use, copy, 
modify, merge, publish, 6 | // distribute, sublicense, and/or sell 7 | // copies of the Software, and to permit persons to whom the Software is 8 | // furnished to do so, subject to the following conditions: 9 | // 10 | // * The above copyright notice and this permission notice shall be 11 | // included in all copies or substantial portions of the Software. 12 | // 13 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 14 | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 15 | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 16 | // IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 17 | // CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 18 | // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 19 | // SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 20 | 21 | // src/test.rs 22 | 23 | use std::io::MemWriter; 24 | use std::str; 25 | use syntax::parse::lexer; 26 | use syntax::parse; 27 | use syntax::parse::token; 28 | use token::extract_tokens; 29 | 30 | use transform::transform_tokens; 31 | use format::{LineToken, Formatter}; 32 | 33 | fn test_rustfmt(source: &str) -> String { 34 | // nothing special 35 | let session = parse::new_parse_sess(); 36 | let filemap = parse::string_to_filemap(&session, source.to_string(), "".to_string()); 37 | let mut lexer = lexer::StringReader::new(&session.span_diagnostic, filemap); 38 | let mut output = MemWriter::new(); 39 | { 40 | let all_tokens = extract_tokens(&mut lexer); 41 | match transform_tokens(all_tokens.as_slice(), &session.span_diagnostic) { 42 | Ok(out_tokens) => { 43 | let formatter = Formatter::new(out_tokens.as_slice(), &mut output); 44 | formatter.process(); 45 | }, 46 | Err(e) => fail!("Error in trasformer: {}", e) 47 | } 48 | } 49 | str::from_utf8(output.unwrap().as_slice()).unwrap().to_string() 50 | } 51 | 52 | #[test] 53 | fn can_format_a_basic_function() { 54 | let result = test_rustfmt("fn main() {}"); 55 | assert_eq!(result, 56 | "fn main() { 57 | } 58 | ".to_string()); 59 | } 60 | 61 | #[test] 62 | fn adds_newline_after_attributes() { 63 | let result = test_rustfmt("#[foo]fn main() {}"); 64 | assert_eq!(result, 65 | "#[foo] 66 | fn main() { 67 | } 68 | ".to_string()); 69 | } 70 | 71 | #[test] 72 | fn adds_newline_after_doc_comments() { 73 | let result = test_rustfmt("/// The Main function 74 | fn main() {}"); 75 | assert_eq!("/// The Main function 76 | fn main() { 77 | } 78 | ".to_string(), result); 79 | } 80 | 81 | #[test] 82 | fn adds_newline_after_multiline_doc_comment() { 83 | let result = test_rustfmt( 84 | "/*! The Main function 85 | * some neat info goes here 86 | * ```` 87 | * bleh(); 88 | * ```` 89 | */ 90 | fn main() {}"); 91 | assert_eq!(result, 92 | "/*! 
The Main function 93 | * some neat info goes here 94 | * ```` 95 | * bleh(); 96 | * ```` 97 | */ 98 | fn main() { 99 | } 100 | ".to_string()); 101 | } 102 | 103 | #[test] 104 | fn indent_regression_from_port_to_result_api() { 105 | let input = "#![feature(macro_rules)] 106 | extern crate syntax; 107 | use foo; 108 | mod rustfmt; 109 | #[cfg(test)] 110 | mod test; 111 | pub fn main() { 112 | foo(); 113 | } 114 | "; 115 | 116 | assert_eq!(input.to_string(), test_rustfmt(input)); 117 | } 118 | 119 | #[test] 120 | fn should_preserve_empty_blocks() { 121 | let input = "match foo { 122 | _ => {} 123 | } 124 | "; 125 | 126 | assert_eq!(input.to_string(), test_rustfmt(input)); 127 | } 128 | 129 | #[test] 130 | fn full_regression() { 131 | let input = "#![feature(macro_rules)] 132 | extern crate syntax; 133 | use foo; 134 | mod rustfmt; 135 | #[cfg(test)] 136 | mod test; 137 | /// The Main Function 138 | pub fn main() { 139 | let source = io::stdin().read_to_end().unwrap(); 140 | let source = str::from_utf8(source.as_slice()).unwrap(); 141 | let session = parse::new_parse_sess(); 142 | let filemap = parse::string_to_filemap(&session, source.to_string(), foo.to_string()); 143 | let lexer = lexer::StringReader::new(&session.span_diagnostic, filemap); 144 | let mut output = io::MemWriter::new(); 145 | { 146 | let mut formatter = rustfmt::Formatter::new(lexer, &mut output); 147 | loop { 148 | match formatter.next_token() { 149 | Ok(true) => { 150 | match formatter.parse_production() { 151 | Err(e) => fail!(e), 152 | _ => {} 153 | } 154 | }, 155 | Ok(false) => break, 156 | Err(e) => fail!(e) 157 | } 158 | } 159 | } 160 | let output = str::from_utf8(output.unwrap().as_slice()).unwrap().to_string(); 161 | print!(bar, output); 162 | } 163 | "; 164 | 165 | assert_eq!(input.to_string(), test_rustfmt(input)); 166 | } 167 | 168 | #[test] 169 | fn should_preserve_single_empty_lines() { 170 | let input = "use foo; 171 | 172 | fn foo() { 173 | 174 | } 175 | "; 176 | assert_eq!("use foo; 177 | 178 | fn foo() { 179 | 180 | } 181 | ".to_string(), test_rustfmt(input)); 182 | } 183 | 184 | #[test] 185 | fn should_collapse_multiple_blank_lines_into_one() { 186 | let input = "fn foo() { 187 | 188 | 189 | } 190 | "; 191 | assert_eq!("fn foo() { 192 | 193 | } 194 | ".to_string(), test_rustfmt(input)); 195 | } 196 | 197 | #[test] 198 | fn has_blank_line_should_return_true_for_ws_with_more_than_one_unix_line_ending() { 199 | use transform::has_blank_line; 200 | let ws_str = " \n \n"; 201 | assert_eq!(true, has_blank_line(ws_str)); 202 | } 203 | 204 | #[test] 205 | fn has_blank_line_should_return_false_for_ws_with_single_unix_line_ending() { 206 | use transform::has_blank_line; 207 | let ws_str = " \n"; 208 | assert_eq!(false, has_blank_line(ws_str)); 209 | } 210 | 211 | #[test] 212 | fn has_blank_line_should_return_false_for_just_spaces_and_tabs() { 213 | use transform::has_blank_line; 214 | let ws_str = " \t "; 215 | assert_eq!(false, has_blank_line(ws_str)); 216 | } 217 | 218 | #[test] 219 | fn is_token_works() { 220 | let source = "{}"; 221 | let session = parse::new_parse_sess(); 222 | let filemap = parse::string_to_filemap(&session, source.to_string(), "".to_string()); 223 | let mut lexer = lexer::StringReader::new(&session.span_diagnostic, filemap); 224 | let all_tokens = extract_tokens(&mut lexer); 225 | let left_brace = LineToken::new(all_tokens[0].clone()); 226 | assert!(left_brace.is_token(&token::LBRACE) == true); 227 | assert!(left_brace.is_token(&token::RBRACE) == false); 228 | } 229 | 230 | #[test] 231 | fn 
regular_line_comments_on_their_own_line_are_preserved() { 232 | let input = "// standalone.. ends line 233 | pub fn main() { 234 | /* blah 235 | * blah blah blah 236 | * blah blah blah 237 | * ends line 238 | */ 239 | foo(); // ends line 240 | 1 + /* doesn't end line! */ 42 241 | } 242 | "; 243 | assert_eq!(input.to_string(), test_rustfmt(input)); 244 | } 245 | -------------------------------------------------------------------------------- /src/token.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2014 Mozilla Foundation 2 | // Permission is hereby granted, free of charge, to any person obtaining 3 | // a copy of this software and associated documentation files (the 4 | // "Software"), to deal in the Software without restriction, including 5 | // without limitation the rights to use, copy, modify, merge, publish, 6 | // distribute, sublicense, and/or sell 7 | // copies of the Software, and to permit persons to whom the Software is 8 | // furnished to do so, subject to the following conditions: 9 | // 10 | // * The above copyright notice and this permission notice shall be 11 | // included in all copies or substantial portions of the Software. 12 | // 13 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 14 | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 15 | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 16 | // IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 17 | // CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 18 | // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 19 | // SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 20 | 21 | // src/token.rs 22 | 23 | use syntax::diagnostic::SpanHandler; 24 | use syntax::parse::lexer::{StringReader, TokenAndSpan, Reader}; 25 | use syntax::parse::token; 26 | 27 | #[deriving(Clone)] 28 | pub enum TransformedToken { 29 | LexerVal(TokenAndSpan), 30 | BlankLine, 31 | Comment(String, bool, bool) 32 | } 33 | 34 | impl TransformedToken { 35 | pub fn contains_newline(&self, sh: &SpanHandler) -> bool { 36 | match self { 37 | &BlankLine => true, 38 | &LexerVal(ref t) => { 39 | if t.tok == token::WS { 40 | let comment_str = sh.cm.span_to_snippet(t.sp).unwrap(); 41 | comment_str.as_slice().contains("\n") 42 | } else { 43 | false 44 | } 45 | }, 46 | &Comment(ref c, _, _) => c.as_slice().contains("\n") 47 | } 48 | } 49 | } 50 | 51 | pub fn extract_tokens(lexer: &mut StringReader) -> Vec { 52 | let mut in_toknspans = Vec::new(); 53 | loop { 54 | match lexer.next_token() { 55 | t @ TokenAndSpan{tok: token::EOF, sp: _} => { 56 | in_toknspans.push(LexerVal(t)); 57 | break; 58 | }, 59 | t @ _ => in_toknspans.push(LexerVal(t)) 60 | } 61 | } 62 | in_toknspans 63 | } 64 | -------------------------------------------------------------------------------- /src/transform.rs: -------------------------------------------------------------------------------- 1 | // Copyright (c) 2014 Mozilla Foundation 2 | // Permission is hereby granted, free of charge, to any person obtaining 3 | // a copy of this software and associated documentation files (the 4 | // "Software"), to deal in the Software without restriction, including 5 | // without limitation the rights to use, copy, modify, merge, publish, 6 | // distribute, sublicense, and/or sell 7 | // copies of the Software, and to permit persons to whom the Software is 8 | // furnished to do so, subject to the following conditions: 9 | // 10 | 
// * The above copyright notice and this permission notice shall be 11 | // included in all copies or substantial portions of the Software. 12 | // 13 | // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 14 | // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF 15 | // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 16 | // IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY 17 | // CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, 18 | // TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE 19 | // SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 20 | 21 | // src/transform.rs 22 | 23 | use syntax::diagnostic::SpanHandler; 24 | use syntax::parse::lexer::{TokenAndSpan}; 25 | use syntax::parse::token; 26 | 27 | use token::{Comment, LexerVal, BlankLine, TransformedToken}; 28 | 29 | pub type TransformerResult = Result; 30 | 31 | #[allow(dead_code)] 32 | pub fn has_blank_line<'a>(ws_str: &'a str) -> bool { 33 | use std::str::StrSlice; 34 | let newlines: Vec<(uint, uint)> = ws_str.match_indices("\n").collect(); 35 | let newline_count = newlines.len(); 36 | newline_count > 1 37 | } 38 | 39 | pub fn transform_tokens(input_tokens: &[TransformedToken], span_handler: &SpanHandler) -> TransformerResult> { 40 | let mut out_tokens = Vec::new(); 41 | let mut curr_idx = 0; 42 | let in_len = input_tokens.len(); 43 | loop { 44 | if curr_idx >= in_len { 45 | break 46 | } 47 | 48 | let current_token = &input_tokens[curr_idx]; 49 | 50 | match current_token { 51 | &LexerVal(ref current_token) => { 52 | match current_token { 53 | t @ &TokenAndSpan { tok: token::WS, sp: _ } => { 54 | let ws_str = span_handler.cm.span_to_snippet(t.sp).unwrap(); 55 | if has_blank_line(ws_str.as_slice()) { 56 | out_tokens.push(BlankLine); 57 | } 58 | curr_idx += 1; 59 | }, 60 | t @ &TokenAndSpan { tok: token::COMMENT, sp: _ } => { 61 | handle_comment(input_tokens, &mut out_tokens, &mut curr_idx, span_handler, t); 62 | } 63 | t => { 64 | out_tokens.push(LexerVal(t.clone())); 65 | curr_idx += 1; 66 | } 67 | } 68 | }, 69 | t => { 70 | out_tokens.push(t.clone()); 71 | curr_idx += 1; 72 | } 73 | } 74 | } 75 | Ok(out_tokens) 76 | } 77 | 78 | fn handle_comment(input_tokens: &[TransformedToken], out_tokens: &mut Vec, curr_idx: &mut uint, span_handler: &SpanHandler, t: &TokenAndSpan) { 79 | let curr_idx_cpy = *curr_idx; 80 | let comment_str = span_handler.cm.span_to_snippet(t.sp).unwrap(); 81 | let starts_line = { 82 | let last_token = if curr_idx_cpy == 0 { 83 | &input_tokens[0] 84 | } else { 85 | &input_tokens[curr_idx_cpy - 1] 86 | }; 87 | last_token.contains_newline(span_handler) 88 | }; 89 | let ends_line = { 90 | let next_token = if curr_idx_cpy + 1 >= input_tokens.len() { 91 | &input_tokens[input_tokens.len() - 1] 92 | } else { 93 | &input_tokens[curr_idx_cpy + 1] 94 | }; 95 | next_token.contains_newline(span_handler) 96 | }; 97 | out_tokens.push(Comment(comment_str, starts_line, ends_line)); 98 | *curr_idx += 1; 99 | } 100 | --------------------------------------------------------------------------------