├── .coveragerc ├── .gitignore ├── .travis.yml ├── AUTHORS ├── COPYING ├── LICENSE ├── MANIFEST.in ├── README.md ├── TODO.md ├── anvil-local.bat ├── anvil-local.sh ├── anvil ├── __init__.py ├── async.py ├── async_test.py ├── build_logging.py ├── build_logging_test.py ├── cache.py ├── cache_test.py ├── commands │ ├── __init__.py │ ├── build_command.py │ ├── clean_command.py │ ├── completion_command.py │ ├── depends_command.py │ ├── deploy_command.py │ ├── overlay_command.py │ ├── serve_command.py │ ├── test_command.py │ └── util.py ├── config.py ├── config_test.py ├── context.py ├── context_test.py ├── depends.py ├── depends_test.py ├── enums.py ├── graph.py ├── graph_test.py ├── log_sink.py ├── manage.py ├── manage_test.py ├── module.py ├── module_test.py ├── project.py ├── project_test.py ├── rule.py ├── rule_test.py ├── rules │ ├── TODO │ ├── __init__.py │ ├── archive_rules.py │ ├── closure_gss_rules.py │ ├── closure_js_rules.py │ ├── closure_soy_rules.py │ ├── core_rules.py │ ├── core_rules_test.py │ ├── less_rules.py │ ├── overlay_rules.py │ ├── preprocessor_rules.py │ └── preprocessor_rules_test.py ├── task.py ├── task_test.py ├── test.py ├── util.py ├── util_test.py └── version.py ├── run-coverage.sh ├── run-tests.py ├── setup-local.bat ├── setup-local.sh ├── setup.py └── test └── fixtures ├── cache └── dummy.txt ├── config ├── .anvilrc └── deep │ ├── .anvilrc │ └── none │ └── dummy.txt ├── core_rules ├── concat_files │ ├── 1.txt │ ├── 2.txt │ ├── 3.txt │ ├── 4.txt │ ├── BUILD │ └── t.txt ├── copy_files │ ├── BUILD │ ├── a.txt │ └── dir │ │ ├── BUILD │ │ ├── b.txt │ │ └── c.not-txt └── file_set │ ├── BUILD │ ├── a.txt │ └── dir │ ├── BUILD │ └── b.txt ├── custom_rules └── rules │ ├── BUILD │ ├── other_rules.py │ └── some_rules.py ├── manage ├── bad_commands │ └── bad_commands.py └── commands │ └── test_commands.py ├── preprocessor_rules └── template_files │ ├── BUILD │ ├── a.nfo │ ├── a.txt │ └── dir │ ├── BUILD │ ├── b.nfo │ └── b.txt ├── resolution ├── 
BUILD ├── a │ └── BUILD ├── b │ ├── BUILD │ └── c │ │ ├── BUILD │ │ └── build_file.py └── empty │ └── dummy ├── rules ├── dummy_rules.py ├── dupe.py ├── more │ └── more_rules.py └── rule_x.py └── simple ├── BUILD ├── a.txt ├── a.txt-a ├── b.txt ├── b.txt-b ├── c.txt ├── c.txt-c ├── dir └── dir_2 │ ├── BUILD │ ├── d.txt │ ├── e.txt │ └── f.not-txt └── g.not-txt /.coveragerc: -------------------------------------------------------------------------------- 1 | [report] 2 | exclude_lines = 3 | pragma: no cover 4 | def __repr__ 5 | def __str__ 6 | if self.debug: 7 | if settings.DEBUG 8 | raise AssertionError 9 | raise NotImplementedError 10 | if 0: 11 | if __name__ == .__main__.: 12 | 13 | omit = 14 | BUILD 15 | anvil/test.py 16 | run-tests.py 17 | *_test.py 18 | /usr/** 19 | /tmp/** 20 | /Library/Python/** 21 | /System/Library/Frameworks/** 22 | /private/var/** 23 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # ============================================================================== 2 | # Misc system junk 3 | # ============================================================================== 4 | 5 | .DS_Store 6 | ._* 7 | .Spotlight-V100 8 | .Trashes 9 | .com.apple.* 10 | Thumbs.db 11 | Desktop.ini 12 | 13 | # ============================================================================== 14 | # Projects/IDE files 15 | # ============================================================================== 16 | 17 | # Sublime Text 18 | *.sublime-project 19 | *.sublime-workspace 20 | 21 | # VIM 22 | .*.sw[a-z] 23 | *.un~ 24 | Session.vim 25 | 26 | # TextMate 27 | *.tmproj 28 | *.tmproject 29 | tmtags 30 | 31 | # Eclipse 32 | .project 33 | .metadata 34 | 35 | # Emacs 36 | *~ 37 | 38 | # ============================================================================== 39 | # Temp generated code 40 | # 
============================================================================== 41 | 42 | *.py[co] 43 | .coverage 44 | 45 | # ============================================================================== 46 | # Logs and dumps 47 | # ============================================================================== 48 | 49 | npm-debug.log 50 | 51 | # ============================================================================== 52 | # Build system output 53 | # ============================================================================== 54 | 55 | # Python 56 | local_virtualenv/ 57 | *.egg-info 58 | build/ 59 | dist/ 60 | 61 | # npm/node 62 | .lock-wscript 63 | node_modules/**/build/ 64 | node_modules/.bin/ 65 | 66 | # coverage/etc 67 | scratch/ 68 | 69 | .build-cache/ 70 | build-out/ 71 | build-gen/ 72 | build-bin/ 73 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | # Travis CI script 4 | # http://travis-ci.org/#!/google/anvil-build 5 | 6 | language: python 7 | python: 8 | - 2.6 9 | - 2.7 10 | 11 | install: python setup.py -q install 12 | 13 | script: python setup.py test 14 | -------------------------------------------------------------------------------- /AUTHORS: -------------------------------------------------------------------------------- 1 | # This is a list of contributors 2 | 3 | # Names should be added to this file like so: 4 | # Name or Organization 5 | 6 | Ben Vanik 7 | Ziling Zhao 8 | 9 | -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 
9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. 
For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. 
You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 
123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. 
In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. 
We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright [yyyy] [name of copyright owner] 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. 203 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2012 Google Inc. All Rights Reserved. 2 | 3 | Licensed under the Apache License, Version 2.0 (the "License"); 4 | you may not use this file except in compliance with the License. 5 | You may obtain a copy of the License at 6 | 7 | http://www.apache.org/licenses/LICENSE-2.0 8 | 9 | Unless required by applicable law or agreed to in writing, software 10 | distributed under the License is distributed on an "AS IS" BASIS, 11 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 12 | See the License for the specific language governing permissions and 13 | limitations under the License. 
14 | 15 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHORS COPYING LICENSE README.md 2 | 3 | exclude MANIFEST.in 4 | global-exclude *_test.py 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Anvil - a modern build system 2 | ----------------------------- 3 | 4 | [![Build Status](https://secure.travis-ci.org/google/anvil-build.png)](http://travis-ci.org/google/anvil-build) 5 | 6 | **NOTE**: this project has been deprecated. [Bazel](https://bazel.build/) is an open source build system that shares roots with the design of anvil, and is a superset of the functionality available here. 7 | 8 | Anvil is a build system designed to ease the construction of content pipelines, taking many concepts and the rule file syntax that powers Google's internal build system and making them accessible in a small, open-source Python library. It features a rich and extensible build file format and many built-in rules to get started. 9 | 10 | Modern web apps and games have shifted to be more content than code and older build systems (make/scons/etc) are ill-suited for this shift. Most developers now roll their own shell scripts and hack together tools, but as projects scale in both size and complexity they fall apart. Limiting the engineering robustness of many large games is now this lack of solid content pipeline, not language or browser features. Anvil is designed to help fill this gap and let developers build polished, efficient, and cross-browser applications. 
11 | 12 | Features 13 | -------- 14 | 15 | * Parallelizable build process 16 | * Eventually distributed 17 | * Rich build files (Python-like) 18 | * Tiny environment, very few assumptions or dependencies 19 | * Build files are generally WYSIWYG, with no hidden state or behavior 20 | * Continuous build server and hosting mode 21 | * Easy to build live-refresh pages and content 22 | * Dependency management for rule types 23 | * Make it simple to checkout and build projects that depend on custom tools or packages 24 | * Extensible rule definitions 25 | * Simple Python to add custom data formats or actions 26 | 27 | JavaScript Bootstrap 28 | -------------------- 29 | 30 | Want to use anvil in a new JavaScript project? This is the easiest way: 31 | 32 | wget https://raw.github.com/gist/3814397/anvil-bootstrap.sh 33 | chmod +x anvil-bootstrap.sh 34 | ./anvil-bootstrap.sh my-project mp "Your Name" 35 | # this will create my-project/, git init, with 'mp' as the namespace for things 36 | cd my-project/ 37 | anvil build :release 38 | 39 | Getting Started 40 | --------------- 41 | 42 | # Clone (or add as a submodule) and setup a local install 43 | git clone https://benvanik@github.com/benvanik/anvil-build.git 44 | cd anvil-build/ 45 | python setup.py develop 46 | 47 | # 'anvil' is the main app, use it to build, test, or serve your content 48 | anvil build project:some_output 49 | 50 | Anvil is available via PyPI as '[anvil-build](http://pypi.python.org/pypi/anvil-build)' and can be installed via easy_install or pip, however it's recommended that it's used as a submodule instead. 51 | 52 | # Install the master git dev branch 53 | pip install anvil-build 54 | 55 | Note that bash completion should be enabled, but if not use `sudo anvil completion --install --bash` to install it. You can complete on options, module files, and if you add a `:` on rules. 56 | 57 | Build Files 58 | ----------- 59 | 60 | TODO: detailed overview 61 | 62 | The base unit in the build system is a rule. 
Rules describe some action that is performed on input files and produce output files, and they may reference other rules as inputs. Modules are files that contain many rules, and a project may be made up of many modules. When using the 'anvil' command line tool one specifies a rule or list of rules to build as targets and the build system takes care of building all of the required rules. 63 | 64 | The naming syntax for rules is `/some/path:rule_name`, with the colon splitting module file paths from the rule names contained within. The module files should always be called `BUILD`, which is a special name that the build system treats as the module for the parent directory. This enables one to omit `BUILD` when referencing rules, auto-expanding `/some/path:rule_name` to `/some/path/BUILD:rule_name`. A shorthand is allowed as `:rule_name` (omitting the path), enabling easy access to rules defined in the same file or, when dealing with rules from the command line, in the BUILD file in the current working directory. 65 | 66 | For example, here's two simple files: 67 | 68 | # in /some/path/BUILD: 69 | # All txt files under the current path, plus the outputs of foo:rule2 70 | file_set(name='rule1', srcs=glob('**/*.txt') + ['foo:rule2']) 71 | 72 | # in /some/path/foo/BUILD: 73 | file_set(name='rule2', srcs=['some_file.js']) 74 | 75 | From the command line when referencing these files: 76 | 77 | # if cwd = /some/path, all of these are equivalent: 78 | anvil build :rule1 79 | anvil build /some/path:rule1 80 | anvil build /some/path/BUILD:rule1 81 | 82 | TODO: dumping the build graph 83 | 84 | Rules 85 | ----- 86 | 87 | All rules have a few shared parameters, and most use them exclusively to do their work: 88 | 89 | * `name`: The name of the rule when referenced. Some rules will use this as the base name of their output file. 90 | * `srcs`: A list of source files the rule works on. May reference files, globs, or other rules. 
91 | * `deps`: A list of rules that must execute before the rule does, but the files are not used. 92 | 93 | TODO: talk about base rules (`file_set`, `copy_files`, `concat_files`, `template_files`) 94 | 95 | Commands 96 | -------- 97 | 98 | TODO: talk about built-in commands (`build`, `test`, `clean`, `depends`, `deploy`, `serve`) 99 | 100 | Custom Rules 101 | ------------ 102 | 103 | TODO: custom rules/.anvilrc files 104 | -------------------------------------------------------------------------------- /TODO.md: -------------------------------------------------------------------------------- 1 | foo#*.ext for src_filter on input side 2 | removes extra intermediate rules (*.css only from set) 3 | 4 | Caching 5 | ============================================================== 6 | 7 | copy_files/etc need to preserve times 8 | 9 | Stages 10 | ============================================================== 11 | 12 | Commands emit a list of stages. Each stage is executed in order and only if the previous stage was skipped/succeeded. 13 | 14 | StageManager: 15 | log_sinks 16 | log 17 | stages 18 | execute() 19 | 20 | Stage: 21 | name 22 | log 23 | execute() 24 | 25 | Simple commands can setup a single StageManager with stages and execute. The daemon would setup a new StageManager each time it goes to perform its work. 26 | 27 | StageManagers setup LogSinks as needed (ConsoleLogSink, RemoteLogSink, FileLogSink) and wire up all the loggers. 28 | 29 | Stages are logical subcommands, such as 'build', 'test', 'deploy', 'clean', etc. A 'rebuild' command may consist of 'clean' and 'build', where a 'test' command would have 'build' and 'test'. 30 | 31 | Logging 32 | ============================================================== 33 | 34 | LogSource 35 | -------------------- 36 | 37 | Builds a line list of output from a single rule. Pickleable to allow for passing across processes. 
38 | 39 | * Name 40 | * Parent / children 41 | * Sinks 42 | * Default verbosity level (inherit/+/-) 43 | * Methods for debug/info/warn/error 44 | * Status: waiting|running|succeeded|failed|skipped 45 | * Exception 46 | * start_time / end_time 47 | * work_unit / work_unit_count for progress tracking 48 | 49 | LogSink 50 | ----------- 51 | 52 | Receives change notifications for all logger objects. For every field updated on a LogSource, the LogSink will get notified that the change occurred. 53 | 54 | * source_open(source) 55 | * source_set_status(source, value) 56 | * source_set_exeception(source, ex) 57 | * source_append_line(source, line) 58 | * source_set_time(source, start_time, end_time) 59 | * source_set_work_unit(source, work_unit, work_unit_count) 60 | * source_close(source) 61 | 62 | Example sinks: 63 | * ConsoleLogSink: log to an interactive console 64 | * FileLogSink: log simple output to a file/pipe 65 | * RemoteLogSink: post to a log server 66 | 67 | Flow 68 | ---- 69 | 70 | One ScopedLogger for the entire command, one for each stage (build/test/etc), and one for each rule. 71 | 72 | Log Server 73 | ============================================================== 74 | 75 | Starts a streaming log server that can be viewed in the browser to watch build status/test reports, as well as providing an API for build sessions to post data with. 76 | 77 | anvil log_server --http_port=8000 78 | 79 | * / : index 80 | * Report history 81 | * Live-updating 'in-progress' list 82 | * Basic machine stats (CPU %, etc) 83 | * GET /report/N/ : report view 84 | * Info: build config/command line/etc 85 | * Timing information for whole report 86 | * Graph w/ timing for each node 87 | * State (success, fail, running, skipped) 88 | * Time elapsed 89 | * Click to show output 90 | * Console log (all output) 91 | * Test results 92 | * Take output from Buster/etc? 
93 | * POST /report/ : create report 94 | * POST /report/N/ : update report 95 | 96 | POST blobs 97 | ---------- 98 | 99 | Creation: 100 | ''' 101 | { 102 | 'host': { 103 | 'name': 'some-machine', 104 | 'platform': 'windows', 105 | 'processors': 9, 106 | ... 107 | }, 108 | 'working_dir': '/some/path/', 109 | 'command_line': 'anvil build --foo ...', 110 | 'command': 'build', 111 | 'stages': ['build', 'test', 'deploy'], 112 | 'configuration': '...', 113 | 'targets': [':c'], 114 | 'graph': { 115 | 'nodes': [ 116 | { 117 | 'name': ':a', 118 | 'path': '/some/path:a', 119 | ... 120 | } 121 | ], 122 | 'edges': [[':a', ':b'], [':b', ':c']] 123 | }, 124 | } 125 | ''' 126 | 127 | Update: 128 | ''' 129 | scoped logger blob: 130 | // any fields can be omitted to not update that field 131 | // output is always appended if present 132 | 'status': 'waiting|running|succeeded|failed|skipped', 133 | 'start_time': 0, 134 | 'end_time': 0, 135 | 'exception': undefined, 136 | 'output': 'new output', 137 | 'work_unit_count': 100, 138 | 'work_unit': 28, 139 | 140 | { 141 | {scoped logger blob}, 142 | 143 | 'children': { 144 | 'build': { 145 | {scoped logger blob}, 146 | 'children': { 147 | '/some/path:a': { 148 | {scoped logger blob}, 149 | 'src_paths': ['a', 'b'], 150 | 'all_output_files': ['ax', 'bx'] 151 | } 152 | } 153 | }, 154 | 'test': { 155 | {scoped logger blob}, 156 | // something 157 | } 158 | } 159 | } 160 | ''' 161 | 162 | 163 | Serving/Daemon 164 | ============================================================== 165 | 166 | 167 | anvilrc 168 | ============================================================== 169 | 170 | Universal arg: '--anvil_config=...' 
171 | If not specified, search up cwd path until .anvilrc or .git/ found 172 | 173 | Format 174 | ------ 175 | 176 | [core] 177 | jobs=2 178 | [commands] 179 | search_paths= 180 | some/path/ 181 | [rules] 182 | search_paths= 183 | some/path/ 184 | [serving] 185 | http_port=8080 186 | daemon_port=8081 187 | [logging] 188 | http_port=8000 189 | ... 190 | -------------------------------------------------------------------------------- /anvil-local.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | SET DIR=%~dp0 4 | python %DIR%\anvil\manage.py %* 5 | -------------------------------------------------------------------------------- /anvil-local.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | # Runs the local virtualenv copy of anvil instead of the global one. 6 | # Requires that things be setup with ./setup-local.sh (which this will attempt 7 | # to invoke if it notices things not quite right). 8 | 9 | 10 | DIR="$( cd "$( dirname "$0" )" && pwd )" 11 | 12 | 13 | # Check to see if setup. 14 | if [ ! -d "$DIR/local_virtualenv" ]; then 15 | echo "Missing local virtualenv - setting up..." 16 | $DIR/setup-local.sh 17 | fi 18 | 19 | 20 | source $DIR/local_virtualenv/bin/activate 21 | python $DIR/anvil/manage.py "$@" 22 | -------------------------------------------------------------------------------- /anvil/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """ 4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | -------------------------------------------------------------------------------- /anvil/async.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 
# Copyright 2012 Google Inc. All Rights Reserved.

__author__ = 'benvanik@google.com (Ben Vanik)'


class Deferred(object):
  """A simple deferred object, designed for single-threaded tracking of futures.

  A deferred completes exactly once, either via callback() (success) or
  errback() (failure). Functions registered after completion are invoked
  immediately with the completion arguments.
  """

  def __init__(self):
    """Initializes a deferred."""
    self._callbacks = []
    self._errbacks = []
    self._is_done = False
    self._failed = False
    self._exception = None
    self._args = None
    self._kwargs = None

  def is_done(self):
    """Whether the deferred has completed (either succeeded or failed).

    Returns:
      True if the deferred has completed.
    """
    return self._is_done

  def add_callback_fn(self, fn):
    """Adds a function that will be called when the deferred completes
    successfully.

    The arguments passed to the function will be those arguments passed to
    callback. If multiple callbacks are registered they will all be called with
    the same arguments, so don't modify them.

    If the deferred has already completed when this function is called then the
    callback will be made immediately.

    Args:
      fn: Function to call back.
    """
    if self._is_done:
      # Already completed - fire immediately on success, ignore on failure.
      if not self._failed:
        fn(*self._args, **self._kwargs)
      return
    self._callbacks.append(fn)

  def add_errback_fn(self, fn):
    """Adds a function that will be called when the deferred completes with
    an error.

    The arguments passed to the function will be those arguments passed to
    errback. If multiple callbacks are registered they will all be called with
    the same arguments, so don't modify them.

    If the deferred has already completed when this function is called then the
    callback will be made immediately.

    Args:
      fn: Function to call back.
    """
    if self._is_done:
      # Already completed - fire immediately on failure, ignore on success.
      if self._failed:
        fn(*self._args, **self._kwargs)
      return
    self._errbacks.append(fn)

  def callback(self, *args, **kwargs):
    """Completes a deferred successfully and calls any registered callbacks."""
    assert not self._is_done
    self._is_done = True
    self._args = args
    self._kwargs = kwargs
    # Snapshot then clear so re-entrant registration during dispatch sees the
    # completed state rather than mutating the list being iterated.
    callbacks = self._callbacks
    self._callbacks = []
    self._errbacks = []
    for fn in callbacks:
      fn(*args, **kwargs)

  def errback(self, *args, **kwargs):
    """Completes a deferred with an error and calls any registered errbacks."""
    assert not self._is_done
    self._is_done = True
    self._failed = True
    # Capture the exception, if one was provided, for later inspection.
    if len(args) and isinstance(args[0], Exception):
      self._exception = args[0]
    self._args = args
    self._kwargs = kwargs
    errbacks = self._errbacks
    self._callbacks = []
    self._errbacks = []
    for fn in errbacks:
      fn(*args, **kwargs)


def gather_deferreds(deferreds, errback_if_any_fail=False):
  """Waits until all of the given deferreds callback.
  Once all have completed this deferred will issue a callback
  with a list corresponding to each waiter, with a (success, args, kwargs)
  tuple for each deferred.

  The deferred returned by this will only ever issue callbacks, never errbacks,
  unless errback_if_any_fail is True.

  Args:
    deferreds: A list of deferreds to wait on.
    errback_if_any_fail: True to use errback instead of callback if at least one
        of the input deferreds fails.

  Returns:
    A deferred that is called back with a list of tuples corresponding to each
    input deferred. The tuples are of (success, args, kwargs) with success
    being a boolean True if the deferred used callback and False if it used
    errback.
  """
  # Allow a single deferred to be passed directly.
  if isinstance(deferreds, Deferred):
    deferreds = [deferreds]
  gather_deferred = Deferred()
  deferred_len = len(deferreds)
  if not deferred_len:
    # Nothing to wait for - complete immediately with an empty result list.
    gather_deferred.callback([])
    return gather_deferred

  # Mutable countdown cell; results indexed by input position.
  pending = [deferred_len]
  result_tuples = deferred_len * [None]
  def _complete():
    pending[0] -= 1
    if not pending[0]:
      if not errback_if_any_fail:
        gather_deferred.callback(result_tuples)
      else:
        any_failed = False
        for result in result_tuples:
          if not result[0]:
            any_failed = True
            break
        if any_failed:
          gather_deferred.errback(result_tuples)
        else:
          gather_deferred.callback(result_tuples)

  def _makecapture(n, deferred):
    # Closure factory so each deferred captures its own index.
    def _callback(*args, **kwargs):
      result_tuples[n] = (True, args, kwargs)
      _complete()
    def _errback(*args, **kwargs):
      result_tuples[n] = (False, args, kwargs)
      _complete()
    deferred.add_callback_fn(_callback)
    deferred.add_errback_fn(_errback)

  # BUGFIX: was xrange, which does not exist in Python 3; range is
  # behaviorally identical here (iteration only) on both versions.
  for n in range(deferred_len):
    _makecapture(n, deferreds[n])

  return gather_deferred
6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import unittest2 12 | 13 | from anvil.async import Deferred, gather_deferreds 14 | from anvil.test import AsyncTestCase 15 | 16 | 17 | class DeferredTest(unittest2.TestCase): 18 | """Behavioral tests of the Deferred type.""" 19 | 20 | def testMultiCall(self): 21 | d = Deferred() 22 | d.callback() 23 | with self.assertRaises(AssertionError): 24 | d.callback() 25 | d = Deferred() 26 | d.errback() 27 | with self.assertRaises(AssertionError): 28 | d.errback() 29 | d = Deferred() 30 | d.callback() 31 | with self.assertRaises(AssertionError): 32 | d.errback() 33 | d = Deferred() 34 | d.errback() 35 | with self.assertRaises(AssertionError): 36 | d.callback() 37 | 38 | def testCallbackArgs(self): 39 | cb = {} 40 | def cb_thunk(*args, **kwargs): 41 | cb['done'] = True 42 | cb['args'] = args 43 | cb['kwargs'] = kwargs 44 | 45 | d = Deferred() 46 | self.assertFalse(d.is_done()) 47 | d.callback() 48 | self.assertTrue(d.is_done()) 49 | 50 | d = Deferred() 51 | self.assertFalse(d.is_done()) 52 | d.errback() 53 | self.assertTrue(d.is_done()) 54 | 55 | d = Deferred() 56 | d.add_callback_fn(cb_thunk) 57 | d.callback() 58 | self.assertNotEqual(len(cb), 0) 59 | self.assertTrue(cb['done']) 60 | self.assertEqual(len(cb['args']), 0) 61 | self.assertEqual(len(cb['kwargs']), 0) 62 | cb.clear() 63 | 64 | d = Deferred() 65 | d.add_callback_fn(cb_thunk) 66 | d.callback('a', 'b') 67 | self.assertNotEqual(len(cb), 0) 68 | self.assertTrue(cb['done']) 69 | self.assertEqual(len(cb['args']), 2) 70 | self.assertEqual(cb['args'][0], 'a') 71 | self.assertEqual(cb['args'][1], 'b') 72 | self.assertEqual(len(cb['kwargs']), 0) 73 | cb.clear() 74 | 75 | d = Deferred() 76 | d.add_callback_fn(cb_thunk) 77 | d.callback('a', b='b') 78 | self.assertNotEqual(len(cb), 0) 79 | self.assertTrue(cb['done']) 80 | self.assertEqual(len(cb['args']), 1) 81 | self.assertEqual(cb['args'][0], 'a') 82 | self.assertEqual(len(cb['kwargs']), 1) 83 | 
self.assertEqual(cb['kwargs']['b'], 'b') 84 | cb.clear() 85 | 86 | def testCallbackOrder(self): 87 | cb = {} 88 | def cb_thunk(*args, **kwargs): 89 | cb['done'] = True 90 | cb['args'] = args 91 | cb['kwargs'] = kwargs 92 | 93 | d = Deferred() 94 | d.add_callback_fn(cb_thunk) 95 | d.callback('a') 96 | self.assertNotEqual(len(cb), 0) 97 | self.assertTrue(cb['done']) 98 | self.assertEqual(len(cb['args']), 1) 99 | self.assertEqual(cb['args'][0], 'a') 100 | self.assertEqual(len(cb['kwargs']), 0) 101 | cb.clear() 102 | 103 | d = Deferred() 104 | d.callback('a') 105 | d.add_callback_fn(cb_thunk) 106 | self.assertNotEqual(len(cb), 0) 107 | self.assertTrue(cb['done']) 108 | self.assertEqual(len(cb['args']), 1) 109 | self.assertEqual(cb['args'][0], 'a') 110 | self.assertEqual(len(cb['kwargs']), 0) 111 | cb.clear() 112 | 113 | d = Deferred() 114 | d.add_errback_fn(cb_thunk) 115 | d.errback('a') 116 | self.assertNotEqual(len(cb), 0) 117 | self.assertTrue(cb['done']) 118 | self.assertEqual(len(cb['args']), 1) 119 | self.assertEqual(cb['args'][0], 'a') 120 | self.assertEqual(len(cb['kwargs']), 0) 121 | cb.clear() 122 | 123 | d = Deferred() 124 | d.errback('a') 125 | d.add_errback_fn(cb_thunk) 126 | self.assertNotEqual(len(cb), 0) 127 | self.assertTrue(cb['done']) 128 | self.assertEqual(len(cb['args']), 1) 129 | self.assertEqual(cb['args'][0], 'a') 130 | self.assertEqual(len(cb['kwargs']), 0) 131 | cb.clear() 132 | 133 | d = Deferred() 134 | d.add_callback_fn(cb_thunk) 135 | d.errback('a') 136 | self.assertEqual(len(cb), 0) 137 | cb.clear() 138 | 139 | d = Deferred() 140 | d.errback('a') 141 | d.add_callback_fn(cb_thunk) 142 | self.assertEqual(len(cb), 0) 143 | cb.clear() 144 | 145 | d = Deferred() 146 | d.add_errback_fn(cb_thunk) 147 | d.callback('a') 148 | self.assertEqual(len(cb), 0) 149 | cb.clear() 150 | 151 | d = Deferred() 152 | d.callback('a') 153 | d.add_errback_fn(cb_thunk) 154 | self.assertEqual(len(cb), 0) 155 | cb.clear() 156 | 157 | def testMultiCallbacks(self): 158 
| cbs = [] 159 | def cb_multi_thunk(*args, **kwargs): 160 | cbs.append({ 161 | 'done': True, 162 | 'args': args, 163 | 'kwargs': kwargs 164 | }) 165 | 166 | d = Deferred() 167 | d.add_callback_fn(cb_multi_thunk) 168 | d.callback('a') 169 | self.assertEqual(len(cbs), 1) 170 | self.assertNotEqual(len(cbs[0]), 0) 171 | self.assertEqual(cbs[0]['args'][0], 'a') 172 | cbs[:] = [] 173 | 174 | d = Deferred() 175 | d.add_callback_fn(cb_multi_thunk) 176 | d.add_callback_fn(cb_multi_thunk) 177 | d.callback('a') 178 | self.assertEqual(len(cbs), 2) 179 | self.assertNotEqual(len(cbs[0]), 0) 180 | self.assertNotEqual(len(cbs[1]), 0) 181 | self.assertEqual(cbs[0]['args'][0], 'a') 182 | self.assertEqual(cbs[1]['args'][0], 'a') 183 | cbs[:] = [] 184 | 185 | d = Deferred() 186 | d.add_callback_fn(cb_multi_thunk) 187 | d.callback('a') 188 | d.add_callback_fn(cb_multi_thunk) 189 | self.assertEqual(len(cbs), 2) 190 | self.assertNotEqual(len(cbs[0]), 0) 191 | self.assertNotEqual(len(cbs[1]), 0) 192 | self.assertEqual(cbs[0]['args'][0], 'a') 193 | self.assertEqual(cbs[1]['args'][0], 'a') 194 | cbs[:] = [] 195 | 196 | d = Deferred() 197 | d.callback('a') 198 | d.add_callback_fn(cb_multi_thunk) 199 | d.add_callback_fn(cb_multi_thunk) 200 | self.assertEqual(len(cbs), 2) 201 | self.assertNotEqual(len(cbs[0]), 0) 202 | self.assertNotEqual(len(cbs[1]), 0) 203 | self.assertEqual(cbs[0]['args'][0], 'a') 204 | self.assertEqual(cbs[1]['args'][0], 'a') 205 | cbs[:] = [] 206 | 207 | 208 | class GatherTest(AsyncTestCase): 209 | """Behavioral tests for the async gather function.""" 210 | 211 | def testGather(self): 212 | d = gather_deferreds([]) 213 | self.assertCallbackEqual(d, []) 214 | 215 | da = Deferred() 216 | db = Deferred() 217 | dc = Deferred() 218 | df = Deferred() 219 | d = gather_deferreds([da, db, dc, df]) 220 | df.errback() 221 | dc.callback('c') 222 | db.callback('b') 223 | da.callback('a') 224 | self.assertCallbackEqual(d, [ 225 | (True, ('a',), {}), 226 | (True, ('b',), {}), 227 | 
(True, ('c',), {}), 228 | (False, (), {})]) 229 | 230 | da = Deferred() 231 | db = Deferred() 232 | dc = Deferred() 233 | df = Deferred() 234 | df.errback('f') 235 | dc.callback('c') 236 | d = gather_deferreds([da, db, dc, df]) 237 | db.callback('b') 238 | da.callback('a') 239 | self.assertCallbackEqual(d, [ 240 | (True, ('a',), {}), 241 | (True, ('b',), {}), 242 | (True, ('c',), {}), 243 | (False, ('f',), {})]) 244 | 245 | def testErrback(self): 246 | d = gather_deferreds([], errback_if_any_fail=True) 247 | self.assertCallbackEqual(d, []) 248 | 249 | da = Deferred() 250 | db = Deferred() 251 | dc = Deferred() 252 | d = gather_deferreds([da, db, dc], errback_if_any_fail=True) 253 | dc.callback('c') 254 | db.callback('b') 255 | da.callback('a') 256 | self.assertCallbackEqual(d, [ 257 | (True, ('a',), {}), 258 | (True, ('b',), {}), 259 | (True, ('c',), {})]) 260 | 261 | da = Deferred() 262 | db = Deferred() 263 | dc = Deferred() 264 | df = Deferred() 265 | d = gather_deferreds([da, db, dc, df], errback_if_any_fail=True) 266 | df.errback() 267 | dc.callback('c') 268 | db.callback('b') 269 | da.callback('a') 270 | self.assertErrbackEqual(d, [ 271 | (True, ('a',), {}), 272 | (True, ('b',), {}), 273 | (True, ('c',), {}), 274 | (False, (), {})]) 275 | 276 | da = Deferred() 277 | db = Deferred() 278 | dc = Deferred() 279 | df = Deferred() 280 | df.errback('f') 281 | dc.callback('c') 282 | d = gather_deferreds([da, db, dc, df], errback_if_any_fail=True) 283 | db.callback('b') 284 | da.callback('a') 285 | self.assertErrbackEqual(d, [ 286 | (True, ('a',), {}), 287 | (True, ('b',), {}), 288 | (True, ('c',), {}), 289 | (False, ('f',), {})]) 290 | 291 | 292 | if __name__ == '__main__': 293 | unittest2.main() 294 | -------------------------------------------------------------------------------- /anvil/cache.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Rule cache. 
4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | 8 | 9 | import base64 10 | import cPickle 11 | import os 12 | 13 | 14 | class RuleCache(object): 15 | """Abstract rule cache. 16 | """ 17 | 18 | def __init__(self, *args, **kwargs): 19 | """Initializes the rule cache. 20 | """ 21 | pass 22 | 23 | def save(self): 24 | """Saves the cache off to disk. 25 | """ 26 | pass 27 | 28 | def compute_delta(self, rule_path, mode, src_paths): 29 | """Computes a file delta for the given source paths. 30 | 31 | Args: 32 | rule_path: Full path to the rule. 33 | mode: Mode indicating which type of set to use. 34 | src_paths: A list of fully-resolved source file paths. 35 | 36 | Returns: 37 | A FileDelta with the information from the given paths. 38 | """ 39 | file_delta = FileDelta() 40 | file_delta.all_files.extend(src_paths) 41 | file_delta.added_files.extend(src_paths) 42 | file_delta.changed_files.extend(src_paths) 43 | return file_delta 44 | 45 | 46 | class FileRuleCache(RuleCache): 47 | """File-based rule cache. 48 | """ 49 | 50 | def __init__(self, cache_path, *args, **kwargs): 51 | """Initializes the rule cache. 52 | 53 | Args: 54 | cache_path: Path to store the cache file in. 
55 | """ 56 | super(FileRuleCache, self).__init__(self, *args, **kwargs) 57 | self.cache_path = os.path.join(cache_path, '.anvil-cache') 58 | self.data = dict() 59 | self._dirty = False 60 | 61 | if os.path.exists(self.cache_path): 62 | with open(self.cache_path, 'rb') as file_obj: 63 | self.data.update(cPickle.load(file_obj)) 64 | 65 | def save(self): 66 | if not self._dirty: 67 | return 68 | self._dirty = False 69 | try: 70 | os.makedirs(os.path.split(self.cache_path)[0]) 71 | except: 72 | pass 73 | with open(self.cache_path, 'wb') as file_obj: 74 | cPickle.dump(self.data, file_obj, 2) 75 | 76 | def compute_delta(self, rule_path, mode, src_paths): 77 | file_delta = FileDelta() 78 | file_delta.all_files.extend(src_paths) 79 | 80 | # Scan all files - we need this to compare regardless of whether we have 81 | # data from the cache 82 | # TODO(benvanik): make this parallel 83 | new_data = dict() 84 | for src_path in src_paths: 85 | if os.path.exists(src_path): 86 | file_time = os.path.getmtime(src_path) 87 | file_size = os.path.getsize(src_path) 88 | new_data[src_path] = '%s-%s' % (file_time, file_size) 89 | 90 | # Always swap for new data 91 | key = base64.b64encode('%s->%s' % (rule_path, mode)) 92 | old_data = self.data.get(key, None) 93 | self.data[key] = new_data 94 | 95 | # No previous data 96 | if old_data is None: 97 | if len(src_paths): 98 | self._dirty = True 99 | file_delta.changed_files.extend(src_paths) 100 | return file_delta 101 | 102 | # Find added/modified files 103 | for (new_path, new_key) in new_data.items(): 104 | old_key = old_data.get(new_path, None) 105 | if old_key: 106 | # File exists in both old/new, compare keys to see if modified 107 | if old_key != new_key: 108 | file_delta.modified_files.append(new_path) 109 | else: 110 | # File exists in new but not old, added 111 | file_delta.added_files.append(new_path) 112 | 113 | # Find removed files 114 | for old_path in old_data.keys(): 115 | if not old_path in new_data: 116 | 
file_delta.removed_files.append(old_path) 117 | 118 | file_delta.changed_files.extend(file_delta.added_files) 119 | file_delta.changed_files.extend(file_delta.modified_files) 120 | if len(file_delta.changed_files) or len(file_delta.removed_files): 121 | self._dirty = True 122 | return file_delta 123 | 124 | 125 | class FileDelta(object): 126 | """File delta information. 127 | """ 128 | 129 | def __init__(self): 130 | """Initializes a file delta. 131 | """ 132 | self.all_files = [] 133 | self.added_files = [] 134 | self.removed_files = [] 135 | self.modified_files = [] 136 | self.changed_files = [] 137 | 138 | def any_changes(self): 139 | """ 140 | Returns: 141 | True if any changes occurred. 142 | """ 143 | return len(self.changed_files) 144 | -------------------------------------------------------------------------------- /anvil/cache_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | """Tests for the cache module. 6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import os 12 | import unittest2 13 | 14 | import anvil.cache 15 | from anvil.test import FixtureTestCase 16 | 17 | 18 | class CacheTest(FixtureTestCase): 19 | """Behavioral tests for caching.""" 20 | fixture = 'cache' 21 | 22 | def test(self): 23 | rule_cache = anvil.cache.FileRuleCache(self.root_path) 24 | rule_cache.save() 25 | 26 | 27 | if __name__ == '__main__': 28 | unittest2.main() 29 | -------------------------------------------------------------------------------- /anvil/commands/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 
2 | 3 | """ 4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | -------------------------------------------------------------------------------- /anvil/commands/build_command.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Builds a set of target rules. 4 | 5 | Examples: 6 | # Build the given rules 7 | anvil build :some_rule some/path:another_rule 8 | # Force a full rebuild (essentially a 'anvil clean') 9 | anvil build --rebuild :some_rule 10 | """ 11 | 12 | __author__ = 'benvanik@google.com (Ben Vanik)' 13 | 14 | 15 | import anvil.commands.util as commandutil 16 | from anvil.manage import ManageCommand 17 | 18 | 19 | class BuildCommand(ManageCommand): 20 | def __init__(self): 21 | super(BuildCommand, self).__init__( 22 | name='build', 23 | help_short='Builds target rules.', 24 | help_long=__doc__) 25 | self._add_common_build_hints() 26 | self.completion_hints.extend([ 27 | '--rebuild', 28 | ]) 29 | 30 | def create_argument_parser(self): 31 | parser = super(BuildCommand, self).create_argument_parser() 32 | 33 | # Add all common args 34 | self._add_common_build_arguments(parser, targets=True) 35 | 36 | # 'build' specific 37 | parser.add_argument('--rebuild', 38 | dest='rebuild', 39 | action='store_true', 40 | default=False, 41 | help=('Cleans all output and caches before building.')) 42 | 43 | return parser 44 | 45 | def execute(self, args, cwd): 46 | # Handle --rebuild 47 | if args.rebuild: 48 | if not commandutil.clean_output(cwd): 49 | return False 50 | 51 | (result, all_target_outputs) = commandutil.run_build(cwd, args) 52 | 53 | print 'result %s, %s outputs' % (result, len(all_target_outputs)) 54 | #print all_target_outputs 55 | 56 | return 0 if result else 1 57 | -------------------------------------------------------------------------------- /anvil/commands/clean_command.py: 
# Copyright 2012 Google Inc. All Rights Reserved.

"""Cleans all build-* paths and caches.
Attempts to delete all paths the build system creates.
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


import os
import shutil
import sys

import anvil.commands.util as commandutil
from anvil.manage import ManageCommand


class CleanCommand(ManageCommand):
  """Command that deletes all build outputs and cache files."""

  def __init__(self):
    super(CleanCommand, self).__init__(
        name='clean',
        help_short='Cleans outputs and caches.',
        help_long=__doc__)

  def create_argument_parser(self):
    # No command-specific flags; the common parser is used unchanged.
    return super(CleanCommand, self).create_argument_parser()

  def execute(self, args, cwd):
    # Exit code 0 on success, 1 on failure.
    return 0 if commandutil.clean_output(cwd) else 1
28 | """ 29 | 30 | def get_profile_string(self): 31 | """Gets a string that can be placed in a .profile. 32 | 33 | Returns: 34 | A string in the current shell language for setting up completion. 35 | """ 36 | raise NotImplementedError() 37 | 38 | def install(self): 39 | """Installs a global completion script, if possible. 40 | This will generally require root access and should warn appropriately. 41 | 42 | Returns: 43 | True if the install was successful. 44 | """ 45 | raise NotImplementedError() 46 | 47 | 48 | class _BashCompletion(_ShellCompletion): 49 | """Bash shell completion utilities. 50 | """ 51 | 52 | _completion_str = """ 53 | _anvil_completion() 54 | { 55 | COMPREPLY=($(COMP_WORDS="${COMP_WORDS[*]}" COMP_CWORD=$COMP_CWORD \\ 56 | ANVIL_AUTO_COMPLETE=1 $1)) 57 | } 58 | complete -o default -F _anvil_completion anvil 59 | """ 60 | 61 | def get_profile_string(self): 62 | return """ 63 | # > anvil-build completion script%s# < anvil-build completion script 64 | """ % self._completion_str 65 | 66 | def install(self): 67 | file_str = """# Copyright 2012 Google Inc. All Rights Reserved. 68 | 69 | # Bash completion script for anvil-build 70 | %s 71 | """ % (self._completion_str) 72 | try: 73 | with io.open('/etc/bash_completion.d/anvil', 'wb') as f: 74 | f.write(file_str) 75 | print 'Successfully installed to /etc/bash_completion.d/' 76 | print 'Restart your shell or run the following to start completing:' 77 | print '$ source /etc/bash_completion.d/anvil' 78 | return True 79 | except IOError as e: 80 | print e 81 | print 'error: unable to write to /etc/bash_completion.d/' 82 | print ' try running with sudo!' 
83 | return False 84 | 85 | 86 | _COMPLETIONS = { 87 | 'bash': _BashCompletion(), 88 | } 89 | 90 | 91 | class CompletionCommand(ManageCommand): 92 | def __init__(self): 93 | super(CompletionCommand, self).__init__( 94 | name='completion', 95 | help_short='Provides scripts to enable shell completion.', 96 | help_long=__doc__) 97 | self.completion_hints.extend([ 98 | '--install', 99 | '--bash', 100 | ]) 101 | 102 | def create_argument_parser(self): 103 | parser = super(CompletionCommand, self).create_argument_parser() 104 | 105 | # 'completion' specific 106 | parser.add_argument('--install', 107 | dest='install', 108 | action='store_true', 109 | default=False, 110 | help=('Install the completion script to the global ' 111 | 'path, such as /etc/bash_completion.d/')) 112 | parser.add_argument('--bash', 113 | dest='shell', 114 | action='store_const', 115 | const='bash', 116 | help=('Generate completion code for bash.')) 117 | # TODO(benvanik): other shells? are there even any others that people use? 118 | 119 | return parser 120 | 121 | def execute(self, args, cwd): 122 | if not args.shell: 123 | self.create_argument_parser().print_help() 124 | print '\nerror: please specify a shell (such as --bash)' 125 | return 1 126 | 127 | if not _COMPLETIONS.has_key(args.shell): 128 | self.create_argument_parser().print_help() 129 | print '\nerror: shell environment "%s" not supported' % (args.shell) 130 | return 1 131 | 132 | completion = _COMPLETIONS[args.shell] 133 | if args.install: 134 | if completion.install(): 135 | return 0 136 | else: 137 | return 1 138 | else: 139 | print completion.get_profile_string() 140 | 141 | return 0 142 | -------------------------------------------------------------------------------- /anvil/commands/depends_command.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Scans all reachable rules for dependencies and installs them. 
# Copyright 2012 Google Inc. All Rights Reserved.

"""Scans all reachable rules for dependencies and installs them.
Given a set of target rules, all reachable rules will be scanned for
dependencies that they require to function (such as external Python libraries,
system tools/libraries/etc).

If the --install option is passed to the command it will attempt to install or
update all of the discovered dependencies. The command must be run as root
(via sudo) in order for this to work. For dependencies that install locally
(such as Node.js modules) they will be placed in the current working directory.

TODO(benvanik): it'd be nice to support * syntax or some way to say 'everything'

Example:
# Check dependencies and print results for rule :some_rule
anvil depends :some_rule
# Install/update all dependencies for rule :some_rule
anvil depends --install :some_rule
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


import argparse
import os
import sys

from anvil.depends import DependencyManager
from anvil.manage import ManageCommand


class DependsCommand(ManageCommand):
  """Command that scans target rules for external dependencies and
  optionally installs them.
  """

  def __init__(self):
    super(DependsCommand, self).__init__(
        name='depends',
        help_short='Manages external rule type dependencies.',
        help_long=__doc__)
    self.completion_hints.extend([
        '-i', '--install',
        '--stop_on_error',
        ])

  def create_argument_parser(self):
    parser = super(DependsCommand, self).create_argument_parser()

    # 'depends' specific
    parser.add_argument('-i', '--install',
                        dest='install',
                        action='store_true',
                        default=False,
                        help=('Install any missing dependencies. Must be run '
                              'as root.'))
    parser.add_argument('--stop_on_error',
                        dest='stop_on_error',
                        action='store_true',
                        default=False,
                        help=('Stop installing when an error is encountered.'))
    parser.add_argument('targets',
                        nargs='+',
                        metavar='target',
                        help='Target build rule (such as :a or foo/bar:a)')

    return parser

  def execute(self, args, cwd):
    dep_manager = DependencyManager(cwd=cwd)
    dependencies = dep_manager.scan_dependencies(args.targets)

    if not len(dependencies):
      print('No requirements found')
      # BUGFIX: was 'return True' - True == 1, which the shell treats as a
      # failing exit code; success must be 0, matching the other commands.
      return 0

    if not args.install:
      # TODO(benvanik): prettier output
      for dependency in dependencies:
        print(dependency)
      # BUGFIX: was 'return True' (exit code 1 despite success).
      return 0

    # TODO(benvanik): check if running as root
    running_as_root = False
    if args.install and not running_as_root:
      print('Not running as root - run again with sudo')
      # BUGFIX: was 'return False' - False == 0, which signals success to the
      # shell even though the command failed.
      return 1

    result = dep_manager.install_all(dependencies)
    return 0 if result else 1
# Copyright 2012 Google Inc. All Rights Reserved.

"""Runs a build and copies all output results of the specified rules to a path.
All of the output files of the specified rules will be copied to the target
output path. The directory structure will be exactly that of under the
various build-*/ folders but collapsed into one.

A typical deploy rule will bring together many result srcs, for example
converted audio files or compiled code, for a specific configuration.
One could have many such rules to target different configurations, such as
unoptimized/uncompiled vs. optimized/packed.

Examples:
# Copy all output files of :release_all to /some/bin/, merging the output
anvil deploy --output=/some/bin/ :release_all
# Clean (aka delete) /some/bin/ before doing the copy
anvil deploy --clean --output=/some/bin/ :release_all
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


import os
import shutil
import sys

import anvil.commands.util as commandutil
from anvil.manage import ManageCommand


class DeployCommand(ManageCommand):
  """Command that builds targets and copies their outputs to a directory."""

  def __init__(self):
    super(DeployCommand, self).__init__(
        name='deploy',
        help_short='Builds and copies output to a target path.',
        help_long=__doc__)
    self._add_common_build_hints()
    self.completion_hints.extend([
        '-o', '--output',
        '-c', '--clean',
        ])

  def create_argument_parser(self):
    parser = super(DeployCommand, self).create_argument_parser()

    # Add all common args
    self._add_common_build_arguments(parser, targets=True)

    # 'deploy' specific
    parser.add_argument('-o', '--output',
                        dest='output',
                        required=True,
                        help=('Output path to place all results. Will be '
                              'created if it does not exist.'))
    parser.add_argument('-c', '--clean',
                        dest='clean',
                        action='store_true',
                        help=('Whether to remove all output files before '
                              'deploying.'))

    return parser

  def execute(self, args, cwd):
    # Build everything first
    (result, all_target_outputs) = commandutil.run_build(cwd, args)
    if not result:
      # BUGFIX: was 'return False' - False == 0, which the shell interprets
      # as success; a failed build must exit non-zero.
      return 1

    # Delete output, if desired (best-effort: missing dir is fine).
    if args.clean:
      try:
        shutil.rmtree(args.output)
      except OSError:
        # Was a bare except; rmtree failures are OSError (or absent path).
        pass

    # Ensure output exists
    if not os.path.isdir(args.output):
      os.makedirs(args.output)

    # Sort all outputs by path, as it makes things prettier
    all_target_outputs = sorted(all_target_outputs)

    # Copy results
    print('')
    print('Copying results to %s:' % (args.output))
    for target_output in all_target_outputs:
      # Get path relative to root
      # This will contain the build-out/ etc
      rel_path = os.path.relpath(target_output, cwd)

      # Strip the build-*/
      # TODO(benvanik): a more reliable strip
      rel_path_parts = rel_path.split(os.sep)
      if rel_path_parts[0].startswith('build-'):
        rel_path = os.path.join(*(rel_path_parts[1:]))

      # Make output path
      deploy_path = os.path.normpath(os.path.join(args.output, rel_path))

      # Ensure directory exists
      # TODO(benvanik): cache whether we have checked yet to reduce OS cost
      deploy_dir = os.path.dirname(deploy_path)
      if not os.path.isdir(deploy_dir):
        os.makedirs(deploy_dir)

      # Copy!
      print('%s -> %s' % (rel_path, deploy_path))
      shutil.copy(target_output, deploy_path)

    return 0 if result else 1
# Copyright 2012 Google Inc. All Rights Reserved.

"""Runs a build and symlinks all output results of the specified rules to a
path.
All of the output files of the specified rules will be symlinked to the target
output path. The directory structure will be exactly that of under the
various build-*/ folders but collapsed into one.

A typical overlay rule will bring together many result srcs, for example
converted audio files or compiled code, for a specific configuration.
One could have many such rules to target different configurations, such as
unoptimized/uncompiled vs. optimized/packed.

Examples:
# Link all output files of :release_all to /some/bin/, merging the output
anvil overlay --output=/some/bin/ :overlay
# Clean (aka delete symlinks) /some/bin/ before doing the linking
anvil overlay --clean --output=/some/bin/ :overlay
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


import os
import shutil
import sys

import anvil.commands.util as commandutil
from anvil.manage import ManageCommand


class OverlayCommand(ManageCommand):
  """Command that builds targets and symlinks their outputs to a directory."""

  def __init__(self):
    super(OverlayCommand, self).__init__(
        name='overlay',
        help_short='Builds and symlinks output to a target path.',
        help_long=__doc__)
    self._add_common_build_hints()
    self.completion_hints.extend([
        '-o', '--output',
        '-c', '--clean',
        ])

  def create_argument_parser(self):
    parser = super(OverlayCommand, self).create_argument_parser()

    # Add all common args
    self._add_common_build_arguments(parser, targets=True)

    # 'overlay' specific
    parser.add_argument('-o', '--output',
                        dest='output',
                        required=True,
                        help=('Output path to place all symlinks. Will be '
                              'created if it does not exist.'))
    parser.add_argument('-c', '--clean',
                        dest='clean',
                        action='store_true',
                        help=('Whether to remove all output files before '
                              'deploying.'))

    return parser

  def execute(self, args, cwd):
    # Build everything first
    (result, all_target_outputs) = commandutil.run_build(cwd, args)
    if not result:
      # BUGFIX: was 'return False' - False == 0, which the shell interprets
      # as success; a failed build must exit non-zero.
      return 1

    # Delete output, if desired (best-effort: missing dir is fine).
    if args.clean:
      try:
        shutil.rmtree(args.output)
      except OSError:
        # Was a bare except; rmtree failures are OSError (or absent path).
        pass

    # Ensure output exists
    if not os.path.isdir(args.output):
      os.makedirs(args.output)

    # Sort all outputs by path, as it makes things prettier
    all_target_outputs = sorted(all_target_outputs)

    # Tracks all exists checks on link parent paths
    checked_dirs = {}

    # Link results
    print('')
    print('Symlinking results to %s:' % (args.output))
    skipped_links = 0
    for target_output in all_target_outputs:
      # Get path relative to root
      # This will contain the build-out/ etc
      rel_path = os.path.relpath(target_output, cwd)

      # Strip the build-*/
      # TODO(benvanik): a more reliable strip
      rel_path_parts = rel_path.split(os.sep)
      if rel_path_parts[0].startswith('build-'):
        rel_path = os.path.join(*(rel_path_parts[1:]))

      # Make output path
      deploy_path = os.path.normpath(os.path.join(args.output, rel_path))

      # Ensure parent of link path exists; memoize checks to avoid
      # re-statting the same directory for every file under it.
      deploy_dir = os.path.dirname(deploy_path)
      if not checked_dirs.get(deploy_dir, False):
        if not os.path.exists(deploy_dir):
          os.makedirs(deploy_dir)
        checked_dirs[deploy_dir] = True

      # Link! Existing links/files are left in place and counted as skipped.
      if not os.path.exists(deploy_path):
        print('%s -> %s' % (rel_path, deploy_path))
        os.symlink(target_output, deploy_path)
      else:
        skipped_links += 1

    if skipped_links:
      print('(%s skipped)' % (skipped_links))

    return 0 if result else 1
class ServeCommand(ManageCommand):
  """'serve' command: serves the working directory over HTTP.

  Merges the build-out/ and build-gen/ trees into the root namespace so built
  files resolve as if they lived beside their sources.
  """

  def __init__(self):
    super(ServeCommand, self).__init__(
        name='serve',
        help_short='Continuously builds and serves targets.',
        help_long=__doc__)
    self._add_common_build_hints()

  def create_argument_parser(self):
    """Creates an argument parser with common and serve-specific arguments."""
    parser = super(ServeCommand, self).create_argument_parser()

    # Add all common args; targets may be omitted for a plain static server
    self._add_common_build_arguments(
        parser, targets=True, targets_optional=True)

    # 'serve' specific
    parser.add_argument('-p', '--http_port',
                        dest='http_port',
                        type=int,
                        default=8080,
                        help=('TCP port the HTTP server will listen on.'))

    return parser

  def execute(self, args, cwd):
    """Runs an initial build (if targets given) and serves cwd over HTTP.

    Args:
      args: Parsed argument namespace.
      cwd: Current working directory; becomes the HTTP document root.

    Returns:
      0 always; blocks until the server is stopped.
    """
    # Initial build
    # NOTE(review): the build result is ignored here - the server launches
    # even if this initial build fails. Confirm that is intended.
    if len(args.targets):
      (result, all_target_outputs) = commandutil.run_build(cwd, args)
      print all_target_outputs

    # Blocks inside the twisted reactor until interrupted
    self._launch_http_server(args.http_port, cwd)

    return 0

  def _launch_http_server(self, port, root_path):
    """Launches a simple static twisted HTTP server.
    The server will automatically merge build-* paths in to a unified namespace.

    Args:
      port: TCP port to listen on.
      root_path: Root path of the HTTP server.
    """
    # Twisted has a bug where it doesn't properly initialize mimetypes
    # This must be done before importing it
    import mimetypes
    mimetypes.init()

    from twisted.internet import reactor
    from twisted.web.resource import Resource, NoResource
    from twisted.web.server import Site
    from twisted.web.static import File

    # Special site handler that merges various output and input paths into a
    # single unified file system
    class MergedSite(Site):
      def getResourceFor(self, request):
        # Scan well-known search paths first
        search_paths = ['build-out', 'build-gen',]
        for search_path in search_paths:
          # Re-resolve the request as if it had been prefixed with the search
          # path, walking the resource tree manually on copies of the request
          # path segments so the original request is untouched.
          resource = self.resource
          prepath = copy.copy(request.prepath)
          postpath = copy.copy(request.postpath)
          postpath.insert(0, search_path)
          while postpath and not resource.isLeaf:
            path_element = postpath.pop(0)
            prepath.append(path_element)
            resource = resource.getChildWithDefault(path_element, request)
          if resource and not isinstance(resource, NoResource):
            return resource
        # Fallback to normal handling (serve from root_path directly)
        return Site.getResourceFor(self, request)

    print 'Launching HTTP server on port %s...' % (port)

    root = File(root_path)
    factory = MergedSite(root)
    reactor.listenTCP(port, factory)
    reactor.run()
9 | """ 10 | 11 | __author__ = 'benvanik@google.com (Ben Vanik)' 12 | 13 | 14 | import os 15 | import sys 16 | 17 | import anvil.commands.util as commandutil 18 | from anvil.manage import ManageCommand 19 | 20 | 21 | class TestCommand(ManageCommand): 22 | def __init__(self): 23 | super(TestCommand, self).__init__( 24 | name='test', 25 | help_short='Builds and runs test rules.', 26 | help_long=__doc__) 27 | self._add_common_build_hints() 28 | 29 | def create_argument_parser(self): 30 | parser = super(TestCommand, self).create_argument_parser() 31 | 32 | # Add all common args 33 | self._add_common_build_arguments(parser, targets=True) 34 | 35 | return parser 36 | 37 | def execute(self, args, cwd): 38 | (result, all_target_outputs) = commandutil.run_build(cwd, args) 39 | 40 | print all_target_outputs 41 | 42 | return 0 if result else 1 43 | -------------------------------------------------------------------------------- /anvil/commands/util.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Common command utilities. 4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | 8 | 9 | import os 10 | import shutil 11 | import sys 12 | 13 | from anvil.cache import RuleCache, FileRuleCache 14 | from anvil.context import BuildEnvironment, BuildContext 15 | from anvil.project import FileModuleResolver, Project 16 | from anvil.task import InProcessTaskExecutor, MultiProcessTaskExecutor 17 | 18 | 19 | def clean_output(cwd): 20 | """Cleans all build-related output and caches. 21 | 22 | Args: 23 | cwd: Current working directory. 24 | 25 | Returns: 26 | True if the clean succeeded. 
def run_build(cwd, parsed_args):
  """Runs a build with the given arguments.
  Assumes that add_common_args and add_common_build_args was called on the
  ArgumentParser.

  Args:
    cwd: Current working directory; used as the build root and cache root.
    parsed_args: Argument namespace from an ArgumentParser.

  Returns:
    (success, a list of all target output paths)
  """
  # No targets means there is nothing to do - trivially successful
  if not len(parsed_args.targets):
    return (True, [])

  build_env = BuildEnvironment(root_path=cwd)

  module_resolver = FileModuleResolver(cwd)
  project = Project(module_resolver=module_resolver)

  # -j/--jobs switch to change execution mode
  if parsed_args.jobs is None:
    # Default to -j1 on Windows only.
    # TODO(benvanik): figure out why this fails so catastrophically
    if (sys.platform == 'cygwin' or
        sys.platform == 'win32'):
      parsed_args.jobs = 1
  if parsed_args.jobs == 1:
    task_executor = InProcessTaskExecutor()
  else:
    # worker_count=None lets the executor choose a default pool size
    task_executor = MultiProcessTaskExecutor(worker_count=parsed_args.jobs)

  # TODO(benvanik): good logging/info - resolve rules in project and print
  #     info?
  print('building %s' % (parsed_args.targets))

  # Setup cache; -f/--force bypasses it with an in-memory no-op cache
  if not parsed_args.force:
    # Fix: use the provided cwd rather than os.getcwd() so the cache lives in
    # the build root even if the process working directory differs.
    rule_cache = FileRuleCache(cwd)
  else:
    rule_cache = RuleCache()

  # TODO(benvanik): take additional args from command line
  all_target_outputs = set([])
  with BuildContext(build_env, project,
                    rule_cache=rule_cache,
                    task_executor=task_executor,
                    force=parsed_args.force,
                    stop_on_error=parsed_args.stop_on_error,
                    raise_on_error=False) as build_ctx:
    result = build_ctx.execute_sync(parsed_args.targets)
    if result:
      for target in parsed_args.targets:
        (state, target_outputs) = build_ctx.get_rule_results(target)
        all_target_outputs.update(target_outputs)

  return (result == True, all_target_outputs)
def load(path):
  """Loads every config file that applies to the given path.

  Gathers .anvilrc files from the entire ancestor chain of path plus the
  user's home directory and merges them; values from files read later take
  precedence.

  Args:
    path: Path to search for the config file.

  Returns:
    An initialized Config object or None if no config was found.
  """
  config_paths = _scan_up_all(path, _DEFAULT_NAME)
  config_paths.append(os.path.expanduser('~/%s' % _DEFAULT_NAME))

  parser = ConfigParser.SafeConfigParser()
  parser.read(config_paths)
  return parser
class ConfigTest(FixtureTestCase):
  """Behavioral tests for config loading."""
  fixture = 'config'

  def testNone(self):
    # Above the fixture root there is no .anvilrc in the chain
    cfg = anvil.config.load(os.path.dirname(self.root_path))
    self.assertIsNotNone(cfg)
    self.assertFalse(cfg.has_option('a', 'opt'))

  def testLoading(self):
    # The fixture root carries an .anvilrc defining [a] opt
    cfg = anvil.config.load(self.root_path)
    self.assertIsNotNone(cfg)
    self.assertTrue(cfg.has_option('a', 'opt'))
    self.assertEqual(cfg.get('a', 'opt'), 'hello')

  def testDeep(self):
    # Deeper configs override shallower ones, and a directory without its own
    # .anvilrc ('none') inherits everything from its ancestors.
    deep_paths = [
        os.path.join(self.root_path, 'deep'),
        os.path.join(self.root_path, 'deep', 'none'),
    ]
    for deep_path in deep_paths:
      cfg = anvil.config.load(deep_path)
      self.assertIsNotNone(cfg)
      self.assertTrue(cfg.has_option('a', 'opt'))
      self.assertEqual(cfg.get('a', 'opt'), 'world')
      self.assertTrue(cfg.has_option('b', 'opt'))
      self.assertEqual(cfg.get('b', 'opt'), 'another')
class Dependency(object):
  """Describes an external library or application that rules require.

  Subclasses carry enough metadata for the build system to report missing
  dependencies in an actionable way and, where supported, install them
  automatically. The base class is abstract: both check() and install() must
  be overridden.
  """

  def __init__(self, *args, **kwargs):
    """Initializes a dependency definition."""
    # Subclasses whose installation needs elevated privileges set this True
    self.requires_root = False

  def check(self):
    """Checks whether the dependency is satisfied.

    Returns:
      True if the dependency is valid and up to date. None signals the check
      could not be performed and an install is likely required.
    """
    raise NotImplementedError()

  def install(self):
    """Installs the dependency if it is not present.

    Returns:
      True if the installation completed successfully.
    """
    raise NotImplementedError()
class PythonLibrary(Dependency):
  """A dependency on a Python library.
  This uses pip to query the available libraries and install new ones.
  """

  def __init__(self, requirement_str, *args, **kwargs):
    """Initializes a Python library dependency definition.

    Args:
      requirement_str: Requirement string, such as 'anvil-build>=0.0.1'.
          This is passed directly to pip, so it supports extras and other
          features of requirement strings.
    """
    super(PythonLibrary, self).__init__(*args, **kwargs)
    import pkg_resources
    # Installing into site-packages generally requires elevated privileges
    self.requires_root = True
    self.requirement_str = requirement_str
    self.requirement = pkg_resources.Requirement.parse(requirement_str)

  def __str__(self):
    return 'Python Library "%s"' % (self.requirement_str)

  def check(self):
    """Checks installed distributions against the requirement.

    Returns:
      True only if an installed distribution satisfies the requirement; False
      when it is missing or only an incompatible version is present.
    """
    any_found = False
    any_valid = False
    import pip
    for distro in pip.get_installed_distributions():
      # distro is a pkg_resources.Distribution
      if distro in self.requirement:
        # Found and valid!
        any_found = True
        any_valid = True
      elif distro.project_name == self.requirement.project_name:
        # Present, but old
        any_found = True
        any_valid = False
    # TODO(benvanik): something with the result? log? different values?
    return any_found and any_valid

  def install(self):
    """Installs the library via pip.

    Returns:
      True if pip reported success.
    """
    # Fix: pip was previously only imported inside check(), so calling
    # install() before check() raised NameError on 'pip'.
    import pip
    return pip.main(['install', self.requirement_str]) == 0
class DependencyManager(object):
  """Collects and installs the external dependencies declared by build rules.
  """

  def __init__(self, cwd=None, *args, **kwargs):
    """Initializes a dependency manager.

    Args:
      cwd: Current working directory. Defaults to the process working
          directory when omitted.
    """
    self.cwd = cwd if cwd else os.getcwd()

  def scan_dependencies(self, target_rule_names):
    """Scans a list of target rules for their dependency information.

    Args:
      target_rule_names: A list of rule names that are to be executed.

    Returns:
      A de-duplicated list of Dependency definitions.
    """
    # Resolve the full rule sequence for the targets and gather the
    # 'requires' metadata off of every rule that declares it.
    build_env = BuildEnvironment(root_path=self.cwd)
    module_resolver = FileModuleResolver(self.cwd)
    project = Project(module_resolver=module_resolver)
    dependencies = []
    with BuildContext(build_env, project,
                      task_executor=InProcessTaskExecutor(),
                      stop_on_error=False,
                      raise_on_error=False) as build_ctx:
      rule_sequence = build_ctx.rule_graph.calculate_rule_sequence(
          target_rule_names)
      for rule in rule_sequence:
        if hasattr(rule, 'requires'):
          dependencies.extend(rule.requires)
    # TODO(benvanik): de-duplicate
    # NOTE(review): despite the docstring, the list is not yet de-duplicated.
    return dependencies

  def install_all(self, dependencies):
    """Installs all of the given dependencies.

    Args:
      dependencies: A list of Dependency definitions to install.

    Returns:
      True if the installs succeeded.
    """
    # TODO(benvanik): sort by type first so batch install can be used
    raise NotImplementedError()
class DependencyManagerTest(unittest2.TestCase):
  """Behavioral tests of the DependencyManager type."""

  def testScanDependencies(self):
    # TODO(benvanik): test scan_dependencies
    DependencyManager()

  def testInstallAll(self):
    # TODO(benvanik): test install_all
    DependencyManager()
def status_to_string(value):
  """Returns the display name for a Status value.

  Args:
    value: One of the Status enumeration values.

  Returns:
    The uppercase string name of the status.
  """
  return {
      Status.WAITING: 'WAITING',
      Status.RUNNING: 'RUNNING',
      Status.SUCCEEDED: 'SUCCEEDED',
      Status.FAILED: 'FAILED',
      Status.SKIPPED: 'SKIPPED',
  }[value]
  def _ensure_rules_present(self, rule_paths, requesting_module=None):
    """Ensures that the given list of rules are present in the graph, and if not
    recursively loads them.

    Args:
      rule_paths: A list of target rule paths to add to the graph.
      requesting_module: Module that is requesting the given rules or None if
          all rule paths are absolute.

    Raises:
      KeyError: A rule path could not be resolved.
      ValueError: Adding the rules introduced a cycle in the graph.
    """
    # Add all of the rules listed
    rules = []
    for rule_path in rule_paths:
      # Attempt to resolve the rule
      rule = self.project.resolve_rule(rule_path,
          requesting_module=requesting_module)
      if not rule:
        raise KeyError('Rule "%s" unable to be resolved' % (rule_path))
      rules.append(rule)

      # If already present, ignore (no need to recurse)
      if rule.path in self.rule_nodes:
        continue

      # Add node to the graph
      self.rule_nodes[rule.path] = rule
      self.graph.add_node(rule.path)

      # Recursively resolve all dependent rules; non-rule deps (plain source
      # paths) are filtered out by util.is_rule_path
      dependent_rule_paths = []
      for dep in rule.get_dependent_paths():
        if util.is_rule_path(dep):
          dependent_rule_paths.append(dep)
      if len(dependent_rule_paths):
        self._ensure_rules_present(dependent_rule_paths,
            requesting_module=rule.parent_module)

    # Add edges for all of the requested rules (at this point, all rules should
    # be added to the graph)
    for rule in rules:
      for dep in rule.get_dependent_paths():
        if util.is_rule_path(dep):
          dep_rule = self.project.resolve_rule(dep,
              requesting_module=rule.parent_module)
          # Node should exist due to recursive addition above
          assert dep_rule.path in self.rule_nodes
          # Edge direction is dependency -> dependent (see has_dependency,
          # which queries paths from predecessor to rule)
          self.graph.add_edge(dep_rule.path, rule.path)

    # Ensure the graph is a DAG (no cycles)
    if not nx.is_directed_acyclic_graph(self.graph):
      # TODO(benvanik): use nx.simple_cycles() to print the cycles
      raise ValueError('Cycle detected in the rule graph: %s' % (
          nx.simple_cycles(self.graph)))
146 | """ 147 | if isinstance(target_rule_paths, str): 148 | target_rule_paths = [target_rule_paths] 149 | 150 | # Ensure the graph has everything required - if things go south this will 151 | # raise errors 152 | self._ensure_rules_present(target_rule_paths) 153 | 154 | # Reversed graph to make sorting possible 155 | # If this gets expensive (or many sequences are calculated) it could be 156 | # cached 157 | reverse_graph = self.graph.reverse() 158 | 159 | # Paths are added in reverse (from target to dependencies) 160 | sequence_graph = nx.DiGraph() 161 | 162 | def _add_rule_node_dependencies(rule_node): 163 | if sequence_graph.has_node(rule_node): 164 | # Already present in the sequence graph, no need to add again 165 | return 166 | # Add node 167 | sequence_graph.add_node(rule_node) 168 | # Recursively add all dependent children 169 | for out_edge in reverse_graph.out_edges_iter([rule_node]): 170 | out_rule_node = out_edge[1] 171 | if not sequence_graph.has_node(out_rule_node): 172 | _add_rule_node_dependencies(out_rule_node) 173 | sequence_graph.add_edge(rule_node, out_rule_node) 174 | 175 | # Add all paths for targets 176 | # Note that all nodes are present if we got this far, so no need to check 177 | for rule_path in target_rule_paths: 178 | rule = self.project.resolve_rule(rule_path) 179 | assert rule 180 | assert rule.path in self.rule_nodes 181 | _add_rule_node_dependencies(rule.path) 182 | 183 | # Reverse the graph so that it's dependencies -> targets 184 | reversed_sequence_graph = sequence_graph.reverse() 185 | 186 | # Get the list of nodes in sorted order 187 | rule_sequence = [] 188 | for rule_node in nx.topological_sort(reversed_sequence_graph): 189 | rule_sequence.append(self.rule_nodes[rule_node]) 190 | return rule_sequence 191 | -------------------------------------------------------------------------------- /anvil/graph_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # 
# Copyright 2012 Google Inc. All Rights Reserved.

"""Tests for the graph module.
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


import unittest2

from anvil.graph import *
from anvil.module import *
from anvil.rule import *
from anvil.project import *


class RuleGraphTest(unittest2.TestCase):
  """Behavioral tests of the RuleGraph type."""

  def setUp(self):
    super(RuleGraphTest, self).setUp()

    # Shared fixture:
    #   m1: a1, a2, a3 (leaves), b (srcs a1 + file, deps a2), c (deps b)
    #   m2: p (deps m1:c) - exercises cross-module edges
    self.module_1 = Module('m1', rules=[
        Rule('a1'),
        Rule('a2'),
        Rule('a3'),
        Rule('b', srcs=[':a1', 'a/b/c'], deps=[':a2'],),
        Rule('c', deps=[':b'],),])
    self.module_2 = Module('m2', rules=[
        Rule('p', deps=['m1:c'],)])
    self.project = Project(modules=[self.module_1, self.module_2])

  def testConstruction(self):
    # Empty and populated projects both construct cleanly.
    project = Project()
    graph = RuleGraph(project)
    self.assertIs(graph.project, project)

    project = self.project
    graph = RuleGraph(project)
    self.assertIs(graph.project, project)

  def testAddRulesFromModule(self):
    graph = RuleGraph(self.project)
    graph.add_rules_from_module(self.module_1)
    self.assertTrue(graph.has_rule('m1:a1'))
    self.assertTrue(graph.has_rule('m1:a2'))
    self.assertTrue(graph.has_rule('m1:a3'))
    self.assertTrue(graph.has_rule('m1:b'))
    self.assertTrue(graph.has_rule('m1:c'))
    self.assertFalse(graph.has_rule('m2:p'))
    graph.add_rules_from_module(self.module_2)
    self.assertTrue(graph.has_rule('m2:p'))

    # Adding m2 alone pulls in only the m1 rules it transitively depends on;
    # a3 is unreferenced and should not appear.
    graph = RuleGraph(self.project)
    graph.add_rules_from_module(self.module_2)
    self.assertTrue(graph.has_rule('m2:p'))
    self.assertTrue(graph.has_rule('m1:a1'))
    self.assertTrue(graph.has_rule('m1:a2'))
    self.assertFalse(graph.has_rule('m1:a3'))
    self.assertTrue(graph.has_rule('m1:b'))
    self.assertTrue(graph.has_rule('m1:c'))

  def testCycle(self):
    # Cycle within a single module.
    module = Module('mc', rules=[
        Rule('a', deps=[':b']),
        Rule('b', deps=[':a'])])
    project = Project(modules=[module])
    graph = RuleGraph(project)
    with self.assertRaises(ValueError):
      graph.add_rules_from_module(module)

    # Cycle spanning two modules.
    module_1 = Module('mc1', rules=[Rule('a', deps=['mc2:a'])])
    module_2 = Module('mc2', rules=[Rule('a', deps=['mc1:a'])])
    project = Project(modules=[module_1, module_2])
    graph = RuleGraph(project)
    with self.assertRaises(ValueError):
      graph.add_rules_from_module(module_1)

  def testHasRule(self):
    graph = RuleGraph(self.project)
    graph.add_rules_from_module(self.module_1)
    self.assertTrue(graph.has_rule('m1:a1'))
    self.assertFalse(graph.has_rule('m2:p'))
    self.assertFalse(graph.has_rule('x:x'))

  def testHasDependency(self):
    # Unknown rules raise KeyError on an empty graph.
    graph = RuleGraph(Project())
    with self.assertRaises(KeyError):
      graph.has_dependency('m1:a', 'm1:b')

    graph = RuleGraph(self.project)
    graph.add_rules_from_module(self.module_1)
    # A rule depends on itself, and dependencies are transitive.
    self.assertTrue(graph.has_dependency('m1:c', 'm1:c'))
    self.assertTrue(graph.has_dependency('m1:a3', 'm1:a3'))
    self.assertTrue(graph.has_dependency('m1:c', 'm1:b'))
    self.assertTrue(graph.has_dependency('m1:c', 'm1:a1'))
    self.assertTrue(graph.has_dependency('m1:b', 'm1:a1'))
    # Dependencies are directional.
    self.assertFalse(graph.has_dependency('m1:b', 'm1:c'))
    self.assertFalse(graph.has_dependency('m1:a1', 'm1:a2'))
    self.assertFalse(graph.has_dependency('m1:c', 'm1:a3'))
    with self.assertRaises(KeyError):
      graph.has_dependency('m1:c', 'm1:x')
    with self.assertRaises(KeyError):
      graph.has_dependency('m1:x', 'm1:c')
    with self.assertRaises(KeyError):
      graph.has_dependency('m1:x', 'm1:x')

  def testCalculateRuleSequence(self):
    graph = RuleGraph(self.project)

    with self.assertRaises(KeyError):
      graph.calculate_rule_sequence(':x')
    with self.assertRaises(KeyError):
      graph.calculate_rule_sequence([':x'])
    with self.assertRaises(KeyError):
      graph.calculate_rule_sequence(['m1:x'])

    # Accepts a single rule path or a list of paths.
    seq = graph.calculate_rule_sequence('m1:a1')
    self.assertEqual(len(seq), 1)
    self.assertEqual(seq[0].name, 'a1')
    seq = graph.calculate_rule_sequence(['m1:a1'])
    self.assertEqual(len(seq), 1)
    self.assertEqual(seq[0].name, 'a1')

    # Dependencies precede dependents; a1/a2 may come in either order.
    seq = graph.calculate_rule_sequence(['m1:b'])
    self.assertEqual(len(seq), 3)
    self.assertTrue((seq[0].name in ['a1', 'a2']) or
                    (seq[1].name in ['a1', 'a2']))
    self.assertEqual(seq[2].name, 'b')

    seq = graph.calculate_rule_sequence(['m1:a1', 'm1:b'])
    self.assertEqual(len(seq), 3)
    self.assertTrue((seq[0].name in ['a1', 'a2']) or
                    (seq[1].name in ['a1', 'a2']))
    self.assertEqual(seq[2].name, 'b')

    seq = graph.calculate_rule_sequence(['m1:a1', 'm1:a3'])
    self.assertEqual(len(seq), 2)
    self.assertTrue((seq[0].name in ['a1', 'a3']) or
                    (seq[1].name in ['a1', 'a3']))

    # A rule with an unresolvable dependency fails sequencing.
    module = Module('mx', rules=[Rule('a', deps=[':b'])])
    project = Project(modules=[module])
    graph = RuleGraph(project)
    with self.assertRaises(KeyError):
      graph.calculate_rule_sequence('mx:a')

  def testCrossModuleRules(self):
    graph = RuleGraph(self.project)

    seq = graph.calculate_rule_sequence(['m2:p'])
    self.assertEqual(len(seq), 5)
    self.assertTrue((seq[0].name in ['a1', 'a2']) or
                    (seq[1].name in ['a1', 'a2']))
    # BUG FIX: this previously used assertTrue(seq[4].path, 'm2:p'), whose
    # second argument is the failure *message* - the assertion passed for any
    # truthy path. Assert equality instead.
    self.assertEqual(seq[4].path, 'm2:p')
    self.assertTrue(graph.has_dependency('m2:p', 'm1:a1'))


if __name__ == '__main__':
  unittest2.main()
2 | """ 3 | 4 | __author__ = 'joshharrison@google.com' 5 | 6 | 7 | class PrintLogSink(object): 8 | """A very basic LogSink that simply prints to stdout. 9 | """ 10 | def log(self, message): 11 | print '%s %s' % (message[1], message[3]) 12 | -------------------------------------------------------------------------------- /anvil/manage.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | """Management shell script. 6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import argparse 12 | import fnmatch 13 | import imp 14 | import io 15 | import os 16 | import re 17 | import sys 18 | 19 | sys.path.insert(1, os.path.join(os.path.dirname(__file__), '..')) 20 | 21 | from anvil import util 22 | 23 | 24 | # Hack to get formatting in usage() correct 25 | class _ComboHelpFormatter(argparse.RawDescriptionHelpFormatter, 26 | argparse.ArgumentDefaultsHelpFormatter): 27 | pass 28 | 29 | 30 | class ManageCommand(object): 31 | """Base type for manage commands. 32 | All subclasses of this type can be auto-discovered and added to the command 33 | list. 34 | """ 35 | 36 | def __init__(self, name, help_short=None, help_long=None, *args, **kwargs): 37 | """Initializes a manage command. 38 | 39 | Args: 40 | name: The name of the command exposed to the management script. 41 | help_short: Help text printed alongside the command when queried. 42 | help_long: Extended help text when viewing command help. 43 | """ 44 | self.name = name 45 | self.help_short = help_short 46 | self.help_long = help_long 47 | self.completion_hints = [] 48 | 49 | def create_argument_parser(self): 50 | """Creates and sets up an argument parser. 51 | 52 | Returns: 53 | An ready to use ArgumentParser. 
54 | """ 55 | parser = argparse.ArgumentParser(prog='anvil %s' % (self.name), 56 | description=self.help_long, 57 | formatter_class=_ComboHelpFormatter) 58 | # TODO(benvanik): add common arguments (logging levels/etc) 59 | return parser 60 | 61 | def _add_common_build_hints(self): 62 | self.completion_hints.extend([ 63 | '-j', '--jobs', 64 | '-f', '--force', 65 | '--stop_on_error', 66 | ]) 67 | 68 | def _add_common_build_arguments(self, parser, targets=False, 69 | targets_optional=False): 70 | """Adds common build arguments to an argument parser. 71 | 72 | Args: 73 | parser: ArgumentParser to modify. 74 | targets: True to add variable target arguments. 75 | targets_optional: Targets, if included, are optional 76 | """ 77 | # Threading/execution control 78 | parser.add_argument('-j', '--jobs', 79 | dest='jobs', 80 | type=int, 81 | default=None, 82 | help=('Specifies the number of tasks to run ' 83 | 'simultaneously. If omitted then all processors ' 84 | 'will be used.')) 85 | 86 | # Build context control 87 | parser.add_argument('-f', '--force', 88 | dest='force', 89 | action='store_true', 90 | default=False, 91 | help=('Force all rules to run as if there was no ' 92 | 'cache.')) 93 | parser.add_argument('--stop_on_error', 94 | dest='stop_on_error', 95 | action='store_true', 96 | default=False, 97 | help=('Stop building when an error is encountered.')) 98 | 99 | # Target specification 100 | if targets: 101 | parser.add_argument('targets', 102 | nargs='*' if targets_optional else '+', 103 | metavar='target', 104 | help='Target build rule (such as :a or foo/bar:a)') 105 | 106 | def execute(self, args, cwd): 107 | """Executes the command. 108 | 109 | Args: 110 | args: ArgumentParser parsed argument object. 111 | cwd: Current working directory. 112 | 113 | Returns: 114 | Return code of the command. 115 | """ 116 | return 1 117 | 118 | 119 | def discover_commands(search_paths): 120 | """Looks for all commands and returns a dictionary of them. 
def discover_commands(search_paths):
  """Looks for all commands and returns a dictionary of them.
  Commands are discovered in the given search path (or anvil/commands/) by
  looking for subclasses of ManageCommand.

  Args:
    search_paths: Search paths to use for command discovery.

  Returns:
    A dictionary containing name-to-ManageCommand mappings.

  Raises:
    KeyError: Multiple commands have the same name.
  """
  commands = {}
  for search_path in search_paths:
    for (root, dirs, files) in os.walk(search_path):
      for name in files:
        if fnmatch.fnmatch(name, '*.py'):
          full_path = os.path.join(root, name)
          module = imp.load_source(os.path.splitext(name)[0], full_path)
          for attr_name in dir(module):
            command_cls = getattr(module, attr_name)
            # Skip the base class itself; only concrete subclasses count.
            if (command_cls != ManageCommand and
                isinstance(command_cls, type) and
                issubclass(command_cls, ManageCommand)):
              command = command_cls()
              command_name = command.name
              # 'in' replaces deprecated dict.has_key() (same behavior).
              if command_name in commands:
                raise KeyError('Command "%s" already defined' % (command_name))
              commands[command_name] = command
  return commands


def usage(commands):
  """Gets usage info that can be displayed to the user.

  Args:
    commands: A command dictionary from discover_commands.

  Returns:
    A string containing usage info and a command listing.
  """
  s = 'anvil command [-h]\n'
  s += '\n'
  s += 'Commands:\n'
  # sorted() replaces the keys()/sort() pair - identical output, and works
  # on dict key views as well as lists.
  for command_name in sorted(commands.keys()):
    s += '  %s\n' % (command_name)
    command_help = commands[command_name].help_short
    if command_help:
      s += '    %s\n' % (command_help)
  return s
def run_command(command, args, cwd):
  """Runs a command with the given context.

  Args:
    command: ManageCommand to run.
    args: Arguments, with the app and command name stripped.
    cwd: Current working directory.

  Returns:
    0 if the command succeeded and non-zero otherwise.

  Raises:
    ValueError: The command could not be found or was not specified.
  """
  parser = command.create_argument_parser()
  parsed_args = parser.parse_args(args)
  return command.execute(parsed_args, cwd)


def autocomplete(words, cword, cwd, commands=None):
  """Completes the given command string.

  Args:
    words: A list of all words in the current command line (minus the prog).
        COMP_WORDS split and with the first argument (app name) removed.
    cword: An index into words indicating the cursor position.
        COMP_CWORD in integer form.
    cwd: Current working directory.
    commands: Name-to-ManageCommand mapping from discover_commands. Despite
        the None default, a mapping is required for any completion to occur.

  Returns:
    A space-delimited string of completion words for the current command line,
    or None to let bash handle (or suppress) completion.
  """
  try:
    current = words[cword]
  except IndexError:
    current = ''
  try:
    previous = words[cword - 1]
  except IndexError:
    previous = ''

  if cword == 0:
    # At the first word, which is the command
    # Attempt to autocomplete one if it's in progress, or just list them out
    return ' '.join([c for c in commands.keys() if c.startswith(current)])

  # Somewhere inside of a command
  # ('in' replaces deprecated dict.has_key() - same behavior.)
  if words[0] not in commands:
    # Whatever the user is typing is not recognized
    return None
  command = commands[words[0]]

  # BUG FIX: the old "startswith('-') or startswith('--')" was redundant -
  # the first test already matches '--'.
  if current.startswith('-'):
    # TODO(benvanik): argparse is poorly designed and cannot support completion
    # easily (it has no way to iterate options)
    hints = command.completion_hints
    return ' '.join([c for c in hints if c.startswith(current)])

  # Bash treats ':' as a separator and passes in things like 'a:b' as [a,:,b]
  # So, look for current = ':' and prev = ':' to try to find out if we are
  # referencing rules

  target_module = ''
  rule_prefix = ''
  if previous == ':':
    # Cursor is after 'module:pre' - shift back to inspect the module word.
    rule_prefix = current
    current = ':'
    try:
      previous = words[cword - 2]
    except IndexError:
      previous = ''
  if current == ':':
    if len(previous):
      # If previous refers to a module, get all rules from it
      target_module = os.path.normpath(os.path.join(cwd, previous))
    else:
      # If there is a BUILD file in the current path, get all rules from it
      target_module = cwd
    if len(target_module):
      target_module = util.get_build_file_path(target_module)
      if os.path.isfile(target_module):
        # Module exists! Extract the rules and return them
        # TODO(benvanik): maybe load the module? that requires a lot more
        # work...
        with io.open(target_module, 'r') as f:
          module_str = f.read()
        all_rules = []
        # Scrape name='...' declarations rather than executing the module.
        for rule_match in re.finditer(r'name=[\'\"]([a-zA-Z0-9_]+)[\'\"]',
                                      module_str,
                                      flags=re.MULTILINE):
          rule_name = rule_match.group(1)
          all_rules.append(rule_name)
        return ' '.join([c for c in all_rules if c.startswith(rule_prefix)])
    # Bad - prevent any more completion on this block
    # TODO(benvanik): how do you prevent completion?!
    return None

  # Nothing we know or care about - allow bash to take over
  return None
def main():  # pragma: no cover
  """Entry point for scripts.

  Discovers commands, optionally services bash auto-completion, then runs
  the requested command and exits with its return code.
  """
  # Always add anvil/.. to the path
  sys.path.insert(1, util.get_anvil_path())

  # TODO(benvanik): if a command is specified try loading it first - may be
  # able to avoid searching all commands
  search_paths = [os.path.join(util.get_anvil_path(), 'commands')]
  # TODO(benvanik): look for a .anvilrc, load it to find
  # - extra command search paths
  # - extra rule search paths
  # Also check to see if it was specified in args?

  # Find all commands
  commands = discover_commands(search_paths)

  # Run auto-completion logic
  if 'ANVIL_AUTO_COMPLETE' in os.environ:
    match_str = autocomplete(
        words=os.environ['COMP_WORDS'].split(' ')[1:],
        cword=int(os.environ['COMP_CWORD']) - 1,
        cwd=os.getcwd(),
        commands=commands)
    if match_str and len(match_str):
      # print(x) with a single argument behaves identically under Python 2.
      print(match_str)
    sys.exit(1)

  try:
    if len(sys.argv) < 2:
      raise ValueError('No command given')
    command_name = sys.argv[1]
    # 'in' replaces deprecated dict.has_key() (same behavior).
    if command_name not in commands:
      raise ValueError('Command "%s" not found' % (command_name))

    command = commands[command_name]
    return_code = run_command(command=command,
                              args=sys.argv[2:],
                              cwd=os.getcwd())
  except ValueError:
    print(usage(commands))
    return_code = 1
  # BUG FIX: the previous 'except Exception as e: raise; return_code = 1'
  # handler re-raised immediately, leaving dead code behind. Letting other
  # exceptions propagate is behaviorally identical and clearer.
  sys.exit(return_code)


if __name__ == '__main__':
  import anvil.manage
  # Always add anvil/.. to the path
  sys.path.insert(1, os.path.join(util.get_anvil_path(), '..'))
  anvil.manage.main()
6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import types 12 | import unittest2 13 | 14 | from anvil import manage 15 | from anvil.manage import * 16 | from anvil import test 17 | from anvil.test import AsyncTestCase, FixtureTestCase 18 | 19 | 20 | class ManageTest(FixtureTestCase): 21 | """Behavioral tests for the management wrapper.""" 22 | fixture = 'manage' 23 | 24 | def setUp(self): 25 | super(ManageTest, self).setUp() 26 | 27 | def testDiscovery(self): 28 | # Check built-in 29 | search_paths = [os.path.join(util.get_anvil_path(), 'commands')] 30 | commands = manage.discover_commands(search_paths) 31 | self.assertTrue(commands.has_key('build')) 32 | self.assertIsInstance(commands['build'], ManageCommand) 33 | 34 | # Check custom 35 | commands = manage.discover_commands( 36 | [os.path.join(self.root_path, 'commands')]) 37 | self.assertTrue(commands.has_key('test_command')) 38 | test_command = commands['test_command'] 39 | self.assertIsInstance(test_command, ManageCommand) 40 | args = test_command.create_argument_parser() 41 | parsed_args = args.parse_args([]) 42 | cwd = os.getcwd() 43 | self.assertEqual(commands['test_command'].execute(parsed_args, cwd), 123) 44 | 45 | # Duplicate command names/etc 46 | with self.assertRaises(KeyError): 47 | manage.discover_commands([os.path.join(self.root_path, 'bad_commands')]) 48 | 49 | def testUsage(self): 50 | search_paths = [os.path.join(util.get_anvil_path(), 'commands')] 51 | commands = manage.discover_commands(search_paths) 52 | self.assertNotEqual(len(manage.usage(commands)), 0) 53 | 54 | def testRunCommand(self): 55 | class SomeCommand(ManageCommand): 56 | def __init__(self): 57 | super(SomeCommand, self).__init__(name='some_command') 58 | def execute(self, args, cwd): 59 | return 123 60 | self.assertEqual(manage.run_command( 61 | SomeCommand(), [], os.getcwd()), 123) 62 | 63 | # TODO(benvanik): add test for argument parsing 64 | 65 | def testAutocomplete(self): 66 | # TODO(benvanik): 
class Project(object):
  """Project type that contains rules.
  Projects, once constructed, are designed to be immutable. Many duplicate
  build processes may run over the same project instance and all expect it to
  be in the state it was when first created.
  """

  def __init__(self, name='Project', rule_namespace=None, module_resolver=None,
               modules=None):
    """Initializes an empty project.

    Args:
      name: A human-readable name for the project that will be used for
          logging.
      rule_namespace: Rule namespace to use when loading modules. If omitted a
          default one is used.
      module_resolver: A module resolver to use when attempting to dynamically
          resolve modules by path.
      modules: A list of modules to add to the project.

    Raises:
      NameError: The name given is not valid.
    """
    self.name = name

    if rule_namespace:
      self.rule_namespace = rule_namespace
    else:
      self.rule_namespace = RuleNamespace()
      self.rule_namespace.discover()

    if module_resolver:
      self.module_resolver = module_resolver
    else:
      # Default resolver has no modules and cannot resolve local paths.
      self.module_resolver = StaticModuleResolver()

    self.modules = {}
    if modules and len(modules):
      self.add_modules(modules)

  def add_module(self, module):
    """Adds a module to the project.

    Args:
      module: A module to add.

    Raises:
      KeyError: A module with the given name already exists in the project.
    """
    self.add_modules([module])

  def add_modules(self, modules):
    """Adds a list of modules to the project.

    Args:
      modules: A list of modules to add.

    Raises:
      KeyError: A module with the given name already exists in the project.
    """
    # Validate the whole batch before mutating so a duplicate leaves the
    # project unchanged.
    for module in modules:
      if module.path in self.modules:
        raise KeyError('A module with the path "%s" is already defined' % (
            module.path))
    for module in modules:
      self.modules[module.path] = module

  def get_module(self, module_path):
    """Gets a module by path.

    Args:
      module_path: Name of the module to find.

    Returns:
      The module with the given path or None if it was not found.
    """
    return self.modules.get(module_path, None)

  def module_list(self):
    """Gets a list of all modules in the project.

    Returns:
      A list of all modules.
    """
    return self.modules.values()

  def module_iter(self):
    """Iterates over all modules in the project."""
    for module_path in self.modules:
      yield self.modules[module_path]

  def resolve_rule(self, rule_path, requesting_module=None):
    """Gets a rule by path, supporting module lookup and dynamic loading.

    Args:
      rule_path: Path of the rule to find. Must include a colon.
      requesting_module: The module that is requesting the given rule. If not
          provided then no local rule paths (':foo') or relative paths are
          allowed.

    Returns:
      The rule with the given name or None if it was not found.

    Raises:
      NameError: The given rule name was not valid.
      KeyError: The given rule was not found.
      IOError: Unable to load referenced module.
    """
    if not anvil.util.is_rule_path(rule_path):
      # BUG FIX: the ':' separator is a colon; the message used to say
      # 'semicolon'.
      raise NameError('The rule path "%s" is missing a colon' % (rule_path))
    # str.rsplit replaces the Python-2-only string.rsplit function.
    (module_path, rule_name) = rule_path.rsplit(':', 1)
    if self.module_resolver.can_resolve_local:
      if not len(module_path) and not requesting_module:
        module_path = '.'
    if not len(module_path) and not requesting_module:
      raise KeyError('Local rule "%s" given when no resolver defined' % (
          rule_path))

    module = requesting_module
    if len(module_path):
      requesting_path = None
      if requesting_module:
        requesting_path = os.path.dirname(requesting_module.path)
      full_path = self.module_resolver.resolve_module_path(
          module_path, requesting_path)
      module = self.modules.get(full_path, None)
      if not module:
        # Module not yet loaded - need to grab it
        module = self.module_resolver.load_module(
            full_path, self.rule_namespace)
        if module:
          self.add_module(module)
        else:
          # BUG FIX: the message was never formatted - the old code passed
          # module_path as a second exception argument instead of using %.
          raise IOError('Module "%s" not found' % (module_path))

    return module.get_rule(rule_name)
166 | """ 167 | 168 | def __init__(self, *args, **kwargs): 169 | """Initializes a module resolver.""" 170 | self.can_resolve_local = False 171 | 172 | def resolve_module_path(self, path, working_path=None): 173 | """Resolves a module path to its full, absolute path. 174 | This is used by the project system to disambugate modules and check the 175 | cache before actually performing a load. 176 | The path returned from this will be passed to load_module. 177 | 178 | Args: 179 | path: Path of the module (may be relative/etc). 180 | working_path: Path relative paths should be pased off of. If not provided 181 | then relative paths may fail. 182 | 183 | Returns: 184 | An absolute path that can be used as a cache key and passed to 185 | load_module. 186 | """ 187 | raise NotImplementedError() 188 | 189 | def load_module(self, full_path, rule_namespace): 190 | """Loads a module from the given path. 191 | 192 | Args: 193 | full_path: Absolute path of the module as returned by resolve_module_path. 194 | rule_namespace: Rule namespace to use when loading modules. 195 | 196 | Returns: 197 | A Module representing the given path or None if it could not be found. 198 | 199 | Raises: 200 | IOError: The module could not be found. 201 | """ 202 | raise NotImplementedError() 203 | 204 | 205 | class StaticModuleResolver(ModuleResolver): 206 | """A static module resolver that can resolve from a list of modules. 207 | """ 208 | 209 | def __init__(self, modules=None, *args, **kwargs): 210 | """Initializes a static module resolver. 211 | 212 | Args: 213 | modules: A list of modules that can be resolved. 
214 | """ 215 | super(StaticModuleResolver, self).__init__(*args, **kwargs) 216 | 217 | self.modules = {} 218 | if modules: 219 | for module in modules: 220 | self.modules[os.path.normpath(module.path)] = module 221 | 222 | def resolve_module_path(self, path, working_path=None): 223 | real_path = path 224 | if working_path and len(working_path): 225 | real_path = os.path.join(working_path, path) 226 | return os.path.normpath(real_path) 227 | 228 | def load_module(self, full_path, rule_namespace): 229 | return self.modules.get(os.path.normpath(full_path), None) 230 | 231 | 232 | class FileModuleResolver(ModuleResolver): 233 | """A file-system backed module resolver. 234 | 235 | Rules are searched for with relative paths from a defined root path. 236 | If the module path given is a directory, the resolver will attempt to load 237 | a BUILD file from that directory - otherwise the file specified will be 238 | treated as the module. 239 | """ 240 | 241 | def __init__(self, root_path, *args, **kwargs): 242 | """Initializes a file-system module resolver. 243 | 244 | Args: 245 | root_path: Root filesystem path to treat as the base for all resolutions. 246 | 247 | Raises: 248 | IOError: The given root path is not found or is not a directory. 
249 | """ 250 | super(FileModuleResolver, self).__init__(*args, **kwargs) 251 | 252 | self.can_resolve_local = True 253 | 254 | self.root_path = os.path.normpath(root_path) 255 | if not os.path.isdir(self.root_path): 256 | raise IOError('Root path "%s" not found' % (self.root_path)) 257 | 258 | def resolve_module_path(self, path, working_path=None): 259 | # Compute the real path 260 | has_working_path = working_path and len(working_path) 261 | real_path = path 262 | if has_working_path: 263 | real_path = os.path.join(working_path, path) 264 | real_path = os.path.normpath(real_path) 265 | full_path = os.path.join(self.root_path, real_path) 266 | full_path = os.path.normpath(full_path) 267 | 268 | # Check to see if it exists and is a file 269 | # Special handling to find BUILD files under directories 270 | full_path = anvil.util.get_build_file_path(full_path) 271 | if not os.path.isfile(full_path): 272 | raise IOError('Path "%s" is not a file' % (full_path)) 273 | 274 | return os.path.normpath(full_path) 275 | 276 | def load_module(self, full_path, rule_namespace): 277 | module_loader = ModuleLoader(full_path, rule_namespace=rule_namespace) 278 | module_loader.load() 279 | return module_loader.execute() 280 | -------------------------------------------------------------------------------- /anvil/rule_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | """Tests for the rule module. 
6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import os 12 | import unittest2 13 | 14 | from anvil.rule import * 15 | from anvil.test import FixtureTestCase 16 | 17 | 18 | class RuleTest(unittest2.TestCase): 19 | """Behavioral tests of the Rule type.""" 20 | 21 | def testRuleNames(self): 22 | with self.assertRaises(NameError): 23 | Rule(None) 24 | with self.assertRaises(NameError): 25 | Rule('') 26 | with self.assertRaises(NameError): 27 | Rule(' ') 28 | with self.assertRaises(NameError): 29 | Rule(' a') 30 | with self.assertRaises(NameError): 31 | Rule('a ') 32 | with self.assertRaises(NameError): 33 | Rule(' a ') 34 | with self.assertRaises(NameError): 35 | Rule('a\n') 36 | with self.assertRaises(NameError): 37 | Rule('a\t') 38 | with self.assertRaises(NameError): 39 | Rule('a b') 40 | with self.assertRaises(NameError): 41 | Rule(':a') 42 | rule = Rule('a') 43 | self.assertEqual(rule.name, 'a') 44 | self.assertEqual(rule.path, ':a') 45 | Rule('\u0CA_\u0CA') 46 | 47 | def testRuleSrcs(self): 48 | rule = Rule('r') 49 | self.assertEqual(len(rule.srcs), 0) 50 | 51 | srcs = ['a', 'b', ':c'] 52 | rule = Rule('r', srcs=srcs) 53 | self.assertEqual(len(rule.srcs), 3) 54 | self.assertIsNot(rule.srcs, srcs) 55 | srcs[0] = 'x' 56 | self.assertEqual(rule.srcs[0], 'a') 57 | 58 | srcs = 'a' 59 | rule = Rule('r', srcs=srcs) 60 | self.assertEqual(len(rule.srcs), 1) 61 | self.assertEqual(rule.srcs[0], 'a') 62 | 63 | rule = Rule('r', srcs=None) 64 | rule = Rule('r', srcs='') 65 | self.assertEqual(len(rule.srcs), 0) 66 | with self.assertRaises(TypeError): 67 | Rule('r', srcs={}) 68 | with self.assertRaises(TypeError): 69 | Rule('r', srcs=[None]) 70 | with self.assertRaises(TypeError): 71 | Rule('r', srcs=['']) 72 | with self.assertRaises(TypeError): 73 | Rule('r', srcs=[{}]) 74 | with self.assertRaises(NameError): 75 | Rule('r', srcs=' a') 76 | with self.assertRaises(NameError): 77 | Rule('r', srcs='a ') 78 | with self.assertRaises(NameError): 79 | 
Rule('r', srcs=' a ') 80 | 81 | def testRuleDeps(self): 82 | rule = Rule('r') 83 | self.assertEqual(len(rule.deps), 0) 84 | 85 | deps = [':a', ':b', ':c'] 86 | rule = Rule('r', deps=deps) 87 | self.assertEqual(len(rule.deps), 3) 88 | self.assertIsNot(rule.deps, deps) 89 | deps[0] = 'x' 90 | self.assertEqual(rule.deps[0], ':a') 91 | 92 | deps = ':a' 93 | rule = Rule('r', deps=deps) 94 | self.assertEqual(len(rule.deps), 1) 95 | self.assertEqual(rule.deps[0], ':a') 96 | 97 | rule = Rule('r', deps=None) 98 | rule = Rule('r', deps='') 99 | self.assertEqual(len(rule.deps), 0) 100 | with self.assertRaises(TypeError): 101 | Rule('r', deps={}) 102 | with self.assertRaises(TypeError): 103 | Rule('r', deps=[None]) 104 | with self.assertRaises(TypeError): 105 | Rule('r', deps=['']) 106 | with self.assertRaises(TypeError): 107 | Rule('r', deps={}) 108 | with self.assertRaises(NameError): 109 | Rule('r', deps=' a') 110 | with self.assertRaises(NameError): 111 | Rule('r', deps='a ') 112 | with self.assertRaises(NameError): 113 | Rule('r', deps=' a ') 114 | 115 | def testRuleDependentPaths(self): 116 | rule = Rule('r') 117 | self.assertEqual(rule.get_dependent_paths(), set([])) 118 | 119 | rule = Rule('r', srcs=[':a', 'a.txt']) 120 | self.assertEqual(rule.get_dependent_paths(), set([':a', 'a.txt'])) 121 | 122 | rule = Rule('r', deps=[':a', 'm:b']) 123 | self.assertEqual(rule.get_dependent_paths(), set([':a', 'm:b'])) 124 | 125 | rule = Rule('r', srcs=['a.txt'], deps=[':b']) 126 | self.assertEqual(rule.get_dependent_paths(), set(['a.txt', ':b'])) 127 | 128 | rule = Rule('r', srcs=[':b'], deps=[':b']) 129 | self.assertEqual(rule.get_dependent_paths(), set([':b'])) 130 | 131 | with self.assertRaises(NameError): 132 | Rule('r', deps=['a.txt']) 133 | 134 | class RuleWithAttrs(Rule): 135 | def __init__(self, name, extra_srcs=None, extra_deps=None, 136 | *args, **kwargs): 137 | super(RuleWithAttrs, self).__init__(name, *args, **kwargs) 138 | self.extra_srcs = extra_srcs[:] 139 | 
self._append_dependent_paths(self.extra_srcs) 140 | self.extra_deps = extra_deps[:] 141 | self._append_dependent_paths(self.extra_deps, require_semicolon=True) 142 | 143 | rule = RuleWithAttrs('r', srcs=['a.txt'], deps=[':b'], 144 | extra_srcs=['c.txt'], extra_deps=[':d']) 145 | self.assertEqual(rule.get_dependent_paths(), set([ 146 | 'a.txt', ':b', 'c.txt', ':d'])) 147 | 148 | def testRuleCacheKey(self): 149 | rule1 = Rule('r1') 150 | rule1_key = rule1.compute_cache_key() 151 | self.assertIsNotNone(rule1_key) 152 | self.assertGreater(len(rule1_key), 0) 153 | self.assertEqual(rule1_key, rule1.compute_cache_key()) 154 | rule1.srcs.append('a') 155 | self.assertNotEqual(rule1_key, rule1.compute_cache_key()) 156 | 157 | rule1 = Rule('r1') 158 | rule2 = Rule('r1') 159 | self.assertEqual(rule1.compute_cache_key(), rule2.compute_cache_key()) 160 | rule1 = Rule('r1') 161 | rule2 = Rule('r2') 162 | self.assertNotEqual(rule1.compute_cache_key(), rule2.compute_cache_key()) 163 | 164 | rule1 = Rule('r1', srcs='a') 165 | rule2 = Rule('r1', srcs='a') 166 | self.assertEqual(rule1.compute_cache_key(), rule2.compute_cache_key()) 167 | rule1 = Rule('r1', srcs='a') 168 | rule2 = Rule('r1', srcs='b') 169 | self.assertNotEqual(rule1.compute_cache_key(), rule2.compute_cache_key()) 170 | rule1 = Rule('r1', deps=':a') 171 | rule2 = Rule('r1', deps=':a') 172 | self.assertEqual(rule1.compute_cache_key(), rule2.compute_cache_key()) 173 | rule1 = Rule('r1', deps=':a') 174 | rule2 = Rule('r1', deps=':b') 175 | self.assertNotEqual(rule1.compute_cache_key(), rule2.compute_cache_key()) 176 | rule1 = Rule('r1', srcs='a', deps=':a') 177 | rule2 = Rule('r1', srcs='a', deps=':a') 178 | self.assertEqual(rule1.compute_cache_key(), rule2.compute_cache_key()) 179 | rule1 = Rule('r1', srcs='a', deps=':a') 180 | rule2 = Rule('r1', srcs='b', deps=':b') 181 | self.assertNotEqual(rule1.compute_cache_key(), rule2.compute_cache_key()) 182 | 183 | def testRuleFilter(self): 184 | rule = Rule('a') 185 | 
self.assertIsNone(rule.src_filter) 186 | rule = Rule('a', src_filter='') 187 | self.assertIsNone(rule.src_filter) 188 | rule = Rule('a', src_filter='*.js') 189 | self.assertEqual(rule.src_filter, '*.js') 190 | 191 | def testRuleExclusionFilter(self): 192 | rule = Rule('a') 193 | self.assertIsNone(rule.src_exclude_filter) 194 | rule = Rule('a', src_filter='') 195 | self.assertIsNone(rule.src_exclude_filter) 196 | rule = Rule('a', src_exclude_filter='*.js') 197 | self.assertEqual(rule.src_exclude_filter, '*.js') 198 | 199 | 200 | class RuleNamespaceTest(FixtureTestCase): 201 | """Behavioral tests of the Rule type.""" 202 | fixture = 'rules' 203 | 204 | def testManual(self): 205 | ns = RuleNamespace() 206 | self.assertEqual(len(ns.rule_types), 0) 207 | 208 | class MockRule1(Rule): 209 | pass 210 | ns.add_rule_type('mock_rule_1', MockRule1) 211 | self.assertEqual(len(ns.rule_types), 1) 212 | 213 | with self.assertRaises(KeyError): 214 | ns.add_rule_type('mock_rule_1', MockRule1) 215 | 216 | def testDiscovery(self): 217 | ns = RuleNamespace() 218 | ns.discover() 219 | self.assertTrue(ns.rule_types.has_key('file_set')) 220 | 221 | rule_path = self.root_path 222 | ns = RuleNamespace() 223 | ns.discover(rule_path) 224 | self.assertEqual(len(ns.rule_types), 3) 225 | self.assertFalse(ns.rule_types.has_key('file_set')) 226 | self.assertTrue(ns.rule_types.has_key('rule_a')) 227 | self.assertTrue(ns.rule_types.has_key('rule_b')) 228 | self.assertTrue(ns.rule_types.has_key('rule_c')) 229 | self.assertFalse(ns.rule_types.has_key('rule_x')) 230 | 231 | rule_path = os.path.join(self.root_path, 'dupe.py') 232 | ns = RuleNamespace() 233 | with self.assertRaises(KeyError): 234 | ns.discover(rule_path) 235 | self.assertEqual(len(ns.rule_types), 0) 236 | 237 | rule_path = os.path.join(self.root_path, 'more', 'more_rules.py') 238 | ns = RuleNamespace() 239 | ns.discover(rule_path) 240 | self.assertEqual(len(ns.rule_types), 1) 241 | self.assertTrue(ns.rule_types.has_key('rule_c')) 242 | 
243 | 244 | if __name__ == '__main__': 245 | unittest2.main() 246 | -------------------------------------------------------------------------------- /anvil/rules/TODO: -------------------------------------------------------------------------------- 1 | Some ideas for rules, based on old code: 2 | 3 | # ============================================================================== 4 | # Common Tasks 5 | # ============================================================================== 6 | 7 | CopyFilesTask 8 | ExecutableTask 9 | - JavaExecutableTask 10 | - NodeExecutableTask 11 | - PythonExecutableTask 12 | 13 | # ============================================================================== 14 | # Core 15 | # ============================================================================== 16 | 17 | copy_files( 18 | name='a', 19 | srcs=['a/file.txt']) 20 | - results in out/a/file.txt 21 | 22 | copy_files( 23 | name='a', 24 | srcs=glob('**/*.txt')) 25 | - results in out/things/a/file.txt + others 26 | 27 | concat_files( 28 | name='catted', 29 | srcs=['a.txt'] + glob('**/*.txt')) 30 | - results in out/catted 31 | 32 | concat_files( 33 | name='catted', 34 | srcs=['a.txt'] + glob('**/*.txt'), 35 | out='catted.txt') 36 | - results in out/catted.txt 37 | 38 | template_files( 39 | name='templated_txt', 40 | srcs=glob('**/*.txt'), 41 | params={ 42 | 'author': 'bob', 43 | 'year': '2012', 44 | }) 45 | - results in out/...txt with ${author} and ${year} replaced 46 | 47 | # ============================================================================== 48 | # Audio 49 | # ============================================================================== 50 | 51 | compile_soundbank( 52 | name='bank1', 53 | srcs=['*.wav'], 54 | out='assets/audio/') 55 | - creates out/assets/audio/bank1.wav + bank1.json 56 | 57 | SOUNDBANK_FORMATS = select_any({ 58 | 'RELEASE': ['audio/wav', 'audio/mpeg', 'audio/ogg', 'audio/mp4',], 59 | }, ['audio/wav',]) 60 | transcode_audio( 61 | name='encoded_banks', 
62 | srcs=[':bank1', ':bank2'], 63 | formats=SOUNDBANK_FORMATS) 64 | - encodes all input audio files to the specified formats, updating the json 65 | with any new data sources - in this case, it files bank1.json and bank2.json, 66 | transcodes all sources for them, and updates their respective json files - 67 | the output files are all inputs + the transcoded files 68 | 69 | generate_soundbank_js( 70 | name='bank_js', 71 | srcs=':encoded_banks', 72 | namespace='foo.audio', 73 | gen='foo/audio/') 74 | - for each json file generates a js file from the json metadata, resulting in 75 | gen/foo/audio/bank1.js (class foo.audio.bank1) + bank2.js 76 | 77 | compile_tracklist( 78 | name='music', 79 | srcs=['*.ogg'],) 80 | - creates out/assets/audio/music.ogg (copy) + music.json 81 | 82 | TRACKLIST_FORMATS=select_any({ 83 | 'RELEASE': ['audio/mpeg', 'audio/ogg', 'audio/mp4',], 84 | }, ['audio/ogg',]) 85 | transcode_audio( 86 | name='encoded_music', 87 | srcs=':music', 88 | formats=TRACKLIST_FORMATS) 89 | generate_tracklist_js( 90 | name='music_js', 91 | srcs=':encoded_music', 92 | namespace='foo.audio', 93 | gen='foo/audio/') 94 | - for each json file generates a js file from the json metadata, resulting in 95 | gen/foo/audio/music.js (class foo.audio.music) 96 | 97 | 98 | # ============================================================================== 99 | # GLSL 100 | # ============================================================================== 101 | 102 | compile_glsl( 103 | name='compiled_glsl', 104 | srcs=glob('assets/glsl/**/*.glsl*')) 105 | - compiles all .glsl files into .json files, such as assets/glsl/a.glsl -> 106 | out/assets/glsl/a.json - any glsllib files are ignored, but may be used by 107 | the compiler 108 | outputs are only the json files 109 | 110 | generate_glsl_js( 111 | name='glsl_js', 112 | srcs=':compiled_glsl', 113 | namespace='foo.glsl', 114 | gen='foo/glsl/') 115 | - for each json file generates a js file from the json metadata, resulting in 116 
| gen/foo/glsl/a.js (class foo.glsl.a) 117 | 118 | 119 | # ============================================================================== 120 | # CSS 121 | # ============================================================================== 122 | 123 | compile_gss( 124 | name='page_gss', 125 | srcs=glob('assets/css/**/*.gss'), 126 | out='css/page_gss.css', 127 | gen='css/page_gss.js') 128 | - compiles all gss into out/css/page.css, and drops the map file to 129 | gen/css/page.js 130 | 131 | 132 | # ============================================================================== 133 | # Closure JS 134 | # ============================================================================== 135 | 136 | JS_NAMESPACES=['myns1', 'myns2'] 137 | 138 | fix_closure_js( 139 | name='fix_js', 140 | srcs=glob('src/**/*.js'), 141 | namespaces=JS_NAMESPACES) 142 | - runs fixjsstyle on all sources (with the same args as lint_closure_js) and 143 | returns all srcs as outputs 144 | 145 | lint_closure_js( 146 | name='lint_js', 147 | srcs=':fix_js', 148 | namespaces=JS_NAMESPACES) 149 | - runs gjslist over all of the source files with the following args: 150 | --multiprocess 151 | --strict 152 | --jslint_error=all 153 | --closurized_namespaces=goog,gf, + namespaces 154 | and returns all srcs as outputs 155 | 156 | file_set( 157 | name='all_js', 158 | src_filter='*.js', 159 | srcs=[':fix_js', ':audio_rules', ':page_gss',]) 160 | generate_closure_deps_js( 161 | name='deps_js', 162 | srcs=[':all_js'], 163 | gen='my_deps.js') 164 | - runs genjsdeps on all sources and generate the gen/my_deps.js file 165 | note that this pulls in all generated JS code by sourcing from all rules 166 | 167 | file_set( 168 | name='uncompiled', 169 | deps=[':deps_js']) 170 | - a synthetic rule to allow for easy 'uncompiled' building 171 | 172 | SHARED_JS_FLAGS=['--define=foo=false'] 173 | compile_closure_js( 174 | name='compiled_js', 175 | srcs=[':all_js', ':deps_js',], 176 | out='js/compiled.js', 177 | 
root_namespace='myns1.start', 178 | compiler_flags=SHARED_JS_FLAGS + select_many({ 179 | 'RELEASE': ['--define=gf.BUILD_CLIENT=false', 180 | '--define=goog.DEBUG=false', 181 | '--define=goog.asserts.ENABLE_ASSERTS=false',], 182 | }) 183 | - creates a out/js/compiled.js file based on all sources 184 | could add source_map='foo.map' to enable source mapping output 185 | wrap_with_global='s' to do (function(){...})(s) 186 | 187 | 188 | # ============================================================================== 189 | # Future... 190 | # ============================================================================== 191 | 192 | * wget/curl-esque rules w/ caching (grab text/json/manifest from somewhere) 193 | * SASS/LESS/etc 194 | * uglifyjs/etc 195 | * jslint 196 | * html/json/etc linting 197 | * localization utils (common format translations) 198 | * soy compiler 199 | * images/texture compression 200 | * spriting 201 | * more advanced templating with mako 202 | * git info (get current commit hash/etc) - embedded version #s 203 | -------------------------------------------------------------------------------- /anvil/rules/__init__.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """ 4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | -------------------------------------------------------------------------------- /anvil/rules/archive_rules.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Rules for archiving files. 
4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | 8 | 9 | import io 10 | import os 11 | import shutil 12 | import string 13 | import zipfile 14 | 15 | import anvil.util 16 | from anvil.context import RuleContext 17 | from anvil.rule import Rule, build_rule 18 | from anvil.task import Task 19 | 20 | 21 | @build_rule('archive_files') 22 | class ArchiveFilesRule(Rule): 23 | """Zip up files into an archive. 24 | Archives all files into a zip file. build- paths are flattened. 25 | 26 | Inputs: 27 | srcs: Source file paths. 28 | out: Optional output name. If none is provided than the rule name will be 29 | used. 30 | flatten_paths: A list of paths to flatten into the root. For example, 31 | pass ['a/'] to flatten 'a/b/c.txt' to 'b/c.txt' 32 | 33 | Outputs: 34 | All of the srcs archived into a single zip file. If no out is specified 35 | a file with the name of the rule will be created. 36 | """ 37 | 38 | def __init__(self, name, out=None, flatten_paths=None, *args, **kwargs): 39 | """Initializes an archive files rule. 40 | 41 | Args: 42 | name: Rule name. 43 | out: Optional output name. 
44 | """ 45 | super(ArchiveFilesRule, self).__init__(name, *args, **kwargs) 46 | self.out = out 47 | self.flatten_paths = flatten_paths or [] 48 | self.flatten_paths = [path.replace('/', os.path.sep) 49 | for path in self.flatten_paths] 50 | 51 | class _Context(RuleContext): 52 | def begin(self): 53 | super(ArchiveFilesRule._Context, self).begin() 54 | 55 | output_path = self._get_out_path(name=self.rule.out, suffix='.zip') 56 | self._ensure_output_exists(os.path.dirname(output_path)) 57 | self._append_output_paths([output_path]) 58 | 59 | # Skip if cache hit 60 | if self._check_if_cached(): 61 | self._succeed() 62 | return 63 | 64 | # Compute the relative archive path for each file 65 | paths = [] 66 | for src_path in self.src_paths: 67 | rel_path = os.path.relpath(src_path, self.build_env.root_path) 68 | rel_path = anvil.util.strip_build_paths(rel_path) 69 | for prefix in self.rule.flatten_paths: 70 | rel_path = rel_path.replace(prefix, '') 71 | paths.append((src_path, rel_path)) 72 | 73 | # Async issue archive task 74 | d = self._run_task_async(_ArchiveFilesTask( 75 | self.build_env, paths, output_path)) 76 | self._chain(d) 77 | 78 | 79 | class _ArchiveFilesTask(Task): 80 | def __init__(self, build_env, paths, output_path, *args, **kwargs): 81 | super(_ArchiveFilesTask, self).__init__(build_env, *args, **kwargs) 82 | self.paths = paths 83 | self.output_path = output_path 84 | 85 | def execute(self): 86 | f = zipfile.ZipFile(self.output_path, 'w', zipfile.ZIP_DEFLATED) 87 | try: 88 | for path in self.paths: 89 | (src_path, rel_path) = path 90 | f.write(src_path, rel_path) 91 | except: 92 | f.close() 93 | return True 94 | -------------------------------------------------------------------------------- /anvil/rules/closure_gss_rules.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Closure compiler rules for the build system. 

Contains the following rules:
  closure_gss_library
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


import os

from anvil.context import RuleContext
from anvil.rule import Rule, build_rule
from anvil.task import Task, JavaExecutableTask


@build_rule('closure_gss_library')
class ClosureGssLibraryRule(Rule):
  """A Closure Stylesheets transformed file.
  Uses the Closure Stylesheets compiler to cat/minify input GSS files into a
  single output CSS file.

  The input order of the GSS files matters. Dependent files must be provided
  before the files that depend on them.

  Inputs:
    srcs: All source GSS files, in order.
    mode: Minification mode, one of ['MINIFIED', 'DEBUG_COMPILED', 'COMPILED'].
    compiler_jar: Path to a compiler .jar file.
    pretty_print: True to pretty print the output.
    defines: A list of defines for conditional operators.
    compiler_flags: A list of string compiler flags.
    out: Optional output name. If none is provided then the rule name will be
        used.

  Outputs:
    A single compiled CSS file. If no out is specified a file with the name of
    the rule will be created. If enabled, a naming map JS file will also be
    emitted.
  """

  def __init__(self, name, mode, compiler_jar,
               pretty_print=False, defines=None, compiler_flags=None, out=None,
               *args, **kwargs):
    """Initializes a Closure GSS rule.

    Args:
      name: Rule name.
      mode: Minification mode, one of ['MINIFIED', 'DEBUG_COMPILED',
          'COMPILED'].
      compiler_jar: Path to a compiler .jar file.
      pretty_print: True to pretty print the output.
      defines: A list of defines for conditional operators.
      compiler_flags: A list of string compiler flags.
      out: Optional output name. If none is provided then the rule name will be
          used.
    """
    super(ClosureGssLibraryRule, self).__init__(name, *args, **kwargs)
    self.mode = mode
    self.compiler_jar = compiler_jar
    # The jar itself is an input: changes to it must invalidate the cache.
    self._append_dependent_paths([self.compiler_jar])
    self.pretty_print = pretty_print

    # Copy caller lists so later mutation cannot change the rule.
    self.defines = []
    if defines:
      self.defines.extend(defines)

    self.compiler_flags = []
    if compiler_flags:
      self.compiler_flags.extend(compiler_flags)

    self.out = out

  class _Context(RuleContext):
    def begin(self):
      super(ClosureGssLibraryRule._Context, self).begin()

      # Command line is built in compiler-expected order: user flags first,
      # then mode/renaming flags, then output, then inputs.
      args = []
      args.extend(self.rule.compiler_flags)

      # color-stop appears in vendor-prefixed gradients; allow it explicitly.
      args.extend(['--allowed-non-standard-function', 'color-stop'])

      # Both compiled modes emit a renaming map used by goog.setCssNameMapping.
      needs_map_file = False
      if self.rule.mode == 'MINIFIED':
        args.extend(['--rename', 'NONE'])
      elif self.rule.mode == 'DEBUG_COMPILED':
        needs_map_file = True
        args.extend(['--rename', 'DEBUG'])
        args.extend(['--output-renaming-map-format', 'CLOSURE_COMPILED_SPLIT_HYPHENS'])
      elif self.rule.mode == 'COMPILED':
        needs_map_file = True
        args.extend(['--rename', 'CLOSURE'])
        args.extend(['--output-renaming-map-format', 'CLOSURE_COMPILED_SPLIT_HYPHENS'])

      if needs_map_file:
        map_path = self._get_gen_path(name=self.rule.out, suffix='.js')
        self._ensure_output_exists(os.path.dirname(map_path))
        self._append_output_paths([map_path])
        args.extend(['--output-renaming-map', map_path])

      if self.rule.pretty_print:
        args.append('--pretty-print')

      output_path = self._get_out_path(name=self.rule.out, suffix='.css')
      self._ensure_output_exists(os.path.dirname(output_path))
      self._append_output_paths([output_path])
      args.extend(['--output-file', output_path])

      args.extend(self.src_paths)

      # Skip if cache hit
      if self._check_if_cached():
        self._succeed()
        return

      jar_path = self._resolve_input_files([self.rule.compiler_jar])[0]
      d = self._run_task_async(JavaExecutableTask(
          self.build_env, jar_path, args))
      # TODO(benvanik): pull out (stdout, stderr) from result and the exception
      # to get better error logging
      self._chain(d)
# Copyright 2012 Google Inc. All Rights Reserved.

"""Closure template rules for the build system.

Contains the following rules:
  closure_soy_library
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


import os

from anvil.context import RuleContext
from anvil.rule import Rule, build_rule
from anvil.task import Task, JavaExecutableTask


@build_rule('closure_soy_library')
class ClosureSoyLibraryRule(Rule):
  """A Closure Templates transformed file.
  Uses the Closure Templates compiler to translate input soy templates into
  JS files. Each input .soy file results in a single output .js file.

  Inputs:
    srcs: All source soy files.
    compiler_jar: Path to a compiler .jar file.
    compiler_flags: A list of string compiler flags.

  Outputs:
    One .js file for each input .soy file.
  """

  def __init__(self, name, compiler_jar, compiler_flags=None,
               *args, **kwargs):
    """Initializes a Closure templating rule.

    Args:
      name: Rule name.
      compiler_jar: Path to a compiler .jar file.
      compiler_flags: A list of string compiler flags.
    """
    super(ClosureSoyLibraryRule, self).__init__(name, *args, **kwargs)
    self.compiler_jar = compiler_jar
    # The jar itself is an input: changes to it must invalidate the cache.
    self._append_dependent_paths([self.compiler_jar])

    self.compiler_flags = []
    if compiler_flags:
      self.compiler_flags.extend(compiler_flags)

  class _Context(RuleContext):
    def begin(self):
      super(ClosureSoyLibraryRule._Context, self).begin()

      # If there are no source paths, die
      if not len(self.src_paths):
        self._succeed()
        return

      args = [
          '--shouldProvideRequireSoyNamespaces',
          '--shouldGenerateJsdoc',
          '--shouldGenerateGoogMsgDefs',
          '--bidiGlobalDir', '1',
          '--codeStyle', 'stringbuilder',
          '--cssHandlingScheme', 'goog',
          # The compiler substitutes {INPUT_*} per source file, mirroring the
          # per-src gen paths appended below.
          '--outputPathFormat', os.path.join(
              os.path.dirname(self._get_gen_path()),
              '{INPUT_DIRECTORY}/{INPUT_FILE_NAME_NO_EXT}-soy.js'),
          ]
      args.extend(self.rule.compiler_flags)

      for src_path in self.src_paths:
        # One '<name>-soy.js' output per input .soy, under the gen path.
        output_path = os.path.splitext(self._get_gen_path_for_src(src_path))[0]
        output_path += '-soy.js'
        self._ensure_output_exists(os.path.dirname(output_path))
        self._append_output_paths([output_path])
        rel_path = os.path.relpath(src_path, self.build_env.root_path)
        args.append(rel_path)

      # Skip if cache hit
      if self._check_if_cached():
        self._succeed()
        return

      jar_path = self._resolve_input_files([self.rule.compiler_jar])[0]
      d = self._run_task_async(JavaExecutableTask(
          self.build_env, jar_path, args))
      # TODO(benvanik): pull out (stdout, stderr) from result and the exception
      # to get better error logging
      self._chain(d)
#!/usr/bin/python

# Copyright 2012 Google Inc. All Rights Reserved.
4 | 5 | """Tests for the core_rules module. 6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import os 12 | import unittest2 13 | 14 | from anvil.context import BuildContext, BuildEnvironment, Status 15 | from anvil.project import FileModuleResolver, Project 16 | from anvil.test import FixtureTestCase, RuleTestCase 17 | from core_rules import * 18 | 19 | 20 | class FileSetRuleTest(RuleTestCase): 21 | """Behavioral tests of the FileSetRule type.""" 22 | fixture='core_rules/file_set' 23 | 24 | def setUp(self): 25 | super(FileSetRuleTest, self).setUp() 26 | self.build_env = BuildEnvironment(root_path=self.root_path) 27 | 28 | def test(self): 29 | project = Project(module_resolver=FileModuleResolver(self.root_path)) 30 | 31 | with BuildContext(self.build_env, project) as ctx: 32 | self.assertTrue(ctx.execute_sync([ 33 | ':a', 34 | ':a_glob', 35 | ':b_ref', 36 | ':all_glob', 37 | ':combo', 38 | ':dupes', 39 | 'dir:b', 40 | 'dir:b_glob', 41 | ])) 42 | 43 | self.assertRuleResultsEqual(ctx, 44 | ':a', ['a.txt',]) 45 | self.assertRuleResultsEqual(ctx, 46 | ':a_glob', ['a.txt',]) 47 | self.assertRuleResultsEqual(ctx, 48 | ':b_ref', ['dir/b.txt',]) 49 | self.assertRuleResultsEqual(ctx, 50 | ':all_glob', ['a.txt', 'dir/b.txt',]) 51 | self.assertRuleResultsEqual(ctx, 52 | ':combo', ['a.txt', 'dir/b.txt',]) 53 | self.assertRuleResultsEqual(ctx, 54 | ':dupes', ['a.txt', 'dir/b.txt',]) 55 | self.assertRuleResultsEqual(ctx, 56 | 'dir:b', ['dir/b.txt',]) 57 | self.assertRuleResultsEqual(ctx, 58 | 'dir:b_glob', ['dir/b.txt',]) 59 | 60 | 61 | class CopyFilesRuleTest(RuleTestCase): 62 | """Behavioral tests of the CopyFilesRule type.""" 63 | fixture='core_rules/copy_files' 64 | 65 | def setUp(self): 66 | super(CopyFilesRuleTest, self).setUp() 67 | self.build_env = BuildEnvironment(root_path=self.root_path) 68 | 69 | def test(self): 70 | project = Project(module_resolver=FileModuleResolver(self.root_path)) 71 | 72 | with BuildContext(self.build_env, project) 
as ctx: 73 | self.assertTrue(ctx.execute_sync([ 74 | ':copy_all_txt', 75 | 'dir:copy_c', 76 | ])) 77 | 78 | self.assertRuleResultsEqual(ctx, 79 | ':copy_all_txt', ['a.txt', 80 | 'dir/b.txt'], 81 | output_prefix='build-out') 82 | self.assertFileContents( 83 | os.path.join(self.root_path, 'build-out/a.txt'), 84 | 'a\n') 85 | self.assertFileContents( 86 | os.path.join(self.root_path, 'build-out/dir/b.txt'), 87 | 'b\n') 88 | 89 | self.assertRuleResultsEqual(ctx, 90 | 'dir:copy_c', ['dir/c.not-txt',], 91 | output_prefix='build-out') 92 | self.assertFileContents( 93 | os.path.join(self.root_path, 'build-out/dir/c.not-txt'), 94 | 'c\n') 95 | 96 | 97 | class ConcatFilesRuleTest(RuleTestCase): 98 | """Behavioral tests of the ConcatFilesRule type.""" 99 | fixture='core_rules/concat_files' 100 | 101 | def setUp(self): 102 | super(ConcatFilesRuleTest, self).setUp() 103 | self.build_env = BuildEnvironment(root_path=self.root_path) 104 | 105 | def test(self): 106 | project = Project(module_resolver=FileModuleResolver(self.root_path)) 107 | 108 | with BuildContext(self.build_env, project) as ctx: 109 | self.assertTrue(ctx.execute_sync([ 110 | ':concat', 111 | ':concat_out', 112 | ':concat_template', 113 | ':templated', 114 | ])) 115 | 116 | self.assertRuleResultsEqual(ctx, 117 | ':concat', ['concat',], 118 | output_prefix='build-out') 119 | self.assertFileContents( 120 | os.path.join(self.root_path, 'build-out/concat'), 121 | '1\n2\n3\n4\n') 122 | 123 | self.assertRuleResultsEqual(ctx, 124 | ':concat_out', ['concat.txt',], 125 | output_prefix='build-out') 126 | self.assertFileContents( 127 | os.path.join(self.root_path, 'build-out/concat.txt'), 128 | '1\n2\n3\n4\n') 129 | 130 | self.assertRuleResultsEqual(ctx, 131 | ':concat_template', ['concat_template',], 132 | output_prefix='build-out') 133 | self.assertFileContents( 134 | os.path.join(self.root_path, 'build-out/concat_template'), 135 | '1\n2\n3\n4\nx${hello}x\n1\n2\n3\n4\n') 136 | self.assertRuleResultsEqual(ctx, 137 | 
':templated', ['concat_template.out',], 138 | output_prefix='build-out') 139 | self.assertFileContents( 140 | os.path.join(self.root_path, 'build-out/concat_template.out'), 141 | '1\n2\n3\n4\nxworld!x\n1\n2\n3\n4\n') 142 | 143 | 144 | if __name__ == '__main__': 145 | unittest2.main() 146 | -------------------------------------------------------------------------------- /anvil/rules/less_rules.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """LESS stylesheets rules for the build system. 4 | 5 | Contains the following rules: 6 | less_css_library 7 | """ 8 | 9 | __author__ = 'benvanik@google.com (Ben Vanik)' 10 | 11 | 12 | import os 13 | 14 | from anvil.context import RuleContext 15 | from anvil.rule import Rule, build_rule 16 | from anvil.task import Task, NodeExecutableTask 17 | 18 | 19 | @build_rule('less_css_library') 20 | class LessCssLibraryRule(Rule): 21 | """A LESS transformed file. 22 | Uses the LESS compiler to process an input LESS file into a 23 | single output CSS file. 24 | 25 | Only the first source will be used as the root to less. The rest will be 26 | treated as dependencies. 27 | 28 | Inputs: 29 | srcs: The root LESS file.. 30 | include_paths: Paths to search for include files. 31 | compiler_flags: A list of string compiler flags. 32 | out: Optional output name. If none is provided than the rule name will be 33 | used. 34 | 35 | Outputs: 36 | A single compiled CSS file. If no out is specified a file with the name of 37 | the rule will be created. 38 | """ 39 | 40 | def __init__(self, name, include_paths=None, 41 | compiler_flags=None, out=None, *args, **kwargs): 42 | """Initializes a LESS CSS rule. 43 | 44 | Args: 45 | srcs: The root LESS file. 46 | include_paths: Paths to search for include files. 47 | compiler_flags: A list of string compiler flags. 48 | out: Optional output name. If none is provided than the rule name will be 49 | used. 
50 | """ 51 | super(LessCssLibraryRule, self).__init__(name, *args, **kwargs) 52 | 53 | self.include_paths = [] 54 | if include_paths: 55 | self.include_paths.extend(include_paths) 56 | self._append_dependent_paths(self.include_paths) 57 | 58 | self.compiler_flags = [] 59 | if compiler_flags: 60 | self.compiler_flags.extend(compiler_flags) 61 | 62 | self.out = out 63 | 64 | class _Context(RuleContext): 65 | def begin(self): 66 | super(LessCssLibraryRule._Context, self).begin() 67 | 68 | args = [ 69 | ] 70 | args.extend(self.rule.compiler_flags) 71 | 72 | if len(self.rule.include_paths): 73 | args.append('--include-path=%s' % ( 74 | ':'.join(self.rule.include_paths))) 75 | 76 | output_path = self._get_out_path(name=self.rule.out, suffix='.css') 77 | self._ensure_output_exists(os.path.dirname(output_path)) 78 | self._append_output_paths([output_path]) 79 | 80 | args.append(self.src_paths[0]) 81 | args.append(output_path) 82 | 83 | # Skip if cache hit 84 | if self._check_if_cached(): 85 | self._succeed() 86 | return 87 | 88 | d = self._run_task_async(NodeExecutableTask( 89 | self.build_env, 'node_modules/less/bin/lessc', args)) 90 | # TODO(benvanik): pull out (stdout, stderr) from result and the exception 91 | # to get better error logging 92 | self._chain(d) 93 | -------------------------------------------------------------------------------- /anvil/rules/overlay_rules.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Merged path view rules for the build system. 4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | 8 | 9 | import os 10 | 11 | from anvil.context import RuleContext 12 | from anvil.rule import Rule, build_rule 13 | from anvil.task import Task 14 | import anvil.util 15 | 16 | 17 | @build_rule('overlay_view') 18 | class OverlayViewRule(Rule): 19 | """Constructs or updates an view over merged paths. 
20 | This uses system symlinks to build a path that contains access to all source 21 | paths as if they existed side-by-side. This only needs to be re-run when 22 | file structure changes, and allows for access to files at their sources 23 | (adding edit-reloadability). 24 | 25 | Inputs: 26 | srcs: Source file paths. All of the files that will be available. 27 | out: Optional output name. If none is provided than the rule name will be 28 | used. 29 | flatten_paths: A list of paths to flatten into the root. For example, 30 | pass ['a/'] to flatten 'a/b/c.txt' to 'b/c.txt' 31 | 32 | Outputs: 33 | Merged directory filled with symlinks. 34 | """ 35 | 36 | def __init__(self, name, out=None, flatten_paths=None, *args, **kwargs): 37 | """Initializes an overlay view rule. 38 | 39 | Args: 40 | name: Rule name. 41 | out: Optional output name. If none is provided than the rule name will be 42 | used. 43 | """ 44 | super(OverlayViewRule, self).__init__(name, *args, **kwargs) 45 | self.out = out 46 | self.flatten_paths = flatten_paths or [] 47 | self.flatten_paths = [path.replace('/', os.path.sep) 48 | for path in self.flatten_paths] 49 | 50 | class _Context(RuleContext): 51 | def begin(self): 52 | super(OverlayViewRule._Context, self).begin() 53 | 54 | # Could, if output exists, only modify added/removed symlinks 55 | # file_delta = self.file_delta 56 | # file_delta.added_files 57 | # file_delta.removed_files 58 | 59 | # Ensure output exists 60 | output_path = self._get_root_path(name=self.rule.out) 61 | self._ensure_output_exists(output_path) 62 | self._append_output_paths([output_path]) 63 | 64 | # Compute the relative path for each file 65 | paths = [] 66 | for src_path in self.src_paths: 67 | rel_path = os.path.relpath(src_path, self.build_env.root_path) 68 | rel_path = anvil.util.strip_build_paths(rel_path) 69 | for prefix in self.rule.flatten_paths: 70 | rel_path = rel_path.replace(prefix, '') 71 | paths.append((src_path, rel_path)) 72 | 73 | # Async issue linking task 
74 | d = self._run_task_async(_SymlinkTask( 75 | self.build_env, paths, output_path)) 76 | self._chain(d) 77 | 78 | 79 | class _SymlinkTask(Task): 80 | def __init__(self, build_env, paths, output_path, *args, **kwargs): 81 | super(_SymlinkTask, self).__init__(build_env, *args, **kwargs) 82 | self.paths = paths 83 | self.output_path = output_path 84 | 85 | def execute(self): 86 | # Tracks all exists checks on link parent paths 87 | checked_dirs = {} 88 | 89 | for path in self.paths: 90 | (src_path, rel_path) = path 91 | link_path = os.path.join(self.output_path, rel_path) 92 | if not os.path.exists(link_path): 93 | # Ensure parent of link path exists 94 | link_parent = os.path.dirname(link_path) 95 | if not checked_dirs.get(link_parent, False): 96 | if not os.path.exists(link_parent): 97 | os.makedirs(link_parent) 98 | checked_dirs[link_parent] = True 99 | 100 | os.symlink(src_path, link_path) 101 | 102 | return True 103 | -------------------------------------------------------------------------------- /anvil/rules/preprocessor_rules.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Simple preprocessor rules for the build system. 4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | 8 | 9 | import io 10 | import os 11 | import re 12 | import shutil 13 | import string 14 | 15 | from anvil.context import RuleContext 16 | from anvil.rule import Rule, build_rule 17 | from anvil.task import Task, ExecutableTask 18 | 19 | 20 | @build_rule('template_files') 21 | class TemplateFilesRule(Rule): 22 | """Applies simple templating to a set of files. 23 | Processes each source file replacing a list of strings with corresponding 24 | strings. 

  This uses the Python string templating functionality documented here:
  http://docs.python.org/library/string.html#template-strings

  Identifiers in the source template should be of the form "${identifier}", each
  of which maps to a key in the params dictionary.

  In order to prevent conflicts, it is strongly encouraged that a new_extension
  value is provided. If a source file has an extension it will be replaced with
  the specified one, and files without extensions will have it added.

  TODO(benvanik): more advanced template vars? perhaps regex?

  Inputs:
    srcs: Source file paths.
    new_extension: The extension to replace (or add) to all output files, with a
        leading dot ('.txt').
    params: A dictionary of key-value replacement parameters.

  Outputs:
    One file for each source file with the templating rules applied.
  """

  def __init__(self, name, new_extension=None, params=None, *args, **kwargs):
    """Initializes a file templating rule.

    Args:
      name: Rule name.
      new_extension: Replacement extension ('.txt').
      params: A dictionary of key-value replacement parameters.
    """
    super(TemplateFilesRule, self).__init__(name, *args, **kwargs)
    self.new_extension = new_extension
    # NOTE(review): params is stored as-is; if omitted it remains None and
    # Template.substitute(None) in _TemplateFilesTask would raise - presumably
    # every caller passes params. TODO: confirm.
    self.params = params

  class _Context(RuleContext):
    def begin(self):
      super(TemplateFilesRule._Context, self).begin()

      # Get all source -> output paths (and ensure directories exist)
      file_pairs = []
      for src_path in self.src_paths:
        out_path = self._get_out_path_for_src(src_path)
        if self.rule.new_extension:
          # Replace (or add) the extension to avoid clobbering the source name.
          out_path = os.path.splitext(out_path)[0] + self.rule.new_extension
        self._ensure_output_exists(os.path.dirname(out_path))
        self._append_output_paths([out_path])
        file_pairs.append((src_path, out_path))

      # Skip if cache hit
      if self._check_if_cached():
        self._succeed()
        return

      # Async issue templating task
      d = self._run_task_async(_TemplateFilesTask(
          self.build_env, file_pairs, self.rule.params))
      self._chain(d)


class _TemplateFilesTask(Task):
  """Task that applies string.Template substitution to (src, out) pairs."""

  def __init__(self, build_env, file_pairs, params, *args, **kwargs):
    super(_TemplateFilesTask, self).__init__(build_env, *args, **kwargs)
    self.file_pairs = file_pairs
    self.params = params

  def execute(self):
    for file_pair in self.file_pairs:
      with io.open(file_pair[0], 'rt') as f:
        template_str = f.read()
      # substitute() (not safe_substitute) - a placeholder missing from params
      # raises KeyError and fails the task.
      template = string.Template(template_str)
      result_str = template.substitute(self.params)
      with io.open(file_pair[1], 'wt') as f:
        f.write(result_str)
    return True


@build_rule('strip_comments')
class StripCommentsRule(Rule):
  """Applies simple comment stripping to a set of files.
  Processes each source file removing C/C++-style comments.

  Note that this is incredibly hacky and may break in all sorts of cases.

  In order to prevent conflicts, it is strongly encouraged that a new_extension
  value is provided.
If a source file has an extension it will be replaced with 111 | the specified one, and files without extensions will have it added. 112 | 113 | Inputs: 114 | srcs: Source file paths. 115 | new_extension: The extension to replace (or add) to all output files, with a 116 | leading dot ('.txt'). 117 | 118 | Outputs: 119 | One file for each source file with the comments removed. 120 | """ 121 | 122 | def __init__(self, name, new_extension=None, *args, **kwargs): 123 | """Initializes a comment stripping rule. 124 | 125 | Args: 126 | name: Rule name. 127 | new_extension: Replacement extension ('.txt'). 128 | """ 129 | super(StripCommentsRule, self).__init__(name, *args, **kwargs) 130 | self.new_extension = new_extension 131 | 132 | class _Context(RuleContext): 133 | def begin(self): 134 | super(StripCommentsRule._Context, self).begin() 135 | 136 | # Get all source -> output paths (and ensure directories exist) 137 | file_pairs = [] 138 | for src_path in self.src_paths: 139 | out_path = self._get_out_path_for_src(src_path) 140 | if self.rule.new_extension: 141 | out_path = os.path.splitext(out_path)[0] + self.rule.new_extension 142 | self._ensure_output_exists(os.path.dirname(out_path)) 143 | self._append_output_paths([out_path]) 144 | file_pairs.append((src_path, out_path)) 145 | 146 | # Skip if cache hit 147 | if self._check_if_cached(): 148 | self._succeed() 149 | return 150 | 151 | # Async issue stripping task 152 | d = self._run_task_async(_StripCommentsRuleTask( 153 | self.build_env, file_pairs)) 154 | self._chain(d) 155 | 156 | 157 | class _StripCommentsRuleTask(Task): 158 | def __init__(self, build_env, file_pairs, *args, **kwargs): 159 | super(_StripCommentsRuleTask, self).__init__(build_env, *args, **kwargs) 160 | self.file_pairs = file_pairs 161 | 162 | def execute(self): 163 | for file_pair in self.file_pairs: 164 | with io.open(file_pair[0], 'rt') as f: 165 | raw_str = f.read() 166 | 167 | # Code from Markus Jarderot, posted to stackoverflow 168 | def 
replacer(match): 169 | s = match.group(0) 170 | if s.startswith('/'): 171 | return "" 172 | else: 173 | return s 174 | pattern = re.compile( 175 | r'//.*?$|/\*.*?\*/|\'(?:\\.|[^\\\'])*\'|"(?:\\.|[^\\"])*"', 176 | re.DOTALL | re.MULTILINE) 177 | result_str = re.sub(pattern, replacer, raw_str) 178 | 179 | with io.open(file_pair[1], 'wt') as f: 180 | f.write(result_str) 181 | 182 | return True 183 | 184 | 185 | @build_rule('preprocess') 186 | class PreprocessRule(Rule): 187 | """Applies simple C-style preprocessing to a set of files. 188 | Processes each source file handling the built-in preprocessor rules. 189 | 190 | Note that this is incredibly hacky and may break in all sorts of cases. 191 | 192 | In order to prevent conflicts, it is strongly encouraged that a new_extension 193 | value is provided. If a source file has an extension it will be replaced with 194 | the specified one, and files without extensions will have it added. 195 | 196 | Inputs: 197 | srcs: Source file paths. 198 | new_extension: The extension to replace (or add) to all output files, with a 199 | leading dot ('.txt'). 200 | defines: A list of values to be defined by default. 201 | Example - 'DEBUG'. 202 | 203 | Outputs: 204 | One file for each source file after preprocessing. 205 | """ 206 | 207 | def __init__(self, name, new_extension=None, defines=None, *args, **kwargs): 208 | """Initializes a preprocessing rule. 209 | 210 | Args: 211 | name: Rule name. 212 | new_extension: Replacement extension ('.txt'). 213 | defines: A list of defines. 
214 | """ 215 | super(PreprocessRule, self).__init__(name, *args, **kwargs) 216 | self.new_extension = new_extension 217 | self.defines = defines[:] if defines else [] 218 | 219 | class _Context(RuleContext): 220 | def begin(self): 221 | super(PreprocessRule._Context, self).begin() 222 | 223 | # Get all source -> output paths (and ensure directories exist) 224 | file_pairs = [] 225 | for src_path in self.src_paths: 226 | out_path = self._get_out_path_for_src(src_path) 227 | if self.rule.new_extension: 228 | out_path = os.path.splitext(out_path)[0] + self.rule.new_extension 229 | self._ensure_output_exists(os.path.dirname(out_path)) 230 | self._append_output_paths([out_path]) 231 | file_pairs.append((src_path, out_path)) 232 | 233 | # Skip if cache hit 234 | if self._check_if_cached(): 235 | self._succeed() 236 | return 237 | 238 | # Async issue stripping task 239 | d = self._run_task_async(_PreprocessFilesTask( 240 | self.build_env, file_pairs, self.rule.defines)) 241 | self._chain(d) 242 | 243 | 244 | class _PreprocessFilesTask(Task): 245 | def __init__(self, build_env, file_pairs, defines, *args, **kwargs): 246 | super(_PreprocessFilesTask, self).__init__(build_env, *args, **kwargs) 247 | self.file_pairs = file_pairs 248 | self.defines = defines 249 | 250 | def execute(self): 251 | for file_pair in self.file_pairs: 252 | with io.open(file_pair[0], 'rt') as f: 253 | source_lines = f.readlines() 254 | 255 | result_str = self._preprocess(source_lines, self.defines) 256 | 257 | with io.open(file_pair[1], 'wt') as f: 258 | f.write(result_str) 259 | 260 | return True 261 | 262 | def _preprocess(self, source_lines, global_defines): 263 | # All defines in global + #defined in file 264 | file_defines = set(global_defines) 265 | 266 | # A stack of #ifdef scopes - for a given line to be included all must be 267 | # set to true 268 | inclusion_scopes = [True] 269 | 270 | target_lines = [] 271 | for line in source_lines: 272 | line_included = all(inclusion_scopes) 273 | 274 | 
if line[0] == '#': 275 | line_included = False 276 | if line.startswith('#ifdef '): 277 | value = line[7:].strip() 278 | inclusion_scopes.append(value in file_defines) 279 | elif line.startswith('#else'): 280 | inclusion_scopes[-1] = not inclusion_scopes[-1] 281 | elif line.startswith('#endif'): 282 | inclusion_scopes.pop() 283 | elif line.startswith('#define '): 284 | value = line[8:].strip() 285 | file_defines.add(value) 286 | elif line.startswith('#undef '): 287 | value = line[7:].strip() 288 | file_defines.remove(value) 289 | 290 | if line_included: 291 | target_lines.append(line) 292 | 293 | return '\n'.join(target_lines) 294 | -------------------------------------------------------------------------------- /anvil/rules/preprocessor_rules_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | """Tests for the preprocessor_rules module. 6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import os 12 | import unittest2 13 | 14 | from anvil.context import BuildContext, BuildEnvironment, Status 15 | from anvil.project import FileModuleResolver, Project 16 | from anvil.test import FixtureTestCase, RuleTestCase 17 | from preprocessor_rules import * 18 | 19 | 20 | class TemplateFilesRuleTest(RuleTestCase): 21 | """Behavioral tests of the TemplateFilesRule type.""" 22 | fixture='preprocessor_rules/template_files' 23 | 24 | def setUp(self): 25 | super(TemplateFilesRuleTest, self).setUp() 26 | self.build_env = BuildEnvironment(root_path=self.root_path) 27 | 28 | def test(self): 29 | project = Project(module_resolver=FileModuleResolver(self.root_path)) 30 | 31 | with BuildContext(self.build_env, project) as ctx: 32 | self.assertTrue(ctx.execute_sync([ 33 | ':template_all', 34 | ':template_dep_2', 35 | ])) 36 | 37 | self.assertRuleResultsEqual(ctx, 38 | ':template_all', ['a.txt', 39 | 'dir/b.txt'], 40 | 
output_prefix='build-out') 41 | self.assertFileContents( 42 | os.path.join(self.root_path, 'build-out/a.txt'), 43 | '123world456\n') 44 | self.assertFileContents( 45 | os.path.join(self.root_path, 'build-out/dir/b.txt'), 46 | 'b123world456\n') 47 | 48 | self.assertRuleResultsEqual(ctx, 49 | ':template_dep_1', ['a.nfo', 50 | 'dir/b.nfo'], 51 | output_prefix='build-out') 52 | self.assertFileContents( 53 | os.path.join(self.root_path, 'build-out/a.nfo'), 54 | '123${arg2}456\n') 55 | self.assertFileContents( 56 | os.path.join(self.root_path, 'build-out/dir/b.nfo'), 57 | 'b123${arg2}456\n') 58 | 59 | self.assertRuleResultsEqual(ctx, 60 | ':template_dep_2', ['a.out', 61 | 'dir/b.out'], 62 | output_prefix='build-out') 63 | self.assertFileContents( 64 | os.path.join(self.root_path, 'build-out/a.out'), 65 | '123world!456\n') 66 | self.assertFileContents( 67 | os.path.join(self.root_path, 'build-out/dir/b.out'), 68 | 'b123world!456\n') 69 | 70 | 71 | if __name__ == '__main__': 72 | unittest2.main() 73 | -------------------------------------------------------------------------------- /anvil/task_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | """Tests for the task module. 
6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import unittest2 12 | 13 | from anvil.context import BuildEnvironment 14 | from anvil.task import * 15 | from anvil.test import AsyncTestCase, FixtureTestCase 16 | 17 | 18 | # TODO(benvanik): test WriteFileTask 19 | # TODO(benvanik): test MakoTemplateTask 20 | 21 | 22 | class ExecutableTaskTest(FixtureTestCase): 23 | """Behavioral tests for ExecutableTask.""" 24 | fixture = 'simple' 25 | 26 | def setUp(self): 27 | super(ExecutableTaskTest, self).setUp() 28 | self.build_env = BuildEnvironment(root_path=self.root_path) 29 | 30 | @unittest2.skipUnless(sys.platform.startswith('win'), 'platform') 31 | def testExecutionWindows(self): 32 | task = ExecutableTask(self.build_env, 'cmd', [ 33 | '/Q', '/C', 34 | 'type', 35 | os.path.join(self.root_path, 'a.txt')]) 36 | self.assertEqual(task.execute(), 37 | ('hello!\n', '')) 38 | 39 | task = ExecutableTask(self.build_env, 'cmd', [ 40 | '/Q', '/C', 41 | 'type', 42 | os.path.join(self.root_path, 'x.txt')]) 43 | with self.assertRaises(ExecutableError): 44 | task.execute() 45 | 46 | @unittest2.skipIf(sys.platform.startswith('win'), 'platform') 47 | def testExecutionUnix(self): 48 | task = ExecutableTask(self.build_env, 'cat', [ 49 | os.path.join(self.root_path, 'a.txt')]) 50 | self.assertEqual(task.execute(), 51 | ('hello!\n', '')) 52 | 53 | task = ExecutableTask(self.build_env, 'cat', [ 54 | os.path.join(self.root_path, 'x.txt')]) 55 | with self.assertRaises(ExecutableError): 56 | task.execute() 57 | 58 | def testJava(self): 59 | version = JavaExecutableTask.detect_java_version() 60 | self.assertNotEqual(len(version), 0) 61 | self.assertIsNone( 62 | JavaExecutableTask.detect_java_version(java_executable='xxx')) 63 | 64 | # TODO(benvanik): test a JAR somehow 65 | task = JavaExecutableTask(self.build_env, 'some_jar') 66 | 67 | # def testNode(self): 68 | # version = NodeExecutableTask.detect_node_version() 69 | # self.assertNotEqual(len(version), 0) 70 | # 
self.assertIsNone( 71 | # NodeExecutableTask.detect_node_version(node_executable='xxx')) 72 | 73 | # # TODO(benvanik): test a nodejs app 74 | # task = NodeExecutableTask(self.build_env, 'some_js') 75 | 76 | # def testPython(self): 77 | # version = PythonExecutableTask.detect_python_version() 78 | # self.assertNotEqual(len(version), 0) 79 | # self.assertIsNone( 80 | # PythonExecutableTask.detect_python_version(python_executable='xxx')) 81 | 82 | # # TODO(benvanik): test a python script 83 | # task = PythonExecutableTask(self.build_env, 'some_py') 84 | 85 | 86 | class SuccessTask(Task): 87 | def __init__(self, build_env, success_result, *args, **kwargs): 88 | super(SuccessTask, self).__init__(build_env, *args, **kwargs) 89 | self.success_result = success_result 90 | def execute(self): 91 | return self.success_result 92 | 93 | class FailureTask(Task): 94 | def execute(self): 95 | raise TypeError('Failed!') 96 | 97 | 98 | class TaskExecutorTest(AsyncTestCase): 99 | """Behavioral tests of the TaskExecutor type.""" 100 | 101 | def runTestsWithExecutorType(self, executor_cls): 102 | build_env = BuildEnvironment() 103 | 104 | executor = executor_cls() 105 | executor.close() 106 | with self.assertRaises(RuntimeError): 107 | executor.run_task_async(SuccessTask(build_env, True)) 108 | with self.assertRaises(RuntimeError): 109 | executor.close() 110 | 111 | with executor_cls() as executor: 112 | d = executor.run_task_async(SuccessTask(build_env, True)) 113 | executor.wait(d) 114 | self.assertFalse(executor.has_any_running()) 115 | self.assertCallbackEqual(d, True) 116 | executor.close() 117 | self.assertFalse(executor.has_any_running()) 118 | 119 | with executor_cls() as executor: 120 | d = executor.run_task_async(FailureTask(build_env)) 121 | executor.wait(d) 122 | self.assertFalse(executor.has_any_running()) 123 | self.assertErrbackWithError(d, TypeError) 124 | 125 | d = executor.run_task_async(SuccessTask(build_env, True)) 126 | executor.wait(d) 127 | executor.wait(d) 128 | 
self.assertFalse(executor.has_any_running()) 129 | self.assertCallback(d) 130 | 131 | da = executor.run_task_async(SuccessTask(build_env, 'a')) 132 | executor.wait(da) 133 | self.assertFalse(executor.has_any_running()) 134 | self.assertCallbackEqual(da, 'a') 135 | db = executor.run_task_async(SuccessTask(build_env, 'b')) 136 | executor.wait(db) 137 | self.assertFalse(executor.has_any_running()) 138 | self.assertCallbackEqual(db, 'b') 139 | dc = executor.run_task_async(SuccessTask(build_env, 'c')) 140 | executor.wait(dc) 141 | self.assertFalse(executor.has_any_running()) 142 | self.assertCallbackEqual(dc, 'c') 143 | 144 | da = executor.run_task_async(SuccessTask(build_env, 'a')) 145 | db = executor.run_task_async(SuccessTask(build_env, 'b')) 146 | dc = executor.run_task_async(SuccessTask(build_env, 'c')) 147 | executor.wait([da, db, dc]) 148 | self.assertFalse(executor.has_any_running()) 149 | self.assertCallbackEqual(dc, 'c') 150 | self.assertCallbackEqual(db, 'b') 151 | self.assertCallbackEqual(da, 'a') 152 | 153 | da = executor.run_task_async(SuccessTask(build_env, 'a')) 154 | db = executor.run_task_async(FailureTask(build_env)) 155 | dc = executor.run_task_async(SuccessTask(build_env, 'c')) 156 | executor.wait(da) 157 | self.assertCallbackEqual(da, 'a') 158 | executor.wait(db) 159 | self.assertErrbackWithError(db, TypeError) 160 | executor.wait(dc) 161 | self.assertCallbackEqual(dc, 'c') 162 | self.assertFalse(executor.has_any_running()) 163 | 164 | # This test is not quite right - it's difficult to test for proper 165 | # early termination 166 | with executor_cls() as executor: 167 | executor.close(graceful=False) 168 | self.assertFalse(executor.has_any_running()) 169 | 170 | def testInProcess(self): 171 | self.runTestsWithExecutorType(InProcessTaskExecutor) 172 | 173 | def testMultiprocess(self): 174 | self.runTestsWithExecutorType(MultiProcessTaskExecutor) 175 | 176 | 177 | if __name__ == '__main__': 178 | unittest2.main() 179 | 
-------------------------------------------------------------------------------- /anvil/test.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 2 | 3 | """Base test case for tests that require static file fixtures. 4 | """ 5 | 6 | __author__ = 'benvanik@google.com (Ben Vanik)' 7 | 8 | 9 | import io 10 | import os 11 | import tempfile 12 | import shutil 13 | import sys 14 | import unittest2 15 | 16 | from anvil.context import Status 17 | from anvil import util 18 | 19 | 20 | def main(): 21 | """Entry point for running tests. 22 | """ 23 | # Collect tests 24 | tests = collector() 25 | 26 | # Run the tests in the default runner 27 | test_runner = unittest2.runner.TextTestRunner(verbosity=2) 28 | test_runner.run(tests) 29 | 30 | 31 | def collector(): 32 | """Collects test for the setuptools test_suite command. 33 | """ 34 | # Only find test_*.py files under anvil/ 35 | loader = unittest2.TestLoader() 36 | return loader.discover('anvil', 37 | pattern='*_test.py', 38 | top_level_dir='.') 39 | 40 | 41 | class AsyncTestCase(unittest2.TestCase): 42 | """Test case adding additional asserts for async results.""" 43 | 44 | def assertCallback(self, deferred): 45 | self.assertTrue(deferred.is_done()) 46 | done = [] 47 | def _callback(*args, **kwargs): 48 | done.append(True) 49 | def _errback(*args, **kwargs): 50 | self.fail('Deferred failed when it should have succeeded') 51 | deferred.add_errback_fn(_errback) 52 | deferred.add_callback_fn(_callback) 53 | if not len(done): 54 | self.fail('Deferred not called back with success') 55 | 56 | def assertCallbackEqual(self, deferred, value): 57 | self.assertTrue(deferred.is_done()) 58 | done = [] 59 | def _callback(*args, **kwargs): 60 | self.assertEqual(args[0], value) 61 | done.append(True) 62 | def _errback(*args, **kwargs): 63 | self.fail('Deferred failed when it should have succeeded') 64 | deferred.add_errback_fn(_errback) 65 | 
deferred.add_callback_fn(_callback) 66 | if not len(done): 67 | self.fail('Deferred not called back with success') 68 | 69 | def assertErrback(self, deferred): 70 | self.assertTrue(deferred.is_done()) 71 | done = [] 72 | def _callback(*args, **kwargs): 73 | self.fail('Deferred succeeded when it should have failed') 74 | def _errback(*args, **kwargs): 75 | done.append(True) 76 | deferred.add_callback_fn(_callback) 77 | deferred.add_errback_fn(_errback) 78 | if not len(done): 79 | self.fail('Deferred not called back with error') 80 | 81 | def assertErrbackEqual(self, deferred, value): 82 | self.assertTrue(deferred.is_done()) 83 | done = [] 84 | def _callback(*args, **kwargs): 85 | self.fail('Deferred succeeded when it should have failed') 86 | def _errback(*args, **kwargs): 87 | self.assertEqual(args[0], value) 88 | done.append(True) 89 | deferred.add_callback_fn(_callback) 90 | deferred.add_errback_fn(_errback) 91 | if not len(done): 92 | self.fail('Deferred not called back with error') 93 | 94 | def assertErrbackWithError(self, deferred, error_cls): 95 | self.assertTrue(deferred.is_done()) 96 | done = [] 97 | def _callback(*args, **kwargs): 98 | self.fail('Deferred succeeded when it should have failed') 99 | def _errback(exception=None, *args, **kwargs): 100 | done.append(True) 101 | self.assertIsInstance(exception, error_cls) 102 | deferred.add_callback_fn(_callback) 103 | deferred.add_errback_fn(_errback) 104 | if not len(done): 105 | self.fail('Deferred not called back with error') 106 | 107 | 108 | class FixtureTestCase(AsyncTestCase): 109 | """Test case supporting static fixture/output support. 110 | Set self.fixture to a folder name from the test/fixtures/ path. 
111 | """ 112 | 113 | def setUp(self): 114 | super(FixtureTestCase, self).setUp() 115 | 116 | # Root output path 117 | self.temp_path = tempfile.mkdtemp() 118 | self.addCleanup(shutil.rmtree, self.temp_path) 119 | self.root_path = self.temp_path 120 | 121 | # Copy fixture files 122 | if self.fixture: 123 | self.root_path = os.path.join(self.root_path, self.fixture) 124 | build_path = util.get_anvil_path() 125 | if not build_path: 126 | raise Error('Unable to find build path') 127 | fixture_path = os.path.join( 128 | build_path, '..', 'test', 'fixtures', self.fixture) 129 | target_path = os.path.join(self.temp_path, self.fixture) 130 | shutil.copytree(fixture_path, target_path) 131 | 132 | def assertFileContents(self, path, contents): 133 | self.assertTrue(os.path.isfile(path)) 134 | with io.open(path, 'rt') as f: 135 | file_contents = f.read() 136 | self.assertEqual(file_contents, contents) 137 | 138 | 139 | class RuleTestCase(FixtureTestCase): 140 | def assertRuleResultsEqual(self, build_ctx, rule_path, expected_file_matches, 141 | output_prefix=''): 142 | results = build_ctx.get_rule_results(rule_path) 143 | self.assertEqual(results[0], Status.SUCCEEDED) 144 | output_paths = results[1] 145 | 146 | fixed_expected = [os.path.normpath(f) for f in expected_file_matches] 147 | 148 | root_path = os.path.join(build_ctx.build_env.root_path, output_prefix) 149 | result_file_list = \ 150 | [os.path.normpath(os.path.relpath(f, root_path)) for f in output_paths] 151 | self.assertEqual( 152 | set(result_file_list), 153 | set(fixed_expected)) 154 | -------------------------------------------------------------------------------- /anvil/util.py: -------------------------------------------------------------------------------- 1 | # Copyright 2012 Google Inc. All Rights Reserved. 
2 | 3 | __author__ = 'benvanik@google.com (Ben Vanik)' 4 | 5 | 6 | import inspect 7 | import os 8 | import re 9 | import string 10 | import sys 11 | import time 12 | 13 | 14 | # Unfortunately there is no one-true-timer in python 15 | # This should always be preferred over direct use of the time module 16 | if sys.platform == 'win32' or sys.platform == 'cygwin': 17 | timer = time.clock # pragma: no cover 18 | else: 19 | timer = time.time # pragma: no cover 20 | 21 | 22 | def get_anvil_path(): 23 | """Gets the anvil/ path. 24 | 25 | Returns: 26 | The full path to the anvil/ source. 27 | """ 28 | return os.path.normpath(os.path.dirname(__file__)) 29 | 30 | 31 | def get_build_file_path(path): 32 | """Gets the path to a BUILD file, given a directory or file. 33 | If a directory is passed the implicit build file names (such as BUILD) will 34 | be checked; if they exist, they will be returned. 35 | If the file cannot be found/does not exist the input will be returned. 36 | 37 | Args: 38 | path: File or directory path. 39 | 40 | Returns: 41 | The input or a modified path if a special build file exists. 42 | """ 43 | if os.path.isdir(path): 44 | if os.path.isfile(os.path.join(path, 'BUILD.anvil')): 45 | return os.path.join(path, 'BUILD.anvil') 46 | elif os.path.isfile(os.path.join(path, 'BUILD')): 47 | return os.path.join(path, 'BUILD') 48 | return path 49 | 50 | 51 | def strip_implicit_build_name(path): 52 | """Strips the implicit build names (such as BUILD) from the given path. 53 | 54 | Args: 55 | path: A path that may contain a BUILD name. 56 | 57 | Returns: 58 | The path with the name stripped. 59 | """ 60 | path = path.replace('/BUILD:', ':').replace('BUILD:', ':') 61 | path = path.replace('/BUILD.anvil:', ':').replace('BUILD.anvil:', ':') 62 | return path 63 | 64 | 65 | def get_script_path(): 66 | """Gets the absolute parent path of the currently executing script. 67 | 68 | Returns: 69 | Absolute path of the calling file. 
70 | """ 71 | return os.path.dirname(os.path.abspath(inspect.stack()[1][1])) 72 | 73 | 74 | def ensure_forwardslashes(path): 75 | """Fixes a potentially Windows-esque file path into a valid URI. 76 | 77 | Args: 78 | path: File path, maybe containing \\. 79 | 80 | Returns: 81 | The input path with the separators flipped to URI standard /. 82 | """ 83 | return string.replace(path, '\\', '/') 84 | 85 | 86 | def strip_build_paths(path): 87 | """Strips out build-*/ from the given path. 88 | 89 | Args: 90 | path: Path that may contain build-*/. 91 | 92 | Returns: 93 | The path with build-*/ removed. 94 | """ 95 | strip_paths = [ 96 | 'build-out%s' % os.sep, 97 | 'build-gen%s' % os.sep, 98 | 'build-out%s' % os.altsep, 99 | 'build-gen%s' % os.altsep, 100 | ] 101 | for strip_path in strip_paths: 102 | path = path.replace(strip_path, '') 103 | return path 104 | 105 | 106 | def is_rule_path(value): 107 | """Detects whether the given value is a rule name. 108 | 109 | Returns: 110 | True if the string is a valid rule name. 111 | """ 112 | if not isinstance(value, str) or not len(value): 113 | return False 114 | semicolon = string.rfind(value, ':') 115 | if semicolon < 0: 116 | return False 117 | # Must be just a valid literal after, no path separators 118 | if (string.find(value, '\\', semicolon) >= 0 or 119 | string.find(value, '/', semicolon) >= 0): 120 | return False 121 | return True 122 | 123 | 124 | def validate_names(values, require_semicolon=False): 125 | """Validates a list of rule names to ensure they are well-defined. 126 | 127 | Args: 128 | values: A list of values to validate. 129 | require_semicolon: Whether to require a : 130 | 131 | Raises: 132 | NameError: A rule value is not valid. 133 | TypeError: The type of values is incorrect. 
134 | """ 135 | if not values: 136 | return 137 | for value in values: 138 | if not isinstance(value, str) or not len(value): 139 | raise TypeError('Names must be a string of non-zero length') 140 | if len(value.strip()) != len(value): 141 | raise NameError( 142 | 'Names cannot have leading/trailing whitespace: "%s"' % (value)) 143 | if require_semicolon and not is_rule_path(value): 144 | raise NameError('Names must be a rule (contain a :): "%s"' % (value)) 145 | 146 | 147 | def underscore_to_pascalcase(value): 148 | """Converts a string from underscore_case to PascalCase. 149 | 150 | Args: 151 | value: Source string value. 152 | Example - hello_world 153 | 154 | Returns: 155 | The string, converted to PascalCase. 156 | Example - hello_world -> HelloWorld 157 | """ 158 | if not value: 159 | return value 160 | def __CapWord(seq): 161 | for word in seq: 162 | yield word.capitalize() 163 | return ''.join(__CapWord(word if word else '_' for word in value.split('_'))) 164 | 165 | 166 | def which(executable_name): 167 | """Gets the full path to the given executable. 168 | If the given path exists in the CWD or is already absolute it is returned. 169 | Otherwise this method will look through the system PATH to try to find it. 170 | 171 | Args: 172 | executable_name: Name or path to the executable. 173 | 174 | Returns: 175 | The full path to the executable or None if it was not found. 
176 | """ 177 | if (os.path.exists(executable_name) and 178 | not os.path.isdir(executable_name)): 179 | return os.path.abspath(executable_name) 180 | for path in os.environ.get('PATH', '').split(os.pathsep): 181 | if (os.path.exists(os.path.join(path, executable_name)) and 182 | not os.path.isdir(os.path.join(path, executable_name))): 183 | return os.path.join(path, executable_name) 184 | return None 185 | -------------------------------------------------------------------------------- /anvil/util_test.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | """Tests for the util module. 6 | """ 7 | 8 | __author__ = 'benvanik@google.com (Ben Vanik)' 9 | 10 | 11 | import sys 12 | import unittest2 13 | 14 | from anvil import util 15 | 16 | 17 | class IsRulePathTest(unittest2.TestCase): 18 | """Behavioral tests of the is_rule_path method.""" 19 | 20 | def testEmpty(self): 21 | self.assertFalse(util.is_rule_path(None)) 22 | self.assertFalse(util.is_rule_path('')) 23 | 24 | def testTypes(self): 25 | self.assertFalse(util.is_rule_path(4)) 26 | self.assertFalse(util.is_rule_path(['a'])) 27 | self.assertFalse(util.is_rule_path({'a': 1})) 28 | 29 | def testNames(self): 30 | self.assertTrue(util.is_rule_path(':a')) 31 | self.assertTrue(util.is_rule_path(':ab')) 32 | self.assertTrue(util.is_rule_path('xx:ab')) 33 | self.assertTrue(util.is_rule_path('/a/b:ab')) 34 | 35 | self.assertFalse(util.is_rule_path('a')) 36 | self.assertFalse(util.is_rule_path('/a/b.c')) 37 | self.assertFalse(util.is_rule_path('a b c')) 38 | 39 | 40 | class ValidateNamesTest(unittest2.TestCase): 41 | """Behavioral tests of the validate_names method.""" 42 | 43 | def testEmpty(self): 44 | util.validate_names(None) 45 | util.validate_names([]) 46 | 47 | def testNames(self): 48 | util.validate_names(['a']) 49 | util.validate_names([':a']) 50 | util.validate_names(['xx:a']) 51 | 
    util.validate_names(['/a/b:a'])
    util.validate_names(['/a/b.c:a'])
    util.validate_names(['/a/b.c/:a'])
    util.validate_names(['a', ':b'])
    with self.assertRaises(TypeError):
      util.validate_names([None])
    with self.assertRaises(TypeError):
      util.validate_names([''])
    with self.assertRaises(TypeError):
      util.validate_names([{}])
    with self.assertRaises(NameError):
      util.validate_names([' a'])
    with self.assertRaises(NameError):
      util.validate_names(['a '])
    with self.assertRaises(NameError):
      util.validate_names([' a '])
    with self.assertRaises(NameError):
      util.validate_names(['a', ' b'])

  def testRequireSemicolon(self):
    # With require_semicolon=True every name must parse as a rule path
    util.validate_names([':a'], require_semicolon=True)
    util.validate_names([':a', ':b'], require_semicolon=True)
    util.validate_names(['C:/a/:b'], require_semicolon=True)
    util.validate_names(['C:\\a\\:b'], require_semicolon=True)
    with self.assertRaises(NameError):
      util.validate_names(['a'], require_semicolon=True)
    with self.assertRaises(NameError):
      util.validate_names([':a', 'b'], require_semicolon=True)
    with self.assertRaises(NameError):
      util.validate_names([':/a'], require_semicolon=True)
    with self.assertRaises(NameError):
      util.validate_names([':\\a'], require_semicolon=True)
    with self.assertRaises(NameError):
      util.validate_names(['C:\\a'], require_semicolon=True)
    with self.assertRaises(NameError):
      util.validate_names(['C:\\a:\\b'], require_semicolon=True)
    with self.assertRaises(NameError):
      util.validate_names([':a/b'], require_semicolon=True)
    with self.assertRaises(NameError):
      util.validate_names(['a:b/a'], require_semicolon=True)


class UnderscoreToPascalCaseTest(unittest2.TestCase):
  """Behavioral tests of the underscore_to_pascalcase method."""

  def testEmpty(self):
    self.assertEqual(
        util.underscore_to_pascalcase(None),
        None)
    self.assertEqual(
        util.underscore_to_pascalcase(''),
        '')

  def testUnderscores(self):
    self.assertEqual(
        util.underscore_to_pascalcase('ab'),
        'Ab')
    self.assertEqual(
        util.underscore_to_pascalcase('aB'),
        'Ab')
    self.assertEqual(
        util.underscore_to_pascalcase('AB'),
        'Ab')
    self.assertEqual(
        util.underscore_to_pascalcase('a_b'),
        'AB')
    self.assertEqual(
        util.underscore_to_pascalcase('A_b'),
        'AB')
    self.assertEqual(
        util.underscore_to_pascalcase('aa_bb'),
        'AaBb')
    self.assertEqual(
        util.underscore_to_pascalcase('aa1_bb2'),
        'Aa1Bb2')
    self.assertEqual(
        util.underscore_to_pascalcase('1aa_2bb'),
        '1aa2bb')

  def testWhitespace(self):
    # Whitespace is not a word separator; only the first word is capitalized
    self.assertEqual(
        util.underscore_to_pascalcase(' '),
        ' ')
    self.assertEqual(
        util.underscore_to_pascalcase(' a'),
        ' a')
    self.assertEqual(
        util.underscore_to_pascalcase('a '),
        'A ')
    self.assertEqual(
        util.underscore_to_pascalcase(' a '),
        ' a ')
    self.assertEqual(
        util.underscore_to_pascalcase('a b'),
        'A b')
    self.assertEqual(
        util.underscore_to_pascalcase('a b'),
        'A b')

class WhichTest(unittest2.TestCase):
  """Behavioral tests of the which method."""

  @unittest2.skipUnless(sys.platform.startswith('win'), 'platform')
  def testWindows(self):
    notepad_path = 'C:\\Windows\\System32\\notepad.exe'
    self.assertEqual(util.which(notepad_path), notepad_path)
    self.assertIsNone(util.which('xxx'))
    self.assertIsNotNone(util.which('notepad.exe'))

  @unittest2.skipIf(sys.platform.startswith('win'), 'platform')
  def testUnix(self):
    self.assertEqual(util.which('/bin/sh'), '/bin/sh')
    self.assertIsNone(util.which('xxx'))
    self.assertIsNotNone(util.which('cat'))


if __name__ == '__main__':
  unittest2.main()
--------------------------------------------------------------------------------
/anvil/version.py:
--------------------------------------------------------------------------------
# Copyright 2012 Google Inc. All Rights Reserved.

"""Anvil version constants.
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


# TODO(benvanik): pull from somewhere?
VERSION = (0, 0, 1, 'dev')
VERSION_STR = '0.0.1dev'
--------------------------------------------------------------------------------
/run-coverage.sh:
--------------------------------------------------------------------------------
#!/bin/bash

# Copyright 2012 Google Inc. All Rights Reserved.

# This script runs the build unit tests with a coverage run and spits out
# the result HTML to scratch/coverage/

# TODO(benvanik): merge with run-tests.py?

# This must currently run from the root of the repo
# TODO(benvanik): make this runnable from anywhere (find git directory?)
if [ ! -d ".git" ]; then
  echo "This script must be run from the root of the repository (the folder containing .git)"
  exit 1
fi

# Get into a known-good initial state by removing everything
# (removes the possibility for confusing old output when runs fail)
if [ -e ".coverage" ]; then
  rm .coverage
fi
if [ -d "scratch/coverage" ]; then
  rm -rf scratch/coverage
fi

# Run all unit tests with coverage
coverage run --branch ./run-tests.py

# Dump to console (so you see *something*)
coverage report -m

# Output HTML report
coverage html -d scratch/coverage/

# Cleanup the coverage temp data, as it's unused and regenerated
if [ -e ".coverage" ]; then
  rm .coverage
fi
--------------------------------------------------------------------------------
/run-tests.py:
--------------------------------------------------------------------------------
#!/usr/bin/python

# Copyright 2012 Google Inc. All Rights Reserved.

"""Python build system test runner.
In order to speed things up (and avoid some platform incompatibilities) this
script should be used instead of unit2 or python -m unittest.
"""

__author__ = 'benvanik@google.com (Ben Vanik)'


import os
import sys


def main():
  # Add self to the root search path
  sys.path.insert(0, os.path.abspath(os.path.dirname(__file__)))

  # Run the tests
  import anvil.test
  anvil.test.main()


if __name__ == '__main__':
  main()
--------------------------------------------------------------------------------
/setup-local.bat:
--------------------------------------------------------------------------------
@ECHO OFF

REM Copyright 2012 Google Inc. All Rights Reserved.
REM
REM wtf Windows setup script
REM Sets up a local virtualenv for anvil.
7 | REM This places all dependencies within the anvil-build/ path such that nothing 8 | REM from site-packages is used. In order to make use of this consumers should 9 | REM invoke anvil-local.bat instead of the global 'anvil'. 10 | 11 | SET DIR=%~dp0 12 | 13 | REM Visual Studio 2010 14 | SET VS90COMNTOOLS=%VS100COMNTOOLS% 15 | REM Visual Studio 2012 16 | REM SET VS90COMNTOOLS=%VS110COMNTOOLS% 17 | 18 | ECHO Installing virtualenv (1.8.2)... 19 | 20 | pip install virtualenv==1.8.2 21 | 22 | ECHO Setting up the virtual environment... 23 | 24 | virtualenv %DIR%\local_virtualenv 25 | 26 | ECHO Preparing virtualenv... 27 | 28 | REM Instead of using active we need to do it manually - the Windows 29 | REM activate script doesn't return control back to this script when run. 30 | SET VIRTUAL_ENV=%DIR%\local_virtualenv 31 | SET PATH=%VIRTUAL_ENV%\Scripts;%PATH% 32 | REM %DIR%\local_virtualenv\Scripts\activate 33 | 34 | ECHO Repeatedly installing twisted, as python still doesn't support VS2010... 35 | FOR %%A IN (1 2 3 4 5) DO pip install twisted 36 | 37 | ECHO Installing anvil-build... 38 | cd %DIR% 39 | python setup.py develop 40 | -------------------------------------------------------------------------------- /setup-local.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | # Sets up a local virtualenv for anvil. 6 | # This places all dependencies within the anvil-build/ path such that nothing 7 | # from site-packages is used. In order to make use of this consumers should 8 | # invoke anvil-local.sh instead of the global 'anvil'. 9 | 10 | 11 | DIR="$( cd "$( dirname "$0" )" && pwd )" 12 | 13 | 14 | # Ensure virtualenv is present. 15 | if [ ! -e "$(which virtualenv)" ]; then 16 | echo "virtualenv not found - installing..." 17 | if [ -e "$(which pip)" ]; then 18 | # Always install 1.8.2 on Windows - 1.8.4 is broken. 
19 | # See: https://github.com/pypa/virtualenv/issues/373 20 | if [ -e "/Cygwin.bat" ]; then 21 | pip install virtualenv==1.8.2 22 | else 23 | sudo pip install virtualenv 24 | fi 25 | elif [ -e "$(which easy_install)" ]; then 26 | sudo easy_install virtualenv 27 | else 28 | echo "No python package installer found - aborting" 29 | echo "(get pip or easy_install)" 30 | exit 1 31 | fi 32 | fi 33 | 34 | # Setup the virtual environment. 35 | virtualenv $DIR/local_virtualenv 36 | 37 | # Install there. 38 | source $DIR/local_virtualenv/bin/activate 39 | cd $DIR 40 | echo "running setup.py develop, this may take a moment..." 41 | python setup.py --quiet develop 42 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | # Copyright 2012 Google Inc. All Rights Reserved. 4 | 5 | """ 6 | Anvil 7 | ----- 8 | 9 | A parallel build system and content pipeline.
10 | 11 | """ 12 | 13 | __author__ = 'benvanik@google.com (Ben Vanik)' 14 | 15 | 16 | import sys 17 | from setuptools import setup 18 | 19 | 20 | # Require Python 2.6+ 21 | if sys.version_info < (2, 6): 22 | raise RuntimeError('Python 2.6 or higher required') 23 | 24 | 25 | # Pull from the version py 26 | import anvil.version 27 | VERSION = anvil.version.VERSION_STR 28 | 29 | CLASSIFIERS = [ 30 | 'Development Status :: 2 - Pre-Alpha', 31 | 'Environment :: Console', 32 | 'Intended Audience :: Developers', 33 | 'License :: OSI Approved :: Apache Software License', 34 | 'Operating System :: OS Independent', 35 | 'Programming Language :: Python', 36 | 'Programming Language :: Python :: 2.6', 37 | 'Programming Language :: Python :: 2.7', 38 | 'Topic :: Software Development :: Build Tools', 39 | 'Topic :: Software Development :: Libraries :: Python Modules', 40 | 'Topic :: Utilities', 41 | ] 42 | 43 | INSTALL_REQUIRES = [ 44 | 'argparse>=1.2.1', 45 | 'autobahn>=0.5.1', 46 | 'blessings>=1.6', 47 | 'glob2>=0.3', 48 | 'networkx==1.7', 49 | 'pip>=1.1', 50 | 'Sphinx>=1.1.3', 51 | 'twisted>=15', 52 | 'watchdog>=0.6', 53 | 54 | # Needed for the linter - easier to just have here 55 | 'python-gflags', 56 | ] 57 | 58 | TESTS_REQUIRE = [ 59 | 'coverage>=3.5.1', 60 | 'unittest2>=0.5.1', 61 | ] 62 | 63 | 64 | setup( 65 | name='anvil-build', 66 | version=VERSION, 67 | author='Ben Vanik', 68 | author_email='benvanik@google.com', 69 | description='A parallel build system and content pipeline', 70 | long_description=__doc__, 71 | classifiers=CLASSIFIERS, 72 | url='https://github.com/google/anvil-build/', 73 | download_url='https://github.com/google/anvil-build/tarball/master#egg=anvil-build-%s' % VERSION, 74 | license='Apache License 2.0', 75 | platforms='any', 76 | install_requires=INSTALL_REQUIRES, 77 | tests_require=TESTS_REQUIRE, 78 | extras_require={ 79 | 'test': TESTS_REQUIRE, 80 | }, 81 | packages=[ 82 | 'anvil', 83 | 'anvil.commands', 84 | 'anvil.rules', 85 | ], 86 | 
include_package_data=True, 87 | package_data={ 88 | 'anvil.commands': ['*_command.py'], 89 | 'anvil.rules': ['*_rules.py'], 90 | }, 91 | test_suite='anvil.test.collector', 92 | # We dynamically load command/rule py files - would need to use 93 | # pkg_resources or something else to be zip safe 94 | # http://www.no-ack.org/2010/09/including-data-files-into-python.html 95 | zip_safe=False, 96 | entry_points = { 97 | 'console_scripts': [ 98 | 'anvil = anvil.manage:main', 99 | ], 100 | }) 101 | 102 | -------------------------------------------------------------------------------- /test/fixtures/cache/dummy.txt: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/google/anvil-build/01f6f89426308904cc5eaf9f7ab64038f84ea191/test/fixtures/cache/dummy.txt -------------------------------------------------------------------------------- /test/fixtures/config/.anvilrc: -------------------------------------------------------------------------------- 1 | [a] 2 | opt=hello 3 | -------------------------------------------------------------------------------- /test/fixtures/config/deep/.anvilrc: -------------------------------------------------------------------------------- 1 | [a] 2 | opt=world 3 | 4 | [b] 5 | opt=another 6 | -------------------------------------------------------------------------------- /test/fixtures/config/deep/none/dummy.txt: -------------------------------------------------------------------------------- 1 | dummy 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/concat_files/1.txt: -------------------------------------------------------------------------------- 1 | 1 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/concat_files/2.txt: -------------------------------------------------------------------------------- 1 | 2 2 | 
-------------------------------------------------------------------------------- /test/fixtures/core_rules/concat_files/3.txt: -------------------------------------------------------------------------------- 1 | 3 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/concat_files/4.txt: -------------------------------------------------------------------------------- 1 | 4 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/concat_files/BUILD: -------------------------------------------------------------------------------- 1 | concat_files( 2 | name='concat', 3 | srcs=['1.txt', '2.txt', '3.txt', '4.txt']) 4 | 5 | concat_files( 6 | name='concat_out', 7 | srcs=['1.txt', '2.txt', '3.txt', '4.txt'], 8 | out='concat.txt') 9 | 10 | concat_files( 11 | name='concat_template', 12 | srcs=[':concat_out', 't.txt', ':concat_out']) 13 | template_files( 14 | name='templated', 15 | srcs=[':concat_template',], 16 | new_extension='.out', 17 | params={'hello': 'world!'}) 18 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/concat_files/t.txt: -------------------------------------------------------------------------------- 1 | x${hello}x 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/copy_files/BUILD: -------------------------------------------------------------------------------- 1 | file_set('a', srcs='a.txt') 2 | file_set('all_txt', srcs=glob('**/*.txt')) 3 | 4 | copy_files('copy_all_txt', srcs=':all_txt') 5 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/copy_files/a.txt: -------------------------------------------------------------------------------- 1 | a 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/copy_files/dir/BUILD: 
-------------------------------------------------------------------------------- 1 | file_set('b', srcs='b.txt') 2 | file_set('c', srcs='c.not-txt') 3 | 4 | copy_files('copy_c', srcs=':c') 5 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/copy_files/dir/b.txt: -------------------------------------------------------------------------------- 1 | b 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/copy_files/dir/c.not-txt: -------------------------------------------------------------------------------- 1 | c 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/file_set/BUILD: -------------------------------------------------------------------------------- 1 | file_set('a', srcs='a.txt') 2 | file_set('a_glob', srcs=glob('*.txt')) 3 | 4 | file_set('b_ref', srcs='dir:b') 5 | 6 | file_set('all_glob', srcs=glob('**/*.txt')) 7 | 8 | file_set('combo', srcs=[':a', ':b_ref']) 9 | file_set('dupes', srcs=[':a', 'a.txt', ':b_ref', 'dir:b', ':combo']) 10 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/file_set/a.txt: -------------------------------------------------------------------------------- 1 | a 2 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/file_set/dir/BUILD: -------------------------------------------------------------------------------- 1 | file_set('b', srcs='b.txt') 2 | file_set('b_glob', srcs=glob('*.txt')) 3 | -------------------------------------------------------------------------------- /test/fixtures/core_rules/file_set/dir/b.txt: -------------------------------------------------------------------------------- 1 | b 2 | -------------------------------------------------------------------------------- /test/fixtures/custom_rules/rules/BUILD: 
-------------------------------------------------------------------------------- 1 | file_set(name='all_rules', srcs=glob('**/*_rules.py')) 2 | -------------------------------------------------------------------------------- /test/fixtures/custom_rules/rules/other_rules.py: -------------------------------------------------------------------------------- 1 | from anvil.rule import Rule, build_rule 2 | 3 | 4 | @build_rule('other_rule') 5 | class OtherRule(Rule): 6 | def __init__(self, name, *args, **kwargs): 7 | super(OtherRule, self).__init__(name, *args, **kwargs) 8 | -------------------------------------------------------------------------------- /test/fixtures/custom_rules/rules/some_rules.py: -------------------------------------------------------------------------------- 1 | from anvil.rule import Rule, build_rule 2 | 3 | 4 | @build_rule('some_rule') 5 | class SomeRule(Rule): 6 | def __init__(self, name, *args, **kwargs): 7 | super(SomeRule, self).__init__(name, *args, **kwargs) 8 | -------------------------------------------------------------------------------- /test/fixtures/manage/bad_commands/bad_commands.py: -------------------------------------------------------------------------------- 1 | from anvil.manage import ManageCommand 2 | 3 | 4 | class TestCommand(ManageCommand): 5 | def __init__(self): 6 | super(TestCommand, self).__init__(name='test_command') 7 | 8 | 9 | class TestCommand1(ManageCommand): 10 | def __init__(self): 11 | super(TestCommand1, self).__init__(name='test_command') 12 | -------------------------------------------------------------------------------- /test/fixtures/manage/commands/test_commands.py: -------------------------------------------------------------------------------- 1 | from anvil.manage import ManageCommand 2 | 3 | 4 | class TestCommand(ManageCommand): 5 | def __init__(self): 6 | super(TestCommand, self).__init__(name='test_command') 7 | 8 | def execute(self, args, cwd): 9 | return 123 10 | 
-------------------------------------------------------------------------------- /test/fixtures/preprocessor_rules/template_files/BUILD: -------------------------------------------------------------------------------- 1 | file_set('a', srcs='a.txt') 2 | template_files( 3 | name='template_a', 4 | srcs='a.txt', 5 | params={'hello': 'world_a',}) 6 | template_files( 7 | name='template_a_rule', 8 | srcs=':a', 9 | params={'hello': 'world_a_rule',}) 10 | 11 | file_set('all_glob', srcs=glob('**/*.txt')) 12 | template_files( 13 | name='template_all', 14 | srcs=':all_glob', 15 | params={'hello': 'world',}) 16 | 17 | template_files( 18 | name='template_dep_1', 19 | srcs=glob('**/*.nfo'), 20 | params={'arg1': '${arg2}',}) 21 | template_files( 22 | name='template_dep_2', 23 | srcs=':template_dep_1', 24 | new_extension='.out', 25 | params={'arg2': 'world!',}) 26 | -------------------------------------------------------------------------------- /test/fixtures/preprocessor_rules/template_files/a.nfo: -------------------------------------------------------------------------------- 1 | 123${arg1}456 2 | -------------------------------------------------------------------------------- /test/fixtures/preprocessor_rules/template_files/a.txt: -------------------------------------------------------------------------------- 1 | 123${hello}456 2 | -------------------------------------------------------------------------------- /test/fixtures/preprocessor_rules/template_files/dir/BUILD: -------------------------------------------------------------------------------- 1 | file_set('b', srcs='b.txt') 2 | template_files( 3 | name='template_b', 4 | srcs='b.txt', 5 | params={'hello': 'world_b',}) 6 | template_files( 7 | name='template_b_rule', 8 | srcs=':b', 9 | params={'hello': 'world_b_rule',}) 10 | -------------------------------------------------------------------------------- /test/fixtures/preprocessor_rules/template_files/dir/b.nfo: 
-------------------------------------------------------------------------------- 1 | b123${arg1}456 2 | -------------------------------------------------------------------------------- /test/fixtures/preprocessor_rules/template_files/dir/b.txt: -------------------------------------------------------------------------------- 1 | b123${hello}456 2 | -------------------------------------------------------------------------------- /test/fixtures/resolution/BUILD: -------------------------------------------------------------------------------- 1 | file_set('root_rule', deps=['a:rule_a']) 2 | -------------------------------------------------------------------------------- /test/fixtures/resolution/a/BUILD: -------------------------------------------------------------------------------- 1 | file_set('rule_a', deps=['../b:rule_b']) 2 | -------------------------------------------------------------------------------- /test/fixtures/resolution/b/BUILD: -------------------------------------------------------------------------------- 1 | file_set('rule_b', deps=['c:rule_c']) 2 | -------------------------------------------------------------------------------- /test/fixtures/resolution/b/c/BUILD: -------------------------------------------------------------------------------- 1 | file_set('rule_c', deps=['build_file.py:rule_c_file']) 2 | -------------------------------------------------------------------------------- /test/fixtures/resolution/b/c/build_file.py: -------------------------------------------------------------------------------- 1 | file_set('rule_c_file') 2 | -------------------------------------------------------------------------------- /test/fixtures/resolution/empty/dummy: -------------------------------------------------------------------------------- 1 | hi 2 | -------------------------------------------------------------------------------- /test/fixtures/rules/dummy_rules.py: -------------------------------------------------------------------------------- 1 | 
# Dummy rule types for testing rules 2 | 3 | 4 | from anvil.rule import Rule, build_rule 5 | 6 | 7 | @build_rule('rule_a') 8 | class RuleA(Rule): 9 | def __init__(self, name, *args, **kwargs): 10 | super(RuleA, self).__init__(name, *args, **kwargs) 11 | 12 | 13 | @build_rule('rule_b') 14 | class RuleB(Rule): 15 | def __init__(self, name, *args, **kwargs): 16 | super(RuleB, self).__init__(name, *args, **kwargs) 17 | -------------------------------------------------------------------------------- /test/fixtures/rules/dupe.py: -------------------------------------------------------------------------------- 1 | # File with duplicate rules 2 | 3 | 4 | from anvil.rule import Rule, build_rule 5 | 6 | 7 | @build_rule('rule_d') 8 | class RuleD1(Rule): 9 | def __init__(self, name, *args, **kwargs): 10 | super(RuleD1, self).__init__(name, *args, **kwargs) 11 | 12 | 13 | @build_rule('rule_d') 14 | class RuleD2(Rule): 15 | def __init__(self, name, *args, **kwargs): 16 | super(RuleD2, self).__init__(name, *args, **kwargs) 17 | -------------------------------------------------------------------------------- /test/fixtures/rules/more/more_rules.py: -------------------------------------------------------------------------------- 1 | # More (nested) rule types for testing rules 2 | 3 | 4 | from anvil.rule import Rule, build_rule 5 | 6 | 7 | @build_rule('rule_c') 8 | class RuleC(Rule): 9 | def __init__(self, name, *args, **kwargs): 10 | super(RuleC, self).__init__(name, *args, **kwargs) 11 | -------------------------------------------------------------------------------- /test/fixtures/rules/rule_x.py: -------------------------------------------------------------------------------- 1 | # Dummy file - this rule should not be discovered 2 | 3 | 4 | from anvil.rule import Rule, build_rule 5 | 6 | 7 | @build_rule('rule_x') 8 | class RuleX(Rule): 9 | def __init__(self, name, *args, **kwargs): 10 | super(RuleX, self).__init__(name, *args, **kwargs) 11 | 
-------------------------------------------------------------------------------- /test/fixtures/simple/BUILD: -------------------------------------------------------------------------------- 1 | # Simple sample build file 2 | # Does nothing but provide some rules 3 | 4 | file_set('a', 5 | srcs=['a.txt']) 6 | 7 | file_set('b', 8 | srcs=['b.txt']) 9 | 10 | file_set('c', 11 | srcs=['c.txt'], 12 | deps=[':a', ':b']) 13 | 14 | file_set('local_txt', 15 | srcs=glob('*.txt')) 16 | file_set('recursive_txt', 17 | srcs=glob('**/*.txt')) 18 | file_set('missing_txt', 19 | srcs='x.txt') 20 | file_set('missing_glob_txt', 21 | srcs=glob('*.notpresent')) 22 | 23 | file_set('local_txt_filter', 24 | srcs=glob('*'), 25 | src_filter='*.txt') 26 | file_set('recursive_txt_filter', 27 | srcs=glob('**/*'), 28 | src_filter='*.txt') 29 | file_set('exclude_txt_filter', 30 | srcs=glob('*'), 31 | src_exclude_filter='*.txt') 32 | file_set('include_exclude_filter', 33 | srcs=glob('*'), 34 | src_filter='*.txt-a|*.txt-b', 35 | src_exclude_filter='*.txt') 36 | 37 | file_set('file_input', 38 | srcs='a.txt') 39 | file_set('rule_input', 40 | srcs=':file_input') 41 | file_set('mixed_input', 42 | srcs=['b.txt', ':file_input']) 43 | file_set('missing_input', 44 | srcs=':x') 45 | 46 | file_set('multi_exts', 47 | srcs=['a.txt-a', 'b.txt-b', 'c.txt-c']) 48 | file_set('only_a', 49 | srcs=':multi_exts', 50 | src_filter='*.txt-a') 51 | file_set('only_ab', 52 | srcs=':multi_exts', 53 | src_filter='*.txt-a|*.txt-b') 54 | -------------------------------------------------------------------------------- /test/fixtures/simple/a.txt: -------------------------------------------------------------------------------- 1 | hello! 
2 | -------------------------------------------------------------------------------- /test/fixtures/simple/a.txt-a: -------------------------------------------------------------------------------- 1 | a 2 | -------------------------------------------------------------------------------- /test/fixtures/simple/b.txt: -------------------------------------------------------------------------------- 1 | world! 2 | -------------------------------------------------------------------------------- /test/fixtures/simple/b.txt-b: -------------------------------------------------------------------------------- 1 | b 2 | -------------------------------------------------------------------------------- /test/fixtures/simple/c.txt: -------------------------------------------------------------------------------- 1 | !!! 2 | -------------------------------------------------------------------------------- /test/fixtures/simple/c.txt-c: -------------------------------------------------------------------------------- 1 | c 2 | -------------------------------------------------------------------------------- /test/fixtures/simple/dir/dir_2/BUILD: -------------------------------------------------------------------------------- 1 | file_set('d',srcs=['d.txt']) 2 | -------------------------------------------------------------------------------- /test/fixtures/simple/dir/dir_2/d.txt: -------------------------------------------------------------------------------- 1 | !!! 2 | -------------------------------------------------------------------------------- /test/fixtures/simple/dir/dir_2/e.txt: -------------------------------------------------------------------------------- 1 | !!! 2 | -------------------------------------------------------------------------------- /test/fixtures/simple/dir/dir_2/f.not-txt: -------------------------------------------------------------------------------- 1 | !!! 
2 | -------------------------------------------------------------------------------- /test/fixtures/simple/g.not-txt: -------------------------------------------------------------------------------- 1 | !!! 2 | --------------------------------------------------------------------------------