├── .gitattributes ├── .gitignore ├── LICENSE ├── MANIFEST.in ├── Makefile ├── Pipfile ├── Pipfile.lock ├── README.md ├── setup.cfg ├── setup.py ├── spar ├── __init__.py ├── __main__.py ├── cli.py ├── data │ └── distributions │ │ ├── cpl_dist.pkl │ │ ├── instance_cpu_dist.pkl │ │ ├── instance_duration_dist.pkl │ │ ├── instance_mem_dist.pkl │ │ ├── instance_num_dist.pkl │ │ ├── job_interval_dist.pkl │ │ ├── level_dist.pkl │ │ ├── task_cpu_dist.pkl │ │ ├── task_duration_dist.pkl │ │ ├── task_mem_dist.pkl │ │ └── task_num_dist.pkl ├── generate.py ├── io.py ├── progress.py ├── transform.py └── utils.py └── tests ├── context.py ├── test_generate.py ├── test_progress.py └── test_utils.py /.gitattributes: -------------------------------------------------------------------------------- 1 | spar/data/samples/sample_tasks.csv filter=lfs diff=lfs merge=lfs -text 2 | spar/data/samples/sample_instances.csv filter=lfs diff=lfs merge=lfs -text 3 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | # Created by https://www.gitignore.io/api/linux,macos,python,windows,pycharm,sublimetext 3 | # Edit at https://www.gitignore.io/?templates=linux,macos,python,windows,pycharm,sublimetext 4 | 5 | ### Linux ### 6 | *~ 7 | 8 | # temporary files which can be created if a process still has a handle open of a deleted file 9 | .fuse_hidden* 10 | 11 | # KDE directory preferences 12 | .directory 13 | 14 | # Linux trash folder which might appear on any partition or disk 15 | .Trash-* 16 | 17 | # .nfs files are created when an open file is removed but is still being accessed 18 | .nfs* 19 | 20 | ### macOS ### 21 | # General 22 | .DS_Store 23 | .AppleDouble 24 | .LSOverride 25 | 26 | # Icon must end with two \r 27 | Icon 28 | 29 | # Thumbnails 30 | ._* 31 | 32 | # Files that might appear in the root of a volume 33 | .DocumentRevisions-V100 34 | .fseventsd 35 | 
.Spotlight-V100 36 | .TemporaryItems 37 | .Trashes 38 | .VolumeIcon.icns 39 | .com.apple.timemachine.donotpresent 40 | 41 | # Directories potentially created on remote AFP share 42 | .AppleDB 43 | .AppleDesktop 44 | Network Trash Folder 45 | Temporary Items 46 | .apdisk 47 | 48 | ### PyCharm ### 49 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and WebStorm 50 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 51 | 52 | # User-specific stuff 53 | .idea/**/workspace.xml 54 | .idea/**/tasks.xml 55 | .idea/**/usage.statistics.xml 56 | .idea/**/dictionaries 57 | .idea/**/shelf 58 | 59 | # Generated files 60 | .idea/**/contentModel.xml 61 | 62 | # Sensitive or high-churn files 63 | .idea/**/dataSources/ 64 | .idea/**/dataSources.ids 65 | .idea/**/dataSources.local.xml 66 | .idea/**/sqlDataSources.xml 67 | .idea/**/dynamic.xml 68 | .idea/**/uiDesigner.xml 69 | .idea/**/dbnavigator.xml 70 | 71 | # Gradle 72 | .idea/**/gradle.xml 73 | .idea/**/libraries 74 | 75 | # Gradle and Maven with auto-import 76 | # When using Gradle or Maven with auto-import, you should exclude module files, 77 | # since they will be recreated, and may cause churn. Uncomment if using 78 | # auto-import. 
79 | # .idea/modules.xml 80 | # .idea/*.iml 81 | # .idea/modules 82 | # *.iml 83 | # *.ipr 84 | 85 | # CMake 86 | cmake-build-*/ 87 | 88 | # Mongo Explorer plugin 89 | .idea/**/mongoSettings.xml 90 | 91 | # File-based project format 92 | *.iws 93 | 94 | # IntelliJ 95 | out/ 96 | 97 | # mpeltonen/sbt-idea plugin 98 | .idea_modules/ 99 | 100 | # JIRA plugin 101 | atlassian-ide-plugin.xml 102 | 103 | # Cursive Clojure plugin 104 | .idea/replstate.xml 105 | 106 | # Crashlytics plugin (for Android Studio and IntelliJ) 107 | com_crashlytics_export_strings.xml 108 | crashlytics.properties 109 | crashlytics-build.properties 110 | fabric.properties 111 | 112 | # Editor-based Rest Client 113 | .idea/httpRequests 114 | 115 | # Android studio 3.1+ serialized cache file 116 | .idea/caches/build_file_checksums.ser 117 | 118 | ### PyCharm Patch ### 119 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721 120 | 121 | # *.iml 122 | # modules.xml 123 | # .idea/misc.xml 124 | # *.ipr 125 | 126 | # Sonarlint plugin 127 | .idea/sonarlint 128 | 129 | ### Python ### 130 | # Byte-compiled / optimized / DLL files 131 | __pycache__/ 132 | *.py[cod] 133 | *$py.class 134 | 135 | # C extensions 136 | *.so 137 | 138 | # Distribution / packaging 139 | .Python 140 | build/ 141 | develop-eggs/ 142 | dist/ 143 | downloads/ 144 | eggs/ 145 | .eggs/ 146 | lib/ 147 | lib64/ 148 | parts/ 149 | sdist/ 150 | var/ 151 | wheels/ 152 | pip-wheel-metadata/ 153 | share/python-wheels/ 154 | *.egg-info/ 155 | .installed.cfg 156 | *.egg 157 | MANIFEST 158 | 159 | # PyInstaller 160 | # Usually these files are written by a python script from a template 161 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
162 | *.manifest 163 | *.spec 164 | 165 | # Installer logs 166 | pip-log.txt 167 | pip-delete-this-directory.txt 168 | 169 | # Unit test / coverage reports 170 | htmlcov/ 171 | .tox/ 172 | .nox/ 173 | .coverage 174 | .coverage.* 175 | .cache 176 | nosetests.xml 177 | coverage.xml 178 | *.cover 179 | .hypothesis/ 180 | .pytest_cache/ 181 | 182 | # Translations 183 | *.mo 184 | *.pot 185 | 186 | # Django stuff: 187 | *.log 188 | local_settings.py 189 | db.sqlite3 190 | db.sqlite3-journal 191 | 192 | # Flask stuff: 193 | instance/ 194 | .webassets-cache 195 | 196 | # Scrapy stuff: 197 | .scrapy 198 | 199 | # Sphinx documentation 200 | docs/_build/ 201 | 202 | # PyBuilder 203 | target/ 204 | 205 | # Jupyter Notebook 206 | .ipynb_checkpoints 207 | 208 | # IPython 209 | profile_default/ 210 | ipython_config.py 211 | 212 | # pyenv 213 | .python-version 214 | 215 | # pipenv 216 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 217 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 218 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 219 | # install all needed dependencies. 
220 | #Pipfile.lock 221 | 222 | # celery beat schedule file 223 | celerybeat-schedule 224 | 225 | # SageMath parsed files 226 | *.sage.py 227 | 228 | # Environments 229 | .env 230 | .venv 231 | env/ 232 | venv/ 233 | ENV/ 234 | env.bak/ 235 | venv.bak/ 236 | 237 | # Spyder project settings 238 | .spyderproject 239 | .spyproject 240 | 241 | # Rope project settings 242 | .ropeproject 243 | 244 | # mkdocs documentation 245 | /site 246 | 247 | # mypy 248 | .mypy_cache/ 249 | .dmypy.json 250 | dmypy.json 251 | 252 | # Pyre type checker 253 | .pyre/ 254 | 255 | ### SublimeText ### 256 | # Cache files for Sublime Text 257 | *.tmlanguage.cache 258 | *.tmPreferences.cache 259 | *.stTheme.cache 260 | 261 | # Workspace files are user-specific 262 | *.sublime-workspace 263 | 264 | # Project files should be checked into the repository, unless a significant 265 | # proportion of contributors will probably not be using Sublime Text 266 | # *.sublime-project 267 | 268 | # SFTP configuration file 269 | sftp-config.json 270 | 271 | # Package control specific files 272 | Package Control.last-run 273 | Package Control.ca-list 274 | Package Control.ca-bundle 275 | Package Control.system-ca-bundle 276 | Package Control.cache/ 277 | Package Control.ca-certs/ 278 | Package Control.merged-ca-bundle 279 | Package Control.user-ca-bundle 280 | oscrypto-ca-bundle.crt 281 | bh_unicode_properties.cache 282 | 283 | # Sublime-github package stores a github token in this file 284 | # https://packagecontrol.io/packages/sublime-github 285 | GitHub.sublime-settings 286 | 287 | ### Windows ### 288 | # Windows thumbnail cache files 289 | Thumbs.db 290 | Thumbs.db:encryptable 291 | ehthumbs.db 292 | ehthumbs_vista.db 293 | 294 | # Dump file 295 | *.stackdump 296 | 297 | # Folder config file 298 | [Dd]esktop.ini 299 | 300 | # Recycle Bin used on file shares 301 | $RECYCLE.BIN/ 302 | 303 | # Windows Installer files 304 | *.cab 305 | *.msi 306 | *.msix 307 | *.msm 308 | *.msp 309 | 310 | # Windows shortcuts 
311 | *.lnk 312 | 313 | # End of https://www.gitignore.io/api/linux,macos,python,windows,pycharm,sublimetext 314 | 315 | ## Project Specific 316 | spar/data/samples/* 317 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | ----------- 3 | 4 | Copyright (c) 2019 All-less 5 | Permission is hereby granted, free of charge, to any person 6 | obtaining a copy of this software and associated documentation 7 | files (the "Software"), to deal in the Software without 8 | restriction, including without limitation the rights to use, 9 | copy, modify, merge, publish, distribute, sublicense, and/or sell 10 | copies of the Software, and to permit persons to whom the 11 | Software is furnished to do so, subject to the following 12 | conditions: 13 | 14 | The above copyright notice and this permission notice shall be 15 | included in all copies or substantial portions of the Software. 16 | 17 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 18 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 19 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 20 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 21 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 22 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 23 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 24 | OTHER DEALINGS IN THE SOFTWARE. 
25 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include LICENSE 2 | include Makefile 3 | include Pipfile 4 | include spar/data/samples/*.csv 5 | include spar/data/distributions/*.pkl 6 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean 2 | 3 | clean: 4 | rm -rf *.egg-info 5 | rm -rf build dist 6 | 7 | init: 8 | pipenv --three 9 | pipenv install 10 | pipenv install --dev 11 | 12 | uninstall: 13 | pip uninstall spar 14 | 15 | local-install: 16 | pip install --no-cache-dir dist/spar*.tar.gz 17 | 18 | build: 19 | python setup.py sdist bdist_wheel 20 | 21 | test-publish: 22 | twine upload -r pypitest dist/spar* 23 | 24 | test-install: 25 | pip install --no-cache-dir --index-url https://test.pypi.org/simple/ spar 26 | 27 | publish: 28 | twine upload -r pypi dist/spar* 29 | -------------------------------------------------------------------------------- /Pipfile: -------------------------------------------------------------------------------- 1 | [[source]] 2 | url = "https://pypi.org/simple" 3 | verify_ssl = true 4 | name = "pypi" 5 | 6 | [packages] 7 | click = "*" 8 | scipy = "*" 9 | crayons = "*" 10 | numpy = "*" 11 | 12 | [dev-packages] 13 | pytest = "*" 14 | wheel = "*" 15 | twine = "*" 16 | keyring = "*" 17 | check-manifest = "*" 18 | 19 | [requires] 20 | python_version = "3.7" 21 | -------------------------------------------------------------------------------- /Pipfile.lock: -------------------------------------------------------------------------------- 1 | { 2 | "_meta": { 3 | "hash": { 4 | "sha256": "8a4043612c65aa2db94349ad57b5321dd2abebd7889845be0c9f3aa98d65a10d" 5 | }, 6 | "pipfile-spec": 6, 7 | "requires": { 8 | "python_version": "3.7" 9 | }, 10 | "sources": [ 11 | { 12 | "name": 
"pypi", 13 | "url": "https://pypi.org/simple", 14 | "verify_ssl": true 15 | } 16 | ] 17 | }, 18 | "default": { 19 | "click": { 20 | "hashes": [ 21 | "sha256:2335065e6395b9e67ca716de5f7526736bfa6ceead690adf616d925bdc622b13", 22 | "sha256:5b94b49521f6456670fdb30cd82a4eca9412788a93fa6dd6df72c94d5a8ff2d7" 23 | ], 24 | "index": "pypi", 25 | "version": "==7.0" 26 | }, 27 | "colorama": { 28 | "hashes": [ 29 | "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", 30 | "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6" 31 | ], 32 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5, 3.6'", 33 | "version": "==0.4.6" 34 | }, 35 | "crayons": { 36 | "hashes": [ 37 | "sha256:50e5fa729d313e2c607ae8bf7b53bb487652e10bd8e7a1e08c4bc8bf62755ffc", 38 | "sha256:8c9e4a3a607bc10e9a9140d496ecd16c6805088dd16c852c378f1f1d5db7aeb6" 39 | ], 40 | "index": "pypi", 41 | "version": "==0.3.0" 42 | }, 43 | "numpy": { 44 | "hashes": [ 45 | "sha256:0b0dd8f47fb177d00fa6ef2d58783c4f41ad3126b139c91dd2f7c4b3fdf5e9a5", 46 | "sha256:25ffe71f96878e1da7e014467e19e7db90ae7d4e12affbc73101bcf61785214e", 47 | "sha256:26efd7f7d755e6ca966a5c0ac5a930a87dbbaab1c51716ac26a38f42ecc9bc4b", 48 | "sha256:28b1180c758abf34a5c3fea76fcee66a87def1656724c42bb14a6f9717a5bdf7", 49 | "sha256:2e418f0a59473dac424f888dd57e85f77502a593b207809211c76e5396ae4f5c", 50 | "sha256:30c84e3a62cfcb9e3066f25226e131451312a044f1fe2040e69ce792cb7de418", 51 | "sha256:4650d94bb9c947151737ee022b934b7d9a845a7c76e476f3e460f09a0c8c6f39", 52 | "sha256:4dd830a11e8724c9c9379feed1d1be43113f8bcce55f47ea7186d3946769ce26", 53 | "sha256:4f2a2b279efde194877aff1f76cf61c68e840db242a5c7169f1ff0fd59a2b1e2", 54 | "sha256:62d22566b3e3428dfc9ec972014c38ed9a4db4f8969c78f5414012ccd80a149e", 55 | "sha256:669795516d62f38845c7033679c648903200980d68935baaa17ac5c7ae03ae0c", 56 | "sha256:75fcd60d682db3e1f8fbe2b8b0c6761937ad56d01c1dc73edf4ef2748d5b6bc4", 57 | 
"sha256:9395b0a41e8b7e9a284e3be7060db9d14ad80273841c952c83a5afc241d2bd98", 58 | "sha256:9e37c35fc4e9410093b04a77d11a34c64bf658565e30df7cbe882056088a91c1", 59 | "sha256:a0678793096205a4d784bd99f32803ba8100f639cf3b932dc63b21621390ea7e", 60 | "sha256:b46554ad4dafb2927f88de5a1d207398c5385edbb5c84d30b3ef187c4a3894d8", 61 | "sha256:c867eeccd934920a800f65c6068acdd6b87e80d45cd8c8beefff783b23cdc462", 62 | "sha256:dd0667f5be56fb1b570154c2c0516a528e02d50da121bbbb2cbb0b6f87f59bc2", 63 | "sha256:de2b1c20494bdf47f0160bd88ed05f5e48ae5dc336b8de7cfade71abcc95c0b9", 64 | "sha256:f1df7b2b7740dd777571c732f98adb5aad5450aee32772f1b39249c8a50386f6", 65 | "sha256:ffca69e29079f7880c5392bf675eb8b4146479d976ae1924d01cd92b04cccbcc" 66 | ], 67 | "index": "pypi", 68 | "version": "==1.17.3" 69 | }, 70 | "scipy": { 71 | "hashes": [ 72 | "sha256:0baa64bf42592032f6f6445a07144e355ca876b177f47ad8d0612901c9375bef", 73 | "sha256:243b04730d7223d2b844bda9500310eecc9eda0cba9ceaf0cde1839f8287dfa8", 74 | "sha256:2643cfb46d97b7797d1dbdb6f3c23fe3402904e3c90e6facfe6a9b98d808c1b5", 75 | "sha256:396eb4cdad421f846a1498299474f0a3752921229388f91f60dc3eda55a00488", 76 | "sha256:3ae3692616975d3c10aca6d574d6b4ff95568768d4525f76222fb60f142075b9", 77 | "sha256:435d19f80b4dcf67dc090cc04fde2c5c8a70b3372e64f6a9c58c5b806abfa5a8", 78 | "sha256:46a5e55850cfe02332998b3aef481d33f1efee1960fe6cfee0202c7dd6fc21ab", 79 | "sha256:75b513c462e58eeca82b22fc00f0d1875a37b12913eee9d979233349fce5c8b2", 80 | "sha256:7ccfa44a08226825126c4ef0027aa46a38c928a10f0a8a8483c80dd9f9a0ad44", 81 | "sha256:89dd6a6d329e3f693d1204d5562dd63af0fd7a17854ced17f9cbc37d5b853c8d", 82 | "sha256:a81da2fe32f4eab8b60d56ad43e44d93d392da228a77e229e59b51508a00299c", 83 | "sha256:a9d606d11eb2eec7ef893eb825017fbb6eef1e1d0b98a5b7fc11446ebeb2b9b1", 84 | "sha256:ac37eb652248e2d7cbbfd89619dce5ecfd27d657e714ed049d82f19b162e8d45", 85 | "sha256:cbc0611699e420774e945f6a4e2830f7ca2b3ee3483fca1aa659100049487dd5", 86 | 
"sha256:d02d813ec9958ed63b390ded463163685af6025cb2e9a226ec2c477df90c6957", 87 | "sha256:dd3b52e00f93fd1c86f2d78243dfb0d02743c94dd1d34ffea10055438e63b99d" 88 | ], 89 | "index": "pypi", 90 | "version": "==1.3.1" 91 | } 92 | }, 93 | "develop": { 94 | "atomicwrites": { 95 | "hashes": [ 96 | "sha256:81b2c9071a49367a7f770170e5eec8cb66567cfbbc8c73d20ce5ca4a8d71cf11" 97 | ], 98 | "version": "==1.4.1" 99 | }, 100 | "attrs": { 101 | "hashes": [ 102 | "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6", 103 | "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c" 104 | ], 105 | "markers": "python_version >= '3.5'", 106 | "version": "==22.1.0" 107 | }, 108 | "bleach": { 109 | "hashes": [ 110 | "sha256:085f7f33c15bd408dd9b17a4ad77c577db66d76203e5984b1bd59baeee948b2a", 111 | "sha256:0d03255c47eb9bd2f26aa9bb7f2107732e7e8fe195ca2f64709fcf3b0a4a085c" 112 | ], 113 | "markers": "python_version >= '3.7'", 114 | "version": "==5.0.1" 115 | }, 116 | "certifi": { 117 | "hashes": [ 118 | "sha256:35824b4c3a97115964b408844d64aa14db1cc518f6562e8d7261699d1350a9e3", 119 | "sha256:4ad3232f5e926d6718ec31cfc1fcadfde020920e278684144551c91769c7bc18" 120 | ], 121 | "index": "pypi", 122 | "version": "==2022.12.7" 123 | }, 124 | "cffi": { 125 | "hashes": [ 126 | "sha256:00a9ed42e88df81ffae7a8ab6d9356b371399b91dbdf0c3cb1e84c03a13aceb5", 127 | "sha256:03425bdae262c76aad70202debd780501fabeaca237cdfddc008987c0e0f59ef", 128 | "sha256:04ed324bda3cda42b9b695d51bb7d54b680b9719cfab04227cdd1e04e5de3104", 129 | "sha256:0e2642fe3142e4cc4af0799748233ad6da94c62a8bec3a6648bf8ee68b1c7426", 130 | "sha256:173379135477dc8cac4bc58f45db08ab45d228b3363adb7af79436135d028405", 131 | "sha256:198caafb44239b60e252492445da556afafc7d1e3ab7a1fb3f0584ef6d742375", 132 | "sha256:1e74c6b51a9ed6589199c787bf5f9875612ca4a8a0785fb2d4a84429badaf22a", 133 | "sha256:2012c72d854c2d03e45d06ae57f40d78e5770d252f195b93f581acf3ba44496e", 134 | 
"sha256:21157295583fe8943475029ed5abdcf71eb3911894724e360acff1d61c1d54bc", 135 | "sha256:2470043b93ff09bf8fb1d46d1cb756ce6132c54826661a32d4e4d132e1977adf", 136 | "sha256:285d29981935eb726a4399badae8f0ffdff4f5050eaa6d0cfc3f64b857b77185", 137 | "sha256:30d78fbc8ebf9c92c9b7823ee18eb92f2e6ef79b45ac84db507f52fbe3ec4497", 138 | "sha256:320dab6e7cb2eacdf0e658569d2575c4dad258c0fcc794f46215e1e39f90f2c3", 139 | "sha256:33ab79603146aace82c2427da5ca6e58f2b3f2fb5da893ceac0c42218a40be35", 140 | "sha256:3548db281cd7d2561c9ad9984681c95f7b0e38881201e157833a2342c30d5e8c", 141 | "sha256:3799aecf2e17cf585d977b780ce79ff0dc9b78d799fc694221ce814c2c19db83", 142 | "sha256:39d39875251ca8f612b6f33e6b1195af86d1b3e60086068be9cc053aa4376e21", 143 | "sha256:3b926aa83d1edb5aa5b427b4053dc420ec295a08e40911296b9eb1b6170f6cca", 144 | "sha256:3bcde07039e586f91b45c88f8583ea7cf7a0770df3a1649627bf598332cb6984", 145 | "sha256:3d08afd128ddaa624a48cf2b859afef385b720bb4b43df214f85616922e6a5ac", 146 | "sha256:3eb6971dcff08619f8d91607cfc726518b6fa2a9eba42856be181c6d0d9515fd", 147 | "sha256:40f4774f5a9d4f5e344f31a32b5096977b5d48560c5592e2f3d2c4374bd543ee", 148 | "sha256:4289fc34b2f5316fbb762d75362931e351941fa95fa18789191b33fc4cf9504a", 149 | "sha256:470c103ae716238bbe698d67ad020e1db9d9dba34fa5a899b5e21577e6d52ed2", 150 | "sha256:4f2c9f67e9821cad2e5f480bc8d83b8742896f1242dba247911072d4fa94c192", 151 | "sha256:50a74364d85fd319352182ef59c5c790484a336f6db772c1a9231f1c3ed0cbd7", 152 | "sha256:54a2db7b78338edd780e7ef7f9f6c442500fb0d41a5a4ea24fff1c929d5af585", 153 | "sha256:5635bd9cb9731e6d4a1132a498dd34f764034a8ce60cef4f5319c0541159392f", 154 | "sha256:59c0b02d0a6c384d453fece7566d1c7e6b7bae4fc5874ef2ef46d56776d61c9e", 155 | "sha256:5d598b938678ebf3c67377cdd45e09d431369c3b1a5b331058c338e201f12b27", 156 | "sha256:5df2768244d19ab7f60546d0c7c63ce1581f7af8b5de3eb3004b9b6fc8a9f84b", 157 | "sha256:5ef34d190326c3b1f822a5b7a45f6c4535e2f47ed06fec77d3d799c450b2651e", 158 | 
"sha256:6975a3fac6bc83c4a65c9f9fcab9e47019a11d3d2cf7f3c0d03431bf145a941e", 159 | "sha256:6c9a799e985904922a4d207a94eae35c78ebae90e128f0c4e521ce339396be9d", 160 | "sha256:70df4e3b545a17496c9b3f41f5115e69a4f2e77e94e1d2a8e1070bc0c38c8a3c", 161 | "sha256:7473e861101c9e72452f9bf8acb984947aa1661a7704553a9f6e4baa5ba64415", 162 | "sha256:8102eaf27e1e448db915d08afa8b41d6c7ca7a04b7d73af6514df10a3e74bd82", 163 | "sha256:87c450779d0914f2861b8526e035c5e6da0a3199d8f1add1a665e1cbc6fc6d02", 164 | "sha256:8b7ee99e510d7b66cdb6c593f21c043c248537a32e0bedf02e01e9553a172314", 165 | "sha256:91fc98adde3d7881af9b59ed0294046f3806221863722ba7d8d120c575314325", 166 | "sha256:94411f22c3985acaec6f83c6df553f2dbe17b698cc7f8ae751ff2237d96b9e3c", 167 | "sha256:98d85c6a2bef81588d9227dde12db8a7f47f639f4a17c9ae08e773aa9c697bf3", 168 | "sha256:9ad5db27f9cabae298d151c85cf2bad1d359a1b9c686a275df03385758e2f914", 169 | "sha256:a0b71b1b8fbf2b96e41c4d990244165e2c9be83d54962a9a1d118fd8657d2045", 170 | "sha256:a0f100c8912c114ff53e1202d0078b425bee3649ae34d7b070e9697f93c5d52d", 171 | "sha256:a591fe9e525846e4d154205572a029f653ada1a78b93697f3b5a8f1f2bc055b9", 172 | "sha256:a5c84c68147988265e60416b57fc83425a78058853509c1b0629c180094904a5", 173 | "sha256:a66d3508133af6e8548451b25058d5812812ec3798c886bf38ed24a98216fab2", 174 | "sha256:a8c4917bd7ad33e8eb21e9a5bbba979b49d9a97acb3a803092cbc1133e20343c", 175 | "sha256:b3bbeb01c2b273cca1e1e0c5df57f12dce9a4dd331b4fa1635b8bec26350bde3", 176 | "sha256:cba9d6b9a7d64d4bd46167096fc9d2f835e25d7e4c121fb2ddfc6528fb0413b2", 177 | "sha256:cc4d65aeeaa04136a12677d3dd0b1c0c94dc43abac5860ab33cceb42b801c1e8", 178 | "sha256:ce4bcc037df4fc5e3d184794f27bdaab018943698f4ca31630bc7f84a7b69c6d", 179 | "sha256:cec7d9412a9102bdc577382c3929b337320c4c4c4849f2c5cdd14d7368c5562d", 180 | "sha256:d400bfb9a37b1351253cb402671cea7e89bdecc294e8016a707f6d1d8ac934f9", 181 | "sha256:d61f4695e6c866a23a21acab0509af1cdfd2c013cf256bbf5b6b5e2695827162", 182 | 
"sha256:db0fbb9c62743ce59a9ff687eb5f4afbe77e5e8403d6697f7446e5f609976f76", 183 | "sha256:dd86c085fae2efd48ac91dd7ccffcfc0571387fe1193d33b6394db7ef31fe2a4", 184 | "sha256:e00b098126fd45523dd056d2efba6c5a63b71ffe9f2bbe1a4fe1716e1d0c331e", 185 | "sha256:e229a521186c75c8ad9490854fd8bbdd9a0c9aa3a524326b55be83b54d4e0ad9", 186 | "sha256:e263d77ee3dd201c3a142934a086a4450861778baaeeb45db4591ef65550b0a6", 187 | "sha256:ed9cb427ba5504c1dc15ede7d516b84757c3e3d7868ccc85121d9310d27eed0b", 188 | "sha256:fa6693661a4c91757f4412306191b6dc88c1703f780c8234035eac011922bc01", 189 | "sha256:fcd131dd944808b5bdb38e6f5b53013c5aa4f334c5cad0c72742f6eba4b73db0" 190 | ], 191 | "version": "==1.15.1" 192 | }, 193 | "charset-normalizer": { 194 | "hashes": [ 195 | "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", 196 | "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" 197 | ], 198 | "markers": "python_version >= '3.6'", 199 | "version": "==2.1.1" 200 | }, 201 | "check-manifest": { 202 | "hashes": [ 203 | "sha256:42de6eaab4ed149e60c9b367ada54f01a3b1e4d6846784f9b9710e770ff5572c", 204 | "sha256:78dd077f2c70dbac7cfcc9d12cbd423914e787ea4b5631de45aecd25b524e8e3" 205 | ], 206 | "index": "pypi", 207 | "version": "==0.40" 208 | }, 209 | "cryptography": { 210 | "hashes": [ 211 | "sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd", 212 | "sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db", 213 | "sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290", 214 | "sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744", 215 | "sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb", 216 | "sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d", 217 | "sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70", 218 | "sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b", 219 | 
"sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876", 220 | "sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083", 221 | "sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6", 222 | "sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1", 223 | "sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00", 224 | "sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b", 225 | "sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b", 226 | "sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285", 227 | "sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9", 228 | "sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0", 229 | "sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d", 230 | "sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2", 231 | "sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8", 232 | "sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee", 233 | "sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b", 234 | "sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7", 235 | "sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353", 236 | "sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c" 237 | ], 238 | "markers": "python_version >= '3.6'", 239 | "version": "==38.0.4" 240 | }, 241 | "docutils": { 242 | "hashes": [ 243 | "sha256:33995a6753c30b7f577febfc2c50411fec6aac7f7ffeb7c4cfe5991072dcf9e6", 244 | "sha256:5e1de4d849fee02c63b040a4a3fd567f4ab104defd8a5511fbbc24a8a017efbc" 245 | ], 246 | "markers": "python_version >= '3.7'", 247 | "version": "==0.19" 248 | }, 249 | "entrypoints": { 250 | "hashes": [ 251 | "sha256:b706eddaa9218a19ebcd67b56818f05bb27589b1ca9e8d797b74affad4ccacd4", 252 | 
"sha256:f174b5ff827504fd3cd97cc3f8649f3693f51538c7e4bdf3ef002c8429d42f9f" 253 | ], 254 | "markers": "python_version >= '3.6'", 255 | "version": "==0.4" 256 | }, 257 | "idna": { 258 | "hashes": [ 259 | "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", 260 | "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" 261 | ], 262 | "markers": "python_version >= '3.5'", 263 | "version": "==3.4" 264 | }, 265 | "importlib-metadata": { 266 | "hashes": [ 267 | "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b", 268 | "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313" 269 | ], 270 | "markers": "python_version < '3.8'", 271 | "version": "==5.1.0" 272 | }, 273 | "jeepney": { 274 | "hashes": [ 275 | "sha256:5efe48d255973902f6badc3ce55e2aa6c5c3b3bc642059ef3a91247bcfcc5806", 276 | "sha256:c0a454ad016ca575060802ee4d590dd912e35c122fa04e70306de3d076cce755" 277 | ], 278 | "markers": "python_version >= '3.7'", 279 | "version": "==0.8.0" 280 | }, 281 | "keyring": { 282 | "hashes": [ 283 | "sha256:91037ccaf0c9a112a76f7740e4a416b9457a69b66c2799421581bee710a974b3", 284 | "sha256:f5bb20ea6c57c2360daf0c591931c9ea0d7660a8d9e32ca84d63273f131ea605" 285 | ], 286 | "index": "pypi", 287 | "version": "==19.2.0" 288 | }, 289 | "more-itertools": { 290 | "hashes": [ 291 | "sha256:250e83d7e81d0c87ca6bd942e6aeab8cc9daa6096d12c5308f3f92fa5e5c1f41", 292 | "sha256:5a6257e40878ef0520b1803990e3e22303a41b5714006c32a3fd8304b26ea1ab" 293 | ], 294 | "markers": "python_version >= '3.7'", 295 | "version": "==9.0.0" 296 | }, 297 | "packaging": { 298 | "hashes": [ 299 | "sha256:2198ec20bd4c017b8f9717e00f0c8714076fc2fd93816750ab48e2c41de2cfd3", 300 | "sha256:957e2148ba0e1a3b282772e791ef1d8083648bc131c8ab0c1feba110ce1146c3" 301 | ], 302 | "markers": "python_version >= '3.7'", 303 | "version": "==22.0" 304 | }, 305 | "pkginfo": { 306 | "hashes": [ 307 | "sha256:ac03e37e4d601aaee40f8087f63fc4a2a6c9814dda2c8fa6aab1b1829653bdfa", 308 | 
"sha256:d580059503f2f4549ad6e4c106d7437356dbd430e2c7df99ee1efe03d75f691e" 309 | ], 310 | "markers": "python_version >= '3.6'", 311 | "version": "==1.9.2" 312 | }, 313 | "pluggy": { 314 | "hashes": [ 315 | "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", 316 | "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" 317 | ], 318 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 319 | "version": "==0.13.1" 320 | }, 321 | "py": { 322 | "hashes": [ 323 | "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719", 324 | "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378" 325 | ], 326 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", 327 | "version": "==1.11.0" 328 | }, 329 | "pycparser": { 330 | "hashes": [ 331 | "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9", 332 | "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206" 333 | ], 334 | "version": "==2.21" 335 | }, 336 | "pygments": { 337 | "hashes": [ 338 | "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1", 339 | "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42" 340 | ], 341 | "markers": "python_version >= '3.6'", 342 | "version": "==2.13.0" 343 | }, 344 | "pytest": { 345 | "hashes": [ 346 | "sha256:7e4800063ccfc306a53c461442526c5571e1462f61583506ce97e4da6a1d88c8", 347 | "sha256:ca563435f4941d0cb34767301c27bc65c510cb82e90b9ecf9cb52dc2c63caaa0" 348 | ], 349 | "index": "pypi", 350 | "version": "==5.2.1" 351 | }, 352 | "readme-renderer": { 353 | "hashes": [ 354 | "sha256:cd653186dfc73055656f090f227f5cb22a046d7f71a841dfa305f55c9a513273", 355 | "sha256:f67a16caedfa71eef48a31b39708637a6f4664c4394801a7b0d6432d13907343" 356 | ], 357 | "markers": "python_version >= '3.7'", 358 | "version": "==37.3" 359 | }, 360 | "requests": { 361 | "hashes": [ 362 | 
"sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", 363 | "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" 364 | ], 365 | "markers": "python_version >= '3.7' and python_version < '4'", 366 | "version": "==2.28.1" 367 | }, 368 | "requests-toolbelt": { 369 | "hashes": [ 370 | "sha256:18565aa58116d9951ac39baa288d3adb5b3ff975c4f25eee78555d89e8f247f7", 371 | "sha256:62e09f7ff5ccbda92772a29f394a49c3ad6cb181d568b1337626b2abb628a63d" 372 | ], 373 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 374 | "version": "==0.10.1" 375 | }, 376 | "secretstorage": { 377 | "hashes": [ 378 | "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77", 379 | "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99" 380 | ], 381 | "markers": "sys_platform == 'linux'", 382 | "version": "==3.3.3" 383 | }, 384 | "setuptools": { 385 | "hashes": [ 386 | "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54", 387 | "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75" 388 | ], 389 | "markers": "python_version >= '3.7'", 390 | "version": "==65.6.3" 391 | }, 392 | "six": { 393 | "hashes": [ 394 | "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", 395 | "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" 396 | ], 397 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 398 | "version": "==1.16.0" 399 | }, 400 | "toml": { 401 | "hashes": [ 402 | "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", 403 | "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" 404 | ], 405 | "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", 406 | "version": "==0.10.2" 407 | }, 408 | "tqdm": { 409 | "hashes": [ 410 | "sha256:5f4f682a004951c1b450bc753c710e9280c5746ce6ffedee253ddbcbf54cf1e4", 411 | 
"sha256:6fee160d6ffcd1b1c68c65f14c829c22832bc401726335ce92c52d395944a6a1" 412 | ], 413 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", 414 | "version": "==4.64.1" 415 | }, 416 | "twine": { 417 | "hashes": [ 418 | "sha256:5319dd3e02ac73fcddcd94f035b9631589ab5d23e1f4699d57365199d85261e1", 419 | "sha256:9fe7091715c7576df166df8ef6654e61bada39571783f2fd415bdcba867c6993" 420 | ], 421 | "index": "pypi", 422 | "version": "==2.0.0" 423 | }, 424 | "typing-extensions": { 425 | "hashes": [ 426 | "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa", 427 | "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e" 428 | ], 429 | "markers": "python_version < '3.8'", 430 | "version": "==4.4.0" 431 | }, 432 | "urllib3": { 433 | "hashes": [ 434 | "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc", 435 | "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8" 436 | ], 437 | "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5'", 438 | "version": "==1.26.13" 439 | }, 440 | "wcwidth": { 441 | "hashes": [ 442 | "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", 443 | "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" 444 | ], 445 | "version": "==0.2.5" 446 | }, 447 | "webencodings": { 448 | "hashes": [ 449 | "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", 450 | "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923" 451 | ], 452 | "version": "==0.5.1" 453 | }, 454 | "wheel": { 455 | "hashes": [ 456 | "sha256:10c9da68765315ed98850f8e048347c3eb06dd81822dc2ab1d4fde9dc9702646", 457 | "sha256:f4da1763d3becf2e2cd92a14a7c920f0f00eca30fdde9ea992c836685b9faf28" 458 | ], 459 | "index": "pypi", 460 | "version": "==0.33.6" 461 | }, 462 | "zipp": { 463 | "hashes": [ 464 | "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa", 465 | 
"sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766" 466 | ], 467 | "markers": "python_version >= '3.7'", 468 | "version": "==3.11.0" 469 | } 470 | } 471 | } 472 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Spår: Cluster Trace Generator 2 | 3 | This command-line tool generates cluster trace in a more controllable manner based on [Alibaba's cluster trace](https://github.com/alibaba/clusterdata) 4 | 5 | [![image](https://img.shields.io/pypi/l/spar.svg)](https://python.org/pypi/spar) 6 | [![image](https://img.shields.io/pypi/pyversions/spar.svg)](https://python.org/pypi/spar) 7 | 8 | ### Installation 9 | 10 | It is recommended to install the tool with `pip3`. 11 | 12 | ``` 13 | pip3 install spar 14 | ``` 15 | 16 | Caution: The tool might not work with newer versions of `Python`(3.8+) and `scipy`(1.8+). 17 | 18 | ### Usage 19 | 20 | ``` 21 | Usage: spar [OPTIONS] OUTPUT_DIR 22 | 23 | By default, we output an hour-long trace from the original Alibaba 24 | trace to the OUTPUT_DIR. But you could provide several parameters 25 | and we would transform the trace as follows. 26 | 1. Up- or down-sample trace according to load-factor. For up-sampling, 27 | we replace the dependencies with synthesized ones. 28 | 2. Adjust resource heterogeneity according to heter-factor. 29 | 3. Rescale resource request and usage according to machine-conf. 30 | 31 | Examples: 32 | 33 | Generate an hour-long trace. 34 | $ spar 35 | 36 | Generate an hour-long trace with 2x jobs. 37 | $ spar --load-factor 2 38 | 39 | Generate a half-hour-long trace. 40 | $ spar --duration 0.5 41 | 42 | Generate an hour-long trace with the resource request and usage deviating 43 | from the average 1.5x the original. 44 | $ spar --heter-factor 1.5 45 | 46 | Generate an hour-long trace for clusters with 24 cores and 50 unit of memory. 
47 | $ spar --machine-conf (24, 50) 48 | 49 | Options: 50 | --trace-dir PATH The location of Alibaba trace. 51 | --load-factor FLOAT A factor adjusting the average load (i.e., # 52 | jobs/hour) of the output trace. 53 | --duration FLOAT RANGE The duration (in hours) of the trace. 54 | --heter-factor FLOAT A factor adjusting the heterogeneity 55 | (defined as the ratio: value/average) of the 56 | output trace. 57 | --machine-conf ... 58 | An integer pair indicating the (CPU, memory) 59 | of each server. Default: (96, 100) as in 60 | Alibaba cluster. 61 | --help Show this message and exit. 62 | ``` 63 | 64 | Please refer to our wiki for the [detailed format of input and output](https://github.com/All-less/trace-generator/wiki/Trace-Format). 65 | 66 | ### Publication 67 | 68 | For more details, please refer to the following paper. 69 | 70 | > Huangshi Tian, Yunchuan Zheng, and Wei Wang. "Characterizing and Synthesizing Task Dependencies of Data-Parallel Jobs in Alibaba Cloud." In SoCC. 2019. 71 | 72 | 73 | ### Contributing 74 | 75 | Any form of contribution is welcome! If you find a bug, create an issue; if you extend a feature, send a pull request. 76 | 77 | 78 | ### Acknowledgement 79 | 80 | [@SimonZYC](https://github.com/SimonZYC) has significantly contributed to this project. 
from setuptools import setup, find_packages

setup(
    name = 'spar',
    # BUG FIX: `exclude` expects an iterable of patterns; ('config') is just
    # the string 'config' (no trailing comma), which setuptools iterates
    # character by character -- a real one-element tuple is required.
    packages = find_packages(exclude=('config',)),
    # NOTE(review): version here (0.0.7) disagrees with spar/__init__.py
    # (__version__ = '0.1.0') -- confirm which is current before releasing.
    version = '0.0.7',
    description = 'A CLI tool for generating controllable cluster trace.',
    author = 'All-less',
    author_email = 'all.less.mail@gmail.com',
    url = 'https://github.com/All-less/trace-generator',
    install_requires = [
        'click',
        'scipy',
        'crayons',
        'numpy'
    ],
    entry_points = {
        'console_scripts': [
            'spar=spar.cli:main'
        ]
    },
    license="MIT",
    keywords = [ 'cluster trace', 'cloud computing' ],
    classifiers = [
        "License :: OSI Approved :: MIT License",
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: 3.7'
    ],
    include_package_data=True
)
import Path 5 | 6 | import click 7 | 8 | from .io import iter_job, write_job 9 | from .progress import Bar 10 | from .generate import random_interval, random_job 11 | from .transform import Transformer 12 | 13 | 14 | @click.command() 15 | @click.argument('output-dir', type=click.Path(exists=True)) 16 | @click.option('--trace-dir', type=click.Path(exists=True), default=(Path(__file__).parents[0] / 'data' / 'samples'), 17 | help='The location of Alibaba trace.') 18 | @click.option('--load-factor', type=float, default=1, 19 | help='A factor adjusting the average load (i.e., # jobs/hour) of the output trace.') 20 | @click.option('--duration', type=click.FloatRange(0, None), default=1, 21 | help='The duration (in hours) of the trace.') 22 | @click.option('--heter-factor', type=float, default=1, 23 | help='A factor adjusting the heterogeneity (defined as the ratio: value/average) of the output trace.') 24 | @click.option('--machine-conf', type=(int, int), default=(96, 100), 25 | help='An integer pair indicating the (CPU, memory) of each server. Default: (96, 100) as in Alibaba cluster.') 26 | def main(trace_dir, output_dir, load_factor, heter_factor, machine_conf, duration): 27 | ''' 28 | \b 29 | By default, we output an hour-long trace from the original Alibaba 30 | trace to the OUTPUT_DIR. But you could provide several parameters 31 | and we would transform the trace as follows. 32 | 1. Up- or down-sample trace according to load-factor. For up-sampling, 33 | we replace the dependencies with synthesized ones. 34 | 2. Adjust resource heterogeneity according to heter-factor. 35 | 3. Rescale resource request and usage according to machine-conf. 36 | 37 | Examples: 38 | 39 | \b 40 | Generate an hour-long trace. 41 | $ spar 42 | 43 | \b 44 | Generate an hour-long trace with 2x jobs. 45 | $ spar --load-factor 2 46 | 47 | \b 48 | Generate a half-hour-long trace. 
49 | $ spar --duration 0.5 50 | 51 | \b 52 | Generate an hour-long trace with the resource request and usage deviating 53 | from the average 1.5x the original. 54 | $ spar --heter-factor 1.5 55 | 56 | \b 57 | Generate an hour-long trace for clusters with 24 cores and 50 unit of memory. 58 | $ spar --machine-conf (24, 50) 59 | ''' 60 | with (Path(trace_dir) / 'sample_tasks.csv').open() as sample_task, \ 61 | (Path(trace_dir) / 'sample_instances.csv').open() as sample_instance, \ 62 | (Path(output_dir) / 'batch_task.csv').open('w') as output_task, \ 63 | (Path(output_dir) / 'batch_instace.csv').open('w') as output_instace: 64 | 65 | transformer = Transformer(heter_factor, machine_conf) 66 | output_job = lambda a, j: write_job(a, transformer.transform(j), 67 | output_task, output_instace) 68 | last = 0 # the arrival time of the last generated job 69 | 70 | total_jobs = 16749 * duration * load_factor # total number of jobs to be generated 71 | step_size = int(30 * random.random() + 20) # enlarge interval of updating progress bar to reduce overhead 72 | with Bar(label='Generating jobs ', expected_size=total_jobs, every=step_size) as bar: 73 | for i, (arrive_at, job) in enumerate(iter_job(sample_task, sample_instance)): 74 | 75 | # d: duration, l: load_factor 76 | dl = duration * load_factor 77 | if dl == 1: 78 | # the number of jobs will not change 79 | output_job(arrive_at / load_factor, job) 80 | 81 | elif dl > 1: 82 | # insert expected dl-1 synthesized jobs 83 | to_insert = ceil(dl) if floor(dl) + random.random() < dl else floor(dl) 84 | for _ in range(to_insert): 85 | last += random_interval() / load_factor 86 | output_job(last, random_job()) 87 | output_job(arrive_at * duration, job) 88 | 89 | # when d*l < 1, retain the job with probability of d*l 90 | elif random.random() < dl: 91 | output_job(arrive_at * duration, job) 92 | 93 | last = arrive_at * duration 94 | bar.show(int(i * dl)) 95 | 96 | 97 | if __name__ == '__main__': 98 | main() 99 | 
-------------------------------------------------------------------------------- /spar/data/distributions/cpl_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/cpl_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/instance_cpu_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/instance_cpu_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/instance_duration_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/instance_duration_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/instance_mem_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/instance_mem_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/instance_num_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/instance_num_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/job_interval_dist.pkl: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/job_interval_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/level_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/level_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/task_cpu_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/task_cpu_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/task_duration_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/task_duration_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/task_mem_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/task_mem_dist.pkl -------------------------------------------------------------------------------- /spar/data/distributions/task_num_dist.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/All-less/trace-generator/5a689312fef84d35fa1897c8a9090ea65fa06946/spar/data/distributions/task_num_dist.pkl -------------------------------------------------------------------------------- /spar/generate.py: 
# coding: utf-8
from math import ceil
from random import sample
from functools import reduce
from collections import defaultdict

from .utils import draw


def random_interval():
    """Draw one random job inter-arrival interval from the fitted
    distribution (presumably in seconds, matching the trace's arrival
    column -- TODO confirm against the trace format)."""
    return draw('job_interval_dist.pkl')[0]


def random_levels(num_nodes):
    """Assign a DAG level to each of ``num_nodes`` tasks.

    A critical path length ``cpl`` is drawn first (conditioned on the node
    count, capped at 35 by the fitted distribution), then each remaining
    node gets a level drawn from the level distribution (conditioned on
    ``cpl``, capped at 20).  Levels ``1..cpl`` are each included once so the
    critical path is realized; the returned list has ``num_nodes`` entries.
    """
    cpl = min(draw('cpl_dist.pkl', output_integer=True, path=[ min(num_nodes, 35), ])[0], num_nodes)
    levels = draw('level_dist.pkl', num=num_nodes - cpl, output_integer=True, path=[ min(cpl, 20), ])
    return levels + [ *range(1, cpl + 1) ]


def random_dag(num_nodes):
    """Synthesize a random DAG over ``num_nodes`` tasks.

    Returns a dict mapping each node to the list of its parents
    (``{ <child>: [ <parent>, ... ] }``); a single node yields the trivial
    DAG ``{ 0: [] }``.
    """
    if num_nodes == 1:
        return { 0: [] }

    # randomly select a critical path length and assign nodes along it
    nodes = defaultdict(list)
    for n, l in enumerate(sorted(random_levels(num_nodes))):
        nodes[l].append(n)

    # randomly generate edges: each node at level l becomes the parent of
    # roughly 3/4 of its proportional share of the nodes at level l+1
    # NOTE(review): assumes the drawn levels fall in 1..cpl so the level
    # keys are contiguous -- confirm against level_dist.pkl
    parents = { n:[] for n in range(num_nodes) }
    for l in range(1, len(nodes)):
        for n in nodes[l]:
            for c in set(sample(nodes[l + 1], ceil(len(nodes[l + 1]) / len(nodes[l]) * 3 / 4))):
                parents[c].append(n)

    return parents


def random_job():
    """Synthesize one job: a random DAG plus per-task and per-instance
    figures drawn from the fitted distributions, returned as the CSV
    fragments consumed by ``write_job``."""
    task_num = draw('task_num_dist.pkl', num=1, output_integer=True)[0]
    job_dag = random_dag(task_num)  # { <child>: [ <parent>, ... ], ... }

    # generate task_name, duration, plan_cpu, plan_mem, inst_num for each task
    # (the task name encodes its parents: 'T<id>_<parent>_<parent>...')
    task_info = [ *zip(
        [ f'T{k}' + reduce(str.__add__, [ f'_{p}' for p in v ], '') for k, v in job_dag.items() ],
        draw('task_duration_dist.pkl', num=len(job_dag)),
        draw('task_cpu_dist.pkl', num=len(job_dag)),
        draw('task_mem_dist.pkl', num=len(job_dag)),
        draw('instance_num_dist.pkl', num=len(job_dag), output_integer=True)
    ) ]

    # generate task_name, inst_name, duration, cpu_avg, mem_avg for each instance
    instance_info = reduce(list.__add__, [ [ *zip(
        [ task_name for _ in range(inst_num) ],
        [ f'inst_{i}' for i in range(inst_num) ],
        draw('instance_duration_dist.pkl', num=inst_num),
        draw('instance_cpu_dist.pkl', num=inst_num),
        draw('instance_mem_dist.pkl', num=inst_num)
    ) ] for task_name, _, _, _, inst_num in task_info
    ])

    return {
        'tasks': [ ','.join(map(str, info)) for info in task_info ],
        'instances': [ ','.join(map(str, info)) for info in instance_info ]
    }


# --- spar/io.py ---

def write_job(arrival, job, task_file, instance_file):
    """Append one job's task and instance CSV lines to the output files,
    prefixing each line with the arrival time and a fresh sequential job
    id ``j_<n>``."""
    write_job.called += 1
    for line in job['tasks']:
        task_file.write(f'{arrival},j_{write_job.called},{line.strip()}\n')
    for line in job['instances']:
        instance_file.write(f'{arrival},j_{write_job.called},{line.strip()}\n')

# We assign a counter to `write_job` function and it will be
# used for generating `job_id`.
write_job.called = 0
def iter_job(task_file, instance_file):
    """Iterate over the jobs described by a pair of task/instance CSV streams.

    Both streams must contain lines of the form ``<arrival>,<job_id>,<rest...>``
    grouped by job, with the two streams listing jobs in the same order.
    Yields ``(arrival, job)`` pairs, where ``arrival`` is a float and ``job``
    is ``{'tasks': [...], 'instances': [...]}`` holding the raw ``<rest...>``
    payloads (trailing newlines included).
    """

    ARR_TIME, JOB_ID, REST = 0, 1, 2

    def extract(line):
        # Split off only the first two fields; the remainder stays verbatim.
        arrival_end = line.index(',')
        job_id_end = line.index(',', arrival_end + 1)
        return line[:arrival_end], line[arrival_end+1:job_id_end], line[job_id_end+1:]

    def read_lines(file, job_id, line_buffer):  # read all lines related to one job
        try:
            parts = extract(next(file))
            while parts[JOB_ID] == job_id:
                line_buffer.append(parts[REST])
                parts = extract(next(file))
            return parts  # first line of the NEXT job
        except StopIteration:
            return '', '', ''  # sentinel: end of file

    # BUG FIX: with an empty input file the unguarded next() let
    # StopIteration escape the generator, which PEP 479 (Python 3.7+)
    # turns into a RuntimeError; an empty input now simply yields nothing.
    try:
        next_task, next_instance = extract(next(task_file)), extract(next(instance_file))
    except StopIteration:
        return
    while next_task[JOB_ID] != '':
        arrive_at, task_lines, instance_lines = next_task[ARR_TIME], [ next_task[REST] ], [ next_instance[REST] ]
        next_task = read_lines(task_file, next_task[JOB_ID], task_lines)
        next_instance = read_lines(instance_file, next_instance[JOB_ID], instance_lines)
        yield float(arrive_at), { 'tasks': task_lines, 'instances': instance_lines }
# Bar appearance: plain ASCII when emojis are hidden, block glyphs
# otherwise; colours are dropped in colour-blind mode.
if PIPENV_HIDE_EMOJIS:
    if PIPENV_COLORBLIND:
        BAR_FILLED_CHAR = "="
        BAR_EMPTY_CHAR = "-"
    else:
        BAR_FILLED_CHAR = str(crayons.green("=", bold=True))
        BAR_EMPTY_CHAR = str(crayons.black("-"))
else:
    if PIPENV_COLORBLIND:
        BAR_FILLED_CHAR = "▉"
        BAR_EMPTY_CHAR = " "
    else:
        BAR_FILLED_CHAR = str(crayons.green("▉", bold=True))
        BAR_EMPTY_CHAR = str(crayons.black("▉"))

# Template slots: label, filled part, empty part, progress, total, ETA/elapsed.
# On Python 3 + non-Windows the ETA slot is pre-coloured black.
if (sys.version_info[0] >= 3) and (os.name != "nt"):
    BAR_TEMPLATE = u" %s%s%s %i/%i — {0}\r".format(crayons.black("%s"))
else:
    if os.name == "nt":
        BAR_TEMPLATE = " %s%s%s %i/%i - %s\r"
    else:
        BAR_TEMPLATE = " %s%s%s %i/%i — %s\r"
MILL_CHARS = ["|", "/", "-", "\\"]
# How long to wait before recalculating the ETA
ETA_INTERVAL = 1
# How many intervals (excluding the current one) to calculate the simple moving
# average
ETA_SMA_WINDOW = 9


class Bar(object):
    """In-place terminal progress bar (vendored from Pipenv/clint).

    Usable as a context manager; call :meth:`show` with the current
    progress and the bar redraws itself on STREAM using a carriage return
    (no newline).  Rendering is suppressed when STREAM is not a TTY.
    """

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.done()
        return False  # we're not suppressing exceptions

    def __init__(
        self,
        label="",
        width=32,
        hide=None,
        empty_char=BAR_EMPTY_CHAR,
        filled_char=BAR_FILLED_CHAR,
        expected_size=None,
        every=1,
    ):
        # label: text prefix; width: bar width in characters;
        # hide: force-hide output (None = auto-detect from the TTY);
        # expected_size: total number of progress units;
        # every: only redraw on every N-th progress value (reduces I/O).
        self.label = label
        self.width = width
        self.hide = hide
        # Only show bar in terminals by default (better for piping, logging etc.)
        if hide is None:
            try:
                self.hide = not STREAM.isatty()
            except AttributeError:  # output does not support isatty()
                self.hide = True
        self.empty_char = empty_char
        self.filled_char = filled_char
        self.expected_size = expected_size
        self.every = every
        self.start = time.time()
        self.ittimes = []  # recent per-item durations for the ETA moving average
        self.eta = 0
        self.etadelta = time.time()
        self.etadisp = self.format_time(self.eta)
        self.last_progress = 0
        if self.expected_size:
            self.show(0)

    def show(self, progress, count=None):
        """Redraw the bar at ``progress``; ``count`` may update the total."""
        if count is not None:
            self.expected_size = count
        if self.expected_size is None:
            raise Exception("expected_size not initialized")

        self.last_progress = progress
        # Recompute the ETA at most once per ETA_INTERVAL seconds, as a
        # simple moving average of per-item time over the last
        # ETA_SMA_WINDOW samples.
        if (time.time() - self.etadelta) > ETA_INTERVAL:
            self.etadelta = time.time()
            self.ittimes = self.ittimes[-ETA_SMA_WINDOW:] + [
                -(self.start - time.time()) / (progress + 1)
            ]
            self.eta = (
                sum(self.ittimes)
                / float(len(self.ittimes))
                * (self.expected_size - progress)
            )
            self.etadisp = self.format_time(self.eta)
        x = int(self.width * progress / self.expected_size)  # filled cell count
        if not self.hide:
            if (
                progress % self.every == 0  # True every "every" updates
                or progress == self.expected_size  # And when we're done
            ):
                STREAM.write(
                    BAR_TEMPLATE
                    % (
                        self.label,
                        self.filled_char * x,
                        self.empty_char * (self.width - x),
                        progress,
                        self.expected_size,
                        self.etadisp,
                    )
                )
                STREAM.flush()

    def done(self):
        """Draw the fully-filled bar with total elapsed time, then newline."""
        self.elapsed = time.time() - self.start
        elapsed_disp = self.format_time(self.elapsed)
        if not self.hide:
            # Print completed bar with elapsed time
            STREAM.write(
                BAR_TEMPLATE
                % (
                    self.label,
                    self.filled_char * self.width,
                    self.empty_char * 0,
                    self.last_progress,
                    self.expected_size,
                    elapsed_disp,
                )
            )
            STREAM.write("\n")
            STREAM.flush()

    def format_time(self, seconds):
        """Format a duration in seconds as HH:MM:SS."""
        return time.strftime("%H:%M:%S", time.gmtime(seconds))


def bar(
    it,
    label="",
    width=32,
    hide=None,
    empty_char=BAR_EMPTY_CHAR,
    filled_char=BAR_FILLED_CHAR,
    expected_size=None,
    every=1,
):
    """Progress iterator. Wrap your iterables with it."""
    count = len(it) if expected_size is None else expected_size
    # NOTE(review): the empty_char/filled_char parameters are ignored below --
    # the module constants are passed to Bar instead of the arguments.
    with Bar(
        label=label,
        width=width,
        hide=hide,
        empty_char=BAR_EMPTY_CHAR,
        filled_char=BAR_FILLED_CHAR,
        expected_size=count,
        every=every,
    ) as bar:
        for i, item in enumerate(it):
            yield item

            bar.show(i + 1)


def dots(it, label="", hide=None, every=1):
    """Progress iterator. Prints a dot for each item being iterated"""
    count = 0
    if not hide:
        STREAM.write(label)
    for i, item in enumerate(it):
        if not hide:
            if i % every == 0:  # True every "every" updates
                STREAM.write(DOTS_CHAR)
                sys.stderr.flush()
        count += 1
        yield item

    STREAM.write("\n")
    STREAM.flush()
class Transformer:
    """Rewrites the resource figures of a job's CSV lines.

    Two independent adjustments are applied, in order:
    1. heterogeneity: values above the trace-wide average are multiplied by
       ``heter_factor``, values below it are divided by it (the hard-coded
       pivots are presumably the Alibaba-trace averages -- TODO confirm);
    2. machine size: CPU and memory are linearly rescaled from the original
       (96, 100) machine to ``machine_conf``.
    """

    def __init__(self, heter_factor, machine_conf):
        # Fast path: the default configuration needs no rewriting at all.
        self._no_transform = heter_factor == 1.0 and machine_conf == (96, 100)
        # For each line kind, map field position -> pipeline of functions.
        self._transformers = {
            'tasks': defaultdict(list),
            'instances': defaultdict(list),
        }

        if heter_factor != 1.0:
            def stretch(pivot):
                # Push values away from (factor > 1) or toward (factor < 1)
                # the average, pivoting at the trace-wide mean.
                return lambda v: v * heter_factor if v > pivot else v / heter_factor
            self._transformers['tasks'][1].append(stretch(75.414))      # plan_cpu
            self._transformers['tasks'][2].append(stretch(1.002))       # plan_mem
            self._transformers['instances'][2].append(stretch(59.202))  # duration
            self._transformers['instances'][3].append(stretch(64.291))  # cpu_avg
            self._transformers['instances'][4].append(stretch(1.024))   # mem_avg

        cpu_cap, mem_cap = machine_conf
        if cpu_cap != 96:
            rescale_cpu = lambda cpu: cpu / 96 * cpu_cap
            self._transformers['tasks'][1].append(rescale_cpu)
            self._transformers['instances'][3].append(rescale_cpu)
        if mem_cap != 100:
            rescale_mem = lambda mem: mem / 100 * mem_cap
            self._transformers['tasks'][2].append(rescale_mem)
            self._transformers['instances'][4].append(rescale_mem)

    def transform(self, job):
        """Rewrite ``job``'s 'tasks' and 'instances' lines in place and
        return the (mutated) job dict."""
        if self._no_transform:
            return job
        for kind in ('tasks', 'instances'):
            job[kind] = [ self._apply(line, self._transformers[kind]) for line in job[kind] ]
        return job

    def _apply(self, line, transformers):
        # Run each field's pipeline over the parsed float and re-join.
        fields = line.split(',')
        for pos, funcs in transformers.items():
            value = float(fields[pos])
            for func in funcs:
                value = func(value)
            fields[pos] = str(value)
        return ','.join(fields)
28 | cache_name = dist_name + ''.join(map(str, path)) 29 | cache = SAMPLE_CACHE[cache_name] 30 | while len(cache) < num: 31 | cache += list(dist.rvs(size=10240)) 32 | 33 | samples = cache[:num] 34 | if output_integer: 35 | samples = [ *map(math.ceil, samples) ] 36 | 37 | SAMPLE_CACHE[cache_name] = cache[num:] 38 | 39 | return samples 40 | -------------------------------------------------------------------------------- /tests/context.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | import os 3 | import sys 4 | sys.path.insert(0, os.path.abspath('.')) # pytest is called at root directory 5 | 6 | import spar 7 | -------------------------------------------------------------------------------- /tests/test_generate.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from context import spar 3 | from spar import generate 4 | 5 | 6 | def test_random_dag(): 7 | print(generate.random_dag(10)) 8 | 9 | 10 | def test_random_job(): 11 | print(generate.random_job()) 12 | -------------------------------------------------------------------------------- /tests/test_progress.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from context import spar 3 | from spar import progress 4 | 5 | 6 | def test_bar(): 7 | print('\n') 8 | for _ in progress.bar(range(10000)): 9 | pass 10 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from context import spar 3 | from spar import utils 4 | 5 | 6 | def test_draw(): 7 | print(utils.draw('task_num_dist.pkl', num=5, output_integer=True)) 8 | --------------------------------------------------------------------------------