├── .coveragerc ├── .flake8 ├── .github └── workflows │ ├── badge.yml │ ├── doc.yml │ ├── release.yml │ └── test.yml ├── .gitignore ├── .style.yapf ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── assets └── ditk_doc_annotated_demo.png ├── cloc.sh ├── codecov.yml ├── ditk ├── __init__.py ├── config │ ├── __init__.py │ └── meta.py ├── doc │ ├── __init__.py │ └── annotated │ │ ├── __init__.py │ │ ├── __main__.py │ │ ├── generate.py │ │ ├── ppo.py │ │ ├── pylit.css │ │ └── solarized.css ├── logging │ ├── __init__.py │ ├── base.py │ ├── explicit.py │ ├── file.py │ ├── func.py │ ├── inherit.py │ ├── log.py │ ├── rich.py │ ├── stream.py │ └── terminal.py └── tensorboard │ ├── __init__.py │ ├── log.py │ └── plots │ ├── __init__.py │ └── range.py ├── docs ├── Makefile ├── main_page.html └── source │ ├── _libs │ └── .keep │ ├── _shims │ └── .keep │ ├── _static │ ├── .keep │ ├── css │ │ └── custom.css │ └── tb_create_range_plots.svg │ ├── _templates │ ├── .keep │ ├── page.html │ └── versions.html │ ├── all.mk │ ├── api_doc │ ├── config │ │ ├── index.rst │ │ └── meta.rst │ └── logging │ │ └── index.rst │ ├── conf.py │ ├── demos.mk │ ├── diagrams.mk │ ├── graphviz.mk │ ├── index.rst │ └── tutorials │ ├── installation │ ├── index.rst │ └── install_check.demo.py │ └── quick_start │ └── index.rst ├── format.sh ├── pytest.ini ├── requirements-doc.txt ├── requirements-style.txt ├── requirements-test.txt ├── requirements.txt ├── setup.py └── test ├── __init__.py ├── config ├── __init__.py └── test_meta.py ├── doc ├── __init__.py └── annotated │ ├── __init__.py │ └── test_generate.py ├── logging ├── __init__.py ├── conftest.py ├── test_func.py ├── test_inherit.py └── test_log.py ├── tensorboard ├── plots │ ├── __init__.py │ ├── conftest.py │ └── test_range.py └── test_log.py ├── testfile ├── pong_tb │ ├── pong_efficientzero_tb │ │ ├── seed0 │ │ │ └── events.out.tfevents.pong-efficientzero-seed0 │ │ ├── seed1 │ │ │ └── events.out.tfevents.pong-efficientzero-seed1 │ │ └── seed2 │ │ │ └── events.out.tfevents.pong-efficientzero-seed2 │ └── pong_muzero_tb │ │ ├── seed0 │ │ └── events.out.tfevents.pong_muzero_seed0 │ │ ├── seed1 │ │ └── events.out.tfevents.pong_muzero_seed1 │ │ └── seed2 │ │ └── events.out.tfevents.pong_muzero_seed2 ├── pong_tb_plot.png └── tb1 │ ├── sac │ └── events.out.tfevents.1684900409.CN0014009700M.local │ └── td3 │ └── events.out.tfevents.1684910134.CN0014009700M.local └── testing ├── __init__.py ├── log.py └── testfile.py /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | omit = ditk/doc/annotated/ppo.py -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | ignore=F401,F841,F403,E226,E126,W504,E265,E722,W503,W605,E741,E122,E731 3 | max-line-length=120 4 | statistics 5 | -------------------------------------------------------------------------------- /.github/workflows/badge.yml: -------------------------------------------------------------------------------- 1 | name: Badge Creation 2 | 3 | on: 4 | push: 5 | branches: [ main, 'badge/*', 'doc/*' ] 6 | 7 | jobs: 8 | update-badges: 9 | name: Update Badges 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: [ 3.7 ] 14 | steps: 15 | - uses: actions/checkout@v2 16 | - name: Set up Python ${{ matrix.python-version }} 17 | uses: actions/setup-python@v2 18 | with: 19 | python-version: ${{ 
matrix.python-version }} 20 | - name: Download cloc 21 | run: | 22 | sudo apt-get update -y 23 | sudo apt-get install -y cloc 24 | - name: Get the Numbers 25 | run: | 26 | cloc . 27 | echo "CODE_LINES=$(./cloc.sh --loc)" >> $GITHUB_ENV 28 | echo "COMMENT_LINES=$(./cloc.sh --percentage)%" >> $GITHUB_ENV 29 | - name: Create Lines-of-Code-Badge 30 | uses: schneegans/dynamic-badges-action@v1.0.0 31 | with: 32 | auth: ${{ secrets.GIST_SECRET }} 33 | gistID: ${{ secrets.BADGE_GIST_ID }} 34 | filename: loc.json 35 | label: Lines of Code 36 | message: ${{ env.CODE_LINES }} 37 | color: lightgrey 38 | - name: Create Comments-Badge 39 | uses: schneegans/dynamic-badges-action@v1.0.0 40 | with: 41 | auth: ${{ secrets.GIST_SECRET }} 42 | gistID: ${{ secrets.BADGE_GIST_ID }} 43 | filename: comments.json 44 | label: Comments 45 | message: ${{ env.COMMENT_LINES }} 46 | color: green -------------------------------------------------------------------------------- /.github/workflows/doc.yml: -------------------------------------------------------------------------------- 1 | # This workflow will check flake style 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Docs Deploy 5 | 6 | on: 7 | push: 8 | branches: [ main, 'doc/*', 'dev/*' ] 9 | release: 10 | types: [ published ] 11 | 12 | jobs: 13 | doc: 14 | runs-on: ubuntu-latest 15 | strategy: 16 | matrix: 17 | python-version: [ 3.7 ] 18 | 19 | services: 20 | plantuml: 21 | image: plantuml/plantuml-server 22 | ports: 23 | - 18080:8080 24 | 25 | steps: 26 | - uses: actions/checkout@v2 27 | - name: Set up Python ${{ matrix.python-version }} 28 | uses: actions/setup-python@v2 29 | with: 30 | python-version: ${{ matrix.python-version }} 31 | - name: Install dependencies 32 | run: | 33 | sudo apt-get update -y 34 | sudo apt-get install -y make wget curl cloc graphviz 35 | dot -V 36 | python -m pip install -r requirements.txt 37 | python -m pip install -r requirements-doc.txt 38 | - name: Generate 39 | env: 40 | ENV_PROD: 'true' 41 | PLANTUML_HOST: http://localhost:18080 42 | run: | 43 | git fetch --all --tags 44 | git branch -av 45 | git remote -v 46 | git tag 47 | plantumlcli -c 48 | make pdocs 49 | mv ./docs/build/html ./public 50 | - name: Deploy to Github Page 51 | uses: JamesIves/github-pages-deploy-action@3.7.1 52 | with: 53 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 54 | BRANCH: gh-pages # The branch the action should deploy to. 55 | FOLDER: public # The folder the action should deploy. 
56 | CLEAN: true # Automatically remove deleted files from the deploy branch 57 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Package Release 2 | 3 | on: 4 | release: 5 | types: [ published ] 6 | 7 | jobs: 8 | release: 9 | name: Publish to official pypi 10 | runs-on: ${{ matrix.os }} 11 | if: ${{ github.repository == 'opendilab/DI-toolkit' }} 12 | strategy: 13 | matrix: 14 | os: 15 | - 'ubuntu-latest' 16 | python-version: 17 | - '3.8' 18 | 19 | steps: 20 | - name: Checkout code 21 | uses: actions/checkout@v2 22 | with: 23 | fetch-depth: 20 24 | - name: Set up python ${{ matrix.python-version }} 25 | uses: actions/setup-python@v2 26 | with: 27 | python-version: ${{ matrix.python-version }} 28 | - name: Set up python dependences 29 | run: | 30 | pip install --upgrade pip 31 | pip install --upgrade flake8 setuptools wheel twine 32 | if [ -f requirements.txt ]; then pip install -r requirements.txt; fi 33 | if [ -f requirements-test.txt ]; then pip install -r requirements-test.txt; fi 34 | pip install --upgrade build 35 | - name: Build packages 36 | run: | 37 | python -m build --sdist --wheel --outdir dist/ 38 | - name: Publish distribution 📦 to real PyPI 39 | uses: pypa/gh-action-pypi-publish@master 40 | with: 41 | password: ${{ secrets.PYPI_PASSWORD }} 42 | - name: Upload binaries to release 43 | uses: svenstaro/upload-release-action@v2 44 | with: 45 | repo_token: ${{ secrets.GITHUB_TOKEN }} 46 | file: dist/* 47 | tag: ${{ github.ref }} 48 | overwrite: true 49 | file_glob: true 50 | -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Code Test 2 | 3 | on: 4 | push: 5 | 6 | jobs: 7 | unittest: 8 | name: Code test 9 | runs-on: ${{ matrix.os }} 10 | strategy: 11 | fail-fast: false 12 | matrix: 13 | os: 14 | - 'ubuntu-latest' 15 | - 'windows-latest' 16 | - 'macos-latest' 17 | python-version: 18 | - '3.7' 19 | - '3.8' 20 | - '3.9' 21 | - '3.10' 22 | - '3.11' 23 | 24 | steps: 25 | - name: Get system version for Linux 26 | if: ${{ contains(matrix.os, 'ubuntu') }} 27 | shell: bash 28 | run: | 29 | echo "OS_NAME=Linux" >> $GITHUB_ENV 30 | echo "IS_WIN=" >> $GITHUB_ENV 31 | echo "IS_MAC=" >> $GITHUB_ENV 32 | - name: Get system version for Windows 33 | if: ${{ contains(matrix.os, 'windows') }} 34 | shell: bash 35 | run: | 36 | echo "OS_NAME=Windows" >> $GITHUB_ENV 37 | echo "IS_WIN=1" >> $GITHUB_ENV 38 | echo "IS_MAC=" >> $GITHUB_ENV 39 | - name: Get system version for MacOS 40 | if: ${{ contains(matrix.os, 'macos') }} 41 | shell: bash 42 | run: | 43 | echo "OS_NAME=MacOS" >> $GITHUB_ENV 44 | echo "IS_WIN=" >> $GITHUB_ENV 45 | echo "IS_MAC=1" >> $GITHUB_ENV 46 | - name: Set environment for Cpython 47 | if: ${{ !contains(matrix.python-version, 'pypy') }} 48 | shell: bash 49 | run: | 50 | echo "IS_PYPY=" >> $GITHUB_ENV 51 | - name: Set environment for PyPy 52 | if: ${{ contains(matrix.python-version, 'pypy') }} 53 | shell: bash 54 | run: | 55 | echo "IS_PYPY=1" >> $GITHUB_ENV 56 | - name: Checkout code 57 | uses: actions/checkout@v2 58 | with: 59 | fetch-depth: 20 60 | - name: Set up system dependences on Linux 61 | if: ${{ env.OS_NAME == 'Linux' }} 62 | shell: bash 63 | run: | 64 | sudo apt-get update 65 | sudo apt-get install -y tree cloc wget curl make zip 66 | - name: Set up system dependences on 
Windows 67 | if: ${{ env.OS_NAME == 'Windows' }} 68 | shell: bash 69 | run: | 70 | choco install tree cloc wget curl make zip 71 | - name: Set up system dependences on MacOS 72 | if: ${{ env.OS_NAME == 'MacOS' }} 73 | run: | 74 | brew install tree cloc wget curl make zip 75 | - name: Set up python ${{ matrix.python-version }} 76 | uses: actions/setup-python@v2 77 | with: 78 | python-version: ${{ matrix.python-version }} 79 | - name: Install dependencies 80 | shell: bash 81 | run: | 82 | python -m pip install --upgrade pip 83 | pip install --upgrade flake8 setuptools wheel twine 84 | pip install -r requirements.txt 85 | pip install -r requirements-test.txt 86 | - name: Test the basic environment 87 | shell: bash 88 | run: | 89 | python -V 90 | pip --version 91 | pip list 92 | tree . 93 | cloc ditk 94 | cloc test 95 | - name: Run unittest 96 | env: 97 | CI: 'true' 98 | CPU_COUNT: '6' 99 | shell: bash 100 | run: | 101 | make unittest IS_WIN=${{ env.IS_WIN }} IS_MAC=${{ env.IS_MAC }} 102 | - name: Upload coverage to Codecov 103 | uses: codecov/codecov-action@v1 104 | with: 105 | token: ${{ secrets.CODECOV_TOKEN }} 106 | file: ./coverage.xml 107 | flags: unittests 108 | name: codecov-umbrella 109 | fail_ci_if_error: false 110 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Created by .ignore support plugin (hsz.mobi) 2 | ### Example user template template 3 | ### Example user template 4 | 5 | # IntelliJ project files 6 | .idea 7 | *.iml 8 | out 9 | gen 10 | ### CVS template 11 | /CVS/* 12 | **/CVS/* 13 | .cvsignore 14 | */.cvsignore 15 | 16 | ### C++ template 17 | # Prerequisites 18 | *.d 19 | 20 | # Compiled Object files 21 | *.slo 22 | *.lo 23 | *.o 24 | *.obj 25 | 26 | # Precompiled Headers 27 | *.gch 28 | *.pch 29 | 30 | # Compiled Dynamic libraries 31 | *.so 32 | *.dylib 33 | *.dll 34 | 35 | # Fortran module files 36 | *.mod 37 | *.smod 38 | 39 | # Compiled Static libraries 40 | *.lai 41 | *.la 42 | *.a 43 | *.lib 44 | 45 | # Executables 46 | *.exe 47 | *.out 48 | *.app 49 | 50 | ### CUDA template 51 | *.i 52 | *.ii 53 | *.gpu 54 | *.ptx 55 | *.cubin 56 | *.fatbin 57 | 58 | ### SVN template 59 | .svn/ 60 | 61 | ### Images template 62 | # JPEG 63 | *.jpe 64 | *.jif 65 | *.jfif 66 | *.jfi 67 | 68 | # JPEG 2000 69 | *.jp2 70 | *.j2k 71 | *.jpf 72 | *.jpx 73 | *.jpm 74 | *.mj2 75 | 76 | # JPEG XR 77 | *.jxr 78 | *.hdp 79 | *.wdp 80 | 81 | # Graphics Interchange Format 82 | *.gif 83 | 84 | # RAW 85 | *.raw 86 | 87 | # Web P 88 | *.webp 89 | 90 | # Portable Network Graphics 91 | *.png 92 | 93 | # Animated Portable Network Graphics 94 | *.apng 95 | 96 | # Multiple-image Network Graphics 97 | *.mng 98 | 99 | # Tagged Image File Format 100 | *.tiff 101 | *.tif 102 | 103 | # Scalable Vector Graphics 104 | *.svg 105 | *.svgz 106 | 107 | # Portable Document Format 108 | *.pdf 109 | 110 | # X BitMap 111 | *.xbm 112 | 113 | # BMP 114 | *.bmp 115 | *.dib 116 | 117 | # ICO 118 | *.ico 119 | 120 | # 3D Images 121 | *.3dm 122 | *.max 123 | 124 | ### Eclipse template 125 | .metadata 126 | bin/ 127 | tmp/ 128 | *.tmp 129 | *.bak 130 | *.swp 131 | *~.nib 132 | local.properties 133 | .settings/ 134 | .loadpath 135 | .recommenders 136 | 137 | # External tool builders 138 | .externalToolBuilders/ 139 | 140 | # Locally stored "Eclipse launch configurations" 141 | *.launch 142 | 143 | # PyDev specific (Python IDE for Eclipse) 144 | *.pydevproject 145 | 146 | # CDT-specific (C/C++ 
Development Tooling) 147 | .cproject 148 | 149 | # CDT- autotools 150 | .autotools 151 | 152 | # Java annotation processor (APT) 153 | .factorypath 154 | 155 | # PDT-specific (PHP Development Tools) 156 | .buildpath 157 | 158 | # sbteclipse plugin 159 | .target 160 | 161 | # Tern plugin 162 | .tern-project 163 | 164 | # TeXlipse plugin 165 | .texlipse 166 | 167 | # STS (Spring Tool Suite) 168 | .springBeans 169 | 170 | # Code Recommenders 171 | .recommenders/ 172 | 173 | # Annotation Processing 174 | .apt_generated/ 175 | .apt_generated_test/ 176 | 177 | # Scala IDE specific (Scala & Java development for Eclipse) 178 | .cache-main 179 | .scala_dependencies 180 | .worksheet 181 | 182 | # Uncomment this line if you wish to ignore the project description file. 183 | # Typically, this file would be tracked if it contains build/dependency configurations: 184 | #.project 185 | 186 | ### Diff template 187 | *.patch 188 | *.diff 189 | 190 | ### macOS template 191 | # General 192 | .DS_Store 193 | .AppleDouble 194 | .LSOverride 195 | 196 | # Icon must end with two \r 197 | Icon 198 | 199 | # Thumbnails 200 | ._* 201 | 202 | # Files that might appear in the root of a volume 203 | .DocumentRevisions-V100 204 | .fseventsd 205 | .Spotlight-V100 206 | .TemporaryItems 207 | .Trashes 208 | .VolumeIcon.icns 209 | .com.apple.timemachine.donotpresent 210 | 211 | # Directories potentially created on remote AFP share 212 | .AppleDB 213 | .AppleDesktop 214 | Network Trash Folder 215 | Temporary Items 216 | .apdisk 217 | 218 | ### CMake template 219 | CMakeLists.txt.user 220 | CMakeCache.txt 221 | CMakeFiles 222 | CMakeScripts 223 | Testing 224 | cmake_install.cmake 225 | install_manifest.txt 226 | compile_commands.json 227 | CTestTestfile.cmake 228 | _deps 229 | 230 | ### Linux template 231 | *~ 232 | 233 | # temporary files which can be created if a process still has a handle open of a deleted file 234 | .fuse_hidden* 235 | 236 | # KDE directory preferences 237 | .directory 238 | 239 | # Linux trash folder which might appear on any partition or disk 240 | .Trash-* 241 | 242 | # .nfs files are created when an open file is removed but is still being accessed 243 | .nfs* 244 | 245 | ### MicrosoftOffice template 246 | *.tmp 247 | 248 | # Word temporary 249 | ~$*.doc* 250 | 251 | # Word Auto Backup File 252 | Backup of *.doc* 253 | 254 | # Excel temporary 255 | ~$*.xls* 256 | 257 | # Excel Backup File 258 | *.xlk 259 | 260 | # PowerPoint temporary 261 | ~$*.ppt* 262 | 263 | # Visio autosave temporary files 264 | *.~vsd* 265 | 266 | ### VisualStudio template 267 | ## Ignore Visual Studio temporary files, build results, and 268 | ## files generated by popular Visual Studio add-ons. 
269 | ## 270 | ## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore 271 | 272 | # User-specific files 273 | *.rsuser 274 | *.suo 275 | *.user 276 | *.userosscache 277 | *.sln.docstates 278 | 279 | # User-specific files (MonoDevelop/Xamarin Studio) 280 | *.userprefs 281 | 282 | # Mono auto generated files 283 | mono_crash.* 284 | 285 | # Build results 286 | [Dd]ebug/ 287 | [Dd]ebugPublic/ 288 | [Rr]elease/ 289 | [Rr]eleases/ 290 | x64/ 291 | x86/ 292 | [Ww][Ii][Nn]32/ 293 | [Aa][Rr][Mm]/ 294 | [Aa][Rr][Mm]64/ 295 | bld/ 296 | [Bb]in/ 297 | [Oo]bj/ 298 | [Ll]og/ 299 | [Ll]ogs/ 300 | 301 | # Visual Studio 2015/2017 cache/options directory 302 | .vs/ 303 | # Uncomment if you have tasks that create the project's static files in wwwroot 304 | #wwwroot/ 305 | 306 | # Visual Studio 2017 auto generated files 307 | Generated\ Files/ 308 | 309 | # MSTest test Results 310 | [Tt]est[Rr]esult*/ 311 | [Bb]uild[Ll]og.* 312 | 313 | # NUnit 314 | *.VisualState.xml 315 | TestResult.xml 316 | nunit-*.xml 317 | 318 | # Build Results of an ATL Project 319 | [Dd]ebugPS/ 320 | [Rr]eleasePS/ 321 | dlldata.c 322 | 323 | # Benchmark Results 324 | BenchmarkDotNet.Artifacts/ 325 | 326 | # .NET Core 327 | project.lock.json 328 | project.fragment.lock.json 329 | artifacts/ 330 | 331 | # ASP.NET Scaffolding 332 | ScaffoldingReadMe.txt 333 | 334 | # StyleCop 335 | StyleCopReport.xml 336 | 337 | # Files built by Visual Studio 338 | *_i.c 339 | *_p.c 340 | *_h.h 341 | *.ilk 342 | *.meta 343 | *.obj 344 | *.iobj 345 | *.pch 346 | *.pdb 347 | *.ipdb 348 | *.pgc 349 | *.pgd 350 | *.rsp 351 | *.sbr 352 | *.tlb 353 | *.tli 354 | *.tlh 355 | *.tmp 356 | *.tmp_proj 357 | *_wpftmp.csproj 358 | *.log 359 | *.vspscc 360 | *.vssscc 361 | .builds 362 | *.pidb 363 | *.svclog 364 | *.scc 365 | 366 | # Chutzpah Test files 367 | _Chutzpah* 368 | 369 | # Visual C++ cache files 370 | ipch/ 371 | *.aps 372 | *.ncb 373 | *.opendb 374 | *.opensdf 375 | *.sdf 376 | *.cachefile 377 | *.VC.db 378 | *.VC.VC.opendb 379 | 380 | # Visual Studio profiler 381 | *.psess 382 | *.vsp 383 | *.vspx 384 | *.sap 385 | 386 | # Visual Studio Trace Files 387 | *.e2e 388 | 389 | # TFS 2012 Local Workspace 390 | $tf/ 391 | 392 | # Guidance Automation Toolkit 393 | *.gpState 394 | 395 | # ReSharper is a .NET coding add-in 396 | _ReSharper*/ 397 | *.[Rr]e[Ss]harper 398 | *.DotSettings.user 399 | 400 | # TeamCity is a build add-in 401 | _TeamCity* 402 | 403 | # DotCover is a Code Coverage Tool 404 | *.dotCover 405 | 406 | # AxoCover is a Code Coverage Tool 407 | .axoCover/* 408 | !.axoCover/settings.json 409 | 410 | # Coverlet is a free, cross platform Code Coverage Tool 411 | coverage*.json 412 | coverage*.xml 413 | coverage*.info 414 | 415 | # Visual Studio code coverage results 416 | *.coverage 417 | *.coveragexml 418 | 419 | # NCrunch 420 | _NCrunch_* 421 | .*crunch*.local.xml 422 | nCrunchTemp_* 423 | 424 | # MightyMoose 425 | *.mm.* 426 | AutoTest.Net/ 427 | 428 | # Web workbench (sass) 429 | .sass-cache/ 430 | 431 | # Installshield output folder 432 | [Ee]xpress/ 433 | 434 | # DocProject is a documentation generator add-in 435 | DocProject/buildhelp/ 436 | DocProject/Help/*.HxT 437 | DocProject/Help/*.HxC 438 | DocProject/Help/*.hhc 439 | DocProject/Help/*.hhk 440 | DocProject/Help/*.hhp 441 | DocProject/Help/Html2 442 | DocProject/Help/html 443 | 444 | # Click-Once directory 445 | publish/ 446 | 447 | # Publish Web Output 448 | *.[Pp]ublish.xml 449 | *.azurePubxml 450 | # Note: Comment the next line if you want to checkin 
your web deploy settings, 451 | # but database connection strings (with potential passwords) will be unencrypted 452 | *.pubxml 453 | *.publishproj 454 | 455 | # Microsoft Azure Web App publish settings. Comment the next line if you want to 456 | # checkin your Azure Web App publish settings, but sensitive information contained 457 | # in these scripts will be unencrypted 458 | PublishScripts/ 459 | 460 | # NuGet Packages 461 | *.nupkg 462 | # NuGet Symbol Packages 463 | *.snupkg 464 | # The packages folder can be ignored because of Package Restore 465 | **/[Pp]ackages/* 466 | # except build/, which is used as an MSBuild target. 467 | !**/[Pp]ackages/build/ 468 | # Uncomment if necessary however generally it will be regenerated when needed 469 | #!**/[Pp]ackages/repositories.config 470 | # NuGet v3's project.json files produces more ignorable files 471 | *.nuget.props 472 | *.nuget.targets 473 | 474 | # Microsoft Azure Build Output 475 | csx/ 476 | *.build.csdef 477 | 478 | # Microsoft Azure Emulator 479 | ecf/ 480 | rcf/ 481 | 482 | # Windows Store app package directories and files 483 | AppPackages/ 484 | BundleArtifacts/ 485 | Package.StoreAssociation.xml 486 | _pkginfo.txt 487 | *.appx 488 | *.appxbundle 489 | *.appxupload 490 | 491 | # Visual Studio cache files 492 | # files ending in .cache can be ignored 493 | *.[Cc]ache 494 | # but keep track of directories ending in .cache 495 | !?*.[Cc]ache/ 496 | 497 | # Others 498 | ClientBin/ 499 | ~$* 500 | *~ 501 | *.dbmdl 502 | *.dbproj.schemaview 503 | *.jfm 504 | *.pfx 505 | *.publishsettings 506 | orleans.codegen.cs 507 | 508 | # Including strong name files can present a security risk 509 | # (https://github.com/github/gitignore/pull/2483#issue-259490424) 510 | #*.snk 511 | 512 | # Since there are multiple workflows, uncomment next line to ignore bower_components 513 | # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622) 514 | #bower_components/ 515 | 516 | # RIA/Silverlight projects 517 | Generated_Code/ 518 | 519 | # Backup & report files from converting an old project file 520 | # to a newer Visual Studio version. Backup files are not needed, 521 | # because we have git ;-) 522 | _UpgradeReport_Files/ 523 | Backup*/ 524 | UpgradeLog*.XML 525 | UpgradeLog*.htm 526 | ServiceFabricBackup/ 527 | *.rptproj.bak 528 | 529 | # SQL Server files 530 | *.mdf 531 | *.ldf 532 | *.ndf 533 | 534 | # Business Intelligence projects 535 | *.rdl.data 536 | *.bim.layout 537 | *.bim_*.settings 538 | *.rptproj.rsuser 539 | *- [Bb]ackup.rdl 540 | *- [Bb]ackup ([0-9]).rdl 541 | *- [Bb]ackup ([0-9][0-9]).rdl 542 | 543 | # Microsoft Fakes 544 | FakesAssemblies/ 545 | 546 | # GhostDoc plugin setting file 547 | *.GhostDoc.xml 548 | 549 | # Node.js Tools for Visual Studio 550 | .ntvs_analysis.dat 551 | node_modules/ 552 | 553 | # Visual Studio 6 build log 554 | *.plg 555 | 556 | # Visual Studio 6 workspace options file 557 | *.opt 558 | 559 | # Visual Studio 6 auto-generated workspace file (contains which files were open etc.) 
560 | *.vbw 561 | 562 | # Visual Studio LightSwitch build output 563 | **/*.HTMLClient/GeneratedArtifacts 564 | **/*.DesktopClient/GeneratedArtifacts 565 | **/*.DesktopClient/ModelManifest.xml 566 | **/*.Server/GeneratedArtifacts 567 | **/*.Server/ModelManifest.xml 568 | _Pvt_Extensions 569 | 570 | # Paket dependency manager 571 | .paket/paket.exe 572 | paket-files/ 573 | 574 | # FAKE - F# Make 575 | .fake/ 576 | 577 | # CodeRush personal settings 578 | .cr/personal 579 | 580 | # Python Tools for Visual Studio (PTVS) 581 | __pycache__/ 582 | *.pyc 583 | 584 | # Cake - Uncomment if you are using it 585 | # tools/** 586 | # !tools/packages.config 587 | 588 | # Tabs Studio 589 | *.tss 590 | 591 | # Telerik's JustMock configuration file 592 | *.jmconfig 593 | 594 | # BizTalk build output 595 | *.btp.cs 596 | *.btm.cs 597 | *.odx.cs 598 | *.xsd.cs 599 | 600 | # OpenCover UI analysis results 601 | OpenCover/ 602 | 603 | # Azure Stream Analytics local run output 604 | ASALocalRun/ 605 | 606 | # MSBuild Binary and Structured Log 607 | *.binlog 608 | 609 | # NVidia Nsight GPU debugger configuration file 610 | *.nvuser 611 | 612 | # MFractors (Xamarin productivity tool) working folder 613 | .mfractor/ 614 | 615 | # Local History for Visual Studio 616 | .localhistory/ 617 | 618 | # BeatPulse healthcheck temp database 619 | healthchecksdb 620 | 621 | # Backup folder for Package Reference Convert tool in Visual Studio 2017 622 | MigrationBackup/ 623 | 624 | # Ionide (cross platform F# VS Code tools) working folder 625 | .ionide/ 626 | 627 | # Fody - auto-generated XML schema 628 | FodyWeavers.xsd 629 | 630 | ### Vim template 631 | # Swap 632 | [._]*.s[a-v][a-z] 633 | !*.svg # comment out if you don't need vector files 634 | [._]*.sw[a-p] 635 | [._]s[a-rt-v][a-z] 636 | [._]ss[a-gi-z] 637 | [._]sw[a-p] 638 | 639 | # Session 640 | Session.vim 641 | Sessionx.vim 642 | 643 | # Temporary 644 | .netrwhist 645 | *~ 646 | # Auto-generated tag files 647 | tags 648 | # Persistent undo 649 | [._]*.un~ 650 | 651 | ### Backup template 652 | *.bak 653 | *.gho 654 | *.ori 655 | *.orig 656 | *.tmp 657 | 658 | ### SublimeText template 659 | # Cache files for Sublime Text 660 | *.tmlanguage.cache 661 | *.tmPreferences.cache 662 | *.stTheme.cache 663 | 664 | # Workspace files are user-specific 665 | *.sublime-workspace 666 | 667 | # Project files should be checked into the repository, unless a significant 668 | # proportion of contributors will probably not be using Sublime Text 669 | # *.sublime-project 670 | 671 | # SFTP configuration file 672 | sftp-config.json 673 | sftp-config-alt*.json 674 | 675 | # Package control specific files 676 | Package Control.last-run 677 | Package Control.ca-list 678 | Package Control.ca-bundle 679 | Package Control.system-ca-bundle 680 | Package Control.cache/ 681 | Package Control.ca-certs/ 682 | Package Control.merged-ca-bundle 683 | Package Control.user-ca-bundle 684 | oscrypto-ca-bundle.crt 685 | bh_unicode_properties.cache 686 | 687 | # Sublime-github package stores a github token in this file 688 | # https://packagecontrol.io/packages/sublime-github 689 | GitHub.sublime-settings 690 | 691 | ### C template 692 | # Prerequisites 693 | *.d 694 | 695 | # Object files 696 | *.o 697 | *.ko 698 | *.obj 699 | *.elf 700 | 701 | # Linker output 702 | *.ilk 703 | *.map 704 | *.exp 705 | 706 | # Precompiled Headers 707 | *.gch 708 | *.pch 709 | 710 | # Libraries 711 | *.lib 712 | *.a 713 | *.la 714 | *.lo 715 | 716 | # Shared objects (inc. 
Windows DLLs) 717 | *.dll 718 | *.so 719 | *.so.* 720 | *.dylib 721 | 722 | # Executables 723 | *.exe 724 | *.out 725 | *.app 726 | *.i*86 727 | *.x86_64 728 | *.hex 729 | 730 | # Debug files 731 | *.dSYM/ 732 | *.su 733 | *.idb 734 | *.pdb 735 | 736 | # Kernel Module Compile Results 737 | *.mod* 738 | *.cmd 739 | .tmp_versions/ 740 | modules.order 741 | Module.symvers 742 | Mkfile.old 743 | dkms.conf 744 | 745 | ### VirtualEnv template 746 | # Virtualenv 747 | # http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ 748 | .Python 749 | [Bb]in 750 | [Ii]nclude 751 | [Ll]ib 752 | [Ll]ib64 753 | [Ll]ocal 754 | [Ss]cripts 755 | pyvenv.cfg 756 | .venv 757 | pip-selfcheck.json 758 | 759 | ### LibreOffice template 760 | # LibreOffice locks 761 | .~lock.*# 762 | 763 | ### Xilinx template 764 | # gitignore template for Xilinx Vivado Design Suite 765 | # website: https://www.xilinx.com/support/download.html 766 | 767 | # [home] 768 | *.jou 769 | *.log 770 | *.debug 771 | *.str 772 | *.zip 773 | *.tmp 774 | *.os 775 | *.js 776 | *.pb 777 | *.dcp 778 | *.hwdef 779 | *.vds 780 | *.veo 781 | *.wdf 782 | *.vdi 783 | *.dmp 784 | *.rpx 785 | *.rpt 786 | *_stub.v 787 | *_stub.vhdl 788 | *_funcsim.v 789 | *_funcsim.vhdl 790 | .project 791 | 792 | # [dir] 793 | *.cache 794 | .metadata 795 | *.data 796 | *.ipdefs 797 | .Xil 798 | *.sdk 799 | *.hw 800 | *.ip_user_files 801 | 802 | ### IP synth 803 | *_synth_* 804 | 805 | .jobs 806 | 807 | ### project synth 808 | */*.runs/synth*/*.xml 809 | */*.runs/synth*/*.txt 810 | */*.runs/synth*/*.sh 811 | */*.runs/synth*/*.tcl 812 | */*.runs/synth*/*.bat 813 | */*.runs/synth*/*.xdc 814 | !*/*.runs/synth*/*utilization*.rpt 815 | 816 | *.runs/synth*/*.xml 817 | *.runs/synth*/*.txt 818 | *.runs/synth*/*.sh 819 | *.runs/synth*/*.tcl 820 | *.runs/synth*/*.bat 821 | *.runs/synth*/*.xdc 822 | !*.runs/synth*/*utilization*.rpt 823 | 824 | ### project impl 825 | */*.runs/impl*/*.xml 826 | */*.runs/impl*/*.html 827 | */*.runs/impl*/*.txt 828 | */*.runs/impl*/*.sh 829 | */*.runs/impl*/*.tcl 830 | */*.runs/impl*/*.bat 831 | !*/*.runs/impl*/*utilization*.rpt 832 | 833 | *.runs/impl*/*.xml 834 | *.runs/impl*/*.html 835 | *.runs/impl*/*.txt 836 | *.runs/impl*/*.sh 837 | *.runs/impl*/*.tcl 838 | *.runs/impl*/*.bat 839 | !*.runs/impl*/*utilization*.rpt 840 | 841 | ### block design 842 | */*/bd/*/hdl 843 | */*/*/bd/*/hdl 844 | 845 | */*/bd/*/*.xdc 846 | */*/*/bd/*/*.xdc 847 | 848 | */*/bd/*/ip/*/*.xdc 849 | */*/*/bd/*/ip/*/*.xdc 850 | 851 | */*/bd/*/ip/*/*/ 852 | */*/*/bd/*/ip/*/*/ 853 | 854 | */*/bd/*/ip/*/*.vhd 855 | */*/*/bd/*/ip/*/*.vhd 856 | 857 | */*/bd/*/ip/*/*.xml 858 | */*/*/bd/*/ip/*/*.xml 859 | 860 | *.c 861 | *.cpp 862 | *.h 863 | *.vho 864 | */*/bd/*/ip/*/*.tcl 865 | */*/*/bd/*/ip/*/*.tcl 866 | hw_handoff 867 | ipshared 868 | 869 | ### Lua template 870 | # Compiled Lua sources 871 | luac.out 872 | 873 | # luarocks build files 874 | *.src.rock 875 | *.zip 876 | *.tar.gz 877 | 878 | # Object files 879 | *.o 880 | *.os 881 | *.ko 882 | *.obj 883 | *.elf 884 | 885 | # Precompiled Headers 886 | *.gch 887 | *.pch 888 | 889 | # Libraries 890 | *.lib 891 | *.a 892 | *.la 893 | *.lo 894 | *.def 895 | *.exp 896 | 897 | # Shared objects (inc. 
Windows DLLs) 898 | *.dll 899 | *.so 900 | *.so.* 901 | *.dylib 902 | 903 | # Executables 904 | *.exe 905 | *.out 906 | *.app 907 | *.i*86 908 | *.x86_64 909 | *.hex 910 | 911 | 912 | ### Vagrant template 913 | # General 914 | .vagrant/ 915 | 916 | # Log files (if you are creating logs in debug mode, uncomment this) 917 | # *.log 918 | 919 | ### PuTTY template 920 | # Private key 921 | *.ppk 922 | 923 | ### Patch template 924 | *.orig 925 | *.rej 926 | 927 | ### Python template 928 | # Byte-compiled / optimized / DLL files 929 | __pycache__/ 930 | *.py[cod] 931 | *$py.class 932 | 933 | # C extensions 934 | *.so 935 | 936 | # Distribution / packaging 937 | .Python 938 | build/ 939 | develop-eggs/ 940 | dist/ 941 | wheelhouse/ 942 | downloads/ 943 | eggs/ 944 | .eggs/ 945 | lib/ 946 | lib64/ 947 | parts/ 948 | sdist/ 949 | var/ 950 | wheels/ 951 | share/python-wheels/ 952 | *.egg-info/ 953 | .installed.cfg 954 | *.egg 955 | MANIFEST 956 | 957 | # PyInstaller 958 | # Usually these files are written by a python script from a template 959 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 960 | *.manifest 961 | *.spec 962 | 963 | # Installer logs 964 | pip-log.txt 965 | pip-delete-this-directory.txt 966 | 967 | # Unit test / coverage reports 968 | htmlcov/ 969 | .tox/ 970 | .nox/ 971 | .coverage 972 | .coverage.* 973 | .cache 974 | nosetests.xml 975 | coverage.xml 976 | *.cover 977 | *.py,cover 978 | .hypothesis/ 979 | .pytest_cache/ 980 | cover/ 981 | 982 | # Translations 983 | *.mo 984 | *.pot 985 | 986 | # Django stuff: 987 | *.log 988 | local_settings.py 989 | db.sqlite3 990 | db.sqlite3-journal 991 | 992 | # Flask stuff: 993 | instance/ 994 | .webassets-cache 995 | 996 | # Scrapy stuff: 997 | .scrapy 998 | 999 | # Sphinx documentation 1000 | docs/_build/ 1001 | 1002 | # PyBuilder 1003 | .pybuilder/ 1004 | target/ 1005 | 1006 | # Jupyter Notebook 1007 | .ipynb_checkpoints 1008 | 1009 | # IPython 1010 | profile_default/ 1011 | ipython_config.py 1012 | 1013 | # pyenv 1014 | # For a library or package, you might want to ignore these files since the code is 1015 | # intended to run in multiple environments; otherwise, check them in: 1016 | # .python-version 1017 | 1018 | # pipenv 1019 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 1020 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 1021 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 1022 | # install all needed dependencies. 1023 | #Pipfile.lock 1024 | 1025 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 1026 | __pypackages__/ 1027 | 1028 | # Celery stuff 1029 | celerybeat-schedule 1030 | celerybeat.pid 1031 | 1032 | # SageMath parsed files 1033 | *.sage.py 1034 | 1035 | # Environments 1036 | .env 1037 | .venv 1038 | env/ 1039 | venv/ 1040 | ENV/ 1041 | env.bak/ 1042 | venv.bak/ 1043 | 1044 | # Spyder project settings 1045 | .spyderproject 1046 | .spyproject 1047 | 1048 | # Rope project settings 1049 | .ropeproject 1050 | 1051 | # mkdocs documentation 1052 | /site 1053 | 1054 | # mypy 1055 | .mypy_cache/ 1056 | .dmypy.json 1057 | dmypy.json 1058 | 1059 | # Pyre type checker 1060 | .pyre/ 1061 | 1062 | # pytype static type analyzer 1063 | .pytype/ 1064 | 1065 | # Cython debug symbols 1066 | cython_debug/ 1067 | 1068 | ### Windows template 1069 | # Windows thumbnail cache files 1070 | Thumbs.db 1071 | Thumbs.db:encryptable 1072 | ehthumbs.db 1073 | ehthumbs_vista.db 1074 | 1075 | # Dump file 1076 | *.stackdump 1077 | 1078 | # Folder config file 1079 | [Dd]esktop.ini 1080 | 1081 | # Recycle Bin used on file shares 1082 | $RECYCLE.BIN/ 1083 | 1084 | # Windows Installer files 1085 | *.cab 1086 | *.msi 1087 | *.msix 1088 | *.msm 1089 | *.msp 1090 | 1091 | # Windows shortcuts 1092 | *.lnk 1093 | 1094 | ### Xcode template 1095 | # Xcode 1096 | # 1097 | # gitignore contributors: remember to update Global/Xcode.gitignore, Objective-C.gitignore & Swift.gitignore 1098 | 1099 | ## User settings 1100 | xcuserdata/ 1101 | 1102 | ## compatibility with Xcode 8 and earlier (ignoring not required starting Xcode 9) 1103 | *.xcscmblueprint 1104 | *.xccheckout 1105 | 1106 | ## compatibility with Xcode 3 and earlier (ignoring not required starting Xcode 4) 1107 | build/ 1108 | DerivedData/ 1109 | *.moved-aside 1110 | *.pbxuser 1111 | !default.pbxuser 1112 | *.mode1v3 1113 | !default.mode1v3 1114 | *.mode2v3 1115 | !default.mode2v3 1116 | *.perspectivev3 1117 | !default.perspectivev3 1118 | 1119 | ## Gcc Patch 1120 | /*.gcno 1121 | 1122 | ### JetBrains template 1123 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 1124 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 1125 | 1126 | # User-specific stuff 1127 | .idea/**/workspace.xml 1128 | .idea/**/tasks.xml 1129 | .idea/**/usage.statistics.xml 1130 | .idea/**/dictionaries 1131 | .idea/**/shelf 1132 | 1133 | # Generated files 1134 | .idea/**/contentModel.xml 1135 | 1136 | # Sensitive or high-churn files 1137 | .idea/**/dataSources/ 1138 | .idea/**/dataSources.ids 1139 | .idea/**/dataSources.local.xml 1140 | .idea/**/sqlDataSources.xml 1141 | .idea/**/dynamic.xml 1142 | .idea/**/uiDesigner.xml 1143 | .idea/**/dbnavigator.xml 1144 | 1145 | # Gradle 1146 | .idea/**/gradle.xml 1147 | .idea/**/libraries 1148 | 1149 | # Gradle and Maven with auto-import 1150 | # When using Gradle or Maven with auto-import, you should exclude module files, 1151 | # since they will be recreated, and may cause churn. Uncomment if using 1152 | # auto-import. 
1153 | # .idea/artifacts 1154 | # .idea/compiler.xml 1155 | # .idea/jarRepositories.xml 1156 | # .idea/modules.xml 1157 | # .idea/*.iml 1158 | # .idea/modules 1159 | # *.iml 1160 | # *.ipr 1161 | 1162 | # CMake 1163 | cmake-build-*/ 1164 | 1165 | # Mongo Explorer plugin 1166 | .idea/**/mongoSettings.xml 1167 | 1168 | # File-based project format 1169 | *.iws 1170 | 1171 | # IntelliJ 1172 | out/ 1173 | 1174 | # mpeltonen/sbt-idea plugin 1175 | .idea_modules/ 1176 | 1177 | # JIRA plugin 1178 | atlassian-ide-plugin.xml 1179 | 1180 | # Cursive Clojure plugin 1181 | .idea/replstate.xml 1182 | 1183 | # Crashlytics plugin (for Android Studio and IntelliJ) 1184 | com_crashlytics_export_strings.xml 1185 | crashlytics.properties 1186 | crashlytics-build.properties 1187 | fabric.properties 1188 | 1189 | # Editor-based Rest Client 1190 | .idea/httpRequests 1191 | 1192 | # Android studio 3.1+ serialized cache file 1193 | .idea/caches/build_file_checksums.ser 1194 | 1195 | /test_* 1196 | .python-version 1197 | /docs/build 1198 | /public 1199 | /docs/source/**/*.puml.svg 1200 | /docs/source/**/*.puml.png 1201 | /docs/source/**/*.gv.svg 1202 | /docs/source/**/*.gv.png 1203 | /docs/source/**/*.py.txt 1204 | /docs/source/**/*.py.err 1205 | /docs/source/**/*.py.exitcode 1206 | /docs/source/**/*.sh.txt 1207 | /docs/source/**/*.sh.err 1208 | /docs/source/**/*.sh.exitcode 1209 | /docs/source/**/*.dat.* 1210 | !/docs/source/_static/**/* 1211 | /cartpole_dqn_* 1212 | !/ditk/doc/**/*.png 1213 | !/test/testfile/**/* 1214 | !/assets/**/* -------------------------------------------------------------------------------- /.style.yapf: -------------------------------------------------------------------------------- 1 | [style] 2 | # For explanation and more information: https://github.com/google/yapf 3 | BASED_ON_STYLE=pep8 4 | DEDENT_CLOSING_BRACKETS=True 5 | SPLIT_BEFORE_FIRST_ARGUMENT=True 6 | ALLOW_SPLIT_BEFORE_DICT_VALUE=False 7 | JOIN_MULTIPLE_LINES=False 8 | COLUMN_LIMIT=120 9 | BLANK_LINE_BEFORE_NESTED_CLASS_OR_DEF=True 10 | BLANK_LINES_AROUND_TOP_LEVEL_DEFINITION=2 11 | SPACES_AROUND_POWER_OPERATOR=True 12 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing Guide 2 | 3 | Guide content is still under development, will be completed soon afterwards. 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 
22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. 
If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. 
Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 
202 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include MANIFEST.in 3 | include requirements.txt 4 | include requirements-*.txt 5 | recursive-include ditk *.pyx *.pxd *.css *.js 6 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: docs test unittest 2 | 3 | PYTHON := $(shell which python) 4 | 5 | PROJ_DIR := . 6 | DOC_DIR := ${PROJ_DIR}/docs 7 | BUILD_DIR := ${PROJ_DIR}/build 8 | DIST_DIR := ${PROJ_DIR}/dist 9 | TEST_DIR := ${PROJ_DIR}/test 10 | SRC_DIR := ${PROJ_DIR}/ditk 11 | 12 | RANGE_DIR ?= . 13 | RANGE_TEST_DIR := ${TEST_DIR}/${RANGE_DIR} 14 | RANGE_SRC_DIR := ${SRC_DIR}/${RANGE_DIR} 15 | 16 | COV_TYPES ?= xml term-missing 17 | 18 | package: 19 | $(PYTHON) -m build --sdist --wheel --outdir ${DIST_DIR} 20 | clean: 21 | rm -rf ${DIST_DIR} ${BUILD_DIR} *.egg-info 22 | 23 | test: unittest 24 | 25 | unittest: 26 | pytest "${RANGE_TEST_DIR}" \ 27 | -sv -m unittest \ 28 | $(shell for type in ${COV_TYPES}; do echo "--cov-report=$$type"; done) \ 29 | --cov="${RANGE_SRC_DIR}" \ 30 | $(if ${MIN_COVERAGE},--cov-fail-under=${MIN_COVERAGE},) \ 31 | $(if ${WORKERS},-n ${WORKERS},) 32 | 33 | docs: 34 | $(MAKE) -C "${DOC_DIR}" build 35 | pdocs: 36 | $(MAKE) -C "${DOC_DIR}" prod 37 | 38 | format: 39 | yapf --in-place --recursive -p --verbose --style .style.yapf ${RANGE_SRC_DIR} 40 | yapf --in-place --recursive -p --verbose --style .style.yapf ${RANGE_TEST_DIR} 41 | format_test: 42 | bash format.sh ${RANGE_SRC_DIR} --test 43 | bash format.sh ${RANGE_TEST_DIR} --test 44 | flake_check: 45 | flake8 ${RANGE_SRC_DIR} 46 | flake8 ${RANGE_TEST_DIR} 47 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # DI-toolkit 2 | 3 | [![PyPI](https://img.shields.io/pypi/v/DI-toolkit)](https://pypi.org/project/DI-toolkit/) 4 | ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/DI-toolkit) 5 | ![Loc](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/HansBug/82e5c38227081da9d25e729e5bd3b5b8/raw/loc.json) 6 | ![Comments](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/HansBug/82e5c38227081da9d25e729e5bd3b5b8/raw/comments.json) 7 | 8 | [![Docs Deploy](https://github.com/opendilab/DI-toolkit/workflows/Docs%20Deploy/badge.svg)](https://github.com/opendilab/DI-toolkit/actions?query=workflow%3A%22Docs+Deploy%22) 9 | [![Code Test](https://github.com/opendilab/DI-toolkit/workflows/Code%20Test/badge.svg)](https://github.com/opendilab/DI-toolkit/actions?query=workflow%3A%22Code+Test%22) 10 | [![Badge Creation](https://github.com/opendilab/DI-toolkit/workflows/Badge%20Creation/badge.svg)](https://github.com/opendilab/DI-toolkit/actions?query=workflow%3A%22Badge+Creation%22) 11 | [![Package Release](https://github.com/opendilab/DI-toolkit/workflows/Package%20Release/badge.svg)](https://github.com/opendilab/DI-toolkit/actions?query=workflow%3A%22Package+Release%22) 12 | [![codecov](https://codecov.io/gh/opendilab/DI-toolkit/branch/main/graph/badge.svg?token=XJVDP4EFAT)](https://codecov.io/gh/opendilab/DI-toolkit) 13 | 14 | [![GitHub stars](https://img.shields.io/github/stars/opendilab/DI-toolkit)](https://github.com/opendilab/DI-toolkit/stargazers) 15 | [![GitHub 
forks](https://img.shields.io/github/forks/opendilab/DI-toolkit)](https://github.com/opendilab/DI-toolkit/network) 16 | ![GitHub commit activity](https://img.shields.io/github/commit-activity/m/opendilab/DI-toolkit) 17 | [![GitHub issues](https://img.shields.io/github/issues/opendilab/DI-toolkit)](https://github.com/opendilab/DI-toolkit/issues) 18 | [![GitHub pulls](https://img.shields.io/github/issues-pr/opendilab/DI-toolkit)](https://github.com/opendilab/DI-toolkit/pulls) 19 | [![Contributors](https://img.shields.io/github/contributors/opendilab/DI-toolkit)](https://github.com/opendilab/DI-toolkit/graphs/contributors) 20 | [![GitHub license](https://img.shields.io/github/license/opendilab/DI-toolkit)](https://github.com/opendilab/DI-toolkit/blob/master/LICENSE) 21 | 22 | A simple toolkit package for opendilab, including the following utilities: 23 | 24 | - `ditk.logging`, an easy-to-use logging system 25 | - `ditk.doc.annotated`, an annotated documentation generation script 26 | - `ditk.tensorboard`, a utility for extracting data from tensorboard log files 27 | - `ditk.tensorboard.plots`, utilities for plotting data extracted from tensorboard log files 28 | 29 | ## Installation 30 | 31 | You can simply install it with `pip` from the official PyPI site. 32 | 33 | ```shell 34 | pip install DI-toolkit 35 | ``` 36 | 37 | Or install it from the latest source code as follows: 38 | 39 | ```shell 40 | git clone https://github.com/opendilab/DI-toolkit.git 41 | cd DI-toolkit 42 | pip install . --user 43 | ``` 44 | 45 | ## Quick Start 46 | 47 | ### Example of ditk.logging 48 | 49 | Here is an example of logging. 50 | 51 | ```python 52 | from ditk import logging 53 | 54 | if __name__ == '__main__': 55 | logging.try_init_root(logging.INFO) 56 | logging.info('This is info') 57 | logging.warning('This is a warning with integer 233') 58 | logging.error('This is an error with string \'233\'.') 59 | 60 | try: 61 | _ = 1 / 0 62 | except ZeroDivisionError as err: 63 | logging.exception(err) 64 | 65 | ``` 66 | 67 | `ditk.logging` has almost the same interface as the native `logging` module. You can directly replace `import logging` in the 68 | code with `from ditk import logging`. 69 | 70 | ### ditk.doc.annotated 71 | 72 | A script that generates annotated documentation from Python source code, like the following: 73 | 74 | ![](./assets/ditk_doc_annotated_demo.png) 75 | 76 | #### Usage 77 | 78 | ```shell 79 | python -m ditk.doc.annotated create -i ditk/doc/annotated/ppo.py -o my_doc/index.html -L zh 80 | ``` 81 | 82 | You will get: 83 | 84 | ```text 85 | my_doc 86 | ├── assets 87 | │ ├── pylit.css 88 | │ └── solarized.css 89 | └── index.html 90 | ``` 91 | 92 | #### Help Information 93 | 94 | * `python -m ditk.doc.annotated --help` 95 | 96 | ```text 97 | Usage: python -m ditk.doc.annotated [OPTIONS] COMMAND [ARGS]... 98 | 99 | Utils for creating annotation documentation. 100 | 101 | Options: 102 | -v, --version Show version information. 103 | -h, --help Show this message and exit. 104 | 105 | Commands: 106 | create Utils for creating annotation documentation from local code. 107 | ``` 108 | 109 | * `python -m ditk.doc.annotated create --help` 110 | 111 | ```text 112 | Usage: python -m ditk.doc.annotated create [OPTIONS] 113 | 114 | Utils for creating annotation documentation from local code. 115 | 116 | Options: 117 | -i, --input_file FILE Input source code. [required] 118 | -o, --output_file FILE Output annotated documentation code. [required] 119 | -A, --assets_dir DIRECTORY Directory for assets file of this documentation. 
120 | -L, --language [zh|en] Language for documentation. [default: en] 121 | -T, --title TEXT Title of the documentation. [default: ] 123 | -h, --help Show this message and exit. 124 | ``` 125 | 126 | #### Related Library 127 | 128 | - [KaTeX](https://github.com/KaTeX/KaTeX) 129 | - [codemirror5](https://github.com/codemirror/codemirror5) 130 | - [yattag](https://www.yattag.org/) 131 | 132 | ### Create Multi-Seed Multi-Algorithm Benchmark Plots 133 | 134 | ```python 135 | import matplotlib.pyplot as plt 136 | import seaborn as sns 137 | 138 | from ditk.tensorboard.plots import tb_create_range_plots 139 | 140 | sns.set() 141 | 142 | tb_create_range_plots( 143 | 'test/testfile/pong_tb', # directory of tensorboard log 144 | xname='step', 145 | yname='evaluator_step/reward_mean', 146 | ) 147 | 148 | plt.show() 149 | ``` 150 | 151 | ![tb_create_range_plots](docs/source/_static/tb_create_range_plots.svg) 152 | 153 | ## Contributing 154 | 155 | We appreciate all contributions to improve `DI-toolkit`, covering both its logic and system design. Please refer to CONTRIBUTING.md 156 | for more guidance. 157 | 158 | ## License 159 | 160 | `DI-toolkit` is released under the Apache 2.0 license. 161 | -------------------------------------------------------------------------------- /assets/ditk_doc_annotated_demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/assets/ditk_doc_annotated_demo.png -------------------------------------------------------------------------------- /cloc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script counts the lines of code and comments in all source files 4 | # and prints the results to the command line. It uses the command-line tool 5 | # "cloc". You can either pass --loc, --comments or --percentage to show the 6 | # respective values only. 7 | # Some parts below need to be adapted to your project! 8 | 9 | # Get the location of this script. 10 | SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" 11 | 12 | # Run cloc - this counts code lines, blank lines and comment lines 13 | # for the specified languages. You will need to change this accordingly. 14 | # For C++, you could use "C++,C/C++ Header" for example. 15 | # We are only interested in the summary, therefore the tail -1 16 | SUMMARY="$(cloc "${SCRIPT_DIR}" --include-lang="Python" --md | tail -1)" 17 | 18 | # The $SUMMARY is one line of a markdown table and looks like this: 19 | # SUM:|101|3123|2238|10783 20 | # We use the following command to split it into an array. 21 | IFS='|' read -r -a TOKENS <<<"$SUMMARY" 22 | 23 | # Store the individual tokens for better readability. 24 | NUMBER_OF_FILES=${TOKENS[1]} 25 | COMMENT_LINES=${TOKENS[3]} 26 | LINES_OF_CODE=${TOKENS[4]} 27 | 28 | # To make the estimate of commented lines more accurate, we have to 29 | # subtract any copyright header which is included in each file. 30 | # For Fly-Pie, this header has the length of five lines. 31 | # All dumb comments like those /////////// or those // ------------ 32 | # are also subtracted. As cloc does not count inline comments, 33 | # the overall estimate should be rather conservative. 34 | # Change the lines below according to your project. 35 | # DUMB_COMMENTS="$(grep -r -E '//////|// -----' "${SCRIPT_DIR}" | wc -l)" 36 | # COMMENT_LINES=$(($COMMENT_LINES - 5 * $NUMBER_OF_FILES - $DUMB_COMMENTS)) 37 | 38 | # Print all results if no arguments are given. 
153 | ## Contributing 154 | 155 | We appreciate all contributions to improve `DI-toolkit`, in both logic and system design. Please refer to CONTRIBUTING.md 156 | for more guidance. 157 | 158 | ## License 159 | 160 | `DI-toolkit` is released under the Apache 2.0 license. 161 | -------------------------------------------------------------------------------- /assets/ditk_doc_annotated_demo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/assets/ditk_doc_annotated_demo.png -------------------------------------------------------------------------------- /cloc.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # This script counts the lines of code and comments in all source files 4 | # and prints the results to the command line. It uses the commandline tool 5 | # "cloc". You can either pass --loc, --comments or --percentage to show the 6 | # respective values only. 7 | # Some parts below need to be adapted to your project! 8 | 9 | # Get the location of this script. 10 | SCRIPT_DIR="$(cd "$(dirname "$0")" && pwd)" 11 | 12 | # Run cloc - this counts code lines, blank lines and comment lines 13 | # for the specified languages. You will need to change this accordingly. 14 | # For C++, you could use "C++,C/C++ Header" for example. 15 | # We are only interested in the summary, therefore the tail -1 16 | SUMMARY="$(cloc "${SCRIPT_DIR}" --include-lang="Python" --md | tail -1)" 17 | 18 | # The $SUMMARY is one line of a markdown table and looks like this: 19 | # SUM:|101|3123|2238|10783 20 | # We use the following command to split it into an array. 21 | IFS='|' read -r -a TOKENS <<<"$SUMMARY" 22 | 23 | # Store the individual tokens for better readability. 24 | NUMBER_OF_FILES=${TOKENS[1]} 25 | COMMENT_LINES=${TOKENS[3]} 26 | LINES_OF_CODE=${TOKENS[4]} 27 | 28 | # To make the estimate of commented lines more accurate, we have to 29 | # subtract any copyright header which is included in each file. 30 | # For Fly-Pie, this header has the length of five lines. 31 | # All dumb comments like those /////////// or those // ------------ 32 | # are also subtracted. As cloc does not count inline comments, 33 | # the overall estimate should be rather conservative. 34 | # Change the lines below according to your project. 35 | # DUMB_COMMENTS="$(grep -r -E '//////|// -----' "${SCRIPT_DIR}" | wc -l)" 36 | # COMMENT_LINES=$(($COMMENT_LINES - 5 * $NUMBER_OF_FILES - $DUMB_COMMENTS)) 37 | 38 | # Print all results if no arguments are given. 39 | if [[ $# -eq 0 ]]; then 40 | awk -v a=$LINES_OF_CODE \ 41 | 'BEGIN {printf "Lines of source code: %6.1fk\n", a/1000}' 42 | awk -v a=$COMMENT_LINES \ 43 | 'BEGIN {printf "Lines of comments: %6.1fk\n", a/1000}' 44 | awk -v a=$COMMENT_LINES -v b=$LINES_OF_CODE \ 45 | 'BEGIN {printf "Comment Percentage: %6.1f%\n", 100*a/b}' 46 | exit 0 47 | fi 48 | 49 | # Show lines of code if --loc is given. 50 | if [[ $* == *--loc* ]]; then 51 | awk -v a=$LINES_OF_CODE \ 52 | 'BEGIN {printf "%.1fk\n", a/1000}' 53 | fi 54 | 55 | # Show lines of comments if --comments is given. 56 | if [[ $* == *--comments* ]]; then 57 | awk -v a=$COMMENT_LINES \ 58 | 'BEGIN {printf "%.1fk\n", a/1000}' 59 | fi 60 | 61 | # Show percentage of comments if --percentage is given. 62 | if [[ $* == *--percentage* ]]; then 63 | awk -v a=$COMMENT_LINES -v b=$LINES_OF_CODE \ 64 | 'BEGIN {printf "%.1f\n", 100*a/b}' 65 | fi 66 | -------------------------------------------------------------------------------- /codecov.yml: -------------------------------------------------------------------------------- 1 | coverage: 2 | status: 3 | project: 4 | default: 5 | # basic 6 | target: auto 7 | threshold: 3% 8 | if_ci_failed: success #success, failure, error, ignore 9 | -------------------------------------------------------------------------------- /ditk/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/ditk/__init__.py -------------------------------------------------------------------------------- /ditk/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/ditk/config/__init__.py -------------------------------------------------------------------------------- /ditk/config/meta.py: -------------------------------------------------------------------------------- 1 | """ 2 | Overview: 3 | Meta information for DI-toolkit package. 4 | """ 5 | 6 | #: Title of this project (should be `DI-toolkit`). 7 | __TITLE__ = "DI-toolkit" 8 | 9 | #: Version of this project. 10 | __VERSION__ = "0.2.1" 11 | 12 | #: Short description of the project, will be included in ``setup.py``. 13 | __DESCRIPTION__ = 'A simple tool for opendilab.' 14 | 15 | #: Author of this project. 16 | __AUTHOR__ = "HansBug" 17 | 18 | #: Email of the author.
19 | __AUTHOR_EMAIL__ = "hansbug@buaa.edu.cn" 20 | -------------------------------------------------------------------------------- /ditk/doc/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/ditk/doc/__init__.py -------------------------------------------------------------------------------- /ditk/doc/annotated/__init__.py: -------------------------------------------------------------------------------- 1 | from .generate import generate_annotated_doc 2 | -------------------------------------------------------------------------------- /ditk/doc/annotated/__main__.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | 3 | import click 4 | from click.core import Context, Option 5 | 6 | from ditk import logging 7 | from ditk.config.meta import __VERSION__ 8 | from .generate import generate_annotated_doc, Lang 9 | 10 | GLOBAL_CONTEXT_SETTINGS = dict( 11 | help_option_names=['-h', '--help'] 12 | ) 13 | 14 | 15 | def print_version(module, ctx: Context, param: Option, value: bool) -> None: 16 | """ 17 | Print version information of cli 18 | :param module: current module using this cli. 19 | :param ctx: click context 20 | :param param: current parameter's metadata 21 | :param value: value of current parameter 22 | """ 23 | _ = param 24 | if not value or ctx.resilient_parsing: 25 | return # pragma: no cover 26 | 27 | click.echo(f'CLI for {module}, version {__VERSION__}') 28 | ctx.exit() 29 | 30 | 31 | @click.group(context_settings={**GLOBAL_CONTEXT_SETTINGS}, 32 | help='Utils for creating annotation documentation.') 33 | @click.option('-v', '--version', is_flag=True, 34 | callback=partial(print_version, 'ditk.doc.annotated'), expose_value=False, is_eager=True, 35 | help="Show version information.") 36 | def cli(): 37 | pass 38 | 39 | 40 | @cli.command('create', context_settings={**GLOBAL_CONTEXT_SETTINGS}, 41 | help='Utils for creating annotation documentation from local code.') 42 | @click.option('-i', '--input_file', 'input_file', type=click.types.Path(dir_okay=False, exists=True), 43 | required=True, help='Input source code.') 44 | @click.option('-o', '--output_file', 'output_file', type=click.types.Path(dir_okay=False), 45 | required=True, help='Output annotated documentation code.') 46 | @click.option('-A', '--assets_dir', 'assets_directory', type=click.types.Path(file_okay=False), 47 | default=None, help='Directory for assets file of this documentation.') 48 | @click.option('-L', '--language', 'language', type=click.types.Choice(list(Lang.__members__.values())), 49 | default=Lang.English.value, help='Language for documentation.', show_default=True) 50 | @click.option('-T', '--title', type=str, default='', 51 | help='Title of the documentation.', show_default=True) 52 | def main(input_file, output_file, assets_directory, language, title): 53 | logging.try_init_root(logging.INFO) 54 | generate_annotated_doc(input_file, output_file, title, assets_directory, language) 55 | 56 | 57 | if __name__ == '__main__': 58 | cli() 59 | -------------------------------------------------------------------------------- /ditk/doc/annotated/generate.py: -------------------------------------------------------------------------------- 1 | import os 2 | import shutil 3 | from enum import IntEnum, Enum 4 | 5 | from yattag import Doc 6 | 7 | from ditk import logging 8 | 9 | code_js = r""" 10 | window.onload = 
function(){ 11 | var codeElement = document.getElementsByName('py_code'); 12 | var lineCount = 1; 13 | for (var i = 0; i < codeElement.length; i++) { 14 | var code = codeElement[i].innerText; 15 | if (code.length <= 1) { 16 | continue; 17 | } 18 | 19 | codeElement[i].innerHTML = ""; 20 | 21 | var codeMirror = CodeMirror( 22 | codeElement[i], 23 | { 24 | value: code, 25 | mode: "python", 26 | theme: "solarized dark", 27 | lineNumbers: true, 28 | firstLineNumber: lineCount, 29 | readOnly: false, 30 | lineWrapping: true, 31 | } 32 | ); 33 | var noNewLineCode = code.replace(/[\r\n]/g, ""); 34 | lineCount += code.length - noNewLineCode.length + 1; 35 | } 36 | }; 37 | """ 38 | 39 | 40 | class StateCode(IntEnum): 41 | NORMAL = 0 42 | LINE_COMMENT = 1 43 | BLOCK_COMMENT = 2 44 | 45 | 46 | class Lang(str, Enum): 47 | Chinese = 'zh' 48 | English = 'en' 49 | 50 | 51 | def _get_assets(path, *paths): 52 | return os.path.normpath(os.path.join(__file__, '..', path, *paths)) 53 | 54 | 55 | def generate_annotated_doc(src_py_path, dst_html_path, title, 56 | ds_assets_path=None, lang: Lang = Lang.English): 57 | if not ds_assets_path: 58 | ds_assets_path = os.path.normcase(os.path.normpath(os.path.join(dst_html_path, '..', 'assets'))) 59 | ds_assets_path = os.path.abspath(ds_assets_path) 60 | dst_html_dir = os.path.dirname(os.path.abspath(dst_html_path)) 61 | if not os.path.exists(ds_assets_path): 62 | os.makedirs(ds_assets_path, exist_ok=True) 63 | 64 | def _get_rel_url_from_html(asset_path): 65 | segments = os.path.relpath(asset_path, start=dst_html_dir).split(os.path.sep) 66 | return '/'.join(segments) 67 | 68 | def _place_asset(path, *paths): 69 | segments = [path, *paths] 70 | dst_file = os.path.join(ds_assets_path, segments[-1]) 71 | shutil.copyfile(_get_assets(*segments), dst_file) 72 | return _get_rel_url_from_html(dst_file) 73 | 74 | with open(src_py_path, 'r') as f: 75 | src = f.read() 76 | line_data = src.split('\n') 77 | doc, tag, text, line = Doc().ttl() 78 | with tag('html'): 79 | with tag('head'): 80 | with tag('meta', charset="utf-8"): 81 | pass 82 | with tag('title'): 83 | text(title) 84 | with tag('link', rel="stylesheet", href=_place_asset('pylit.css')): 85 | pass 86 | with tag('link', rel="stylesheet", href=_place_asset('solarized.css')): 87 | pass 88 | with tag('link', rel="stylesheet", href="https://cdn.jsdelivr.net/npm/katex@0.16.3/dist/katex.min.css", 89 | integrity="sha384-Juol1FqnotbkyZUT5Z7gUPjQ9gzlwCENvUZTpQBAPxtusdwFLRy382PSDx5UUJ4/", 90 | crossorigin="anonymous"): 91 | pass 92 | with tag('script', src="https://cdn.jsdelivr.net/npm/katex@0.16.3/dist/katex.min.js", 93 | integrity="sha384-97gW6UIJxnlKemYavrqDHSX3SiygeOwIZhwyOKRfSaf0JWKRVj9hLASHgFTzT+0O", 94 | crossorigin="anonymous"): 95 | pass 96 | with tag('script', src="https://cdn.jsdelivr.net/npm/katex@0.16.3/dist/contrib/auto-render.min.js", 97 | integrity="sha384-+VBxd3r6XgURycqtZ117nYw44OOcIax56Z4dCRWbxyPt0Koah1uHoK0o4+/RRE05", 98 | crossorigin="anonymous", onload="renderMathInElement(document.body);", defer=True): 99 | pass 100 | with tag('link', rel="stylesheet", 101 | href="https://cdn.jsdelivr.net/npm/codemirror@5.61.0/lib/codemirror.min.css"): 102 | pass 103 | with tag('script'): 104 | doc.attr(src="https://cdn.jsdelivr.net/npm/codemirror@5.61.0/lib/codemirror.min.js") 105 | with tag('script'): 106 | doc.attr(src="https://cdn.jsdelivr.net/npm/codemirror@5.61.0/mode/python/python.min.js") 107 | 108 | with tag('body'): 109 | def item_fn(doc_data, code_data, cnt): 110 | if len(doc_data) == 0: 111 | return 112 | doc_data = 
doc_data.replace('\n', '<br>
') 113 | doc_data = doc_data.replace('**:', '') 114 | doc_data = doc_data.replace('**', '') 115 | doc_data = doc_data.replace(' ``', ' ') 116 | doc_data = doc_data.replace('`` ', ' ') 117 | doc_data = doc_data.replace('Related Link') 119 | with tag('div', klass='section', id=f'section-{cnt}'): 120 | if cnt == 0: 121 | with tag('div', klass='docs doc-strings'): 122 | with tag('p'): 123 | with tag('p'): 124 | with tag('a', href='index.html'): 125 | with tag('b'): 126 | text("HOME
") 127 | 128 | with tag('a', href="https://github.com/opendilab/PPOxFamily", target="_blank"): 129 | with tag('img', alt="GitHub", style="max-width:100%;"): 130 | doc.attr( 131 | src="https://img.shields.io/github/stars/opendilab/PPOxFamily?style=social" 132 | ) 133 | text(' ') 134 | with tag('a', href="https://space.bilibili.com/1112854351?spm_id_from=333.337.0.0", 135 | target="_blank"): 136 | with tag('img', alt="bilibili", style="max-width:100%;"): 137 | doc.attr(src="https://img.shields.io/badge/bilibili-video%20course-blue") 138 | text(' ') 139 | with tag('a', href="https://twitter.com/OpenDILab", rel="nofollow", target="_blank"): 140 | with tag('img', alt="twitter", style="max-width:100%;"): 141 | doc.attr(src="https://img.shields.io/twitter/follow/opendilab?style=social") 142 | text('
') 143 | with tag('a', 144 | href=f"https://github.com/opendilab/PPOxFamily/tree/main/{src_py_path}", 145 | target="_blank"): 146 | text("View code on GitHub") 147 | text('

') 148 | text(doc_data) 149 | 150 | else: 151 | with tag('div', klass='docs doc-strings'): 152 | with tag('p'): 153 | text(doc_data) 154 | 155 | with tag('div', klass='code'): 156 | with tag('pre'): 157 | with tag('code', id=f"code_{cnt}", name="py_code"): 158 | text(code_data) 159 | 160 | cnt = 0 161 | state = StateCode.NORMAL 162 | line_code, line_comment, block_comment = [], [], [] 163 | for i in range(len(line_data)): 164 | logging.info(f'{i:3d} {line_data[i]}') 165 | no_space_data = line_data[i].strip() 166 | if no_space_data.startswith('if __name__ == "__main__":'): 167 | break 168 | if state == StateCode.NORMAL: 169 | if no_space_data.startswith('"""'): # block comment 170 | state = StateCode.BLOCK_COMMENT 171 | elif no_space_data.startswith('#'): # line comment 172 | item_fn('\n'.join(line_comment), '\n'.join(line_code), cnt) 173 | line_code, line_comment, block_comment = [], [], [] 174 | cnt += 1 175 | state = StateCode.LINE_COMMENT 176 | line_comment.append(line_data[i].replace('# ', '')) # remove '# ' 177 | if 'delimiter' in line_data[i]: 178 | line_comment[-1] = '' 179 | else: 180 | line_code.append(line_data[i]) 181 | elif state == StateCode.LINE_COMMENT: 182 | if no_space_data.startswith('#'): 183 | line_comment.append(line_data[i].replace('# ', '')) # remove '# ' 184 | else: 185 | state = StateCode.NORMAL 186 | line_code.append(line_data[i]) 187 | elif state == StateCode.BLOCK_COMMENT: 188 | if no_space_data.startswith('"""'): # block comment end 189 | item_fn('\n'.join(block_comment), '\n'.join(line_code), cnt) 190 | line_code, line_comment, block_comment = [], [], [] 191 | cnt += 1 192 | state = StateCode.NORMAL 193 | else: 194 | block_comment.append(line_data[i]) 195 | else: 196 | raise RuntimeError(state) 197 | 198 | item_fn('\n'.join(line_comment), '\n'.join(line_code), cnt) 199 | with tag('div', klass='section', id=f'section-{cnt}'): 200 | with tag('div', klass='docs doc-strings'): 201 | with tag('p'): 202 | with tag('i'): 203 | if lang == Lang.Chinese: 204 | text('如果读者关于本文档有任何问题和建议,可以在 GitHub 提 issue 或是直接发邮件给我们' 205 | ' (opendilab@pjlab.org.cn) 。') 206 | elif lang == Lang.English: 207 | text( 208 | 'If you have any questions or advices about this documentation, ' 209 | 'you can raise issues in GitHub (https://github.com/opendilab/PPOxFamily) ' 210 | 'or email us (opendilab@pjlab.org.cn).' 
211 | ) 212 | else: 213 | raise ValueError(f'Unsupported lang - {lang!r}.') 214 | 215 | with tag('script', type="text/javascript"): 216 | text(code_js) 217 | 218 | result = doc.getvalue() 219 | result = result.replace('&lt;', '<') 220 | result = result.replace('&gt;', '>') 221 | with open(dst_html_path, 'w') as f: 222 | f.write('\n' + result) 223 | 224 | logging.info(f'[green]Log to [u]{dst_html_path}[/u] success![/green]') 225 | 226 | 227 | if __name__ == "__main__": 228 | generate_annotated_doc('ppo.py', 'ppo.html', 'This is the title') 229 | -------------------------------------------------------------------------------- /ditk/doc/annotated/ppo.py: -------------------------------------------------------------------------------- 1 | """ 2 | PyTorch's implementation of Proximal Policy Optimization (PPO) 3 | """ 4 | from collections import namedtuple 5 | from typing import Optional, Tuple 6 | 7 | import torch 8 | 9 | ppo_policy_data = namedtuple('ppo_policy_data', ['logit_new', 'logit_old', 'action', 'adv', 'weight']) 10 | ppo_policy_loss = namedtuple('ppo_policy_loss', ['policy_loss', 'entropy_loss']) 11 | ppo_info = namedtuple('ppo_info', ['approx_kl', 'clipfrac']) 12 | 13 | 14 | def ppo_policy_error(data: namedtuple, 15 | clip_ratio: float = 0.2, 16 | dual_clip: Optional[float] = None) -> Tuple[namedtuple, namedtuple]: 17 | """ 18 | **Overview**: 19 | Implementation of Proximal Policy Optimization 20 | with entropy bonus, value_clip and dual_clip. 21 | """ 22 | # Unpack data: $$<\pi_{new}(a|s), \pi_{old}(a|s), a, A^{\pi_{old}}(s, a), w>$$ 23 | logit_new, logit_old, action, adv, weight = data 24 | # Prepare weight for default cases. 25 | if weight is None: 26 | weight = torch.ones_like(adv) 27 | # Prepare policy distribution from logit and get log probability. 28 | dist_new = torch.distributions.categorical.Categorical(logits=logit_new) 29 | dist_old = torch.distributions.categorical.Categorical(logits=logit_old) 30 | logp_new = dist_new.log_prob(action) 31 | logp_old = dist_old.log_prob(action) 32 | # Entropy bonus: $$\pi_{new}(a|s) log(\pi_{new}(a|s))$$ 33 | dist_new_entropy = dist_new.entropy() 34 | entropy_loss = (dist_new_entropy * weight).mean() 35 | # Importance sampling weight: $$r(\theta) = \frac{\pi_{new}(a|s)}{\pi_{old}(a|s)}$$ 36 | ratio = torch.exp(logp_new - logp_old) 37 | # Original surrogate objective: $$r(\theta) A^{\pi_{old}}(s, a)$$ 38 | surr1 = ratio * adv 39 | # Clipped surrogate objective: $$clip(r(\theta), 1-\epsilon, 1+\epsilon) A^{\pi_{old}}(s, a)$$ 40 | surr2 = ratio.clamp(1 - clip_ratio, 1 + clip_ratio) * adv 41 | # Dual clip proposed by . 42 | # Only use dual_clip when adv < 0. 43 | if dual_clip is not None: 44 | clip1 = torch.min(surr1, surr2) 45 | clip2 = torch.max(clip1, dual_clip * adv) 46 | policy_loss = -(torch.where(adv < 0, clip2, clip1) * weight).mean() 47 | # PPO-Clipped Loss: $$min(r(\theta) A^{\pi_{old}}(s, a), clip(r(\theta), 1-\epsilon, 1+\epsilon) A^{\pi_{old}}(s, a))$$ 48 | # Multiply sample-wise weight and reduce mean in batch dimension. 49 | else: 50 | policy_loss = (-torch.min(surr1, surr2) * weight).mean() 51 | # Add some visualization metrics to monitor optimization status. 52 | with torch.no_grad(): 53 | approx_kl = (logp_old - logp_new).mean().item() 54 | clipped = ratio.gt(1 + clip_ratio) | ratio.lt(1 - clip_ratio) 55 | clipfrac = torch.as_tensor(clipped).float().mean().item() 56 | # Return final loss and information.
57 | return ppo_policy_loss(policy_loss, entropy_loss), ppo_info(approx_kl, clipfrac) 58 | -------------------------------------------------------------------------------- /ditk/doc/annotated/pylit.css: -------------------------------------------------------------------------------- 1 | html { 2 | font-size: 62.5%; 3 | } 4 | 5 | body { 6 | font-size: 1.5em; 7 | line-height: 1.6; 8 | font-weight: 400; 9 | font-family: "SF Pro Text", "SF Pro Icons", "Helvetica Neue", "Helvetica", "Arial", sans-serif; 10 | margin: 0; 11 | padding: 0; 12 | } 13 | 14 | hr { 15 | border-style: solid; 16 | border-bottom: none; 17 | } 18 | 19 | pre > code { 20 | display: block; 21 | padding: 1rem 1.5rem; 22 | white-space: pre; 23 | } 24 | 25 | p { 26 | margin: 0 0 10px 0; 27 | } 28 | 29 | h1, h2, h3, h4, h5, h6 { 30 | margin: 40px 0 15px 0; 31 | } 32 | 33 | h2, h3, h4, h5, h6 { 34 | margin-top: 0; 35 | } 36 | 37 | #container { 38 | position: relative; 39 | } 40 | 41 | #background { 42 | position: absolute; 43 | top: 0; 44 | left: 40%; 45 | right: 0; 46 | bottom: 0; 47 | z-index: 0; 48 | display: none; 49 | } 50 | @media (min-width: 768px) { 51 | #background { 52 | display: block; 53 | } 54 | } 55 | 56 | a.parent { 57 | text-transform: uppercase; 58 | font-weight: bold; 59 | font-size: 12px; 60 | margin-right: 10px; 61 | text-decoration: none; 62 | } 63 | a.parent:after { 64 | content: ">"; 65 | font-size: 14px; 66 | margin-left: 4px; 67 | } 68 | 69 | div.footer { 70 | margin-top: 25px; 71 | position: relative; 72 | padding: 10px 0; 73 | text-align: center; 74 | } 75 | div.footer a { 76 | display: inline-block; 77 | margin: 8px; 78 | font-size: 1.3rem; 79 | } 80 | 81 | div.section { 82 | position: relative; 83 | } 84 | div.section:after { 85 | clear: both; 86 | content: ""; 87 | display: block; 88 | } 89 | div.section:hover div.docs .section-link a { 90 | opacity: 1; 91 | } 92 | div.section div.docs { 93 | box-sizing: border-box; 94 | padding: 10px 8px 1px 8px; 95 | vertical-align: top; 96 | text-align: left; 97 | } 98 | @media (min-width: 768px) { 99 | div.section div.docs { 100 | float: left; 101 | width: 40%; 102 | min-height: 5px; 103 | } 104 | } 105 | @media (min-width: 1024px) { 106 | div.section div.docs { 107 | padding: 10px 25px 1px 50px; 108 | } 109 | } 110 | div.section div.docs .section-link { 111 | position: relative; 112 | } 113 | div.section div.docs .section-link a { 114 | font: 12px Arial; 115 | text-decoration: none; 116 | position: absolute; 117 | top: 3px; 118 | left: -20px; 119 | padding: 1px 2px; 120 | opacity: 0; 121 | -webkit-transition: opacity 0.2s linear; 122 | } 123 | div.section div.docs .katex-display { 124 | overflow-x: auto; 125 | overflow-y: hidden; 126 | } 127 | div.section div.docs pre code { 128 | overflow-x: auto; 129 | } 130 | div.section div.docs img { 131 | max-width: 100%; 132 | } 133 | div.section div.docs pre { 134 | margin: 15px 0 15px; 135 | } 136 | div.section div.code { 137 | padding: 1px 8px 12px 15px; 138 | vertical-align: top; 139 | } 140 | @media (min-width: 768px) { 141 | div.section div.code { 142 | margin-left: 45%; 143 | } 144 | } 145 | div.section div.code pre { 146 | font-size: 16px; 147 | word-break: break-all; 148 | white-space: pre-wrap; 149 | } 150 | 151 | .highlight .n, .highlight .nn, .highlight .nc, .highlight .nf { 152 | cursor: pointer; 153 | } 154 | 155 | code { 156 | padding: 0.2rem 0.5rem; 157 | margin: 0 0.2rem; 158 | font-size: 80%; 159 | white-space: nowrap; 160 | border-radius: 4px; 161 | } 162 | 163 | pre, tt, code { 164 | line-height: 18px; 
165 | font-family: Monaco, Consolas, "Lucida Console", monospace; 166 | /* margin: 0; */ 167 | padding: 0; 168 | } 169 | 170 | span.lineno { 171 | width: 20px; 172 | display: inline-block; 173 | text-align: right; 174 | padding-right: 10px; 175 | opacity: 0.3; 176 | font-size: 10px; 177 | white-space: nowrap; 178 | } 179 | 180 | p > img { 181 | max-height: 240px; 182 | max-width: 240px; 183 | border-radius: 5px; 184 | cursor: pointer; 185 | transition: 0.3s; 186 | } 187 | p > img:hover { 188 | opacity: 0.7; 189 | } 190 | 191 | #modal { 192 | position: fixed; 193 | z-index: 1000; 194 | left: 0; 195 | top: 0; 196 | right: 0; 197 | bottom: 0; 198 | overflow: scroll; 199 | } 200 | #modal > div { 201 | padding: 100px 10px 10px 10px; 202 | } 203 | #modal > div > img { 204 | margin: auto; 205 | display: block; 206 | width: 80%; 207 | max-width: 700px; 208 | } 209 | #modal > div > p { 210 | margin: auto; 211 | display: block; 212 | width: 80%; 213 | max-width: 700px; 214 | text-align: center; 215 | padding: 10px 0; 216 | height: 150px; 217 | } 218 | #modal > div > img, #modal > div > p { 219 | animation-name: zoom; 220 | animation-duration: 0.6s; 221 | } 222 | @keyframes zoom { 223 | from { 224 | transform: scale(0); 225 | } 226 | to { 227 | transform: scale(1); 228 | } 229 | } 230 | #modal > span.close { 231 | position: absolute; 232 | top: 15px; 233 | right: 35px; 234 | font-size: 40px; 235 | font-weight: bold; 236 | transition: 0.3s; 237 | } 238 | #modal > span.close:hover, #modal > span.close:focus { 239 | text-decoration: none; 240 | cursor: pointer; 241 | } 242 | 243 | .katex span { 244 | cursor: default; 245 | } 246 | .katex span.coloredeq { 247 | cursor: pointer; 248 | } 249 | .katex span.coloredeq span { 250 | cursor: pointer; 251 | } 252 | 253 | @media only screen and (max-width: 700px) { 254 | #modal > img { 255 | width: 100%; 256 | } 257 | } 258 | body { 259 | color: #999; 260 | background: #1d2127; 261 | } 262 | 263 | hr { 264 | border-color: #555; 265 | } 266 | 267 | a { 268 | color: #bbb; 269 | } 270 | a:visited { 271 | color: #aaa; 272 | } 273 | 274 | #container { 275 | background: #1d2127; 276 | } 277 | 278 | div.section { 279 | border-top: 1px solid #353745; 280 | } 281 | 282 | #background { 283 | background: #282a36; 284 | border-left: 1px solid #293340; 285 | } 286 | 287 | div.footer { 288 | background: #30353d; 289 | } 290 | div.footer a { 291 | display: inline-block; 292 | margin: 5px; 293 | } 294 | 295 | a.parent { 296 | color: #ffffff; 297 | } 298 | a.parent:after { 299 | color: #aaa; 300 | } 301 | 302 | div.section:hover { 303 | background: #080a16; 304 | } 305 | div.section:hover div.code { 306 | background: #080a16; 307 | } 308 | div.section div.docs .section-link a { 309 | color: #454545; 310 | } 311 | div.section div.docs p tt, div.section div.docs li code, div.section div.docs ol code, div.section div.docs p code { 312 | background: #282a36; 313 | color: #ccc; 314 | } 315 | div.section div.code { 316 | background: #282a36; 317 | } 318 | 319 | div.doc-strings { 320 | color: #ccc; 321 | } 322 | 323 | .mjx-chtml { 324 | color: #ccc; 325 | } 326 | 327 | code { 328 | background: #282a36; 329 | border: 1px solid #484a56; 330 | } 331 | 332 | #modal { 333 | background-color: rgba(0, 0, 0, 0.9); 334 | } 335 | #modal > div > p { 336 | color: #ccc; 337 | } 338 | #modal > span.close { 339 | color: #f1f1f1; 340 | } 341 | #modal:hover, #modal:focus { 342 | color: #bbb; 343 | } 344 | 345 | .highlight { 346 | color: #f8f8f2; 347 | } 348 | .highlight .hll { 349 | background-color: 
#f1fa8c; 350 | } 351 | .highlight .c { 352 | color: #6272a4; 353 | } 354 | .highlight .err { 355 | color: #f8f8f2; 356 | } 357 | .highlight .g { 358 | color: #f8f8f2; 359 | } 360 | .highlight .k { 361 | color: #ff79c6; 362 | } 363 | .highlight .l { 364 | color: #f8f8f2; 365 | } 366 | .highlight .n { 367 | color: #f8f8f2; 368 | } 369 | .highlight .o { 370 | color: #ff4996; 371 | } 372 | .highlight .x { 373 | color: #f8f8f2; 374 | } 375 | .highlight .p { 376 | color: #a8a8a2; 377 | } 378 | .highlight .ch { 379 | color: #6272a4; 380 | } 381 | .highlight .cm { 382 | color: #6272a4; 383 | } 384 | .highlight .cp { 385 | color: #ff79c6; 386 | } 387 | .highlight .cpf { 388 | color: #6272a4; 389 | } 390 | .highlight .c1 { 391 | color: #6272a4; 392 | } 393 | .highlight .cs { 394 | color: #6272a4; 395 | } 396 | .highlight .gd { 397 | color: #8b080b; 398 | } 399 | .highlight .ge { 400 | color: #f8f8f2; 401 | text-decoration: underline; 402 | } 403 | .highlight .gr { 404 | color: #f8f8f2; 405 | } 406 | .highlight .gh { 407 | color: #f8f8f2; 408 | font-weight: bold; 409 | } 410 | .highlight .gi { 411 | color: #f8f8f2; 412 | font-weight: bold; 413 | } 414 | .highlight .go { 415 | color: #44475a; 416 | } 417 | .highlight .gp { 418 | color: #f8f8f2; 419 | } 420 | .highlight .gs { 421 | color: #f8f8f2; 422 | } 423 | .highlight .gu { 424 | color: #f8f8f2; 425 | font-weight: bold; 426 | } 427 | .highlight .gt { 428 | color: #f8f8f2; 429 | } 430 | .highlight .kc { 431 | color: #ff79c6; 432 | } 433 | .highlight .kd { 434 | color: #8be9fd; 435 | font-style: italic; 436 | } 437 | .highlight .kn { 438 | color: #ff79c6; 439 | } 440 | .highlight .kp { 441 | color: #ff79c6; 442 | } 443 | .highlight .kr { 444 | color: #ff79c6; 445 | } 446 | .highlight .kt { 447 | color: #8be9fd; 448 | } 449 | .highlight .ld { 450 | color: #f8f8f2; 451 | } 452 | .highlight .m { 453 | color: #bd93f9; 454 | } 455 | .highlight .s { 456 | color: #f1fa8c; 457 | } 458 | .highlight .na { 459 | color: #50fa7b; 460 | } 461 | .highlight .nb { 462 | color: #8be9fd; 463 | font-style: italic; 464 | } 465 | .highlight .nc { 466 | color: #ffb86c; 467 | font-weight: bold; 468 | } 469 | .highlight .no { 470 | color: #f8f8f2; 471 | } 472 | .highlight .nd { 473 | color: #9d93ff; 474 | } 475 | .highlight .ni { 476 | color: #f8f8f2; 477 | } 478 | .highlight .ne { 479 | color: #d8d8d2; 480 | font-style: italic; 481 | } 482 | .highlight .nf { 483 | color: #ffb86c; 484 | } 485 | .highlight .nl { 486 | color: #8be9fd; 487 | font-style: italic; 488 | } 489 | .highlight .nn { 490 | color: #f8f8f2; 491 | } 492 | .highlight .nx { 493 | color: #f8f8f2; 494 | } 495 | .highlight .py { 496 | color: #f8f8f2; 497 | } 498 | .highlight .nt { 499 | color: #ff79c6; 500 | } 501 | .highlight .nv { 502 | color: #8be9fd; 503 | font-style: italic; 504 | } 505 | .highlight .ow { 506 | color: #ff79c6; 507 | } 508 | .highlight .w { 509 | color: #f8f8f2; 510 | } 511 | .highlight .mb { 512 | color: #bd93f9; 513 | } 514 | .highlight .mf { 515 | color: #bd93f9; 516 | } 517 | .highlight .mh { 518 | color: #bd93f9; 519 | } 520 | .highlight .mi { 521 | color: #bd93f9; 522 | } 523 | .highlight .mo { 524 | color: #bd93f9; 525 | } 526 | .highlight .sa { 527 | color: #f1fa8c; 528 | } 529 | .highlight .sb { 530 | color: #f1fa8c; 531 | } 532 | .highlight .sc { 533 | color: #f1fa8c; 534 | } 535 | .highlight .dl { 536 | color: #f1fa8c; 537 | } 538 | .highlight .sd { 539 | color: #f1fa8c; 540 | } 541 | .highlight .s2 { 542 | color: #f1fa8c; 543 | } 544 | .highlight .se { 545 | color: #f1fa8c; 546 
| } 547 | .highlight .sh { 548 | color: #f1fa8c; 549 | } 550 | .highlight .si { 551 | color: #f1fa8c; 552 | } 553 | .highlight .sx { 554 | color: #f1fa8c; 555 | } 556 | .highlight .sr { 557 | color: #f1fa8c; 558 | } 559 | .highlight .s1 { 560 | color: #f1fa8c; 561 | } 562 | .highlight .ss { 563 | color: #f1fa8c; 564 | } 565 | .highlight .bp { 566 | color: #50fa7b; 567 | } 568 | .highlight .fm { 569 | color: #ffb86c; 570 | font-style: italic; 571 | } 572 | .highlight .vc { 573 | color: #8be9fd; 574 | font-style: italic; 575 | } 576 | .highlight .vg { 577 | color: #8be9fd; 578 | font-style: italic; 579 | } 580 | .highlight .vi { 581 | color: #8be9fd; 582 | font-style: italic; 583 | } 584 | .highlight .vm { 585 | color: #8be9fd; 586 | font-style: italic; 587 | } 588 | .highlight .il { 589 | color: #bd93f9; 590 | } 591 | 592 | body.lights-off .highlight .clicked, body.lights-off .katex .clicked { 593 | color: #00ffff !important; 594 | text-shadow: 0 0 20px #00ffff; 595 | } 596 | 597 | body.lights-off .highlight { 598 | color: whitesmoke; 599 | } 600 | body.lights-off .highlight .hll { 601 | background-color: #c3c3c3; 602 | } 603 | body.lights-off .highlight .c { 604 | color: #838383; 605 | } 606 | body.lights-off .highlight .err { 607 | color: whitesmoke; 608 | } 609 | body.lights-off .highlight .g { 610 | color: whitesmoke; 611 | } 612 | body.lights-off .highlight .k { 613 | color: #bcbcbc; 614 | } 615 | body.lights-off .highlight .l { 616 | color: whitesmoke; 617 | } 618 | body.lights-off .highlight .n { 619 | color: whitesmoke; 620 | } 621 | body.lights-off .highlight .o { 622 | color: #a4a4a4; 623 | } 624 | body.lights-off .highlight .x { 625 | color: whitesmoke; 626 | } 627 | body.lights-off .highlight .p { 628 | color: #a5a5a5; 629 | } 630 | body.lights-off .highlight .ch { 631 | color: #838383; 632 | } 633 | body.lights-off .highlight .cm { 634 | color: #838383; 635 | } 636 | body.lights-off .highlight .cp { 637 | color: #bcbcbc; 638 | } 639 | body.lights-off .highlight .cpf { 640 | color: #838383; 641 | } 642 | body.lights-off .highlight .c1 { 643 | color: #838383; 644 | } 645 | body.lights-off .highlight .cs { 646 | color: #838383; 647 | } 648 | body.lights-off .highlight .gd { 649 | color: #4a4a4a; 650 | } 651 | body.lights-off .highlight .ge { 652 | color: whitesmoke; 653 | } 654 | body.lights-off .highlight .gr { 655 | color: whitesmoke; 656 | } 657 | body.lights-off .highlight .gh { 658 | color: whitesmoke; 659 | font-weight: bold; 660 | } 661 | body.lights-off .highlight .gi { 662 | color: whitesmoke; 663 | } 664 | body.lights-off .highlight .go { 665 | color: #4f4f4f; 666 | } 667 | body.lights-off .highlight .gp { 668 | color: whitesmoke; 669 | } 670 | body.lights-off .highlight .gs { 671 | color: whitesmoke; 672 | } 673 | body.lights-off .highlight .gu { 674 | color: whitesmoke; 675 | } 676 | body.lights-off .highlight .gt { 677 | color: whitesmoke; 678 | } 679 | body.lights-off .highlight .kc { 680 | color: #bcbcbc; 681 | } 682 | body.lights-off .highlight .kd { 683 | color: #c4c4c4; 684 | font-style: italic; 685 | } 686 | body.lights-off .highlight .kn { 687 | color: #bcbcbc; 688 | } 689 | body.lights-off .highlight .kp { 690 | color: #bcbcbc; 691 | } 692 | body.lights-off .highlight .kr { 693 | color: #bcbcbc; 694 | } 695 | body.lights-off .highlight .kt { 696 | color: #c4c4c4; 697 | } 698 | body.lights-off .highlight .ld { 699 | color: whitesmoke; 700 | } 701 | body.lights-off .highlight .m { 702 | color: #c6c6c6; 703 | } 704 | body.lights-off .highlight .s { 705 | color: 
#c3c3c3; 706 | } 707 | body.lights-off .highlight .na { 708 | color: #a5a5a5; 709 | } 710 | body.lights-off .highlight .nb { 711 | color: #c4c4c4; 712 | } 713 | body.lights-off .highlight .nc { 714 | color: #b6b6b6; 715 | } 716 | body.lights-off .highlight .no { 717 | color: whitesmoke; 718 | } 719 | body.lights-off .highlight .nd { 720 | color: #c9c9c9; 721 | } 722 | body.lights-off .highlight .ni { 723 | color: whitesmoke; 724 | } 725 | body.lights-off .highlight .ne { 726 | color: #d5d5d5; 727 | } 728 | body.lights-off .highlight .nf { 729 | color: #b6b6b6; 730 | } 731 | body.lights-off .highlight .nl { 732 | color: #c4c4c4; 733 | } 734 | body.lights-off .highlight .nn { 735 | color: whitesmoke; 736 | } 737 | body.lights-off .highlight .nx { 738 | color: whitesmoke; 739 | } 740 | body.lights-off .highlight .py { 741 | color: whitesmoke; 742 | } 743 | body.lights-off .highlight .nt { 744 | color: #bcbcbc; 745 | } 746 | body.lights-off .highlight .nv { 747 | color: #c4c4c4; 748 | } 749 | body.lights-off .highlight .ow { 750 | color: #bcbcbc; 751 | } 752 | body.lights-off .highlight .w { 753 | color: whitesmoke; 754 | } 755 | body.lights-off .highlight .mb { 756 | color: #c6c6c6; 757 | } 758 | body.lights-off .highlight .mf { 759 | color: #c6c6c6; 760 | } 761 | body.lights-off .highlight .mh { 762 | color: #c6c6c6; 763 | } 764 | body.lights-off .highlight .mi { 765 | color: #c6c6c6; 766 | } 767 | body.lights-off .highlight .mo { 768 | color: #c6c6c6; 769 | } 770 | body.lights-off .highlight .sa { 771 | color: #c3c3c3; 772 | } 773 | body.lights-off .highlight .sb { 774 | color: #c3c3c3; 775 | } 776 | body.lights-off .highlight .sc { 777 | color: #c3c3c3; 778 | } 779 | body.lights-off .highlight .dl { 780 | color: #c3c3c3; 781 | } 782 | body.lights-off .highlight .sd { 783 | color: #c3c3c3; 784 | } 785 | body.lights-off .highlight .s2 { 786 | color: #c3c3c3; 787 | } 788 | body.lights-off .highlight .se { 789 | color: #c3c3c3; 790 | } 791 | body.lights-off .highlight .sh { 792 | color: #c3c3c3; 793 | } 794 | body.lights-off .highlight .si { 795 | color: #c3c3c3; 796 | } 797 | body.lights-off .highlight .sx { 798 | color: #c3c3c3; 799 | } 800 | body.lights-off .highlight .sr { 801 | color: #c3c3c3; 802 | } 803 | body.lights-off .highlight .s1 { 804 | color: #c3c3c3; 805 | } 806 | body.lights-off .highlight .ss { 807 | color: #c3c3c3; 808 | } 809 | body.lights-off .highlight .bp { 810 | color: #a5a5a5; 811 | } 812 | body.lights-off .highlight .fm { 813 | color: #b6b6b6; 814 | } 815 | body.lights-off .highlight .vc { 816 | color: #c4c4c4; 817 | } 818 | body.lights-off .highlight .vg { 819 | color: #c4c4c4; 820 | } 821 | body.lights-off .highlight .vi { 822 | color: #c4c4c4; 823 | } 824 | body.lights-off .highlight .vm { 825 | color: #c4c4c4; 826 | } 827 | body.lights-off .highlight .il { 828 | color: #c6c6c6; 829 | } 830 | 831 | @media (prefers-color-scheme: dark) { 832 | body { 833 | color: #999; 834 | background: #1d2127; 835 | } 836 | 837 | hr { 838 | border-color: #555; 839 | } 840 | 841 | a { 842 | color: #bbb; 843 | } 844 | a:visited { 845 | color: #aaa; 846 | } 847 | 848 | #container { 849 | background: #1d2127; 850 | } 851 | 852 | div.section { 853 | border-top: 1px solid #353745; 854 | } 855 | 856 | #background { 857 | background: #282a36; 858 | border-left: 1px solid #293340; 859 | } 860 | 861 | div.footer { 862 | background: #30353d; 863 | } 864 | div.footer a { 865 | display: inline-block; 866 | margin: 5px; 867 | } 868 | 869 | a.parent { 870 | color: #ffffff; 871 | } 872 | 
a.parent:after { 873 | color: #aaa; 874 | } 875 | 876 | div.section:hover { 877 | background: #080a16; 878 | } 879 | div.section:hover div.code { 880 | background: #080a16; 881 | } 882 | div.section div.docs .section-link a { 883 | color: #454545; 884 | } 885 | div.section div.docs p tt, div.section div.docs li code, div.section div.docs ol code, div.section div.docs p code { 886 | background: #282a36; 887 | color: #ccc; 888 | } 889 | div.section div.code { 890 | background: #282a36; 891 | } 892 | 893 | div.doc-strings { 894 | color: #ccc; 895 | } 896 | 897 | .mjx-chtml { 898 | color: #ccc; 899 | } 900 | 901 | code { 902 | background: #282a36; 903 | border: 1px solid #484a56; 904 | } 905 | 906 | #modal { 907 | background-color: rgba(0, 0, 0, 0.9); 908 | } 909 | #modal > div > p { 910 | color: #ccc; 911 | } 912 | #modal > span.close { 913 | color: #f1f1f1; 914 | } 915 | #modal:hover, #modal:focus { 916 | color: #bbb; 917 | } 918 | 919 | .highlight { 920 | color: #f8f8f2; 921 | } 922 | .highlight .hll { 923 | background-color: #f1fa8c; 924 | } 925 | .highlight .c { 926 | color: #6272a4; 927 | } 928 | .highlight .err { 929 | color: #f8f8f2; 930 | } 931 | .highlight .g { 932 | color: #f8f8f2; 933 | } 934 | .highlight .k { 935 | color: #ff79c6; 936 | } 937 | .highlight .l { 938 | color: #f8f8f2; 939 | } 940 | .highlight .n { 941 | color: #f8f8f2; 942 | } 943 | .highlight .o { 944 | color: #ff4996; 945 | } 946 | .highlight .x { 947 | color: #f8f8f2; 948 | } 949 | .highlight .p { 950 | color: #a8a8a2; 951 | } 952 | .highlight .ch { 953 | color: #6272a4; 954 | } 955 | .highlight .cm { 956 | color: #6272a4; 957 | } 958 | .highlight .cp { 959 | color: #ff79c6; 960 | } 961 | .highlight .cpf { 962 | color: #6272a4; 963 | } 964 | .highlight .c1 { 965 | color: #6272a4; 966 | } 967 | .highlight .cs { 968 | color: #6272a4; 969 | } 970 | .highlight .gd { 971 | color: #8b080b; 972 | } 973 | .highlight .ge { 974 | color: #f8f8f2; 975 | text-decoration: underline; 976 | } 977 | .highlight .gr { 978 | color: #f8f8f2; 979 | } 980 | .highlight .gh { 981 | color: #f8f8f2; 982 | font-weight: bold; 983 | } 984 | .highlight .gi { 985 | color: #f8f8f2; 986 | font-weight: bold; 987 | } 988 | .highlight .go { 989 | color: #44475a; 990 | } 991 | .highlight .gp { 992 | color: #f8f8f2; 993 | } 994 | .highlight .gs { 995 | color: #f8f8f2; 996 | } 997 | .highlight .gu { 998 | color: #f8f8f2; 999 | font-weight: bold; 1000 | } 1001 | .highlight .gt { 1002 | color: #f8f8f2; 1003 | } 1004 | .highlight .kc { 1005 | color: #ff79c6; 1006 | } 1007 | .highlight .kd { 1008 | color: #8be9fd; 1009 | font-style: italic; 1010 | } 1011 | .highlight .kn { 1012 | color: #ff79c6; 1013 | } 1014 | .highlight .kp { 1015 | color: #ff79c6; 1016 | } 1017 | .highlight .kr { 1018 | color: #ff79c6; 1019 | } 1020 | .highlight .kt { 1021 | color: #8be9fd; 1022 | } 1023 | .highlight .ld { 1024 | color: #f8f8f2; 1025 | } 1026 | .highlight .m { 1027 | color: #bd93f9; 1028 | } 1029 | .highlight .s { 1030 | color: #f1fa8c; 1031 | } 1032 | .highlight .na { 1033 | color: #50fa7b; 1034 | } 1035 | .highlight .nb { 1036 | color: #8be9fd; 1037 | font-style: italic; 1038 | } 1039 | .highlight .nc { 1040 | color: #ffb86c; 1041 | font-weight: bold; 1042 | } 1043 | .highlight .no { 1044 | color: #f8f8f2; 1045 | } 1046 | .highlight .nd { 1047 | color: #9d93ff; 1048 | } 1049 | .highlight .ni { 1050 | color: #f8f8f2; 1051 | } 1052 | .highlight .ne { 1053 | color: #d8d8d2; 1054 | font-style: italic; 1055 | } 1056 | .highlight .nf { 1057 | color: #ffb86c; 1058 | } 1059 | 
.highlight .nl { 1060 | color: #8be9fd; 1061 | font-style: italic; 1062 | } 1063 | .highlight .nn { 1064 | color: #f8f8f2; 1065 | } 1066 | .highlight .nx { 1067 | color: #f8f8f2; 1068 | } 1069 | .highlight .py { 1070 | color: #f8f8f2; 1071 | } 1072 | .highlight .nt { 1073 | color: #ff79c6; 1074 | } 1075 | .highlight .nv { 1076 | color: #8be9fd; 1077 | font-style: italic; 1078 | } 1079 | .highlight .ow { 1080 | color: #ff79c6; 1081 | } 1082 | .highlight .w { 1083 | color: #f8f8f2; 1084 | } 1085 | .highlight .mb { 1086 | color: #bd93f9; 1087 | } 1088 | .highlight .mf { 1089 | color: #bd93f9; 1090 | } 1091 | .highlight .mh { 1092 | color: #bd93f9; 1093 | } 1094 | .highlight .mi { 1095 | color: #bd93f9; 1096 | } 1097 | .highlight .mo { 1098 | color: #bd93f9; 1099 | } 1100 | .highlight .sa { 1101 | color: #f1fa8c; 1102 | } 1103 | .highlight .sb { 1104 | color: #f1fa8c; 1105 | } 1106 | .highlight .sc { 1107 | color: #f1fa8c; 1108 | } 1109 | .highlight .dl { 1110 | color: #f1fa8c; 1111 | } 1112 | .highlight .sd { 1113 | color: #f1fa8c; 1114 | } 1115 | .highlight .s2 { 1116 | color: #f1fa8c; 1117 | } 1118 | .highlight .se { 1119 | color: #f1fa8c; 1120 | } 1121 | .highlight .sh { 1122 | color: #f1fa8c; 1123 | } 1124 | .highlight .si { 1125 | color: #f1fa8c; 1126 | } 1127 | .highlight .sx { 1128 | color: #f1fa8c; 1129 | } 1130 | .highlight .sr { 1131 | color: #f1fa8c; 1132 | } 1133 | .highlight .s1 { 1134 | color: #f1fa8c; 1135 | } 1136 | .highlight .ss { 1137 | color: #f1fa8c; 1138 | } 1139 | .highlight .bp { 1140 | color: #50fa7b; 1141 | } 1142 | .highlight .fm { 1143 | color: #ffb86c; 1144 | font-style: italic; 1145 | } 1146 | .highlight .vc { 1147 | color: #8be9fd; 1148 | font-style: italic; 1149 | } 1150 | .highlight .vg { 1151 | color: #8be9fd; 1152 | font-style: italic; 1153 | } 1154 | .highlight .vi { 1155 | color: #8be9fd; 1156 | font-style: italic; 1157 | } 1158 | .highlight .vm { 1159 | color: #8be9fd; 1160 | font-style: italic; 1161 | } 1162 | .highlight .il { 1163 | color: #bd93f9; 1164 | } 1165 | 1166 | body.lights-off .highlight .clicked, body.lights-off .katex .clicked { 1167 | color: #00ffff !important; 1168 | text-shadow: 0 0 20px #00ffff; 1169 | } 1170 | 1171 | body.lights-off .highlight { 1172 | color: whitesmoke; 1173 | } 1174 | body.lights-off .highlight .hll { 1175 | background-color: #c3c3c3; 1176 | } 1177 | body.lights-off .highlight .c { 1178 | color: #838383; 1179 | } 1180 | body.lights-off .highlight .err { 1181 | color: whitesmoke; 1182 | } 1183 | body.lights-off .highlight .g { 1184 | color: whitesmoke; 1185 | } 1186 | body.lights-off .highlight .k { 1187 | color: #bcbcbc; 1188 | } 1189 | body.lights-off .highlight .l { 1190 | color: whitesmoke; 1191 | } 1192 | body.lights-off .highlight .n { 1193 | color: whitesmoke; 1194 | } 1195 | body.lights-off .highlight .o { 1196 | color: #a4a4a4; 1197 | } 1198 | body.lights-off .highlight .x { 1199 | color: whitesmoke; 1200 | } 1201 | body.lights-off .highlight .p { 1202 | color: #a5a5a5; 1203 | } 1204 | body.lights-off .highlight .ch { 1205 | color: #838383; 1206 | } 1207 | body.lights-off .highlight .cm { 1208 | color: #838383; 1209 | } 1210 | body.lights-off .highlight .cp { 1211 | color: #bcbcbc; 1212 | } 1213 | body.lights-off .highlight .cpf { 1214 | color: #838383; 1215 | } 1216 | body.lights-off .highlight .c1 { 1217 | color: #838383; 1218 | } 1219 | body.lights-off .highlight .cs { 1220 | color: #838383; 1221 | } 1222 | body.lights-off .highlight .gd { 1223 | color: #4a4a4a; 1224 | } 1225 | body.lights-off .highlight 
.ge { 1226 | color: whitesmoke; 1227 | } 1228 | body.lights-off .highlight .gr { 1229 | color: whitesmoke; 1230 | } 1231 | body.lights-off .highlight .gh { 1232 | color: whitesmoke; 1233 | font-weight: bold; 1234 | } 1235 | body.lights-off .highlight .gi { 1236 | color: whitesmoke; 1237 | } 1238 | body.lights-off .highlight .go { 1239 | color: #4f4f4f; 1240 | } 1241 | body.lights-off .highlight .gp { 1242 | color: whitesmoke; 1243 | } 1244 | body.lights-off .highlight .gs { 1245 | color: whitesmoke; 1246 | } 1247 | body.lights-off .highlight .gu { 1248 | color: whitesmoke; 1249 | } 1250 | body.lights-off .highlight .gt { 1251 | color: whitesmoke; 1252 | } 1253 | body.lights-off .highlight .kc { 1254 | color: #bcbcbc; 1255 | } 1256 | body.lights-off .highlight .kd { 1257 | color: #c4c4c4; 1258 | font-style: italic; 1259 | } 1260 | body.lights-off .highlight .kn { 1261 | color: #bcbcbc; 1262 | } 1263 | body.lights-off .highlight .kp { 1264 | color: #bcbcbc; 1265 | } 1266 | body.lights-off .highlight .kr { 1267 | color: #bcbcbc; 1268 | } 1269 | body.lights-off .highlight .kt { 1270 | color: #c4c4c4; 1271 | } 1272 | body.lights-off .highlight .ld { 1273 | color: whitesmoke; 1274 | } 1275 | body.lights-off .highlight .m { 1276 | color: #c6c6c6; 1277 | } 1278 | body.lights-off .highlight .s { 1279 | color: #c3c3c3; 1280 | } 1281 | body.lights-off .highlight .na { 1282 | color: #a5a5a5; 1283 | } 1284 | body.lights-off .highlight .nb { 1285 | color: #c4c4c4; 1286 | } 1287 | body.lights-off .highlight .nc { 1288 | color: #b6b6b6; 1289 | } 1290 | body.lights-off .highlight .no { 1291 | color: whitesmoke; 1292 | } 1293 | body.lights-off .highlight .nd { 1294 | color: #c9c9c9; 1295 | } 1296 | body.lights-off .highlight .ni { 1297 | color: whitesmoke; 1298 | } 1299 | body.lights-off .highlight .ne { 1300 | color: #d5d5d5; 1301 | } 1302 | body.lights-off .highlight .nf { 1303 | color: #b6b6b6; 1304 | } 1305 | body.lights-off .highlight .nl { 1306 | color: #c4c4c4; 1307 | } 1308 | body.lights-off .highlight .nn { 1309 | color: whitesmoke; 1310 | } 1311 | body.lights-off .highlight .nx { 1312 | color: whitesmoke; 1313 | } 1314 | body.lights-off .highlight .py { 1315 | color: whitesmoke; 1316 | } 1317 | body.lights-off .highlight .nt { 1318 | color: #bcbcbc; 1319 | } 1320 | body.lights-off .highlight .nv { 1321 | color: #c4c4c4; 1322 | } 1323 | body.lights-off .highlight .ow { 1324 | color: #bcbcbc; 1325 | } 1326 | body.lights-off .highlight .w { 1327 | color: whitesmoke; 1328 | } 1329 | body.lights-off .highlight .mb { 1330 | color: #c6c6c6; 1331 | } 1332 | body.lights-off .highlight .mf { 1333 | color: #c6c6c6; 1334 | } 1335 | body.lights-off .highlight .mh { 1336 | color: #c6c6c6; 1337 | } 1338 | body.lights-off .highlight .mi { 1339 | color: #c6c6c6; 1340 | } 1341 | body.lights-off .highlight .mo { 1342 | color: #c6c6c6; 1343 | } 1344 | body.lights-off .highlight .sa { 1345 | color: #c3c3c3; 1346 | } 1347 | body.lights-off .highlight .sb { 1348 | color: #c3c3c3; 1349 | } 1350 | body.lights-off .highlight .sc { 1351 | color: #c3c3c3; 1352 | } 1353 | body.lights-off .highlight .dl { 1354 | color: #c3c3c3; 1355 | } 1356 | body.lights-off .highlight .sd { 1357 | color: #c3c3c3; 1358 | } 1359 | body.lights-off .highlight .s2 { 1360 | color: #c3c3c3; 1361 | } 1362 | body.lights-off .highlight .se { 1363 | color: #c3c3c3; 1364 | } 1365 | body.lights-off .highlight .sh { 1366 | color: #c3c3c3; 1367 | } 1368 | body.lights-off .highlight .si { 1369 | color: #c3c3c3; 1370 | } 1371 | body.lights-off .highlight .sx 
{ 1372 | color: #c3c3c3; 1373 | } 1374 | body.lights-off .highlight .sr { 1375 | color: #c3c3c3; 1376 | } 1377 | body.lights-off .highlight .s1 { 1378 | color: #c3c3c3; 1379 | } 1380 | body.lights-off .highlight .ss { 1381 | color: #c3c3c3; 1382 | } 1383 | body.lights-off .highlight .bp { 1384 | color: #a5a5a5; 1385 | } 1386 | body.lights-off .highlight .fm { 1387 | color: #b6b6b6; 1388 | } 1389 | body.lights-off .highlight .vc { 1390 | color: #c4c4c4; 1391 | } 1392 | body.lights-off .highlight .vg { 1393 | color: #c4c4c4; 1394 | } 1395 | body.lights-off .highlight .vi { 1396 | color: #c4c4c4; 1397 | } 1398 | body.lights-off .highlight .vm { 1399 | color: #c4c4c4; 1400 | } 1401 | body.lights-off .highlight .il { 1402 | color: #c6c6c6; 1403 | } 1404 | } 1405 | @media (prefers-color-scheme: light) { 1406 | body { 1407 | color: #777; 1408 | background: #ecf0f3; 1409 | } 1410 | 1411 | hr { 1412 | border-color: #555; 1413 | } 1414 | 1415 | a { 1416 | color: #666; 1417 | } 1418 | a:visited { 1419 | color: #777; 1420 | } 1421 | 1422 | #container { 1423 | background: #ecf0f3; 1424 | } 1425 | 1426 | div.section { 1427 | border-top: 1px solid #e2e2eb; 1428 | } 1429 | 1430 | #background { 1431 | background: #ebedef; 1432 | border-left: 1px solid #e2e2eb; 1433 | } 1434 | 1435 | div.footer { 1436 | background: #d5dbe0; 1437 | } 1438 | div.footer a { 1439 | display: inline-block; 1440 | margin: 5px; 1441 | } 1442 | 1443 | a.parent { 1444 | color: #000000; 1445 | } 1446 | a.parent:after { 1447 | color: #666666; 1448 | } 1449 | 1450 | div.section:hover { 1451 | background: #f8fafb; 1452 | } 1453 | div.section:hover div.code { 1454 | background: #f8fafb; 1455 | } 1456 | div.section div.docs .section-link a { 1457 | color: #454545; 1458 | } 1459 | div.section div.docs p tt, div.section div.docs li code, div.section div.docs ol code, div.section div.docs p code { 1460 | background: #ebedef; 1461 | color: #666; 1462 | } 1463 | div.section div.code { 1464 | background: #ebedef; 1465 | } 1466 | 1467 | div.doc-strings { 1468 | color: #555; 1469 | } 1470 | 1471 | .mjx-chtml { 1472 | color: #555; 1473 | } 1474 | 1475 | code { 1476 | background: #ebedef; 1477 | border: 1px solid #cacddc; 1478 | } 1479 | 1480 | #modal { 1481 | background-color: rgba(0, 0, 0, 0.9); 1482 | } 1483 | #modal > div > p { 1484 | color: #ccc; 1485 | } 1486 | #modal > span.close { 1487 | color: #f1f1f1; 1488 | } 1489 | #modal:hover, #modal:focus { 1490 | color: #bbb; 1491 | } 1492 | 1493 | .highlight { 1494 | color: #060606; 1495 | } 1496 | .highlight .hll { 1497 | background-color: #18c043; 1498 | } 1499 | .highlight .c { 1500 | color: #213c93; 1501 | } 1502 | .highlight .err { 1503 | color: #060606; 1504 | } 1505 | .highlight .g { 1506 | color: #060606; 1507 | } 1508 | .highlight .k { 1509 | color: #eb339d; 1510 | } 1511 | .highlight .l { 1512 | color: #060606; 1513 | } 1514 | .highlight .n { 1515 | color: #060606; 1516 | } 1517 | .highlight .o { 1518 | color: #ff4996; 1519 | } 1520 | .highlight .x { 1521 | color: #060606; 1522 | } 1523 | .highlight .p { 1524 | color: #c9c900; 1525 | } 1526 | .highlight .ch { 1527 | color: #213c93; 1528 | } 1529 | .highlight .cm { 1530 | color: #213c93; 1531 | } 1532 | .highlight .cp { 1533 | color: #eb339d; 1534 | } 1535 | .highlight .cpf { 1536 | color: #213c93; 1537 | } 1538 | .highlight .c1 { 1539 | color: #213c93; 1540 | } 1541 | .highlight .cs { 1542 | color: #213c93; 1543 | } 1544 | .highlight .gd { 1545 | color: #8b080b; 1546 | } 1547 | .highlight .ge { 1548 | color: #060606; 1549 | 
text-decoration: underline; 1550 | } 1551 | .highlight .gr { 1552 | color: #060606; 1553 | } 1554 | .highlight .gh { 1555 | color: #060606; 1556 | font-weight: bold; 1557 | } 1558 | .highlight .gi { 1559 | color: #060606; 1560 | font-weight: bold; 1561 | } 1562 | .highlight .go { 1563 | color: #44475a; 1564 | } 1565 | .highlight .gp { 1566 | color: #060606; 1567 | } 1568 | .highlight .gs { 1569 | color: #060606; 1570 | } 1571 | .highlight .gu { 1572 | color: #060606; 1573 | font-weight: bold; 1574 | } 1575 | .highlight .gt { 1576 | color: #060606; 1577 | } 1578 | .highlight .kc { 1579 | color: #eb339d; 1580 | } 1581 | .highlight .kd { 1582 | color: #00cbf6; 1583 | font-style: italic; 1584 | } 1585 | .highlight .kn { 1586 | color: #eb339d; 1587 | } 1588 | .highlight .kp { 1589 | color: #eb339d; 1590 | } 1591 | .highlight .kr { 1592 | color: #eb339d; 1593 | } 1594 | .highlight .kt { 1595 | color: #00cbf6; 1596 | } 1597 | .highlight .ld { 1598 | color: #060606; 1599 | } 1600 | .highlight .m { 1601 | color: #733fbe; 1602 | } 1603 | .highlight .s { 1604 | color: #18c043; 1605 | } 1606 | .highlight .na { 1607 | color: #25da53; 1608 | } 1609 | .highlight .nb { 1610 | color: #00cbf6; 1611 | font-style: italic; 1612 | } 1613 | .highlight .nc { 1614 | color: #ff8707; 1615 | font-weight: bold; 1616 | } 1617 | .highlight .no { 1618 | color: #060606; 1619 | } 1620 | .highlight .nd { 1621 | color: #332b80; 1622 | } 1623 | .highlight .ni { 1624 | color: #060606; 1625 | } 1626 | .highlight .ne { 1627 | color: #b7b702; 1628 | font-style: italic; 1629 | } 1630 | .highlight .nf { 1631 | color: #ff8707; 1632 | } 1633 | .highlight .nl { 1634 | color: #00cbf6; 1635 | font-style: italic; 1636 | } 1637 | .highlight .nn { 1638 | color: #060606; 1639 | } 1640 | .highlight .nx { 1641 | color: #060606; 1642 | } 1643 | .highlight .py { 1644 | color: #060606; 1645 | } 1646 | .highlight .nt { 1647 | color: #eb339d; 1648 | } 1649 | .highlight .nv { 1650 | color: #00cbf6; 1651 | font-style: italic; 1652 | } 1653 | .highlight .ow { 1654 | color: #eb339d; 1655 | } 1656 | .highlight .w { 1657 | color: #060606; 1658 | } 1659 | .highlight .mb { 1660 | color: #733fbe; 1661 | } 1662 | .highlight .mf { 1663 | color: #733fbe; 1664 | } 1665 | .highlight .mh { 1666 | color: #733fbe; 1667 | } 1668 | .highlight .mi { 1669 | color: #733fbe; 1670 | } 1671 | .highlight .mo { 1672 | color: #733fbe; 1673 | } 1674 | .highlight .sa { 1675 | color: #18c043; 1676 | } 1677 | .highlight .sb { 1678 | color: #18c043; 1679 | } 1680 | .highlight .sc { 1681 | color: #18c043; 1682 | } 1683 | .highlight .dl { 1684 | color: #18c043; 1685 | } 1686 | .highlight .sd { 1687 | color: #18c043; 1688 | } 1689 | .highlight .s2 { 1690 | color: #18c043; 1691 | } 1692 | .highlight .se { 1693 | color: #18c043; 1694 | } 1695 | .highlight .sh { 1696 | color: #18c043; 1697 | } 1698 | .highlight .si { 1699 | color: #18c043; 1700 | } 1701 | .highlight .sx { 1702 | color: #18c043; 1703 | } 1704 | .highlight .sr { 1705 | color: #18c043; 1706 | } 1707 | .highlight .s1 { 1708 | color: #18c043; 1709 | } 1710 | .highlight .ss { 1711 | color: #18c043; 1712 | } 1713 | .highlight .bp { 1714 | color: #25da53; 1715 | } 1716 | .highlight .fm { 1717 | color: #ff8707; 1718 | font-style: italic; 1719 | } 1720 | .highlight .vc { 1721 | color: #00cbf6; 1722 | font-style: italic; 1723 | } 1724 | .highlight .vg { 1725 | color: #00cbf6; 1726 | font-style: italic; 1727 | } 1728 | .highlight .vi { 1729 | color: #00cbf6; 1730 | font-style: italic; 1731 | } 1732 | .highlight .vm { 1733 | color: 
#00cbf6; 1734 | font-style: italic; 1735 | } 1736 | .highlight .il { 1737 | color: #733fbe; 1738 | } 1739 | 1740 | body.lights-off .highlight .clicked, body.lights-off .katex .clicked { 1741 | color: #00ccff !important; 1742 | text-shadow: 0 0 20px #00ccff; 1743 | } 1744 | 1745 | body.lights-off .highlight { 1746 | color: #060606; 1747 | } 1748 | body.lights-off .highlight .hll { 1749 | background-color: #6c6c6c; 1750 | } 1751 | body.lights-off .highlight .c { 1752 | color: #5a5a5a; 1753 | } 1754 | body.lights-off .highlight .err { 1755 | color: #060606; 1756 | } 1757 | body.lights-off .highlight .g { 1758 | color: #060606; 1759 | } 1760 | body.lights-off .highlight .k { 1761 | color: #8f8f8f; 1762 | } 1763 | body.lights-off .highlight .l { 1764 | color: #060606; 1765 | } 1766 | body.lights-off .highlight .n { 1767 | color: #060606; 1768 | } 1769 | body.lights-off .highlight .o { 1770 | color: #a4a4a4; 1771 | } 1772 | body.lights-off .highlight .x { 1773 | color: #060606; 1774 | } 1775 | body.lights-off .highlight .p { 1776 | color: #656565; 1777 | } 1778 | body.lights-off .highlight .ch { 1779 | color: #5a5a5a; 1780 | } 1781 | body.lights-off .highlight .cm { 1782 | color: #5a5a5a; 1783 | } 1784 | body.lights-off .highlight .cp { 1785 | color: #8f8f8f; 1786 | } 1787 | body.lights-off .highlight .cpf { 1788 | color: #5a5a5a; 1789 | } 1790 | body.lights-off .highlight .c1 { 1791 | color: #5a5a5a; 1792 | } 1793 | body.lights-off .highlight .cs { 1794 | color: #5a5a5a; 1795 | } 1796 | body.lights-off .highlight .gd { 1797 | color: #4a4a4a; 1798 | } 1799 | body.lights-off .highlight .ge { 1800 | color: #060606; 1801 | } 1802 | body.lights-off .highlight .gr { 1803 | color: #060606; 1804 | } 1805 | body.lights-off .highlight .gh { 1806 | color: #060606; 1807 | font-weight: bold; 1808 | } 1809 | body.lights-off .highlight .gi { 1810 | color: #060606; 1811 | } 1812 | body.lights-off .highlight .go { 1813 | color: #4f4f4f; 1814 | } 1815 | body.lights-off .highlight .gp { 1816 | color: #060606; 1817 | } 1818 | body.lights-off .highlight .gs { 1819 | color: #060606; 1820 | } 1821 | body.lights-off .highlight .gu { 1822 | color: #060606; 1823 | } 1824 | body.lights-off .highlight .gt { 1825 | color: #060606; 1826 | } 1827 | body.lights-off .highlight .kc { 1828 | color: #8f8f8f; 1829 | } 1830 | body.lights-off .highlight .kd { 1831 | color: #7b7b7b; 1832 | font-style: italic; 1833 | } 1834 | body.lights-off .highlight .kn { 1835 | color: #8f8f8f; 1836 | } 1837 | body.lights-off .highlight .kp { 1838 | color: #8f8f8f; 1839 | } 1840 | body.lights-off .highlight .kr { 1841 | color: #8f8f8f; 1842 | } 1843 | body.lights-off .highlight .kt { 1844 | color: #7b7b7b; 1845 | } 1846 | body.lights-off .highlight .ld { 1847 | color: #060606; 1848 | } 1849 | body.lights-off .highlight .m { 1850 | color: #7f7f7f; 1851 | } 1852 | body.lights-off .highlight .s { 1853 | color: #6c6c6c; 1854 | } 1855 | body.lights-off .highlight .na { 1856 | color: gray; 1857 | } 1858 | body.lights-off .highlight .nb { 1859 | color: #7b7b7b; 1860 | } 1861 | body.lights-off .highlight .nc { 1862 | color: #838383; 1863 | } 1864 | body.lights-off .highlight .no { 1865 | color: #060606; 1866 | } 1867 | body.lights-off .highlight .nd { 1868 | color: #565656; 1869 | } 1870 | body.lights-off .highlight .ni { 1871 | color: #060606; 1872 | } 1873 | body.lights-off .highlight .ne { 1874 | color: #5d5d5d; 1875 | } 1876 | body.lights-off .highlight .nf { 1877 | color: #838383; 1878 | } 1879 | body.lights-off .highlight .nl { 1880 | color: #7b7b7b; 
1881 | } 1882 | body.lights-off .highlight .nn { 1883 | color: #060606; 1884 | } 1885 | body.lights-off .highlight .nx { 1886 | color: #060606; 1887 | } 1888 | body.lights-off .highlight .py { 1889 | color: #060606; 1890 | } 1891 | body.lights-off .highlight .nt { 1892 | color: #8f8f8f; 1893 | } 1894 | body.lights-off .highlight .nv { 1895 | color: #7b7b7b; 1896 | } 1897 | body.lights-off .highlight .ow { 1898 | color: #8f8f8f; 1899 | } 1900 | body.lights-off .highlight .w { 1901 | color: #060606; 1902 | } 1903 | body.lights-off .highlight .mb { 1904 | color: #7f7f7f; 1905 | } 1906 | body.lights-off .highlight .mf { 1907 | color: #7f7f7f; 1908 | } 1909 | body.lights-off .highlight .mh { 1910 | color: #7f7f7f; 1911 | } 1912 | body.lights-off .highlight .mi { 1913 | color: #7f7f7f; 1914 | } 1915 | body.lights-off .highlight .mo { 1916 | color: #7f7f7f; 1917 | } 1918 | body.lights-off .highlight .sa { 1919 | color: #6c6c6c; 1920 | } 1921 | body.lights-off .highlight .sb { 1922 | color: #6c6c6c; 1923 | } 1924 | body.lights-off .highlight .sc { 1925 | color: #6c6c6c; 1926 | } 1927 | body.lights-off .highlight .dl { 1928 | color: #6c6c6c; 1929 | } 1930 | body.lights-off .highlight .sd { 1931 | color: #6c6c6c; 1932 | } 1933 | body.lights-off .highlight .s2 { 1934 | color: #6c6c6c; 1935 | } 1936 | body.lights-off .highlight .se { 1937 | color: #6c6c6c; 1938 | } 1939 | body.lights-off .highlight .sh { 1940 | color: #6c6c6c; 1941 | } 1942 | body.lights-off .highlight .si { 1943 | color: #6c6c6c; 1944 | } 1945 | body.lights-off .highlight .sx { 1946 | color: #6c6c6c; 1947 | } 1948 | body.lights-off .highlight .sr { 1949 | color: #6c6c6c; 1950 | } 1951 | body.lights-off .highlight .s1 { 1952 | color: #6c6c6c; 1953 | } 1954 | body.lights-off .highlight .ss { 1955 | color: #6c6c6c; 1956 | } 1957 | body.lights-off .highlight .bp { 1958 | color: gray; 1959 | } 1960 | body.lights-off .highlight .fm { 1961 | color: #838383; 1962 | } 1963 | body.lights-off .highlight .vc { 1964 | color: #7b7b7b; 1965 | } 1966 | body.lights-off .highlight .vg { 1967 | color: #7b7b7b; 1968 | } 1969 | body.lights-off .highlight .vi { 1970 | color: #7b7b7b; 1971 | } 1972 | body.lights-off .highlight .vm { 1973 | color: #7b7b7b; 1974 | } 1975 | body.lights-off .highlight .il { 1976 | color: #7f7f7f; 1977 | } 1978 | } 1979 | 1980 | /*# sourceMappingURL=pylit.css.map */ 1981 | -------------------------------------------------------------------------------- /ditk/doc/annotated/solarized.css: -------------------------------------------------------------------------------- 1 | /* 2 | Solarized theme for code-mirror 3 | http://ethanschoonover.com/solarized 4 | */ 5 | 6 | /* 7 | Solarized color palette 8 | http://ethanschoonover.com/solarized/img/solarized-palette.png 9 | */ 10 | 11 | .solarized.base03 { color: #282a36; } 12 | .solarized.base02 { color: #073642; } 13 | .solarized.base01 { color: #586e75; } 14 | .solarized.base00 { color: #657b83; } 15 | .solarized.base0 { color: #839496; } 16 | .solarized.base1 { color: #93a1a1; } 17 | .solarized.base2 { color: #eee8d5; } 18 | .solarized.base3 { color: #fdf6e3; } 19 | .solarized.solar-yellow { color: #b58900; } 20 | .solarized.solar-orange { color: #cb4b16; } 21 | .solarized.solar-red { color: #dc322f; } 22 | .solarized.solar-magenta { color: #d33682; } 23 | .solarized.solar-violet { color: #6c71c4; } 24 | .solarized.solar-blue { color: #268bd2; } 25 | .solarized.solar-cyan { color: #2aa198; } 26 | .solarized.solar-green { color: #859900; } 27 | 28 | /* Color scheme for code-mirror */ 
29 | 30 | .cm-s-solarized { 31 | line-height: 1.45em; 32 | color-profile: sRGB; 33 | rendering-intent: auto; 34 | } 35 | .cm-s-solarized.cm-s-dark { 36 | color: #839496; 37 | background-color: #282a36; /* #002b36 */; 38 | height: auto; 39 | } 40 | .cm-s-solarized.cm-s-light { 41 | background-color: #fdf6e3; 42 | color: #657b83; 43 | } 44 | 45 | .cm-s-solarized .CodeMirror-widget { 46 | text-shadow: none; 47 | } 48 | 49 | .cm-s-solarized .cm-header { color: #586e75; } 50 | .cm-s-solarized .cm-quote { color: #93a1a1; } 51 | 52 | .cm-s-solarized .cm-keyword { color: #cb4b16; } 53 | .cm-s-solarized .cm-atom { color: #d33682; } 54 | .cm-s-solarized .cm-number { color: #d33682; } 55 | .cm-s-solarized .cm-def { color: #2aa198; } 56 | 57 | .cm-s-solarized .cm-variable { color: #839496; } 58 | .cm-s-solarized .cm-variable-2 { color: #b58900; } 59 | .cm-s-solarized .cm-variable-3, .cm-s-solarized .cm-type { color: #6c71c4; } 60 | 61 | .cm-s-solarized .cm-property { color: #2aa198; } 62 | .cm-s-solarized .cm-operator { color: #6c71c4; } 63 | 64 | .cm-s-solarized .cm-comment { color: #586e75; font-style:italic; } 65 | 66 | .cm-s-solarized .cm-string { color: #859900; } 67 | .cm-s-solarized .cm-string-2 { color: #b58900; } 68 | 69 | .cm-s-solarized .cm-meta { color: #859900; } 70 | .cm-s-solarized .cm-qualifier { color: #b58900; } 71 | .cm-s-solarized .cm-builtin { color: #d33682; } 72 | .cm-s-solarized .cm-bracket { color: #cb4b16; } 73 | .cm-s-solarized .CodeMirror-matchingbracket { color: #859900; } 74 | .cm-s-solarized .CodeMirror-nonmatchingbracket { color: #dc322f; } 75 | .cm-s-solarized .cm-tag { color: #93a1a1; } 76 | .cm-s-solarized .cm-attribute { color: #2aa198; } 77 | .cm-s-solarized .cm-hr { 78 | color: transparent; 79 | border-top: 1px solid #586e75; 80 | display: block; 81 | } 82 | .cm-s-solarized .cm-link { color: #93a1a1; cursor: pointer; } 83 | .cm-s-solarized .cm-special { color: #6c71c4; } 84 | .cm-s-solarized .cm-em { 85 | color: #999; 86 | text-decoration: underline; 87 | text-decoration-style: dotted; 88 | } 89 | .cm-s-solarized .cm-error, 90 | .cm-s-solarized .cm-invalidchar { 91 | color: #586e75; 92 | border-bottom: 1px dotted #dc322f; 93 | } 94 | 95 | .cm-s-solarized.cm-s-dark div.CodeMirror-selected { background: #073642; } 96 | .cm-s-solarized.cm-s-dark.CodeMirror ::selection { background: #282a36; } /* rgba(7, 54, 66, 0.99); } */ 97 | .cm-s-solarized.cm-s-dark .CodeMirror-line::-moz-selection, .cm-s-dark .CodeMirror-line > span::-moz-selection, .cm-s-dark .CodeMirror-line > span > span::-moz-selection { background: rgba(7, 54, 66, 0.99); } 98 | 99 | .cm-s-solarized.cm-s-light div.CodeMirror-selected { background: #eee8d5; } 100 | .cm-s-solarized.cm-s-light .CodeMirror-line::selection, .cm-s-light .CodeMirror-line > span::selection, .cm-s-light .CodeMirror-line > span > span::selection { background: #eee8d5; } 101 | .cm-s-solarized.cm-s-light .CodeMirror-line::-moz-selection, .cm-s-light .CodeMirror-line > span::-moz-selection, .cm-s-light .CodeMirror-line > span > span::-moz-selection { background: #eee8d5; } 102 | 103 | /* Editor styling */ 104 | 105 | 106 | 107 | /* Little shadow on the view-port of the buffer view */ 108 | .cm-s-solarized.CodeMirror { 109 | -moz-box-shadow: inset 7px 0 12px -6px #000; 110 | -webkit-box-shadow: inset 7px 0 12px -6px #000; 111 | box-shadow: inset 7px 0 12px -6px #000; 112 | } 113 | 114 | /* Remove gutter border */ 115 | .cm-s-solarized .CodeMirror-gutters { 116 | border-right: 0; 117 | } 118 | 119 | /* Gutter colors and line number styling 
based of color scheme (dark / light) */ 120 | 121 | /* Dark */ 122 | .cm-s-solarized.cm-s-dark .CodeMirror-gutters { 123 | background-color: #282a36; /* #073642; */ 124 | } 125 | 126 | .cm-s-solarized.cm-s-dark .CodeMirror-linenumber { 127 | color: #586e75; 128 | } 129 | 130 | /* Light */ 131 | .cm-s-solarized.cm-s-light .CodeMirror-gutters { 132 | background-color: #eee8d5; 133 | } 134 | 135 | .cm-s-solarized.cm-s-light .CodeMirror-linenumber { 136 | color: #839496; 137 | } 138 | 139 | /* Common */ 140 | .cm-s-solarized .CodeMirror-linenumber { 141 | padding: 0 5px; 142 | } 143 | .cm-s-solarized .CodeMirror-guttermarker-subtle { color: #586e75; } 144 | .cm-s-solarized.cm-s-dark .CodeMirror-guttermarker { color: #ddd; } 145 | .cm-s-solarized.cm-s-light .CodeMirror-guttermarker { color: #cb4b16; } 146 | 147 | .cm-s-solarized .CodeMirror-gutter .CodeMirror-gutter-text { 148 | color: #586e75; 149 | } 150 | 151 | /* Cursor */ 152 | .cm-s-solarized .CodeMirror-cursor { border-left: 1px solid #819090; } 153 | 154 | /* Fat cursor */ 155 | .cm-s-solarized.cm-s-light.cm-fat-cursor .CodeMirror-cursor { background: #77ee77; } 156 | .cm-s-solarized.cm-s-light .cm-animate-fat-cursor { background-color: #77ee77; } 157 | .cm-s-solarized.cm-s-dark.cm-fat-cursor .CodeMirror-cursor { background: #586e75; } 158 | .cm-s-solarized.cm-s-dark .cm-animate-fat-cursor { background-color: #586e75; } 159 | 160 | /* Active line */ 161 | .cm-s-solarized.cm-s-dark .CodeMirror-activeline-background { 162 | background: rgba(255, 255, 255, 0.06); 163 | } 164 | .cm-s-solarized.cm-s-light .CodeMirror-activeline-background { 165 | background: rgba(0, 0, 0, 0.06); 166 | } 167 | -------------------------------------------------------------------------------- /ditk/logging/__init__.py: -------------------------------------------------------------------------------- 1 | from .explicit import * 2 | from .file import LoggingFileHandler 3 | from .func import * 4 | from .inherit import * 5 | from .log import getLogger, try_init_root 6 | from .terminal import LoggingTerminalHandler 7 | -------------------------------------------------------------------------------- /ditk/logging/base.py: -------------------------------------------------------------------------------- 1 | from typing import Union 2 | 3 | _LogLevelType = Union[int, str] 4 | -------------------------------------------------------------------------------- /ditk/logging/explicit.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | __all__ = [ 4 | 'CRITICAL', 5 | 'FATAL', 6 | 'ERROR', 7 | 'WARNING', 8 | 'WARN', 9 | 'INFO', 10 | 'DEBUG', 11 | 'NOTSET', 12 | 'Logger', 13 | 'Handler', 14 | "FileHandler", 15 | 'StreamHandler', 16 | 'NullHandler', 17 | 'getLogger', 18 | ] 19 | 20 | CRITICAL = logging.CRITICAL 21 | FATAL = logging.FATAL 22 | ERROR = logging.ERROR 23 | WARNING = logging.WARNING 24 | WARN = logging.WARN 25 | INFO = logging.INFO 26 | DEBUG = logging.DEBUG 27 | NOTSET = logging.NOTSET 28 | 29 | Logger = logging.Logger 30 | Handler = logging.Handler 31 | FileHandler = logging.FileHandler 32 | StreamHandler = logging.StreamHandler 33 | NullHandler = logging.NullHandler 34 | 35 | getLogger = logging.getLogger 36 | -------------------------------------------------------------------------------- /ditk/logging/file.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import pathlib 4 | from typing import Optional 5 | 6 | from .base import 
_LogLevelType 7 | 8 | 9 | def _normpath(path: str) -> str: 10 | return os.path.normcase(os.path.normpath(os.path.abspath(path))) 11 | 12 | 13 | class LoggingFileHandler(logging.FileHandler): 14 | 15 | def __init__( 16 | self, 17 | filename: str, 18 | mode: str = 'a', 19 | encoding: Optional[str] = None, 20 | delay: bool = False, 21 | **kwargs 22 | ) -> None: 23 | logging.FileHandler.__init__(self, filename, mode, encoding, delay, **kwargs) 24 | self.__file_path = _normpath(filename) 25 | 26 | @property 27 | def file_path(self) -> str: 28 | """ 29 | Unique path of the file. 30 | """ 31 | return self.__file_path 32 | 33 | 34 | _FILE_FMT = logging.Formatter( 35 | fmt='[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s] %(message)s', 36 | datefmt="%Y-%m-%d %H:%M:%S", 37 | ) 38 | 39 | 40 | def _create_local_file(filename: str) -> str: 41 | filepath, name = os.path.split(os.path.abspath(filename)) 42 | os.makedirs(filepath, exist_ok=True) 43 | pathlib.Path(filename).touch() 44 | return filename 45 | 46 | 47 | def _create_file_handler(path: str, mode: str = 'a', level: _LogLevelType = logging.NOTSET) -> LoggingFileHandler: 48 | logger_file_path = _create_local_file(path) 49 | handler = LoggingFileHandler(logger_file_path, mode) 50 | handler.setFormatter(_FILE_FMT) 51 | handler.setLevel(level) 52 | return handler 53 | -------------------------------------------------------------------------------- /ditk/logging/func.py: -------------------------------------------------------------------------------- 1 | from typing import Mapping, Any 2 | 3 | from hbutils.testing import vpython 4 | 5 | from .log import try_init_root as _root 6 | 7 | __all__ = [ 8 | 'critical', 9 | 'fatal', 10 | 'error', 11 | 'exception', 12 | 'warning', 13 | 'warn', 14 | 'info', 15 | 'debug', 16 | 'log', 17 | ] 18 | 19 | _has_stacklevel = vpython >= '3.8' 20 | 21 | 22 | def _inc_stacklevel(kwargs: Mapping): 23 | """ 24 | Increase the value of argument ``stacklevel``. 25 | The default value should be 1, it will be increased by 1 after this function is called. 26 | """ 27 | if _has_stacklevel: 28 | retval = dict(kwargs) 29 | retval.setdefault('stacklevel', 1) 30 | retval['stacklevel'] += 1 31 | return retval 32 | else: 33 | return kwargs 34 | 35 | 36 | def _pkwargs(kwargs: Mapping[str, Any]) -> Mapping[str, Any]: 37 | """ 38 | Everytime the log functions (such as :func:`debug`, :func:`info`) is wrapped, \ 39 | this function should be used before passing the ``kwargs`` argument to the next level. 40 | """ 41 | return _inc_stacklevel(kwargs) 42 | 43 | 44 | def critical(msg, *args, **kwargs): 45 | """ 46 | Log a message with severity 'CRITICAL' on the root logger. If the logger 47 | has no handlers, call basicConfig() to add a console handler with a 48 | pre-defined format. 49 | """ 50 | _root().critical(msg, *args, **_pkwargs(kwargs)) 51 | 52 | 53 | fatal = critical 54 | 55 | 56 | def error(msg, *args, **kwargs): 57 | """ 58 | Log a message with severity 'ERROR' on the root logger. If the logger has 59 | no handlers, call basicConfig() to add a console handler with a pre-defined 60 | format. 61 | """ 62 | _root().error(msg, *args, **_pkwargs(kwargs)) 63 | 64 | 65 | def exception(msg, *args, exc_info=True, **kwargs): 66 | """ 67 | Log a message with severity 'ERROR' on the root logger, with exception 68 | information. If the logger has no handlers, basicConfig() is called to add 69 | a console handler with a pre-defined format. 
70 | """ 71 | error(msg, *args, exc_info=exc_info, **_pkwargs(kwargs)) 72 | 73 | 74 | def warning(msg, *args, **kwargs): 75 | """ 76 | Log a message with severity 'WARNING' on the root logger. If the logger has 77 | no handlers, call basicConfig() to add a console handler with a pre-defined 78 | format. 79 | """ 80 | _root().warning(msg, *args, **_pkwargs(kwargs)) 81 | 82 | 83 | def warn(msg, *args, **kwargs): 84 | warning(msg, *args, **_pkwargs(kwargs)) 85 | 86 | 87 | def info(msg, *args, **kwargs): 88 | """ 89 | Log a message with severity 'INFO' on the root logger. If the logger has 90 | no handlers, call basicConfig() to add a console handler with a pre-defined 91 | format. 92 | """ 93 | _root().info(msg, *args, **_pkwargs(kwargs)) 94 | 95 | 96 | def debug(msg, *args, **kwargs): 97 | """ 98 | Log a message with severity 'DEBUG' on the root logger. If the logger has 99 | no handlers, call basicConfig() to add a console handler with a pre-defined 100 | format. 101 | """ 102 | _root().debug(msg, *args, **_pkwargs(kwargs)) 103 | 104 | 105 | def log(level, msg, *args, **kwargs): 106 | """ 107 | Log 'msg % args' with the integer severity 'level' on the root logger. If 108 | the logger has no handlers, call basicConfig() to add a console handler 109 | with a pre-defined format. 110 | """ 111 | _root().log(level, msg, *args, **_pkwargs(kwargs)) 112 | -------------------------------------------------------------------------------- /ditk/logging/inherit.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | from .explicit import __all__ as _explicit_all 4 | from .func import __all__ as _func_all 5 | from .log import __all__ as _log_all 6 | 7 | _exist_all_set = set(_func_all) | set(_explicit_all) | set(_log_all) 8 | __all__ = [name for name in logging.__all__ if name not in _exist_all_set] 9 | 10 | for _name in __all__: 11 | globals()[_name] = getattr(logging, _name) 12 | -------------------------------------------------------------------------------- /ditk/logging/log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from typing import List, Optional, Tuple 3 | 4 | from .base import _LogLevelType 5 | from .file import _create_file_handler, LoggingFileHandler, _normpath 6 | from .terminal import LoggingTerminalHandler 7 | 8 | __all__ = [ 9 | 'try_init_root', 10 | 'getLogger', 11 | ] 12 | 13 | 14 | def try_init_root(level: Optional[_LogLevelType] = None) -> logging.Logger: 15 | root = logging.getLogger() 16 | if not root.handlers: 17 | root.addHandler(LoggingTerminalHandler()) 18 | if level is not None: 19 | root.setLevel(level) 20 | return root 21 | 22 | 23 | # noinspection PyPep8Naming 24 | def getLogger( 25 | name: Optional[str] = None, 26 | level: Optional[_LogLevelType] = None, 27 | with_files: Optional[List[str]] = None 28 | ) -> logging.Logger: 29 | """ 30 | Overview: 31 | Get :class:`logging.Logger` object, with terminal output and file output. 32 | 33 | :param name: Name of logger. 34 | :param level: Logging level of the loggers. 35 | :param with_files: The files going to output. 36 | :return logger: Logger created. 
37 | """ 38 | 39 | logger = logging.getLogger(name) 40 | if level is not None: 41 | logger.setLevel(level) 42 | to_be_logged = [] 43 | 44 | with_files = with_files or [] 45 | if with_files: 46 | file_handlers: List[Tuple[LoggingFileHandler, logging.Logger]] = [] 47 | _current_logger = logger 48 | while _current_logger: 49 | for handler in _current_logger.handlers: 50 | if isinstance(handler, LoggingFileHandler): 51 | file_handlers.append((handler, _current_logger)) 52 | 53 | _current_logger = _current_logger.parent if _current_logger.propagate else None 54 | 55 | fps = {handler.file_path: _logger for handler, _logger in file_handlers} 56 | for file in with_files: 57 | nfile = _normpath(file) 58 | if nfile in fps: 59 | to_be_logged.append( 60 | ( 61 | logging.WARNING, f"File {repr(file)} has already been added to logger {repr(fps[nfile])}, " 62 | f"so this configuration will be ignored." 63 | ) 64 | ) 65 | else: 66 | handler = _create_file_handler(file) 67 | logger.addHandler(handler) 68 | fps[nfile] = logger 69 | 70 | for level_, msg in to_be_logged: 71 | logger.log(level_, msg) 72 | 73 | return logger 74 | -------------------------------------------------------------------------------- /ditk/logging/rich.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import shutil 3 | from functools import lru_cache 4 | 5 | from rich.console import Console 6 | from rich.logging import RichHandler 7 | 8 | import ditk 9 | from .base import _LogLevelType 10 | 11 | # This value is set due the requirement of displaying the tables 12 | _DEFAULT_WIDTH = 170 13 | 14 | 15 | @lru_cache() 16 | def _get_terminal_width() -> int: 17 | width, _ = shutil.get_terminal_size(fallback=(_DEFAULT_WIDTH, 24)) 18 | return width 19 | 20 | 21 | @lru_cache() 22 | def _get_rich_console(use_stdout: bool = False) -> Console: 23 | return Console(width=_get_terminal_width(), stderr=not use_stdout) 24 | 25 | 26 | _RICH_FMT = logging.Formatter(fmt="%(message)s", datefmt="[%m-%d %H:%M:%S]") 27 | 28 | 29 | def _create_rich_handler(use_stdout: bool = False, level: _LogLevelType = logging.NOTSET) -> RichHandler: 30 | handler = RichHandler( 31 | level=level, 32 | console=_get_rich_console(use_stdout), 33 | rich_tracebacks=True, 34 | markup=True, 35 | tracebacks_suppress=[ditk], 36 | ) 37 | handler.setFormatter(_RICH_FMT) 38 | return handler 39 | -------------------------------------------------------------------------------- /ditk/logging/stream.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import sys 3 | from logging import StreamHandler, LogRecord 4 | 5 | from rich.markup import render 6 | 7 | from .base import _LogLevelType 8 | 9 | _STREAM_FMT = logging.Formatter( 10 | fmt='[%(asctime)s][%(filename)s:%(lineno)d][%(levelname)s] %(message)s', 11 | datefmt="%m-%d %H:%M:%S", 12 | ) 13 | 14 | 15 | def _strip_rich_markup(text: str) -> str: 16 | return render(text).plain 17 | 18 | 19 | class NoRichStreamHandler(StreamHandler): 20 | 21 | def emit(self, record: LogRecord) -> None: 22 | if isinstance(record.msg, str): 23 | record.msg = _strip_rich_markup(record.msg) 24 | super().emit(record) 25 | 26 | 27 | def _create_stream_handler(use_stdout: bool = False, level: _LogLevelType = logging.NOTSET) -> StreamHandler: 28 | handler = NoRichStreamHandler(sys.stdout if use_stdout else sys.stderr) 29 | handler.setFormatter(_STREAM_FMT) 30 | handler.setLevel(level) 31 | return handler 32 | 
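
# --- Usage sketch (illustrative only; the logger name and log file path below are hypothetical) ---
# The handlers in this package (rich / plain stream / file) are normally wired together
# through the public ``ditk.logging`` interface rather than constructed directly:
if __name__ == '__main__':  # pragma: no cover
    from ditk import logging as ditk_logging

    # Attach a LoggingTerminalHandler to the root logger: rich output by default,
    # plain stream output when the DISABLE_RICH environment variable is non-empty.
    ditk_logging.try_init_root(ditk_logging.DEBUG)

    # A named logger that additionally writes to a file; ``with_files`` creates the
    # file (and its parent directories) through LoggingFileHandler.
    logger = ditk_logging.getLogger('demo', level=ditk_logging.INFO, with_files=['logs/demo.log'])

    # Rich markup is rendered by the rich handler (markup=True), stripped by the plain
    # stream handler, and written as-is (tags included) by the file handler.
    logger.info('[green]Hello from ditk.logging[/].')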
-------------------------------------------------------------------------------- /ditk/logging/terminal.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from logging import LogRecord 4 | 5 | import rich.errors 6 | 7 | from .base import _LogLevelType 8 | from .rich import _create_rich_handler 9 | from .stream import _create_stream_handler 10 | 11 | 12 | def _use_rich() -> bool: 13 | return not os.environ.get('DISABLE_RICH', '').strip() 14 | 15 | 16 | class LoggingTerminalHandler(logging.Handler): 17 | """ 18 | Overview: 19 | A handler customized in ``ditk``. 20 | 21 | When the ``DISABLE_RICH`` environment variable is set to a non-empty value, logs will be printed with an \ 22 | ordinary ``StreamHandler``, otherwise ``rich.logging.RichHandler`` will be used. 23 | """ 24 | 25 | def __init__(self, use_stdout: bool = False, level: _LogLevelType = logging.NOTSET): 26 | """ 27 | Constructor of :class:`LoggingTerminalHandler`. 28 | 29 | :param use_stdout: Use ``sys.stdout`` instead of ``sys.stderr``. 30 | :param level: Log level. 31 | """ 32 | logging.Handler.__init__(self, level) 33 | self.use_stdout = not not use_stdout 34 | 35 | def _get_current_handler(self) -> logging.Handler: 36 | if _use_rich(): 37 | return _create_rich_handler(self.use_stdout, self.level) 38 | else: 39 | return _create_stream_handler(self.use_stdout, self.level) 40 | 41 | def emit(self, record: LogRecord) -> None: 42 | """ 43 | Emit the log record to the handler. 44 | 45 | If the ``DISABLE_RICH`` environment variable is set to a non-empty value, this method behaves like \ 46 | :meth:`logging.StreamHandler.emit`, otherwise like :meth:`rich.logging.RichHandler.emit`. 47 | """ 48 | try: 49 | return self._get_current_handler().emit(record) 50 | except rich.errors.ConsoleError: 51 | if _use_rich(): 52 | stream_handler = _create_stream_handler(self.use_stdout, self.level) 53 | return stream_handler.emit(record) 54 | else: 55 | raise 56 | -------------------------------------------------------------------------------- /ditk/tensorboard/__init__.py: -------------------------------------------------------------------------------- 1 | from .log import tb_extract_log, tb_has_log, tb_extract_recursive_logs 2 | from .plots import * 3 | -------------------------------------------------------------------------------- /ditk/tensorboard/log.py: -------------------------------------------------------------------------------- 1 | import os 2 | from typing import Dict 3 | 4 | import pandas as pd 5 | from tensorboard.backend.event_processing.event_accumulator import EventAccumulator, SCALARS 6 | from tqdm.auto import tqdm 7 | 8 | 9 | def _init_acc_from_dir(log_dir: str) -> EventAccumulator: 10 | event_acc = EventAccumulator( 11 | log_dir, 12 | size_guidance={SCALARS: 0}, 13 | ) 14 | event_acc.Reload() 15 | return event_acc 16 | 17 | 18 | def _exist_check_with_event_acc(event_acc: EventAccumulator) -> bool: 19 | for tag in event_acc.Tags()['scalars']: 20 | for _ in event_acc.Scalars(tag): 21 | return True 22 | 23 | return False 24 | 25 | 26 | def tb_has_log(log_dir: str) -> bool: 27 | event_acc = _init_acc_from_dir(log_dir) 28 | return _exist_check_with_event_acc(event_acc) 29 | 30 | 31 | def _extract_with_event_acc(event_acc: EventAccumulator, shown_name: str): 32 | scalar_data = {} 33 | 34 | def _yields(): 35 | for _tag in event_acc.Tags()['scalars']: 36 | for _event in event_acc.Scalars(_tag): 37 | yield _tag, _event 38 | 39 | progress = tqdm(_yields()) 40 | for tag, scalar_event in progress: 41 | 
progress.set_description(f'{shown_name} --> {tag} #{scalar_event.step}') 42 | if scalar_event.step not in scalar_data: 43 | scalar_data[scalar_event.step] = {'step': scalar_event.step} 44 | scalar_data[scalar_event.step][tag] = scalar_event.value 45 | 46 | return pd.DataFrame(data=[ 47 | item for _, item in 48 | sorted(scalar_data.items(), key=lambda x: x[0]) 49 | ]) 50 | 51 | 52 | def tb_extract_log(log_dir: str) -> pd.DataFrame: 53 | event_acc = _init_acc_from_dir(log_dir) 54 | return _extract_with_event_acc(event_acc, log_dir) 55 | 56 | 57 | def tb_extract_recursive_logs(root_log_dir: str) -> Dict[str, pd.DataFrame]: 58 | retval = {} 59 | progress = tqdm(os.walk(root_log_dir)) 60 | for current_dir, subdirs, subfiles in progress: 61 | cdir = os.path.relpath(current_dir, root_log_dir) 62 | progress.set_description(cdir) 63 | event_acc = _init_acc_from_dir(current_dir) 64 | if _exist_check_with_event_acc(event_acc): 65 | retval[cdir] = _extract_with_event_acc(event_acc, cdir) 66 | 67 | return retval 68 | -------------------------------------------------------------------------------- /ditk/tensorboard/plots/__init__.py: -------------------------------------------------------------------------------- 1 | from .range import tb_create_range_plots 2 | -------------------------------------------------------------------------------- /ditk/tensorboard/plots/range.py: -------------------------------------------------------------------------------- 1 | import os 2 | import warnings 3 | from functools import lru_cache 4 | from typing import Optional, Mapping 5 | 6 | import matplotlib.pyplot as plt 7 | import numpy as np 8 | import seaborn as sns 9 | from hbutils.string import plural_word 10 | from hbutils.testing import vpip 11 | from matplotlib.ticker import FuncFormatter 12 | from scipy import interpolate 13 | from sklearn.cluster import KMeans 14 | 15 | from ..log import tb_extract_recursive_logs 16 | 17 | 18 | @lru_cache() 19 | def _kmeans_support_n_init_auto(): 20 | return vpip('scikit-learn') >= '1.2.0' 21 | 22 | 23 | def _tb_x_format(x, _): 24 | if x < 1e3: 25 | return f'{x}' 26 | elif x < 1e6: 27 | return f'{x / 1e3:.2f}k' 28 | else: 29 | return f'{x / 1e6:.2f}M' 30 | 31 | 32 | def _tb_rplot_single_group(ax, dfs, xname, yname, label, n_samples: Optional[int] = None, 33 | lower_bound: Optional[float] = None, upper_bound: Optional[float] = None): 34 | datas = [] 35 | for d in dfs: 36 | df = d[[xname, yname]] 37 | df = df[~df[yname].isna()] 38 | func = interpolate.UnivariateSpline(df[xname], df[yname], s=0) 39 | datas.append((df[xname], df[yname], func)) 40 | 41 | if lower_bound is None: 42 | lower_bound = np.min([x.min() for x, _, _ in datas]) 43 | if upper_bound is None: 44 | upper_bound = np.max([x.max() for x, _, _ in datas]) 45 | 46 | all_xs = np.concatenate([x[(x <= upper_bound) & (x >= lower_bound)] for x, _, _ in datas]) 47 | if n_samples is None: 48 | n_samples = all_xs.shape[0] 49 | if n_samples > all_xs.shape[0]: 50 | warnings.warn(f'{plural_word(all_xs.shape[0], "sample")} found in total, ' 51 | f'n_samples ignored due to the unavailableness of {plural_word(n_samples, "sample")}.') 52 | n_samples = all_xs.shape[0] 53 | 54 | clu_algo = KMeans(n_samples, n_init='auto' if _kmeans_support_n_init_auto() else 10) 55 | clu_algo.fit(all_xs[..., None]) 56 | px = np.sort(clu_algo.cluster_centers_.squeeze(-1), kind='heapsort') 57 | if not np.isclose(px[0], lower_bound): 58 | px = np.concatenate([np.array([lower_bound]), px]) 59 | if not np.isclose(px[-1], upper_bound): 60 | px = 
np.concatenate([px, np.array([upper_bound])]) 61 | 62 | fx = [] 63 | fy = [] 64 | for xvalues, _, func in datas: 65 | x_min, x_max = xvalues.min(), xvalues.max() 66 | for x in px: 67 | if x_min <= x <= x_max: 68 | fx.append(x) 69 | fy.append(func(x)) 70 | fx = np.array(fx) 71 | fy = np.array(fy) 72 | 73 | sns.lineplot(x=fx, y=fy, label=label, ax=ax) 74 | 75 | 76 | def tb_create_range_plots(logdir, xname, yname, 77 | label_map: Optional[Mapping[str, str]] = None, n_samples: Optional[int] = None, 78 | lower_bound: Optional[float] = 0.0, upper_bound: Optional[float] = None, 79 | ax=None): 80 | """ 81 | Overview: 82 | Create Multi-Seed Multi-Algorithm Benchmark Plots with Mean and Standard Deviation. 83 | 84 | :param logdir: Log directory of tensorboard. Nested tensorboard log directories are supported. 85 | :param xname: Name of x-axis; ``step`` is recommended. 86 | :param yname: Name of y-axis. 87 | :param label_map: Mapping of the labels, which will be used in the legend. 88 | :param n_samples: Number of samples on x-axis; default is ``None``, which means all the samples are used. 89 | :param lower_bound: Lower bound of x-axis. Default is ``0.0``; ``None`` means the minimum x value of all experiments. 90 | :param upper_bound: Upper bound of x-axis. Default is ``None``, which means the maximum x value of all experiments. 91 | :param ax: Matplotlib axes object. Default is ``None``, which means ``plt.gca()`` is used. 92 | """ 93 | label_map = dict(label_map or {}) 94 | log_data = tb_extract_recursive_logs(logdir) 95 | log_groups = {} 96 | for key, data in log_data.items(): 97 | first_seg = key.split(os.path.sep)[0] 98 | if first_seg not in log_groups: 99 | log_groups[first_seg] = [] 100 | log_groups[first_seg].append(data) 101 | 102 | if ax is None: 103 | ax = plt.gca() 104 | 105 | for group_name, dfs in log_groups.items(): 106 | _tb_rplot_single_group( 107 | ax, dfs, xname, yname, 108 | label=label_map.get(group_name, group_name), 109 | n_samples=n_samples, 110 | lower_bound=lower_bound, 111 | upper_bound=upper_bound, 112 | ) 113 | 114 | ax.xaxis.set_major_formatter(FuncFormatter(_tb_x_format)) 115 | ax.set_title(f'{xname!r} - {yname!r} plot') 116 | ax.set_xlabel(xname) 117 | ax.set_ylabel(yname) 118 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # You can set these variables from the command line, and also 2 | # from the environment for the first two. 3 | PROJ_DIR ?= ${CURDIR} 4 | SPHINXOPTS ?= 5 | SPHINXBUILD ?= $(shell which sphinx-build) 6 | SPHINXMULTIVERSION ?= $(shell which sphinx-multiversion) 7 | SOURCEDIR ?= ${PROJ_DIR}/source 8 | BUILDDIR ?= ${PROJ_DIR}/build 9 | 10 | # Minimal makefile for Sphinx documentation 11 | ALL_MK := ${SOURCEDIR}/all.mk 12 | ALL := $(MAKE) -f "${ALL_MK}" SOURCE=${SOURCEDIR} 13 | 14 | .EXPORT_ALL_VARIABLES: 15 | 16 | NO_CONTENTS_BUILD = true 17 | 18 | # Catch-all target: route all unknown targets to Sphinx using the new 19 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 20 | # Put it first so that "make" without argument is like "make help".
21 | .PHONY: help contents build html prod clean sourcedir builddir Makefile 22 | 23 | help: 24 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 25 | 26 | contents: 27 | @$(ALL) build 28 | build: html 29 | html: contents 30 | @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 31 | @touch "$(BUILDDIR)/html/.nojekyll" 32 | prod: 33 | @NO_CONTENTS_BUILD='' $(SPHINXMULTIVERSION) "$(SOURCEDIR)" "$(BUILDDIR)/html" $(SPHINXOPTS) $(O) 34 | @cp main_page.html "$(BUILDDIR)/html/index.html" 35 | @touch "$(BUILDDIR)/html/.nojekyll" 36 | 37 | clean: 38 | @$(ALL) clean 39 | @$(SPHINXBUILD) -M clean "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 40 | 41 | sourcedir: 42 | @echo $(shell readlink -f ${SOURCEDIR}) 43 | builddir: 44 | @echo $(shell readlink -f ${BUILDDIR}/html) -------------------------------------------------------------------------------- /docs/main_page.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Redirecting to master branch 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /docs/source/_libs/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/docs/source/_libs/.keep -------------------------------------------------------------------------------- /docs/source/_shims/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/docs/source/_shims/.keep -------------------------------------------------------------------------------- /docs/source/_static/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/docs/source/_static/.keep -------------------------------------------------------------------------------- /docs/source/_static/css/custom.css: -------------------------------------------------------------------------------- 1 | .wy-table-responsive table td, 2 | .wy-table-responsive table th { 3 | white-space: normal; 4 | } -------------------------------------------------------------------------------- /docs/source/_templates/.keep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/docs/source/_templates/.keep -------------------------------------------------------------------------------- /docs/source/_templates/page.html: -------------------------------------------------------------------------------- 1 | {% extends "!page.html" %} 2 | {% block body %} 3 | {% if current_version and latest_version and current_version != latest_version %} 4 |

5 | 6 | {% if current_version.is_released %} 7 | You're reading an old version of this documentation. 8 | If you want up-to-date information, please have a look at 9 | {{ latest_version.name }}. 10 | {% else %} 11 | You're reading the documentation for a development version. 12 | For the latest released version, please have a look at 13 | {{ latest_version.name }}. 14 | {% endif %} 15 | 16 |

17 | {% endif %} 18 | {{ super() }} 19 | {% endblock %}% -------------------------------------------------------------------------------- /docs/source/_templates/versions.html: -------------------------------------------------------------------------------- 1 | {%- if current_version %} 2 | <div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions">
 3 | <span class="rst-current-version" data-toggle="rst-current-version"> 4 | <span class="fa fa-book"> Other Versions</span> 5 | v: {{ current_version.name }} 6 | <span class="fa fa-caret-down"></span> 7 | </span> 8 | <div class="rst-other-versions">
 9 | {%- if versions.tags %} 10 | <dl>
 11 | <dt>Tags</dt>
 12 | {%- for item in versions.tags %} 13 | <dd><a href="{{ item.url }}">{{ item.name }}</a></dd>
 14 | {%- endfor %} 15 | </dl>
 16 | {%- endif %} 17 | {%- if versions.branches %} 18 | <dl>
 19 | <dt>Branches</dt>
 20 | {%- for item in versions.branches %} 21 | <dd><a href="{{ item.url }}">{{ item.name }}</a></dd>
 22 | {%- endfor %} 23 | </dl>
 24 | {%- endif %} 25 | </div>
 26 | </div>
27 | {%- endif %} -------------------------------------------------------------------------------- /docs/source/all.mk: -------------------------------------------------------------------------------- 1 | PIP := $(shell which pip) 2 | 3 | SPHINXOPTS ?= 4 | SPHINXBUILD ?= $(shell which sphinx-build) 5 | SPHINXMULTIVERSION ?= $(shell which sphinx-multiversion) 6 | SOURCEDIR ?= $(shell readlink -f ${CURDIR}) 7 | BUILDDIR ?= $(shell readlink -f ${CURDIR}/../build) 8 | 9 | DIAGRAMS_MK := ${SOURCEDIR}/diagrams.mk 10 | DIAGRAMS := $(MAKE) -f "${DIAGRAMS_MK}" SOURCE=${SOURCEDIR} 11 | GRAPHVIZ_MK := ${SOURCEDIR}/graphviz.mk 12 | GRAPHVIZ := $(MAKE) -f "${GRAPHVIZ_MK}" SOURCE=${SOURCEDIR} 13 | DEMOS_MK := ${SOURCEDIR}/demos.mk 14 | DEMOS := $(MAKE) -f "${DEMOS_MK}" SOURCE=${SOURCEDIR} 15 | 16 | _CURRENT_PATH := ${PATH} 17 | _PROJ_DIR := $(shell readlink -f ${SOURCEDIR}/../..) 18 | _LIBS_DIR := $(shell readlink -f ${SOURCEDIR}/_libs) 19 | _SHIMS_DIR := $(shell readlink -f ${SOURCEDIR}/_shims) 20 | 21 | .EXPORT_ALL_VARIABLES: 22 | 23 | PYTHONPATH = ${_PROJ_DIR}:${_LIBS_DIR} 24 | PATH = ${_SHIMS_DIR}:${_CURRENT_PATH} 25 | 26 | .PHONY: all build clean pip 27 | 28 | pip: 29 | @$(PIP) install -r ${_PROJ_DIR}/requirements.txt 30 | @$(PIP) install -r ${_PROJ_DIR}/requirements-doc.txt 31 | 32 | build: 33 | @$(DIAGRAMS) build 34 | @$(GRAPHVIZ) build 35 | @$(DEMOS) build 36 | 37 | all: build 38 | 39 | clean: 40 | @$(DIAGRAMS) clean 41 | @$(GRAPHVIZ) clean 42 | @$(DEMOS) clean 43 | 44 | -------------------------------------------------------------------------------- /docs/source/api_doc/config/index.rst: -------------------------------------------------------------------------------- 1 | ditk.config 2 | ===================== 3 | 4 | .. currentmodule:: ditk.config 5 | 6 | .. automodule:: ditk.config 7 | 8 | 9 | .. toctree:: 10 | :maxdepth: 3 11 | 12 | meta 13 | -------------------------------------------------------------------------------- /docs/source/api_doc/config/meta.rst: -------------------------------------------------------------------------------- 1 | ditk.config.meta 2 | ========================== 3 | 4 | .. currentmodule:: ditk.config.meta 5 | 6 | .. automodule:: ditk.config.meta 7 | 8 | 9 | \_\_TITLE\_\_ 10 | ------------------ 11 | 12 | .. autodata:: __TITLE__ 13 | :annotation: 14 | 15 | 16 | \_\_VERSION\_\_ 17 | ------------------ 18 | 19 | .. autodata:: __VERSION__ 20 | :annotation: 21 | 22 | 23 | \_\_DESCRIPTION\_\_ 24 | ---------------------- 25 | 26 | .. autodata:: __DESCRIPTION__ 27 | :annotation: 28 | 29 | 30 | \_\_AUTHOR\_\_ 31 | ------------------ 32 | 33 | .. autodata:: __AUTHOR__ 34 | :annotation: 35 | 36 | 37 | \_\_AUTHOR_EMAIL\_\_ 38 | ---------------------- 39 | 40 | .. autodata:: __AUTHOR_EMAIL__ 41 | :annotation: 42 | -------------------------------------------------------------------------------- /docs/source/api_doc/logging/index.rst: -------------------------------------------------------------------------------- 1 | ditk.logging 2 | ===================== 3 | 4 | .. currentmodule:: ditk.logging 5 | 6 | .. automodule:: ditk.logging 7 | 8 | 9 | 10 | LoggingTerminalHandler 11 | ------------------------- 12 | 13 | .. autoclass:: LoggingTerminalHandler 14 | :members: __init__, emit 15 | 16 | 17 | LoggingFileHandler 18 | ------------------------- 19 | 20 | .. autoclass:: LoggingFileHandler 21 | :members: __init__, file_path, emit 22 | 23 | 24 | try_init_root 25 | ------------------------- 26 | 27 | .. 
autofunction:: try_init_root 28 | 29 | 30 | getLogger 31 | ------------------------- 32 | 33 | .. autofunction:: getLogger 34 | 35 | 36 | 37 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | # 13 | # import os 14 | # import sys 15 | # sys.path.insert(0, os.path.abspath('.')) 16 | 17 | # -- Project information ----------------------------------------------------- 18 | 19 | import os 20 | import sys 21 | from datetime import datetime 22 | from subprocess import Popen 23 | 24 | import where 25 | from packaging import version as version_ 26 | 27 | # Get current location 28 | _DOC_PATH = os.path.dirname(os.path.abspath(__file__)) 29 | _PROJ_PATH = os.path.abspath(os.path.join(_DOC_PATH, '..', '..')) 30 | _LIBS_PATH = os.path.join(_DOC_PATH, '_libs') 31 | _SHIMS_PATH = os.path.join(_DOC_PATH, '_shims') 32 | os.chdir(_PROJ_PATH) 33 | 34 | # Set environment, remove the pre-installed package 35 | sys.path.insert(0, _PROJ_PATH) 36 | modnames = [mname for mname in sys.modules if mname.startswith('ditk')] 37 | for modname in modnames: 38 | del sys.modules[modname] 39 | 40 | # Build dependencies if needed 41 | if not os.environ.get("NO_CONTENTS_BUILD"): 42 | _env = dict(os.environ) 43 | _env.update( 44 | dict( 45 | SOURCEDIR=_DOC_PATH, 46 | BUILDDIR=os.path.abspath(os.path.join(_DOC_PATH, '..', 'build')), 47 | PYTHONPATH=':'.join([_PROJ_PATH, _LIBS_PATH]), 48 | PATH=':'.join([_SHIMS_PATH, os.environ.get('PATH', '')]), 49 | ) 50 | ) 51 | 52 | if os.path.exists(os.path.join(_PROJ_PATH, 'requirements-build.txt')): 53 | pip_build_cmd = (where.first('pip'), 'install', '-r', os.path.join(_PROJ_PATH, 'requirements-build.txt')) 54 | print("Install pip requirements {cmd}...".format(cmd=repr(pip_build_cmd))) 55 | pip_build = Popen(pip_build_cmd, stdout=sys.stdout, stderr=sys.stderr, env=_env, cwd=_PROJ_PATH) 56 | if pip_build.wait() != 0: 57 | raise ChildProcessError("Pip install failed with %d." % (pip_build.returncode, )) 58 | 59 | make_build_cmd = (where.first('make'), 'clean', 'build') 60 | print("Try building extensions {cmd}...".format(cmd=repr(make_build_cmd))) 61 | make_build = Popen(make_build_cmd, stdout=sys.stdout, stderr=sys.stderr, env=_env, cwd=_PROJ_PATH) 62 | if make_build.wait() != 0: 63 | raise ChildProcessError("Extension build failed with %d." % (make_build.returncode, )) 64 | 65 | pip_cmd = (where.first('pip'), 'install', '-r', os.path.join(_PROJ_PATH, 'requirements.txt')) 66 | print("Install pip requirements {cmd}...".format(cmd=repr(pip_cmd))) 67 | pip = Popen(pip_cmd, stdout=sys.stdout, stderr=sys.stderr, env=_env, cwd=_DOC_PATH) 68 | if pip.wait() != 0: 69 | raise ChildProcessError("Pip install failed with %d." 
% (pip.returncode, )) 70 | 71 | pip_docs_cmd = (where.first('pip'), 'install', '-r', os.path.join(_PROJ_PATH, 'requirements-doc.txt')) 72 | print("Install pip docs requirements {cmd}...".format(cmd=repr(pip_docs_cmd))) 73 | pip_docs = Popen(pip_docs_cmd, stdout=sys.stdout, stderr=sys.stderr, env=_env, cwd=_DOC_PATH) 74 | if pip_docs.wait() != 0: 75 | raise ChildProcessError("Pip docs install failed with %d." % (pip.returncode, )) 76 | 77 | all_cmd = (where.first('make'), '-f', "all.mk", "build") 78 | print("Building all {cmd} at {cp}...".format(cmd=repr(all_cmd), cp=repr(_DOC_PATH))) 79 | all_ = Popen(all_cmd, stdout=sys.stdout, stderr=sys.stderr, env=_env, cwd=_DOC_PATH) 80 | if all_.wait() != 0: 81 | raise ChildProcessError("Diagrams failed with %d." % (all_.returncode, )) 82 | 83 | print("Build of contents complete.") 84 | 85 | from ditk.config.meta import __TITLE__, __AUTHOR__, __VERSION__ 86 | 87 | project = __TITLE__ 88 | copyright = '{year}, {author}'.format(year=datetime.now().year, author=__AUTHOR__) 89 | author = __AUTHOR__ 90 | 91 | # The short X.Y version 92 | version = version_.parse(__VERSION__).base_version 93 | # The full version, including alpha/beta/rc tags 94 | release = __VERSION__ 95 | 96 | # -- General configuration --------------------------------------------------- 97 | 98 | # Add any Sphinx extension module names here, as strings. They can be 99 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 100 | # ones. 101 | extensions = [ 102 | 'sphinx.ext.autodoc', 103 | 'sphinx.ext.doctest', 104 | 'sphinx.ext.mathjax', 105 | 'sphinx.ext.ifconfig', 106 | 'sphinx.ext.viewcode', 107 | 'sphinx.ext.githubpages', 108 | 'sphinx.ext.todo', 109 | 'sphinx.ext.graphviz', 110 | 'enum_tools.autoenum', 111 | "sphinx_multiversion", 112 | ] 113 | 114 | # Add any paths that contain templates here, relative to this directory. 115 | templates_path = ['_templates'] 116 | 117 | # The language for content autogenerated by Sphinx. Refer to documentation 118 | # for a list of supported languages. 119 | # 120 | # This is also used if you do content translation via gettext catalogs. 121 | # Usually you set "language" from the command line for these cases. 122 | language = None 123 | 124 | # List of patterns, relative to source directory, that match files and 125 | # directories to ignore when looking for source files. 126 | # This pattern also affects html_static_path and html_extra_path. 127 | exclude_patterns = [] 128 | 129 | # -- Options for HTML output ------------------------------------------------- 130 | 131 | # The theme to use for HTML and HTML Help pages. See the documentation for 132 | # a list of plugins themes. 133 | # 134 | html_theme = 'sphinx_rtd_theme' 135 | htmlhelp_basename = 'TreeValue' 136 | 137 | # Add any paths that contain custom static files (such as style sheets) here, 138 | # relative to this directory. They are copied after the plugins static files, 139 | # so a file named "default.css" will overwrite the plugins "default.css". 
140 | html_static_path = ['_static'] 141 | 142 | html_css_files = [ 143 | 'css/custom.css', 144 | ] 145 | 146 | epub_title = project 147 | epub_exclude_files = ['search.html'] 148 | 149 | # Whitelist pattern for tags (set to None to ignore all tags) 150 | smv_tag_whitelist = r'^v.*$' # Include all tags start with 'v' 151 | smv_branch_whitelist = r'^.*$' # Include all branches 152 | smv_remote_whitelist = r'^.*$' # Use branches from all remotes 153 | smv_released_pattern = r'^tags/.*$' # Tags only 154 | smv_outputdir_format = '{ref.name}' # Use the branch/tag name 155 | 156 | if not os.environ.get("ENV_PROD"): 157 | todo_include_todos = True 158 | todo_emit_warnings = True 159 | -------------------------------------------------------------------------------- /docs/source/demos.mk: -------------------------------------------------------------------------------- 1 | PYTHON := $(shell which python) 2 | 3 | SOURCE ?= . 4 | PYTHON_DEMOS := $(shell find ${SOURCE} -name *.demo.py) 5 | PYTHON_DEMOXS := $(shell find ${SOURCE} -name *.demox.py) 6 | PYTHON_RESULTS := $(addsuffix .py.txt, $(basename ${PYTHON_DEMOS} ${PYTHON_DEMOXS})) 7 | 8 | SHELL_DEMOS := $(shell find ${SOURCE} -name *.demo.sh) 9 | SHELL_DEMOXS := $(shell find ${SOURCE} -name *.demox.sh) 10 | SHELL_RESULTS := $(addsuffix .sh.txt, $(basename ${SHELL_DEMOS} ${SHELL_DEMOXS})) 11 | 12 | %.demo.py.txt: %.demo.py 13 | cd "$(shell dirname $(shell readlink -f $<))" && \ 14 | PYTHONPATH="$(shell dirname $(shell readlink -f $<)):${PYTHONPATH}" \ 15 | $(PYTHON) "$(shell readlink -f $<)" > "$(shell readlink -f $@)" 16 | 17 | %.demox.py.txt: %.demox.py 18 | cd "$(shell dirname $(shell readlink -f $<))" && \ 19 | PYTHONPATH="$(shell dirname $(shell readlink -f $<)):${PYTHONPATH}" \ 20 | $(PYTHON) "$(shell readlink -f $<)" 1> "$(shell readlink -f $@)" \ 21 | 2> "$(shell readlink -f $(addsuffix .err, $(basename $@)))"; \ 22 | echo $$? > "$(shell readlink -f $(addsuffix .exitcode, $(basename $@)))" 23 | 24 | %.demo.sh.txt: %.demo.sh 25 | cd "$(shell dirname $(shell readlink -f $<))" && \ 26 | PYTHONPATH="$(shell dirname $(shell readlink -f $<)):${PYTHONPATH}" \ 27 | $(SHELL) "$(shell readlink -f $<)" > "$(shell readlink -f $@)" 28 | 29 | %.demox.sh.txt: %.demox.sh 30 | cd "$(shell dirname $(shell readlink -f $<))" && \ 31 | PYTHONPATH="$(shell dirname $(shell readlink -f $<)):${PYTHONPATH}" \ 32 | $(SHELL) "$(shell readlink -f $<)" 1> "$(shell readlink -f $@)" \ 33 | 2> "$(shell readlink -f $(addsuffix .err, $(basename $@)))"; \ 34 | echo $$? > "$(shell readlink -f $(addsuffix .exitcode, $(basename $@)))" 35 | 36 | build: ${PYTHON_RESULTS} ${SHELL_RESULTS} 37 | 38 | all: build 39 | 40 | clean: 41 | rm -rf \ 42 | $(shell find ${SOURCE} -name *.py.txt) \ 43 | $(shell find ${SOURCE} -name *.py.err) \ 44 | $(shell find ${SOURCE} -name *.py.exitcode) \ 45 | $(shell find ${SOURCE} -name *.sh.txt) \ 46 | $(shell find ${SOURCE} -name *.sh.err) \ 47 | $(shell find ${SOURCE} -name *.sh.exitcode) \ 48 | $(shell find ${SOURCE} -name *.dat.*) 49 | -------------------------------------------------------------------------------- /docs/source/diagrams.mk: -------------------------------------------------------------------------------- 1 | PLANTUMLCLI ?= $(shell which plantumlcli) 2 | 3 | SOURCE ?= . 
4 | PUMLS := $(shell find ${SOURCE} -name *.puml) 5 | PNGS := $(addsuffix .puml.png, $(basename ${PUMLS})) 6 | SVGS := $(addsuffix .puml.svg, $(basename ${PUMLS})) 7 | 8 | %.puml.png: %.puml 9 | $(PLANTUMLCLI) -t png -o "$(shell readlink -f $@)" "$(shell readlink -f $<)" 10 | 11 | %.puml.svg: %.puml 12 | $(PLANTUMLCLI) -t svg -o "$(shell readlink -f $@)" "$(shell readlink -f $<)" 13 | 14 | build: ${SVGS} ${PNGS} 15 | 16 | all: build 17 | 18 | clean: 19 | rm -rf \ 20 | $(shell find ${SOURCE} -name *.puml.svg) \ 21 | $(shell find ${SOURCE} -name *.puml.png) \ 22 | -------------------------------------------------------------------------------- /docs/source/graphviz.mk: -------------------------------------------------------------------------------- 1 | DOT := $(shell which dot) 2 | 3 | SOURCE ?= . 4 | GVS := $(shell find ${SOURCE} -name *.gv) 5 | PNGS := $(addsuffix .gv.png, $(basename ${GVS})) 6 | SVGS := $(addsuffix .gv.svg, $(basename ${GVS})) 7 | 8 | %.gv.png: %.gv 9 | $(DOT) -Tpng -o"$(shell readlink -f $@)" "$(shell readlink -f $<)" 10 | 11 | %.gv.svg: %.gv 12 | $(DOT) -Tsvg -o"$(shell readlink -f $@)" "$(shell readlink -f $<)" 13 | 14 | build: ${SVGS} ${PNGS} 15 | 16 | all: build 17 | 18 | clean: 19 | rm -rf \ 20 | $(shell find ${SOURCE} -name *.gv.svg) \ 21 | $(shell find ${SOURCE} -name *.gv.png) \ 22 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | Welcome to DI-toolkit's Documentation 2 | ========================================================= 3 | 4 | Overview 5 | ------------- 6 | 7 | ``DI-toolkit`` is a module which can parse and manage the \ 8 | structure and versions of configuration files. 9 | 10 | .. toctree:: 11 | :maxdepth: 2 12 | :caption: Tutorials 13 | 14 | tutorials/installation/index 15 | tutorials/quick_start/index 16 | 17 | .. toctree:: 18 | :maxdepth: 2 19 | :caption: Best Practice 20 | 21 | 22 | .. toctree:: 23 | :maxdepth: 2 24 | :caption: API Documentation 25 | 26 | api_doc/config/index 27 | api_doc/logging/index 28 | 29 | .. toctree:: 30 | :maxdepth: 2 31 | :caption: Contributor Guide 32 | 33 | -------------------------------------------------------------------------------- /docs/source/tutorials/installation/index.rst: -------------------------------------------------------------------------------- 1 | Installation 2 | =================== 3 | 4 | DI-toolkit is currently hosted on PyPI. It requires Python >= 3.6. 5 | 6 | You can simply install DI-toolkit from PyPI with the following command: 7 | 8 | .. code:: shell 9 | 10 | pip install DI-toolkit 11 | 12 | You can also install the newest version from GitHub: 13 | 14 | .. code:: shell 15 | 16 | pip install -U git+https://github.com/opendilab/DI-toolkit@main 17 | 18 | You can check your installation with the following Python \ 19 | script: 20 | 21 | .. literalinclude:: install_check.demo.py 22 | :language: python 23 | :linenos: 24 | 25 | The output should look like the text below, which means your installation \ 26 | is successful. 27 | 28 | .. literalinclude:: install_check.demo.py.txt 29 | :language: text 30 | :linenos: 31 | 32 | DI-toolkit is still under development; you can also check out the \ 33 | documentation of the stable version at `https://opendilab.github.io/DI-toolkit/main/index.html <https://opendilab.github.io/DI-toolkit/main/index.html>`_.
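
As a final check, the package should also be importable from an interactive session, for example (the logger name ``check`` is only illustrative):

.. code:: python

    >>> import ditk
    >>> from ditk import logging
    >>> logging.getLogger('check').name
    'check'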
34 | -------------------------------------------------------------------------------- /docs/source/tutorials/installation/install_check.demo.py: -------------------------------------------------------------------------------- 1 | from ditk.config.meta import __TITLE__, __AUTHOR__, __VERSION__, __DESCRIPTION__ 2 | 3 | if __name__ == '__main__': 4 | print(__TITLE__, __VERSION__) 5 | print('Developed and maintained by', __AUTHOR__) 6 | print(__DESCRIPTION__) 7 | -------------------------------------------------------------------------------- /docs/source/tutorials/quick_start/index.rst: -------------------------------------------------------------------------------- 1 | Quick Start 2 | ======================= 3 | 4 | (Need to be completed.) -------------------------------------------------------------------------------- /format.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | # Usage: at the root dir >> bash scripts/format.sh . 3 | 4 | # Check yapf version. (20200318 latest is 0.29.0. Format might be changed in future version.) 5 | ver=$(yapf --version) 6 | if ! echo $ver | grep -q 0.29.0; then 7 | echo "Wrong YAPF version installed: 0.29.0 is required, not $ver. $YAPF_DOWNLOAD_COMMAND_MSG" 8 | exit 1 9 | fi 10 | 11 | yapf --in-place --recursive -p --verbose --style .style.yapf $1 12 | 13 | if [[ "$2" == '--test' ]]; then # Only for CI usage, user should not use --test flag. 14 | if ! git diff --quiet &>/dev/null; then 15 | echo '*** You have not reformatted your codes! Please run [bash format.sh] at root directory before commit! Thanks! ***' 16 | exit 1 17 | else 18 | echo "Code style test passed!" 19 | fi 20 | fi 21 | -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | timeout = 300 3 | markers = 4 | unittest 5 | benchmark 6 | ignore 7 | -------------------------------------------------------------------------------- /requirements-doc.txt: -------------------------------------------------------------------------------- 1 | Jinja2>=3.0.0 2 | sphinx>=3.2.0 3 | sphinx_rtd_theme>=0.4.3 4 | enum_tools~=0.9.0 5 | sphinx-toolbox 6 | plantumlcli>=0.0.2 7 | packaging 8 | sphinx-multiversion>=0.2.4 9 | where~=1.0.2 10 | easydict>=1.7,<2 11 | -------------------------------------------------------------------------------- /requirements-style.txt: -------------------------------------------------------------------------------- 1 | yapf==0.29.0 2 | flake8>=3,<5 -------------------------------------------------------------------------------- /requirements-test.txt: -------------------------------------------------------------------------------- 1 | coverage>=5 2 | mock>=4.0.3 3 | flake8~=3.5 4 | testfixtures>=6.18.5 5 | pytest~=6.2.5 6 | pytest-cov~=3.0.0 7 | pytest-mock~=3.6.1 8 | pytest-xdist>=1.34.0 9 | pytest-rerunfailures~=10.2 10 | pytest-timeout~=2.0.2 11 | pytest-benchmark~=3.4.0 12 | easydict>=1.7,<2 13 | testtools>=2 14 | where>=1.0.2 15 | torch>=1.3.1 16 | pytest-image-diff>=0.0.11 -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | hbutils>=0.9.1 2 | rich>=12.2.0 3 | yattag>=1.14.0 4 | click>=7.0.0 5 | pandas 6 | tensorboard 7 | tqdm 8 | scipy 9 | scikit-learn 10 | seaborn 11 | matplotlib 12 | 
-------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | from codecs import open 4 | from distutils.core import setup 5 | 6 | from setuptools import find_packages 7 | 8 | _package_name = "ditk" 9 | 10 | here = os.path.abspath(os.path.dirname(__file__)) 11 | meta = {} 12 | with open(os.path.join(here, _package_name, 'config', 'meta.py'), 'r', 'utf-8') as f: 13 | exec(f.read(), meta) 14 | 15 | 16 | def _load_req(file: str): 17 | with open(file, 'r', 'utf-8') as f: 18 | return [line.strip() for line in f.readlines() if line.strip()] 19 | 20 | 21 | requirements = _load_req('requirements.txt') 22 | 23 | _REQ_PATTERN = re.compile('^requirements-([a-zA-Z0-9_]+)\\.txt$') 24 | group_requirements = { 25 | item.group(1): _load_req(item.group(0)) 26 | for item in [_REQ_PATTERN.fullmatch(reqpath) for reqpath in os.listdir()] if item 27 | } 28 | 29 | with open('README.md', 'r', 'utf-8') as f: 30 | readme = f.read() 31 | 32 | setup( 33 | # information 34 | name=meta['__TITLE__'], 35 | version=meta['__VERSION__'], 36 | packages=find_packages(include=(_package_name, "%s.*" % _package_name)), 37 | package_data={package_name: ['*.yaml', '*.yml', '*.css', '*.js'] 38 | for package_name in find_packages(include=('*'))}, 39 | description=meta['__DESCRIPTION__'], 40 | long_description=readme, 41 | long_description_content_type='text/markdown', 42 | author=meta['__AUTHOR__'], 43 | author_email=meta['__AUTHOR_EMAIL__'], 44 | license='Apache License, Version 2.0', 45 | keywords='A simple tool for automatic parameter tuning.', 46 | url='https://github.com/opendilab/DI-toolkit', 47 | 48 | # environment 49 | python_requires=">=3.6", 50 | install_requires=requirements, 51 | tests_require=group_requirements['test'], 52 | extras_require=group_requirements, 53 | classifiers=[ 54 | 'Development Status :: 5 - Production/Stable', 'Intended Audience :: Developers', 55 | 'License :: OSI Approved :: Apache Software License', 'Programming Language :: Python', 56 | 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.6', 57 | 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8', 58 | 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', 59 | 'Programming Language :: Python :: Implementation :: PyPy' 60 | ], 61 | ) 62 | -------------------------------------------------------------------------------- /test/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/__init__.py -------------------------------------------------------------------------------- /test/config/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/config/__init__.py -------------------------------------------------------------------------------- /test/config/test_meta.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from ditk.config.meta import __TITLE__ 4 | 5 | 6 | @pytest.mark.unittest 7 | class TestConfigMeta: 8 | 9 | def test_title(self): 10 | assert __TITLE__ == 'DI-toolkit' 11 | -------------------------------------------------------------------------------- /test/doc/__init__.py: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/doc/__init__.py -------------------------------------------------------------------------------- /test/doc/annotated/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/doc/annotated/__init__.py -------------------------------------------------------------------------------- /test/doc/annotated/test_generate.py: -------------------------------------------------------------------------------- 1 | import glob 2 | import os 3 | 4 | import pytest 5 | from hbutils.testing import isolated_directory, simulate_entry 6 | 7 | from ditk.doc.annotated import generate_annotated_doc 8 | from ditk.doc.annotated.__main__ import cli 9 | 10 | 11 | @pytest.mark.unittest 12 | def test_generate(): 13 | with isolated_directory({'ppo.py': os.path.join('ditk', 'doc', 'annotated', 'ppo.py')}): 14 | generate_annotated_doc('ppo.py', 'ppo.html', 'This is the title') 15 | assert os.path.exists('ppo.html') 16 | assert len(glob.glob(os.path.join('assets', '*.css'))) == 2 17 | 18 | 19 | @pytest.mark.unittest 20 | def test_cli(): 21 | with isolated_directory({'ppo.py': os.path.join('ditk', 'doc', 'annotated', 'ppo.py')}): 22 | result = simulate_entry(cli, ['ditk.doc.annotated', 'create', '-i', 'ppo.py', '-o', 'ppo.html']) 23 | assert result.exitcode == 0 24 | assert os.path.exists('ppo.html') 25 | assert len(glob.glob(os.path.join('assets', '*.css'))) == 2 26 | -------------------------------------------------------------------------------- /test/logging/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/logging/__init__.py -------------------------------------------------------------------------------- /test/logging/conftest.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from ditk import logging 4 | 5 | 6 | @pytest.fixture(scope='module', autouse=True) 7 | def init_logging_level(): 8 | logging.try_init_root(logging.WARNING) 9 | -------------------------------------------------------------------------------- /test/logging/test_func.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from unittest import mock 4 | 5 | import pytest 6 | from hbutils.testing import capture_output, isolated_logger 7 | 8 | import ditk.logging 9 | 10 | 11 | @pytest.mark.unittest 12 | class TestLoggingFunc: 13 | 14 | @isolated_logger(handlers=[]) 15 | def test_loggings(self): 16 | with capture_output() as o: 17 | ditk.logging.debug('This is debug.') 18 | ditk.logging.info('This is info.') 19 | ditk.logging.warn('This is warn.') 20 | ditk.logging.warning('This is warning.') 21 | ditk.logging.error('This is error.') 22 | with mock.patch.dict(os.environ, {'DISABLE_RICH': '1'}): 23 | ditk.logging.critical('This is critical.') 24 | ditk.logging.fatal('This is fatal.') 25 | ditk.logging.log(logging.WARNING, 'This is warn log.') 26 | 27 | try: 28 | raise ValueError('This is value error.') 29 | except Exception as err: 30 | ditk.logging.exception(err) 31 | 32 | assert o.stdout.strip() == '' 33 | assert 'DEBUG This is debug.' 
not in o.stderr 34 | assert 'INFO This is info.' not in o.stderr 35 | assert 'WARNING This is warn.' in o.stderr 36 | assert 'WARNING This is warning.' in o.stderr 37 | assert 'ERROR This is error.' in o.stderr 38 | assert '[CRITICAL] This is critical.' in o.stderr 39 | assert '[CRITICAL] This is fatal.' in o.stderr 40 | assert '[WARNING] This is warn log.' in o.stderr 41 | assert 'ValueError: This is value error.' in o.stderr 42 | -------------------------------------------------------------------------------- /test/logging/test_inherit.py: -------------------------------------------------------------------------------- 1 | import logging 2 | 3 | import pytest 4 | 5 | import ditk.logging 6 | 7 | _CHANGED = [ 8 | 'critical', 9 | 'fatal', 10 | 'error', 11 | 'exception', 12 | 'warning', 13 | 'warn', 14 | 'info', 15 | 'debug', 16 | 'log', 17 | 'getLogger', 18 | ] 19 | 20 | 21 | @pytest.mark.unittest 22 | class TestLoggingInherit: 23 | 24 | @pytest.mark.parametrize('name', [name for name in logging.__all__ if name not in _CHANGED]) 25 | def test_inherit(self, name): 26 | assert getattr(ditk.logging, name) is getattr(logging, name) 27 | -------------------------------------------------------------------------------- /test/logging/test_log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | import pathlib 4 | import sys 5 | from unittest import mock 6 | 7 | import pytest 8 | from hbutils.testing import capture_output, isolated_directory, isolated_logger 9 | from rich.logging import RichHandler 10 | 11 | from ditk.logging import getLogger, try_init_root 12 | from ..testing import close_all_handlers 13 | 14 | 15 | @pytest.mark.unittest 16 | class TestLoggingLog: 17 | 18 | @isolated_logger(handlers=[]) 19 | def test_simple_rich(self): 20 | try_init_root(logging.DEBUG) 21 | with capture_output() as output: 22 | logger = getLogger() 23 | assert logger.name == 'root' 24 | 25 | logger.info('This is info.') 26 | logger.warning('This is warning.') 27 | logger.error('This is error.') 28 | logger.critical('This is critical.') 29 | 30 | stdout, stderr = output.stdout, output.stderr 31 | assert stdout.strip() == '' 32 | assert 'INFO This is info.' in stderr 33 | assert 'WARNING This is warning.' in stderr 34 | assert 'ERROR This is error.' in stderr 35 | assert 'CRITICAL This is critical.' in stderr 36 | 37 | @isolated_logger(handlers=[]) 38 | def test_simple_rich_with_style(self): 39 | try_init_root(logging.DEBUG) 40 | with capture_output() as output: 41 | logger = getLogger() 42 | assert logger.name == 'root' 43 | 44 | logger.info('[yellow]This is info[/].') 45 | logger.warning('[red]This is warning[/].') 46 | logger.error('[green]This is error[/].') 47 | logger.critical('[yellow]This is critical[/].') 48 | 49 | stdout, stderr = output.stdout, output.stderr 50 | assert stdout.strip() == '' 51 | assert 'INFO This is info.' in stderr 52 | assert 'WARNING This is warning.' in stderr 53 | assert 'ERROR This is error.' in stderr 54 | assert 'CRITICAL This is critical.' 
in stderr 55 | 56 | @isolated_logger(handlers=[]) 57 | def test_simple_rich_with_style_disable_rich(self): 58 | try_init_root(logging.DEBUG) 59 | with capture_output() as output: 60 | logger = getLogger() 61 | assert logger.name == 'root' 62 | 63 | with mock.patch.dict(os.environ, {'DISABLE_RICH': '1'}): 64 | logger.info('[yellow]This is info[/].') 65 | logger.warning('[red]This is warning[/].') 66 | logger.error('[green]This is error[/].') 67 | logger.critical('[yellow]This is critical[/].') 68 | 69 | stdout, stderr = output.stdout, output.stderr 70 | assert stdout.strip() == '' 71 | assert '[INFO] This is info.' in stderr 72 | assert '[WARNING] This is warning.' in stderr 73 | assert '[ERROR] This is error.' in stderr 74 | assert '[CRITICAL] This is critical.' in stderr 75 | 76 | @isolated_logger(handlers=[]) 77 | def test_stream(self): 78 | try_init_root(logging.DEBUG) 79 | with capture_output() as output: 80 | logger = getLogger('stream_test') 81 | assert logger.name == 'stream_test' 82 | 83 | with mock.patch.dict(os.environ, {'DISABLE_RICH': '1'}): 84 | logger.info('This is info.') 85 | logger.warning('This is warning.') 86 | with mock.patch.dict(os.environ, {'DISABLE_RICH': ''}): 87 | logger.error('This is error.') 88 | logger.critical('This is critical.') 89 | 90 | stdout, stderr = output.stdout, output.stderr 91 | assert '[INFO] This is info.' in stderr 92 | assert '[WARNING] This is warning.' in stderr 93 | assert 'ERROR This is error.' in stderr 94 | assert 'CRITICAL This is critical.' in stderr 95 | 96 | @isolated_logger(handlers=[]) 97 | def test_with_basic_stream(self): 98 | with capture_output() as output: 99 | root = logging.getLogger() 100 | root.setLevel(logging.DEBUG) 101 | hdl = logging.StreamHandler(sys.stdout) 102 | hdl.setFormatter( 103 | logging.Formatter( 104 | fmt='[%(asctime)s][%(filename)s:%(lineno)d][THIS IS UNITTEST][%(levelname)s] %(message)s', 105 | datefmt="%m-%d %H:%M:%S", 106 | ) 107 | ) 108 | root.addHandler(hdl) 109 | 110 | logger = getLogger('basics_stream') 111 | assert logger.name == 'basics_stream' 112 | 113 | logger.info('This is info.') 114 | logger.warning('This is warning.') 115 | logger.error('This is error.') 116 | logger.critical('This is critical.') 117 | 118 | stdout, stderr = output.stdout, output.stderr 119 | assert stderr.strip() == '' 120 | assert '[THIS IS UNITTEST][INFO] This is info.' in stdout 121 | assert '[THIS IS UNITTEST][WARNING] This is warning.' in stdout 122 | assert '[THIS IS UNITTEST][ERROR] This is error.' in stdout 123 | assert '[THIS IS UNITTEST][CRITICAL] This is critical.' in stdout 124 | 125 | @isolated_logger(handlers=[]) 126 | def test_with_basic_rich(self): 127 | try_init_root(logging.DEBUG) 128 | with capture_output() as output: 129 | root = logging.getLogger() 130 | root.setLevel(logging.DEBUG) 131 | hdl = RichHandler() 132 | hdl.setFormatter(logging.Formatter( 133 | fmt='[THIS IS UNITTEST] %(message)s', 134 | datefmt="%m-%d %H:%M:%S", 135 | )) 136 | root.addHandler(hdl) 137 | 138 | logger = getLogger('basics_rich') 139 | assert logger.name == 'basics_rich' 140 | 141 | logger.info('This is info.') 142 | logger.warning('This is warning.') 143 | logger.error('This is error.') 144 | logger.critical('This is critical.') 145 | 146 | stdout, stderr = output.stdout, output.stderr 147 | 148 | assert 'INFO [THIS IS UNITTEST] This is info.' in stdout 149 | assert 'WARNING [THIS IS UNITTEST] This is warning.' in stdout 150 | assert 'ERROR [THIS IS UNITTEST] This is error.' 
in stdout 151 | assert 'CRITICAL [THIS IS UNITTEST] This is critical.' in stdout 152 | 153 | assert 'INFO This is info.' in stderr 154 | assert 'WARNING This is warning.' in stderr 155 | assert 'ERROR This is error.' in stderr 156 | assert 'CRITICAL This is critical.' in stderr 157 | 158 | @isolated_logger(handlers=[]) 159 | def test_with_files(self): 160 | try_init_root(logging.DEBUG) 161 | with isolated_directory(): 162 | # function close_all_handlers should be removed soon. 163 | with close_all_handlers('with_files'), capture_output() as output: 164 | logger = getLogger('with_files', with_files=['log_file_1.txt', 'log_file_2.txt']) 165 | assert logger.name == 'with_files' 166 | 167 | logger.info('This is info.') 168 | logger.warning('This is warning.') 169 | logger.error('This is error.') 170 | logger.critical('This is critical.') 171 | 172 | stdout, stderr = output.stdout, output.stderr 173 | assert stdout.strip() == '' 174 | assert 'INFO This is info.' in stderr 175 | assert 'WARNING This is warning.' in stderr 176 | assert 'ERROR This is error.' in stderr 177 | assert 'CRITICAL This is critical.' in stderr 178 | 179 | log_file_1 = pathlib.Path('log_file_1.txt').read_text() 180 | assert '[INFO] This is info.' in log_file_1 181 | assert '[WARNING] This is warning.' in log_file_1 182 | assert '[ERROR] This is error.' in log_file_1 183 | assert '[CRITICAL] This is critical.' in log_file_1 184 | 185 | log_file_2 = pathlib.Path('log_file_2.txt').read_text() 186 | assert '[INFO] This is info.' in log_file_2 187 | assert '[WARNING] This is warning.' in log_file_2 188 | assert '[ERROR] This is error.' in log_file_2 189 | assert '[CRITICAL] This is critical.' in log_file_2 190 | 191 | @isolated_logger(handlers=[]) 192 | def test_new_level(self): 193 | try_init_root(logging.DEBUG) 194 | with capture_output() as output: 195 | _ = getLogger('new_level') 196 | 197 | logger = getLogger('new_level', level=logging.WARNING) 198 | assert logger.name == 'new_level' 199 | 200 | logger.info('This is info.') 201 | logger.warning('This is warning.') 202 | logger.error('This is error.') 203 | logger.critical('This is critical.') 204 | 205 | stdout, stderr = output.stdout, output.stderr 206 | assert stdout.strip() == '' 207 | assert 'INFO This is info.' not in stderr 208 | assert 'WARNING This is warning.' in stderr 209 | assert 'ERROR This is error.' in stderr 210 | assert 'CRITICAL This is critical.' in stderr 211 | 212 | @isolated_logger(handlers=[]) 213 | def test_new_files(self): 214 | try_init_root(logging.DEBUG) 215 | with isolated_directory(): 216 | # function close_all_handlers should be removed soon. 217 | with close_all_handlers('new_files'), capture_output() as output: 218 | _ = getLogger('new_files', with_files=['log_file_1.txt', 'log_file_2.txt']) 219 | logger = getLogger('new_files', with_files=['log_file_1.txt', 'log_file_3.txt']) 220 | assert logger.name == 'new_files' 221 | 222 | logger.info('This is info.') 223 | logger.warning('This is warning.') 224 | logger.error('This is error.') 225 | logger.critical('This is critical.') 226 | 227 | stdout, stderr = output.stdout, output.stderr 228 | assert stdout.strip() == '' 229 | assert "WARNING File 'log_file_1.txt' has" in stderr 230 | assert 'INFO This is info.' in stderr 231 | assert 'WARNING This is warning.' in stderr 232 | assert 'ERROR This is error.' in stderr 233 | assert 'CRITICAL This is critical.' 
in stderr 234 | 235 | log_file_1 = pathlib.Path('log_file_1.txt').read_text() 236 | assert "[WARNING] File 'log_file_1.txt' has" in log_file_1 237 | assert '[INFO] This is info.' in log_file_1 238 | assert '[WARNING] This is warning.' in log_file_1 239 | assert '[ERROR] This is error.' in log_file_1 240 | assert '[CRITICAL] This is critical.' in log_file_1 241 | 242 | log_file_2 = pathlib.Path('log_file_1.txt').read_text() 243 | assert "[WARNING] File 'log_file_1.txt' has" in log_file_2 244 | assert '[INFO] This is info.' in log_file_2 245 | assert '[WARNING] This is warning.' in log_file_2 246 | assert '[ERROR] This is error.' in log_file_2 247 | assert '[CRITICAL] This is critical.' in log_file_2 248 | 249 | log_file_3 = pathlib.Path('log_file_1.txt').read_text() 250 | assert "[WARNING] File 'log_file_1.txt' has" in log_file_3 251 | assert '[INFO] This is info.' in log_file_3 252 | assert '[WARNING] This is warning.' in log_file_3 253 | assert '[ERROR] This is error.' in log_file_3 254 | assert '[CRITICAL] This is critical.' in log_file_3 255 | -------------------------------------------------------------------------------- /test/tensorboard/plots/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/tensorboard/plots/__init__.py -------------------------------------------------------------------------------- /test/tensorboard/plots/conftest.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import pytest 3 | 4 | 5 | @pytest.fixture(scope='module', autouse=True) 6 | def clear_previous_plot(): 7 | plt.cla() 8 | -------------------------------------------------------------------------------- /test/tensorboard/plots/test_range.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import pytest 3 | import seaborn as sns 4 | from hbutils.testing import isolated_directory, disable_output 5 | 6 | from ditk.tensorboard import tb_create_range_plots 7 | from test.testing import get_testfile 8 | 9 | 10 | @pytest.mark.unittest 11 | class TestTensorboardPlotsRange: 12 | def test_tb_create_range_plots(self, image_diff): 13 | answer_file = get_testfile('pong_tb_plot.png') 14 | with isolated_directory({'pong_tb': get_testfile('pong_tb')}): 15 | sns.set() 16 | plt.figure(figsize=(7, 5.5)) 17 | 18 | with disable_output(): 19 | tb_create_range_plots( 20 | 'pong_tb', 21 | 'step', 'evaluator_step/reward_mean', 22 | upper_bound=5e5 23 | ) 24 | 25 | plt.tight_layout() 26 | plt.savefig('plot.png', bbox_inches='tight') 27 | 28 | assert image_diff(answer_file, 'plot.png', throw_exception=False) < 0.05 29 | -------------------------------------------------------------------------------- /test/tensorboard/test_log.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from hbutils.testing import disable_output 3 | 4 | from ditk.tensorboard import tb_has_log, tb_extract_log, tb_extract_recursive_logs 5 | from test.testing import get_testfile 6 | 7 | 8 | @pytest.mark.unittest 9 | class TestTensorboardLog: 10 | @pytest.mark.parametrize(['segs', 'has_log'], [ 11 | (('tb1',), False), 12 | (('tb1', 'sac'), True), 13 | (('tb1', 'td3'), True), 14 | ]) 15 | def test_tb_has_log(self, segs, has_log): 16 | assert tb_has_log(get_testfile(*segs)) == has_log 17 | 18 | def 
test_tb_extract_log_tb1_sac(self): 19 | with disable_output(): 20 | df = tb_extract_log(get_testfile('tb1', 'sac')) 21 | assert df['step'].min() == 1000 22 | assert df['step'].max() == 32980 23 | 24 | basic_eval_episode_return_mean = \ 25 | df[~df['basic/eval_episode_return_mean'].isna()]['basic/eval_episode_return_mean'] 26 | assert basic_eval_episode_return_mean.mean() == pytest.approx(-1130.8211126327515) 27 | assert basic_eval_episode_return_mean.std() == pytest.approx(479.9282245210739) 28 | 29 | def test_tb_extract_recursive_logs_tb1_sac(self): 30 | with disable_output(): 31 | mapping = tb_extract_recursive_logs(get_testfile('tb1', 'sac')) 32 | assert isinstance(mapping, dict) 33 | assert sorted(mapping.keys()) == ['.'] 34 | 35 | df = mapping['.'] 36 | assert df['step'].min() == 1000 37 | assert df['step'].max() == 32980 38 | 39 | basic_eval_episode_return_mean = \ 40 | df[~df['basic/eval_episode_return_mean'].isna()]['basic/eval_episode_return_mean'] 41 | assert basic_eval_episode_return_mean.mean() == pytest.approx(-1130.8211126327515) 42 | assert basic_eval_episode_return_mean.std() == pytest.approx(479.9282245210739) 43 | 44 | def test_tb_extract_recursive_logs_tb1(self): 45 | with disable_output(): 46 | mapping = tb_extract_recursive_logs(get_testfile('tb1')) 47 | assert isinstance(mapping, dict) 48 | assert sorted(mapping.keys()) == ['sac', 'td3'] 49 | 50 | df_1 = mapping['sac'] 51 | assert df_1['step'].min() == 1000 52 | assert df_1['step'].max() == 32980 53 | 54 | basic_eval_episode_return_mean = \ 55 | df_1[~df_1['basic/eval_episode_return_mean'].isna()]['basic/eval_episode_return_mean'] 56 | assert basic_eval_episode_return_mean.mean() == pytest.approx(-1130.8211126327515) 57 | assert basic_eval_episode_return_mean.std() == pytest.approx(479.9282245210739) 58 | 59 | df_2 = mapping['td3'] 60 | assert df_2['step'].min() == 800 61 | assert df_2['step'].max() == 87152 62 | 63 | basic_eval_episode_return_mean = \ 64 | df_2[~df_2['basic/eval_episode_return_mean'].isna()]['basic/eval_episode_return_mean'] 65 | assert basic_eval_episode_return_mean.mean() == pytest.approx(-1125.0574578179253) 66 | assert basic_eval_episode_return_mean.std() == pytest.approx(318.2343800302068) 67 | -------------------------------------------------------------------------------- /test/testfile/pong_tb/pong_efficientzero_tb/seed0/events.out.tfevents.pong-efficientzero-seed0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/pong_tb/pong_efficientzero_tb/seed0/events.out.tfevents.pong-efficientzero-seed0 -------------------------------------------------------------------------------- /test/testfile/pong_tb/pong_efficientzero_tb/seed1/events.out.tfevents.pong-efficientzero-seed1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/pong_tb/pong_efficientzero_tb/seed1/events.out.tfevents.pong-efficientzero-seed1 -------------------------------------------------------------------------------- /test/testfile/pong_tb/pong_efficientzero_tb/seed2/events.out.tfevents.pong-efficientzero-seed2: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/pong_tb/pong_efficientzero_tb/seed2/events.out.tfevents.pong-efficientzero-seed2 -------------------------------------------------------------------------------- /test/testfile/pong_tb/pong_muzero_tb/seed0/events.out.tfevents.pong_muzero_seed0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/pong_tb/pong_muzero_tb/seed0/events.out.tfevents.pong_muzero_seed0 -------------------------------------------------------------------------------- /test/testfile/pong_tb/pong_muzero_tb/seed1/events.out.tfevents.pong_muzero_seed1: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/pong_tb/pong_muzero_tb/seed1/events.out.tfevents.pong_muzero_seed1 -------------------------------------------------------------------------------- /test/testfile/pong_tb/pong_muzero_tb/seed2/events.out.tfevents.pong_muzero_seed2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/pong_tb/pong_muzero_tb/seed2/events.out.tfevents.pong_muzero_seed2 -------------------------------------------------------------------------------- /test/testfile/pong_tb_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/pong_tb_plot.png -------------------------------------------------------------------------------- /test/testfile/tb1/sac/events.out.tfevents.1684900409.CN0014009700M.local: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/tb1/sac/events.out.tfevents.1684900409.CN0014009700M.local -------------------------------------------------------------------------------- /test/testfile/tb1/td3/events.out.tfevents.1684910134.CN0014009700M.local: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opendilab/DI-toolkit/8d728a42aaf139b10e9f5133447fefde32149f67/test/testfile/tb1/td3/events.out.tfevents.1684910134.CN0014009700M.local -------------------------------------------------------------------------------- /test/testing/__init__.py: -------------------------------------------------------------------------------- 1 | from .log import close_all_handlers 2 | from .testfile import get_testfile 3 | -------------------------------------------------------------------------------- /test/testing/log.py: -------------------------------------------------------------------------------- 1 | import logging 2 | from contextlib import contextmanager 3 | from typing import Optional 4 | 5 | 6 | @contextmanager 7 | def close_all_handlers(name: Optional[str] = None): 8 | try: 9 | yield 10 | finally: 11 | logger = logging.getLogger(name) 12 | for handler in logger.handlers: 13 | handler.close() 14 | -------------------------------------------------------------------------------- /test/testing/testfile.py: -------------------------------------------------------------------------------- 1 | 
import os.path 2 | 3 | # Root directory of the shared test fixtures, i.e. <repo>/test/testfile. 4 | _TESTFILE_DIR = os.path.normpath(os.path.join(__file__, '..', '..', 'testfile')) 5 | 6 | 7 | def get_testfile(path, *paths): 8 | # Join one or more path segments onto the fixture directory and return the full path. 9 | return os.path.join(_TESTFILE_DIR, path, *paths) 10 | --------------------------------------------------------------------------------
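
The test modules above exercise the public entry points of ditk: the drop-in logging layer (ditk.logging), the TensorBoard extraction and plotting helpers (ditk.tensorboard), and the annotated-documentation generator (ditk.doc.annotated). The following is a minimal usage sketch assembled only from the calls seen in these tests; it assumes ditk is installed, and the directory './my_tb_logs', the file 'demo.log' and the scalar key 'evaluator_step/reward_mean' are illustrative placeholders rather than names required by the library.

import matplotlib.pyplot as plt

from ditk import logging
from ditk.tensorboard import tb_has_log, tb_extract_log, tb_create_range_plots

# Initialise the root logger once. Output is rich-formatted by default; the tests
# show that setting the environment variable DISABLE_RICH=1 falls back to plain
# '[LEVEL] message' formatting.
logging.try_init_root(logging.INFO)

# getLogger mirrors the stdlib API; with_files additionally tees records into files.
logger = logging.getLogger('demo', with_files=['demo.log'])
logger.info('This is info.')
logger.warning('This is warning.')

# TensorBoard helpers: detect event files, extract them into a pandas DataFrame,
# and draw an aggregated range plot onto the current matplotlib figure for the
# runs found under a directory.
if tb_has_log('./my_tb_logs/run0'):
    df = tb_extract_log('./my_tb_logs/run0')
    logger.info(f"steps {df['step'].min()}..{df['step'].max()}")

plt.figure(figsize=(7, 5.5))
tb_create_range_plots('./my_tb_logs', 'step', 'evaluator_step/reward_mean')
plt.tight_layout()
plt.savefig('range_plot.png', bbox_inches='tight')

The annotated-documentation generator covered in test/doc/annotated/test_generate.py is used separately, either as generate_annotated_doc('ppo.py', 'ppo.html', 'This is the title') from Python or through the CLI driven in that test, which corresponds to python -m ditk.doc.annotated create -i ppo.py -o ppo.html.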