├── .github └── workflows │ ├── python-publish.yml │ └── python-test.yml ├── .gitignore ├── .pylintrc ├── .vscode └── settings.json ├── CITATION.cff ├── LICENSE ├── MANIFEST.in ├── README.md ├── dev-requirements.txt ├── devnotes └── README.md ├── examples ├── EBeam.lyp ├── bezier_waveguides │ ├── .gitignore │ ├── Plotting memoized file.ipynb │ ├── analyze_profile.py │ ├── bezier_waveguides.gds │ ├── generate_bezier_interp.py │ └── main.py └── ebeam_pdk.py ├── gdslibrary ├── princeton_logo_simple.gds └── queens_logo.gds ├── hacks └── inspect_klayoutdb.py ├── mypy.ini ├── pyproject.toml ├── pytest.ini ├── setup.py ├── tests ├── .gitignore ├── __init__.py ├── cells │ ├── __init__.py │ ├── test_pcell_basic.py │ ├── test_pcell_library.py │ └── test_pcell_params.py ├── context.py ├── layout │ ├── __init__.py │ ├── test_cache.py │ ├── test_layout_write.py │ ├── test_metadata.py │ ├── test_points.py │ ├── test_rectangle.py │ └── test_waveguide.py ├── technology │ ├── EBeam.lyp │ ├── __init__.py │ ├── test_basic.py │ └── test_xml.py ├── test_waveguide_rounding_truth.gds └── tmp │ └── .gitignore ├── version.py └── zeropdk ├── __init__.py ├── default_library ├── __init__.py └── io.py ├── exceptions.py ├── klayout_extend ├── __init__.py ├── cell.py ├── layout.py ├── point.py └── polygon.py ├── layout ├── .gitignore ├── __init__.py ├── algorithms │ ├── __init__.py │ └── sampling.py ├── bezier_optimal.npz ├── cache.py ├── geometry.py ├── polygons.py ├── routing.py ├── waveguide_rounding.py └── waveguides.py ├── pcell.py ├── tech └── __init__.py └── utils └── gitpath.py /.github/workflows/python-publish.yml: -------------------------------------------------------------------------------- 1 | # This workflows will upload a Python Package using Twine when a release is created 2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries 3 | 4 | name: Upload Python Package 5 | 6 | on: 7 | release: 8 | types: [created] 9 | 10 | jobs: 11 | build: 12 | runs-on: ubuntu-latest 13 | 14 | steps: 15 | - uses: actions/checkout@v2 16 | - name: Set up Python 17 | uses: actions/setup-python@v2 18 | with: 19 | python-version: '3.x' 20 | - name: Install dependencies 21 | run: | 22 | pip install -r dev-requirements.txt 23 | - name: Test with pytest 24 | run: | 25 | pytest 26 | 27 | deploy: 28 | needs: build 29 | runs-on: ubuntu-latest 30 | 31 | steps: 32 | - uses: actions/checkout@v2 33 | - name: Set up Python 34 | uses: actions/setup-python@v2 35 | with: 36 | python-version: '3.x' 37 | - name: Install dependencies 38 | run: | 39 | python -m pip install --upgrade pip 40 | pip install setuptools wheel twine 41 | - name: Build and publish 42 | env: 43 | TWINE_USERNAME: ${{ secrets.PYPI_USERNAME }} 44 | TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }} 45 | run: | 46 | python setup.py bdist_wheel 47 | twine upload dist/* 48 | -------------------------------------------------------------------------------- /.github/workflows/python-test.yml: -------------------------------------------------------------------------------- 1 | name: Test packages 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | runs-on: ubuntu-latest 8 | 9 | steps: 10 | - uses: actions/checkout@v2 11 | - name: Set up Python 3.7 12 | uses: actions/setup-python@v2 13 | with: 14 | python-version: '3.7' 15 | - name: Install dependencies 16 | run: | 17 | pip install -r dev-requirements.txt 18 | - name: Typechecking 19 | run: | 20 | mypy zeropdk 21 | - name: Test with 
pytest 22 | run: | 23 | pytest -s -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | cov.xml 2 | 3 | # Created by https://www.gitignore.io/api/python,macos,windows,linux 4 | # Edit at https://www.gitignore.io/?templates=python,macos,windows,linux 5 | 6 | ### Linux ### 7 | *~ 8 | 9 | # temporary files which can be created if a process still has a handle open of a deleted file 10 | .fuse_hidden* 11 | 12 | # KDE directory preferences 13 | .directory 14 | 15 | # Linux trash folder which might appear on any partition or disk 16 | .Trash-* 17 | 18 | # .nfs files are created when an open file is removed but is still being accessed 19 | .nfs* 20 | 21 | ### macOS ### 22 | # General 23 | .DS_Store 24 | .AppleDouble 25 | .LSOverride 26 | 27 | # Icon must end with two \r 28 | Icon 29 | 30 | # Thumbnails 31 | ._* 32 | 33 | # Files that might appear in the root of a volume 34 | .DocumentRevisions-V100 35 | .fseventsd 36 | .Spotlight-V100 37 | .TemporaryItems 38 | .Trashes 39 | .VolumeIcon.icns 40 | .com.apple.timemachine.donotpresent 41 | 42 | # Directories potentially created on remote AFP share 43 | .AppleDB 44 | .AppleDesktop 45 | Network Trash Folder 46 | Temporary Items 47 | .apdisk 48 | 49 | ### Python ### 50 | # Byte-compiled / optimized / DLL files 51 | __pycache__/ 52 | *.py[cod] 53 | *$py.class 54 | 55 | # C extensions 56 | *.so 57 | 58 | # Distribution / packaging 59 | .Python 60 | build/ 61 | develop-eggs/ 62 | dist/ 63 | downloads/ 64 | eggs/ 65 | .eggs/ 66 | lib/ 67 | lib64/ 68 | parts/ 69 | sdist/ 70 | var/ 71 | wheels/ 72 | share/python-wheels/ 73 | *.egg-info/ 74 | .installed.cfg 75 | *.egg 76 | MANIFEST 77 | 78 | # PyInstaller 79 | # Usually these files are written by a python script from a template 80 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
81 | *.manifest 82 | *.spec 83 | 84 | # Installer logs 85 | pip-log.txt 86 | pip-delete-this-directory.txt 87 | 88 | # Unit test / coverage reports 89 | htmlcov/ 90 | .tox/ 91 | .nox/ 92 | .coverage 93 | .coverage.* 94 | .cache 95 | nosetests.xml 96 | coverage.xml 97 | *.cover 98 | .hypothesis/ 99 | .pytest_cache/ 100 | 101 | # Translations 102 | *.mo 103 | *.pot 104 | 105 | # Django stuff: 106 | *.log 107 | local_settings.py 108 | db.sqlite3 109 | 110 | # Flask stuff: 111 | instance/ 112 | .webassets-cache 113 | 114 | # Scrapy stuff: 115 | .scrapy 116 | 117 | # Sphinx documentation 118 | docs/_build/ 119 | 120 | # PyBuilder 121 | target/ 122 | 123 | # Jupyter Notebook 124 | .ipynb_checkpoints 125 | 126 | # IPython 127 | profile_default/ 128 | ipython_config.py 129 | 130 | # pyenv 131 | .python-version 132 | 133 | # celery beat schedule file 134 | celerybeat-schedule 135 | 136 | # SageMath parsed files 137 | *.sage.py 138 | 139 | # Environments 140 | .env 141 | .venv 142 | env/ 143 | venv/ 144 | ENV/ 145 | env.bak/ 146 | venv.bak/ 147 | 148 | # Spyder project settings 149 | .spyderproject 150 | .spyproject 151 | 152 | # Rope project settings 153 | .ropeproject 154 | 155 | # mkdocs documentation 156 | /site 157 | 158 | # mypy 159 | .mypy_cache/ 160 | .dmypy.json 161 | dmypy.json 162 | 163 | # Pyre type checker 164 | .pyre/ 165 | 166 | ### Python Patch ### 167 | .venv/ 168 | 169 | ### Windows ### 170 | # Windows thumbnail cache files 171 | Thumbs.db 172 | ehthumbs.db 173 | ehthumbs_vista.db 174 | 175 | # Dump file 176 | *.stackdump 177 | 178 | # Folder config file 179 | [Dd]esktop.ini 180 | 181 | # Recycle Bin used on file shares 182 | $RECYCLE.BIN/ 183 | 184 | # Windows Installer files 185 | *.cab 186 | *.msi 187 | *.msix 188 | *.msm 189 | *.msp 190 | 191 | # Windows shortcuts 192 | *.lnk 193 | 194 | # End of https://www.gitignore.io/api/python,macos,windows,linux 195 | dev-examples 196 | -------------------------------------------------------------------------------- /.pylintrc: -------------------------------------------------------------------------------- 1 | [MASTER] 2 | 3 | # A comma-separated list of package or module names from where C extensions may 4 | # be loaded. Extensions are loading into the active Python interpreter and may 5 | # run arbitrary code. 6 | extension-pkg-whitelist=klayout.dbcore 7 | 8 | # Specify a score threshold to be exceeded before program exits with error. 9 | fail-under=10.0 10 | 11 | # Add files or directories to the blacklist. They should be base names, not 12 | # paths. 13 | ignore=CVS 14 | 15 | # Add files or directories matching the regex patterns to the blacklist. The 16 | # regex matches against base names, not paths. 17 | ignore-patterns= 18 | 19 | # Python code to execute, usually for sys.path manipulation such as 20 | # pygtk.require(). 21 | #init-hook= 22 | 23 | # Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the 24 | # number of processors available to use. 25 | jobs=1 26 | 27 | # Control the amount of potential inferred values when inferring a single 28 | # object. This can help the performance when dealing with large functions or 29 | # complex, nested conditions. 30 | limit-inference-results=100 31 | 32 | # List of plugins (as comma separated values of python module names) to load, 33 | # usually to register additional checkers. 34 | load-plugins= 35 | 36 | # Pickle collected data for later comparisons. 
37 | persistent=yes 38 | 39 | # When enabled, pylint would attempt to guess common misconfiguration and emit 40 | # user-friendly hints instead of false-positive error messages. 41 | suggestion-mode=yes 42 | 43 | # Allow loading of arbitrary C extensions. Extensions are imported into the 44 | # active Python interpreter and may run arbitrary code. 45 | unsafe-load-any-extension=no 46 | 47 | 48 | [MESSAGES CONTROL] 49 | 50 | # Only show warnings with the listed confidence levels. Leave empty to show 51 | # all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED. 52 | confidence= 53 | 54 | # Disable the message, report, category or checker with the given id(s). You 55 | # can either give multiple identifiers separated by comma (,) or put this 56 | # option multiple times (only on the command line, not in the configuration 57 | # file where it should appear only once). You can also use "--disable=all" to 58 | # disable everything first and then reenable specific checks. For example, if 59 | # you want to run only the similarities checker, you can use "--disable=all 60 | # --enable=similarities". If you want to run only the classes checker, but have 61 | # no Warning level messages displayed, use "--disable=all --enable=classes 62 | # --disable=W". 63 | disable=print-statement, 64 | parameter-unpacking, 65 | unpacking-in-except, 66 | old-raise-syntax, 67 | backtick, 68 | long-suffix, 69 | old-ne-operator, 70 | old-octal-literal, 71 | import-star-module-level, 72 | non-ascii-bytes-literal, 73 | raw-checker-failed, 74 | bad-inline-option, 75 | locally-disabled, 76 | file-ignored, 77 | suppressed-message, 78 | useless-suppression, 79 | deprecated-pragma, 80 | use-symbolic-message-instead, 81 | apply-builtin, 82 | basestring-builtin, 83 | buffer-builtin, 84 | cmp-builtin, 85 | coerce-builtin, 86 | execfile-builtin, 87 | file-builtin, 88 | long-builtin, 89 | raw_input-builtin, 90 | reduce-builtin, 91 | standarderror-builtin, 92 | unicode-builtin, 93 | xrange-builtin, 94 | coerce-method, 95 | delslice-method, 96 | getslice-method, 97 | setslice-method, 98 | no-absolute-import, 99 | old-division, 100 | dict-iter-method, 101 | dict-view-method, 102 | next-method-called, 103 | metaclass-assignment, 104 | indexing-exception, 105 | raising-string, 106 | reload-builtin, 107 | oct-method, 108 | hex-method, 109 | nonzero-method, 110 | cmp-method, 111 | input-builtin, 112 | round-builtin, 113 | intern-builtin, 114 | unichr-builtin, 115 | map-builtin-not-iterating, 116 | zip-builtin-not-iterating, 117 | range-builtin-not-iterating, 118 | filter-builtin-not-iterating, 119 | using-cmp-argument, 120 | eq-without-hash, 121 | div-method, 122 | idiv-method, 123 | rdiv-method, 124 | exception-message-attribute, 125 | invalid-str-codec, 126 | sys-max-int, 127 | bad-python3-import, 128 | deprecated-string-function, 129 | deprecated-str-translate-call, 130 | deprecated-itertools-function, 131 | deprecated-types-field, 132 | next-method-defined, 133 | dict-items-not-iterating, 134 | dict-keys-not-iterating, 135 | dict-values-not-iterating, 136 | deprecated-operator-function, 137 | deprecated-urllib-function, 138 | xreadlines-attribute, 139 | deprecated-sys-function, 140 | exception-escape, 141 | comprehension-escape, 142 | logging-fstring-interpolation, logging-not-lazy, logging-format-interpolation, 143 | invalid-name, 144 | C0330, C0326 145 | 146 | # Enable the message, report, category or checker with the given id(s). 
You can 147 | # either give multiple identifier separated by comma (,) or put this option 148 | # multiple time (only on the command line, not in the configuration file where 149 | # it should appear only once). See also the "--disable" option for examples. 150 | enable=c-extension-no-member 151 | 152 | 153 | [REPORTS] 154 | 155 | # Python expression which should return a score less than or equal to 10. You 156 | # have access to the variables 'error', 'warning', 'refactor', and 'convention' 157 | # which contain the number of messages in each category, as well as 'statement' 158 | # which is the total number of statements analyzed. This score is used by the 159 | # global evaluation report (RP0004). 160 | evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10) 161 | 162 | # Template used to display messages. This is a python new-style format string 163 | # used to format the message information. See doc for all details. 164 | #msg-template= 165 | 166 | # Set the output format. Available formats are text, parseable, colorized, json 167 | # and msvs (visual studio). You can also give a reporter class, e.g. 168 | # mypackage.mymodule.MyReporterClass. 169 | output-format=text 170 | 171 | # Tells whether to display a full report or only the messages. 172 | reports=no 173 | 174 | # Activate the evaluation score. 175 | score=yes 176 | 177 | 178 | [REFACTORING] 179 | 180 | # Maximum number of nested blocks for function / method body 181 | max-nested-blocks=5 182 | 183 | # Complete name of functions that never returns. When checking for 184 | # inconsistent-return-statements if a never returning function is called then 185 | # it will be considered as an explicit return statement and no message will be 186 | # printed. 187 | never-returning-functions=sys.exit 188 | 189 | 190 | [LOGGING] 191 | 192 | # The type of string formatting that logging methods do. `old` means using % 193 | # formatting, `new` is for `{}` formatting. 194 | logging-format-style=old 195 | 196 | # Logging modules to check that the string format arguments are in logging 197 | # function parameter format. 198 | logging-modules=logging 199 | 200 | 201 | [SPELLING] 202 | 203 | # Limits count of emitted suggestions for spelling mistakes. 204 | max-spelling-suggestions=4 205 | 206 | # Spelling dictionary name. Available dictionaries: none. To make it work, 207 | # install the python-enchant package. 208 | spelling-dict= 209 | 210 | # List of comma separated words that should not be checked. 211 | spelling-ignore-words= 212 | 213 | # A path to a file that contains the private dictionary; one word per line. 214 | spelling-private-dict-file= 215 | 216 | # Tells whether to store unknown words to the private dictionary (see the 217 | # --spelling-private-dict-file option) instead of raising a message. 218 | spelling-store-unknown-words=no 219 | 220 | 221 | [MISCELLANEOUS] 222 | 223 | # List of note tags to take in consideration, separated by a comma. 224 | notes=FIXME, 225 | XXX, 226 | TODO 227 | 228 | # Regular expression of note tags to take in consideration. 229 | #notes-rgx= 230 | 231 | 232 | [TYPECHECK] 233 | 234 | # List of decorators that produce context managers, such as 235 | # contextlib.contextmanager. Add to this list to register other decorators that 236 | # produce valid context managers. 237 | contextmanager-decorators=contextlib.contextmanager 238 | 239 | # List of members which are set dynamically and missed by pylint inference 240 | # system, and so shouldn't trigger E1101 when accessed. 
Python regular 241 | # expressions are accepted. 242 | generated-members= 243 | 244 | # Tells whether missing members accessed in mixin class should be ignored. A 245 | # mixin class is detected if its name ends with "mixin" (case insensitive). 246 | ignore-mixin-members=yes 247 | 248 | # Tells whether to warn about missing members when the owner of the attribute 249 | # is inferred to be None. 250 | ignore-none=yes 251 | 252 | # This flag controls whether pylint should warn about no-member and similar 253 | # checks whenever an opaque object is returned when inferring. The inference 254 | # can return multiple potential results while evaluating a Python object, but 255 | # some branches might not be evaluated, which results in partial inference. In 256 | # that case, it might be useful to still emit no-member and other checks for 257 | # the rest of the inferred objects. 258 | ignore-on-opaque-inference=yes 259 | 260 | # List of class names for which member attributes should not be checked (useful 261 | # for classes with dynamically set attributes). This supports the use of 262 | # qualified names. 263 | ignored-classes=optparse.Values,thread._local,_thread._local 264 | 265 | # List of module names for which member attributes should not be checked 266 | # (useful for modules/projects where namespaces are manipulated during runtime 267 | # and thus existing member attributes cannot be deduced by static analysis). It 268 | # supports qualified module names, as well as Unix pattern matching. 269 | ignored-modules= 270 | 271 | # Show a hint with possible names when a member name was not found. The aspect 272 | # of finding the hint is based on edit distance. 273 | missing-member-hint=yes 274 | 275 | # The minimum edit distance a name should have in order to be considered a 276 | # similar match for a missing member name. 277 | missing-member-hint-distance=1 278 | 279 | # The total number of similar names that should be taken in consideration when 280 | # showing a hint for a missing member. 281 | missing-member-max-choices=1 282 | 283 | # List of decorators that change the signature of a decorated function. 284 | signature-mutators= 285 | 286 | 287 | [VARIABLES] 288 | 289 | # List of additional names supposed to be defined in builtins. Remember that 290 | # you should avoid defining new builtins when possible. 291 | additional-builtins= 292 | 293 | # Tells whether unused global variables should be treated as a violation. 294 | allow-global-unused-variables=yes 295 | 296 | # List of strings which can identify a callback function by name. A callback 297 | # name must start or end with one of those strings. 298 | callbacks=cb_, 299 | _cb 300 | 301 | # A regular expression matching the name of dummy variables (i.e. expected to 302 | # not be used). 303 | dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ 304 | 305 | # Argument names that match this expression will be ignored. Default to name 306 | # with leading underscore. 307 | ignored-argument-names=_.*|^ignored_|^unused_ 308 | 309 | # Tells whether we should check for unused import in __init__ files. 310 | init-import=no 311 | 312 | # List of qualified module names which can have objects that can redefine 313 | # builtins. 314 | redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io 315 | 316 | 317 | [FORMAT] 318 | 319 | # Expected format of line ending, e.g. empty (any line ending), LF or CRLF. 
320 | expected-line-ending-format= 321 | 322 | # Regexp for a line that is allowed to be longer than the limit. 323 | ignore-long-lines=^\s*(# )??$ 324 | 325 | # Number of spaces of indent required inside a hanging or continued line. 326 | indent-after-paren=4 327 | 328 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 329 | # tab). 330 | indent-string=' ' 331 | 332 | # Maximum number of characters on a single line. 333 | max-line-length=100 334 | 335 | # Maximum number of lines in a module. 336 | max-module-lines=1000 337 | 338 | # Allow the body of a class to be on the same line as the declaration if body 339 | # contains single statement. 340 | single-line-class-stmt=no 341 | 342 | # Allow the body of an if to be on the same line as the test if there is no 343 | # else. 344 | single-line-if-stmt=no 345 | 346 | 347 | [SIMILARITIES] 348 | 349 | # Ignore comments when computing similarities. 350 | ignore-comments=yes 351 | 352 | # Ignore docstrings when computing similarities. 353 | ignore-docstrings=yes 354 | 355 | # Ignore imports when computing similarities. 356 | ignore-imports=no 357 | 358 | # Minimum lines number of a similarity. 359 | min-similarity-lines=4 360 | 361 | 362 | [BASIC] 363 | 364 | # Naming style matching correct argument names. 365 | argument-naming-style=snake_case 366 | 367 | # Regular expression matching correct argument names. Overrides argument- 368 | # naming-style. 369 | #argument-rgx= 370 | 371 | # Naming style matching correct attribute names. 372 | attr-naming-style=snake_case 373 | 374 | # Regular expression matching correct attribute names. Overrides attr-naming- 375 | # style. 376 | #attr-rgx= 377 | 378 | # Bad variable names which should always be refused, separated by a comma. 379 | bad-names=foo, 380 | bar, 381 | baz, 382 | toto, 383 | tutu, 384 | tata 385 | 386 | # Bad variable names regexes, separated by a comma. If names match any regex, 387 | # they will always be refused 388 | bad-names-rgxs= 389 | 390 | # Naming style matching correct class attribute names. 391 | class-attribute-naming-style=any 392 | 393 | # Regular expression matching correct class attribute names. Overrides class- 394 | # attribute-naming-style. 395 | #class-attribute-rgx= 396 | 397 | # Naming style matching correct class names. 398 | class-naming-style=PascalCase 399 | 400 | # Regular expression matching correct class names. Overrides class-naming- 401 | # style. 402 | #class-rgx= 403 | 404 | # Naming style matching correct constant names. 405 | const-naming-style=UPPER_CASE 406 | 407 | # Regular expression matching correct constant names. Overrides const-naming- 408 | # style. 409 | #const-rgx= 410 | 411 | # Minimum line length for functions/classes that require docstrings, shorter 412 | # ones are exempt. 413 | docstring-min-length=-1 414 | 415 | # Naming style matching correct function names. 416 | function-naming-style=snake_case 417 | 418 | # Regular expression matching correct function names. Overrides function- 419 | # naming-style. 420 | #function-rgx= 421 | 422 | # Good variable names which should always be accepted, separated by a comma. 423 | good-names=i, 424 | j, 425 | k, 426 | ex, 427 | Run, 428 | _ 429 | 430 | # Good variable names regexes, separated by a comma. If names match any regex, 431 | # they will always be accepted 432 | good-names-rgxs= 433 | 434 | # Include a hint for the correct naming format with invalid-name. 435 | include-naming-hint=no 436 | 437 | # Naming style matching correct inline iteration names. 
438 | inlinevar-naming-style=any 439 | 440 | # Regular expression matching correct inline iteration names. Overrides 441 | # inlinevar-naming-style. 442 | #inlinevar-rgx= 443 | 444 | # Naming style matching correct method names. 445 | method-naming-style=snake_case 446 | 447 | # Regular expression matching correct method names. Overrides method-naming- 448 | # style. 449 | #method-rgx= 450 | 451 | # Naming style matching correct module names. 452 | module-naming-style=snake_case 453 | 454 | # Regular expression matching correct module names. Overrides module-naming- 455 | # style. 456 | #module-rgx= 457 | 458 | # Colon-delimited sets of names that determine each other's naming style when 459 | # the name regexes allow several styles. 460 | name-group= 461 | 462 | # Regular expression which should only match function or class names that do 463 | # not require a docstring. 464 | no-docstring-rgx=^_ 465 | 466 | # List of decorators that produce properties, such as abc.abstractproperty. Add 467 | # to this list to register other decorators that produce valid properties. 468 | # These decorators are taken in consideration only for invalid-name. 469 | property-classes=abc.abstractproperty 470 | 471 | # Naming style matching correct variable names. 472 | variable-naming-style=snake_case 473 | 474 | # Regular expression matching correct variable names. Overrides variable- 475 | # naming-style. 476 | #variable-rgx= 477 | 478 | 479 | [STRING] 480 | 481 | # This flag controls whether inconsistent-quotes generates a warning when the 482 | # character used as a quote delimiter is used inconsistently within a module. 483 | check-quote-consistency=no 484 | 485 | # This flag controls whether the implicit-str-concat should generate a warning 486 | # on implicit string concatenation in sequences defined over several lines. 487 | check-str-concat-over-line-jumps=no 488 | 489 | 490 | [IMPORTS] 491 | 492 | # List of modules that can be imported at any level, not just the top level 493 | # one. 494 | allow-any-import-level= 495 | 496 | # Allow wildcard imports from modules that define __all__. 497 | allow-wildcard-with-all=no 498 | 499 | # Analyse import fallback blocks. This can be used to support both Python 2 and 500 | # 3 compatible code, which means that the block might have code that exists 501 | # only in one or another interpreter, leading to false positives when analysed. 502 | analyse-fallback-blocks=no 503 | 504 | # Deprecated modules which should not be used, separated by a comma. 505 | deprecated-modules=optparse,tkinter.tix 506 | 507 | # Create a graph of external dependencies in the given file (report RP0402 must 508 | # not be disabled). 509 | ext-import-graph= 510 | 511 | # Create a graph of every (i.e. internal and external) dependencies in the 512 | # given file (report RP0402 must not be disabled). 513 | import-graph= 514 | 515 | # Create a graph of internal dependencies in the given file (report RP0402 must 516 | # not be disabled). 517 | int-import-graph= 518 | 519 | # Force import order to recognize a module as part of the standard 520 | # compatibility libraries. 521 | known-standard-library= 522 | 523 | # Force import order to recognize a module as part of a third party library. 524 | known-third-party=enchant 525 | 526 | # Couples of modules and preferred modules, separated by a comma. 527 | preferred-modules= 528 | 529 | 530 | [CLASSES] 531 | 532 | # List of method names used to declare (i.e. assign) instance attributes. 
533 | defining-attr-methods=__init__, 534 | __new__, 535 | setUp, 536 | __post_init__ 537 | 538 | # List of member names, which should be excluded from the protected access 539 | # warning. 540 | exclude-protected=_asdict, 541 | _fields, 542 | _replace, 543 | _source, 544 | _make 545 | 546 | # List of valid names for the first argument in a class method. 547 | valid-classmethod-first-arg=cls 548 | 549 | # List of valid names for the first argument in a metaclass class method. 550 | valid-metaclass-classmethod-first-arg=cls 551 | 552 | 553 | [DESIGN] 554 | 555 | # Maximum number of arguments for function / method. 556 | max-args=5 557 | 558 | # Maximum number of attributes for a class (see R0902). 559 | max-attributes=7 560 | 561 | # Maximum number of boolean expressions in an if statement (see R0916). 562 | max-bool-expr=5 563 | 564 | # Maximum number of branch for function / method body. 565 | max-branches=12 566 | 567 | # Maximum number of locals for function / method body. 568 | max-locals=15 569 | 570 | # Maximum number of parents for a class (see R0901). 571 | max-parents=7 572 | 573 | # Maximum number of public methods for a class (see R0904). 574 | max-public-methods=20 575 | 576 | # Maximum number of return / yield for function / method body. 577 | max-returns=6 578 | 579 | # Maximum number of statements in function / method body. 580 | max-statements=50 581 | 582 | # Minimum number of public methods for a class (see R0903). 583 | min-public-methods=2 584 | 585 | 586 | [EXCEPTIONS] 587 | 588 | # Exceptions that will emit a warning when being caught. Defaults to 589 | # "BaseException, Exception". 590 | overgeneral-exceptions=BaseException, 591 | Exception 592 | -------------------------------------------------------------------------------- /.vscode/settings.json: -------------------------------------------------------------------------------- 1 | { 2 | "python.pythonPath": "/Users/tlima/envs/zeropdk/bin/python" 3 | } -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | # YAML 1.2 2 | --- 3 | abstract: "This is a pure-python PDK factory that enables klayout scripted layout. It assists in photonic integrated circuit layout, which relies on having specialized curved waveguides and non-square-corner shapes." 4 | authors: 5 | - 6 | family-names: "Ferreira de Lima" 7 | given-names: Thomas 8 | cff-version: "1.1.0" 9 | date-released: 2022-08-22 10 | license: MIT 11 | message: "If you use this software, please cite it using these metadata." 12 | repository-code: "https://github.com/lightwave-lab/zeropdk" 13 | title: zeropdk 14 | version: "22.08" 15 | ... 
16 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019, Thomas Ferreira de Lima 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include zeropdk/layout/*.npz 2 | include version.py -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ZeroPDK 2 | 3 | This is a pure-python PDK factory that enables klayout scripted layout. It assists in photonic integrated circuit layout, which relies on having specialized curved waveguides and non-square-corner shapes. 4 | 5 | ## Installation 6 | 7 | This package is heavily based on python's [klayout package](https://github.com/klayout/klayout), still in beta version as of this writing (Jul 2019). 8 | 9 | Installation with pip (virtual environment is highly recommended): 10 | 11 | ```bash 12 | pip install zeropdk 13 | ``` 14 | 15 | Installation from source: 16 | 17 | ```bash 18 | python setup.py install 19 | ``` 20 | 21 | ## Features 22 | 23 | ### KLayout extension 24 | 25 | By importing zeropdk, klayout is patched with a few useful functionalities. For example: 26 | 27 | ```python 28 | import klayout.db as kdb 29 | import zeropdk 30 | 31 | layout = kdb.Layout() 32 | plogo = layout.read_cell(cell_name='princeton_logo', filepath='gdslibrary/princeton_logo_simple.gds') 33 | 34 | # plogo is a cell in the current layout. It can be inserted in the top cell. 35 | ``` 36 | 37 | ### Easy technology layers definition 38 | 39 | Based on a KLayout's layout properties file (.lyp) containing layer definitions, it is easy to import and use all layers. For example: 40 | 41 | ```python 42 | 43 | from zeropdk import Tech 44 | lyp_path = "examples/EBeam.lyp" 45 | EBeam = Tech.load_from_xml(lyp_path) 46 | layerM1 = EBeam.layers["M1"] 47 | print(layerM1, type(layerM1)) # M1 (41/0) 48 | ``` 49 | 50 | The file above belongs to a project called [SiEPIC EBeam PDK](https://github.com/lukasc-ubc/SiEPIC_EBeam_PDK), used in passive silicon photonic foundries. 51 | 52 | ### Advanced PCell definition 53 | 54 | PCells can be hierarchical, as described in [Sec. 
IV.C of this article](https://ieeexplore.ieee.org/abstract/document/8718393). One PCell can use another PCell in its definition, and the parent PCell should, in this case, inherit the child's parameters. An example taken from `zeropdk.default_library.io` is: 55 | 56 | ```python 57 | class DCPadArray(DCPad): 58 | params = ParamContainer(pad_array_count, pad_array_pitch) 59 | 60 | def draw(self, cell): 61 | # ... 62 | for i in range(cp.pad_array_count): 63 | dcpad = DCPad(name=f"pad_{i}", params=cp) 64 | return cell, ports 65 | ``` 66 | 67 | In this case, `DCPadArray` simply places an array of `DCPad` PCells. It defines its own parameters, `pad_array_count` and `pad_array_pitch`, but also inherits the parameters belonging to `DCPad`, such as `layer_metal` and `layer_opening`. 68 | 69 | In the EBeam PDK example, one can adapt a standard library of PCells to one's own parameter sets. For example, the EBeam PDK uses particular layers for its metal deposition and oxide etch steps, so `DCPadArray` can be specialized as follows: 70 | 71 | ```python 72 | 73 | class DCPadArray(DCPadArray): 74 | params = ParamContainer( 75 | PCellParameter( 76 | name="layer_metal", 77 | type=TypeLayer, 78 | description="Metal Layer", 79 | default=EBeam.layers["M1"], 80 | ), 81 | PCellParameter( 82 | name="layer_opening", 83 | type=TypeLayer, 84 | description="Open Layer", 85 | default=EBeam.layers["13_MLopen"], 86 | ), 87 | ) 88 | ``` 89 | 90 | TODO: adapt the example provided [here](https://github.com/lightwave-lab/SiEPIC_EBeam_PDK/tree/scripted_layout/Examples/scripted_layout) to zeropdk. 91 | 92 | ### Photonics-inspired layout functions 93 | 94 | ZeroPDK offers several assistive tools for handling photonic shapes. For example, it is sometimes desirable to draw a waveguide with progressively varying width (a taper). 95 | 96 | ```python 97 | from zeropdk.layout import layout_waveguide 98 | wav_polygon = layout_waveguide(cell, layer, points_list, width) 99 | ``` 100 | 101 | ## Developer notes 102 | 103 | This project is still in its development phase. See the [development notes](devnotes/README.md) for more information. 104 | 105 | ## Acknowledgements 106 | 107 | This material is based in part upon work supported by the National Science Foundation under Grant Number E2CDA-1740262. Any opinions, findings, and conclusions or recommendations expressed in this material are those of the author(s) and do not necessarily reflect the views of the National Science Foundation. 108 | 109 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | numpy 2 | scipy 3 | pytest 4 | pytest-cov 5 | klayout 6 | mypy -------------------------------------------------------------------------------- /devnotes/README.md: -------------------------------------------------------------------------------- 1 | Notes from Thomas. 2 | 3 | 2019-02-04 4 | 5 | # Motivation 6 | 7 | I want to make a PDK factory so that it is easy to lay out chips for new foundry runs if they do not have a proper python-compatible PDK. 8 | 9 | The first necessary component is a proper way of defining layers specific to a foundry. I will use a "technology" container for this kind of information. 10 | 11 | Secondly, we need a way to define PCells, useful for routing purposes (and for sharing). A PCell has to have the same hierarchy ability as normal cells, but it needs to be regenerated solely based on its parameters.
The simplest "PCell" must be just a minimal extension over a cell stored in .gds format and loaded from the library. 12 | 13 | Thirdly, it would be good to have some layout algorithms in hand to ease parametric layout. This includes waveguide creation, bus routing, etc. This functionality was already partially included in SiEPIC-tools, but SiEPIC-tools is (at the time of this writing) not suitable for pure python packages. 14 | 15 | Other features can include PDK management such as IP blocks and licensing, and some DRC/verification functionality, but I will not develop them yet. 16 | 17 | # Technology 18 | 19 | Today, technology serves basically to store useful information about layers. SiEPIC also uses it as a connection to Lumerical simulators. The SOEN PDK makes available information about properties of conductors, vias, and waveguides in XML format. KLayout uses an xml-like file (.lyt) to configure reader and writer options, and a (.lyp) file to configure how layers should be displayed on the GUI, but stores no metadata about them, which is unhelpful. 20 | 21 | Because of this complex behavior, I will choose a class as the data structure to store all the information above. Static methods can be used to interact with XML, and PDKs are free to subclass it as much as they need. 22 | 23 | ## Layers 24 | 25 | In my experience, a few sets of layers should exist in every photonic technology: silicon waveguide, metals (routing and/or heating), vias, text/documentation (display only), and port documentation (display only). As a result, I will make them standard and available to simple methods such as waveguide routing, Manhattan routing with vias, port display, etc. 26 | 27 | ## Other information 28 | 29 | Let's consider a waveguide, for example. Different technologies offer multiple ways of creating a waveguide: rib, ridge, slot, etc. Each of them will use a set of layers and some default parameters. A PDK traditionally offers specifications on each of these types of waveguide. SiEPIC-Tools has already figured out a way to standardize them by using a list of (layer, width, offset) tuples, which is a useful convention to adopt. 30 | 31 | # PCell 32 | 33 | A PCell is the most important concept to get right. It is hard to change once people adopt it, and it is also the most important feature of procedurally generated layout. 34 | 35 | In my experience, here are some useful properties of a PCell: 36 | 37 | - Reusability across different technologies. One should be able to copy-paste the source code of a PCell, say an MRR, between different techs, e.g. Passive/Active Si, SiN, etc. 38 | - Inheritability. One should be able to augment a PCell, e.g. take an MRR cell made only with passive elements and add a heater or PN junction to it. Very useful for users. This can be done by using Python's class inheritance scheme, where the most complex PCells inherit from the simpler ones. One can also combine different PCells into one larger one. There are caveats to this approach: some control flow structures (e.g. if-else) must be avoided. 39 | - Interactivity. To my understanding, we can only know geometric properties after a PCell is instantiated and its parameters are known. For example, we can only know the positions of MRR pins after setting its radius. We should be able to get port positions, the bounding box, and other geometric properties of the cell. 40 | - Extra: Layout-tool independence. While I am biased toward using KLayout, we can also have a tool-independent layout API.
To allow for that, I am going to use klayout.db as my default layout tool and will be sure to pass it as a parameter. An advantage of this is that we can have a very lightweight layout tool that only knows points and vectors by default, so that an entire pcell tree can be coded and built without any layout, and only at the end do we trigger the actual layout (polygon creation, cell making) mechanism. 41 | 42 | ## Ports 43 | 44 | As mentioned above, we need to define ports. These can be electrical or optical, and they should be compatible with certain types of waveguides. They should have their own class because I foresee them being upgraded at a later point. 45 | 46 | 47 | # Layout tool API 48 | 49 | Imagine importing your favorite layout tool as lt in python. Then you can call lt.Box(coordinates) for a rectangular box, or lt.Cell for a new cell. The user has the freedom to pick whatever lt they prefer, whether klayout or another tool. lt can also be passed as a parameter to the layout method of the PCell class. I have always viewed lt as a module, instead of a class. The path of least friction tells me to keep it that way, because we have a lot of classes defined within a module, such as lt.Cell and lt.Layout. Bear in mind: if a layout module is passed as a parameter to a pcell, changing it should trigger redrawing the entire pcell hierarchy. 50 | 51 | 52 | # Collection of layout algorithms 53 | 54 | Here's an example of an import statement in one of our masks: 55 | 56 | ``` python 57 | from layout_algorithms import layout_ring, layout_path, layout_path_with_ends, box_dpolygon, \ 58 | layout_waveguide, layout_circle, layout_square, insert_shape, \ 59 | append_relative, layout_arc_with_drc_exclude, layout_arc, layout_arc2, layout_section, \ 60 | layout_connect_ports, layout_waveguide_angle, layout_disk, layout_rectangle, \ 61 | layout_connect_ports_angle, layout_box 62 | ``` 63 | 64 | These are all functions that take a cell, a layer, and some arguments, and draw a structure using a layout tool of choice. If we restrict ourselves to a set of API methods, these algorithms should be portable to other tools.
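For instance, `layout_waveguide` in zeropdk already follows this calling convention. The sketch below mirrors `examples/bezier_waveguides/main.py` from this repository and is meant only as an illustration of the cell/layer/arguments pattern, not as a full API reference.

```python
import klayout.db as pya

from zeropdk.layout.geometry import bezier_optimal
from zeropdk.layout.waveguides import layout_waveguide

layout = pya.Layout()
TOP = layout.create_cell("TOP")
layer = pya.LayerInfo(1, 0)

# Optimal bezier curve between two points, leaving the first point at 0 degrees
# and arriving at the second at 45 degrees (angles in degrees, as in the example script).
P0 = pya.DPoint(0, 0)
P3 = pya.DPoint(100, 0)
curve = bezier_optimal(P0, P3, 0, 45)

# Every layout algorithm takes a cell, a layer and a few geometric arguments,
# and draws the resulting shapes into that cell.
layout_waveguide(TOP, layer, curve, width=0.5)

layout.write("bezier_waveguide.gds")
```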
65 | -------------------------------------------------------------------------------- /examples/EBeam.lyp: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | #ff80a8 5 | #ff80a8 6 | 0 7 | 0 8 | I6 9 | true 10 | true 11 | true 12 | 13 | false 14 | 0 15 | Si 16 | 1/0@1 17 | 2.0 18 | 19 | 20 | #ff0000 21 | #ff0000 22 | 0 23 | 0 24 | I9 25 | true 26 | true 27 | false 28 | 1 29 | false 30 | 0 31 | 31_Si_p6nm 32 | 31/0@1 33 | 34 | 35 | #0000ff 36 | #0000ff 37 | 0 38 | 0 39 | I5 40 | true 41 | true 42 | false 43 | 44 | false 45 | 0 46 | Text 47 | 10/0@1 48 | 49 | 50 | 0 51 | false 52 | 20/0@1 53 | Si N 54 | true 55 | I6 56 | #000000 57 | #7000FF 58 | 0 59 | false 60 | 0.0 61 | 0 62 | 63 | 64 | #0000ff 65 | #0000ff 66 | 0 67 | 0 68 | I2 69 | 70 | true 71 | true 72 | false 73 | 74 | false 75 | false 76 | 0 77 | Si N++ 78 | 24/0@1 79 | 80 | 81 | #ff00ff 82 | #ff00ff 83 | 0 84 | 0 85 | I5 86 | true 87 | true 88 | false 89 | 1 90 | false 91 | 0 92 | SEM 93 | 200/0@1 94 | 95 | 96 | #805000 97 | #805000 98 | 0 99 | 0 100 | I5 101 | true 102 | true 103 | false 104 | 105 | false 106 | 0 107 | M1 108 | 11/0@1 109 | 110 | 111 | #008000 112 | #008000 113 | 0 114 | 0 115 | I9 116 | true 117 | true 118 | false 119 | 120 | false 121 | 0 122 | 12_M2 123 | 12/0@1 124 | 125 | 126 | #008000 127 | #008000 128 | 0 129 | 0 130 | I7 131 | true 132 | true 133 | false 134 | 135 | false 136 | 0 137 | 13_MLopen 138 | 13/0@1 139 | 140 | 141 | #8086ff 142 | #8086ff 143 | 0 144 | 0 145 | I17 146 | 147 | true 148 | true 149 | false 150 | 151 | false 152 | false 153 | 0 154 | VC 155 | 40/0@1 156 | 157 | 158 | #80a8ff 159 | #80a8ff 160 | 0 161 | 0 162 | I15 163 | 164 | true 165 | true 166 | false 167 | 2.0 168 | false 169 | false 170 | 0 171 | M1 172 | 41/0@1 173 | 174 | 175 | #0080ff 176 | #0080ff 177 | 0 178 | 0 179 | I5 180 | 181 | true 182 | true 183 | false 184 | 185 | false 186 | false 187 | 0 188 | M Heater 189 | 47/0@1 190 | 191 | 192 | #8000ff 193 | #8000ff 194 | 0 195 | 0 196 | I1 197 | true 198 | true 199 | false 200 | 3 201 | false 202 | 0 203 | FloorPlan 204 | 99/0@1 205 | 206 | 207 | #004080 208 | #004080 209 | 0 210 | 0 211 | I1 212 | true 213 | true 214 | true 215 | 1 216 | false 217 | 0 218 | DevRec 219 | 68/0@1 220 | 221 | 222 | #004080 223 | #004080 224 | 0 225 | 0 226 | I11 227 | true 228 | true 229 | false 230 | 2 231 | false 232 | 0 233 | PinRec 234 | 1/10@1 235 | 236 | 237 | #004080 238 | #004080 239 | 0 240 | 0 241 | I9 242 | true 243 | true 244 | false 245 | 2 246 | false 247 | 0 248 | FbrTgt 249 | 81/0@1 250 | 251 | 252 | #805000 253 | #000080 254 | 0 255 | 0 256 | I9 257 | true 258 | true 259 | false 260 | 3 261 | true 262 | 0 263 | Errors 264 | 999/0@1 265 | 266 | 267 | #800057 268 | #800057 269 | 0 270 | 0 271 | I1 272 | true 273 | true 274 | false 275 | 3 276 | false 277 | 0 278 | Lumerical 279 | 733/0@1 280 | 281 | 282 | 283 | 284 | 0 285 | 0 286 | 287 | true 288 | false 289 | false 290 | 291 | false 292 | 0 293 | Extra 294 | */*@* 295 | 296 | #ff80a8 297 | #ff80a8 298 | 0 299 | 0 300 | I5 301 | true 302 | true 303 | false 304 | 1 305 | false 306 | 0 307 | Waveguide 308 | 1/0@1 309 | 310 | 311 | #ff80a8 312 | #ff80a8 313 | 0 314 | 0 315 | I5 316 | true 317 | true 318 | false 319 | 1 320 | false 321 | 0 322 | Si 323 | 1/0@1 324 | 325 | 326 | 327 | 328 | -------------------------------------------------------------------------------- /examples/bezier_waveguides/.gitignore: -------------------------------------------------------------------------------- 1 
| results.profile 2 | *.npz 3 | *.gds 4 | -------------------------------------------------------------------------------- /examples/bezier_waveguides/analyze_profile.py: -------------------------------------------------------------------------------- 1 | import pstats 2 | from pstats import SortKey 3 | 4 | p = pstats.Stats("results.profile") 5 | p.sort_stats(SortKey.CUMULATIVE).print_stats() 6 | -------------------------------------------------------------------------------- /examples/bezier_waveguides/bezier_waveguides.gds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/examples/bezier_waveguides/bezier_waveguides.gds -------------------------------------------------------------------------------- /examples/bezier_waveguides/generate_bezier_interp.py: -------------------------------------------------------------------------------- 1 | """ I noticed that _bezier_optimal takes about 50ms on average. 2 | This function is called every time we need a bezier waveguide. 3 | It is worth therefore saving a pre-computed interpolated function 4 | computed across a wide variety of angles. 5 | 6 | This generates the bezier_optimal.npz file 7 | """ 8 | 9 | from zeropdk.layout.geometry import _original_bezier_optimal as _bezier_optimal 10 | 11 | from scipy.interpolate import interp2d 12 | import numpy as np 13 | import os 14 | 15 | 16 | def generate_npz(): 17 | print("Generating npz... ", end="", flush=True) 18 | x = y = np.linspace(-170, 170, 69) * np.pi / 180 19 | 20 | xx, yy = np.meshgrid(x, y) 21 | z_a, z_b = np.frompyfunc(_bezier_optimal, 2, 2)(xx, yy) 22 | z_a = z_a.astype(np.float) 23 | z_b = z_b.astype(np.float) 24 | 25 | # need to store x, y, z_a and z_b 26 | # recall with interpolate(angles_0, angles_3, z_a, kind='cubic') 27 | np.savez("bezier_optimal.npz", x=x, y=y, z_a=z_a, z_b=z_b) 28 | print("Saved bezier_optimal.npz. 
Done.") 29 | 30 | 31 | def memoized_bezier_optimal(angle0, angle3, file): 32 | 33 | npzfile = np.load(file) 34 | x = npzfile["x"] 35 | y = npzfile["y"] 36 | z_a = npzfile["z_a"] 37 | z_b = npzfile["z_b"] 38 | 39 | a = interp2d(x, y, z_a)(angle0, angle3)[0] 40 | b = interp2d(x, y, z_b)(angle0, angle3)[0] 41 | return a, b 42 | 43 | 44 | if __name__ == "__main__": 45 | if not os.path.isfile("bezier_optimal.npz"): 46 | generate_npz() 47 | 48 | # testing 49 | x = y = np.linspace(-170, 170, 13) * np.pi / 180 50 | for x, y in zip(np.random.choice(x, 10), np.random.choice(y, 10)): 51 | print("trying (x,y) == ({}, {})".format(x, y)) 52 | print(memoized_bezier_optimal(x, y, file="bezier_optimal.npz")) 53 | a, b, result = _bezier_optimal(x, y, return_result=True) 54 | print((a, b), result.nit) 55 | print("---") 56 | -------------------------------------------------------------------------------- /examples/bezier_waveguides/main.py: -------------------------------------------------------------------------------- 1 | import klayout.db as pya 2 | 3 | # import zeropdk's tech 4 | 5 | from zeropdk.layout.geometry import bezier_optimal 6 | from zeropdk.layout.waveguides import layout_waveguide 7 | 8 | import numpy as np 9 | 10 | 11 | def bezier_curve(origin, angle0, angle3, ex, ey): 12 | P0 = origin 13 | P3 = origin + 100 * ex 14 | 15 | curve = bezier_optimal(P0, P3, angle0, angle3) 16 | return curve 17 | 18 | 19 | def main(): 20 | layout = pya.Layout() 21 | TOP = layout.create_cell("TOP") 22 | 23 | layer = pya.LayerInfo(1, 0) # First layer 24 | 25 | origin = pya.DPoint(0, 0) 26 | ex = pya.DVector(1, 0) 27 | ey = pya.DVector(0, 1) 28 | 29 | angles = np.linspace(-170, 170, 13) 30 | 31 | for i, angle_0 in enumerate(angles): 32 | for j, angle_3 in enumerate(angles): 33 | print("Bezier({:>2d}, {:>2d})".format(i, j)) 34 | curve = bezier_curve( 35 | origin + ey * i * 150 + ex * j * 150, angle_0, angle_3, ex, ey 36 | ) 37 | layout_waveguide(TOP, layer, curve, width=0.5) 38 | 39 | layout.write("bezier_waveguides.gds") 40 | 41 | 42 | if __name__ == "__main__": 43 | main() 44 | -------------------------------------------------------------------------------- /examples/ebeam_pdk.py: -------------------------------------------------------------------------------- 1 | """ Minimal PDK for EBeam constructed with ZeroPDK. 
""" 2 | 3 | import os 4 | import logging 5 | from collections import abc 6 | from zeropdk import Tech 7 | from zeropdk.pcell import PCell 8 | 9 | logger = logging.getLogger() 10 | 11 | 12 | lyp_path = os.path.join(os.path.dirname(__file__), "EBeam.lyp") 13 | 14 | 15 | # Technology file 16 | EBeam = Tech.load_from_xml(lyp_path) 17 | 18 | 19 | # Helper functions 20 | 21 | 22 | def draw_ports(cell, ports): 23 | """ Draws ports in the Pin Recognition layer (SiEPIC) 24 | """ 25 | 26 | if isinstance(ports, abc.Mapping): # dictionary 27 | for port in ports.values(): 28 | port.draw(cell, EBeam.layers["PinRec"]) 29 | elif isinstance(ports, abc.Sequence): # list 30 | for port in ports: 31 | port.draw(cell, EBeam.layers["PinRec"]) 32 | else: 33 | raise RuntimeError("Give a list or dict of Ports") 34 | 35 | 36 | # PCells 37 | 38 | from zeropdk.default_library.io import DCPad, DCPadArray 39 | from zeropdk.pcell import PCellParameter, TypeLayer, ParamContainer 40 | 41 | # Overriding default layers 42 | 43 | 44 | class DCPad(DCPad): 45 | params = ParamContainer( 46 | PCellParameter( 47 | name="layer_metal", 48 | type=TypeLayer, 49 | description="Metal Layer", 50 | default=EBeam.layers["M1"], 51 | ), 52 | PCellParameter( 53 | name="layer_opening", 54 | type=TypeLayer, 55 | description="Open Layer", 56 | default=EBeam.layers["13_MLopen"], 57 | ), 58 | ) 59 | 60 | 61 | class DCPadArray(DCPadArray): 62 | params = ParamContainer( 63 | PCellParameter( 64 | name="layer_metal", 65 | type=TypeLayer, 66 | description="Metal Layer", 67 | default=EBeam.layers["M1"], 68 | ), 69 | PCellParameter( 70 | name="layer_opening", 71 | type=TypeLayer, 72 | description="Open Layer", 73 | default=EBeam.layers["13_MLopen"], 74 | ), 75 | ) 76 | -------------------------------------------------------------------------------- /gdslibrary/princeton_logo_simple.gds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/gdslibrary/princeton_logo_simple.gds -------------------------------------------------------------------------------- /gdslibrary/queens_logo.gds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/gdslibrary/queens_logo.gds -------------------------------------------------------------------------------- /hacks/inspect_klayoutdb.py: -------------------------------------------------------------------------------- 1 | """ Utils to inspect klayout.db and create a tree of classes and methods.""" 2 | 3 | import inspect 4 | from textwrap import indent, fill 5 | 6 | # print4 = lambda str: print(indent(fill(str, 100, replace_whitespace=False), ' ' * 4)) 7 | # print8 = lambda str: print(indent(fill(str, 100, replace_whitespace=False), ' ' * 8)) 8 | 9 | # print4 = lambda str: print(indent(str, ' ' * 4)) 10 | # print8 = lambda str: print(indent(str, ' ' * 8)) 11 | 12 | 13 | def wrapfill(str, width): 14 | paragraphs = [] 15 | for section in str.split("\n\n"): 16 | paragraphs.append(fill(section.replace("\\", "\\\\"), width)) 17 | return "\n\n".join(paragraphs) 18 | 19 | 20 | print4 = lambda str: print(indent(wrapfill(str, 100 - 4), " " * 4)) 21 | print8 = lambda str: print(indent(wrapfill(str, 100 - 8), " " * 8)) 22 | 23 | 24 | def inspect_module(module): 25 | class_dict = dict(inspect.getmembers(module, inspect.isclass)) 26 | for name, klass in class_dict.items(): 27 | print("class 
{name}:".format(name=name)) 28 | print4("'''" + inspect.getdoc(klass) + "\n'''") 29 | inspect_class(klass) 30 | print("") 31 | 32 | 33 | def inspect_class(klass): 34 | """ This was designed specifically for klayout.db""" 35 | 36 | # typically methods 37 | method_dict = dict(inspect.getmembers(klass, inspect.ismethoddescriptor)) 38 | 39 | # typically static methods 40 | builtin_dict = dict(inspect.getmembers(klass, inspect.isbuiltin)) 41 | 42 | # typically attributes 43 | getset_dict = dict(inspect.getmembers(klass, inspect.isgetsetdescriptor)) 44 | 45 | print4("# Attributes") 46 | for name, attribute in getset_dict.items(): 47 | try: 48 | print4("'''" + inspect.getdoc(attribute) + "'''") 49 | except Exception: 50 | print8("pass") 51 | print4("{name} = None".format(name=name)) 52 | print("") 53 | 54 | print4("# Methods") 55 | for name, method in method_dict.items(): 56 | print4("def {name}(self, ...):".format(name=name)) 57 | try: 58 | print8("'''" + inspect.getdoc(method) + "'''") 59 | except Exception: 60 | print8("pass") 61 | print("") 62 | 63 | print4("Static Methods") 64 | for name, method in builtin_dict.items(): 65 | print4("@classmethod") 66 | print4("def {name}(cls, ...):".format(name=name)) 67 | try: 68 | print8("'''" + inspect.getdoc(method) + "'''") 69 | except Exception: 70 | print8("pass") 71 | print("") 72 | 73 | 74 | if __name__ == "__main__": 75 | import klayout.db 76 | 77 | inspect_module(klayout.db) 78 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 3.6 3 | 4 | [mypy-klayout.*] 5 | ignore_missing_imports = True 6 | [mypy-pya.*] 7 | ignore_missing_imports = True 8 | [mypy-numpy.*] 9 | ignore_missing_imports = True 10 | [mypy-scipy.*] 11 | ignore_missing_imports = True 12 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.black] 2 | line-length = 100 3 | target-version = ['py37'] -------------------------------------------------------------------------------- /pytest.ini: -------------------------------------------------------------------------------- 1 | [pytest] 2 | addopts = --cov-report xml:cov.xml --cov zeropdk -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | 4 | import os 5 | from setuptools import setup, find_packages 6 | 7 | 8 | def touch(fname, times=None): 9 | with open(fname, "a"): 10 | os.utime(fname, times) 11 | 12 | 13 | def main(): 14 | with open("README.md") as f: 15 | readme = f.read() 16 | 17 | with open("LICENSE") as f: 18 | license_text = f.read() 19 | 20 | with open("version.py") as f: 21 | code = compile(f.read(), "version.py", "exec") 22 | version_dict = {} 23 | exec(code, {}, version_dict) # pylint: disable=exec-used 24 | release = version_dict["release"] 25 | 26 | metadata = dict( 27 | name="zeropdk", 28 | version=release, 29 | description="PDK factory for klayout", 30 | long_description=readme, 31 | long_description_content_type="text/markdown", 32 | license=license_text.split("\n")[0], 33 | python_requires=">=3.7", 34 | packages=find_packages(include=("zeropdk.*")), 35 | url="https://github.com/lightwave-lab/zeropdk", 36 | author="Thomas Ferreira de Lima ", 37 | 
author_email="tlima@princeton.edu", 38 | include_package_data=True, 39 | classifiers=( 40 | "Programming Language :: Python :: 3.6", 41 | "Programming Language :: Python :: 3.7", 42 | "License :: OSI Approved :: MIT License", 43 | "Operating System :: OS Independent", 44 | "Topic :: Scientific/Engineering", 45 | ), 46 | install_requires=["numpy", "klayout", "scipy"], 47 | ) 48 | 49 | setup(**metadata) 50 | 51 | 52 | if __name__ == "__main__": 53 | main() 54 | -------------------------------------------------------------------------------- /tests/.gitignore: -------------------------------------------------------------------------------- 1 | tmp/* 2 | -------------------------------------------------------------------------------- /tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/tests/__init__.py -------------------------------------------------------------------------------- /tests/cells/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/tests/cells/__init__.py -------------------------------------------------------------------------------- /tests/cells/test_pcell_basic.py: -------------------------------------------------------------------------------- 1 | from _pytest.config import filename_arg 2 | import pytest 3 | import os 4 | 5 | 6 | from ..context import zeropdk # noqa 7 | from zeropdk.exceptions import ZeroPDKWarning 8 | from zeropdk.pcell import PCell, PCellParameter, ParamContainer, TypeDouble, TypeInt 9 | from zeropdk.pcell import GDSCell 10 | 11 | import klayout.db as kdb 12 | 13 | pad_size = PCellParameter( 14 | name="pad_size", 15 | type=TypeDouble, 16 | description="Size of electrical pad.", 17 | default=100, 18 | unit="um", 19 | ) 20 | 21 | pad_array_count = PCellParameter( 22 | name="pad_array_count", type=TypeInt, description="Number of pads" 23 | ) 24 | 25 | 26 | class Pad(PCell): 27 | params = ParamContainer(pad_size) 28 | 29 | 30 | class PadArray(Pad): 31 | params = ParamContainer(pad_array_count) 32 | 33 | 34 | def test_pcell_initializer(): 35 | pad = Pad(name="testname", params={"pad_size": 10}) 36 | assert pad.params.pad_size == 10 37 | 38 | 39 | def test_pcell_inheritance(): 40 | pad = Pad(name="testname") 41 | pad_array = PadArray(name="testname") 42 | assert "pad_size" in pad_array.params 43 | assert "pad_array_count" in pad_array.params 44 | 45 | assert pad_array.params["pad_size"] is pad.params["pad_size"] 46 | assert pad_array.params["pad_size"] is pad_array.params.pad_size 47 | 48 | 49 | # Testing the most basic cells: GDSCell 50 | 51 | gdslibpath = os.path.abspath( 52 | os.path.join(os.path.dirname(__file__), "../../gdslibrary") 53 | ) 54 | 55 | 56 | @pytest.fixture 57 | def top_cell(): 58 | def _top_cell(): 59 | layout = kdb.Layout() 60 | layout.dbu = 0.001 61 | TOP = layout.create_cell("TOP") 62 | return TOP, layout 63 | 64 | return _top_cell 65 | 66 | 67 | def test_gdscell(top_cell): 68 | 69 | gds_dir = gdslibpath 70 | princeton_logo = GDSCell("princeton_logo", "princeton_logo_simple.gds", gds_dir)( 71 | name="xyz" 72 | ) 73 | TOP, layout = top_cell() 74 | ex = kdb.DPoint(1, 0) 75 | plogo, _ = princeton_logo.new_cell(layout) 76 | size = (plogo.dbbox().p2 - plogo.dbbox().p1).norm() 77 | for i in range(10): 78 | angle = 10 * i 79 | origin = ex * i * size 80 | 
TOP.insert_cell(plogo, origin, angle) 81 | 82 | # The top cell will contain several instances of the same cell 83 | # Deleting cell named 'priceton_logo' will delete all instances: 84 | # plogo.delete() 85 | TOP.write("tests/tmp/princeton_logo_test.gds") 86 | 87 | cell_count = 0 88 | for cell in layout.each_cell(): 89 | if cell.name.startswith("xyz"): 90 | cell_count += 1 91 | assert cell_count == 1 92 | 93 | 94 | def test_wrong_gdscellname(top_cell): 95 | gds_dir = gdslibpath 96 | princeton_logo = GDSCell("princeton_logo_wrong_name", "princeton_logo_simple.gds", gds_dir)( 97 | name="xyz" 98 | ) 99 | TOP, layout = top_cell() 100 | with pytest.raises(RuntimeError, match='princeton_logo_wrong_name'): 101 | plogo, _ = princeton_logo.new_cell(layout) 102 | 103 | def test_wrong_filename(top_cell): 104 | gds_dir = gdslibpath 105 | with pytest.warns(ZeroPDKWarning, match=f"'princeton_logo_simple_wrongname.gds' not found in '{gdslibpath}'"): 106 | princeton_logo = GDSCell("princeton_logo", "princeton_logo_simple_wrongname.gds", gds_dir)( 107 | name="xyz" 108 | ) 109 | TOP, layout = top_cell() 110 | with pytest.raises(RuntimeError, match='princeton_logo_simple_wrongname.gds'): 111 | plogo, _ = princeton_logo.new_cell(layout) 112 | 113 | def test_gdscellcache(top_cell): 114 | 115 | gds_dir = gdslibpath 116 | princeton_logo = GDSCell("princeton_logo", "princeton_logo_simple.gds", gds_dir)( 117 | name="xyz" 118 | ) 119 | TOP, layout = top_cell() 120 | ex = kdb.DPoint(1, 0) 121 | 122 | for i in range(10): 123 | # The new_cell method will create a new cell every time it is called. 124 | plogo, _ = princeton_logo.new_cell(layout) 125 | size = (plogo.dbbox().p2 - plogo.dbbox().p1).norm() 126 | angle = 10 * i 127 | origin = ex * i * size 128 | TOP.insert_cell(plogo, origin, angle) 129 | 130 | # The top cell will contain several instances of different cells 131 | # 'plogo'. All 'plogos' will contain the same instance of the inner 132 | # gdscell loaded from a file. 
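    # In other words, each call to new_cell() is expected to create a fresh wrapper
    # cell ("xyz", "xyz$1", ...), while the "princeton_logo" geometry read from the
    # GDS file is loaded only once and shared by all of them; the assertions below
    # check exactly that.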
133 | TOP.write("tests/tmp/princeton_logo_testcache.gds") 134 | 135 | # ony one cell "xyz" exists 136 | cell_count = 0 137 | for cell in layout.each_cell(): 138 | if cell.name.startswith("xyz"): 139 | cell_count += 1 140 | assert cell_count == 10 141 | 142 | # 10 instances of cell "xyz" exists 143 | inst_count = 0 144 | for inst in TOP.each_inst(): 145 | if inst.cell.name.startswith("xyz"): 146 | inst_count += 1 147 | assert inst_count == 10 148 | 149 | cell_count = 0 150 | for cell in layout.each_cell(): 151 | if cell.name.startswith("princeton_logo"): 152 | cell_count += 1 153 | assert cell_count == 1 154 | -------------------------------------------------------------------------------- /tests/cells/test_pcell_library.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from ..context import zeropdk # noqa 3 | from zeropdk.default_library import io 4 | 5 | import klayout.db as kdb 6 | 7 | DCPad = io.DCPad 8 | 9 | 10 | @pytest.fixture 11 | def top_cell(): 12 | def _top_cell(): 13 | layout = kdb.Layout() 14 | layout.dbu = 0.001 15 | TOP = layout.create_cell("TOP") 16 | return TOP, layout 17 | 18 | return _top_cell 19 | 20 | 21 | def test_pad_pcell(top_cell): 22 | pad = DCPad(name="testname") 23 | pad.params.layer_metal = kdb.LayerInfo(1, 0) 24 | pad.params.layer_opening = kdb.LayerInfo(2, 0) 25 | 26 | # This will get automatically converted to LayerInfo 27 | # No Error 28 | pad.params.layer_metal = "1/0" 29 | 30 | # TODO set defaults here 31 | TOP, layout = top_cell() 32 | cell, ports = pad.new_cell(layout) 33 | assert "el0" in ports 34 | origin, angle = kdb.DPoint(0, 0), 0 35 | TOP.insert_cell(cell, origin, angle) 36 | TOP.write("tests/tmp/pad.gds") 37 | -------------------------------------------------------------------------------- /tests/cells/test_pcell_params.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from ..context import zeropdk 3 | from zeropdk.pcell import ParamContainer, PCellParameter 4 | 5 | from klayout.db import DPoint 6 | 7 | 8 | def test_basic_usage(): 9 | pc = ParamContainer() 10 | 11 | pc.add_param(PCellParameter(name="orange", default=1)) 12 | assert pc.orange == 1 13 | 14 | pc.orange = 2 15 | assert pc.orange == 2 16 | pc["orange"] == 2 # it is also accessible as a dictionary 17 | 18 | with pytest.raises(TypeError, match="Cannot set 'orange' to 'blah'"): 19 | pc.orange = "blah" 20 | 21 | with pytest.raises( 22 | RuntimeError, match="Unkown parameter type, cannot determine from default." 
23 | ): 24 | pc.add_param(PCellParameter(name="apple")) 25 | pc.add_param(PCellParameter(name="apple", type=int)) 26 | 27 | with pytest.raises(TypeError, match="Cannot set 'apple' to 'one'"): 28 | pc.apple = "one" 29 | 30 | pc.add_param(PCellParameter(name="strawberry", default=DPoint(0, 0))) 31 | assert type(pc.strawberry) == DPoint 32 | 33 | with pytest.raises(TypeError, match="Cannot set 'strawberry' to 'test'"): 34 | pc.strawberry = "test" 35 | 36 | 37 | def test_quirky_cases(): 38 | pc = ParamContainer() 39 | 40 | pc.add_param(PCellParameter(name="orange", default=1)) 41 | 42 | # Don't try to set any value here 43 | with pytest.raises( 44 | TypeError, match="'ParamContainer' object does not support item assignment" 45 | ): 46 | pc["orange"] = 2 47 | -------------------------------------------------------------------------------- /tests/context.py: -------------------------------------------------------------------------------- 1 | import os 2 | import sys 3 | 4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) 5 | 6 | import zeropdk # noqa 7 | -------------------------------------------------------------------------------- /tests/layout/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/tests/layout/__init__.py -------------------------------------------------------------------------------- /tests/layout/test_cache.py: -------------------------------------------------------------------------------- 1 | from functools import partial 2 | import pytest 3 | from math import pi 4 | import os 5 | from shutil import rmtree 6 | from zeropdk.layout.cache import cache_cell, produce_hash 7 | from zeropdk.layout.geometry import rotate, rotate90 8 | from zeropdk.pcell import PCell, ParamContainer, Port, TypeDouble, port_to_pin_helper 9 | import klayout.db as kdb 10 | 11 | CACHE_DIR = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "tmp", "cache") 12 | 13 | cache_cell = partial( 14 | cache_cell, 15 | cache_dir=CACHE_DIR, 16 | ) 17 | 18 | def define_param(name, type, description, default=None, **kwargs): 19 | from zeropdk.pcell import PCellParameter 20 | 21 | return PCellParameter( 22 | name=name, type=type, description=description, default=default, **kwargs 23 | ) 24 | 25 | @cache_cell 26 | class EmptyPCell(PCell): # type: ignore 27 | params = ParamContainer( 28 | define_param("angle_ex", TypeDouble, "x-axis angle (deg)", default=0), 29 | ) 30 | 31 | def origin_ex_ey(self, multiple_of_90=False): # pylint: disable=unused-argument 32 | EX = kdb.DVector(1, 0) 33 | cp = self.get_cell_params() 34 | origin = kdb.DPoint(0, 0) 35 | # if 'angle_ex' not in cp.__dict__: 36 | # cp.angle_ex = 0 37 | if multiple_of_90: 38 | if cp.angle_ex % 90 != 0: 39 | raise RuntimeError("Specify an angle multiple of 90 degrees") 40 | ex = rotate(EX, cp.angle_ex * pi / 180) 41 | ey = rotate90(ex) 42 | return origin, ex, ey 43 | 44 | 45 | def draw(self, cell): 46 | layout = cell.layout() 47 | 48 | origin, ex, ey = self.origin_ex_ey() 49 | waveguide_width = 1 50 | layer = layout.layer(kdb.LayerInfo(1, 0)) 51 | 52 | ports = [Port("opt1", origin, ex, waveguide_width)] 53 | port_to_pin_helper(ports, cell, layer) 54 | 55 | return cell, {port.name: port for port in ports} 56 | 57 | @pytest.fixture 58 | def top_cell(): 59 | rmtree(CACHE_DIR, ignore_errors=True) 60 | def _top_cell(): 61 | layout = kdb.Layout() 62 | layout.dbu = 0.001 63 | TOP = 
layout.create_cell("TOP") 64 | return TOP, layout 65 | 66 | return _top_cell 67 | 68 | def test_new_pcell(top_cell): 69 | TOP, layout = top_cell() 70 | ex = kdb.DPoint(1, 0) 71 | pcell = EmptyPCell("single_port") 72 | pcell.place_cell(TOP, 0 * ex, "opt1") 73 | pcell.place_cell(TOP, 100 * ex) 74 | 75 | pcell2 = EmptyPCell("single_port2") 76 | pcell2.place_cell(TOP, 0 * ex, "opt1") 77 | 78 | TOP.write("tests/tmp/single_port.gds") 79 | 80 | # Inspect written file 81 | layout2 = kdb.Layout() 82 | layout2.dbu = 0.001 83 | 84 | TOP2: kdb.Cell = layout2.read_cell("TOP", "tests/tmp/single_port.gds") 85 | assert TOP2.name == "TOP" 86 | cell_list = [c.name for c in layout2.each_cell()] 87 | short_hash = produce_hash(pcell, extra=(layout.dbu, None)) 88 | assert "TOP" in cell_list 89 | cell_list.remove("TOP") 90 | assert "single_port" in cell_list 91 | cell_list.remove("single_port") 92 | assert "single_port$1" in cell_list 93 | cell_list.remove("single_port$1") 94 | assert "single_port2" in cell_list 95 | cell_list.remove("single_port2") 96 | assert len(cell_list) == 2 97 | assert f"cache_EmptyPCell_{short_hash}" in cell_list 98 | assert cell_list[0].startswith("cache_EmptyPCell") 99 | assert cell_list[1].startswith("cache_EmptyPCell") 100 | 101 | # Creating cell in a new layout (force reading from cache) 102 | layout3 = kdb.Layout() 103 | layout3.dbu = 0.001 104 | 105 | TOP3 = layout3.create_cell("TOP3") 106 | pcell.place_cell(TOP3, 0 * ex) 107 | cell_list = [c.name for c in layout3.each_cell()] 108 | assert "TOP3" in cell_list 109 | cell_list.remove("TOP3") 110 | assert "single_port" in cell_list 111 | cell_list.remove("single_port") 112 | 113 | assert len(cell_list) == 1 114 | assert cell_list[0].startswith("cache_EmptyPCell") 115 | assert f"cache_EmptyPCell_{short_hash}" == cell_list[0] -------------------------------------------------------------------------------- /tests/layout/test_layout_write.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from ..context import zeropdk # noqa 3 | from zeropdk.layout.polygons import rectangle 4 | from zeropdk.layout import insert_shape 5 | 6 | import klayout.db as kdb 7 | 8 | 9 | @pytest.fixture 10 | def top_cell(): 11 | def _top_cell(): 12 | layout = kdb.Layout() 13 | layout.dbu = 0.001 14 | TOP = layout.create_cell("TOP") 15 | return TOP, layout 16 | 17 | return _top_cell 18 | 19 | 20 | def test_rectangle_write(top_cell): 21 | TOP, layout = top_cell() 22 | layer = "1/0" 23 | center = kdb.DPoint(0, 0) 24 | width = 20 25 | height = 10 26 | ex = kdb.DVector(1, 1) 27 | ey = kdb.DVector(0, 1) 28 | r = rectangle(center, width, height, ex, ey) 29 | assert str(r) == "(-10,-15;-10,-5;10,15;10,5)" 30 | 31 | insert_shape(TOP, layer, r) 32 | TOP.write("tests/tmp/test_rectangle.gds") 33 | -------------------------------------------------------------------------------- /tests/layout/test_metadata.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | from ..context import zeropdk # noqa 3 | 4 | import klayout.db as kdb 5 | from zeropdk.layout.cache import CACHE_PROP_ID 6 | 7 | def test_metadata(): 8 | """ 9 | KLayout can save some properties into the cell instance. But GDS does not serialize this into file. 
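    In particular, integer-keyed cell properties survive a GDS write/read round trip,
    while string-keyed properties are dropped by the GDS writer; the assertions below
    document this behavior.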
10 | See more in https://github.com/KLayout/klayout/issues/670 11 | """ 12 | save_options = kdb.SaveLayoutOptions() 13 | save_options.gds2_write_file_properties = True 14 | save_options.gds2_write_cell_properties = True 15 | load_options = kdb.LoadLayoutOptions() 16 | load_options.properties_enabled = True 17 | layout = kdb.Layout() 18 | TOP = layout.create_cell("TOP") 19 | TOP.set_property("key", "test1") 20 | TOP.set_property(123, "test2") 21 | layout.write("tests/tmp/test_metadata.gds", save_options) 22 | layout2 = kdb.Layout() 23 | layout2.read("tests/tmp/test_metadata.gds", load_options) 24 | TOP = layout2.top_cell() 25 | assert TOP.property(123) == "test2" 26 | # This test fails due to a limitation of GDS 27 | # assert TOP.property("key") == "test1" 28 | layout.write("tests/tmp/test_metadata.oas", save_options) 29 | layout2 = kdb.Layout() 30 | layout2.read("tests/tmp/test_metadata.oas", load_options) 31 | TOP = layout2.top_cell() 32 | assert TOP.property(123) == "test2" 33 | # for some reason this fails too, but it shouldn't. 34 | # assert TOP.property("key") == "test1" 35 | 36 | def test_cache_metadata(): 37 | save_options = kdb.SaveLayoutOptions() 38 | save_options.gds2_write_file_properties = True 39 | layout = kdb.Layout() 40 | layout.set_property(CACHE_PROP_ID, "test1") 41 | layout.write("tests/tmp/test_cache_metadata.gds", save_options) 42 | layout2 = kdb.Layout() 43 | assert layout2.property(CACHE_PROP_ID) is None 44 | layout2.set_property(CACHE_PROP_ID, "test2") 45 | assert layout2.property(CACHE_PROP_ID) == "test2" 46 | layout2.read("tests/tmp/test_cache_metadata.gds") 47 | assert layout2.property(CACHE_PROP_ID) == "test1" -------------------------------------------------------------------------------- /tests/layout/test_points.py: -------------------------------------------------------------------------------- 1 | import random 2 | import numpy as np 3 | from ..context import zeropdk # noqa 4 | 5 | import klayout.db as kdb 6 | 7 | 8 | def random_point(Point, a=-10, b=10): 9 | a = 0 10 | b = 10 11 | 12 | x = random.uniform(a, b) 13 | y = random.uniform(a, b) 14 | p = Point(x, y) 15 | return p 16 | 17 | 18 | def test_add_sub(): 19 | p1 = random_point(kdb.Point) 20 | p2 = random_point(kdb.Point) 21 | 22 | sump = p1 + p2 23 | assert sump.x == p1.x + p2.x 24 | assert sump.y == p1.y + p2.y 25 | assert isinstance(sump, kdb.Point) 26 | 27 | diffp = p2 - p1 28 | assert diffp.x == p2.x - p1.x 29 | assert diffp.y == p2.y - p1.y 30 | assert isinstance(diffp, kdb.Vector) 31 | 32 | assert p1 == (sump - diffp) / 2 33 | assert p2 == (sump + diffp) / 2 34 | 35 | 36 | def test_mul(): 37 | p_classes = (kdb.Point, kdb.Vector) 38 | 39 | for p_class in p_classes: 40 | p1 = random_point(kdb.Vector) 41 | p2 = random_point(kdb.Vector) 42 | 43 | assert p1 * p2 == p1.x * p2.x + p1.y * p2.y 44 | 45 | p3 = p1 * 2 46 | assert p3.x == p1.x * 2 47 | assert p3.y == p1.y * 2 48 | 49 | 50 | def test_numpy(): 51 | t = np.arange(3) 52 | ex = kdb.Point(1, 0) 53 | 54 | # Point should consume a numpy array and produce a np.array of points 55 | point_array = t * ex 56 | assert isinstance(point_array, np.ndarray) 57 | assert np.all([0 * ex, 1 * ex, 2 * ex] == point_array) 58 | 59 | def test_float_operations(): 60 | assert kdb.DPoint(1, 2) / 1.0 == kdb.DPoint(1, 2) 61 | assert 0.5 * kdb.DPoint(1, 2) == kdb.DPoint(0.5, 1) 62 | -------------------------------------------------------------------------------- /tests/layout/test_rectangle.py: 
-------------------------------------------------------------------------------- 1 | import random 2 | import pytest 3 | from ..context import zeropdk # noqa 4 | from zeropdk.layout.polygons import square 5 | import klayout.db as kdb 6 | 7 | 8 | def test_square(): 9 | a, b = 0, 10 10 | ex = kdb.DVector(1, 0) 11 | ey = kdb.DVector(0, 1) 12 | size = random.uniform(a, b) 13 | origin = kdb.DPoint(0, 0) 14 | sq = square(origin, size, ex, ey) 15 | 16 | # This is true for any rectangle 17 | p1, p2, p3, p4 = sq.each_point() 18 | assert p1 + p3 == p2 + p4 19 | assert p2 - p1 == p3 - p4 20 | assert p3 - p2 == p4 - p1 21 | 22 | # True for squares only 23 | assert (p2 - p1).norm() == (p4 - p1).norm() 24 | 25 | # area computes normally 26 | assert sq.area() == pytest.approx(size ** 2) 27 | 28 | # origin is inside square 29 | assert sq.inside(origin) 30 | -------------------------------------------------------------------------------- /tests/layout/test_waveguide.py: -------------------------------------------------------------------------------- 1 | from typing import Callable, Tuple 2 | import warnings 3 | import numpy as np 4 | import pytest 5 | from zeropdk.klayout_extend.layout import layout_read_cell 6 | 7 | from zeropdk.layout.waveguide_rounding import compute_rounded_path, layout_waveguide_from_points 8 | from ..context import zeropdk # noqa 9 | from zeropdk.layout.waveguides import waveguide_dpolygon 10 | from zeropdk.layout import insert_shape 11 | 12 | import klayout.db as kdb 13 | 14 | 15 | @pytest.fixture 16 | def top_cell(): 17 | def _top_cell() -> Tuple[kdb.Cell, kdb.Layout]: 18 | layout = kdb.Layout() 19 | layout.dbu = 0.001 20 | TOP = layout.create_cell("TOP") 21 | return TOP, layout 22 | 23 | return _top_cell 24 | 25 | 26 | def test_waveguide(top_cell: Callable[[], Tuple[kdb.Cell, kdb.Layout]]): 27 | t = np.linspace(-1, 1, 100) 28 | ex = kdb.DPoint(1, 0) 29 | ey = kdb.DPoint(0, 1) 30 | 31 | # list of points depicting a parabola 32 | points_list = 100 * t * ex + 100 * t ** 2 * ey 33 | dbu = 0.001 34 | width = 1 35 | 36 | wg = waveguide_dpolygon(points_list, width, dbu, smooth=True) 37 | 38 | # write to test_waveguide.gds (we should see a parabola) 39 | TOP, layout = top_cell() 40 | layer = "1/0" 41 | insert_shape(TOP, layer, wg) 42 | TOP.write("tests/tmp/test_waveguide.gds") 43 | 44 | 45 | def test_waveguide_rounding(top_cell: Callable[[], Tuple[kdb.Cell, kdb.Layout]]): 46 | def trace_rounded_path(cell, layer, rounded_path, width): 47 | points = [] 48 | for item in rounded_path: 49 | points.extend(item.get_points()) 50 | 51 | dpath = kdb.DPath(points, width, 0, 0) 52 | 53 | cell.shapes(layer).insert(dpath) 54 | 55 | def trace_reference_path(cell, layer, points, width): 56 | dpath = kdb.DPath(points, width, 0, 0) 57 | cell.shapes(layer).insert(dpath) 58 | 59 | TOP, layout = top_cell() 60 | layer = kdb.LayerInfo(10, 0) 61 | layerRec = kdb.LayerInfo(1001, 0) 62 | 63 | ex, ey = kdb.DPoint(1, 0), kdb.DPoint(0, 1) 64 | 65 | # Begin tests 66 | 67 | points = [0 * ex, 10 * ex, 10 * (ex + ey), 30 * ex] 68 | origin = 0 * ey 69 | points = [origin + point for point in points] 70 | x = compute_rounded_path(points, 3) 71 | trace_rounded_path(TOP, layer, x, 0.5) 72 | trace_reference_path(TOP, layerRec, points, 0.5) 73 | 74 | points = [0 * ex, 10 * ex, 5 * (ex - ey), 17 * ex, 30 * ex] 75 | origin = 30 * ey 76 | points = [origin + point for point in points] 77 | x = compute_rounded_path(points, 3) 78 | trace_rounded_path(TOP, layer, x, 0.5) 79 | trace_reference_path(TOP, layerRec, points, 0.5) 80 | 81 | 
radius = 3 82 | for ex2 in (ex, -ex): 83 | points = [2 * ex2] 84 | for d in np.arange(1, 10, 2.5): 85 | origin = points[-1] 86 | displacements = [ 87 | 4 * radius * ex2, 88 | 4 * radius * ex2 + d * ey - 1 * d * ex2, 89 | d * ey, 90 | (d + 2 * radius) * ey, 91 | ] 92 | points += [origin + displacement for displacement in displacements] 93 | origin = 15 * ex + 40 * ey 94 | points = [origin + point for point in points] 95 | x = compute_rounded_path(points, radius) 96 | trace_rounded_path(TOP, layer, x, 0.5) 97 | trace_reference_path(TOP, layerRec, points, 0.5) 98 | 99 | # Layout tapered waveguide 100 | points = [ 101 | 0 * ex, 102 | 100 * ex, 103 | 100 * ex + 20 * ey, 104 | 10 * ex + 5 * ey, 105 | 10 * ex + 25 * ey, 106 | 100 * ex + 30 * ey, 107 | ] 108 | 109 | # Untapered 110 | origin = 40 * ex 111 | points_ = [origin + point for point in points] 112 | layout_waveguide_from_points(TOP, layer, points_, 0.5, 5) 113 | 114 | # Tapered 115 | origin = 40 * ex + 40 * ey 116 | points_ = [origin + point for point in points] 117 | layout_waveguide_from_points(TOP, layer, points_, 0.5, 5, taper_width=3, taper_length=10) 118 | 119 | 120 | # Stress test about ClearanceRewind when forward would work. 121 | origin = 40 * ex + 80 * ey 122 | points = [ 123 | 0 * ex, 124 | 222 * ey, 125 | 20 * ex + 222 * ey, 126 | 20 * ex + 371 * ey, 127 | ] 128 | points_ = [origin + point for point in points] 129 | layout_waveguide_from_points(TOP, layer, points_, 5, 500) 130 | 131 | # Stress test on trying forward first after ClearanceRewind. 132 | 133 | origin = 60 * ex + 80 * ey 134 | points = [ 135 | 0 * ex, 136 | 222 * ey, 137 | 231 * ex + 222 * ey, 138 | 231 * ex + 460 * ey, 139 | ] 140 | points_ = [origin + point for point in points] 141 | # breakpoint() 142 | layout_waveguide_from_points(TOP, layer, points_, 5, 230) 143 | 144 | origin = 80 * ex + 80 * ey 145 | points = [ 146 | 0 * ex, 147 | 100 * ey, 148 | 30 * ex + 100 * ey, 149 | 30 * ex + 200 * ey, 150 | ] 151 | points_ = [origin + point for point in points] 152 | with warnings.catch_warnings(): 153 | warnings.simplefilter("ignore") 154 | layout_waveguide_from_points(TOP, layer, points_, 5, 550) 155 | 156 | TOP_reference = layout_read_cell(layout, "TOP", "tests/test_waveguide_rounding_truth.gds") 157 | 158 | new_waveguides = kdb.Region(TOP.shapes(layer)) 159 | ref_waveguides = kdb.Region(TOP_reference.shapes(layer)) 160 | new_waveguides -= ref_waveguides 161 | assert new_waveguides.area() == 0 162 | 163 | TOP.write("tests/tmp/test_waveguide_rounding.gds") -------------------------------------------------------------------------------- /tests/technology/EBeam.lyp: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | #ff80a8 5 | #ff80a8 6 | 0 7 | 0 8 | I6 9 | true 10 | true 11 | true 12 | 13 | false 14 | 0 15 | Si 16 | 1/0@1 17 | 2.0 18 | 19 | 20 | #ff0000 21 | #ff0000 22 | 0 23 | 0 24 | I9 25 | true 26 | true 27 | false 28 | 1 29 | false 30 | 0 31 | 31_Si_p6nm 32 | 31/0@1 33 | 34 | 35 | #0000ff 36 | #0000ff 37 | 0 38 | 0 39 | I5 40 | true 41 | true 42 | false 43 | 44 | false 45 | 0 46 | Text 47 | 10/0@1 48 | 49 | 50 | 0 51 | false 52 | 20/0@1 53 | Si N 54 | true 55 | I6 56 | #000000 57 | #7000FF 58 | 0 59 | false 60 | 0.0 61 | 0 62 | 63 | 64 | #0000ff 65 | #0000ff 66 | 0 67 | 0 68 | I2 69 | 70 | true 71 | true 72 | false 73 | 74 | false 75 | false 76 | 0 77 | Si N++ 78 | 24/0@1 79 | 80 | 81 | #ff00ff 82 | #ff00ff 83 | 0 84 | 0 85 | I5 86 | true 87 | true 88 | false 89 | 1 90 | false 91 | 0 92 | SEM 93 | 200/0@1 94 | 95 | 
96 | #805000 97 | #805000 98 | 0 99 | 0 100 | I5 101 | true 102 | true 103 | false 104 | 105 | false 106 | 0 107 | M1 108 | 11/0@1 109 | 110 | 111 | #008000 112 | #008000 113 | 0 114 | 0 115 | I9 116 | true 117 | true 118 | false 119 | 120 | false 121 | 0 122 | 12_M2 123 | 12/0@1 124 | 125 | 126 | #008000 127 | #008000 128 | 0 129 | 0 130 | I7 131 | true 132 | true 133 | false 134 | 135 | false 136 | 0 137 | 13_MLopen 138 | 13/0@1 139 | 140 | 141 | #8086ff 142 | #8086ff 143 | 0 144 | 0 145 | I17 146 | 147 | true 148 | true 149 | false 150 | 151 | false 152 | false 153 | 0 154 | VC 155 | 40/0@1 156 | 157 | 158 | #80a8ff 159 | #80a8ff 160 | 0 161 | 0 162 | I15 163 | 164 | true 165 | true 166 | false 167 | 2.0 168 | false 169 | false 170 | 0 171 | M1 172 | 41/0@1 173 | 174 | 175 | #0080ff 176 | #0080ff 177 | 0 178 | 0 179 | I5 180 | 181 | true 182 | true 183 | false 184 | 185 | false 186 | false 187 | 0 188 | M Heater 189 | 47/0@1 190 | 191 | 192 | #8000ff 193 | #8000ff 194 | 0 195 | 0 196 | I1 197 | true 198 | true 199 | false 200 | 3 201 | false 202 | 0 203 | FloorPlan 204 | 99/0@1 205 | 206 | 207 | #004080 208 | #004080 209 | 0 210 | 0 211 | I1 212 | true 213 | true 214 | true 215 | 1 216 | false 217 | 0 218 | DevRec 219 | 68/0@1 220 | 221 | 222 | #004080 223 | #004080 224 | 0 225 | 0 226 | I11 227 | true 228 | true 229 | false 230 | 2 231 | false 232 | 0 233 | PinRec 234 | 1/10@1 235 | 236 | 237 | #004080 238 | #004080 239 | 0 240 | 0 241 | I9 242 | true 243 | true 244 | false 245 | 2 246 | false 247 | 0 248 | FbrTgt 249 | 81/0@1 250 | 251 | 252 | #805000 253 | #000080 254 | 0 255 | 0 256 | I9 257 | true 258 | true 259 | false 260 | 3 261 | true 262 | 0 263 | Errors 264 | 999/0@1 265 | 266 | 267 | #800057 268 | #800057 269 | 0 270 | 0 271 | I1 272 | true 273 | true 274 | false 275 | 3 276 | false 277 | 0 278 | Lumerical 279 | 733/0@1 280 | 281 | 282 | 283 | 284 | 0 285 | 0 286 | 287 | true 288 | false 289 | false 290 | 291 | false 292 | 0 293 | Extra 294 | */*@* 295 | 296 | #ff80a8 297 | #ff80a8 298 | 0 299 | 0 300 | I5 301 | true 302 | true 303 | false 304 | 1 305 | false 306 | 0 307 | Waveguide 308 | 1/0@1 309 | 310 | 311 | #ff80a8 312 | #ff80a8 313 | 0 314 | 0 315 | I5 316 | true 317 | true 318 | false 319 | 1 320 | false 321 | 0 322 | Si 323 | 1/0@1 324 | 325 | 326 | 327 | 328 | -------------------------------------------------------------------------------- /tests/technology/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/tests/technology/__init__.py -------------------------------------------------------------------------------- /tests/technology/test_basic.py: -------------------------------------------------------------------------------- 1 | from ..context import zeropdk # noqa 2 | 3 | import klayout.db as kdb 4 | 5 | from zeropdk.tech import Tech 6 | 7 | 8 | class ExampleTech(Tech): 9 | def __init__(self): 10 | super().__init__() 11 | self.add_layer("layer_metal", "1/0") 12 | self.add_layer("layer_opening", "1/0") 13 | 14 | 15 | def test_layers(): 16 | t = ExampleTech() 17 | assert t.layers["layer_metal"] == kdb.LayerInfo(1, 0, "layer_metal") 18 | -------------------------------------------------------------------------------- /tests/technology/test_xml.py: -------------------------------------------------------------------------------- 1 | from ..context import zeropdk # noqa 2 | 3 | from pathlib import Path 4 | import os 5 | from 
zeropdk.tech import Tech 6 | 7 | import klayout.db as kdb 8 | 9 | 10 | def test_load_from_xml(): 11 | filepath = Path(os.path.dirname(__file__)).resolve() / "EBeam.lyp" 12 | ebeam = Tech.load_from_xml(filepath) 13 | assert ebeam.layers["M1"] == kdb.LayerInfo(41, 0, "M1") 14 | -------------------------------------------------------------------------------- /tests/test_waveguide_rounding_truth.gds: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/tests/test_waveguide_rounding_truth.gds -------------------------------------------------------------------------------- /tests/tmp/.gitignore: -------------------------------------------------------------------------------- 1 | * 2 | -------------------------------------------------------------------------------- /version.py: -------------------------------------------------------------------------------- 1 | # version.py 2 | 3 | # The short X.Y version. 4 | version = "22.08" 5 | # The full version, including alpha/beta/rc tags. 6 | release = version + "" 7 | 8 | if __name__ == "__main__": 9 | print("zeropdk v" + version) 10 | -------------------------------------------------------------------------------- /zeropdk/__init__.py: -------------------------------------------------------------------------------- 1 | import os 2 | import logging 3 | from zeropdk.tech import Tech # noqa 4 | from zeropdk import klayout_extend # noqa 5 | 6 | logger = logging.getLogger(__name__) 7 | logger.setLevel(logging.INFO) 8 | stdout_ch = logging.StreamHandler() 9 | logger.addHandler(stdout_ch) 10 | 11 | DEBUG = os.environ.get("ZEROPDK_DEBUG", "false") == "true" 12 | if DEBUG: 13 | logger.setLevel(logging.DEBUG) 14 | stdout_ch.setLevel(logging.DEBUG) 15 | -------------------------------------------------------------------------------- /zeropdk/default_library/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/zeropdk/default_library/__init__.py -------------------------------------------------------------------------------- /zeropdk/default_library/io.py: -------------------------------------------------------------------------------- 1 | from zeropdk.pcell import ( 2 | PCell, 3 | PCellParameter, 4 | TypeDouble, 5 | TypeInt, 6 | TypeLayer, 7 | TypePoint, 8 | Port, 9 | ParamContainer, 10 | ) 11 | from zeropdk.layout import insert_shape 12 | from zeropdk.layout.polygons import rectangle 13 | 14 | from klayout.db import DPoint, DVector 15 | 16 | pad_width = PCellParameter( 17 | name="pad_width", 18 | type=TypeDouble, 19 | description="Width of electrical pad.", 20 | default=120, 21 | unit="um", 22 | ) 23 | 24 | pad_height = PCellParameter( 25 | name="pad_height", 26 | type=TypeDouble, 27 | description="Height of electrical pad.", 28 | default=120, 29 | unit="um", 30 | ) 31 | 32 | port_width = PCellParameter( 33 | name="port_width", 34 | type=TypeDouble, 35 | description="Port width (same as trace width)", 36 | default=20, 37 | unit="um", 38 | ) 39 | 40 | pad_array_count = PCellParameter( 41 | name="pad_array_count", type=TypeInt, description="Number of pads", default=10 42 | ) 43 | 44 | pad_array_pitch = PCellParameter( 45 | name="pad_array_pitch", 46 | type=TypeDouble, 47 | description="Pad array pitch", 48 | default=150, 49 | unit="um", 50 | ) 51 | 52 | origin = PCellParameter(name="origin", 
type=TypePoint, description="Origin", default=DPoint(0, 0)) 53 | 54 | ex = PCellParameter( 55 | name="ex", type=TypePoint, description="x-axis unit vector", default=DPoint(1, 0) 56 | ) 57 | 58 | ey = PCellParameter( 59 | name="ey", type=TypePoint, description="y-axis unit vector", default=DPoint(0, 1) 60 | ) 61 | layer_metal = PCellParameter(name="layer_metal", type=TypeLayer, description="Metal Layer") 62 | 63 | layer_opening = PCellParameter(name="layer_opening", type=TypeLayer, description="Open Layer") 64 | 65 | 66 | class OrientedCell(PCell): 67 | """A standard cell that has the following parameters: 68 | - origin: Point 69 | - ex: unit vector of x axis 70 | - ey: unit vector of y axis 71 | """ 72 | 73 | params = ParamContainer(origin, ex, ey) 74 | 75 | def origin_ex_ey(self): 76 | origin = DPoint(self.params["origin"]) 77 | ex = DVector(self.params.ex) 78 | ey = DVector(self.params.ey) 79 | return origin, ex, ey 80 | 81 | 82 | class DCPad(OrientedCell): 83 | """A standard DC pad. 84 | 85 | Ports: el0 86 | """ 87 | 88 | params = ParamContainer(pad_width, pad_height, port_width, layer_metal, layer_opening) 89 | 90 | def draw(self, cell): 91 | layout = cell.layout() 92 | 93 | origin, ex, ey = self.origin_ex_ey() 94 | cp = self.params 95 | 96 | def make_shape_from_dpolygon(dpoly, resize_dx, dbu, layer): 97 | dpoly.resize(resize_dx, dbu) 98 | # if resize_dx > dbu: 99 | # dpoly.round_corners(resize_dx, 100) 100 | insert_shape(cell, layer, dpoly) 101 | return dpoly 102 | 103 | def make_pad(origin, pad_width, pad_height, ex, ey): 104 | pad_square = rectangle(origin, pad_width, pad_height, ex, ey) 105 | make_shape_from_dpolygon(pad_square, 0, layout.dbu, cp.layer_metal) 106 | make_shape_from_dpolygon(pad_square, -2.5, layout.dbu, cp.layer_opening) 107 | 108 | make_pad(origin + cp.pad_height * ey / 2, cp.pad_width, cp.pad_height, ex, ey) 109 | 110 | port = Port("el0", origin + cp.port_width * ey / 2, -ey, cp.port_width, "el_dc") 111 | 112 | return cell, {"el0": port} 113 | 114 | 115 | class DCPadArray(DCPad): 116 | params = ParamContainer(pad_array_count, pad_array_pitch) 117 | 118 | def draw(self, cell): 119 | cp = self.params 120 | origin, ex, _ = self.origin_ex_ey() 121 | 122 | ports = {} 123 | 124 | for i in range(cp.pad_array_count): 125 | dcpad = DCPad(name=f"pad_{i}", params=cp) 126 | dc_ports = dcpad.place_cell(cell, origin + cp.pad_array_pitch * i * ex) 127 | ports[f"el_{i}"] = dc_ports["el0"].rename(f"el_{i}") 128 | # self.add_port(dc_ports["el0"].rename(f"el_{i}")) 129 | 130 | return cell, ports 131 | -------------------------------------------------------------------------------- /zeropdk/exceptions.py: -------------------------------------------------------------------------------- 1 | class ZeroPDKWarning(UserWarning): 2 | """Warning related to the usage of ZeroPDK. The responsibility falls on the user to fix these warnings.""" 3 | 4 | class ZeroPDKUserError(Exception): 5 | """Exception resulting from impossible design inputs for ZeroPDK.""" -------------------------------------------------------------------------------- /zeropdk/klayout_extend/__init__.py: -------------------------------------------------------------------------------- 1 | from . 
import point, cell, layout, polygon # noqa 2 | -------------------------------------------------------------------------------- /zeropdk/klayout_extend/cell.py: -------------------------------------------------------------------------------- 1 | """Extends kdb.Cell object by introducing or replacing with the following methods: 2 | - Cell.insert_cell 3 | - Cell.shapes 4 | """ 5 | 6 | from typing import Type 7 | from functools import wraps 8 | import klayout.db as kdb 9 | from klayout.db import Cell, DPoint 10 | 11 | 12 | def cell_insert_cell( 13 | cell: Type[Cell], other_cell: Type[Cell], origin: Type[DPoint], angle_deg: float 14 | ) -> Type[Cell]: 15 | mag = 1 16 | rot = angle_deg 17 | mirrx = False 18 | u = DPoint(origin) 19 | trans = kdb.DCplxTrans(mag, rot, mirrx, u) 20 | 21 | cell.insert(kdb.DCellInstArray(other_cell.cell_index(), trans)) 22 | return cell 23 | 24 | 25 | Cell.insert_cell = cell_insert_cell 26 | 27 | 28 | def override_layer(method): 29 | old_method = method 30 | 31 | @wraps(old_method) 32 | def new_method(self: Type[Cell], layer, *args, **kwargs): 33 | if isinstance(layer, (kdb.LayerInfo, str)): 34 | layer_index = self.layout().layer(layer) 35 | else: 36 | layer_index = layer 37 | return old_method(self, layer_index, *args, **kwargs) 38 | 39 | return new_method 40 | 41 | 42 | # All the methods that have layer_index as first argument 43 | # I would like to allow LayerInfo to be passed as parameter 44 | # Taken from https://www.klayout.de/doc-qt5/code/class_Cell.html 45 | Cell.shapes = override_layer(Cell.shapes) 46 | Cell.begin_shapes_rec = override_layer(Cell.begin_shapes_rec) 47 | Cell.bbox_per_layer = override_layer(Cell.bbox_per_layer) 48 | Cell.dbbox_per_layer = override_layer(Cell.dbbox_per_layer) 49 | Cell.each_shape = override_layer(Cell.each_shape) 50 | Cell.each_touching_shape = override_layer(Cell.each_touching_shape) 51 | Cell.each_overlapping_shape = override_layer(Cell.each_overlapping_shape) 52 | -------------------------------------------------------------------------------- /zeropdk/klayout_extend/layout.py: -------------------------------------------------------------------------------- 1 | from typing import Callable 2 | from klayout.db import Layout, Cell 3 | 4 | 5 | def layout_read_cell(layout: Layout, cell_name: str, filepath: str) -> Cell: 6 | """Imports a cell from a file into current layout. 7 | 8 | layout [pya.Layout]: layout to insert cell into 9 | cell_name [str]: cell name from the file in filepath 10 | filepath [str]: location of layout file you want to import 11 | 12 | If the name already exists in the current layout, klayout will 13 | create a new one based on its internal rules for naming 14 | collision: name$1, name$2, ... 15 | """ 16 | 17 | # BUG loading this file twice segfaults klayout 18 | layout2 = Layout() 19 | layout2.read(filepath) 20 | gdscell2 = layout2.cell(cell_name) 21 | if gdscell2 is None: 22 | raise RuntimeError(f"The file '{filepath}' does not contain a cell named '{cell_name}'. 
This name is case sensitive.") 23 | gdscell = layout.create_cell(cell_name) 24 | gdscell.copy_tree(gdscell2) 25 | del gdscell2 26 | del layout2 27 | return gdscell 28 | 29 | 30 | Layout.read_cell = layout_read_cell 31 | -------------------------------------------------------------------------------- /zeropdk/klayout_extend/point.py: -------------------------------------------------------------------------------- 1 | """ 2 | klayout.db.Point Extensions: 3 | - P * np/number 4 | - np/number * P 5 | - P * P 6 | - P / number 7 | - P.norm() 8 | - P.normalize() = P / P.norm() 9 | """ 10 | from numbers import Number 11 | from math import sqrt 12 | from klayout.db import Point, DPoint, DVector, Vector 13 | 14 | try: 15 | import numpy as np 16 | 17 | MODULE_NUMPY = True 18 | except ImportError: 19 | MODULE_NUMPY = False 20 | 21 | # Point-like classes 22 | PointLike = (Point, DPoint, DVector, Vector) 23 | 24 | 25 | def pyaPoint__rmul__(self, factor): 26 | """This implements factor * P""" 27 | if isinstance(factor, Number): 28 | return self.__class__(self.x * factor, self.y * factor) 29 | elif MODULE_NUMPY and isinstance(factor, np.ndarray): # ideally this is never called 30 | return factor.__mul__(self) 31 | else: 32 | return NotImplemented 33 | 34 | 35 | def pyaPoint__mul__(self, factor): 36 | """This implements P * factor""" 37 | if isinstance(factor, Number): 38 | return self.__class__(self.x * factor, self.y * factor) 39 | elif MODULE_NUMPY and isinstance(factor, np.ndarray): # Numpy can multiply any object 40 | return factor.__mul__(self) 41 | elif isinstance(factor, PointLike): 42 | return self.x * factor.x + self.y * factor.y 43 | else: 44 | return NotImplemented 45 | 46 | 47 | def pyaPoint__truediv__(self, dividend): 48 | """This implements P / dividend""" 49 | return self.__class__(self.x / dividend, self.y / dividend) 50 | 51 | 52 | def pyaPoint__deepcopy__(self, memo): 53 | return self.__class__(self.x, self.y) 54 | 55 | 56 | def pyaPoint_norm(self): 57 | """This implements the L2 norm""" 58 | return sqrt(self.x**2 + self.y**2) 59 | 60 | 61 | def pyaPoint_normalize(self): 62 | return self / self.norm() 63 | 64 | 65 | def pyaPoint__init__(self, *args): 66 | try: 67 | self.x, self.y = args 68 | except (TypeError, ValueError): 69 | if len(args) == 1: 70 | (p,) = args 71 | try: 72 | self.x = p.x 73 | self.y = p.y 74 | except: 75 | raise ValueError("Cannot understand {}".format(p)) 76 | except Exception: 77 | raise ValueError("Unknown constructor") 78 | 79 | 80 | def pyaPoint__getstate__(self): 81 | return (self.x, self.y) 82 | 83 | 84 | def pyaPoint__setstate__(self, state): 85 | self.x, self.y = state 86 | 87 | def pyaPoint__repr__(self): 88 | return f"{self.__class__.__name__}({self.x}, {self.y})" 89 | 90 | for klass in PointLike: 91 | klass.__init__ = pyaPoint__init__ 92 | klass.__rmul__ = pyaPoint__rmul__ 93 | klass.__mul__ = pyaPoint__mul__ 94 | klass.__truediv__ = pyaPoint__truediv__ 95 | klass.__deepcopy__ = pyaPoint__deepcopy__ 96 | klass.__getstate__ = pyaPoint__getstate__ 97 | klass.__setstate__ = pyaPoint__setstate__ 98 | klass.__repr__ = pyaPoint__repr__ 99 | klass.normalize = pyaPoint_normalize 100 | klass.norm = pyaPoint_norm 101 | -------------------------------------------------------------------------------- /zeropdk/klayout_extend/polygon.py: -------------------------------------------------------------------------------- 1 | import pya 2 | import klayout.db as kdb 3 | from zeropdk import klayout_extend # noqa 4 | 5 | import numpy as np 6 | from numpy import pi, sqrt 7 | from 
zeropdk.layout.geometry import rotate90, rotate 8 | 9 | 10 | def patch_simple_polygon(backend): 11 | class _SimplePolygon(backend.DSimplePolygon): 12 | """SimplePolygon with some added functionalities: 13 | - transform_and_rotate 14 | - clip 15 | - layout 16 | - layout_drc_exclude 17 | - resize 18 | - round_corners 19 | """ 20 | 21 | def transform_and_rotate(self, center, ex=None): 22 | """Translates the polygon by 'center' and rotates by the 'ex' orientation. 23 | 24 | Example: if current polygon is a unit square with bottom-left corner at (0,0), 25 | then square.transform_and_rotate(DPoint(0, 1), DVector(0, 1)) will 26 | rotate the square by 90 degrees and translate it by 1 y-unit. 27 | The new square's bottom-left corner will be at (-1, 1). 28 | """ 29 | if ex is None: 30 | ex = backend.DVector(1, 0) 31 | ey = rotate90(ex) 32 | 33 | polygon_dpoints_transformed = [center + p.x * ex + p.y * ey for p in self.each_point()] 34 | self.assign(_SimplePolygon(polygon_dpoints_transformed)) 35 | return self 36 | 37 | def clip(self, x_bounds=(-np.inf, np.inf), y_bounds=(-np.inf, np.inf)): 38 | """Clips the polygon at four possible boundaries. 39 | The boundaries are tuples based on absolute coordinates and cartesian axes. 40 | This method is very powerful when used with transform_and_rotate. 41 | """ 42 | # Add points exactly at the boundary, so that the filter below works. 43 | x_bounds = (np.min(x_bounds), np.max(x_bounds)) 44 | y_bounds = (np.min(y_bounds), np.max(y_bounds)) 45 | 46 | check_within_bounds = ( 47 | lambda p: x_bounds[0] <= p.x 48 | and x_bounds[1] >= p.x 49 | and y_bounds[0] <= p.y 50 | and y_bounds[1] >= p.y 51 | ) 52 | 53 | def intersect_left_boundary(p1, p2, x_bounds, y_bounds): 54 | left_most, right_most = (p1, p2) if p1.x < p2.x else (p2, p1) 55 | bottom_most, top_most = (p1, p2) if p1.y < p2.y else (p2, p1) 56 | if left_most.x < x_bounds[0] and right_most.x > x_bounds[0]: 57 | # outside the box, on the left 58 | y_intersect = np.interp( 59 | x_bounds[0], 60 | [left_most.x, right_most.x], 61 | [left_most.y, right_most.y], 62 | ) 63 | if y_bounds[0] < y_intersect and y_bounds[1] > y_intersect: 64 | return backend.DPoint(float(x_bounds[0]), float(y_intersect)) 65 | return None 66 | 67 | def intersect(p1, p2, x_bounds, y_bounds): 68 | intersect_list = list() 69 | last_intersect = None 70 | 71 | def rotate_bounds90(x_bounds, y_bounds, i_times): 72 | for _ in range(i_times): 73 | x_bounds, y_bounds = ( 74 | (-y_bounds[1], -y_bounds[0]), 75 | (x_bounds[0], x_bounds[1]), 76 | ) 77 | return x_bounds, y_bounds 78 | 79 | for i in range(4): 80 | p1i, p2i = rotate(p1, i * pi / 2), rotate(p2, i * pi / 2) 81 | x_boundsi, y_boundsi = rotate_bounds90(x_bounds, y_bounds, i) 82 | p = intersect_left_boundary(p1i, p2i, x_boundsi, y_boundsi) 83 | if p is not None: 84 | last_intersect = i 85 | intersect_list.append(rotate(p, -i * pi / 2)) 86 | return intersect_list, last_intersect 87 | 88 | polygon_dpoints_clipped = list() 89 | polygon_dpoints = list(self.each_point()) 90 | 91 | def boundary_vertex(edge_from, edge_to): 92 | # left edge:0, top edge:1, right edge:2, bottom edge:3 93 | # returns the vertex between two edges 94 | assert abs(edge_from - edge_to) == 1 95 | if edge_from % 2 == 0: 96 | vertical_edge = edge_from 97 | horizontal_edge = edge_to 98 | else: 99 | vertical_edge = edge_to 100 | horizontal_edge = edge_from 101 | x = x_bounds[(vertical_edge // 2) % 2] 102 | y = y_bounds[(1 - (horizontal_edge - 1) // 2) % 2] 103 | return backend.DPoint(x, y) 104 | 105 | # Rotate point list so we can 
start from a point inside 106 | # (helps the boundary_vertex algorithm) 107 | for idx, point in enumerate(polygon_dpoints): 108 | if check_within_bounds(point): 109 | break 110 | else: 111 | # polygon was never within bounds 112 | # this can only happen if boundaries are finite 113 | # return boundary vertices 114 | boundary_vertices = [boundary_vertex(i, i - 1) for i in range(4, 0, -1)] 115 | self.assign(_SimplePolygon(boundary_vertices)) 116 | return self 117 | 118 | idx += 1 # make previous_point below already be inside 119 | polygon_dpoints = polygon_dpoints[idx:] + polygon_dpoints[:idx] 120 | 121 | previous_point = polygon_dpoints[-1] 122 | previous_intersect = None 123 | for point in polygon_dpoints: 124 | # compute new intersecting point and add to list 125 | intersected_points, last_intersect = intersect( 126 | previous_point, point, x_bounds, y_bounds 127 | ) 128 | if ( 129 | previous_intersect is not None 130 | and last_intersect is not None 131 | and last_intersect != previous_intersect 132 | ): 133 | if check_within_bounds(point): 134 | # this means that we are entering the box at a different edge 135 | # need to add the edge points 136 | 137 | # this assumes a certain polygon orientation 138 | # assume points go clockwise, which means that 139 | # from edge 0 to 2, it goes through 1 140 | i = previous_intersect 141 | while i % 4 != last_intersect: 142 | polygon_dpoints_clipped.append(boundary_vertex(i, i + 1)) 143 | i = i + 1 144 | polygon_dpoints_clipped.extend(intersected_points) 145 | if check_within_bounds(point): 146 | polygon_dpoints_clipped.append(point) 147 | previous_point = point 148 | if last_intersect is not None: 149 | previous_intersect = last_intersect 150 | self.assign(_SimplePolygon(polygon_dpoints_clipped)) 151 | return self 152 | 153 | def layout(self, cell, layer): 154 | """Places polygon as a shape into a cell at a particular layer.""" 155 | from zeropdk.layout import insert_shape 156 | 157 | return insert_shape(cell, layer, self) 158 | 159 | def layout_drc_exclude(self, cell, drclayer, ex): 160 | """Places a drc exclude square at every corner. 161 | A corner is defined by an outer angle greater than 85 degrees (conservative) 162 | """ 163 | from zeropdk.layout.polygons import layout_square 164 | 165 | if drclayer is not None: 166 | points = list(self.each_point()) 167 | assert len(points) > 3 168 | prev_delta = points[-1] - points[-2] 169 | prev_angle = np.arctan2(prev_delta.y, prev_delta.x) 170 | for i in range(len(points)): 171 | delta = points[i] - points[i - 1] 172 | angle = np.arctan2(delta.y, delta.x) 173 | if delta.y == 0 or delta.x == 0: 174 | thresh_angle = pi / 2 175 | else: 176 | thresh_angle = pi * 85 / 180 177 | delta_angle = angle - prev_angle 178 | delta_angle = abs(((delta_angle + pi) % (2 * pi)) - pi) 179 | if delta_angle > thresh_angle: 180 | layout_square(cell, drclayer, points[i - 1], 0.1, ex) 181 | prev_delta, prev_angle = delta, angle 182 | 183 | def resize(self, dx, dbu): 184 | """Resizes the polygon by a positive or negative quantity dx. 
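        Internally, the polygon is converted to a DPolygon, sized with klayout's
        EdgeProcessor, and edges shorter than min(10 * dbu, |dx|) are filtered out
        to avoid leftover sliver segments.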
185 | Args: 186 | dbu: typically 0.001 187 | """ 188 | 189 | # TODO Very klayout specific 190 | 191 | dpoly = backend.DPolygon(self) 192 | dpoly.size(dx, 5) 193 | dpoly = backend.EdgeProcessor().simple_merge_p2p([dpoly.to_itype(dbu)], False, False, 1) 194 | dpoly = dpoly[0].to_dtype(dbu) # backend.DPolygon 195 | 196 | def norm(p): 197 | return sqrt(p.x**2 + p.y**2) 198 | 199 | # Filter edges if they are too small 200 | points = list(dpoly.each_point_hull()) 201 | new_points = list([points[0]]) 202 | for i in range(0, len(points)): 203 | delta = points[i] - new_points[-1] 204 | if norm(delta) > min(10 * dbu, abs(dx)): 205 | new_points.append(points[i]) 206 | 207 | sdpoly = self.__class__(new_points) # convert to SimplePolygon 208 | self.assign(sdpoly) 209 | return self 210 | 211 | def round_corners(self, radius, N): 212 | """This only works if the polygon edges are longer than the radius.""" 213 | 214 | dpoly = super().round_corners(radius, radius, N) 215 | self.assign(dpoly) 216 | return self 217 | 218 | def moved(self, dx_or_dpoint, dy=None): 219 | if isinstance(dx_or_dpoint, (backend.DPoint, backend.DVector)): 220 | dx_or_dpoint = dx_or_dpoint.x 221 | dy = dx_or_dpoint.y 222 | pya_dpoly = super().moved(dx_or_dpoint, dy) 223 | siepic_dpoly = self.__class__() 224 | siepic_dpoly.__dict__.update(pya_dpoly) 225 | return siepic_dpoly 226 | 227 | backend.DSimplePolygon = _SimplePolygon 228 | 229 | 230 | patch_simple_polygon(kdb) 231 | patch_simple_polygon(pya) 232 | -------------------------------------------------------------------------------- /zeropdk/layout/.gitignore: -------------------------------------------------------------------------------- 1 | waveguide_rounding.gds 2 | -------------------------------------------------------------------------------- /zeropdk/layout/__init__.py: -------------------------------------------------------------------------------- 1 | def insert_shape(cell, layer, shape): 2 | if layer is not None: 3 | cell.shapes(layer).insert(shape) 4 | 5 | 6 | import klayout.db as kdb 7 | 8 | 9 | def layout_pgtext(cell, layer, x, y, text, mag, inv=False, angle=0): 10 | layout = kdb.Layout() 11 | lylayer = layout.layer(layer) 12 | for i, line in enumerate(text.splitlines()): 13 | pcell = layout.create_cell( 14 | "TEXT", "Basic", {"text": line, "layer": layer, "mag": mag, "inverse": inv} 15 | ) 16 | pcell.transform_into(kdb.DCplxTrans(1, angle, False, x, y - i * mag * 5 / 4)) 17 | lylayer_new = cell.layout().layer(layer) 18 | cell.shapes(lylayer_new).insert(pcell.shapes(lylayer)) 19 | 20 | 21 | from .polygons import * 22 | from .waveguides import * 23 | from .routing import * 24 | -------------------------------------------------------------------------------- /zeropdk/layout/algorithms/__init__.py: -------------------------------------------------------------------------------- 1 | from .sampling import sample_function # noqa 2 | -------------------------------------------------------------------------------- /zeropdk/layout/algorithms/sampling.py: -------------------------------------------------------------------------------- 1 | # Adapted from: https://stackoverflow.com/questions/14084634/adaptive-plotting-of-a-function-in-python 2 | # Old license: "Adaptive sampling of 1D functions" by unknown CC0 3 | 4 | # Modified by Thomas Ferreira de Lima @thomaslima 5 | # License: CC BY 4.0 https://creativecommons.org/licenses/by/4.0/ 6 | 7 | from typing import Callable 8 | 9 | try: 10 | import numpy as np 11 | except ImportError: 12 | def sample_function( 13 | func: Callable, 
points, tol=0.001, min_points=8, max_level=16, sample_transform=None 14 | ): 15 | raise NotImplementedError("Need numpy package to be installed") 16 | 17 | else: 18 | 19 | def sample_function( 20 | func: Callable, points, tol=0.001, min_points=8, max_level=16, sample_transform=None 21 | ): 22 | """ 23 | Sample a 1D function to given tolerance by adaptive subdivision. 24 | 25 | The function itself can be multidimensional. 26 | 27 | The result of sampling is a set of points that, if plotted, 28 | produces a smooth curve with also sharp features of the function 29 | resolved. 30 | 31 | Parameters 32 | ---------- 33 | func : callable 34 | Function func(x) of a single argument. It is assumed to be vectorized. 35 | points : array-like, 1D 36 | Initial points to sample, sorted in ascending order. 37 | These will determine also the bounds of sampling. 38 | tol : float, optional 39 | Tolerance to sample to. The condition is roughly that the total 40 | length of the curve on the (x, y) plane is computed up to this 41 | tolerance. 42 | min_point : int, optional 43 | Minimum number of points to sample. 44 | max_level : int, optional 45 | Maximum subdivision depth. 46 | sample_transform : callable, optional 47 | Function w = g(x, y). The x-samples are generated so that w 48 | is sampled. 49 | 50 | Returns 51 | ------- 52 | x : ndarray 53 | X-coordinates (WARNING: returns sorted samples) 54 | y : ndarray 55 | Corresponding values of func(x) 56 | 57 | Notes 58 | ----- 59 | This routine is useful in computing functions that are expensive 60 | to compute, and have sharp features --- it makes more sense to 61 | adaptively dedicate more sampling points for the sharp features 62 | than the smooth parts. 63 | 64 | Examples 65 | -------- 66 | >>> def func(x): 67 | ... '''Function with a sharp peak on a smooth background''' 68 | ... a = 0.001 69 | ... return x + a**2/(a**2 + x**2) 70 | ... 
71 | >>> x, y = sample_function(func, [-1, 1], tol=1e-3) 72 | 73 | >>> import matplotlib.pyplot as plt 74 | >>> xx = np.linspace(-1, 1, 12000) 75 | >>> plt.plot(xx, func(xx), '-', x, y[0], '.') 76 | >>> plt.show() 77 | 78 | """ 79 | with np.errstate(divide="ignore", invalid="ignore"): # type: ignore 80 | return _sample_function( 81 | func, 82 | points, 83 | values=None, 84 | mask=None, 85 | depth=0, 86 | tol=tol, 87 | min_points=min_points, 88 | max_level=max_level, 89 | sample_transform=sample_transform, 90 | ) 91 | 92 | def _sample_function( 93 | func: Callable, 94 | points, 95 | values=None, 96 | mask=None, 97 | tol=0.05, 98 | depth=0, 99 | min_points=16, 100 | max_level=16, 101 | sample_transform=None, 102 | ): 103 | points = np.unique(points) 104 | 105 | if values is None: 106 | values = np.atleast_2d(func(points)) 107 | 108 | if mask is None: 109 | mask = slice(None) 110 | 111 | if depth > max_level: 112 | # recursion limit 113 | return points, values 114 | 115 | x_a = points[..., :-1][..., mask] 116 | x_b = points[..., 1:][..., mask] 117 | 118 | x_c = 0.5 * (x_a + x_b) 119 | y_c = np.atleast_2d(func(x_c)) 120 | 121 | x_2 = np.r_[points, x_c] 122 | y_2 = np.r_["-1", values, y_c] 123 | j = np.argsort(x_2) 124 | 125 | x_2 = x_2[..., j] 126 | y_2 = y_2[..., j] 127 | 128 | # -- Determine the intervals at which refinement is necessary 129 | 130 | if len(x_2) < min_points: 131 | mask = np.ones([len(x_2) - 1], dtype=bool) 132 | else: 133 | # represent the data as a path in N dimensions (scaled to unit box) 134 | if sample_transform is not None: 135 | y_2_val = sample_transform(x_2, y_2) 136 | else: 137 | y_2_val = y_2 138 | 139 | p = np.r_[ 140 | "0", 141 | x_2[None, :], 142 | y_2_val.real.reshape(-1, y_2_val.shape[-1]), 143 | y_2_val.imag.reshape(-1, y_2_val.shape[-1]), 144 | ] 145 | 146 | sz = (p.shape[0] - 1) // 2 147 | 148 | xscale = x_2.ptp(axis=-1) 149 | yscale = np.abs(y_2_val.ptp(axis=-1)).ravel() 150 | 151 | p[0] /= xscale 152 | 153 | p[1 : sz + 1] /= yscale[:, None] 154 | p[sz + 1 :] /= yscale[:, None] 155 | 156 | # compute the length of each line segment in the path 157 | dp = np.diff(p, axis=-1) 158 | s = np.sqrt((dp**2).sum(axis=0)) 159 | s_tot = s.sum() 160 | 161 | # compute the angle between consecutive line segments 162 | dp /= s 163 | dcos = np.arccos(np.clip((dp[:, 1:] * dp[:, :-1]).sum(axis=0), -1, 1)) 164 | 165 | # determine where to subdivide: the condition is roughly that 166 | # the total length of the path (in the scaled data) is computed 167 | # to accuracy `tol` 168 | dp_piece = dcos * 0.5 * (s[1:] + s[:-1]) 169 | mask = dp_piece > tol * s_tot 170 | 171 | mask = np.r_[mask, False] 172 | mask[1:] |= mask[:-1].copy() 173 | 174 | # -- Refine, if necessary 175 | 176 | if mask.any(): 177 | return _sample_function( 178 | func, 179 | x_2, 180 | y_2, 181 | mask, 182 | tol=tol, 183 | depth=depth + 1, 184 | min_points=min_points, 185 | max_level=max_level, 186 | sample_transform=sample_transform, 187 | ) 188 | else: 189 | return x_2, y_2 190 | -------------------------------------------------------------------------------- /zeropdk/layout/bezier_optimal.npz: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/lightwave-lab/zeropdk/3121a72c806b911f68cd6d40a4bb1eac4aef3e95/zeropdk/layout/bezier_optimal.npz -------------------------------------------------------------------------------- /zeropdk/layout/cache.py: -------------------------------------------------------------------------------- 1 | """Caching 
algorithms for pcells.""" 2 | 3 | import os 4 | import inspect 5 | import pickle 6 | import logging 7 | from hashlib import sha256 8 | from functools import partial, wraps 9 | from typing import Any, Type, Union, Callable, Dict 10 | 11 | import klayout.db as pya 12 | from zeropdk.pcell import PCell 13 | 14 | logger = logging.getLogger(__name__) 15 | layer_map_dict: Dict[pya.Layout, pya.LayerMap] = dict() 16 | CACHE_ACTIVATED = os.environ.get("ZEROPDK_CACHE_ACTIVATED", "true") == "true" 17 | CACHE_DIR = os.environ.get("ZEROPDK_CACHE_DIR", os.path.join(os.getcwd(), "cache")) 18 | CACHE_PROP_ID = 458 19 | 20 | def produce_hash(self: PCell, extra: Any = None) -> str: 21 | """Produces a hash of a PCell instance based on: 22 | 1. the source code of the class and its bases. 23 | 2. the non-default parameter with which the pcell method is called 24 | 3. the name of the pcell 25 | 4. PCell's layout.dbu variable 26 | 4. extra provided arcuments 27 | """ 28 | # copy source code of class and all its ancestors 29 | source_code = "".join( 30 | [inspect.getsource(klass) for klass in self.__class__.__mro__ if issubclass(klass, PCell)] 31 | ) 32 | 33 | diff_params = dict(self.params) 34 | # str(diff_params) calls __repr__ in inner values, instead of __str__ () 35 | # therefore it would fail for instances without readable __repr__ methods 36 | str_diff_params = "{%s}" % ", ".join("%r: %s" % p for p in diff_params.items()) 37 | 38 | long_hash_pcell = sha256( 39 | (source_code + str_diff_params + self.name + str(extra)).encode() 40 | ).hexdigest() 41 | short_hash_pcell = long_hash_pcell[0:7] 42 | return short_hash_pcell 43 | 44 | 45 | def read_layout(layout: pya.Layout, gds_filename: str, disambiguation_name: str = ""): 46 | """Reads the layout in the gds file and imports all cells into 47 | layout without overwriting existing cells. 
48 | """ 49 | load_options = pya.LoadLayoutOptions() 50 | load_options.text_enabled = True 51 | load_options.set_layer_map(layer_map_dict[layout], True) 52 | load_options.properties_enabled = True 53 | 54 | # store and take away the cell names of all cells read so far 55 | # (by setting the cell name to "" the cells basically become invisible for 56 | # the following read) 57 | # take out the pcells 58 | cell_list = list(layout.each_cell()) 59 | cell_indices = {cell.name: cell.cell_index() for cell in cell_list} 60 | 61 | # this assumes that there are no duplicate names, which is true in gds (let's assert) 62 | assert len(cell_list) == len( 63 | cell_indices 64 | ), "There is a duplicate cell name in the current layout" 65 | for i in cell_indices.values(): 66 | layout.rename_cell(i, "") 67 | 68 | # Store cache cell names (these cannot be deduplicated) 69 | cache_set = set() 70 | if layout.property(CACHE_PROP_ID) is not None: 71 | cache_set |= set(layout.property(CACHE_PROP_ID).split(",")) 72 | layout.delete_property(CACHE_PROP_ID) 73 | 74 | # Read the new gds_filename 75 | lmap = layout.read(gds_filename, load_options) 76 | if layout.property(CACHE_PROP_ID) is not None: 77 | cache_set |= set(layout.property(CACHE_PROP_ID).split(",")) 78 | if cache_set: 79 | logger.debug("cache_set state: %s", cache_set) 80 | layout.set_property(CACHE_PROP_ID, ",".join(cache_set)) 81 | 82 | # in the new layout, get all cell names, assuming, again, that there are no duplicates 83 | cell_names2 = [(cell.cell_index(), cell.name) for cell in layout.each_cell()] 84 | 85 | # make those cells point to older cells if they are duplicate: 86 | # - if it is a cached cell, reuse the cell in the layout 87 | # - if it is not, then disambiguate by using the disambiguation_name 88 | # - if there is a duplicate even with the disambiguated name, add a counter 89 | 90 | if disambiguation_name != "": 91 | disambiguation_name = f"_{disambiguation_name}" 92 | # new cell name will be duplicate_name_disambiguation_name 93 | 94 | prune_cells_indices = [] 95 | used_cell_names = list(cell_indices.keys()) 96 | for i_duplicate, name_cached_cell in cell_names2: 97 | if name_cached_cell in cell_indices.keys(): 98 | if name_cached_cell.startswith("cache_") or (name_cached_cell in cache_set): 99 | # cell_indices[name_cached_cell] contains a reference to the "original" cell 100 | # we want to find every instance to duplicates (layout.cell(i_duplicate)) 101 | # and replace the cell pointer to the "original" cell. 102 | # We also want to delete the de-referenced cell. 103 | # This for loop modifies the cell in-place, hence the list around the iterator. 
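                # e.g. a duplicate named "cache_SomePCell_<hash>" that was just read from
                # the file gets its instances re-pointed to the already-loaded cell of the
                # same name, and the freshly read copy is queued for pruning below.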
104 | for parent_inst_array in list(layout.cell(i_duplicate).each_parent_inst()): 105 | cell_instance = parent_inst_array.child_inst() 106 | cell_instance.cell = layout.cell(cell_indices[name_cached_cell]) 107 | prune_cells_indices.append(i_duplicate) 108 | elif name_cached_cell + disambiguation_name not in used_cell_names: 109 | layout.rename_cell(i_duplicate, name_cached_cell + disambiguation_name) 110 | used_cell_names.append(name_cached_cell + disambiguation_name) 111 | else: 112 | k = 1 113 | while (name_cached_cell + disambiguation_name + f"_{k}") in used_cell_names: 114 | k += 1 115 | layout.rename_cell(i_duplicate, name_cached_cell + disambiguation_name + f"_{k}") 116 | used_cell_names.append(name_cached_cell + disambiguation_name + f"_{k}") 117 | 118 | for i_pruned in prune_cells_indices: 119 | logger.debug(f"WARNING: deleting cell {layout.cell(i_pruned).name}") 120 | layout.prune_cell(i_pruned, -1) 121 | 122 | # every conflict should have been caught above 123 | for name, cell_index in cell_indices.items(): 124 | layout.rename_cell(cell_index, name) 125 | 126 | layer_map_dict[layout] = lmap 127 | return lmap 128 | 129 | 130 | def cache_cell( 131 | cls: Type[PCell] = None, *, extra_hash: Any = None, cache_dir: str = CACHE_DIR 132 | ) -> Union[Type[PCell], Callable]: 133 | """Caches results of pcell call to save build time. 134 | 135 | First, it computes a hash based on: 136 | 1. the source code of the class and its bases. 137 | 2. the non-default parameters with which the pcell method is called 138 | 3. the name of the pcell 139 | 140 | Second, it saves a cell with name cache_HASH in cache_HASH.gds inside 141 | the cache folder. The port list and positions are also saved in cache_HASH.klayout.pkl, 142 | which is a pickle of the ports dictionary. 143 | 144 | Third, it wraps the pcell method so it loads the cached cell and cached port 145 | positions instead of recalculating everything. 146 | 147 | Warnings: 148 | - If the cell contents depend on something other than the contents 149 | of the hash described above, for example an external .gds file, any 150 | external change will not be seen by the caching algorithm. You have 151 | to manually delete the corresponding cache file so it gets updated 152 | in the mask.
153 | 154 | Use as a decorator: 155 | 156 | @cache_cell 157 | class MyCell(PCell): 158 | pass 159 | """ 160 | 161 | if cls is None: 162 | # tip taken from https://pybit.es/decorator-optional-argument.html 163 | return partial(cache_cell, extra_hash=extra_hash, cache_dir=cache_dir) 164 | 165 | if not CACHE_ACTIVATED: 166 | return cls 167 | 168 | # decorate draw 169 | def cache_decorator(draw): 170 | @wraps(draw) 171 | def wrapper_draw(self, cell): 172 | layout = cell.layout() 173 | try: 174 | layer_map_dict[layout] 175 | except KeyError: 176 | layer_map_dict[layout] = pya.LayerMap() 177 | 178 | # Adding the dbu of the layout in the hash (bit us in the butt last time) 179 | short_hash_pcell = produce_hash(self, extra=(layout.dbu, extra_hash)) 180 | 181 | # cache paths 182 | cache_fname = f"cache_{self.__class__.__qualname__}_{short_hash_pcell}" 183 | cache_fname_gds = f"{cache_fname}.gds" 184 | cache_fname_pkl = f"{cache_fname}.klayout.pkl" 185 | 186 | os.makedirs(cache_dir, mode=0o775, exist_ok=True) 187 | 188 | cache_fpath_gds = os.path.join(cache_dir, cache_fname_gds) 189 | cache_fpath_pkl = os.path.join(cache_dir, cache_fname_pkl) 190 | 191 | if os.path.isfile(cache_fpath_gds) and os.path.isfile(cache_fpath_pkl): 192 | with open(cache_fpath_pkl, "rb") as file: 193 | ports, read_short_hash_pcell, cellname = pickle.load( 194 | file 195 | ) # pylint: disable=unused-variable 196 | 197 | logger.debug(f"Reading from cache: {cache_fname}: {cellname}, {ports}") 198 | print("r", end="", flush=True) 199 | if not layout.has_cell(cache_fname): 200 | read_layout(layout, cache_fpath_gds, disambiguation_name=cellname) 201 | # cell.move_tree(retrieved_cell) 202 | else: 203 | if layout.has_cell(cache_fname): 204 | logger.warning( 205 | f"WARNING: {cache_fname_gds} does not exist but {cache_fname} is in layout." 206 | ) 207 | 208 | # populating .gds and .pkl 209 | empty_layout = pya.Layout() 210 | empty_layout.dbu = layout.dbu 211 | empty_cell = empty_layout.create_cell(cell.name) 212 | filled_cell, ports = draw(self, empty_cell) 213 | 214 | logger.debug(f"Writing to cache: {cache_fname}: {filled_cell.name}, {ports}") 215 | print("w", end="", flush=True) 216 | 217 | cellname, filled_cell.name = filled_cell.name, cache_fname 218 | # There can be duplicate cell names in subcells here. 219 | # We are saving a list of them inside a property named CACHE_PROP_ID 220 | # So we need to allow the properties to be saved inside the gds file (incompatible with the GDS2 standard) 221 | save_options = pya.SaveLayoutOptions() 222 | save_options.gds2_write_file_properties = True 223 | empty_layout.write(cache_fpath_gds, save_options) 224 | with open(cache_fpath_pkl, "wb") as file: 225 | pickle.dump((ports, short_hash_pcell, cellname), file) 226 | 227 | # Make sure we delete the empty_layout to not grow 228 | # helps debug 229 | layer_map_dict.pop(empty_layout, None) 230 | del empty_layout 231 | assert not layout.has_cell(cache_fname) 232 | 233 | read_layout(layout, cache_fpath_gds, disambiguation_name=cellname) 234 | 235 | # Place the imported cell into the parent (e.g. TOP) cell. 
236 | retrieved_cell = layout.cell(cache_fname) 237 | cell.insert( 238 | pya.DCellInstArray( 239 | retrieved_cell.cell_index(), 240 | pya.DTrans(pya.DTrans.R0, pya.DPoint(0, 0)), 241 | ) 242 | ) 243 | return cell, ports 244 | 245 | return wrapper_draw 246 | 247 | if hasattr(cls, "draw") and cls.draw.__name__ != "wrapper_draw": 248 | setattr(cls, "draw", cache_decorator(cls.draw)) 249 | 250 | return cls 251 | -------------------------------------------------------------------------------- /zeropdk/layout/geometry.py: -------------------------------------------------------------------------------- 1 | import logging 2 | import os 3 | from functools import lru_cache, partial 4 | from typing import Callable, Tuple 5 | import numpy as np 6 | from scipy.interpolate import interp2d 7 | from zeropdk.layout.algorithms.sampling import sample_function 8 | 9 | logger = logging.getLogger(__name__) 10 | 11 | 12 | def rotate(point, angle_rad: float): 13 | """Rotates point counter-clockwisely about its origin by an angle given in radians""" 14 | th = angle_rad 15 | x, y = point.x, point.y 16 | new_x = x * np.cos(th) - y * np.sin(th) 17 | new_y = y * np.cos(th) + x * np.sin(th) 18 | return point.__class__(new_x, new_y) 19 | 20 | def rotate_deg(point, angle_deg: float): 21 | """Rotates point counter-clockwisely about its origin by an angle given in degrees""" 22 | angle_rad = angle_deg / 180 * np.pi 23 | return rotate(point, angle_rad) 24 | 25 | 26 | rotate90 = lambda point: rotate(point, np.pi / 2) 27 | 28 | 29 | def cross_prod(p1, p2): 30 | return p1.x * p2.y - p1.y * p2.x 31 | 32 | 33 | def find_arc(A, B, C): 34 | """Finds the arc of a circle containing points A, B, C. 35 | Returns the center of the circle, and the radius: 36 | (O[Point], R) 37 | If A,B,C falls on a line, the center is None and the radius is infinite. 38 | """ 39 | 40 | # check if it is not a valid triangle 41 | AB = B - A 42 | BC = C - B 43 | area = cross_prod(AB, BC) 44 | 45 | if np.isclose(area, 0): 46 | return None, np.inf 47 | 48 | ex = AB / AB.norm() 49 | ey = rotate90(ex) 50 | 51 | D = (A + B) / 2 52 | E = (B + C) / 2 53 | 54 | LHS_sys = np.array([[BC * ex, BC * ey], [AB * ex, AB * ey]]) 55 | RHS_sys = np.array([[E * BC], [D * AB]]) 56 | 57 | sol = np.linalg.inv(LHS_sys).dot(RHS_sys) 58 | h, k = sol.flatten() 59 | 60 | O = h * ex + k * ey 61 | R = (A - O).norm() 62 | return O, R 63 | 64 | 65 | def project(v, ex, ey=None): 66 | """Compute a such that v = a * ex + b * ey""" 67 | if ey is None: 68 | ey = rotate90(ex) 69 | 70 | if cross_prod(ex, ey) == 0: 71 | raise RuntimeError(f"ex={repr(ex)} and ey={repr(ey)} are not orthogonal.") 72 | 73 | # Simple formula 74 | # https://math.stackexchange.com/questions/148199/equation-for-non-orthogonal-projection-of-a-point-onto-two-vectors-representing 75 | 76 | a = cross_prod(ey, v) / cross_prod(ey, ex) 77 | # b = cross_prod(ex, v) / cross_prod(ex, ey) 78 | 79 | # v == a * ex + b * ey 80 | return a 81 | 82 | 83 | def curve_length(curve, t0=0, t1=1): 84 | """Computes the total length of a curve. 85 | 86 | Args: 87 | curve: list of Points, or 88 | parametric function of points, to be computed from t0 to t1. 89 | """ 90 | # TODO possible bug: if the curve is a loop, it will return 0 (BAD) 91 | if isinstance(curve, list): 92 | # assuming curve is a list of points 93 | scale = (curve[-1] - curve[0]).norm() 94 | if scale <= 0: 95 | return 0 96 | coords = np.array([[point.x, point.y] for point in curve]).T 97 | dp = np.diff(coords, axis=-1) 98 | else: 99 | # assuming curve is a function. 
100 | curve_func = curve 101 | scale = (curve_func(t1) - curve_func(t0)).norm() 102 | if scale <= 0: 103 | return 0 104 | coords = lambda t: np.array([curve_func(t).x, curve_func(t).y]) 105 | _, sampled_coords = sample_function( 106 | coords, [t0, t1], tol=0.0001 / scale, min_points=100 107 | ) # 1000 times more precise than the scale 108 | dp = np.diff(sampled_coords, axis=-1) 109 | ds = np.sqrt((dp**2).sum(axis=0)) 110 | return ds.sum() 111 | 112 | 113 | def manhattan_intersection(vertical_point, horizontal_point, ex): 114 | """returns the point that intersects vertical_point's x coordinate 115 | and horizontal_point's y coordinate. 116 | 117 | Args: ex (Vector/Point): orientation of x axis. 118 | 119 | Caveat: this formula only works for orthogonal coordinate systems. 120 | """ 121 | ey = rotate90(ex) 122 | return vertical_point * ex * ex + horizontal_point * ey * ey 123 | 124 | 125 | def find_Z_orientation(P0, P1, ex): 126 | """Compute the orientation of Point P0 against Point P1 127 | P1 is assumed to be above P0. 128 | 129 | Args: ex (Vector/Point): orientation of x axis. 130 | 131 | Returns: 132 | 0 for Z-oriented and 1 for S-oriented 133 | 134 | """ 135 | if P1 * ex > P0 * ex: 136 | orient = 0 # Z-oriented 137 | else: 138 | orient = 1 # S-oriented 139 | return orient 140 | 141 | 142 | def cluster_ports(ports_from, ports_to, ex): 143 | """Given two (equal length) port arrays, divide them into clusters 144 | based on the connection orientation. The idea is that each cluster 145 | can be routed independently with an array of Z or S traces that don't 146 | touch each other. 147 | 148 | Args: 149 | - ex (Vector/Point): orientation of the axis along with the 150 | ports are placed. 151 | 152 | TODO document more. 153 | 154 | Returns: 155 | an array of k 2-tuples (port_pair_list, orientation), 156 | where k is the number of clusters, 157 | port_pair list an array of (p0, p1), 158 | and orientation is 0 for Z and 1 for S 159 | """ 160 | assert len(ports_from) == len(ports_to), "Port array must have the same length" 161 | if len(ports_from) == 0: 162 | return [] 163 | 164 | orient_old = None 165 | port_cluster = [] 166 | port_clusters = [] 167 | # sort the arrays first 168 | proj_ex = lambda p: p.position * ex 169 | ports_from = sorted(ports_from, key=proj_ex) 170 | ports_to = sorted(ports_to, key=proj_ex) 171 | for port_from, port_to in zip(ports_from, ports_to): 172 | new_cluster = False 173 | orient_new = find_Z_orientation(port_from.position, port_to.position, ex) 174 | # first pair 175 | if orient_old is None: 176 | port_cluster.append((port_from, port_to)) 177 | # the rest of the pairs 178 | elif orient_new == orient_old: 179 | # if the ports are too spaced apart, initiate new cluster 180 | right_port = min(port_from, port_to, key=proj_ex) 181 | left_port = max(port_cluster[-1], key=proj_ex) 182 | if proj_ex(right_port) - right_port.width > proj_ex(left_port) + left_port.width: 183 | new_cluster = True 184 | else: 185 | port_cluster.append((port_from, port_to)) 186 | else: 187 | new_cluster = True 188 | 189 | if new_cluster: 190 | port_clusters.append((port_cluster, orient_old)) 191 | port_cluster = [] 192 | port_cluster.append((port_from, port_to)) 193 | orient_old = orient_new 194 | port_clusters.append((port_cluster, orient_old)) 195 | return port_clusters 196 | 197 | 198 | def bezier_line(P0, P1, P2, P3): 199 | """Cubic Bézier formula 200 | 201 | Returns: 202 | Function of parameter t (1d array) 203 | 204 | Reference 205 | https://en.wikipedia.org/wiki/Bézier_curve""" 206 | 207 | 
curve_func = ( 208 | lambda t: (1 - t) ** 3 * P0 209 | + 3 * (1 - t) ** 2 * t * P1 210 | + 3 * (1 - t) * t**2 * P2 211 | + t**3 * P3 212 | ) 213 | return curve_func 214 | 215 | 216 | def curvature_bezier(P0, P1, P2, P3): 217 | """Measures the curvature of the Bézier curve at every point t 218 | 219 | Returns: 220 | Function of parameter t (1d array) 221 | 222 | References: 223 | https://en.wikipedia.org/wiki/Radius_of_curvature 224 | https://en.wikipedia.org/wiki/Bézier_curve 225 | """ 226 | b_prime = ( 227 | lambda t: 3 * (1 - t) ** 2 * (P1 - P0) 228 | + 6 * (1 - t) * t * (P2 - P1) 229 | + 3 * t**2 * (P3 - P2) 230 | ) 231 | b_second = lambda t: 6 * (1 - t) * (P2 - 2 * P1 + P0) + 6 * t * (P3 - 2 * P2 + P1) 232 | dx = lambda t: b_prime(t).x 233 | dy = lambda t: b_prime(t).y 234 | ddx = lambda t: b_second(t).x 235 | ddy = lambda t: b_second(t).y 236 | curv_func = lambda t: (dx(t) * ddy(t) - dy(t) * ddx(t)) / (dx(t) ** 2 + dy(t) ** 2) ** (3 / 2) 237 | return curv_func 238 | 239 | 240 | from scipy.optimize import minimize 241 | 242 | 243 | def max_curvature(P0, P1, P2, P3): 244 | """Gets the maximum curvature of Bezier curve""" 245 | t = np.linspace(0, 1, 300) 246 | curv = curvature_bezier(P0, P1, P2, P3)(t) 247 | max_curv = np.max(np.abs(curv.flatten())) 248 | return max_curv 249 | 250 | 251 | def _curvature_penalty(P0, P1, P2, P3): 252 | """Penalty on the curvyness of Bezier curve""" 253 | t = np.linspace(0, 1, 300) 254 | 255 | curv = np.abs(curvature_bezier(P0, P1, P2, P3)(t).flatten()) 256 | max_curv = np.max(curv) 257 | curv_initial = curv[0] 258 | curv_final = curv[-1] 259 | 260 | # this will cause the minimum curvature to be about 4 times lower 261 | # than at the origin and end points. 262 | penalty = max_curv + 2 * (curv_initial + curv_final) 263 | return penalty 264 | 265 | 266 | def fix_angle(angle): 267 | """Returns the angle in the -pi to pi range""" 268 | return (angle + np.pi) % (2 * np.pi) - np.pi 269 | 270 | 271 | def logistic_penalty(x, a): 272 | return 1 / (1 + np.exp(-x / a)) 273 | 274 | 275 | # #### The following classes (Point and Line) exist only to speed up 276 | # the code in bezier_optimal. klayout objects have a slower 277 | # interface 278 | 279 | 280 | MAGIC_NUMBER = 15.0 281 | from numpy import sqrt 282 | 283 | 284 | class _Point(object): 285 | """Defines a point with two coordinates. Mimics pya.Point""" 286 | 287 | def __init__(self, x, y): 288 | self.x = x 289 | self.y = y 290 | 291 | def __add__(self, other): 292 | x = self.x + other.x 293 | y = self.y + other.y 294 | return self.__class__(x, y) 295 | 296 | def __sub__(self, other): 297 | x = self.x - other.x 298 | y = self.y - other.y 299 | return self.__class__(x, y) 300 | 301 | __array_priority__ = MAGIC_NUMBER #: This allows rmul to be called first. 
See https://stackoverflow.com/questions/38229953/array-and-rmul-operator-in-python-numpy""" 302 | 303 | def __mul__(self, factor): 304 | """This implements P * factor""" 305 | if isinstance(factor, np.ndarray): 306 | # Return a Line instead 307 | return _Line(self.x * factor, self.y * factor) 308 | elif isinstance(factor, _Point): 309 | return self.x * factor.x + self.y * factor.y 310 | return self.__class__(self.x * factor, self.y * factor) 311 | 312 | def __rmul__(self, factor): 313 | """This implements factor * P""" 314 | if isinstance(factor, np.ndarray): 315 | return self.__mul__(factor) 316 | return self.__class__(self.x * factor, self.y * factor) 317 | 318 | def __eq__(self, other): 319 | return self.x == other.x and self.y == other.y 320 | 321 | def __str__(self): 322 | return f"Point({self.x}, {self.y})" 323 | 324 | def norm(self): 325 | return sqrt(self.x**2 + self.y**2) 326 | 327 | 328 | class _Line(_Point): 329 | """Defines a line""" 330 | 331 | def __init__(self, x, y): 332 | self.x, self.y = np.asarray(x), np.asarray(y) 333 | assert np.shape(self.x) == np.shape(self.y) 334 | 335 | def __eq__(self, other): 336 | return np.all(self.x == other.x) and np.all(self.y == other.y) 337 | 338 | 339 | @lru_cache(maxsize=128) 340 | def _original_bezier_optimal(angle0: float, angle3: float) -> Tuple[float, float]: 341 | """This is a reduced problem of the bézier connection. 342 | 343 | Args: 344 | angle0: starting angle in radians 345 | angle3: ending angle in radians 346 | 347 | This assumes P0 = (0,0), P3 = (1,0). 348 | """ 349 | 350 | angle0 = fix_angle(angle0) 351 | angle3 = fix_angle(angle3) 352 | 353 | # print(f"Solving for angles: {angle0}, {angle3}", end='...\t\t') 354 | 355 | def J(a, b, a_max, b_max, cross=False): 356 | """Energy function for bezier optimization""" 357 | P0 = _Point(0, 0) 358 | P3 = _Point(1, 0) 359 | P1 = P0 + a * _Point(np.cos(angle0), np.sin(angle0)) 360 | P2 = P3 - b * _Point(np.cos(angle3), np.sin(angle3)) 361 | 362 | main_penalty = _curvature_penalty(P0, P1, P2, P3) 363 | 364 | # Constraint penalty 365 | constraint_penalty = np.exp(-a / 0.05) 366 | constraint_penalty += np.exp(-b / 0.05) 367 | 368 | # Only for potentially crossing P0-P1, P1-P2 segments (prevents loops) 369 | if cross: 370 | constraint_penalty -= np.log(np.maximum(1e-3, np.minimum(a_max - a, b_max - b))) 371 | else: 372 | constraint_penalty += np.exp((a - a_max) / 0.05) 373 | constraint_penalty += np.exp((b - b_max) / 0.05) 374 | 375 | # print(f"{a:.2f}, {b:.2f}: {main_penalty}/{constraint_penalty}") 376 | return main_penalty + constraint_penalty 377 | 378 | MAX = 1.5 379 | 380 | # If these angles have opposite signs, then calculate the bounds 381 | # so that P1 and P2 do not *both* hit the intersection of the 382 | # initial tangents. This prevents loops. 
383 | if angle0 * angle3 < 0 and np.abs(angle3 - angle0) < np.pi: # potential cross 384 | # Initialize problem 385 | a = b = 0.05 386 | 387 | third_angle = np.pi - np.abs(angle3) - np.abs(angle0) 388 | a_bound = min(2 * np.abs(np.sin(angle3)) / np.sin(third_angle), MAX * 3) 389 | b_bound = min(2 * np.abs(np.sin(angle0)) / np.sin(third_angle), MAX * 3) 390 | 391 | initial_simplex = np.array([[a, b], [a * 1.1, b], [a, b * 1.1]]) 392 | 393 | result = minimize( 394 | # lambda x: J(x[0], x[1], MAX * 3, MAX * 3), 395 | lambda x: J(x[0], x[1], a_bound, b_bound, cross=True), 396 | np.array([a, b]), 397 | method="Nelder-Mead", 398 | options=dict(initial_simplex=initial_simplex), 399 | ) 400 | else: # no potential cross 401 | # Initialize problem 402 | a = b = 0.3 403 | initial_simplex = np.array([[a, b], [a * 1.1, b], [a, b * 1.1]]) 404 | a_bound = b_bound = MAX 405 | 406 | result = minimize( 407 | lambda x: J(x[0], x[1], a_bound, b_bound, cross=False), 408 | np.array([a, b]), 409 | method="Nelder-Mead", 410 | options=dict(initial_simplex=initial_simplex), 411 | ) 412 | 413 | if result.success: 414 | a, b = result.x[0], result.x[1] 415 | else: 416 | if result.message == "Maximum number of function evaluations has been exceeded.": 417 | a, b = result.x[0], result.x[1] 418 | else: 419 | print(f"Could not optimize. Exited with message:{result.message}") 420 | # print("a={:.3f}<{:.3f} b={:.3f}<{:.3f}".format(a, a_bound, b, b_bound)) 421 | return a, b 422 | 423 | 424 | # STABLE MEMOIZATION 425 | 426 | pwd = os.path.dirname(os.path.realpath(__file__)) 427 | bezier_optimal_fpath = os.path.join(pwd, "bezier_optimal.npz") 428 | 429 | 430 | def memoized_bezier_optimal(angle0: float, angle3: float, file: str) -> Tuple[float, float]: 431 | try: 432 | npzfile = np.load(file) 433 | x = npzfile["x"] 434 | y = npzfile["y"] 435 | z_a = npzfile["z_a"] 436 | z_b = npzfile["z_b"] 437 | 438 | a = interp2d(x, y, z_a)(angle0, angle3)[0] 439 | b = interp2d(x, y, z_b)(angle0, angle3)[0] 440 | return a, b 441 | except Exception: 442 | logger.error(f"Optimal Bezier interpolation has failed for angles({angle0}, {angle3}).") 443 | return _original_bezier_optimal(angle0, angle3) 444 | 445 | _bezier_optimal: Callable[[float, float], Tuple[float, float]] 446 | 447 | if os.path.isfile(bezier_optimal_fpath): 448 | _bezier_optimal = partial(memoized_bezier_optimal, file=bezier_optimal_fpath) 449 | else: 450 | _bezier_optimal = _original_bezier_optimal 451 | 452 | 453 | def bezier_optimal(P0, P3, angle0: float, angle3: float): 454 | """Computes the optimal bezier curve from P0 to P3 with angles 0 and 3 455 | 456 | Args: 457 | P0, P3: Point 458 | Angles in degrees 459 | """ 460 | 461 | angle0 = angle0 * np.pi / 180 462 | angle3 = angle3 * np.pi / 180 463 | 464 | vector = P3 - P0 465 | angle_m = np.arctan2(vector.y, vector.x) 466 | a, b = _bezier_optimal(angle0 - angle_m, angle3 - angle_m) 467 | 468 | scaling = vector.norm() 469 | if scaling > 0: 470 | P1 = a * scaling * _Point(np.cos(angle0), np.sin(angle0)) + P0 471 | P2 = P3 - b * scaling * _Point(np.cos(angle3), np.sin(angle3)) 472 | curve_func = bezier_line(P0, P1, P2, P3) 473 | with np.errstate(divide="ignore"): # type: ignore 474 | # warn if minimum radius is smaller than 3um 475 | min_radius = np.true_divide(1, max_curvature(P0, P1, P2, P3)) 476 | if min_radius < 3: 477 | print( 478 | "Warning! 
Min radius: {:.2f} um".format( 479 | np.true_divide(1, max_curvature(P0, P1, P2, P3)) 480 | ) 481 | ) 482 | # print("Total length: {:.3f} um".format(curve_length(curve_func, 0, 1))) 483 | return curve_func 484 | else: 485 | raise RuntimeError(f"Error: calling bezier between two identical points: {P0}, {P3}") 486 | 487 | 488 | # Allow us to use these functions directly with pya.DPoints 489 | 490 | try: 491 | import klayout.db as pya 492 | 493 | _bezier_optimal_pure = bezier_optimal 494 | 495 | def bezier_optimal(P0, P3, angle0: float, angle3: float): 496 | """If inside KLayout, return computed list of KLayout points.""" 497 | P0 = _Point(P0.x, P0.y) 498 | P3 = _Point(P3.x, P3.y) 499 | scale = (P3 - P0).norm() # rough length. 500 | # if scale > 1000: # if in nanometers, convert to microns 501 | # scale /= 1000 502 | # This function returns a np.array of Points. 503 | # We need to convert to array of Point coordinates 504 | new_bezier_line = _bezier_optimal_pure(P0, P3, angle0, angle3) 505 | bezier_point_coordinates = lambda t: np.array([new_bezier_line(t).x, new_bezier_line(t).y]) 506 | 507 | t_sampled, bezier_point_coordinates_sampled = sample_function( 508 | bezier_point_coordinates, [0, 1], tol=0.005 / scale 509 | ) # tol about 5 nm 510 | 511 | # The following adds two points right after the first and before the last point 512 | # to guarantee that the first edge of the path goes out in the direction 513 | # of the 'port'. 514 | 515 | insert_at = np.argmax(0.001 / scale < t_sampled) 516 | t_sampled = np.insert(t_sampled, insert_at, 0.001 / scale) 517 | bezier_point_coordinates_sampled = np.insert( 518 | bezier_point_coordinates_sampled, 519 | insert_at, 520 | bezier_point_coordinates(0.001 / scale), 521 | axis=1, 522 | ) # add a point right after the first one 523 | insert_at = np.argmax(1 - 0.001 / scale < t_sampled) 524 | # t_sampled = np.insert(t_sampled, insert_at, 1 - 0.001 / scale) 525 | bezier_point_coordinates_sampled = np.insert( 526 | bezier_point_coordinates_sampled, 527 | insert_at, 528 | bezier_point_coordinates(1 - 0.001 / scale), 529 | axis=1, 530 | ) # add a point right before the last one 531 | # bezier_point_coordinates_sampled = \ 532 | # np.append(bezier_point_coordinates_sampled, np.atleast_2d(bezier_point_coordinates(1 + .001 / scale)).T, 533 | # axis=1) # finish the waveguide a little bit after 534 | 535 | return [pya.DPoint(x, y) for (x, y) in zip(*(bezier_point_coordinates_sampled))] 536 | 537 | except ImportError: 538 | logger.error("klayout not detected. It is a requirement of zeropdk for now.") 539 | raise 540 | -------------------------------------------------------------------------------- /zeropdk/layout/polygons.py: -------------------------------------------------------------------------------- 1 | from typing import Iterable 2 | from zeropdk.layout import insert_shape 3 | from zeropdk.layout.geometry import cross_prod, project, rotate90 4 | 5 | import klayout.db as kdb 6 | 7 | 8 | def box(point1, point3, ex, ey): 9 | """Returns a polygon of a box defined by point1, point3 and orientation ex. 
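    Illustrative example (a sketch, assuming klayout.db DPoint/DVector inputs with
    zeropdk's point arithmetic extensions):

        box(pya.DPoint(0, 0), pya.DPoint(10, 5),
            ex=pya.DVector(1, 0), ey=pya.DVector(0, 1))
        # -> 10 x 5 rectangle with p1=(0, 0), p2=(0, 5), p3=(10, 5), p4=(10, 0)

    Corner naming convention: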
10 | p2 ----- p3 11 | | | 12 | p1 ----- p4 13 | ex ---> 14 | 15 | """ 16 | 17 | point2 = project(point3 - point1, ey, ex) * ey + point1 18 | point4 = point1 + point3 - point2 19 | return kdb.DSimplePolygon([point1, point2, point3, point4]) 20 | 21 | 22 | def layout_box(cell, layer, point1, point3, ex): 23 | """Lays out a box 24 | 25 | Args: 26 | point1: bottom-left point 27 | point3: top-right point 28 | 29 | """ 30 | 31 | ey = rotate90(ex) 32 | polygon = box(point1, point3, ex, ey) 33 | insert_shape(cell, layer, polygon) 34 | return polygon 35 | 36 | 37 | def rectangle(center, width, height, ex, ey): 38 | """ 39 | returns the polygon of a rectangle centered at center, 40 | aligned with ex, with width and height in microns 41 | 42 | Args: 43 | center: pya.DPoint (um units) 44 | width (x axis): float (um units) 45 | height (y axis): float (um unit) 46 | ex: orientation of x axis 47 | ey: orientation of y axis 48 | """ 49 | 50 | if cross_prod(ex, ey) == 0: 51 | raise RuntimeError(f"ex={repr(ex)} and ey={repr(ey)} are not orthogonal.") 52 | 53 | point1 = center - width / 2 * ex - height / 2 * ey 54 | point3 = center + width / 2 * ex + height / 2 * ey 55 | 56 | return box(point1, point3, ex=ex, ey=ey) 57 | 58 | 59 | def square(center, width, ex, ey): 60 | """ 61 | returns the polygon of a square centered at center, 62 | aligned with ex, with width in microns 63 | 64 | Args: 65 | center: pya.DPoint (um units) 66 | width: float (um units) 67 | ex: orientation 68 | """ 69 | return rectangle(center, width, width, ex=ex, ey=ey) 70 | 71 | 72 | def layout_square(cell, layer, center, width, ex=None): 73 | """Lays out a square in a layer 74 | 75 | Args: 76 | center: pya.DPoint (um units) 77 | width: float (um units) 78 | ex: orientation 79 | 80 | """ 81 | 82 | if ex is None: 83 | ex = pya.DPoint(1, 0) 84 | ey = rotate90(ex) 85 | 86 | shape = square(center, width, ex, ey) 87 | insert_shape(cell, layer, shape) 88 | return shape 89 | 90 | 91 | def layout_rectangle(cell, layer, center, width, height, ex): 92 | """Lays out a rectangle 93 | 94 | Args: 95 | center: pya.DPoint (um units) 96 | width: float (um units) 97 | height: float (um unit) 98 | ex: orientation 99 | 100 | """ 101 | 102 | ey = rotate90(ex) 103 | 104 | shape = rectangle(center, width, height, ex, ey) 105 | insert_shape(cell, layer, shape) 106 | return shape 107 | 108 | 109 | # TODO: Reorganize later 110 | pya = kdb 111 | import numpy as np 112 | from math import pi 113 | 114 | 115 | def layout_path(cell, layer: kdb.LayerInfo, point_iterator: Iterable[kdb.DPoint], w: float): 116 | """ Simple wrapper for pya.DPath.""" 117 | path = pya.DPath(list(point_iterator), w, 0, 0).to_itype(cell.layout().dbu) 118 | cell.shapes(layer).insert(pya.Path.from_dpath(path)) 119 | 120 | 121 | def layout_path_with_ends(cell, layer: kdb.LayerInfo, point_iterator: Iterable[kdb.DPoint], w: float): 122 | """ Simple wrapper for pya.DPath.""" 123 | dpath = pya.DPath(list(point_iterator), w, w / 2, w / 2) 124 | cell.shapes(layer).insert(dpath) 125 | 126 | 127 | def append_relative(points, *relative_vectors): 128 | """Appends to list of points in relative steps: 129 | It takes a list of points, and adds new points to it in relative coordinates. 130 | For example, if you call append_relative([A, B], C, D), the result will be [A, B, B+C, B+C+D]. 
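    Numeric sketch (illustrative; assumes pya.DPoint/DVector arithmetic):

        pts = [pya.DPoint(0, 0), pya.DPoint(10, 0)]
        append_relative(pts, pya.DVector(0, 5), pya.DVector(5, 0))
        # pts is now [(0, 0), (10, 0), (10, 5), (15, 5)]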
131 | """ 132 | try: 133 | if len(points) > 0: 134 | origin = points[-1] 135 | except TypeError: 136 | raise TypeError("First argument must be a list of points") 137 | 138 | for vector in relative_vectors: 139 | points.append(origin + vector) 140 | origin = points[-1] 141 | return points 142 | 143 | 144 | from zeropdk.layout.algorithms import sample_function 145 | 146 | 147 | def layout_ring(cell, layer, center, r, w): 148 | """ 149 | function to produce the layout of a ring 150 | cell: layout cell to place the layout 151 | layer: which layer to use 152 | center: origin DPoint 153 | r: radius 154 | w: waveguide width 155 | units in microns 156 | 157 | """ 158 | 159 | # outer arc 160 | # optimal sampling 161 | assert r - w / 2 > 0 162 | radius = r + w / 2 163 | arc_function = lambda t: np.array([radius * np.cos(t), radius * np.sin(t)]) 164 | t, coords = sample_function(arc_function, [0, 2 * pi], tol=0.002 / radius) 165 | 166 | # create original waveguide poligon prior to clipping and rotation 167 | points_hull = [center + pya.DPoint(x, y) for x, y in zip(*coords)] 168 | del points_hull[-1] 169 | 170 | radius = r - w / 2 171 | arc_function = lambda t: np.array([radius * np.cos(t), radius * np.sin(t)]) 172 | t, coords = sample_function(arc_function, [0, 2 * pi], tol=0.002 / radius) 173 | 174 | # create original waveguide poligon prior to clipping and rotation 175 | points_hole = [center + pya.DPoint(x, y) for x, y in zip(*coords)] 176 | del points_hole[-1] 177 | 178 | dpoly = pya.DPolygon(list(reversed(points_hull))) 179 | dpoly.insert_hole(points_hole) 180 | dpoly.compress(True) 181 | insert_shape(cell, layer, dpoly) 182 | return dpoly 183 | 184 | 185 | def layout_circle( 186 | cell, layer, center, r, ex=None, x_bounds=(-np.inf, np.inf), y_bounds=(-np.inf, np.inf) 187 | ): 188 | """ 189 | function to produce the layout of a filled circle 190 | cell: layout cell to place the layout 191 | layer: which layer to use 192 | center: origin DPoint 193 | r: radius 194 | x_bounds and y_bounds relative to the center, before rotation by ex. 195 | units in microns 196 | optimal sampling 197 | """ 198 | 199 | arc_function = lambda t: np.array([r * np.cos(t), r * np.sin(t)]) 200 | t, coords = sample_function(arc_function, [0, 2 * np.pi - 0.001], tol=0.002 / r) 201 | 202 | # dbu = cell.layout().dbu 203 | dpolygon = pya.DSimplePolygon([pya.DPoint(x, y) for x, y in zip(*coords)]) 204 | # clip dpolygon to bounds 205 | dpolygon.clip(x_bounds=x_bounds, y_bounds=y_bounds) 206 | # Transform points (translation + rotation) 207 | dpolygon.transform_and_rotate(center, ex) 208 | dpolygon.compress(True) 209 | insert_shape(cell, layer, dpolygon) 210 | return dpolygon 211 | 212 | 213 | layout_disk = layout_circle 214 | 215 | 216 | def layout_donut(cell, layer, center, r1, r2): 217 | """Layout donut shape. 
218 | cell: layout cell to place the layout 219 | layer: which layer to use 220 | center: origin DPoint (not affected by ex) 221 | r1: internal radius 222 | r2: external radius 223 | """ 224 | 225 | assert r2 > r1 226 | 227 | arc_function = lambda t: np.array([center.x + r2 * np.cos(t), center.y + r2 * np.sin(t)]) 228 | t, coords = sample_function(arc_function, [0, 2 * np.pi - 0.001], tol=0.002 / r2) 229 | 230 | external_points = [pya.DPoint(x, y) for x, y in zip(*coords)] 231 | 232 | arc_function = lambda t: np.array([center.x + r1 * np.cos(-t), center.y + r1 * np.sin(-t)]) 233 | t, coords = sample_function(arc_function, [0, 2 * np.pi - 0.001], tol=0.002 / r1) 234 | 235 | internal_points = [pya.DPoint(x, y) for x, y in zip(*coords)] 236 | 237 | dpoly = pya.DPolygon(external_points) 238 | dpoly.insert_hole(internal_points) 239 | insert_shape(cell, layer, dpoly) 240 | return dpoly 241 | 242 | 243 | def layout_section( 244 | cell, 245 | layer, 246 | center, 247 | r2, 248 | theta_start, 249 | theta_end, 250 | ex=None, 251 | x_bounds=(-np.inf, np.inf), 252 | y_bounds=(-np.inf, np.inf), 253 | ): 254 | """Layout section of a circle. 255 | cell: layout cell to place the layout 256 | layer: which layer to use 257 | center: origin DPoint (not affected by ex) 258 | r2: radius 259 | theta_start, theta_end: angle in radians 260 | x_bounds and y_bounds relative to the center, before rotation by ex. 261 | units in microns 262 | returns a dpolygon 263 | """ 264 | 265 | assert r2 > 0 266 | 267 | # optimal sampling 268 | arc_function = lambda t: np.array([r2 * np.cos(t), r2 * np.sin(t)]) 269 | t, coords = sample_function(arc_function, [theta_start, theta_end], tol=0.002 / r2) 270 | 271 | # # This yields a better polygon 272 | if theta_end < theta_start: 273 | theta_start, theta_end = theta_end, theta_start 274 | 275 | coords = np.insert( 276 | coords, 0, arc_function(theta_start - 0.001), axis=1 277 | ) # start the waveguide a little bit before 278 | coords = np.append( 279 | coords, np.atleast_2d(arc_function(theta_end + 0.001)).T, axis=1 280 | ) # finish the waveguide a little bit after 281 | 282 | # create original waveguide poligon prior to clipping and rotation 283 | dpoints_list = [pya.DPoint(x, y) for x, y in zip(*coords)] 284 | dpolygon = pya.DSimplePolygon(dpoints_list + [pya.DPoint(0, 0)]) 285 | 286 | # clip dpolygon to bounds 287 | dpolygon.clip(x_bounds=x_bounds, y_bounds=y_bounds) 288 | # Transform points (translation + rotation) 289 | dpolygon.transform_and_rotate(center, ex) 290 | dpolygon.compress(True) 291 | dpolygon.layout(cell, layer) 292 | return dpolygon 293 | 294 | 295 | def layout_arc( 296 | cell, 297 | layer, 298 | center, 299 | r, 300 | w, 301 | theta_start, 302 | theta_end, 303 | ex=None, 304 | x_bounds=(-np.inf, np.inf), 305 | y_bounds=(-np.inf, np.inf), 306 | ): 307 | """function to produce the layout of an arc 308 | cell: layout cell to place the layout 309 | layer: which layer to use 310 | center: origin DPoint (not affected by ex) 311 | r: radius 312 | w: waveguide width 313 | theta_start, theta_end: angle in radians 314 | x_bounds and y_bounds relative to the center, before rotation by ex. 315 | units in microns 316 | returns a dpolygon 317 | 318 | """ 319 | # fetch the database parameters 320 | 321 | if r <= 0: 322 | raise RuntimeError(f"Please give me a positive radius. 
Bad r={r}") 323 | 324 | # optimal sampling 325 | if theta_end < theta_start: 326 | theta_start, theta_end = theta_end, theta_start 327 | 328 | arc_function = lambda t: np.array([r * np.cos(t), r * np.sin(t)]) 329 | t, coords = sample_function(arc_function, [theta_start, theta_end], tol=0.002 / r) 330 | 331 | dt = 0.0001 332 | # # This yields a better polygon 333 | insert_at = np.argmax(theta_start + dt < t) 334 | t = np.insert(t, insert_at, theta_start + dt) 335 | coords = np.insert( 336 | coords, insert_at, arc_function(theta_start + dt), axis=1 337 | ) # start the second point a little bit after the first 338 | 339 | insert_at = np.argmax(theta_end - dt < t) 340 | t = np.insert(t, insert_at, theta_end - dt) 341 | coords = np.insert( 342 | coords, insert_at, arc_function(theta_end - dt), axis=1 343 | ) # start the second to last point a little bit before the final 344 | 345 | # create original waveguide poligon prior to clipping and rotation 346 | dpoints_list = [pya.DPoint(x, y) for x, y in zip(*coords)] 347 | from zeropdk.layout import waveguide_dpolygon 348 | 349 | dpolygon = waveguide_dpolygon(dpoints_list, w, cell.layout().dbu) 350 | 351 | # clip dpolygon to bounds 352 | dpolygon.clip(x_bounds=x_bounds, y_bounds=y_bounds) 353 | # Transform points (translation + rotation) 354 | dpolygon.transform_and_rotate(center, ex) 355 | dpolygon.compress(True) 356 | dpolygon.layout(cell, layer) 357 | return dpolygon 358 | 359 | 360 | def layout_arc_degree( 361 | cell, 362 | layer, 363 | center, 364 | r, 365 | w, 366 | theta_start, 367 | theta_end, 368 | ex=None, 369 | x_bounds=(-np.inf, np.inf), 370 | y_bounds=(-np.inf, np.inf), 371 | ): 372 | """same as layout_arc, but with theta in degrees instead of radians""" 373 | 374 | theta_start *= np.pi / 180 375 | theta_end *= np.pi / 180 376 | return layout_arc( 377 | cell, 378 | layer, 379 | center, 380 | r, 381 | w, 382 | theta_start, 383 | theta_end, 384 | ex=ex, 385 | x_bounds=x_bounds, 386 | y_bounds=y_bounds, 387 | ) 388 | 389 | 390 | def layout_arc2( 391 | cell, 392 | layer, 393 | center, 394 | r1, 395 | r2, 396 | theta_start, 397 | theta_end, 398 | ex=None, 399 | x_bounds=(-np.inf, np.inf), 400 | y_bounds=(-np.inf, np.inf), 401 | ): 402 | """modified layout_arc with r1 and r2, instead of r (radius) and w (width).""" 403 | r1, r2 = min(r1, r2), max(r1, r2) 404 | 405 | r = (r1 + r2) / 2 406 | w = r2 - r1 407 | return layout_arc( 408 | cell, 409 | layer, 410 | center, 411 | r, 412 | w, 413 | theta_start, 414 | theta_end, 415 | ex=ex, 416 | x_bounds=x_bounds, 417 | y_bounds=y_bounds, 418 | ) 419 | 420 | 421 | def layout_arc_with_drc_exclude( 422 | cell, layer, drc_layer, center, r, w, theta_start, theta_end, ex=None, **kwargs 423 | ): 424 | """Layout arc with drc exclude squares on sharp corners""" 425 | dpoly = layout_arc(cell, layer, center, r, w, theta_start, theta_end, ex, **kwargs) 426 | dpoly.layout_drc_exclude(cell, drc_layer, ex) 427 | return dpoly 428 | 429 | 430 | def layout_arc2_with_drc_exclude( 431 | cell, layer, drc_layer, center, r1, r2, theta_start, theta_end, ex=None, **kwargs 432 | ): 433 | """Layout arc2 with drc exclude squares on sharp corners""" 434 | dpoly = layout_arc2(cell, layer, center, r1, r2, theta_start, theta_end, ex, **kwargs) 435 | dpoly.layout_drc_exclude(cell, drc_layer, ex) 436 | return dpoly 437 | -------------------------------------------------------------------------------- /zeropdk/layout/routing.py: -------------------------------------------------------------------------------- 1 | """ Module containing 
routines for routing optical and metal waveguides.""" 2 | 3 | import logging 4 | import math 5 | import numpy as np 6 | import pya 7 | from zeropdk.layout.geometry import bezier_optimal 8 | 9 | 10 | # from siepic_ebeam_pdk import EBEAM_TECH 11 | from zeropdk.layout.geometry import rotate90, manhattan_intersection, cluster_ports 12 | from zeropdk.layout.waveguides import ( 13 | layout_waveguide, 14 | layout_waveguide_angle, 15 | layout_waveguide_angle2, 16 | ) 17 | 18 | logger = logging.getLogger(__name__) 19 | 20 | WAVEGUIDE_RADIUS = 10 21 | WAVEGUIDE_WIDTH = 0.5 22 | TAPER_WIDTH = 3 23 | TAPER_LENGTH = 20 24 | 25 | 26 | # The function below is just a reference. You need to provide an EBEAM_TECH 27 | # or replace the layer in the call to layout_waveguide_from_points 28 | def layout_ebeam_waveguide_from_points( 29 | cell, points_list, radius=None, width=None, taper_width=None, taper_length=None 30 | ): 31 | """Takes a list of points and lays out a rounded waveguide with optional tapers""" 32 | 33 | TECHNOLOGY = EBEAM_TECH 34 | if radius is None: 35 | radius = WAVEGUIDE_RADIUS 36 | if width is None: 37 | width = WAVEGUIDE_WIDTH 38 | if taper_width is None: 39 | taper_width = TAPER_WIDTH 40 | if taper_length is None: 41 | taper_length = TAPER_LENGTH 42 | 43 | from .waveguide_rounding import layout_waveguide_from_points 44 | 45 | layout_waveguide_from_points( 46 | cell, 47 | TECHNOLOGY.layers["Si"], 48 | points_list, 49 | width, 50 | radius, 51 | taper_width, 52 | taper_length, 53 | ) 54 | 55 | return cell 56 | 57 | 58 | def ensure_layer(layout, layer): 59 | if isinstance(layer, pya.LayerInfo): 60 | return layout.layer(layer) 61 | elif isinstance(layer, type(1)): 62 | return layer 63 | else: 64 | logger.error(f"{layer} not recognized") 65 | 66 | 67 | def common_layout_manhattan_traces( 68 | cell, layer1, layer2, layervia, via_cell_placer, path, ex, initiate_with_via=False 69 | ): 70 | """Lays out a manhattan trace, potentially with vias 71 | 72 | Args: 73 | layer1 and layer2 are given to layout.LayerInfo(layer), generally 74 | layer2 is on top 75 | via_cell_placer: returns a cell when called with 76 | via_cell_placer(parent_cell, pya.DPoint origin, width, layer1, layer2, layervia, ex) 77 | path: list of tuples containing necessary info ((x, y) or pya.DPoint, layer, width) 78 | 79 | Returns: 80 | path 81 | 82 | Algorithm places a via when there is a change of layers. To terminate with a via, 83 | have the last layer be different than the penultimate one. 
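    Illustrative path (a sketch; m1, m2 and via_layer are hypothetical pya.LayerInfo
    objects, place_via a hypothetical callable matching the via_cell_placer signature,
    coordinates in microns):

        path = [
            ((0, 0), m1, 10.0),    # start on m1, 10 um wide
            ((50, 0), m1, 10.0),
            ((50, 80), m2, 8.0),   # layer change -> via placed at this point
            ((120, 80), m2, 8.0),
        ]
        common_layout_manhattan_traces(cell, m1, m2, via_layer, place_via,
                                       path, ex=pya.DVector(1, 0))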
84 | """ 85 | 86 | assert isinstance(ex, (pya.DPoint, pya.DVector)) 87 | ey = rotate90(ex) 88 | 89 | first_point, _, first_width = path[0] 90 | if initiate_with_via: 91 | via_cell_placer(cell, first_point, first_width, layer1, layer2, layervia, ex) 92 | 93 | points_list = [] 94 | widths_list = [] 95 | _, previous_layer, _ = path[0] 96 | layout = cell.layout() 97 | 98 | for point, layer, width in path: 99 | if isinstance(point, tuple): # point are (x, y) coordinates 100 | x, y = point 101 | point = x * ex + y * ey 102 | else: 103 | assert isinstance(point, (pya.DPoint, pya.DVector)) 104 | if isinstance(point, pya.DVector): 105 | point = pya.DPoint(point) 106 | 107 | points_list.append(point) # store points 108 | widths_list.append(width) 109 | if layer != previous_layer: # time to place a via and layout 110 | layout_waveguide( 111 | cell, 112 | ensure_layer(layout, previous_layer), 113 | points_list, 114 | widths_list, 115 | smooth=True, 116 | ) 117 | 118 | via_cell_placer(cell, point, width, layer1, layer2, layervia, ex) 119 | 120 | # delete all but the last point 121 | del points_list[:-1] 122 | del widths_list[:-1] 123 | previous_layer = layer 124 | 125 | # layout last trace 126 | if len(points_list) >= 2: 127 | layout_waveguide( 128 | cell, 129 | ensure_layer(layout, previous_layer), 130 | points_list, 131 | widths_list, 132 | smooth=True, 133 | ) 134 | 135 | return path 136 | 137 | 138 | def layout_manhattan_traces(cell, path, ex): 139 | def via_cell_placer(*args, **kwargs): 140 | pass 141 | 142 | return common_layout_manhattan_traces( 143 | cell, None, None, None, via_cell_placer, path, ex, initiate_with_via=False 144 | ) 145 | 146 | 147 | def connect_ports_L(cell, cplayer, ports_from, ports_to, ex): 148 | """ Connects ports ports_from to ports_to, always leaving vertically (with respect to ex)""" 149 | 150 | ey = rotate90(ex) 151 | for port_from, port_to in zip(ports_from, ports_to): 152 | assert port_from.direction in [ey, -ey] 153 | o_y = ey if port_to.position * ey > port_from.position * ey else -ey 154 | o_x = ex if port_to.position * ex > port_from.position * ex else -ex 155 | 156 | middle_point = manhattan_intersection(port_from.position, port_to.position, ex) 157 | layout_waveguide( 158 | cell, 159 | ensure_layer(cell.layout(), cplayer), 160 | [port_from.position, middle_point + port_to.width * 0.5 * o_y], 161 | port_from.width, 162 | ) 163 | layout_waveguide( 164 | cell, 165 | ensure_layer(cell.layout(), cplayer), 166 | [middle_point - port_from.width * 0.5 * o_x, port_to.position], 167 | port_to.width, 168 | ) 169 | 170 | 171 | def compute_paths_from_clusters( 172 | ports_clusters, layer, ex, pitch=None, middle_taper=False, initial_height=0 173 | ): 174 | """ 175 | Args: 176 | - middle_taper: Adds a middle point in the Z-shaped trace attempting to avoid collisions and DRC errors. 177 | provide a pitch for optical waveguides. electrical waveguides are figured 178 | out automatically. 
179 | path: list of tuples containing necessary info (pya.DPoint, layer, width) 180 | """ 181 | 182 | Z = 0 183 | S = 1 184 | ey = rotate90(ex) 185 | 186 | paths = [] 187 | 188 | for ports_cluster, orientation in ports_clusters: 189 | assert orientation in (Z, S) 190 | 191 | # start from the lowest height Z trace 192 | height = initial_height 193 | if orientation == S: 194 | ports_iterator = list(iter(ports_cluster)) 195 | elif orientation == Z: 196 | ports_iterator = list(reversed(ports_cluster)) 197 | 198 | is_to_top = is_to_bottom = False 199 | # check which row is on the top: 200 | for port_from, port_to in ports_iterator: 201 | if (port_to.position - port_from.position) * ey > 0: 202 | is_to_top = True or is_to_top 203 | else: 204 | is_to_bottom = True or is_to_bottom 205 | 206 | assert not ( 207 | is_to_bottom and is_to_top 208 | ), "There must be a line dividing the top and bottom port rows. Maybe you are using the wrong ex argument?" 209 | 210 | if is_to_top: 211 | offset_port_from = max(port_from.position * ey for port_from, _ in ports_iterator) 212 | 213 | else: 214 | offset_port_from = min(port_from.position * ey for port_from, _ in ports_iterator) 215 | 216 | paths_cluster = [] 217 | for port_from, port_to in ports_iterator: 218 | 219 | # # Make port_from be the one with largest width 220 | # if port_from.width < port_to.width: 221 | # port_from, port_to = port_to, port_from 222 | 223 | P0 = port_from.position # + port_from.direction * port_from.width / 2 224 | P3 = port_to.position # + port_to.direction * port_to.width / 2 225 | 226 | if pitch is None: 227 | new_pitch = max(port_from.width, port_to.width) * 1.5 228 | else: 229 | new_pitch = max(max(port_from.width, port_to.width), pitch) 230 | 231 | height += new_pitch 232 | new_height = height + abs(offset_port_from - P0 * ey) 233 | paths_cluster.append( 234 | append_Z_trace_vertical( 235 | [(P0, layer, port_from.width)], 236 | (P3, layer, port_to.width), 237 | new_height, 238 | ex, 239 | middle_taper=middle_taper, 240 | ) 241 | ) 242 | if orientation == S: 243 | paths.extend(paths_cluster) 244 | elif orientation == Z: 245 | paths.extend(reversed(paths_cluster)) 246 | return paths 247 | 248 | 249 | def bus_route_Z(cell, ports_from, ports_to, ex, pitch=WAVEGUIDE_RADIUS, radius=WAVEGUIDE_RADIUS): 250 | port_clusters = cluster_ports(ports_from, ports_to, ex) 251 | paths = compute_paths_from_clusters(port_clusters, None, ex, pitch) 252 | 253 | for trace_path in paths: 254 | path = [point for point, _, _ in trace_path] 255 | layout_ebeam_waveguide_from_points(cell, path, radius) 256 | 257 | 258 | def append_Z_trace_vertical(path, new_point, height, ex, middle_layer=None, middle_taper=False): 259 | """Adds new_point to the path list plus TWO Z or S manhattan interesections. 260 | Args: 261 | path: list of tuples containing necessary info (pya.DPoint, layer, width) 262 | new_point: tuple ((x, y) or pya.DPoint, layer, width) 263 | height: y-coordinate of where to place the inner point, 264 | from 0 to abs(new_point.y - path.y) 265 | ex: orientation of ports 266 | middle_layer (optional): layer of middle trace 267 | middle_taper (default False): Adds a middle point in the Z-shaped trace attempting to avoid collisions and DRC errors. 
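    Illustrative sketch (hypothetical layer object and widths; ex along +x, assuming
    zeropdk's point arithmetic extensions):

        path = [(pya.DPoint(0, 0), layer, 0.5)]
        append_Z_trace_vertical(path, (pya.DPoint(30, 40), layer, 0.5),
                                height=10, ex=pya.DVector(1, 0))
        # path is now [(0, 0), (0, 10), (30, 10), (30, 40)] (points only),
        # i.e. a Z-shaped trace with the horizontal jog at y = 10.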
268 | """ 269 | 270 | assert len(path) > 0 271 | 272 | ey = rotate90(ex) 273 | 274 | P0, l0, w0 = path[-1] 275 | P3, l3, w3 = new_point 276 | 277 | height = abs(height) 278 | # assert height <= abs(P0 * ey - P3 * ey) 279 | 280 | # Invert sign of height if P3 is below P0 281 | if P3 * ey < P0 * ey: 282 | height = -height 283 | 284 | P1 = P0 + height * ey 285 | P2 = P1 * ey * ey + P3 * ex * ex 286 | 287 | # selecting middle_layer 288 | if middle_layer is None: 289 | l1, l2 = l0, l3 290 | else: 291 | l1 = l2 = middle_layer 292 | 293 | # lmid defined below 294 | 295 | # selecting middle widths 296 | w1, w2 = w0, w3 297 | if (P2 - P1).norm() <= w1 + w2: 298 | # w1 = w2 = min(w1, w2) 299 | middle_taper = False # middle taper when points are that close looks weird 300 | if w1 < w2: 301 | wmid = w1 302 | lmid = l1 303 | else: 304 | wmid = w2 305 | lmid = l2 306 | 307 | path.append((P1, l1, w1)) 308 | 309 | # move P2 a little bit to avoid acute corners 310 | delta_w = abs(w2 - w1) / 2 311 | if P3 * ey < P0 * ey: 312 | delta_w = -delta_w 313 | P2 += delta_w * ey 314 | 315 | Pmid = (P1 + P2) / 2 316 | 317 | if (P1 - P2).norm() <= max(w1, w2): 318 | if (P3 - P2) * ey > max(w1, w2) * 3: 319 | path.append((P2 + ey * max(w1, w2) * 3, l2, w2)) 320 | else: 321 | path.append((P3 + ey * max(w1, w2) * 0.2, l3, w3)) 322 | else: 323 | if middle_taper: 324 | path.append((Pmid, lmid, wmid)) 325 | path.append((P2, l2, w2)) 326 | path.append(new_point) 327 | return path 328 | 329 | 330 | def layout_connect_ports(cell, layer, port_from, port_to, smooth=True): 331 | """Places an "optimal" bezier curve from port_from to port_to.""" 332 | 333 | if port_from.name.startswith("el"): 334 | assert port_to.name.startswith("el") 335 | P0 = port_from.position + port_from.direction * port_from.width / 2 336 | P3 = port_to.position + port_to.direction * port_to.width / 2 337 | smooth = smooth and True 338 | else: 339 | dbu = cell.layout().dbu 340 | P0 = port_from.position - dbu * port_from.direction 341 | P3 = port_to.position - dbu * port_to.direction 342 | smooth = smooth or True 343 | angle_from = np.arctan2(port_from.direction.y, port_from.direction.x) * 180 / math.pi 344 | angle_to = np.arctan2(-port_to.direction.y, -port_to.direction.x) * 180 / math.pi 345 | 346 | curve = bezier_optimal(P0, P3, angle_from, angle_to) 347 | logger.debug(f"bezier_optimal({P0}, {P3}, {angle_from}, {angle_to})") 348 | return layout_waveguide(cell, layer, curve, [port_from.width, port_to.width], smooth=smooth) 349 | 350 | 351 | def layout_connect_ports_angle(cell, layer, port_from, port_to, angle): 352 | """Places an "optimal" bezier curve from port_from to port_to, with a fixed orientation angle. 353 | 354 | Args: 355 | angle: degrees 356 | Use when connecting ports that are like horizontal-in and horizontal-out. 
357 | """ 358 | 359 | if port_from.name.startswith("el"): 360 | assert port_to.name.startswith("el") 361 | P0 = port_from.position + port_from.direction * port_from.width / 2 362 | P3 = port_to.position + port_to.direction * port_to.width / 2 363 | 364 | # straight lines for electrical connectors 365 | curve = [P0, P3] 366 | else: 367 | P0 = port_from.position 368 | P3 = port_to.position 369 | curve = bezier_optimal(P0, P3, angle, angle) 370 | 371 | return layout_waveguide_angle(cell, layer, curve, [port_from.width, port_to.width], angle) 372 | 373 | 374 | def layout_connect_ports_angle2(cell, layer, port_from, port_to, angle_from, angle_to): 375 | """Places an "optimal" bezier curve from port_from to port_to, with a fixed orientation angle. 376 | 377 | Args: 378 | angle: degrees 379 | Use when connecting ports that are like horizontal-in and horizontal-out. 380 | """ 381 | 382 | if port_from.name.startswith("el"): 383 | assert port_to.name.startswith("el") 384 | P0 = port_from.position + port_from.direction * port_from.width / 2 385 | P3 = port_to.position + port_to.direction * port_to.width / 2 386 | 387 | # straight lines for electrical connectors 388 | curve = [P0, P3] 389 | else: 390 | P0 = port_from.position 391 | P3 = port_to.position 392 | curve = bezier_optimal(P0, P3, angle_from, angle_to) 393 | 394 | return layout_waveguide_angle2( 395 | cell, layer, curve, [port_from.width, port_to.width], angle_from, angle_to 396 | ) 397 | 398 | 399 | def append_L_trace(path, new_point, middle_layer, ex): 400 | """Adds new_point to the path list plus ONE L manhattan intersection. 401 | 402 | Args: 403 | path: list of tuples containing necessary info ((x, y) or pya.DPoint, layer, width) 404 | new_point: tuple ((x, y) or pya.DPoint, layer, width) 405 | """ 406 | 407 | assert len(path) > 0 408 | 409 | p1, l1, w1 = path[-1] # pylint: disable=unused-variable 410 | p2, l2, w2 = new_point # pylint: disable=unused-variable 411 | joint_width = min(w1, w2) 412 | joint_point = manhattan_intersection(p1, p2, ex) 413 | path.append((joint_point, middle_layer, joint_width)) 414 | path.append(new_point) 415 | return path 416 | -------------------------------------------------------------------------------- /zeropdk/layout/waveguides.py: -------------------------------------------------------------------------------- 1 | """ Layout helper functions. 2 | 3 | Author: Thomas Ferreira de Lima @thomaslima 4 | 5 | The following functions are useful for scripted layout, or making 6 | PDK Pcells. 7 | 8 | TODO: enhance documentation 9 | TODO: make some of the functions in util use these. 
10 | """ 11 | 12 | from itertools import repeat 13 | from typing import List, Tuple 14 | import numpy as np 15 | from numpy import cos, sin, pi, sqrt 16 | from functools import reduce 17 | from zeropdk.layout.geometry import curve_length, cross_prod, find_arc 18 | from zeropdk.exceptions import ZeroPDKUserError 19 | 20 | import klayout.db as pya 21 | 22 | debug = False 23 | 24 | def norm(self): 25 | return self.norm() 26 | 27 | def _remove_duplicates(point_tuple_list: List[Tuple[pya.DPoint, ...]]) -> List[Tuple[pya.DPoint, ...]]: 28 | """ Iterates through point_tuple_list and deletes entries with consecutive duplicate points.""" 29 | 30 | if len(point_tuple_list) < 2: 31 | return point_tuple_list 32 | 33 | unique_points = [point_tuple_list[0]] 34 | previous_point = point_tuple_list[0] 35 | for p_tuple in point_tuple_list[1:]: 36 | if (p_tuple[0] - previous_point[0]).norm() > 0: 37 | unique_points.append(p_tuple) 38 | previous_point = p_tuple 39 | 40 | return unique_points 41 | 42 | def waveguide_dpolygon(points_list, width, dbu, smooth=True): 43 | """Returns a polygon outlining a waveguide. 44 | 45 | This was updated over many iterations of failure. It can be used for both 46 | smooth optical waveguides or DC metal traces with corners. It is better 47 | than klayout's Path because it can have varying width. 48 | 49 | Args: 50 | points_list: list of pya.DPoint (at least 2 points) 51 | width (microns): constant, 2-element list, or list. 52 | If 2-element list, then widths are interpolated alongside the waveguide. 53 | If list, then it has to either have the same length as points. 54 | dbu: dbu: typically 0.001, only used for accuracy calculations. 55 | smooth: tries to smooth final polygons to avoid very sharp edges (greater than 130 deg) 56 | Returns: 57 | polygon DPoints 58 | 59 | """ 60 | if len(points_list) < 2: 61 | raise NotImplementedError("ERROR: Not enough points to draw a waveguide.") 62 | return 63 | 64 | # Prepares a joint point and width iterators 65 | try: 66 | if len(width) == len(points_list): 67 | width_iterator = iter(width) 68 | elif len(width) == 2: 69 | # assume width[0] is initial width and 70 | # width[1] is final width 71 | # interpolate with points_list 72 | L = curve_length(points_list) 73 | distance = 0 74 | widths_list = [width[0]] 75 | widths_func = lambda t: (1 - t) * width[0] + t * width[1] 76 | old_point = points_list[0] 77 | for point in points_list[1:]: 78 | distance += norm(point - old_point) 79 | old_point = point 80 | widths_list.append(widths_func(distance / L)) 81 | width_iterator = iter(widths_list) 82 | else: 83 | width_iterator = repeat(width[0]) 84 | except TypeError: 85 | width_iterator = repeat(width) 86 | finally: 87 | points_iterator = iter(points_list) 88 | 89 | points_low = list() 90 | points_high = list() 91 | 92 | def cos_angle(point1, point2): 93 | cos_angle = point1 * point2 / norm(point1) / norm(point2) 94 | 95 | # ensure it's between -1 and 1 (nontrivial numerically) 96 | if abs(cos_angle) > 1: 97 | return cos_angle / abs(cos_angle) 98 | else: 99 | return cos_angle 100 | 101 | def sin_angle(point1, point2): 102 | return cross_prod(point1, point2) / norm(point1) / norm(point2) 103 | 104 | point_width_list = list(zip(points_iterator, width_iterator)) 105 | # Remove duplicate consecutive points here, because it would create 106 | # problems for the algorithm below. 
107 | point_width_list = _remove_duplicates(point_width_list) 108 | N = len(point_width_list) 109 | 110 | if N < 2: 111 | raise ZeroPDKUserError("Error: Attempted to layout a zero-length waveguide.") 112 | 113 | first_point, first_width = point_width_list[0] 114 | next_point, next_width = point_width_list[1] 115 | 116 | delta = next_point - first_point 117 | theta = np.arctan2(delta.y, delta.x) 118 | first_high_point = first_point + 0.5 * first_width * pya.DPoint( 119 | cos(theta + pi / 2), sin(theta + pi / 2) 120 | ) 121 | first_low_point = first_point + 0.5 * first_width * pya.DPoint( 122 | cos(theta - pi / 2), sin(theta - pi / 2) 123 | ) 124 | points_high.append(first_high_point) 125 | points_low.append(first_low_point) 126 | 127 | for i in range(1, N - 1): 128 | prev_point, prev_width = point_width_list[i - 1] 129 | point, width = point_width_list[i] 130 | next_point, next_width = point_width_list[i + 1] 131 | delta_prev = point - prev_point 132 | delta_next = next_point - point 133 | 134 | # based on these points, there are two algorithms available: 135 | # 1. arc algorithm. it detects you are trying to draw an arc 136 | # so it will compute the center and radius of that arc and 137 | # layout accordingly. 138 | # 2. linear trace algorithm. it is not an arc, and you want 139 | # straight lines with sharp corners. 140 | 141 | # to detect an arc, the points need to go in the same direction 142 | # and the width has to be bigger than the smallest distance between 143 | # two points. 144 | 145 | is_small = min(delta_next.norm(), delta_prev.norm()) < width 146 | is_arc = cos_angle(delta_next, delta_prev) > cos(30 * pi / 180) 147 | is_arc = is_arc and is_small 148 | center_arc, radius = find_arc(prev_point, point, next_point) 149 | if is_arc and radius < np.inf: # algorithm 1 150 | ray = point - center_arc 151 | ray /= ray.norm() 152 | # if orientation is positive, the arc is going counterclockwise 153 | orientation = (cross_prod(ray, delta_prev) > 0) * 2 - 1 154 | points_low.append(point + orientation * width * ray / 2) 155 | points_high.append(point - orientation * width * ray / 2) 156 | else: # algorithm 2 157 | theta_prev = np.arctan2(delta_prev.y, delta_prev.x) 158 | theta_next = np.arctan2(delta_next.y, delta_next.x) 159 | 160 | next_point_high = next_point + 0.5 * next_width * pya.DPoint( 161 | cos(theta_next + pi / 2), sin(theta_next + pi / 2) 162 | ) 163 | next_point_low = next_point + 0.5 * next_width * pya.DPoint( 164 | cos(theta_next - pi / 2), sin(theta_next - pi / 2) 165 | ) 166 | 167 | forward_point_high = point + 0.5 * width * pya.DPoint( 168 | cos(theta_next + pi / 2), sin(theta_next + pi / 2) 169 | ) 170 | forward_point_low = point + 0.5 * width * pya.DPoint( 171 | cos(theta_next - pi / 2), sin(theta_next - pi / 2) 172 | ) 173 | 174 | prev_point_high = prev_point + 0.5 * prev_width * pya.DPoint( 175 | cos(theta_prev + pi / 2), sin(theta_prev + pi / 2) 176 | ) 177 | prev_point_low = prev_point + 0.5 * prev_width * pya.DPoint( 178 | cos(theta_prev - pi / 2), sin(theta_prev - pi / 2) 179 | ) 180 | 181 | backward_point_high = point + 0.5 * width * pya.DPoint( 182 | cos(theta_prev + pi / 2), sin(theta_prev + pi / 2) 183 | ) 184 | backward_point_low = point + 0.5 * width * pya.DPoint( 185 | cos(theta_prev - pi / 2), sin(theta_prev - pi / 2) 186 | ) 187 | 188 | fix_angle = lambda theta: ((theta + pi) % (2 * pi)) - pi 189 | 190 | # High point decision 191 | next_high_edge = pya.DEdge(forward_point_high, next_point_high) 192 | prev_high_edge = pya.DEdge(backward_point_high, 
prev_point_high) 193 | 194 | if next_high_edge.crossed_by(prev_high_edge): 195 | intersect_point = next_high_edge.crossing_point(prev_high_edge) 196 | points_high.append(intersect_point) 197 | else: 198 | cos_dd = cos_angle(delta_next, delta_prev) 199 | if width * (1 - cos_dd) > dbu and fix_angle(theta_next - theta_prev) < 0: 200 | points_high.append(backward_point_high) 201 | points_high.append(forward_point_high) 202 | else: 203 | points_high.append((backward_point_high + forward_point_high) * 0.5) 204 | 205 | # Low point decision 206 | next_low_edge = pya.DEdge(forward_point_low, next_point_low) 207 | prev_low_edge = pya.DEdge(backward_point_low, prev_point_low) 208 | 209 | if next_low_edge.crossed_by(prev_low_edge): 210 | intersect_point = next_low_edge.crossing_point(prev_low_edge) 211 | points_low.append(intersect_point) 212 | else: 213 | cos_dd = cos_angle(delta_next, delta_prev) 214 | if width * (1 - cos_dd) > dbu and fix_angle(theta_next - theta_prev) > 0: 215 | points_low.append(backward_point_low) 216 | points_low.append(forward_point_low) 217 | else: 218 | points_low.append((backward_point_low + forward_point_low) * 0.5) 219 | 220 | last_point, last_width = point_width_list[-1] 221 | point, width = point_width_list[-2] 222 | delta = last_point - point 223 | theta = np.arctan2(delta.y, delta.x) 224 | final_high_point = last_point + 0.5 * last_width * pya.DPoint( 225 | cos(theta + pi / 2), sin(theta + pi / 2) 226 | ) 227 | final_low_point = last_point + 0.5 * last_width * pya.DPoint( 228 | cos(theta - pi / 2), sin(theta - pi / 2) 229 | ) 230 | if (final_high_point - points_high[-1]) * delta > 0: 231 | points_high.append(final_high_point) 232 | if (final_low_point - points_low[-1]) * delta > 0: 233 | points_low.append(final_low_point) 234 | 235 | # Append point only if the area of the triangle built with 236 | # neighboring edges is above a certain threshold. 237 | # In addition, if smooth is true: 238 | # Append point only if change in direction is less than 130 degrees. 
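    # Concretely (illustrative numbers): with dbu = 0.001, the area test in
    # smooth_append below drops a candidate point whenever
    # abs(cross_prod(prev_edge, curr_edge)) <= dbu**2 / 2 == 5e-7,
    # i.e. the point is collinear with its neighbors to within a fraction of a
    # database unit. The smoothing test keeps a point only while
    # cos_angle(curr_edge, prev_edge) > cos(130 deg), which is roughly -0.64.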
239 | 
240 |     def smooth_append(point_list, point):
241 |         if len(point_list) < 1:
242 |             point_list.append(point)
243 |             return point_list
244 |         elif len(point_list) < 2:
245 |             curr_edge = point - point_list[-1]
246 |             if norm(curr_edge) > 0:
247 |                 point_list.append(point)
248 |             return point_list
249 | 
250 |         curr_edge = point - point_list[-1]
251 |         if norm(curr_edge) > 0:
252 |             prev_edge = point_list[-1] - point_list[-2]
253 | 
254 |             # Only add the new point if the area of the triangle built with the
255 |             # current edge and the previous edge is greater than dbu^2/2
256 |             if abs(cross_prod(prev_edge, curr_edge)) > dbu**2 / 2:
257 |                 if smooth:
258 |                     # avoid corners when smoothing
259 |                     if cos_angle(curr_edge, prev_edge) > cos(130 / 180 * pi):
260 |                         point_list.append(point)
261 |                     elif norm(curr_edge) > norm(prev_edge):
262 |                         # edge case: prev_edge is small and needs to be
263 |                         # deleted to get rid of the corner
264 |                         point_list[-1] = point
265 |                 else:
266 |                     point_list.append(point)
267 |             # avoid unnecessary points
268 |             else:
269 |                 point_list[-1] = point
270 |         return point_list
271 | 
272 |     if debug and False:
273 |         print("Points to be smoothed:")
274 |         for point, width in point_width_list:
275 |             print(point, width)
276 | 
277 |     smooth_points_high = list(reduce(smooth_append, points_high, list()))
278 |     smooth_points_low = list(reduce(smooth_append, points_low, list()))
279 |     # smooth_points_low = points_low
280 |     # polygon_dpoints = points_high + list(reversed(points_low))
281 |     # polygon_dpoints = list(reduce(smooth_append, polygon_dpoints, list()))
282 |     polygon_dpoints = smooth_points_high + list(reversed(smooth_points_low))
283 |     return pya.DSimplePolygon(polygon_dpoints)
284 | 
285 | 
286 | def layout_waveguide(cell, layer, points_list, width, smooth=False):
287 |     """Lays out a waveguide (or trace) with a certain width along given points.
288 | 
289 |     This is very useful for laying out Bezier curves with or without adiabatic tapers.
290 | 
291 |     Args:
292 |         cell: cell to place into
293 |         layer: layer to place into. It is done with cell.shapes(layer).insert(pya.Polygon)
294 |         points_list: list of pya.DPoint (at least 2 points)
295 |         width (microns): a constant, a 2-element list, or a full list of widths.
296 |             If a 2-element list, widths are interpolated along the waveguide.
297 |             If a full list, it must have the same length as points_list.
298 |         smooth: tries to smooth the final polygon to avoid very sharp corners (greater than 130 deg)
299 | 
300 |     """
301 | 
302 |     dbu = cell.layout().dbu
303 | 
304 |     dpolygon = waveguide_dpolygon(points_list, width, dbu, smooth=smooth)
305 |     dpolygon.compress(True)
306 |     dpolygon.layout(cell, layer)
307 |     return dpolygon
308 | 
309 | 
310 | def layout_waveguide_angle(cell, layer, points_list, width, angle):
311 |     """Lays out a waveguide (or trace) with a certain width along
312 |     given points and with a fixed orientation at all points.
313 | 
314 |     This is very useful for laying out Bezier curves with or without adiabatic tapers.
315 | 
316 |     Args:
317 |         cell: cell to place into
318 |         layer: layer to place into. It is done with cell.shapes(layer).insert(pya.Polygon)
319 |         points_list: list of pya.DPoint (at least 2 points)
320 |         width (microns): a constant, a 2-element list, or a full list of widths.
321 |             If a 2-element list, widths are interpolated along the waveguide.
322 |             If a full list, it must have the same length as points_list.
323 |         angle (degrees): normal angle of the waveguide at every point
324 |     """
325 |     return layout_waveguide_angle2(cell, layer, points_list, width, angle, angle)
326 | 
327 | 
328 | def layout_waveguide_angle2(cell, layer, points_list, width, angle_from, angle_to):
329 |     """Lays out a waveguide (or trace) with a certain width along given points,
330 |     with the orientation varying linearly from angle_from to angle_to.
331 | 
332 |     This is very useful for laying out Bezier curves with or without adiabatic tapers.
333 | 
334 |     Args:
335 |         cell: cell to place into
336 |         layer: layer to place into. It is done with cell.shapes(layer).insert(pya.Polygon)
337 |         points_list: list of pya.DPoint (at least 2 points)
338 |         width (microns): a constant, a 2-element list, or a full list of widths.
339 |             If a 2-element list, widths are interpolated along the waveguide.
340 |             If a full list, it must have the same length as points_list.
341 |         angle_from (degrees): normal angle of the first waveguide point
342 |         angle_to (degrees): normal angle of the last waveguide point
343 | 
344 |     """
345 |     if len(points_list) < 2:
346 |         raise NotImplementedError("ERROR: points_list too short")
347 | 
348 | 
349 |     try:
350 |         if len(width) == len(points_list):
351 |             width_iterator = iter(width)
352 |         elif len(width) == 2:
353 |             # assume width[0] is the initial width and
354 |             # width[1] is the final width;
355 |             # interpolate along points_list
356 |             L = curve_length(points_list)
357 |             distance = 0
358 |             widths_list = [width[0]]
359 |             widths_func = lambda t: (1 - t) * width[0] + t * width[1]
360 |             old_point = points_list[0]
361 |             for point in points_list[1:]:
362 |                 distance += norm(point - old_point)
363 |                 old_point = point
364 |                 widths_list.append(widths_func(distance / L))
365 |             width_iterator = iter(widths_list)
366 |         else:
367 |             width_iterator = repeat(width[0])
368 |     except TypeError:
369 |         width_iterator = repeat(width)
370 |     finally:
371 |         points_iterator = iter(points_list)
372 | 
373 |     points_low = list()
374 |     points_high = list()
375 | 
376 |     point_width_list = list(zip(points_iterator, width_iterator))
377 |     N = len(point_width_list)
378 | 
379 |     angle_list = list(np.linspace(angle_from, angle_to, N))
380 | 
381 |     for i in range(0, N):
382 |         point, width = point_width_list[i]
383 |         angle = angle_list[i]
384 |         theta = angle * pi / 180
385 | 
386 |         point_high = point + 0.5 * width * pya.DPoint(cos(theta + pi / 2), sin(theta + pi / 2))
387 |         points_high.append(point_high)
388 |         point_low = point + 0.5 * width * pya.DPoint(cos(theta - pi / 2), sin(theta - pi / 2))
389 |         points_low.append(point_low)
390 | 
391 |     polygon_points = points_high + list(reversed(points_low))
392 | 
393 |     poly = pya.DSimplePolygon(polygon_points)
394 |     cell.shapes(layer).insert(poly)
395 |     return poly
396 | 
--------------------------------------------------------------------------------
/zeropdk/tech/__init__.py:
--------------------------------------------------------------------------------
 1 | import klayout.db as kdb
 2 | 
 3 | 
 4 | class Tech:
 5 |     layers = None
 6 | 
 7 |     def __init__(self):
 8 |         if self.layers is None:
 9 |             self.layers = {}
10 | 
11 |     def add_layer(self, layer_name, layer_def):
12 |         """Adds a layer to the technology file.
13 |             layer_name: str: name of the layer (not stored in GDS, but used in OASIS).
14 |             layer_def: str: layer/datatype string such as "10/0", where 10 is the layer index and 0 the datatype.
15 |         """
16 | 
17 |         layer_idx, datatype = layer_def.split("/")
18 |         layer_idx = int(layer_idx)
19 |         datatype = int(datatype)
20 |         self.layers[layer_name] = kdb.LayerInfo(layer_idx, datatype, layer_name)
21 | 
22 |     @classmethod
23 |     def load_from_xml(cls, lyp_filename):
24 |         import os
25 | 
26 |         lyp_filepath = os.path.realpath(lyp_filename)
27 |         with open(lyp_filepath, "r") as file:
28 |             layer_dict = xml_to_dict(file.read())["layer-properties"]["properties"]
29 | 
30 |         layer_map = {}
31 | 
32 |         for k in layer_dict:
33 |             layerInfo = k["source"].split("@")[0]
34 |             if "group-members" in k:
35 |                 # encountered a layer group, look inside:
36 |                 j = k["group-members"]
37 |                 if "name" in j:
38 |                     layerInfo_j = j["source"].split("@")[0]
39 |                     layer_map[j["name"]] = layerInfo_j
40 |                 else:
41 |                     for j in k["group-members"]:
42 |                         layerInfo_j = j["source"].split("@")[0]
43 |                         layer_map[j["name"]] = layerInfo_j
44 |                 if k["source"] != "*/*@*":
45 |                     layer_map[k["name"]] = layerInfo
46 |             else:
47 |                 try:
48 |                     layer_map[k["name"]] = layerInfo
49 |                 except TypeError as e:
50 |                     new_message = (
51 |                         f"Bad name for layer {layerInfo}. Check your .lyp XML file for errors."
52 |                     )
53 | 
54 |                     raise TypeError(new_message) from e
55 | 
56 |         # layer_map should contain values like '12/0',
57 |         # where 12 is the layer and 0 is the datatype
58 | 
59 |         obj = cls()
60 | 
61 |         for layer_name, layer_string in layer_map.items():
62 |             obj.add_layer(layer_name, layer_string)
63 | 
64 |         return obj
65 | 
66 | 
67 | # XML functions
68 | 
69 | 
70 | def etree_to_dict(t):
71 |     """XML to Dict parser
72 |     from: https://stackoverflow.com/questions/2148119/how-to-convert-an-xml-string-to-a-dictionary-in-python/10077069
73 |     """
74 |     from collections import defaultdict
75 | 
76 |     d = {t.tag: {} if t.attrib else None}
77 |     children = list(t)
78 |     if children:
79 |         dd = defaultdict(list)
80 |         for dc in map(etree_to_dict, children):
81 |             for k, v in dc.items():
82 |                 dd[k].append(v)
83 |         d = {t.tag: {k: v[0] if len(v) == 1 else v for k, v in dd.items()}}
84 |     if t.attrib:
85 |         d[t.tag].update((f"@{k}", v) for k, v in t.attrib.items())
86 |     if t.text:
87 |         text = t.text.strip()
88 |         if children or t.attrib:
89 |             if text:
90 |                 d[t.tag]["#text"] = text
91 |         else:
92 |             d[t.tag] = text
93 |     return d
94 | 
95 | 
96 | def xml_to_dict(t):
97 |     from xml.etree import ElementTree as ET
98 | 
99 |     try:
100 |         e = ET.XML(t)
101 |     except ET.ParseError:
102 |         raise
103 |     except Exception:
104 |         raise UserWarning("Error in the XML file.")
105 |     return etree_to_dict(e)
106 | 
--------------------------------------------------------------------------------
/zeropdk/utils/gitpath.py:
--------------------------------------------------------------------------------
 1 | """All credit goes to https://github.com/MaxNoe/python-gitpath
 2 | 
 3 | 
 4 | The MIT License (MIT)
 5 | 
 6 | Copyright (c) 2015 Maximilian Nöthe
 7 | 
 8 | Permission is hereby granted, free of charge, to any person obtaining a copy
 9 | of this software and associated documentation files (the "Software"), to deal
10 | in the Software without restriction, including without limitation the rights
11 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
12 | copies of the Software, and to permit persons to whom the Software is
13 | furnished to do so, subject to the following conditions:
14 | 
15 | The above copyright notice and this permission notice shall be included in all
16 | copies or substantial portions of the Software.
17 | 18 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 19 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 20 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 21 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 22 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 23 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 24 | SOFTWARE. 25 | 26 | """ 27 | 28 | from subprocess import check_output, CalledProcessError 29 | from functools import lru_cache 30 | import os.path 31 | 32 | 33 | @lru_cache(maxsize=1) 34 | def root(): 35 | """returns the absolute path of the repository root""" 36 | try: 37 | base = check_output(["git", "rev-parse", "--show-toplevel"]) 38 | except CalledProcessError: 39 | raise IOError(f"'{os.getcwd()}' is not a git repository") 40 | return base.decode("utf-8").strip() 41 | 42 | 43 | def abspath(relpath): 44 | """returns the absolute path for a path given relative to the root of 45 | the git repository 46 | """ 47 | return os.path.join(root(), relpath) 48 | --------------------------------------------------------------------------------
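
A minimal usage sketch of the waveguide helpers above (illustrative only). It assumes these functions live in zeropdk.layout.waveguides, as the directory tree suggests, that importing zeropdk applies the klayout_extend patches providing DSimplePolygon.layout, and that the 1/0 layer and file names are arbitrary example values.

    import klayout.db as pya
    import zeropdk  # assumed to install the klayout_extend monkey patches
    from zeropdk.layout.waveguides import layout_waveguide  # assumed module path

    layout = pya.Layout()
    layout.dbu = 0.001  # database unit in microns
    cell = layout.create_cell("wg_demo")
    si_layer = layout.layer(pya.LayerInfo(1, 0))  # hypothetical layer 1/0

    # Taper from 0.5 um to 3.0 um along three points; a 2-element width list
    # is interpolated along the path, as documented above.
    points = [pya.DPoint(0, 0), pya.DPoint(50, 0), pya.DPoint(50, 50)]
    layout_waveguide(cell, si_layer, points, [0.5, 3.0], smooth=True)

    layout.write("wg_demo.gds")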