├── .github └── workflows │ └── ci.yml ├── .gitignore ├── .gitmodules ├── LICENSE ├── README.md ├── artifact.nix ├── ci-util └── cache-nix.sh ├── flake.lock ├── flake.nix ├── headers ├── array.h ├── list.h ├── set.h └── tuples.h ├── iso-racket-links.rktd ├── katara-dark.png ├── katara-light.png ├── katara ├── __init__.py ├── aci.py ├── auto_grammar.py ├── lattices.py ├── search_structures.py └── synthesis.py ├── llvm-pass ├── CMakeLists.txt ├── addEmptyBlocks │ ├── AddEmptyBlocks.cpp │ └── CMakeLists.txt └── instructions.txt ├── mypy.ini ├── poetry.lock ├── pyproject.toml ├── rosette-packages-flake.nix └── tests ├── compile-add-blocks ├── compile-all ├── plot_distribution.py ├── sequential1.c ├── sequential1_clock.c ├── sequential2.c ├── sequential_flag.c ├── sequential_register.c └── synthesize_crdt.py /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: Katara CI 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | pull_request: 8 | release: 9 | types: [published] 10 | 11 | jobs: 12 | lint: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v2 16 | - uses: psf/black@stable 17 | with: 18 | black_args: --check --diff . 19 | version: "22.3.0" 20 | 21 | mypy: 22 | runs-on: ubuntu-latest 23 | steps: 24 | - uses: actions/checkout@v2 25 | with: 26 | submodules: "recursive" 27 | 28 | - uses: cachix/install-nix-action@v16 29 | with: 30 | nix_path: nixpkgs=channel:nixos-22.05 31 | 32 | - name: Cache Nix store 33 | id: cache 34 | uses: actions/cache@v3 35 | with: 36 | path: ~/nix-cache 37 | key: nix-flake-cache-${{ hashFiles('flake.nix') }}-${{ hashFiles('flake.lock') }}-${{ hashFiles('poetry.lock') }} 38 | restore-keys: | 39 | nix-flake-cache- 40 | 41 | - name: Load Nix cache 42 | run: | 43 | if [ -f "$HOME/nix-cache/cache.nar" ]; then 44 | nix-store --import < ~/nix-cache/cache.nar 45 | rm -rf ~/nix-cache 46 | fi 47 | 48 | - name: Run mypy 49 | shell: nix develop --command bash -e {0} 50 | run: mypy . 
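# Note on the caching steps in this workflow (descriptive comment, not part of the original file):
# the "Cache Nix store" step restores ~/nix-cache/cache.nar keyed on flake.nix, flake.lock, and poetry.lock;
# "Load Nix cache" imports that archive into the Nix store before the dev shell is used; and on a cache miss
# the "Save Nix cache" step below re-exports the dev shell's closure via ci-util/cache-nix.sh for later runs.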
51 | 52 | - name: Save Nix cache 53 | if: steps.cache.outputs.cache-hit != 'true' 54 | run: ./ci-util/cache-nix.sh 55 | 56 | build: 57 | runs-on: ubuntu-latest 58 | needs: [lint, mypy] 59 | 60 | steps: 61 | - uses: actions/checkout@v2 62 | with: 63 | submodules: "recursive" 64 | 65 | - uses: cachix/install-nix-action@v16 66 | with: 67 | nix_path: nixpkgs=channel:nixos-22.05 68 | 69 | - name: Cache Nix store 70 | id: cache 71 | uses: actions/cache@v3 72 | with: 73 | path: ~/nix-cache 74 | key: nix-flake-cache-${{ hashFiles('flake.nix') }}-${{ hashFiles('flake.lock') }}-${{ hashFiles('poetry.lock') }} 75 | restore-keys: | 76 | nix-flake-cache- 77 | 78 | - name: Load Nix cache 79 | run: | 80 | if [ -f "$HOME/nix-cache/cache.nar" ]; then 81 | nix-store --import < ~/nix-cache/cache.nar 82 | rm -rf ~/nix-cache 83 | fi 84 | 85 | # https://github.com/Bogdanp/setup-racket-cache-example 86 | - name: Cache Racket dependencies 87 | id: cache-racket 88 | uses: actions/cache@v3 89 | with: 90 | path: | 91 | ~/.cache/racket 92 | ~/.local/share/racket 93 | key: ${{ runner.os }}-racket-8.4-rosette-4.1 94 | 95 | - name: Install Racket 96 | uses: Bogdanp/setup-racket@v1.7 97 | with: 98 | architecture: 'x64' 99 | distribution: 'full' 100 | variant: 'CS' 101 | version: '8.4' 102 | 103 | - name: Install Rosette (4.1) 104 | run: | 105 | raco pkg install --auto --skip-installed https://github.com/emina/rosette.git#10178550a0a21e6d80598d0f43c33c9228728f14 106 | 107 | - name: Build LLVM Pass 108 | shell: nix develop --command bash -e {0} 109 | run: | 110 | cd llvm-pass 111 | mkdir build 112 | cd build 113 | cmake .. 114 | make 115 | cd .. 116 | 117 | - name: Build Test Inputs 118 | shell: nix develop --command bash -e {0} 119 | run: | 120 | cd tests 121 | ./compile-all 122 | cd .. 123 | 124 | - name: Test CRDT Synthesis (fixed) 125 | shell: nix develop --command bash -e {0} 126 | run: | 127 | python -m tests.synthesize_crdt synth g_set --fixed 128 | python -m tests.synthesize_crdt synth 2p_set --fixed 129 | python -m tests.synthesize_crdt synth flag_ew --fixed 130 | python -m tests.synthesize_crdt synth flag_dw --fixed 131 | python -m tests.synthesize_crdt synth grow_only_counter --fixed 132 | 133 | - name: Save Nix cache 134 | if: steps.cache.outputs.cache-hit != 'true' 135 | run: ./ci-util/cache-nix.sh 136 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | !shell.nix 2 | .DS_Store 3 | *.pyc 4 | llvm-pass/build/ 5 | synthesisLogs/ 6 | tests/*.ll 7 | tests/*.loops 8 | results*.csv 9 | search*.csv 10 | distribution*.png 11 | dist/ 12 | rosette-packages/packages 13 | result 14 | nixos.qcow2 15 | -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "metalift"] 2 | path = metalift 3 | url = git@github.com:metalift/metalift.git 4 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | 2 | Apache License 3 | Version 2.0, January 2004 4 | http://www.apache.org/licenses/ 5 | 6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 7 | 8 | 1. Definitions. 9 | 10 | "License" shall mean the terms and conditions for use, reproduction, 11 | and distribution as defined by Sections 1 through 9 of this document. 
12 | 13 | "Licensor" shall mean the copyright owner or entity authorized by 14 | the copyright owner that is granting the License. 15 | 16 | "Legal Entity" shall mean the union of the acting entity and all 17 | other entities that control, are controlled by, or are under common 18 | control with that entity. For the purposes of this definition, 19 | "control" means (i) the power, direct or indirect, to cause the 20 | direction or management of such entity, whether by contract or 21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 22 | outstanding shares, or (iii) beneficial ownership of such entity. 23 | 24 | "You" (or "Your") shall mean an individual or Legal Entity 25 | exercising permissions granted by this License. 26 | 27 | "Source" form shall mean the preferred form for making modifications, 28 | including but not limited to software source code, documentation 29 | source, and configuration files. 30 | 31 | "Object" form shall mean any form resulting from mechanical 32 | transformation or translation of a Source form, including but 33 | not limited to compiled object code, generated documentation, 34 | and conversions to other media types. 35 | 36 | "Work" shall mean the work of authorship, whether in Source or 37 | Object form, made available under the License, as indicated by a 38 | copyright notice that is included in or attached to the work 39 | (an example is provided in the Appendix below). 40 | 41 | "Derivative Works" shall mean any work, whether in Source or Object 42 | form, that is based on (or derived from) the Work and for which the 43 | editorial revisions, annotations, elaborations, or other modifications 44 | represent, as a whole, an original work of authorship. For the purposes 45 | of this License, Derivative Works shall not include works that remain 46 | separable from, or merely link (or bind by name) to the interfaces of, 47 | the Work and Derivative Works thereof. 48 | 49 | "Contribution" shall mean any work of authorship, including 50 | the original version of the Work and any modifications or additions 51 | to that Work or Derivative Works thereof, that is intentionally 52 | submitted to Licensor for inclusion in the Work by the copyright owner 53 | or by an individual or Legal Entity authorized to submit on behalf of 54 | the copyright owner. For the purposes of this definition, "submitted" 55 | means any form of electronic, verbal, or written communication sent 56 | to the Licensor or its representatives, including but not limited to 57 | communication on electronic mailing lists, source code control systems, 58 | and issue tracking systems that are managed by, or on behalf of, the 59 | Licensor for the purpose of discussing and improving the Work, but 60 | excluding communication that is conspicuously marked or otherwise 61 | designated in writing by the copyright owner as "Not a Contribution." 62 | 63 | "Contributor" shall mean Licensor and any individual or Legal Entity 64 | on behalf of whom a Contribution has been received by Licensor and 65 | subsequently incorporated within the Work. 66 | 67 | 2. Grant of Copyright License. Subject to the terms and conditions of 68 | this License, each Contributor hereby grants to You a perpetual, 69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 70 | copyright license to reproduce, prepare Derivative Works of, 71 | publicly display, publicly perform, sublicense, and distribute the 72 | Work and such Derivative Works in Source or Object form. 73 | 74 | 3. Grant of Patent License. 
Subject to the terms and conditions of 75 | this License, each Contributor hereby grants to You a perpetual, 76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 77 | (except as stated in this section) patent license to make, have made, 78 | use, offer to sell, sell, import, and otherwise transfer the Work, 79 | where such license applies only to those patent claims licensable 80 | by such Contributor that are necessarily infringed by their 81 | Contribution(s) alone or by combination of their Contribution(s) 82 | with the Work to which such Contribution(s) was submitted. If You 83 | institute patent litigation against any entity (including a 84 | cross-claim or counterclaim in a lawsuit) alleging that the Work 85 | or a Contribution incorporated within the Work constitutes direct 86 | or contributory patent infringement, then any patent licenses 87 | granted to You under this License for that Work shall terminate 88 | as of the date such litigation is filed. 89 | 90 | 4. Redistribution. You may reproduce and distribute copies of the 91 | Work or Derivative Works thereof in any medium, with or without 92 | modifications, and in Source or Object form, provided that You 93 | meet the following conditions: 94 | 95 | (a) You must give any other recipients of the Work or 96 | Derivative Works a copy of this License; and 97 | 98 | (b) You must cause any modified files to carry prominent notices 99 | stating that You changed the files; and 100 | 101 | (c) You must retain, in the Source form of any Derivative Works 102 | that You distribute, all copyright, patent, trademark, and 103 | attribution notices from the Source form of the Work, 104 | excluding those notices that do not pertain to any part of 105 | the Derivative Works; and 106 | 107 | (d) If the Work includes a "NOTICE" text file as part of its 108 | distribution, then any Derivative Works that You distribute must 109 | include a readable copy of the attribution notices contained 110 | within such NOTICE file, excluding those notices that do not 111 | pertain to any part of the Derivative Works, in at least one 112 | of the following places: within a NOTICE text file distributed 113 | as part of the Derivative Works; within the Source form or 114 | documentation, if provided along with the Derivative Works; or, 115 | within a display generated by the Derivative Works, if and 116 | wherever such third-party notices normally appear. The contents 117 | of the NOTICE file are for informational purposes only and 118 | do not modify the License. You may add Your own attribution 119 | notices within Derivative Works that You distribute, alongside 120 | or as an addendum to the NOTICE text from the Work, provided 121 | that such additional attribution notices cannot be construed 122 | as modifying the License. 123 | 124 | You may add Your own copyright statement to Your modifications and 125 | may provide additional or different license terms and conditions 126 | for use, reproduction, or distribution of Your modifications, or 127 | for any such Derivative Works as a whole, provided Your use, 128 | reproduction, and distribution of the Work otherwise complies with 129 | the conditions stated in this License. 130 | 131 | 5. Submission of Contributions. Unless You explicitly state otherwise, 132 | any Contribution intentionally submitted for inclusion in the Work 133 | by You to the Licensor shall be under the terms and conditions of 134 | this License, without any additional terms or conditions. 
135 | Notwithstanding the above, nothing herein shall supersede or modify 136 | the terms of any separate license agreement you may have executed 137 | with Licensor regarding such Contributions. 138 | 139 | 6. Trademarks. This License does not grant permission to use the trade 140 | names, trademarks, service marks, or product names of the Licensor, 141 | except as required for reasonable and customary use in describing the 142 | origin of the Work and reproducing the content of the NOTICE file. 143 | 144 | 7. Disclaimer of Warranty. Unless required by applicable law or 145 | agreed to in writing, Licensor provides the Work (and each 146 | Contributor provides its Contributions) on an "AS IS" BASIS, 147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 148 | implied, including, without limitation, any warranties or conditions 149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 150 | PARTICULAR PURPOSE. You are solely responsible for determining the 151 | appropriateness of using or redistributing the Work and assume any 152 | risks associated with Your exercise of permissions under this License. 153 | 154 | 8. Limitation of Liability. In no event and under no legal theory, 155 | whether in tort (including negligence), contract, or otherwise, 156 | unless required by applicable law (such as deliberate and grossly 157 | negligent acts) or agreed to in writing, shall any Contributor be 158 | liable to You for damages, including any direct, indirect, special, 159 | incidental, or consequential damages of any character arising as a 160 | result of this License or out of the use or inability to use the 161 | Work (including but not limited to damages for loss of goodwill, 162 | work stoppage, computer failure or malfunction, or any and all 163 | other commercial damages or losses), even if such Contributor 164 | has been advised of the possibility of such damages. 165 | 166 | 9. Accepting Warranty or Additional Liability. While redistributing 167 | the Work or Derivative Works thereof, You may choose to offer, 168 | and charge a fee for, acceptance of support, warranty, indemnity, 169 | or other liability obligations and/or rights consistent with this 170 | License. However, in accepting such obligations, You may act only 171 | on Your own behalf and on Your sole responsibility, not on behalf 172 | of any other Contributor, and only if You agree to indemnify, 173 | defend, and hold each Contributor harmless for any liability 174 | incurred by, or claims asserted against, such Contributor by reason 175 | of your accepting any such warranty or additional liability. 176 | 177 | END OF TERMS AND CONDITIONS 178 | 179 | APPENDIX: How to apply the Apache License to your work. 180 | 181 | To apply the Apache License to your work, attach the following 182 | boilerplate notice, with the fields enclosed by brackets "[]" 183 | replaced with your own identifying information. (Don't include 184 | the brackets!) The text should be enclosed in the appropriate 185 | comment syntax for the file format. We also recommend that a 186 | file or class name and description of purpose be included on the 187 | same "printed page" as the copyright notice for easier 188 | identification within third-party archives. 189 | 190 | Copyright 2022 Katara Contributors 191 | 192 | Licensed under the Apache License, Version 2.0 (the "License"); 193 | you may not use this file except in compliance with the License. 
194 | You may obtain a copy of the License at 195 | 196 | http://www.apache.org/licenses/LICENSE-2.0 197 | 198 | Unless required by applicable law or agreed to in writing, software 199 | distributed under the License is distributed on an "AS IS" BASIS, 200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 201 | See the License for the specific language governing permissions and 202 | limitations under the License. -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | 2 | 3 |

Synthesize CRDTs that mirror your existing data types!

4 | 5 | Katara is a program synthesis engine that can automatically generate CRDT designs that mirror the behavior of a sequential data type annotated with a conflict resolution policy for non-commutative operations. See our [paper](https://arxiv.org/pdf/2205.12425.pdf) for more information! 6 | 7 | ## Setup 8 | ### Install (with Nix) 9 | To get a development environment up and running, one option is to use [Nix](https://nixos.org/), which can automatically pull and build the necessary dependencies. First, you'll need to [install Nix](https://nixos.org/download.html). Note that this _will_ require temporary root access as Nix sets up a daemon to handle builds, and will set up a separate volume for storing build artifacts if on macOS. 10 | 11 | Once you've got Nix installed, you'll need to enable [flakes](https://nixos.wiki/wiki/Flakes). 12 | 13 | Then, all you have to do is navigate to the Katara directory and run the following command: 14 | ```bash 15 | $ nix develop 16 | ``` 17 | 18 | This will build all of Katara's dependencies and drop you into a temporary shell with all the dependencies available. 19 | 20 | **Note**: you will still need to install Racket and Rosette separately. There _is_ a solution for doing this through Nix, but it requires [nix-ld](https://github.com/Mic92/nix-ld) to be installed and is generally not recommended unless you run NixOS. 21 | 22 | ### Install (without Nix) 23 | You'll need the following dependencies installed to use Katara: 24 | - Python 3.8 with Poetry 25 | - [Rosette](https://emina.github.io/rosette) 26 | - [CVC5](https://cvc5.github.io) 27 | - [LLVM 11](https://llvm.org/) 28 | 29 | We use [Poetry](https://python-poetry.org/) for dependency management. To set up the environment, simply install Poetry, run `poetry install`, and then `poetry shell` to enter an environment with the dependencies installed. 30 | 31 | ## Build the LLVM Pass 32 | 33 | **We currently support LLVM 11** 34 | 35 | Run the following to build the LLVM pass for processing branch instructions (works for LLVM 11): 36 | ````bash 37 | cd llvm-pass 38 | mkdir build 39 | cd build 40 | cmake .. 41 | make 42 | cd ../.. 43 | ```` 44 | 45 | This pass is called in `tests/compile-add-blocks`. 46 | 47 | ## Synthesizing CRDTs 48 | The first step to synthesizing a CRDT is to compile the sequential reference. We have provided a set of benchmark sequential data types in the `tests/` folder. These can be compiled by entering the folder and running `compile-all`: 49 | ```bash 50 | cd tests 51 | ./compile-all 52 | cd .. 53 | ``` 54 | 55 | Then, from the base directory of the project, we can run the synthesis benchmarks defined in `tests/synthesize_crdt.py` (in the `benchmarks` variable). Each benchmark is configured with the sequential data type to process, the ordering constraint as defined in our paper (`opOrder`), flags to enable synthesis of non-idempotent operations, and type hints to optimize the synthesis process. With a benchmark configured, we can run it as 56 | ```bash 57 | python -m tests.synthesize_crdt synth <benchmark> 58 | ``` 59 | 60 | For example, we can synthesize a CRDT for the 2P-Set benchmark with 61 | ```bash 62 | python -m tests.synthesize_crdt synth 2p_set 63 | ``` 64 | 65 | In general, you can use the following command structure: 66 | ```bash 67 | python -m tests.synthesize_crdt <mode> <benchmark> [--fixed] [--first <n>] [--repeat <n>] 68 | ``` 69 | Where: 70 | - `<mode>` is either `synth` for bounded synthesis with pruning or `synth-unbounded` for direct unbounded synthesis. 
71 | - `` is the name of the benchmark or `all` to run all benchmarks. 72 | - `--fixed` (optional) uses a fixed lattice structure instead of exploring all structures. 73 | - `--first ` (optional) synthesizes the first N structures. 74 | - `--repeat ` (optional) specifies the number of repetitions for the synthesis process. 75 | -------------------------------------------------------------------------------- /artifact.nix: -------------------------------------------------------------------------------- 1 | { config, options, lib, pkgs, specialArgs, ... }: 2 | 3 | lib.mkMerge [{ 4 | environment.systemPackages = with pkgs; [ 5 | (poetry2nix.mkPoetryEnv { 6 | python = python38; 7 | projectDir = ./.; 8 | 9 | overrides = poetry2nix.overrides.withDefaults (_: poetrySuper: { 10 | metalift = poetrySuper.metalift.overrideAttrs(_: super: { 11 | nativeBuildInputs = super.nativeBuildInputs ++ [ poetrySuper.poetry ]; 12 | }); 13 | 14 | autoflake = poetrySuper.autoflake.overrideAttrs(_: super: { 15 | nativeBuildInputs = super.nativeBuildInputs ++ [ poetrySuper.hatchling ]; 16 | }); 17 | }); 18 | }) 19 | 20 | cvc5 21 | gnumake 22 | cmake 23 | llvm_11 24 | llvm_11.dev 25 | clang_11 26 | 27 | racket 28 | 29 | htop 30 | nano 31 | vim 32 | emacs 33 | ]; 34 | 35 | users = { 36 | mutableUsers = false; 37 | allowNoPasswordLogin = true; 38 | 39 | users = { 40 | demo = { 41 | home = "/home/demo"; 42 | password = "demo"; 43 | extraGroups = [ "wheel" ]; 44 | isNormalUser = true; 45 | }; 46 | }; 47 | }; 48 | 49 | security.sudo.wheelNeedsPassword = false; 50 | 51 | boot.postBootCommands = let source = 52 | let 53 | inherit (specialArgs.gitignore.lib) gitignoreSource; 54 | in pkgs.lib.cleanSourceWith 55 | { filter = (path: type: 56 | ! (builtins.any 57 | (r: (builtins.match r (builtins.baseNameOf path)) != null) 58 | []) 59 | ); 60 | src = gitignoreSource ./.; 61 | } ; 62 | in '' 63 | echo "Loading source code for the artifact" 64 | 65 | ${pkgs.rsync}/bin/rsync -r --owner --group --chown=demo:users --perms --chmod=u+rw ${source}/ /home/demo 66 | 67 | mkdir -p /home/demo/.racket/8.7/pkgs 68 | ln -s ${./iso-racket-links.rktd} /home/demo/.racket/8.7/links.rktd 69 | ln -s ${specialArgs.rosette-packages.packages}/* /home/demo/.racket/8.7/pkgs/ 70 | 71 | rm /home/demo/.racket/8.7/pkgs/rosette 72 | ${pkgs.rsync}/bin/rsync -r --owner --group --chown=demo:users --perms --chmod=u+rw ${specialArgs.rosette-packages.packages}/rosette/ /home/demo/.racket/8.7/pkgs/rosette 73 | mkdir /home/demo/.racket/8.7/pkgs/rosette/bin 74 | ln -s ${(pkgs.z3.overrideAttrs(self: { 75 | version = "4.8.8"; 76 | 77 | src = pkgs.fetchFromGitHub { 78 | owner = "Z3Prover"; 79 | repo = "z3"; 80 | rev = "z3-4.8.8"; 81 | hash = "sha256-qpmi75I27m89dhKSy8D2zkzqKpLoFBPRBrhzDB8axeY="; 82 | }; 83 | }))}/bin/z3 /home/demo/.racket/8.7/pkgs/rosette/bin/z3 84 | ''; 85 | 86 | services.getty.autologinUser = "demo"; 87 | 88 | services.openssh.enable = true; 89 | networking.firewall.allowedTCPPorts = [ 22 ]; 90 | 91 | services.xserver.enable = true; 92 | } (lib.optionalAttrs (builtins.hasAttr "isoImage" options) { 93 | isoImage.appendToMenuLabel = " OOPSLA CRDT Synthesis Artifact"; 94 | services.xserver.displayManager.startx.enable = true; 95 | }) (lib.optionalAttrs (builtins.hasAttr "virtualbox" options) { 96 | nixpkgs.config = { 97 | allowUnfree = true; 98 | }; 99 | 100 | virtualbox.vmName = "OOPSLA CRDT Synthesis Artifact"; 101 | virtualbox.memorySize = 1024 * 8; 102 | virtualbox.params.cpus = 8; 103 | virtualbox.params.usb = "off"; 104 | virtualbox.params.usbehci = 
"off"; 105 | 106 | services.xserver.desktopManager.gnome.enable = true; 107 | services.xserver.displayManager.lightdm.enable = true; 108 | 109 | services.getty.autologinUser = pkgs.lib.mkForce null; 110 | 111 | environment.systemPackages = with pkgs; [ 112 | vscode 113 | sublime 114 | firefox 115 | ]; 116 | })] 117 | -------------------------------------------------------------------------------- /ci-util/cache-nix.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | cur_dir=$(pwd) 4 | 5 | paths_to_save=$(nix-store --query --references $(nix path-info --derivation ".#devShell.x86_64-linux.inputDerivation") | \ 6 | xargs nix-store --realise | \ 7 | xargs nix-store --query --requisites) 8 | 9 | mkdir ~/nix-cache 10 | 11 | cd ~/nix-cache 12 | 13 | nix-store --export $paths_to_save > ~/nix-cache/cache.nar 14 | 15 | cd $cur_dir 16 | -------------------------------------------------------------------------------- /flake.lock: -------------------------------------------------------------------------------- 1 | { 2 | "nodes": { 3 | "flake-utils": { 4 | "locked": { 5 | "lastModified": 1676283394, 6 | "narHash": "sha256-XX2f9c3iySLCw54rJ/CZs+ZK6IQy7GXNY4nSOyu2QG4=", 7 | "owner": "numtide", 8 | "repo": "flake-utils", 9 | "rev": "3db36a8b464d0c4532ba1c7dda728f4576d6d073", 10 | "type": "github" 11 | }, 12 | "original": { 13 | "owner": "numtide", 14 | "repo": "flake-utils", 15 | "type": "github" 16 | } 17 | }, 18 | "flake-utils_2": { 19 | "locked": { 20 | "lastModified": 1676283394, 21 | "narHash": "sha256-XX2f9c3iySLCw54rJ/CZs+ZK6IQy7GXNY4nSOyu2QG4=", 22 | "owner": "numtide", 23 | "repo": "flake-utils", 24 | "rev": "3db36a8b464d0c4532ba1c7dda728f4576d6d073", 25 | "type": "github" 26 | }, 27 | "original": { 28 | "owner": "numtide", 29 | "repo": "flake-utils", 30 | "type": "github" 31 | } 32 | }, 33 | "gitignore": { 34 | "inputs": { 35 | "nixpkgs": [ 36 | "nixpkgs" 37 | ] 38 | }, 39 | "locked": { 40 | "lastModified": 1660459072, 41 | "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=", 42 | "owner": "hercules-ci", 43 | "repo": "gitignore.nix", 44 | "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73", 45 | "type": "github" 46 | }, 47 | "original": { 48 | "owner": "hercules-ci", 49 | "repo": "gitignore.nix", 50 | "type": "github" 51 | } 52 | }, 53 | "nixlib": { 54 | "locked": { 55 | "lastModified": 1677373009, 56 | "narHash": "sha256-kxhz4QUP8tXa/yVSpEzDDZSEp9FvhzRqZzb+SeUaekw=", 57 | "owner": "nix-community", 58 | "repo": "nixpkgs.lib", 59 | "rev": "c9d4f2476046c6a7a2ce3c2118c48455bf0272ea", 60 | "type": "github" 61 | }, 62 | "original": { 63 | "owner": "nix-community", 64 | "repo": "nixpkgs.lib", 65 | "type": "github" 66 | } 67 | }, 68 | "nixos-generators": { 69 | "inputs": { 70 | "nixlib": "nixlib", 71 | "nixpkgs": [ 72 | "nixpkgs" 73 | ] 74 | }, 75 | "locked": { 76 | "lastModified": 1678351966, 77 | "narHash": "sha256-tRFEU0lu3imZb3dtELBY+UbEhWXbb0xlBrsIlpICb+A=", 78 | "owner": "nix-community", 79 | "repo": "nixos-generators", 80 | "rev": "0c043999b16caba6fb571af2d228775729829943", 81 | "type": "github" 82 | }, 83 | "original": { 84 | "owner": "nix-community", 85 | "repo": "nixos-generators", 86 | "type": "github" 87 | } 88 | }, 89 | "nixpkgs": { 90 | "locked": { 91 | "lastModified": 1678230755, 92 | "narHash": "sha256-SFAXgNjNTXzcAideXcP0takfUGVft/VR5CACmYHg+Fc=", 93 | "owner": "NixOS", 94 | "repo": "nixpkgs", 95 | "rev": "a7cc81913bb3cd1ef05ed0ece048b773e1839e51", 96 | "type": "github" 97 | }, 98 | "original": { 99 | "owner": 
"NixOS", 100 | "ref": "nixos-22.11", 101 | "repo": "nixpkgs", 102 | "type": "github" 103 | } 104 | }, 105 | "poetry2nix-flake": { 106 | "inputs": { 107 | "flake-utils": "flake-utils_2", 108 | "nixpkgs": [ 109 | "nixpkgs" 110 | ] 111 | }, 112 | "locked": { 113 | "lastModified": 1678135815, 114 | "narHash": "sha256-U9rGLDafi7CcrNuYD1LkM3LSN2rZ5zgF0GeInrAihAE=", 115 | "owner": "nix-community", 116 | "repo": "poetry2nix", 117 | "rev": "4c424edc8a546952e640f2a3017354b05bf15f0b", 118 | "type": "github" 119 | }, 120 | "original": { 121 | "owner": "nix-community", 122 | "repo": "poetry2nix", 123 | "type": "github" 124 | } 125 | }, 126 | "root": { 127 | "inputs": { 128 | "flake-utils": "flake-utils", 129 | "gitignore": "gitignore", 130 | "nixos-generators": "nixos-generators", 131 | "nixpkgs": "nixpkgs", 132 | "poetry2nix-flake": "poetry2nix-flake", 133 | "rosette-packages": "rosette-packages" 134 | } 135 | }, 136 | "rosette-packages": { 137 | "locked": { 138 | "lastModified": 1630400035, 139 | "narHash": "sha256-MWaVOCzuFwp09wZIW9iHq5wWen5C69I940N1swZLEQ0=", 140 | "owner": "input-output-hk", 141 | "repo": "empty-flake", 142 | "rev": "2040a05b67bf9a669ce17eca56beb14b4206a99a", 143 | "type": "github" 144 | }, 145 | "original": { 146 | "owner": "input-output-hk", 147 | "repo": "empty-flake", 148 | "rev": "2040a05b67bf9a669ce17eca56beb14b4206a99a", 149 | "type": "github" 150 | } 151 | } 152 | }, 153 | "root": "root", 154 | "version": 7 155 | } 156 | -------------------------------------------------------------------------------- /flake.nix: -------------------------------------------------------------------------------- 1 | { 2 | description = "katara"; 3 | 4 | inputs.flake-utils.url = "github:numtide/flake-utils"; 5 | inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11"; 6 | inputs.poetry2nix-flake = { 7 | url = "github:nix-community/poetry2nix"; 8 | inputs.nixpkgs.follows = "nixpkgs"; 9 | }; 10 | 11 | inputs.nixos-generators = { 12 | url = "github:nix-community/nixos-generators"; 13 | inputs.nixpkgs.follows = "nixpkgs"; 14 | }; 15 | 16 | inputs.gitignore = { 17 | url = "github:hercules-ci/gitignore.nix"; 18 | inputs.nixpkgs.follows = "nixpkgs"; 19 | }; 20 | 21 | inputs.rosette-packages.url = "github:input-output-hk/empty-flake?rev=2040a05b67bf9a669ce17eca56beb14b4206a99a"; 22 | 23 | outputs = { self, nixpkgs, flake-utils, poetry2nix-flake, nixos-generators, gitignore, rosette-packages }: (flake-utils.lib.eachDefaultSystem (system: 24 | with import nixpkgs { 25 | inherit system; 26 | overlays = [ poetry2nix-flake.overlay ]; 27 | }; 28 | 29 | { 30 | devShell = mkShell { 31 | buildInputs = [ 32 | (poetry2nix.mkPoetryEnv { 33 | python = python38; 34 | projectDir = ./.; 35 | 36 | overrides = poetry2nix.overrides.withDefaults (_: poetrySuper: { 37 | metalift = poetrySuper.metalift.overrideAttrs(_: super: { 38 | nativeBuildInputs = super.nativeBuildInputs ++ [ poetrySuper.poetry ]; 39 | }); 40 | 41 | autoflake = poetrySuper.autoflake.overrideAttrs(_: super: { 42 | nativeBuildInputs = super.nativeBuildInputs ++ [ poetrySuper.hatchling ]; 43 | }); 44 | }); 45 | }) 46 | 47 | cvc5 48 | cmake 49 | llvm_11 50 | clang_11 51 | ]; 52 | }; 53 | } 54 | )) // { 55 | packages.x86_64-linux = { 56 | # nix build .#vm-nogui --override-input rosette-packages ../rosette-packages 57 | vm-nogui = nixos-generators.nixosGenerate { 58 | system = "x86_64-linux"; 59 | modules = [ 60 | ({ pkgs, ... 
}: { 61 | nixpkgs.overlays = [ poetry2nix-flake.overlay ]; 62 | }) 63 | ./artifact.nix 64 | ]; 65 | format = "vm-nogui"; 66 | 67 | specialArgs = { 68 | gitignore = gitignore; 69 | rosette-packages = rosette-packages; 70 | }; 71 | }; 72 | }; 73 | }; 74 | } 75 | -------------------------------------------------------------------------------- /headers/array.h: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | template 4 | struct Array 5 | { 6 | std::vector contents; 7 | }; 8 | 9 | template 10 | using Array = Array *; 11 | 12 | 13 | template 14 | int arrayLength (Array l) 15 | { 16 | return l->contents.size(); 17 | } 18 | 19 | template 20 | Array newarray() 21 | { 22 | //return (Array)malloc(sizeof(struct Array)); 23 | return new Array(100); 24 | } 25 | 26 | template 27 | T arrayGet (Array l, int i) 28 | { 29 | return l->contents[i]; 30 | } 31 | 32 | template 33 | void arraySet (Array l, int i, int v) 34 | { 35 | l->contents[i] = v ; 36 | } -------------------------------------------------------------------------------- /headers/list.h: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | template 4 | struct list 5 | { 6 | std::vector contents; 7 | }; 8 | 9 | template 10 | using List = list *; 11 | 12 | 13 | template 14 | int listLength (List l) 15 | { 16 | return l->contents.size(); 17 | } 18 | 19 | template 20 | List newList() 21 | { 22 | //return (List)malloc(sizeof(struct list)); 23 | return new list(); 24 | } 25 | 26 | template 27 | T listGet (List l, int i) 28 | { 29 | return l->contents[i]; 30 | } 31 | 32 | 33 | template 34 | List listAppend (List in, T e) 35 | { 36 | List r = newList(); 37 | for (int i = 0; i < listLength(in); ++i) 38 | r->contents.push_back(listGet(in, i)); 39 | r->contents.push_back(e); 40 | return r; 41 | } 42 | 43 | template 44 | List listConcat (List in, List e) 45 | { 46 | List r = newList(); 47 | for (int i = 0; i < listLength(in); ++i) 48 | r->contents.push_back(listGet(in, i)); 49 | for (int i = 0; i < listLength(e); ++i) 50 | r->contents.push_back(listGet(e, i)); 51 | return r; 52 | } 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /headers/set.h: -------------------------------------------------------------------------------- 1 | #include 2 | 3 | typedef struct set {} set; 4 | 5 | set* set_create(); 6 | set* set_add(set* s, int x); 7 | set* set_remove(set* s, int x); 8 | int set_contains(set* s, int v); 9 | -------------------------------------------------------------------------------- /headers/tuples.h: -------------------------------------------------------------------------------- 1 | #include 2 | template 3 | struct tup 4 | { 5 | std::tuple contents; 6 | }; 7 | template 8 | using Tuple = tup *; 9 | 10 | template 11 | Tuple newTuple() 12 | { 13 | 14 | return new tup(); 15 | } 16 | template 17 | Tuple MakeTuple(T...args) 18 | { 19 | Tuple r = newTuple(); 20 | r->contents = std::make_tuple(args...); 21 | return r; 22 | } 23 | 24 | //template 25 | template 26 | typename std::enable_if<(I < sizeof...(T)), 27 | int>::type 28 | tupleGet(Tuple t, int i) 29 | { 30 | 31 | return std::get(t->contents); 32 | 33 | // switch (i) { 34 | // case 0: return get(t->contents); 35 | // case 1: return get<1>(t->contents); 36 | // } 37 | } 38 | 39 | 40 | -------------------------------------------------------------------------------- /iso-racket-links.rktd: 
-------------------------------------------------------------------------------- 1 | ((root (#"pkgs" #"rosette")) 2 | ("custom-load" (#"pkgs" #"custom-load")) 3 | (root (#"pkgs" #"rfc6455"))) 4 | -------------------------------------------------------------------------------- /katara-dark.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hydro-project/katara/b5c0e0cb24e1890651f83d1718bc503dff18405b/katara-dark.png -------------------------------------------------------------------------------- /katara-light.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hydro-project/katara/b5c0e0cb24e1890651f83d1718bc503dff18405b/katara-light.png -------------------------------------------------------------------------------- /katara/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/hydro-project/katara/b5c0e0cb24e1890651f83d1718bc503dff18405b/katara/__init__.py -------------------------------------------------------------------------------- /katara/aci.py: -------------------------------------------------------------------------------- 1 | import sys 2 | 3 | from metalift.analysis_new import VariableTracker, analyze as analyze_new 4 | from metalift.ir import * 5 | from metalift.smt_util import toSMT 6 | 7 | import subprocess 8 | from metalift.synthesize_cvc5 import generateAST, toExpr 9 | 10 | import typing 11 | from typing import Any 12 | 13 | 14 | def check_aci(filename: str, fnNameBase: str, loopsFile: str, cvcPath: str) -> None: 15 | """Check if the actor is commutative and idempotent. 16 | 17 | Args: 18 | filename (str): path to the file containing the actor 19 | fnNameBase (str): name of the actor 20 | loopsFile (str): path to the file containing the loop information 21 | cvcPath (str): path to the cvc5 executable 22 | """ 23 | check_c(filename, fnNameBase, loopsFile, cvcPath) 24 | check_i(filename, fnNameBase, loopsFile, cvcPath) 25 | 26 | 27 | def check_c(filename: str, fnNameBase: str, loopsFile: str, cvcPath: str) -> None: 28 | """Check if the actor is commutative. 
29 | 30 | Args: 31 | filename (str): path to the file containing the actor 32 | fnNameBase (str): name of the actor 33 | loopsFile (str): path to the file containing the loop information 34 | cvcPath (str): path to the cvc5 executable 35 | """ 36 | state_transition_analysis = analyze_new( 37 | filename, fnNameBase + "_next_state", loopsFile 38 | ) 39 | 40 | tracker = VariableTracker() 41 | 42 | initial_state = tracker.variable( 43 | "initial_state", state_transition_analysis.arguments[0].type 44 | ) 45 | 46 | op1_group = tracker.group("op1") 47 | op1 = [ 48 | op1_group.variable(v.name(), v.type) 49 | for v in state_transition_analysis.arguments[1:] 50 | ] 51 | 52 | op2_group = tracker.group("op2") 53 | op2 = [ 54 | op2_group.variable(v.name(), v.type) 55 | for v in state_transition_analysis.arguments[1:] 56 | ] 57 | 58 | afterState_0_op1 = tracker.variable( 59 | "afterState_0_op1", state_transition_analysis.arguments[0].type 60 | ) 61 | afterState_0_op2 = tracker.variable( 62 | "afterState_0_op2", state_transition_analysis.arguments[0].type 63 | ) 64 | 65 | afterState_1_op2 = tracker.variable( 66 | "afterState_1_op2", state_transition_analysis.arguments[0].type 67 | ) 68 | afterState_1_op1 = tracker.variable( 69 | "afterState_1_op1", state_transition_analysis.arguments[0].type 70 | ) 71 | 72 | vc = state_transition_analysis.call(initial_state, *op1)( 73 | tracker, 74 | lambda obj0_after_op1: Implies( 75 | Eq(obj0_after_op1, afterState_0_op1), 76 | state_transition_analysis.call(obj0_after_op1, *op2)( 77 | tracker, 78 | lambda obj0_after_op2: Implies( 79 | Eq(obj0_after_op2, afterState_0_op2), 80 | state_transition_analysis.call(initial_state, *op2)( 81 | tracker, 82 | lambda obj1_after_op2: Implies( 83 | Eq(obj1_after_op2, afterState_1_op2), 84 | state_transition_analysis.call(obj1_after_op2, *op1)( 85 | tracker, 86 | lambda obj1_after_op1: Implies( 87 | Eq(obj1_after_op1, afterState_1_op1), 88 | Eq(obj0_after_op2, obj1_after_op1), 89 | ), 90 | ), 91 | ), 92 | ), 93 | ), 94 | ), 95 | ), 96 | ) 97 | 98 | toSMT( 99 | [], 100 | set(tracker.all()), 101 | [], 102 | [], 103 | vc, 104 | "./synthesisLogs/aci-test.smt", 105 | [], 106 | [], 107 | ) 108 | 109 | procVerify = subprocess.run( 110 | [ 111 | cvcPath, 112 | "--lang=smt", 113 | "--produce-models", 114 | "--tlimit=100000", 115 | "./synthesisLogs/aci-test.smt", 116 | ], 117 | stdout=subprocess.PIPE, 118 | ) 119 | 120 | procOutput = procVerify.stdout 121 | resultVerify = procOutput.decode("utf-8").split("\n") 122 | 123 | if resultVerify[0] == "sat" or resultVerify[0] == "unknown": 124 | print("Counterexample Found for Commutativity Check") 125 | print(f"Operation 1: {[lookup_var(v, resultVerify) for v in op1]}") 126 | print(f"Operation 2: {[lookup_var(v, resultVerify) for v in op2]}") 127 | print(f"Initial State: {lookup_var(initial_state, resultVerify)}") 128 | print() 129 | print(f"Actor 1 (after op 1): {lookup_var(afterState_0_op1, resultVerify)}") 130 | print(f"Actor 1 (after op 1 + 2): {lookup_var(afterState_0_op2, resultVerify)}") 131 | print() 132 | print(f"Actor 2 (after op 2): {lookup_var(afterState_1_op2, resultVerify)}") 133 | print(f"Actor 2 (after op 2 + 1): {lookup_var(afterState_1_op1, resultVerify)}") 134 | else: 135 | print("Actor is commutative") 136 | 137 | 138 | def check_i(filename: str, fnNameBase: str, loopsFile: str, cvcPath: str) -> None: 139 | """Check if the actor is idempotent. 
140 | 141 | Args: 142 | filename (str): path to the file containing the actor 143 | fnNameBase (str): name of the actor 144 | loopsFile (str): path to the file containing the loop information 145 | cvcPath (str): path to the cvc5 executable 146 | """ 147 | 148 | state_transition_analysis = analyze_new( 149 | filename, fnNameBase + "_next_state", loopsFile 150 | ) 151 | 152 | tracker = VariableTracker() 153 | 154 | initial_state = tracker.variable( 155 | "initial_state", state_transition_analysis.arguments[0].type 156 | ) 157 | 158 | op_group = tracker.group("op") 159 | op = [ 160 | op_group.variable(v.name(), v.type) 161 | for v in state_transition_analysis.arguments[1:] 162 | ] 163 | 164 | afterState_op = tracker.variable( 165 | "afterState_op", state_transition_analysis.arguments[0].type 166 | ) 167 | afterState_op_op = tracker.variable( 168 | "afterState_op_op", state_transition_analysis.arguments[0].type 169 | ) 170 | 171 | vc = state_transition_analysis.call(initial_state, *op)( 172 | tracker, 173 | lambda obj0_after_op: Implies( 174 | Eq(obj0_after_op, afterState_op), 175 | state_transition_analysis.call(obj0_after_op, *op)( 176 | tracker, 177 | lambda obj0_after_op_op: Implies( 178 | Eq(obj0_after_op_op, afterState_op_op), 179 | Eq(obj0_after_op, obj0_after_op_op), 180 | ), 181 | ), 182 | ), 183 | ) 184 | 185 | toSMT( 186 | [], 187 | set(tracker.all()), 188 | [], 189 | [], 190 | vc, 191 | "./synthesisLogs/idempotence-test.smt", 192 | [], 193 | [], 194 | ) 195 | 196 | procVerify = subprocess.run( 197 | [ 198 | cvcPath, 199 | "--lang=smt", 200 | "--produce-models", 201 | "--tlimit=100000", 202 | "./synthesisLogs/idempotence-test.smt", 203 | ], 204 | stdout=subprocess.PIPE, 205 | ) 206 | 207 | procOutput = procVerify.stdout 208 | resultVerify = procOutput.decode("utf-8").split("\n") 209 | 210 | if resultVerify[0] == "sat" or resultVerify[0] == "unknown": 211 | print("Counterexample Found for Idempotence Check") 212 | print(f"Operations: {[lookup_var(v, resultVerify) for v in op]}") 213 | print(f"Initial State: {lookup_var(initial_state, resultVerify)}") 214 | print() 215 | print(f"After 1 operation: {lookup_var(afterState_op, resultVerify)}") 216 | print( 217 | f"After 2 operations (op + op): {lookup_var(afterState_op_op, resultVerify)}" 218 | ) 219 | else: 220 | print("Actor is Idempotent") 221 | 222 | pass 223 | 224 | 225 | def lookup_var(v: Expr, resultVerify: typing.List[Any]) -> Expr: 226 | """Given a variable and a list of lines from the CVC4 output, find the function 227 | which defines the variable and return the value of the variable in the counterexample. 
228 | 229 | Args: 230 | v (Expr): variable to look up 231 | resultVerify (typing.List[Any]): list of lines from CVC4 output 232 | 233 | Raises: 234 | Exception: if the variable cannot be found 235 | 236 | Returns: 237 | Expr: value of the variable in the counterexample 238 | """ 239 | for line in resultVerify: 240 | if line.startswith("(define-fun " + v.args[0] + " "): 241 | return toExpr(generateAST(line)[0][4], [], [], {}, {}) 242 | raise Exception("Could not find variable " + v.args[0]) 243 | 244 | 245 | if __name__ == "__main__": 246 | filename = f"tests/{sys.argv[1]}.ll" 247 | fnNameBase = "test" 248 | loopsFile = f"tests/{sys.argv[1]}.loops" 249 | cvcPath = "cvc5" 250 | 251 | if len(sys.argv) > 2: 252 | checkType = sys.argv[2] # "c" for commutativity, "i" for idempotence 253 | if checkType == "i": 254 | check_i(filename, fnNameBase, loopsFile, cvcPath) 255 | elif checkType == "c": 256 | check_c(filename, fnNameBase, loopsFile, cvcPath) 257 | else: 258 | check_aci(filename, fnNameBase, loopsFile, cvcPath) 259 | -------------------------------------------------------------------------------- /katara/auto_grammar.py: -------------------------------------------------------------------------------- 1 | from katara import lattices 2 | from katara.lattices import Lattice 3 | from metalift.ir import * 4 | 5 | import typing 6 | from typing import Union, Dict 7 | from llvmlite.binding import ValueRef 8 | 9 | equality_supported_types = [Bool(), Int(), ClockInt(), EnumInt(), OpaqueInt()] 10 | comparison_supported_types = [Int(), ClockInt(), OpaqueInt()] 11 | 12 | 13 | def get_expansions( 14 | input_types: typing.List[Type], 15 | available_types: typing.List[Type], 16 | out_types: typing.List[Type], 17 | allow_node_id_reductions: bool = False, 18 | ) -> Dict[Type, typing.List[typing.Callable[[typing.Callable[[Type], Expr]], Expr]]]: 19 | out: Dict[ 20 | Type, typing.List[typing.Callable[[typing.Callable[[Type], Expr]], Expr]] 21 | ] = { 22 | Bool(): [ 23 | lambda get: BoolLit(False), 24 | lambda get: BoolLit(True), 25 | lambda get: And(get(Bool()), get(Bool())), 26 | lambda get: Or(get(Bool()), get(Bool())), 27 | lambda get: Not(get(Bool())), 28 | *[ 29 | (lambda t: lambda get: Eq(get(t), get(t)))(t) 30 | for t in equality_supported_types 31 | ], 32 | *[ 33 | (lambda t: lambda get: Gt(get(t), get(t)))(t) 34 | for t in comparison_supported_types 35 | ], 36 | *[ 37 | (lambda t: lambda get: Ge(get(t), get(t)))(t) 38 | for t in comparison_supported_types 39 | ], 40 | ], 41 | } 42 | 43 | def gen_set_ops(t: Type) -> None: 44 | out[SetT(t)] = [ 45 | lambda get: Call("set-minus", SetT(t), get(SetT(t)), get(SetT(t))), 46 | lambda get: Call("set-union", SetT(t), get(SetT(t)), get(SetT(t))), 47 | lambda get: Call("set-insert", SetT(t), get(t), get(SetT(t))), 48 | ] 49 | 50 | out[Bool()].append(lambda get: Eq(get(SetT(t)), get(SetT(t)))) 51 | out[Bool()].append(lambda get: Eq(get(SetT(t)), Call("set-create", SetT(t)))) 52 | out[Bool()].append( 53 | lambda get: Call("set-subset", Bool(), get(SetT(t)), get(SetT(t))) 54 | ) 55 | out[Bool()].append(lambda get: Call("set-member", Bool(), get(t), get(SetT(t)))) 56 | 57 | for t in equality_supported_types: 58 | if t in input_types: 59 | gen_set_ops(t) 60 | else: 61 | out[SetT(t)] = [] 62 | 63 | if SetT(t) in out_types: 64 | out[SetT(t)] += [ 65 | ((lambda t: lambda get: Call("set-create", SetT(t)))(t)) 66 | if t in input_types 67 | else ((lambda t: lambda get: Call("set-create", SetT(get(t).type)))(t)), 68 | (lambda t: lambda get: Call("set-singleton", SetT(t), 
get(t)))(t), 69 | ] 70 | 71 | def gen_map_ops(k: Type, v: Type, allow_zero_create: bool) -> None: 72 | if MapT(k, v) in out_types: 73 | if MapT(k, v) not in out: 74 | out[MapT(k, v)] = [] 75 | out[MapT(k, v)] += [ 76 | (lambda get: Call("map-create", MapT(k, v))) 77 | if allow_zero_create 78 | else (lambda get: Call("map-create", MapT(get(k).type, v))), 79 | lambda get: Call("map-singleton", MapT(k, v), get(k), get(v)), 80 | ] 81 | 82 | if v not in out: 83 | out[v] = [] 84 | 85 | if MapT(k, v) in input_types: 86 | if v.erase() == Int(): 87 | out[v] += [ 88 | lambda get: Call("map-get", v, get(MapT(k, v)), get(k), Lit(0, v)), 89 | ] 90 | 91 | if k == NodeIDInt() and allow_node_id_reductions: 92 | merge_a = Var("merge_into", v) 93 | merge_b = Var("merge_v", v) 94 | 95 | if v == Int(): 96 | out[v] += [ 97 | lambda get: Call( 98 | "reduce_int", 99 | v, 100 | Call("map-values", ListT(v), get(MapT(k, v))), 101 | Lambda( 102 | v, 103 | Add(merge_a, merge_b), 104 | merge_b, 105 | merge_a, 106 | ), 107 | IntLit(0), 108 | ) 109 | ] 110 | elif v == Bool(): 111 | out[v] += [ 112 | lambda get: Call( 113 | "map-get", 114 | v, 115 | get(MapT(k, v)), 116 | get(k), 117 | Choose(BoolLit(False), BoolLit(True)), 118 | ), 119 | ] 120 | 121 | if k == NodeIDInt() and allow_node_id_reductions: 122 | merge_a = Var("merge_into", v) 123 | merge_b = Var("merge_v", v) 124 | 125 | out[v] += [ 126 | lambda get: Call( 127 | "reduce_bool", 128 | v, 129 | Call("map-values", ListT(v), get(MapT(k, v))), 130 | Lambda( 131 | v, 132 | Or(merge_a, merge_b), 133 | merge_b, 134 | merge_a, 135 | ), 136 | BoolLit(False), 137 | ), 138 | lambda get: Call( 139 | "reduce_bool", 140 | v, 141 | Call("map-values", ListT(v), get(MapT(k, v))), 142 | Lambda( 143 | v, 144 | And(merge_a, merge_b), 145 | merge_b, 146 | merge_a, 147 | ), 148 | BoolLit(True), 149 | ), 150 | ] 151 | elif v.name == "Map": 152 | out[v] += [ 153 | lambda get: Call( 154 | "map-get", v, get(MapT(k, v)), get(k), Call("map-create", v) 155 | ), 156 | ] 157 | else: 158 | raise Exception("NYI") 159 | 160 | for t in available_types: 161 | if t.name == "Map": 162 | gen_map_ops(t.args[0], t.args[1], t.args[0] in input_types) 163 | 164 | if Int() in input_types: 165 | if Int() not in out: 166 | out[Int()] = [] 167 | out[Int()] += [ 168 | lambda get: IntLit(0), 169 | lambda get: IntLit(1), 170 | lambda get: Add(get(Int()), get(Int())), 171 | lambda get: Sub(get(Int()), get(Int())), 172 | ] 173 | 174 | if EnumInt() in available_types: 175 | if EnumInt() not in out: 176 | out[EnumInt()] = [] 177 | out[EnumInt()] += [(lambda i: lambda get: EnumIntLit(i))(i) for i in range(2)] 178 | 179 | if ClockInt() in input_types: 180 | if ClockInt() not in out: 181 | out[ClockInt()] = [] 182 | out[ClockInt()] += [lambda get: Lit(0, ClockInt())] 183 | 184 | return out 185 | 186 | 187 | def all_node_id_gets( 188 | input: Expr, 189 | node_id: Expr, 190 | args: Dict[Type, Expr], 191 | ) -> typing.List[Expr]: 192 | if input.type.name == "Map": 193 | v = input.type.args[1] 194 | default: typing.Optional[Expr] = None 195 | if v.erase() == Int(): 196 | default = Lit(0, v) 197 | elif v == Bool(): 198 | default = Choose(BoolLit(False), BoolLit(True)) 199 | elif v.name == "Map": 200 | default = Call("map-create", v) 201 | else: 202 | raise Exception("NYI") 203 | 204 | if input.type.args[0] == NodeIDInt(): 205 | return [Call("map-get", v, input, node_id, default)] 206 | elif input.type.args[0] in args: 207 | return all_node_id_gets( 208 | Call("map-get", v, input, args[input.type.args[0]], default), 209 | 
node_id, 210 | args, 211 | ) 212 | else: 213 | return [] 214 | elif input.type.name == "Tuple": 215 | out = [] 216 | for i in range(len(input.type.args)): 217 | out += all_node_id_gets(TupleGet(input, IntLit(i)), node_id, args) 218 | return out 219 | else: 220 | return [] 221 | 222 | 223 | def auto_grammar( 224 | out_type: typing.Optional[Type], 225 | depth: int, 226 | *inputs: Union[Expr, ValueRef], 227 | enable_ite: bool = False, 228 | allow_node_id_reductions: bool = False, 229 | ) -> Expr: 230 | if out_type and out_type.name == "Tuple": 231 | return Tuple( 232 | *[ 233 | auto_grammar( 234 | t, 235 | depth, 236 | *inputs, 237 | enable_ite=enable_ite, 238 | allow_node_id_reductions=allow_node_id_reductions, 239 | ) 240 | for t in out_type.args 241 | ] 242 | ) 243 | 244 | input_pool: Dict[Type, typing.List[Expr]] = {} 245 | 246 | def extract_inputs(input_type: Type, input: typing.Optional[Expr]) -> None: 247 | if input_type.name == "Tuple": 248 | for i, t in enumerate(input_type.args): 249 | if input != None: 250 | extract_inputs(t, TupleGet(input, IntLit(i))) # type: ignore 251 | else: 252 | extract_inputs(t, None) 253 | else: 254 | if not input_type in input_pool: 255 | input_pool[input_type] = [] 256 | if input != None: 257 | input_pool[input_type].append(input) # type: ignore 258 | if input_type.name == "Set": 259 | extract_inputs(input_type.args[0], None) 260 | elif input_type.name == "Map": 261 | extract_inputs(input_type.args[0], None) 262 | extract_inputs(input_type.args[1], None) 263 | 264 | for input in inputs: 265 | input_type = parseTypeRef(input.type) 266 | extract_inputs(input_type, input) 267 | 268 | input_types = list(input_pool.keys()) 269 | 270 | if out_type and out_type not in input_pool: 271 | extract_inputs(out_type, None) 272 | 273 | out_types = list(set(input_pool.keys()) - set(input_types)) 274 | 275 | expansions = get_expansions( 276 | input_types, list(input_pool.keys()), out_types, allow_node_id_reductions 277 | ) 278 | 279 | pool: Dict[Type, Expr] = {} 280 | for t, exprs in input_pool.items(): 281 | zero_input_expansions = [] 282 | if t in expansions: 283 | for e in expansions[t]: 284 | try: 285 | zero_input_expansions.append(e(lambda t: dict()[t])) # type: ignore 286 | except KeyError: 287 | pass 288 | if (len(exprs) + len(zero_input_expansions)) > 0: 289 | pool[t] = Choose(*exprs, *zero_input_expansions) 290 | 291 | for i in range(depth): 292 | next_pool = dict(pool) 293 | for t, expansion_list in expansions.items(): 294 | new_elements = [] 295 | for expansion in expansion_list: 296 | try: 297 | new_elements.append(expansion(lambda t: pool[t])) 298 | except KeyError: 299 | pass 300 | 301 | if ( 302 | t in next_pool 303 | and isinstance(next_pool[t], Expr) 304 | and isinstance(next_pool[t], Choose) 305 | ): 306 | existing_set = set(next_pool[t].args) 307 | new_elements = [e for e in new_elements if e not in existing_set] 308 | 309 | if len(new_elements) > 0: 310 | if t in pool: 311 | next_pool[t] = Choose(next_pool[t], *new_elements) 312 | else: 313 | next_pool[t] = Choose(*new_elements) 314 | 315 | if enable_ite and Bool() in pool: 316 | for t in pool.keys(): 317 | if t.name != "Set" and t.name != "Map": 318 | next_pool[t] = Choose( 319 | next_pool[t], Ite(pool[Bool()], pool[t], pool[t]) 320 | ) 321 | 322 | pool = next_pool 323 | 324 | if out_type: 325 | return pool[out_type] 326 | else: 327 | return pool # type: ignore 328 | 329 | 330 | def expand_lattice_logic(*inputs: typing.Tuple[Expr, Lattice]) -> typing.List[Expr]: 331 | lattice_to_exprs: 
typing.Dict[Lattice, typing.List[Expr]] = {} 332 | for input, lattice in inputs: 333 | if lattice not in lattice_to_exprs: 334 | lattice_to_exprs[lattice] = [] 335 | lattice_to_exprs[lattice].append(input) 336 | 337 | next_pool = dict(lattice_to_exprs) 338 | for lattice in lattice_to_exprs.keys(): 339 | if isinstance(lattice, lattices.Map): 340 | merge_a = Var("merge_a", lattice.valueType.ir_type()) 341 | merge_b = Var("merge_b", lattice.valueType.ir_type()) 342 | for value in lattice_to_exprs[lattice]: 343 | value_max = Call( # does the remove set have any concurrent values? 344 | "reduce_bool" 345 | if lattice.valueType.ir_type() == Bool() 346 | else "reduce_int", 347 | lattice.valueType.ir_type(), 348 | Call("map-values", ListT(lattice.valueType.ir_type()), value), 349 | Lambda( 350 | lattice.valueType.ir_type(), 351 | lattice.valueType.merge(merge_a, merge_b), 352 | merge_a, 353 | merge_b, 354 | ), 355 | lattice.valueType.bottom(), 356 | ) 357 | 358 | if lattice.valueType not in next_pool: 359 | next_pool[lattice.valueType] = [] 360 | if value_max not in next_pool[lattice.valueType]: 361 | next_pool[lattice.valueType].append(value_max) 362 | 363 | lattice_to_exprs = next_pool 364 | next_pool = dict(lattice_to_exprs) 365 | 366 | for lattice in lattice_to_exprs.keys(): 367 | choices = Choose(*lattice_to_exprs[lattice]) 368 | lattice_to_exprs[lattice].append(lattice.merge(choices, choices)) 369 | 370 | lattice_to_exprs = next_pool 371 | 372 | return [Choose(*lattice_to_exprs[lattice]) for lattice in lattice_to_exprs.keys()] 373 | -------------------------------------------------------------------------------- /katara/lattices.py: -------------------------------------------------------------------------------- 1 | from dataclasses import dataclass 2 | from metalift import ir 3 | import typing 4 | import itertools 5 | 6 | 7 | class Lattice: 8 | def ir_type(self) -> ir.Type: 9 | raise NotImplementedError() 10 | 11 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr: 12 | raise NotImplementedError() 13 | 14 | def bottom(self) -> ir.Expr: 15 | raise NotImplementedError() 16 | 17 | def check_is_valid(self, v: ir.Expr) -> ir.Expr: 18 | raise NotImplementedError() 19 | 20 | def has_node_id(self) -> bool: 21 | raise NotImplementedError() 22 | 23 | 24 | @dataclass(frozen=True) 25 | class MaxInt(Lattice): 26 | int_type: ir.Type = ir.Int() 27 | 28 | def ir_type(self) -> ir.Type: 29 | return self.int_type 30 | 31 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr: 32 | a_var = ir.Var("max_merge_a", self.int_type) 33 | b_var = ir.Var("max_merge_b", self.int_type) 34 | return ir.Let( 35 | a_var, a, ir.Let(b_var, b, ir.Ite(ir.Ge(a_var, b_var), a_var, b_var)) 36 | ) 37 | 38 | def bottom(self) -> ir.Expr: 39 | return ir.Lit(0, self.int_type) 40 | 41 | def check_is_valid(self, v: ir.Expr) -> ir.Expr: 42 | return ir.Ge(v, self.bottom()) 43 | 44 | def has_node_id(self) -> bool: 45 | return self.int_type == ir.NodeIDInt() 46 | 47 | 48 | @dataclass(frozen=True) 49 | class OrBool(Lattice): 50 | def ir_type(self) -> ir.Type: 51 | return ir.Bool() 52 | 53 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr: 54 | return ir.Or(a, b) 55 | 56 | def bottom(self) -> ir.Expr: 57 | return ir.BoolLit(False) 58 | 59 | def check_is_valid(self, v: ir.Expr) -> ir.Expr: 60 | return ir.BoolLit(True) 61 | 62 | def has_node_id(self) -> bool: 63 | return False 64 | 65 | 66 | @dataclass(frozen=True) 67 | class Set(Lattice): 68 | innerType: ir.Type 69 | 70 | def ir_type(self) -> ir.Type: 71 | return ir.SetT(self.innerType) 72 | 
73 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr: 74 | return ir.Call("set-union", ir.SetT(self.innerType), a, b) 75 | 76 | def bottom(self) -> ir.Expr: 77 | return ir.Call("set-create", ir.SetT(self.innerType)) 78 | 79 | def check_is_valid(self, v: ir.Expr) -> ir.Expr: 80 | return ir.BoolLit(True) 81 | 82 | def has_node_id(self) -> bool: 83 | return self.innerType == ir.NodeIDInt() 84 | 85 | 86 | @dataclass(frozen=True) 87 | class Map(Lattice): 88 | keyType: ir.Type 89 | valueType: Lattice 90 | 91 | def ir_type(self) -> ir.Type: 92 | return ir.MapT(self.keyType, self.valueType.ir_type()) 93 | 94 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr: 95 | v_a = ir.Var("map_merge_a", self.valueType.ir_type()) 96 | v_b = ir.Var("map_merge_b", self.valueType.ir_type()) 97 | 98 | return ir.Call( 99 | "map-union", 100 | ir.MapT(self.keyType, self.valueType.ir_type()), 101 | a, 102 | b, 103 | ir.Lambda( 104 | self.valueType.ir_type(), self.valueType.merge(v_a, v_b), v_a, v_b 105 | ), 106 | ) 107 | 108 | def bottom(self) -> ir.Expr: 109 | return ir.Call("map-create", self.ir_type()) 110 | 111 | def check_is_valid(self, v: ir.Expr) -> ir.Expr: 112 | merge_a = ir.Var("merge_into", ir.Bool()) 113 | merge_b = ir.Var("merge_v", self.valueType.ir_type()) 114 | 115 | return ir.Call( 116 | "reduce_bool", 117 | ir.Bool(), 118 | ir.Call("map-values", ir.ListT(self.valueType.ir_type()), v), 119 | ir.Lambda( 120 | ir.Bool(), 121 | ir.And(merge_a, self.valueType.check_is_valid(merge_b)), 122 | merge_b, 123 | merge_a, 124 | ), 125 | ir.BoolLit(True), 126 | ) 127 | 128 | def has_node_id(self) -> bool: 129 | return self.keyType == ir.NodeIDInt() or self.valueType.has_node_id() 130 | 131 | 132 | @dataclass(frozen=True) 133 | class LexicalProduct(Lattice): 134 | l1: Lattice 135 | l2: Lattice 136 | 137 | def ir_type(self) -> ir.Type: 138 | return ir.TupleT(self.l1.ir_type(), self.l2.ir_type()) 139 | 140 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr: 141 | mergeA = ir.Var("cascade_merge_a", a.type) 142 | mergeB = ir.Var("cascade_merge_b", b.type) 143 | 144 | keyA = ir.TupleGet(mergeA, ir.IntLit(0)) 145 | keyB = ir.TupleGet(mergeB, ir.IntLit(0)) 146 | valueA = ir.TupleGet(mergeA, ir.IntLit(1)) 147 | valueB = ir.TupleGet(mergeB, ir.IntLit(1)) 148 | 149 | keyMerged = self.l1.merge(keyA, keyB) 150 | valueMerged = self.l2.merge(valueA, valueB) 151 | 152 | return ir.Let( 153 | mergeA, 154 | a, 155 | ir.Let( 156 | mergeB, 157 | b, 158 | ir.Tuple( 159 | keyMerged, 160 | ir.Ite( 161 | ir.Or( 162 | ir.Eq(keyA, keyB), 163 | ir.And( 164 | ir.Not(ir.Eq(keyA, keyMerged)), 165 | ir.Not(ir.Eq(keyB, keyMerged)), 166 | ), 167 | ), 168 | valueMerged, 169 | self.l2.merge( 170 | ir.Ite( 171 | ir.Eq(keyA, keyMerged), 172 | valueA, 173 | valueB, 174 | ), 175 | self.l2.bottom(), 176 | ), 177 | ), 178 | ), 179 | ), 180 | ) 181 | 182 | def bottom(self) -> ir.Expr: 183 | return ir.Tuple(self.l1.bottom(), self.l2.bottom()) 184 | 185 | def check_is_valid(self, v: ir.Expr) -> ir.Expr: 186 | return ir.And( 187 | self.l1.check_is_valid(ir.TupleGet(v, ir.IntLit(0))), 188 | self.l2.check_is_valid(ir.TupleGet(v, ir.IntLit(1))), 189 | ) 190 | 191 | def has_node_id(self) -> bool: 192 | return self.l1.has_node_id() or self.l2.has_node_id() 193 | 194 | 195 | def gen_types(depth: int) -> typing.Iterator[ir.Type]: 196 | if depth == 1: 197 | yield ir.Int() 198 | yield ir.ClockInt() 199 | yield ir.EnumInt() 200 | yield ir.OpaqueInt() 201 | yield ir.NodeIDInt() 202 | yield ir.Bool() 203 | else: 204 | for innerType in gen_types(depth - 1): 205 | 
yield innerType 206 | # TODO: anything else? 207 | 208 | 209 | int_like = {ir.Int().name, ir.ClockInt().name, ir.EnumInt().name, ir.OpaqueInt().name} 210 | comparable_int = {ir.Int().name, ir.ClockInt().name, ir.OpaqueInt().name} 211 | set_supported_elem = {ir.Int().name, ir.OpaqueInt().name} 212 | map_supported_elem = {ir.OpaqueInt().name, ir.NodeIDInt().name} 213 | 214 | 215 | def gen_lattice_types(max_depth: int) -> typing.Iterator[Lattice]: 216 | if max_depth == 1: 217 | yield OrBool() 218 | 219 | for innerType in gen_types(max_depth): 220 | if innerType.name in comparable_int: 221 | yield MaxInt(innerType) 222 | 223 | if max_depth > 1: 224 | for innerLatticeType in gen_lattice_types(max_depth - 1): 225 | yield innerLatticeType 226 | 227 | for innerType in gen_types(max_depth - 1): 228 | if innerType.name in set_supported_elem: 229 | yield Set(innerType) 230 | 231 | for keyType in gen_types(max_depth - 1): 232 | if keyType.name in map_supported_elem: 233 | for valueType in gen_lattice_types(max_depth - 1): 234 | yield Map(keyType, valueType) 235 | 236 | for innerTypePair in itertools.permutations( 237 | gen_lattice_types(max_depth - 1), 2 238 | ): 239 | yield LexicalProduct(*innerTypePair) 240 | 241 | 242 | def gen_structures(max_depth: int) -> typing.Iterator[typing.Any]: 243 | cur_type_depth = 1 244 | seen = set() 245 | while cur_type_depth <= max_depth: 246 | print(f"Type depth: {cur_type_depth}") 247 | cur_tuple_size = 1 248 | while cur_tuple_size <= cur_type_depth: 249 | print(f"Tuple size: {cur_tuple_size}") 250 | for lattice_types in itertools.combinations_with_replacement( 251 | gen_lattice_types(cur_type_depth), cur_tuple_size 252 | ): 253 | if tuple(lattice_types) in seen: 254 | continue 255 | else: 256 | seen.add(tuple(lattice_types)) 257 | yield lattice_types 258 | cur_tuple_size += 1 259 | cur_type_depth += 1 260 | -------------------------------------------------------------------------------- /katara/search_structures.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | 3 | import multiprocessing as mp 4 | import multiprocessing.pool 5 | import queue 6 | from time import time 7 | import traceback 8 | import typing 9 | 10 | from katara.lattices import Lattice 11 | from metalift import process_tracker 12 | from metalift import ir 13 | from metalift.ir import Expr, FnDecl 14 | from katara.synthesis import SynthesizeFun, synthesize_crdt 15 | from metalift.synthesis_common import SynthesisFailed 16 | 17 | from typing import Any, Callable, Iterator, List, Optional, Tuple 18 | 19 | 20 | def synthesize_crdt_e2e( 21 | queue: queue.Queue[ 22 | Tuple[ 23 | int, 24 | Any, 25 | int, 26 | Optional[typing.Union[str, List[FnDecl]]], 27 | ] 28 | ], 29 | synthStateStructure: List[Lattice], 30 | initState: Callable[[Any], Expr], 31 | grammarStateInvariant: Callable[[Expr, Any, int, int], Expr], 32 | grammarSupportedCommand: Callable[[Expr, Any, Any, int, int], Expr], 33 | inOrder: Callable[[Any, Any], Expr], 34 | opPrecondition: Callable[[Any], Expr], 35 | grammar: Callable[[Expr, List[ir.Var], Any, int], Expr], 36 | grammarQuery: Callable[[str, List[ir.Var], ir.Type, int], ir.Synth], 37 | grammarEquivalence: Callable[[Expr, Expr, List[ir.Var], int], Expr], 38 | targetLang: Callable[ 39 | [], List[typing.Union[FnDecl, ir.FnDeclNonRecursive, ir.Axiom]] 40 | ], 41 | synthesize: SynthesizeFun, 42 | useOpList: bool, 43 | stateTypeHint: Optional[ir.Type], 44 | opArgTypeHint: Optional[List[ir.Type]], 45 | 
queryArgTypeHint: Optional[List[ir.Type]], 46 | queryRetTypeHint: Optional[ir.Type], 47 | baseDepth: int, 48 | filename: str, 49 | fnNameBase: str, 50 | loopsFile: str, 51 | cvcPath: str, 52 | uid: int, 53 | ) -> None: 54 | synthStateType = ir.TupleT(*[a.ir_type() for a in synthStateStructure]) 55 | 56 | try: 57 | queue.put( 58 | ( 59 | uid, 60 | synthStateStructure, 61 | baseDepth, 62 | synthesize_crdt( 63 | filename, 64 | fnNameBase, 65 | loopsFile, 66 | cvcPath, 67 | synthStateType, 68 | lambda: initState(synthStateStructure), 69 | lambda s, baseDepth, invariantBoost: grammarStateInvariant( 70 | s, synthStateStructure, baseDepth, invariantBoost 71 | ), 72 | lambda s, a, baseDepth, invariantBoost: grammarSupportedCommand( 73 | s, a, synthStateStructure, baseDepth, invariantBoost 74 | ), 75 | inOrder, 76 | opPrecondition, 77 | lambda inState, args, baseDepth: grammar( 78 | inState, args, synthStateStructure, baseDepth 79 | ), 80 | grammarQuery, 81 | grammarEquivalence, 82 | targetLang, 83 | synthesize, 84 | uid=uid, 85 | useOpList=useOpList, 86 | stateTypeHint=stateTypeHint, 87 | opArgTypeHint=opArgTypeHint, 88 | queryArgTypeHint=queryArgTypeHint, 89 | queryRetTypeHint=queryRetTypeHint, 90 | baseDepth=baseDepth, 91 | log=False, 92 | ), 93 | ) 94 | ) 95 | except SynthesisFailed: 96 | queue.put((uid, synthStateStructure, baseDepth, None)) 97 | except: 98 | queue.put((uid, synthStateStructure, baseDepth, traceback.format_exc())) 99 | 100 | 101 | def search_crdt_structures( 102 | initState: Callable[[Any], Expr], 103 | grammarStateInvariant: Callable[[Expr, Any, int, int], Expr], 104 | grammarSupportedCommand: Callable[[Expr, Any, Any, int, int], Expr], 105 | inOrder: Callable[[Any, Any], Expr], 106 | opPrecondition: Callable[[Any], Expr], 107 | grammar: Callable[[Expr, List[ir.Var], Any, int], Expr], 108 | grammarQuery: Callable[[str, List[ir.Var], ir.Type, int], ir.Synth], 109 | grammarEquivalence: Callable[[Expr, Expr, List[ir.Var], int], Expr], 110 | targetLang: Callable[ 111 | [], List[typing.Union[FnDecl, ir.FnDeclNonRecursive, ir.Axiom]] 112 | ], 113 | synthesize: SynthesizeFun, 114 | filename: str, 115 | fnNameBase: str, 116 | loopsFile: str, 117 | cvcPath: str, 118 | useOpList: bool, 119 | structureCandidates: Iterator[Tuple[int, Any]], 120 | reportFile: str, 121 | stateTypeHint: Optional[ir.Type] = None, 122 | opArgTypeHint: Optional[List[ir.Type]] = None, 123 | queryArgTypeHint: Optional[List[ir.Type]] = None, 124 | queryRetTypeHint: Optional[ir.Type] = None, 125 | maxThreads: int = mp.cpu_count(), 126 | upToUid: Optional[int] = None, 127 | exitFirstSuccess: bool = True, 128 | ) -> Tuple[Any, List[ir.Expr]]: 129 | q: queue.Queue[ 130 | Tuple[int, Any, int, Optional[typing.Union[str, List[Expr]]]] 131 | ] = queue.Queue() 132 | queue_size = 0 133 | next_uid = 0 134 | 135 | next_res_type = None 136 | next_res = None 137 | 138 | start_times = {} 139 | 140 | try: 141 | with multiprocessing.pool.ThreadPool() as pool: 142 | with open(reportFile, "w") as report: 143 | while True: 144 | while queue_size < (maxThreads // 2 if maxThreads > 1 else 1) and ( 145 | upToUid == None or next_uid < upToUid # type: ignore 146 | ): 147 | next_structure_tuple = next(structureCandidates, None) 148 | if next_structure_tuple is None: 149 | break 150 | else: 151 | baseDepth, next_structure_type = next_structure_tuple 152 | 153 | def error_callback(e: BaseException) -> None: 154 | raise e 155 | 156 | try: 157 | synthStateType = ir.TupleT( 158 | *[a.ir_type() for a in next_structure_type] 159 | ) 160 | 
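# Dry run: the synthesize_crdt call below is invoked with skipSynth=True, so it only
# checks that the grammars can be instantiated for this candidate structure (a KeyError
# means they cannot); the real synthesis job is submitted afterwards via
# pool.apply_async(synthesize_crdt_e2e, ...).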
synthesize_crdt( 161 | filename, 162 | fnNameBase, 163 | loopsFile, 164 | cvcPath, 165 | synthStateType, 166 | lambda: initState(next_structure_type), 167 | lambda s, baseDepth, invariantBoost: grammarStateInvariant( 168 | s, 169 | next_structure_type, 170 | baseDepth, 171 | invariantBoost, 172 | ), 173 | lambda s, a, baseDepth, invariantBoost: grammarSupportedCommand( 174 | s, 175 | a, 176 | next_structure_type, 177 | baseDepth, 178 | invariantBoost, 179 | ), 180 | inOrder, 181 | opPrecondition, 182 | lambda inState, args, baseDepth: grammar( 183 | inState, 184 | args, 185 | next_structure_type, 186 | baseDepth, 187 | ), 188 | grammarQuery, 189 | grammarEquivalence, 190 | targetLang, 191 | synthesize, 192 | uid=next_uid, 193 | useOpList=useOpList, 194 | stateTypeHint=stateTypeHint, 195 | opArgTypeHint=opArgTypeHint, 196 | queryArgTypeHint=queryArgTypeHint, 197 | queryRetTypeHint=queryRetTypeHint, 198 | baseDepth=baseDepth, 199 | log=False, 200 | skipSynth=True, 201 | ) 202 | except KeyError as k: 203 | # this is due to a grammar not being able to find a value 204 | continue 205 | 206 | print( 207 | f"Enqueueing #{next_uid} (structure: {next_structure_type}, base depth: {baseDepth})" 208 | ) 209 | start_times[next_uid] = time() 210 | pool.apply_async( 211 | synthesize_crdt_e2e, 212 | args=( 213 | q, 214 | next_structure_type, 215 | initState, 216 | grammarStateInvariant, 217 | grammarSupportedCommand, 218 | inOrder, 219 | opPrecondition, 220 | grammar, 221 | grammarQuery, 222 | grammarEquivalence, 223 | targetLang, 224 | synthesize, 225 | useOpList, 226 | stateTypeHint, 227 | opArgTypeHint, 228 | queryArgTypeHint, 229 | queryRetTypeHint, 230 | baseDepth, 231 | filename, 232 | fnNameBase, 233 | loopsFile, 234 | cvcPath, 235 | next_uid, 236 | ), 237 | error_callback=error_callback, 238 | ) 239 | next_uid += 1 240 | queue_size += 1 241 | 242 | if queue_size == 0: 243 | if exitFirstSuccess: 244 | raise Exception("no more structures") 245 | else: 246 | break 247 | else: 248 | (ret_uid, next_res_type, baseDepth, next_res) = q.get( 249 | block=True, timeout=None 250 | ) 251 | time_took = time() - start_times[ret_uid] 252 | report.write( 253 | f'{ret_uid},{time_took},"{str(next_res_type)}",{1},{next_res != None}\n' 254 | ) 255 | report.flush() 256 | queue_size -= 1 257 | if isinstance(next_res, str): 258 | raise Exception( 259 | "Synthesis procedure crashed, aborting\n" + next_res 260 | ) 261 | elif next_res != None: 262 | if exitFirstSuccess: 263 | break 264 | else: 265 | print( 266 | f"Failed to synthesize #{ret_uid} (structure: {next_res_type}, base depth: {baseDepth})" 267 | ) 268 | 269 | if exitFirstSuccess: 270 | if next_res == None: 271 | raise Exception("Synthesis failed") 272 | else: 273 | print( 274 | "\n========================= SYNTHESIS COMPLETE =========================\n" 275 | ) 276 | print("State Structure:", next_res_type) 277 | print("\nRuntime Logic:") 278 | print("\n\n".join([c.toRosette() for c in next_res])) # type: ignore 279 | return (next_res_type, next_res) # type: ignore 280 | else: 281 | print(f"See report file ({reportFile}) for results") 282 | return (next_res_type, []) 283 | finally: 284 | for p in process_tracker.all_processes: 285 | p.terminate() 286 | process_tracker.all_processes = [] 287 | -------------------------------------------------------------------------------- /katara/synthesis.py: -------------------------------------------------------------------------------- 1 | import os 2 | 3 | from metalift.analysis_new import VariableTracker, analyze 4 | from 
metalift.ir import * 5 | 6 | import typing 7 | from typing import Callable, Union, Protocol 8 | 9 | from metalift.synthesis_common import SynthesisFailed, VerificationFailed 10 | 11 | 12 | def observeEquivalence( 13 | inputState: Expr, synthState: Expr, queryParams: typing.List[Var] 14 | ) -> Expr: 15 | return Call("equivalence", Bool(), inputState, synthState, *queryParams) 16 | 17 | 18 | def opsListInvariant( 19 | fnNameBase: str, synthState: Expr, synthStateType: Type, opType: Type 20 | ) -> Expr: 21 | return And( 22 | Eq( 23 | Call( 24 | "apply_state_transitions", 25 | synthStateType, 26 | TupleGet(synthState, IntLit(len(synthStateType.args) - 1)), 27 | Var( 28 | f"{fnNameBase}_next_state", 29 | FnT(synthStateType, synthStateType, *opType.args), 30 | ), 31 | Var( 32 | f"{fnNameBase}_init_state", 33 | FnT(synthStateType), 34 | ), 35 | ), 36 | synthState, 37 | ), 38 | Call( 39 | "ops_in_order", 40 | Bool(), 41 | TupleGet(synthState, IntLit(len(synthStateType.args) - 1)), 42 | ), 43 | ) 44 | 45 | 46 | def supportedCommand(synthState: Expr, args: typing.Any) -> Expr: 47 | return Call("supportedCommand", Bool(), synthState, *args) 48 | 49 | 50 | def unpackOp(op: Expr) -> typing.List[Expr]: 51 | if op.type.name == "Tuple": 52 | return [TupleGet(op, IntLit(i)) for i in range(len(op.type.args))] 53 | else: 54 | return [op] 55 | 56 | 57 | def opListAdditionalFns( 58 | synthStateType: Type, 59 | opType: Type, 60 | initState: Callable[[], Expr], 61 | inOrder: Callable[[typing.Any, typing.Any], Expr], 62 | opPrecondition: Callable[[typing.Any], Expr], 63 | ) -> typing.List[Union[FnDecl, FnDeclNonRecursive, Axiom]]: 64 | def list_length(l: Expr) -> Expr: 65 | return Call("list_length", Int(), l) 66 | 67 | def list_get(l: Expr, i: Expr) -> Expr: 68 | return Call("list_get", opType, l, i) 69 | 70 | def list_tail(l: Expr, i: Expr) -> Expr: 71 | return Call("list_tail", ListT(opType), l, i) 72 | 73 | data = Var("data", ListT(opType)) 74 | next_state_fn = Var( 75 | "next_state_fn", 76 | FnT( 77 | synthStateType, 78 | synthStateType, 79 | *(opType.args if opType.name == "Tuple" else [opType]), 80 | ), 81 | ) 82 | 83 | init_state_fn = Var("init_state_fn", FnT(synthStateType)) 84 | 85 | reduce_fn = FnDecl( 86 | "apply_state_transitions", 87 | synthStateType, 88 | Ite( 89 | Eq(list_length(data), IntLit(0)), 90 | CallValue(init_state_fn), 91 | CallValue( 92 | next_state_fn, 93 | Call( 94 | "apply_state_transitions", 95 | synthStateType, 96 | list_tail(data, IntLit(1)), 97 | next_state_fn, 98 | init_state_fn, 99 | ), 100 | *( 101 | [ 102 | # TODO(shadaj): unnecessary cast 103 | typing.cast( 104 | Expr, TupleGet(list_get(data, IntLit(0)), IntLit(i)) 105 | ) 106 | for i in range(len(opType.args)) 107 | ] 108 | if opType.name == "Tuple" 109 | else [list_get(data, IntLit(0))] 110 | ), 111 | ), 112 | ), 113 | data, 114 | next_state_fn, 115 | init_state_fn, 116 | ) 117 | 118 | next_op = Var("next_op", opType) 119 | ops_in_order_helper = FnDecl( 120 | "ops_in_order_helper", 121 | Bool(), 122 | And( 123 | opPrecondition(unpackOp(next_op)), 124 | Ite( 125 | Eq(list_length(data), IntLit(0)), 126 | BoolLit(True), 127 | And( 128 | inOrder(unpackOp(list_get(data, IntLit(0))), unpackOp(next_op)), 129 | Call( 130 | "ops_in_order_helper", 131 | Bool(), 132 | list_get(data, IntLit(0)), 133 | list_tail(data, IntLit(1)), 134 | ), 135 | ), 136 | ), 137 | ), 138 | next_op, 139 | data, 140 | ) 141 | 142 | ops_in_order = FnDecl( 143 | "ops_in_order", 144 | Bool(), 145 | Ite( 146 | Eq(list_length(data), IntLit(0)), 147 | 
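# an empty operation history is trivially in order; otherwise check the most recent
# op (the list head) against the remaining history via ops_in_order_helper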
BoolLit(True), 148 | Call( 149 | "ops_in_order_helper", 150 | Bool(), 151 | list_get(data, IntLit(0)), 152 | list_tail(data, IntLit(1)), 153 | ), 154 | ), 155 | data, 156 | ) 157 | 158 | return [reduce_fn, ops_in_order_helper, ops_in_order] 159 | 160 | 161 | class SynthesizeFun(Protocol): 162 | def __call__( 163 | self, 164 | basename: str, 165 | targetLang: typing.List[Union[FnDecl, FnDeclNonRecursive, Axiom]], 166 | vars: typing.Set[Var], 167 | invAndPs: typing.List[Synth], 168 | preds: Union[str, typing.List[Expr]], 169 | vc: Expr, 170 | loopAndPsInfo: typing.List[Expr], 171 | cvcPath: str = "cvc5", 172 | uid: int = 0, 173 | noVerify: bool = False, 174 | unboundedInts: bool = False, 175 | optimize_vc_equality: bool = False, 176 | listBound: int = 2, 177 | log: bool = True, 178 | ) -> typing.List[FnDecl]: 179 | ... 180 | 181 | 182 | def synthesize_crdt( 183 | filename: str, 184 | fnNameBase: str, 185 | loopsFile: str, 186 | cvcPath: str, 187 | synthStateType: Type, 188 | initState: Callable[[], Expr], 189 | grammarStateInvariant: Callable[[Expr, int, int], Expr], 190 | grammarSupportedCommand: Callable[[Expr, typing.Any, int, int], Expr], 191 | inOrder: Callable[[typing.Any, typing.Any], Expr], 192 | opPrecondition: Callable[[typing.Any], Expr], 193 | grammar: Callable[[Expr, typing.List[Var], int], Expr], 194 | grammarQuery: Callable[[str, typing.List[Var], Type, int], Synth], 195 | grammarEquivalence: Callable[[Expr, Expr, typing.List[Var], int], Expr], 196 | targetLang: Callable[[], typing.List[Union[FnDecl, FnDeclNonRecursive, Axiom]]], 197 | synthesize: SynthesizeFun, 198 | stateTypeHint: typing.Optional[Type] = None, 199 | opArgTypeHint: typing.Optional[typing.List[Type]] = None, 200 | queryArgTypeHint: typing.Optional[typing.List[Type]] = None, 201 | queryRetTypeHint: typing.Optional[Type] = None, 202 | uid: int = 0, 203 | unboundedInts: bool = True, 204 | useOpList: bool = False, 205 | listBound: int = 1, 206 | baseDepth: int = 2, 207 | invariantBoost: int = 0, 208 | log: bool = True, 209 | skipSynth: bool = False, 210 | ) -> typing.List[FnDecl]: 211 | basename = os.path.splitext(os.path.basename(filename))[0] 212 | 213 | tracker = VariableTracker() 214 | 215 | state_transition_analysis = analyze( 216 | filename, 217 | fnNameBase + "_next_state", 218 | loopsFile, 219 | ) 220 | 221 | query_analysis = analyze( 222 | filename, 223 | fnNameBase + "_response", 224 | loopsFile, 225 | ) 226 | 227 | origSynthStateType = synthStateType 228 | 229 | op_arg_types = ( 230 | [v.type for v in state_transition_analysis.arguments[1:]] 231 | if opArgTypeHint is None 232 | else opArgTypeHint 233 | ) 234 | opType = TupleT(*op_arg_types) if len(op_arg_types) > 1 else op_arg_types[1] 235 | 236 | if useOpList: 237 | synthStateType = TupleT(*synthStateType.args, ListT(opType)) 238 | 239 | queryParameterTypes = ( 240 | [v.type for v in query_analysis.arguments[1:]] 241 | if queryArgTypeHint is None 242 | else queryArgTypeHint 243 | ) 244 | 245 | def supportedCommandWithList(synthState: Expr, args: typing.Any) -> Expr: 246 | return And( 247 | opPrecondition(args), 248 | Ite( 249 | Eq( 250 | Call( 251 | "list_length", 252 | Int(), 253 | TupleGet(synthState, IntLit(len(synthStateType.args) - 1)), 254 | ), 255 | IntLit(0), 256 | ), 257 | BoolLit(True), 258 | inOrder( 259 | unpackOp( 260 | Call( 261 | "list_get", 262 | opType, 263 | TupleGet(synthState, IntLit(len(synthStateType.args) - 1)), 264 | IntLit(0), 265 | ) 266 | ), 267 | args, 268 | ), 269 | ), 270 | ) 271 | 272 | seq_start_state = tracker.variable( 
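# symbolic starting state of the sequential reference implementation; the VC below
# relates it to synth_start_state through the synthesized "equivalence" relation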
273 | "seq_start_state", state_transition_analysis.arguments[0].type 274 | ) 275 | synth_start_state = tracker.variable("synth_start_state", synthStateType) 276 | equivalence_query_vars = [ 277 | tracker.variable( 278 | f"start_state_query_var_{i}", query_analysis.arguments[i + 1].type 279 | ) 280 | for i in range(len(query_analysis.arguments) - 1) 281 | ] 282 | 283 | synth_after_op = tracker.variable("synth_after_op", synthStateType) 284 | 285 | first_op_group = tracker.group("first_op") 286 | first_op_args = [ 287 | first_op_group.variable(v.name(), t) 288 | for v, t in zip(state_transition_analysis.arguments[1:], op_arg_types) 289 | ] 290 | 291 | second_op_group = tracker.group("second_op") 292 | second_op_args = [ 293 | second_op_group.variable(v.name(), t) 294 | for v, t in zip(state_transition_analysis.arguments[1:], op_arg_types) 295 | ] 296 | 297 | vcStateTransition = state_transition_analysis.call(seq_start_state, *first_op_args)( 298 | tracker, 299 | lambda seq_after_op: Implies( 300 | And( 301 | observeEquivalence( 302 | seq_start_state, synth_start_state, equivalence_query_vars 303 | ), 304 | *( 305 | [ 306 | opsListInvariant( 307 | fnNameBase, synth_start_state, synthStateType, opType 308 | ), 309 | supportedCommandWithList(synth_start_state, first_op_args), 310 | ] 311 | if useOpList 312 | else [ 313 | opPrecondition(first_op_args), 314 | supportedCommand(synth_start_state, first_op_args), 315 | ] 316 | ), 317 | Eq( 318 | synth_after_op, 319 | Call( 320 | f"{fnNameBase}_next_state", 321 | synthStateType, 322 | synth_start_state, 323 | *first_op_args, 324 | ), 325 | ), 326 | ), 327 | query_analysis.call(seq_start_state, *equivalence_query_vars)( 328 | tracker, 329 | lambda seqQueryResult: Implies( 330 | Eq( 331 | seqQueryResult, 332 | Call( 333 | f"{fnNameBase}_response", 334 | seqQueryResult.type, 335 | synth_start_state, 336 | *equivalence_query_vars, 337 | ), 338 | ), 339 | And( 340 | observeEquivalence( 341 | seq_after_op, synth_after_op, equivalence_query_vars 342 | ), 343 | query_analysis.call(seq_after_op, *equivalence_query_vars)( 344 | tracker, 345 | lambda seqQueryResult: Eq( 346 | seqQueryResult, 347 | Call( 348 | f"{fnNameBase}_response", 349 | seqQueryResult.type, 350 | synth_after_op, 351 | *equivalence_query_vars, 352 | ), 353 | ), 354 | ), 355 | *( 356 | [ 357 | Implies( 358 | And( 359 | inOrder(first_op_args, second_op_args), 360 | opPrecondition(second_op_args), 361 | ), 362 | supportedCommand(synth_after_op, second_op_args), 363 | ) 364 | ] 365 | if not useOpList 366 | else [] 367 | ), 368 | ), 369 | ), 370 | ), 371 | ), 372 | ) 373 | 374 | # define synthesis problem for state transition 375 | cur_state_param = Var("cur_state", synthStateType) 376 | 377 | op_arg_vars = [ 378 | Var(v.name(), t) 379 | for v, t in zip(state_transition_analysis.arguments[1:], op_arg_types) 380 | ] 381 | 382 | stateTransitionSynthNode = grammar( 383 | cur_state_param, 384 | op_arg_vars, 385 | baseDepth, 386 | ) 387 | 388 | invAndPsStateTransition = ( 389 | [ 390 | Synth( 391 | fnNameBase + "_next_state", 392 | Tuple( 393 | # the grammar directly produces the tupled next state, unpack to tack on the op-list 394 | *stateTransitionSynthNode.args, 395 | Call( 396 | "list_prepend", 397 | ListT(opType), 398 | Tuple(*op_arg_vars) if len(op_arg_vars) > 1 else op_arg_vars[0], 399 | TupleGet( 400 | cur_state_param, 401 | IntLit(len(synthStateType.args) - 1), 402 | ), 403 | ), 404 | ), 405 | cur_state_param, 406 | *op_arg_vars, 407 | ) 408 | ] 409 | if useOpList 410 | else [ 411 | Synth( 
412 | fnNameBase + "_next_state", 413 | stateTransitionSynthNode, 414 | cur_state_param, 415 | *op_arg_vars, 416 | ) 417 | ] 418 | ) 419 | # end state transition (in order) 420 | 421 | # begin query 422 | invAndPsQuery = [ 423 | grammarQuery( 424 | query_analysis.name, 425 | [Var(query_analysis.arguments[0].name(), synthStateType)] 426 | + ( 427 | [ 428 | Var(query_analysis.arguments[i + 1].name(), queryArgTypeHint[i]) 429 | for i in range(len(queryArgTypeHint)) 430 | ] 431 | if queryArgTypeHint 432 | else query_analysis.arguments[1:] 433 | ), 434 | query_analysis.return_type 435 | if queryRetTypeHint is None 436 | else queryRetTypeHint, 437 | baseDepth, 438 | ) 439 | ] 440 | # end query 441 | 442 | # begin init state 443 | initState_analysis = analyze( 444 | filename, 445 | fnNameBase + "_init_state", 446 | loopsFile, 447 | ) 448 | 449 | synthInitState = tracker.variable("synth_init_state", synthStateType) 450 | 451 | init_op_arg_vars = [] 452 | for i, typ in enumerate(op_arg_types): 453 | init_op_arg_vars.append(tracker.variable(f"init_op_arg_{i}", typ)) 454 | 455 | queryParamVars = [ 456 | tracker.variable( 457 | f"init_state_equivalence_query_param_{i}", 458 | query_analysis.arguments[i + 1].type, 459 | ) 460 | for i in range(len(query_analysis.arguments) - 1) 461 | ] 462 | 463 | vcInitState = initState_analysis.call()( 464 | tracker, 465 | lambda seqInitialState: Implies( 466 | Eq(synthInitState, Call(f"{fnNameBase}_init_state", synthStateType)), 467 | And( 468 | observeEquivalence(seqInitialState, synthInitState, queryParamVars), 469 | query_analysis.call(seqInitialState, *queryParamVars)( 470 | tracker, 471 | lambda seqQueryResult: Eq( 472 | seqQueryResult, 473 | Call( 474 | f"{fnNameBase}_response", 475 | seqQueryResult.type, 476 | synthInitState, 477 | *queryParamVars, 478 | ), 479 | ), 480 | ), 481 | BoolLit(True) 482 | if useOpList 483 | else Implies( 484 | opPrecondition(init_op_arg_vars), 485 | supportedCommand(synthInitState, init_op_arg_vars), 486 | ), 487 | ), 488 | ), 489 | ) 490 | 491 | initStateSynthNode = initState() 492 | invAndPsInitState = [ 493 | Synth( 494 | fnNameBase + "_init_state", 495 | Tuple( 496 | *initStateSynthNode.args, 497 | Call("list_empty", ListT(opType)), 498 | ) 499 | if useOpList 500 | else Tuple( 501 | *initStateSynthNode.args, 502 | ), 503 | ) 504 | ] 505 | # end init state 506 | 507 | # begin equivalence 508 | inputStateForEquivalence = Var( 509 | "inputState", 510 | state_transition_analysis.arguments[0].type 511 | if stateTypeHint is None 512 | else stateTypeHint, 513 | ) 514 | synthStateForEquivalence = Var("synthState", synthStateType) 515 | 516 | equivalenceQueryParams = [ 517 | Var(f"equivalence_query_param_{i}", queryParameterTypes[i]) 518 | for i in range(len(queryParameterTypes)) 519 | ] 520 | 521 | invAndPsEquivalence = [ 522 | Synth( 523 | "equivalence", 524 | And( 525 | grammarEquivalence( 526 | inputStateForEquivalence, 527 | synthStateForEquivalence, 528 | equivalenceQueryParams, 529 | baseDepth, 530 | ), 531 | *( 532 | [ 533 | grammarStateInvariant( 534 | synthStateForEquivalence, baseDepth, invariantBoost 535 | ) 536 | ] 537 | if not useOpList 538 | else [] 539 | ), 540 | ), 541 | inputStateForEquivalence, 542 | synthStateForEquivalence, 543 | *equivalenceQueryParams, 544 | ) 545 | ] 546 | 547 | synthStateForSupported = Var(f"supported_synthState", synthStateType) 548 | argList = [ 549 | Var( 550 | f"supported_arg_{i}", 551 | op_arg_types[i], 552 | ) 553 | for i in range(len(op_arg_types)) 554 | ] 555 | invAndPsSupported = ( 556 
| [ 557 | Synth( 558 | "supportedCommand", 559 | grammarSupportedCommand( 560 | synthStateForSupported, argList, baseDepth, invariantBoost 561 | ), 562 | synthStateForSupported, 563 | *argList, 564 | ) 565 | ] 566 | if not useOpList 567 | else [] 568 | ) 569 | # end equivalence 570 | 571 | if log: 572 | print("====== synthesis") 573 | 574 | combinedVCVars = set(tracker.all()) 575 | 576 | combinedInvAndPs = ( 577 | invAndPsStateTransition 578 | + invAndPsQuery 579 | + invAndPsInitState 580 | + invAndPsEquivalence 581 | + invAndPsSupported 582 | ) 583 | 584 | combinedVC = And(vcStateTransition, vcInitState) 585 | 586 | lang = targetLang() 587 | if useOpList: 588 | lang = lang + opListAdditionalFns( 589 | synthStateType, opType, initState, inOrder, opPrecondition 590 | ) 591 | 592 | if skipSynth: 593 | return # type: ignore 594 | 595 | try: 596 | out = synthesize( 597 | basename, 598 | lang, 599 | combinedVCVars, 600 | combinedInvAndPs, 601 | [], 602 | combinedVC, 603 | [*combinedInvAndPs], 604 | cvcPath, 605 | uid=uid, 606 | unboundedInts=unboundedInts, 607 | noVerify=useOpList, 608 | listBound=listBound, 609 | log=log, 610 | ) 611 | except VerificationFailed: 612 | # direct synthesis mode 613 | print( 614 | f"#{uid}: CVC5 failed to verify synthesized design, increasing Rosette data structure bounds to", 615 | listBound + 1, 616 | ) 617 | return synthesize_crdt( 618 | filename, 619 | fnNameBase, 620 | loopsFile, 621 | cvcPath, 622 | origSynthStateType, 623 | initState, 624 | grammarStateInvariant, 625 | grammarSupportedCommand, 626 | inOrder, 627 | opPrecondition, 628 | grammar, 629 | grammarQuery, 630 | grammarEquivalence, 631 | targetLang, 632 | synthesize, 633 | stateTypeHint=stateTypeHint, 634 | opArgTypeHint=opArgTypeHint, 635 | queryArgTypeHint=queryArgTypeHint, 636 | queryRetTypeHint=queryRetTypeHint, 637 | uid=uid, 638 | unboundedInts=unboundedInts, 639 | useOpList=useOpList, 640 | listBound=listBound + 1, 641 | baseDepth=baseDepth, 642 | invariantBoost=invariantBoost, 643 | log=log, 644 | ) 645 | 646 | if useOpList: 647 | print( 648 | f"#{uid}: Synthesizing invariants for unbounded verification (Rosette structure/history bound: {listBound})" 649 | ) 650 | equivalence_fn = [x for x in out if x.args[0] == "equivalence"][0] 651 | state_transition_fn = [ 652 | x for x in out if x.args[0] == f"{fnNameBase}_next_state" 653 | ][0] 654 | query_fn = [x for x in out if x.args[0] == f"{fnNameBase}_response"][0] 655 | init_state_fn = [x for x in out if x.args[0] == f"{fnNameBase}_init_state"][0] 656 | 657 | equivalence_fn.args[3] = Var( 658 | equivalence_fn.args[3].args[0], 659 | TupleT(*equivalence_fn.args[3].type.args[:-1]), 660 | ) 661 | 662 | equivalence_fn.args[1] = equivalence_fn.args[1].rewrite( 663 | {equivalence_fn.args[3].args[0]: equivalence_fn.args[3]} 664 | ) 665 | 666 | state_transition_fn.args[2] = Var( 667 | state_transition_fn.args[2].args[0], 668 | TupleT(*state_transition_fn.args[2].type.args[:-1]), 669 | ) 670 | 671 | # drop the op-list 672 | state_transition_fn.args[1] = Tuple( 673 | *[ 674 | e.rewrite( 675 | {state_transition_fn.args[2].args[0]: state_transition_fn.args[2]} 676 | ) 677 | for e in state_transition_fn.args[1].args[:-1] 678 | ] 679 | ) 680 | 681 | query_fn.args[2] = Var( 682 | query_fn.args[2].args[0], TupleT(*query_fn.args[2].type.args[:-1]) 683 | ) 684 | 685 | query_fn.args[1] = query_fn.args[1].rewrite( 686 | {query_fn.args[2].args[0]: query_fn.args[2]} 687 | ) 688 | 689 | init_state_fn.args[1] = Tuple(*init_state_fn.args[1].args[:-1]) 690 | 691 | try: 692 
| # attempt to synthesize the invariants 693 | return synthesize_crdt( 694 | filename, 695 | fnNameBase, 696 | loopsFile, 697 | cvcPath, 698 | origSynthStateType, 699 | lambda: init_state_fn.args[1], # type: ignore 700 | grammarStateInvariant, 701 | grammarSupportedCommand, 702 | inOrder, 703 | opPrecondition, 704 | lambda inState, args, _baseDepth: typing.cast( 705 | Expr, state_transition_fn.args[1] 706 | ).rewrite( 707 | { 708 | cur_state_param.name(): inState, 709 | **{orig.name(): new for orig, new in zip(op_arg_vars, args)}, 710 | } 711 | ), 712 | lambda _name, _args, _retT, _baseDepth: Synth( 713 | query_fn.args[0], query_fn.args[1], *query_fn.args[2:] 714 | ), 715 | lambda a, b, _baseDepth, _invariantBoost: equivalence_fn.args[1], # type: ignore 716 | targetLang, 717 | synthesize, 718 | stateTypeHint=stateTypeHint, 719 | opArgTypeHint=opArgTypeHint, 720 | queryArgTypeHint=queryArgTypeHint, 721 | queryRetTypeHint=queryRetTypeHint, 722 | uid=uid, 723 | unboundedInts=unboundedInts, 724 | useOpList=False, 725 | listBound=listBound, 726 | baseDepth=baseDepth, 727 | invariantBoost=invariantBoost, 728 | log=log, 729 | ) 730 | except SynthesisFailed: 731 | try: 732 | # try to re-verify with a larger bound 733 | print( 734 | f"#{uid}: re-verifying with history bound {listBound + 1} and attempting to re-synthesize invariants with deeper grammar" 735 | ) 736 | return synthesize_crdt( 737 | filename, 738 | fnNameBase, 739 | loopsFile, 740 | cvcPath, 741 | origSynthStateType, 742 | lambda: init_state_fn.args[1], # type: ignore 743 | grammarStateInvariant, 744 | grammarSupportedCommand, 745 | inOrder, 746 | opPrecondition, 747 | lambda inState, args, _baseDepth: typing.cast( 748 | Expr, state_transition_fn.args[1] 749 | ).rewrite( 750 | { 751 | cur_state_param.name(): inState, 752 | **{ 753 | orig.name(): new for orig, new in zip(op_arg_vars, args) 754 | }, 755 | } 756 | ), 757 | lambda _name, args, _retT, _baseDepth: Synth( 758 | query_fn.args[0], query_fn.args[1], *args 759 | ), 760 | lambda a, b, c, _baseDepth: equivalence_fn.args[1], # type: ignore 761 | targetLang, 762 | synthesize, 763 | stateTypeHint=stateTypeHint, 764 | opArgTypeHint=opArgTypeHint, 765 | queryArgTypeHint=queryArgTypeHint, 766 | queryRetTypeHint=queryRetTypeHint, 767 | uid=uid, 768 | unboundedInts=unboundedInts, 769 | useOpList=useOpList, 770 | listBound=listBound + 1, 771 | baseDepth=baseDepth, 772 | invariantBoost=invariantBoost + 1, 773 | log=log, 774 | ) 775 | except SynthesisFailed: 776 | print( 777 | f"#{uid}: could not synthesize invariants, re-synthesizing entire design with history bound {listBound + 1}" 778 | ) 779 | return synthesize_crdt( 780 | filename, 781 | fnNameBase, 782 | loopsFile, 783 | cvcPath, 784 | origSynthStateType, 785 | initState, 786 | grammarStateInvariant, 787 | grammarSupportedCommand, 788 | inOrder, 789 | opPrecondition, 790 | grammar, 791 | grammarQuery, 792 | grammarEquivalence, 793 | targetLang, 794 | synthesize, 795 | stateTypeHint=stateTypeHint, 796 | opArgTypeHint=opArgTypeHint, 797 | queryArgTypeHint=queryArgTypeHint, 798 | queryRetTypeHint=queryRetTypeHint, 799 | uid=uid, 800 | unboundedInts=unboundedInts, 801 | useOpList=useOpList, 802 | listBound=listBound + 1, 803 | baseDepth=baseDepth, 804 | invariantBoost=invariantBoost, 805 | log=log, 806 | ) 807 | else: 808 | return out 809 | -------------------------------------------------------------------------------- /llvm-pass/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 
cmake_minimum_required(VERSION 3.1) 2 | project(AddEmptyBlocks) 3 | 4 | # support C++14 features used by LLVM 10.0.0 5 | set(CMAKE_CXX_STANDARD 14) 6 | SET(CMAKE_CXX_FLAGS "-D__GLIBCXX_USE_CXX11_ABI=0 -fno-rtti") 7 | 8 | find_package(LLVM REQUIRED CONFIG) 9 | add_definitions(${LLVM_DEFINITIONS}) 10 | include_directories(${LLVM_INCLUDE_DIRS}) 11 | link_directories(${LLVM_LIBRARY_DIRS}) 12 | 13 | add_subdirectory(addEmptyBlocks) # Use your pass name here. -------------------------------------------------------------------------------- /llvm-pass/addEmptyBlocks/AddEmptyBlocks.cpp: -------------------------------------------------------------------------------- 1 | #include <map> 2 | #include "llvm/Pass.h" 3 | #include "llvm/IR/Function.h" 4 | #include "llvm/Support/raw_ostream.h" 5 | #include "llvm/IR/LegacyPassManager.h" 6 | #include "llvm/IR/InstrTypes.h" 7 | #include "llvm/Transforms/IPO/PassManagerBuilder.h" 8 | #include "llvm/IR/IRBuilder.h" 9 | #include "llvm/Transforms/Utils/BasicBlockUtils.h" 10 | 11 | using namespace llvm; 12 | 13 | namespace 14 | { 15 | /* 16 | Add empty BBs after all conditional branches to ensure that there is always an "else" BB. 17 | The idea is to transform a CFG like: 18 | 19 | /---------\ 20 | b1 -----> b2-----> b3 21 | 22 | into: 23 | 24 | /----b4----------\ 25 | b1 -----> b5 ---> b2 -----> b3 26 | 27 | To do so we need to do two things: 28 | 1. add new blocks b4 and b5 and make them the successors of b1 29 | 2. change all PHI nodes in the original successors of the two branches such that they 30 | point to the newly created BBs as their predecessors instead 31 | */ 32 | struct AddEmptyBlockPass : public FunctionPass 33 | { 34 | static char ID; 35 | AddEmptyBlockPass() : FunctionPass(ID) {} 36 | 37 | BasicBlock *createBB(Function &F, BasicBlock * succ) 38 | { 39 | auto * bb = BasicBlock::Create(F.getContext(), "", &F); 40 | IRBuilder<> builder(bb); 41 | builder.CreateBr(succ); 42 | return bb; 43 | } 44 | 45 | virtual bool runOnFunction(Function &F) 46 | { 47 | std::map<BranchInst *, BranchInst *> toReplace; 48 | 49 | for (auto &B : F) 50 | { 51 | for (auto &I : B) 52 | { 53 | if (auto *op = dyn_cast<BranchInst>(&I)) 54 | { 55 | if (op->isConditional()) 56 | { 57 | BasicBlock * trueBB = createBB(F, op->getSuccessor(0)); 58 | BasicBlock * falseBB = createBB(F, op->getSuccessor(1)); 59 | 60 | op->getSuccessor(0)->replacePhiUsesWith(&B, trueBB); 61 | op->getSuccessor(1)->replacePhiUsesWith(&B, falseBB); 62 | 63 | BranchInst *br = BranchInst::Create(trueBB, falseBB, op->getCondition()); 64 | toReplace.insert(std::pair<BranchInst *, BranchInst *>(op, br)); 65 | } 66 | } 67 | } 68 | } 69 | 70 | for (auto &kv : toReplace) 71 | { 72 | if (!kv.first->use_empty()) 73 | kv.first->replaceAllUsesWith(kv.second); 74 | errs() << "replace: " << *kv.first << " with " << *kv.second << "\n"; 75 | ReplaceInstWithInst(kv.first, kv.second); 76 | } 77 | 78 | return toReplace.size(); 79 | } 80 | }; 81 | } 82 | 83 | char AddEmptyBlockPass::ID = 0; 84 | 85 | static RegisterPass<AddEmptyBlockPass> X("addEmptyBlock", "Add empty blocks for conditionals pass", 86 | false /* Only looks at CFG */, 87 | false /* Analysis Pass */); 88 | 89 | static RegisterStandardPasses Y( 90 | PassManagerBuilder::EP_EarlyAsPossible, 91 | [](const PassManagerBuilder &Builder, 92 | legacy::PassManagerBase &PM) { 93 | PM.add(new AddEmptyBlockPass()); 94 | }); 95 | -------------------------------------------------------------------------------- /llvm-pass/addEmptyBlocks/CMakeLists.txt: -------------------------------------------------------------------------------- 1 | 
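# Illustrative usage (exact invocation depends on your LLVM setup): after building
# per llvm-pass/instructions.txt, the pass registered as "addEmptyBlock" in
# AddEmptyBlocks.cpp can typically be loaded through the legacy pass manager, e.g.
#   opt -load build/addEmptyBlocks/libAddEmptyBlocksPass.so -addEmptyBlock -S input.ll -o output.ll
# (the library file name/extension and the need for -enable-new-pm=0 vary across LLVM versions).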
add_library(AddEmptyBlocksPass MODULE 2 | # List your source files here. 3 | AddEmptyBlocks.cpp 4 | ) 5 | 6 | # Use C++11 to compile our pass (i.e., supply -std=c++11). 7 | target_compile_features(AddEmptyBlocksPass PRIVATE cxx_range_for cxx_auto_type) 8 | 9 | # LLVM is (typically) built with no C++ RTTI. We need to match that; 10 | # otherwise, we'll get linker errors about missing RTTI data. 11 | set_target_properties(AddEmptyBlocksPass PROPERTIES 12 | COMPILE_FLAGS "-fno-rtti -g" 13 | ) 14 | 15 | # Get proper shared-library behavior (where symbols are not necessarily 16 | # resolved when the shared library is linked) on OS X. 17 | if(APPLE) 18 | set_target_properties(AddEmptyBlocksPass PROPERTIES 19 | LINK_FLAGS "-undefined dynamic_lookup" 20 | ) 21 | endif(APPLE) 22 | -------------------------------------------------------------------------------- /llvm-pass/instructions.txt: -------------------------------------------------------------------------------- 1 | 1. mkdir build 2 | 2. cd build; cmake ../ 3 | 3. make 4 | 4. built library is in build/addEmptyBlocks/ 5 | -------------------------------------------------------------------------------- /mypy.ini: -------------------------------------------------------------------------------- 1 | [mypy] 2 | python_version = 3.8 3 | ignore_missing_imports = True 4 | strict = True 5 | exclude = (?x)( 6 | ^tests/| 7 | ^metalift/ 8 | ) 9 | -------------------------------------------------------------------------------- /poetry.lock: -------------------------------------------------------------------------------- 1 | [[package]] 2 | name = "autoflake" 3 | version = "1.7.8" 4 | description = "Removes unused imports and unused variables" 5 | category = "dev" 6 | optional = false 7 | python-versions = ">=3.7" 8 | 9 | [package.dependencies] 10 | pyflakes = ">=1.1.0,<3" 11 | tomli = {version = ">=2.0.1", markers = "python_version < \"3.11\""} 12 | 13 | [[package]] 14 | name = "black" 15 | version = "22.3.0" 16 | description = "The uncompromising code formatter." 17 | category = "dev" 18 | optional = false 19 | python-versions = ">=3.6.2" 20 | 21 | [package.dependencies] 22 | click = ">=8.0.0" 23 | mypy-extensions = ">=0.4.3" 24 | pathspec = ">=0.9.0" 25 | platformdirs = ">=2" 26 | tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} 27 | typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} 28 | 29 | [package.extras] 30 | colorama = ["colorama (>=0.4.3)"] 31 | d = ["aiohttp (>=3.7.4)"] 32 | jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] 33 | uvloop = ["uvloop (>=0.15.2)"] 34 | 35 | [[package]] 36 | name = "click" 37 | version = "8.1.3" 38 | description = "Composable command line interface toolkit" 39 | category = "dev" 40 | optional = false 41 | python-versions = ">=3.7" 42 | 43 | [package.dependencies] 44 | colorama = {version = "*", markers = "platform_system == \"Windows\""} 45 | 46 | [[package]] 47 | name = "colorama" 48 | version = "0.4.6" 49 | description = "Cross-platform colored terminal text." 
50 | category = "dev" 51 | optional = false 52 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" 53 | 54 | [[package]] 55 | name = "contourpy" 56 | version = "1.0.6" 57 | description = "Python library for calculating contours of 2D quadrilateral grids" 58 | category = "dev" 59 | optional = false 60 | python-versions = ">=3.7" 61 | 62 | [package.dependencies] 63 | numpy = ">=1.16" 64 | 65 | [package.extras] 66 | bokeh = ["bokeh", "selenium"] 67 | docs = ["docutils (<0.18)", "sphinx (<=5.2.0)", "sphinx-rtd-theme"] 68 | test = ["pytest", "matplotlib", "pillow", "flake8", "isort"] 69 | test-minimal = ["pytest"] 70 | test-no-codebase = ["pytest", "matplotlib", "pillow"] 71 | 72 | [[package]] 73 | name = "cycler" 74 | version = "0.11.0" 75 | description = "Composable style cycles" 76 | category = "dev" 77 | optional = false 78 | python-versions = ">=3.6" 79 | 80 | [[package]] 81 | name = "fonttools" 82 | version = "4.38.0" 83 | description = "Tools to manipulate font files" 84 | category = "dev" 85 | optional = false 86 | python-versions = ">=3.7" 87 | 88 | [package.extras] 89 | all = ["fs (>=2.2.0,<3)", "lxml (>=4.0,<5)", "zopfli (>=0.1.4)", "lz4 (>=1.7.4.2)", "matplotlib", "sympy", "skia-pathops (>=0.5.0)", "uharfbuzz (>=0.23.0)", "brotlicffi (>=0.8.0)", "scipy", "brotli (>=1.0.1)", "munkres", "unicodedata2 (>=14.0.0)", "xattr"] 90 | graphite = ["lz4 (>=1.7.4.2)"] 91 | interpolatable = ["scipy", "munkres"] 92 | lxml = ["lxml (>=4.0,<5)"] 93 | pathops = ["skia-pathops (>=0.5.0)"] 94 | plot = ["matplotlib"] 95 | repacker = ["uharfbuzz (>=0.23.0)"] 96 | symfont = ["sympy"] 97 | type1 = ["xattr"] 98 | ufo = ["fs (>=2.2.0,<3)"] 99 | unicode = ["unicodedata2 (>=14.0.0)"] 100 | woff = ["zopfli (>=0.1.4)", "brotlicffi (>=0.8.0)", "brotli (>=1.0.1)"] 101 | 102 | [[package]] 103 | name = "kiwisolver" 104 | version = "1.4.4" 105 | description = "A fast implementation of the Cassowary constraint solver" 106 | category = "dev" 107 | optional = false 108 | python-versions = ">=3.7" 109 | 110 | [[package]] 111 | name = "llvmlite" 112 | version = "0.39.1" 113 | description = "lightweight wrapper around basic LLVM functionality" 114 | category = "main" 115 | optional = false 116 | python-versions = ">=3.7" 117 | 118 | [[package]] 119 | name = "matplotlib" 120 | version = "3.6.2" 121 | description = "Python plotting package" 122 | category = "dev" 123 | optional = false 124 | python-versions = ">=3.8" 125 | 126 | [package.dependencies] 127 | contourpy = ">=1.0.1" 128 | cycler = ">=0.10" 129 | fonttools = ">=4.22.0" 130 | kiwisolver = ">=1.0.1" 131 | numpy = ">=1.19" 132 | packaging = ">=20.0" 133 | pillow = ">=6.2.0" 134 | pyparsing = ">=2.2.1" 135 | python-dateutil = ">=2.7" 136 | setuptools_scm = ">=7" 137 | 138 | [[package]] 139 | name = "metalift" 140 | version = "0.1.0" 141 | description = "" 142 | category = "main" 143 | optional = false 144 | python-versions = ">=3.8,<3.11" 145 | develop = false 146 | 147 | [package.dependencies] 148 | llvmlite = "^0.39.1" 149 | pyparsing = "^3.0.8" 150 | 151 | [package.source] 152 | type = "git" 153 | url = "https://github.com/metalift/metalift.git" 154 | reference = "main" 155 | resolved_reference = "dc4a575e526621972288f0ac238e9170566d5690" 156 | 157 | [[package]] 158 | name = "mypy" 159 | version = "0.950" 160 | description = "Optional static typing for Python" 161 | category = "dev" 162 | optional = false 163 | python-versions = ">=3.6" 164 | 165 | [package.dependencies] 166 | mypy-extensions = ">=0.4.3" 167 | tomli = {version = 
">=1.1.0", markers = "python_version < \"3.11\""} 168 | typing-extensions = ">=3.10" 169 | 170 | [package.extras] 171 | dmypy = ["psutil (>=4.0)"] 172 | python2 = ["typed-ast (>=1.4.0,<2)"] 173 | reports = ["lxml"] 174 | 175 | [[package]] 176 | name = "mypy-extensions" 177 | version = "0.4.3" 178 | description = "Experimental type system extensions for programs checked with the mypy typechecker." 179 | category = "dev" 180 | optional = false 181 | python-versions = "*" 182 | 183 | [[package]] 184 | name = "numpy" 185 | version = "1.23.5" 186 | description = "NumPy is the fundamental package for array computing with Python." 187 | category = "dev" 188 | optional = false 189 | python-versions = ">=3.8" 190 | 191 | [[package]] 192 | name = "packaging" 193 | version = "21.3" 194 | description = "Core utilities for Python packages" 195 | category = "dev" 196 | optional = false 197 | python-versions = ">=3.6" 198 | 199 | [package.dependencies] 200 | pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" 201 | 202 | [[package]] 203 | name = "pandas" 204 | version = "1.5.2" 205 | description = "Powerful data structures for data analysis, time series, and statistics" 206 | category = "dev" 207 | optional = false 208 | python-versions = ">=3.8" 209 | 210 | [package.dependencies] 211 | numpy = [ 212 | {version = ">=1.20.3", markers = "python_version < \"3.10\""}, 213 | {version = ">=1.21.0", markers = "python_version >= \"3.10\""}, 214 | ] 215 | python-dateutil = ">=2.8.1" 216 | pytz = ">=2020.1" 217 | 218 | [package.extras] 219 | test = ["hypothesis (>=5.5.3)", "pytest (>=6.0)", "pytest-xdist (>=1.31)"] 220 | 221 | [[package]] 222 | name = "pathspec" 223 | version = "0.10.2" 224 | description = "Utility library for gitignore style pattern matching of file paths." 225 | category = "dev" 226 | optional = false 227 | python-versions = ">=3.7" 228 | 229 | [[package]] 230 | name = "pillow" 231 | version = "9.3.0" 232 | description = "Python Imaging Library (Fork)" 233 | category = "dev" 234 | optional = false 235 | python-versions = ">=3.7" 236 | 237 | [package.extras] 238 | docs = ["furo", "olefile", "sphinx (>=2.4)", "sphinx-copybutton", "sphinx-issues (>=3.0.1)", "sphinx-removed-in", "sphinxext-opengraph"] 239 | tests = ["check-manifest", "coverage", "defusedxml", "markdown2", "olefile", "packaging", "pyroma", "pytest", "pytest-cov", "pytest-timeout"] 240 | 241 | [[package]] 242 | name = "platformdirs" 243 | version = "2.5.4" 244 | description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
245 | category = "dev" 246 | optional = false 247 | python-versions = ">=3.7" 248 | 249 | [package.extras] 250 | docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx-autodoc-typehints (>=1.19.4)", "sphinx (>=5.3)"] 251 | test = ["appdirs (==1.4.4)", "pytest-cov (>=4)", "pytest-mock (>=3.10)", "pytest (>=7.2)"] 252 | 253 | [[package]] 254 | name = "pyflakes" 255 | version = "2.5.0" 256 | description = "passive checker of Python programs" 257 | category = "dev" 258 | optional = false 259 | python-versions = ">=3.6" 260 | 261 | [[package]] 262 | name = "pyparsing" 263 | version = "3.0.9" 264 | description = "pyparsing module - Classes and methods to define and execute parsing grammars" 265 | category = "main" 266 | optional = false 267 | python-versions = ">=3.6.8" 268 | 269 | [package.extras] 270 | diagrams = ["railroad-diagrams", "jinja2"] 271 | 272 | [[package]] 273 | name = "python-dateutil" 274 | version = "2.8.2" 275 | description = "Extensions to the standard Python datetime module" 276 | category = "dev" 277 | optional = false 278 | python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" 279 | 280 | [package.dependencies] 281 | six = ">=1.5" 282 | 283 | [[package]] 284 | name = "pytz" 285 | version = "2022.6" 286 | description = "World timezone definitions, modern and historical" 287 | category = "dev" 288 | optional = false 289 | python-versions = "*" 290 | 291 | [[package]] 292 | name = "setuptools-scm" 293 | version = "7.0.5" 294 | description = "the blessed package to manage your versions by scm tags" 295 | category = "dev" 296 | optional = false 297 | python-versions = ">=3.7" 298 | 299 | [package.dependencies] 300 | packaging = ">=20.0" 301 | tomli = ">=1.0.0" 302 | typing-extensions = "*" 303 | 304 | [package.extras] 305 | test = ["pytest (>=6.2)", "virtualenv (>20)"] 306 | toml = ["setuptools (>=42)"] 307 | 308 | [[package]] 309 | name = "six" 310 | version = "1.16.0" 311 | description = "Python 2 and 3 compatibility utilities" 312 | category = "dev" 313 | optional = false 314 | python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" 315 | 316 | [[package]] 317 | name = "tomli" 318 | version = "2.0.1" 319 | description = "A lil' TOML parser" 320 | category = "dev" 321 | optional = false 322 | python-versions = ">=3.7" 323 | 324 | [[package]] 325 | name = "typing-extensions" 326 | version = "4.4.0" 327 | description = "Backported and Experimental Type Hints for Python 3.7+" 328 | category = "dev" 329 | optional = false 330 | python-versions = ">=3.7" 331 | 332 | [metadata] 333 | lock-version = "1.1" 334 | python-versions = ">=3.8,<3.11" 335 | content-hash = "7b849ca16e17782700b2a65712e75cc9cc2bbb41d78d07447f3582537ebdb135" 336 | 337 | [metadata.files] 338 | autoflake = [ 339 | {file = "autoflake-1.7.8-py3-none-any.whl", hash = "sha256:46373ef69b6714f5064c923bb28bd797c4f8a9497f557d87fc36665c6d956b39"}, 340 | {file = "autoflake-1.7.8.tar.gz", hash = "sha256:e7e46372dee46fa1c97acf310d99d922b63d369718a270809d7c278d34a194cf"}, 341 | ] 342 | black = [ 343 | {file = "black-22.3.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2497f9c2386572e28921fa8bec7be3e51de6801f7459dffd6e62492531c47e09"}, 344 | {file = "black-22.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5795a0375eb87bfe902e80e0c8cfaedf8af4d49694d69161e5bd3206c18618bb"}, 345 | {file = "black-22.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e3556168e2e5c49629f7b0f377070240bd5511e45e25a4497bb0073d9dda776a"}, 346 | {file = "black-22.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:67c8301ec94e3bcc8906740fe071391bce40a862b7be0b86fb5382beefecd968"}, 347 | {file = "black-22.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:fd57160949179ec517d32ac2ac898b5f20d68ed1a9c977346efbac9c2f1e779d"}, 348 | {file = "black-22.3.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:cc1e1de68c8e5444e8f94c3670bb48a2beef0e91dddfd4fcc29595ebd90bb9ce"}, 349 | {file = "black-22.3.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d2fc92002d44746d3e7db7cf9313cf4452f43e9ea77a2c939defce3b10b5c82"}, 350 | {file = "black-22.3.0-cp36-cp36m-win_amd64.whl", hash = "sha256:a6342964b43a99dbc72f72812bf88cad8f0217ae9acb47c0d4f141a6416d2d7b"}, 351 | {file = "black-22.3.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:328efc0cc70ccb23429d6be184a15ce613f676bdfc85e5fe8ea2a9354b4e9015"}, 352 | {file = "black-22.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:06f9d8846f2340dfac80ceb20200ea5d1b3f181dd0556b47af4e8e0b24fa0a6b"}, 353 | {file = "black-22.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:ad4efa5fad66b903b4a5f96d91461d90b9507a812b3c5de657d544215bb7877a"}, 354 | {file = "black-22.3.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8477ec6bbfe0312c128e74644ac8a02ca06bcdb8982d4ee06f209be28cdf163"}, 355 | {file = "black-22.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:637a4014c63fbf42a692d22b55d8ad6968a946b4a6ebc385c5505d9625b6a464"}, 356 | {file = "black-22.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:863714200ada56cbc366dc9ae5291ceb936573155f8bf8e9de92aef51f3ad0f0"}, 357 | {file = "black-22.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10dbe6e6d2988049b4655b2b739f98785a884d4d6b85bc35133a8fb9a2233176"}, 358 | {file = "black-22.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:cee3e11161dde1b2a33a904b850b0899e0424cc331b7295f2a9698e79f9a69a0"}, 359 | {file = "black-22.3.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5891ef8abc06576985de8fa88e95ab70641de6c1fca97e2a15820a9b69e51b20"}, 360 | {file = "black-22.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:30d78ba6bf080eeaf0b7b875d924b15cd46fec5fd044ddfbad38c8ea9171043a"}, 361 | {file = "black-22.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ee8f1f7228cce7dffc2b464f07ce769f478968bfb3dd1254a4c2eeed84928aad"}, 362 | {file = "black-22.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ee227b696ca60dd1c507be80a6bc849a5a6ab57ac7352aad1ffec9e8b805f21"}, 363 | {file = "black-22.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:9b542ced1ec0ceeff5b37d69838106a6348e60db7b8fdd245294dc1d26136265"}, 364 | {file = "black-22.3.0-py3-none-any.whl", hash = "sha256:bc58025940a896d7e5356952228b68f793cf5fcb342be703c3a2669a1488cb72"}, 365 | {file = "black-22.3.0.tar.gz", hash = "sha256:35020b8886c022ced9282b51b5a875b6d1ab0c387b31a065b84db7c33085ca79"}, 366 | ] 367 | click = [ 368 | {file = "click-8.1.3-py3-none-any.whl", hash = "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48"}, 369 | {file = "click-8.1.3.tar.gz", hash = "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e"}, 370 | ] 371 | colorama = [ 372 | {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, 373 | {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, 374 | ] 375 | contourpy = [ 376 | {file = "contourpy-1.0.6-cp310-cp310-macosx_10_9_universal2.whl", hash = 
"sha256:613c665529899b5d9fade7e5d1760111a0b011231277a0d36c49f0d3d6914bd6"}, 377 | {file = "contourpy-1.0.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:78ced51807ccb2f45d4ea73aca339756d75d021069604c2fccd05390dc3c28eb"}, 378 | {file = "contourpy-1.0.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b3b1bd7577c530eaf9d2bc52d1a93fef50ac516a8b1062c3d1b9bcec9ebe329b"}, 379 | {file = "contourpy-1.0.6-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8834c14b8c3dd849005e06703469db9bf96ba2d66a3f88ecc539c9a8982e0ee"}, 380 | {file = "contourpy-1.0.6-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f4052a8a4926d4468416fc7d4b2a7b2a3e35f25b39f4061a7e2a3a2748c4fc48"}, 381 | {file = "contourpy-1.0.6-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1c0e1308307a75e07d1f1b5f0f56b5af84538a5e9027109a7bcf6cb47c434e72"}, 382 | {file = "contourpy-1.0.6-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fc4e7973ed0e1fe689435842a6e6b330eb7ccc696080dda9a97b1a1b78e41db"}, 383 | {file = "contourpy-1.0.6-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:08e8d09d96219ace6cb596506fb9b64ea5f270b2fb9121158b976d88871fcfd1"}, 384 | {file = "contourpy-1.0.6-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:f33da6b5d19ad1bb5e7ad38bb8ba5c426d2178928bc2b2c44e8823ea0ecb6ff3"}, 385 | {file = "contourpy-1.0.6-cp310-cp310-win32.whl", hash = "sha256:12a7dc8439544ed05c6553bf026d5e8fa7fad48d63958a95d61698df0e00092b"}, 386 | {file = "contourpy-1.0.6-cp310-cp310-win_amd64.whl", hash = "sha256:eadad75bf91897f922e0fb3dca1b322a58b1726a953f98c2e5f0606bd8408621"}, 387 | {file = "contourpy-1.0.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:913bac9d064cff033cf3719e855d4f1db9f1c179e0ecf3ba9fdef21c21c6a16a"}, 388 | {file = "contourpy-1.0.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:46deb310a276cc5c1fd27958e358cce68b1e8a515fa5a574c670a504c3a3fe30"}, 389 | {file = "contourpy-1.0.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b64f747e92af7da3b85631a55d68c45a2d728b4036b03cdaba4bd94bcc85bd6f"}, 390 | {file = "contourpy-1.0.6-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50627bf76abb6ba291ad08db583161939c2c5fab38c38181b7833423ab9c7de3"}, 391 | {file = "contourpy-1.0.6-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:358f6364e4873f4d73360b35da30066f40387dd3c427a3e5432c6b28dd24a8fa"}, 392 | {file = "contourpy-1.0.6-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c78bfbc1a7bff053baf7e508449d2765964d67735c909b583204e3240a2aca45"}, 393 | {file = "contourpy-1.0.6-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e43255a83835a129ef98f75d13d643844d8c646b258bebd11e4a0975203e018f"}, 394 | {file = "contourpy-1.0.6-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:375d81366afd547b8558c4720337218345148bc2fcffa3a9870cab82b29667f2"}, 395 | {file = "contourpy-1.0.6-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b98c820608e2dca6442e786817f646d11057c09a23b68d2b3737e6dcb6e4a49b"}, 396 | {file = "contourpy-1.0.6-cp311-cp311-win32.whl", hash = "sha256:0e4854cc02006ad6684ce092bdadab6f0912d131f91c2450ce6dbdea78ee3c0b"}, 397 | {file = "contourpy-1.0.6-cp311-cp311-win_amd64.whl", hash = "sha256:d2eff2af97ea0b61381828b1ad6cd249bbd41d280e53aea5cccd7b2b31b8225c"}, 398 | {file = "contourpy-1.0.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:5b117d29433fc8393b18a696d794961464e37afb34a6eeb8b2c37b5f4128a83e"}, 399 | {file = 
"contourpy-1.0.6-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:341330ed19074f956cb20877ad8d2ae50e458884bfa6a6df3ae28487cc76c768"}, 400 | {file = "contourpy-1.0.6-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:371f6570a81dfdddbb837ba432293a63b4babb942a9eb7aaa699997adfb53278"}, 401 | {file = "contourpy-1.0.6-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9447c45df407d3ecb717d837af3b70cfef432138530712263730783b3d016512"}, 402 | {file = "contourpy-1.0.6-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:730c27978a0003b47b359935478b7d63fd8386dbb2dcd36c1e8de88cbfc1e9de"}, 403 | {file = "contourpy-1.0.6-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:da1ef35fd79be2926ba80fbb36327463e3656c02526e9b5b4c2b366588b74d9a"}, 404 | {file = "contourpy-1.0.6-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:cd2bc0c8f2e8de7dd89a7f1c10b8844e291bca17d359373203ef2e6100819edd"}, 405 | {file = "contourpy-1.0.6-cp37-cp37m-win32.whl", hash = "sha256:3a1917d3941dd58732c449c810fa7ce46cc305ce9325a11261d740118b85e6f3"}, 406 | {file = "contourpy-1.0.6-cp37-cp37m-win_amd64.whl", hash = "sha256:06ca79e1efbbe2df795822df2fa173d1a2b38b6e0f047a0ec7903fbca1d1847e"}, 407 | {file = "contourpy-1.0.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e626cefff8491bce356221c22af5a3ea528b0b41fbabc719c00ae233819ea0bf"}, 408 | {file = "contourpy-1.0.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:dbe6fe7a1166b1ddd7b6d887ea6fa8389d3f28b5ed3f73a8f40ece1fc5a3d340"}, 409 | {file = "contourpy-1.0.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e13b31d1b4b68db60b3b29f8e337908f328c7f05b9add4b1b5c74e0691180109"}, 410 | {file = "contourpy-1.0.6-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a79d239fc22c3b8d9d3de492aa0c245533f4f4c7608e5749af866949c0f1b1b9"}, 411 | {file = "contourpy-1.0.6-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9e8e686a6db92a46111a1ee0ee6f7fbfae4048f0019de207149f43ac1812cf95"}, 412 | {file = "contourpy-1.0.6-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:acd2bd02f1a7adff3a1f33e431eb96ab6d7987b039d2946a9b39fe6fb16a1036"}, 413 | {file = "contourpy-1.0.6-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:03d1b9c6b44a9e30d554654c72be89af94fab7510b4b9f62356c64c81cec8b7d"}, 414 | {file = "contourpy-1.0.6-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:b48d94386f1994db7c70c76b5808c12e23ed7a4ee13693c2fc5ab109d60243c0"}, 415 | {file = "contourpy-1.0.6-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:208bc904889c910d95aafcf7be9e677726df9ef71e216780170dbb7e37d118fa"}, 416 | {file = "contourpy-1.0.6-cp38-cp38-win32.whl", hash = "sha256:444fb776f58f4906d8d354eb6f6ce59d0a60f7b6a720da6c1ccb839db7c80eb9"}, 417 | {file = "contourpy-1.0.6-cp38-cp38-win_amd64.whl", hash = "sha256:9bc407a6af672da20da74823443707e38ece8b93a04009dca25856c2d9adadb1"}, 418 | {file = "contourpy-1.0.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:aa4674cf3fa2bd9c322982644967f01eed0c91bb890f624e0e0daf7a5c3383e9"}, 419 | {file = "contourpy-1.0.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:6f56515e7c6fae4529b731f6c117752247bef9cdad2b12fc5ddf8ca6a50965a5"}, 420 | {file = "contourpy-1.0.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:344cb3badf6fc7316ad51835f56ac387bdf86c8e1b670904f18f437d70da4183"}, 421 | {file = "contourpy-1.0.6-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:0b1e66346acfb17694d46175a0cea7d9036f12ed0c31dfe86f0f405eedde2bdd"}, 422 | {file = "contourpy-1.0.6-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8468b40528fa1e15181cccec4198623b55dcd58306f8815a793803f51f6c474a"}, 423 | {file = "contourpy-1.0.6-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1dedf4c64185a216c35eb488e6f433297c660321275734401760dafaeb0ad5c2"}, 424 | {file = "contourpy-1.0.6-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:494efed2c761f0f37262815f9e3c4bb9917c5c69806abdee1d1cb6611a7174a0"}, 425 | {file = "contourpy-1.0.6-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:75a2e638042118118ab39d337da4c7908c1af74a8464cad59f19fbc5bbafec9b"}, 426 | {file = "contourpy-1.0.6-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a628bba09ba72e472bf7b31018b6281fd4cc903f0888049a3724afba13b6e0b8"}, 427 | {file = "contourpy-1.0.6-cp39-cp39-win32.whl", hash = "sha256:e1739496c2f0108013629aa095cc32a8c6363444361960c07493818d0dea2da4"}, 428 | {file = "contourpy-1.0.6-cp39-cp39-win_amd64.whl", hash = "sha256:a457ee72d9032e86730f62c5eeddf402e732fdf5ca8b13b41772aa8ae13a4563"}, 429 | {file = "contourpy-1.0.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d912f0154a20a80ea449daada904a7eb6941c83281a9fab95de50529bfc3a1da"}, 430 | {file = "contourpy-1.0.6-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4081918147fc4c29fad328d5066cfc751da100a1098398742f9f364be63803fc"}, 431 | {file = "contourpy-1.0.6-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0537cc1195245bbe24f2913d1f9211b8f04eb203de9044630abd3664c6cc339c"}, 432 | {file = "contourpy-1.0.6-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcd556c8fc37a342dd636d7eef150b1399f823a4462f8c968e11e1ebeabee769"}, 433 | {file = "contourpy-1.0.6-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:f6ca38dd8d988eca8f07305125dec6f54ac1c518f1aaddcc14d08c01aebb6efc"}, 434 | {file = "contourpy-1.0.6-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:c1baa49ab9fedbf19d40d93163b7d3e735d9cd8d5efe4cce9907902a6dad391f"}, 435 | {file = "contourpy-1.0.6-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:211dfe2bd43bf5791d23afbe23a7952e8ac8b67591d24be3638cabb648b3a6eb"}, 436 | {file = "contourpy-1.0.6-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c38c6536c2d71ca2f7e418acaf5bca30a3af7f2a2fa106083c7d738337848dbe"}, 437 | {file = "contourpy-1.0.6-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1b1ee48a130da4dd0eb8055bbab34abf3f6262957832fd575e0cab4979a15a41"}, 438 | {file = "contourpy-1.0.6-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:5641927cc5ae66155d0c80195dc35726eae060e7defc18b7ab27600f39dd1fe7"}, 439 | {file = "contourpy-1.0.6-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7ee394502026d68652c2824348a40bf50f31351a668977b51437131a90d777ea"}, 440 | {file = "contourpy-1.0.6-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b97454ed5b1368b66ed414c754cba15b9750ce69938fc6153679787402e4cdf"}, 441 | {file = "contourpy-1.0.6-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0236875c5a0784215b49d00ebbe80c5b6b5d5244b3655a36dda88105334dea17"}, 442 | {file = "contourpy-1.0.6-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c593aeff7a0171f639da92cb86d24954bbb61f8a1b530f74eb750a14685832"}, 443 | {file = 
"contourpy-1.0.6-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:9b0e7fe7f949fb719b206548e5cde2518ffb29936afa4303d8a1c4db43dcb675"}, 444 | {file = "contourpy-1.0.6.tar.gz", hash = "sha256:6e459ebb8bb5ee4c22c19cc000174f8059981971a33ce11e17dddf6aca97a142"}, 445 | ] 446 | cycler = [ 447 | {file = "cycler-0.11.0-py3-none-any.whl", hash = "sha256:3a27e95f763a428a739d2add979fa7494c912a32c17c4c38c4d5f082cad165a3"}, 448 | {file = "cycler-0.11.0.tar.gz", hash = "sha256:9c87405839a19696e837b3b818fed3f5f69f16f1eec1a1ad77e043dcea9c772f"}, 449 | ] 450 | fonttools = [ 451 | {file = "fonttools-4.38.0-py3-none-any.whl", hash = "sha256:820466f43c8be8c3009aef8b87e785014133508f0de64ec469e4efb643ae54fb"}, 452 | {file = "fonttools-4.38.0.zip", hash = "sha256:2bb244009f9bf3fa100fc3ead6aeb99febe5985fa20afbfbaa2f8946c2fbdaf1"}, 453 | ] 454 | kiwisolver = [ 455 | {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2f5e60fabb7343a836360c4f0919b8cd0d6dbf08ad2ca6b9cf90bf0c76a3c4f6"}, 456 | {file = "kiwisolver-1.4.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:10ee06759482c78bdb864f4109886dff7b8a56529bc1609d4f1112b93fe6423c"}, 457 | {file = "kiwisolver-1.4.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c79ebe8f3676a4c6630fd3f777f3cfecf9289666c84e775a67d1d358578dc2e3"}, 458 | {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:abbe9fa13da955feb8202e215c4018f4bb57469b1b78c7a4c5c7b93001699938"}, 459 | {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7577c1987baa3adc4b3c62c33bd1118c3ef5c8ddef36f0f2c950ae0b199e100d"}, 460 | {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f8ad8285b01b0d4695102546b342b493b3ccc6781fc28c8c6a1bb63e95d22f09"}, 461 | {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8ed58b8acf29798b036d347791141767ccf65eee7f26bde03a71c944449e53de"}, 462 | {file = "kiwisolver-1.4.4-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a68b62a02953b9841730db7797422f983935aeefceb1679f0fc85cbfbd311c32"}, 463 | {file = "kiwisolver-1.4.4-cp310-cp310-win32.whl", hash = "sha256:e92a513161077b53447160b9bd8f522edfbed4bd9759e4c18ab05d7ef7e49408"}, 464 | {file = "kiwisolver-1.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:3fe20f63c9ecee44560d0e7f116b3a747a5d7203376abeea292ab3152334d004"}, 465 | {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:e0ea21f66820452a3f5d1655f8704a60d66ba1191359b96541eaf457710a5fc6"}, 466 | {file = "kiwisolver-1.4.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:bc9db8a3efb3e403e4ecc6cd9489ea2bac94244f80c78e27c31dcc00d2790ac2"}, 467 | {file = "kiwisolver-1.4.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d5b61785a9ce44e5a4b880272baa7cf6c8f48a5180c3e81c59553ba0cb0821ca"}, 468 | {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c2dbb44c3f7e6c4d3487b31037b1bdbf424d97687c1747ce4ff2895795c9bf69"}, 469 | {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6295ecd49304dcf3bfbfa45d9a081c96509e95f4b9d0eb7ee4ec0530c4a96514"}, 470 | {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4bd472dbe5e136f96a4b18f295d159d7f26fd399136f5b17b08c4e5f498cd494"}, 471 | {file = 
"kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bf7d9fce9bcc4752ca4a1b80aabd38f6d19009ea5cbda0e0856983cf6d0023f5"}, 472 | {file = "kiwisolver-1.4.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78d6601aed50c74e0ef02f4204da1816147a6d3fbdc8b3872d263338a9052c51"}, 473 | {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:877272cf6b4b7e94c9614f9b10140e198d2186363728ed0f701c6eee1baec1da"}, 474 | {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:db608a6757adabb32f1cfe6066e39b3706d8c3aa69bbc353a5b61edad36a5cb4"}, 475 | {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5853eb494c71e267912275e5586fe281444eb5e722de4e131cddf9d442615626"}, 476 | {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:f0a1dbdb5ecbef0d34eb77e56fcb3e95bbd7e50835d9782a45df81cc46949750"}, 477 | {file = "kiwisolver-1.4.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:283dffbf061a4ec60391d51e6155e372a1f7a4f5b15d59c8505339454f8989e4"}, 478 | {file = "kiwisolver-1.4.4-cp311-cp311-win32.whl", hash = "sha256:d06adcfa62a4431d404c31216f0f8ac97397d799cd53800e9d3efc2fbb3cf14e"}, 479 | {file = "kiwisolver-1.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:e7da3fec7408813a7cebc9e4ec55afed2d0fd65c4754bc376bf03498d4e92686"}, 480 | {file = "kiwisolver-1.4.4-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:62ac9cc684da4cf1778d07a89bf5f81b35834cb96ca523d3a7fb32509380cbf6"}, 481 | {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:41dae968a94b1ef1897cb322b39360a0812661dba7c682aa45098eb8e193dbdf"}, 482 | {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:02f79693ec433cb4b5f51694e8477ae83b3205768a6fb48ffba60549080e295b"}, 483 | {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d0611a0a2a518464c05ddd5a3a1a0e856ccc10e67079bb17f265ad19ab3c7597"}, 484 | {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:db5283d90da4174865d520e7366801a93777201e91e79bacbac6e6927cbceede"}, 485 | {file = "kiwisolver-1.4.4-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:1041feb4cda8708ce73bb4dcb9ce1ccf49d553bf87c3954bdfa46f0c3f77252c"}, 486 | {file = "kiwisolver-1.4.4-cp37-cp37m-win32.whl", hash = "sha256:a553dadda40fef6bfa1456dc4be49b113aa92c2a9a9e8711e955618cd69622e3"}, 487 | {file = "kiwisolver-1.4.4-cp37-cp37m-win_amd64.whl", hash = "sha256:03baab2d6b4a54ddbb43bba1a3a2d1627e82d205c5cf8f4c924dc49284b87166"}, 488 | {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:841293b17ad704d70c578f1f0013c890e219952169ce8a24ebc063eecf775454"}, 489 | {file = "kiwisolver-1.4.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f4f270de01dd3e129a72efad823da90cc4d6aafb64c410c9033aba70db9f1ff0"}, 490 | {file = "kiwisolver-1.4.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:f9f39e2f049db33a908319cf46624a569b36983c7c78318e9726a4cb8923b26c"}, 491 | {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c97528e64cb9ebeff9701e7938653a9951922f2a38bd847787d4a8e498cc83ae"}, 492 | {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d1573129aa0fd901076e2bfb4275a35f5b7aa60fbfb984499d661ec950320b0"}, 493 | {file = 
"kiwisolver-1.4.4-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ad881edc7ccb9d65b0224f4e4d05a1e85cf62d73aab798943df6d48ab0cd79a1"}, 494 | {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:b428ef021242344340460fa4c9185d0b1f66fbdbfecc6c63eff4b7c29fad429d"}, 495 | {file = "kiwisolver-1.4.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:2e407cb4bd5a13984a6c2c0fe1845e4e41e96f183e5e5cd4d77a857d9693494c"}, 496 | {file = "kiwisolver-1.4.4-cp38-cp38-win32.whl", hash = "sha256:75facbe9606748f43428fc91a43edb46c7ff68889b91fa31f53b58894503a191"}, 497 | {file = "kiwisolver-1.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:5bce61af018b0cb2055e0e72e7d65290d822d3feee430b7b8203d8a855e78766"}, 498 | {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8c808594c88a025d4e322d5bb549282c93c8e1ba71b790f539567932722d7bd8"}, 499 | {file = "kiwisolver-1.4.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f0a71d85ecdd570ded8ac3d1c0f480842f49a40beb423bb8014539a9f32a5897"}, 500 | {file = "kiwisolver-1.4.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b533558eae785e33e8c148a8d9921692a9fe5aa516efbdff8606e7d87b9d5824"}, 501 | {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:efda5fc8cc1c61e4f639b8067d118e742b812c930f708e6667a5ce0d13499e29"}, 502 | {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:7c43e1e1206cd421cd92e6b3280d4385d41d7166b3ed577ac20444b6995a445f"}, 503 | {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc8d3bd6c72b2dd9decf16ce70e20abcb3274ba01b4e1c96031e0c4067d1e7cd"}, 504 | {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4ea39b0ccc4f5d803e3337dd46bcce60b702be4d86fd0b3d7531ef10fd99a1ac"}, 505 | {file = "kiwisolver-1.4.4-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:968f44fdbf6dd757d12920d63b566eeb4d5b395fd2d00d29d7ef00a00582aac9"}, 506 | {file = "kiwisolver-1.4.4-cp39-cp39-win32.whl", hash = "sha256:da7e547706e69e45d95e116e6939488d62174e033b763ab1496b4c29b76fabea"}, 507 | {file = "kiwisolver-1.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:ba59c92039ec0a66103b1d5fe588fa546373587a7d68f5c96f743c3396afc04b"}, 508 | {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:91672bacaa030f92fc2f43b620d7b337fd9a5af28b0d6ed3f77afc43c4a64b5a"}, 509 | {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:787518a6789009c159453da4d6b683f468ef7a65bbde796bcea803ccf191058d"}, 510 | {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da152d8cdcab0e56e4f45eb08b9aea6455845ec83172092f09b0e077ece2cf7a"}, 511 | {file = "kiwisolver-1.4.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ecb1fa0db7bf4cff9dac752abb19505a233c7f16684c5826d1f11ebd9472b871"}, 512 | {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:28bc5b299f48150b5f822ce68624e445040595a4ac3d59251703779836eceff9"}, 513 | {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:81e38381b782cc7e1e46c4e14cd997ee6040768101aefc8fa3c24a4cc58e98f8"}, 514 | {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = 
"sha256:2a66fdfb34e05b705620dd567f5a03f239a088d5a3f321e7b6ac3239d22aa286"}, 515 | {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:872b8ca05c40d309ed13eb2e582cab0c5a05e81e987ab9c521bf05ad1d5cf5cb"}, 516 | {file = "kiwisolver-1.4.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:70e7c2e7b750585569564e2e5ca9845acfaa5da56ac46df68414f29fea97be9f"}, 517 | {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9f85003f5dfa867e86d53fac6f7e6f30c045673fa27b603c397753bebadc3008"}, 518 | {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e307eb9bd99801f82789b44bb45e9f541961831c7311521b13a6c85afc09767"}, 519 | {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b1792d939ec70abe76f5054d3f36ed5656021dcad1322d1cc996d4e54165cef9"}, 520 | {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6cb459eea32a4e2cf18ba5fcece2dbdf496384413bc1bae15583f19e567f3b2"}, 521 | {file = "kiwisolver-1.4.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:36dafec3d6d6088d34e2de6b85f9d8e2324eb734162fba59d2ba9ed7a2043d5b"}, 522 | {file = "kiwisolver-1.4.4.tar.gz", hash = "sha256:d41997519fcba4a1e46eb4a2fe31bc12f0ff957b2b81bac28db24744f333e955"}, 523 | ] 524 | llvmlite = [ 525 | {file = "llvmlite-0.39.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6717c7a6e93c9d2c3d07c07113ec80ae24af45cde536b34363d4bcd9188091d9"}, 526 | {file = "llvmlite-0.39.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ddab526c5a2c4ccb8c9ec4821fcea7606933dc53f510e2a6eebb45a418d3488a"}, 527 | {file = "llvmlite-0.39.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a3f331a323d0f0ada6b10d60182ef06c20a2f01be21699999d204c5750ffd0b4"}, 528 | {file = "llvmlite-0.39.1-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e2c00ff204afa721b0bb9835b5bf1ba7fba210eefcec5552a9e05a63219ba0dc"}, 529 | {file = "llvmlite-0.39.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16f56eb1eec3cda3a5c526bc3f63594fc24e0c8d219375afeb336f289764c6c7"}, 530 | {file = "llvmlite-0.39.1-cp310-cp310-win32.whl", hash = "sha256:d0bfd18c324549c0fec2c5dc610fd024689de6f27c6cc67e4e24a07541d6e49b"}, 531 | {file = "llvmlite-0.39.1-cp310-cp310-win_amd64.whl", hash = "sha256:7ebf1eb9badc2a397d4f6a6c8717447c81ac011db00064a00408bc83c923c0e4"}, 532 | {file = "llvmlite-0.39.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:6546bed4e02a1c3d53a22a0bced254b3b6894693318b16c16c8e43e29d6befb6"}, 533 | {file = "llvmlite-0.39.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1578f5000fdce513712e99543c50e93758a954297575610f48cb1fd71b27c08a"}, 534 | {file = "llvmlite-0.39.1-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3803f11ad5f6f6c3d2b545a303d68d9fabb1d50e06a8d6418e6fcd2d0df00959"}, 535 | {file = "llvmlite-0.39.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50aea09a2b933dab7c9df92361b1844ad3145bfb8dd2deb9cd8b8917d59306fb"}, 536 | {file = "llvmlite-0.39.1-cp37-cp37m-win32.whl", hash = "sha256:b1a0bbdb274fb683f993198775b957d29a6f07b45d184c571ef2a721ce4388cf"}, 537 | {file = "llvmlite-0.39.1-cp37-cp37m-win_amd64.whl", hash = "sha256:e172c73fccf7d6db4bd6f7de963dedded900d1a5c6778733241d878ba613980e"}, 538 | {file = 
"llvmlite-0.39.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e31f4b799d530255aaf0566e3da2df5bfc35d3cd9d6d5a3dcc251663656c27b1"}, 539 | {file = "llvmlite-0.39.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:62c0ea22e0b9dffb020601bb65cb11dd967a095a488be73f07d8867f4e327ca5"}, 540 | {file = "llvmlite-0.39.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9ffc84ade195abd4abcf0bd3b827b9140ae9ef90999429b9ea84d5df69c9058c"}, 541 | {file = "llvmlite-0.39.1-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0f158e4708dda6367d21cf15afc58de4ebce979c7a1aa2f6b977aae737e2a54"}, 542 | {file = "llvmlite-0.39.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:22d36591cd5d02038912321d9ab8e4668e53ae2211da5523f454e992b5e13c36"}, 543 | {file = "llvmlite-0.39.1-cp38-cp38-win32.whl", hash = "sha256:4c6ebace910410daf0bebda09c1859504fc2f33d122e9a971c4c349c89cca630"}, 544 | {file = "llvmlite-0.39.1-cp38-cp38-win_amd64.whl", hash = "sha256:fb62fc7016b592435d3e3a8f680e3ea8897c3c9e62e6e6cc58011e7a4801439e"}, 545 | {file = "llvmlite-0.39.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:fa9b26939ae553bf30a9f5c4c754db0fb2d2677327f2511e674aa2f5df941789"}, 546 | {file = "llvmlite-0.39.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e4f212c018db951da3e1dc25c2651abc688221934739721f2dad5ff1dd5f90e7"}, 547 | {file = "llvmlite-0.39.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39dc2160aed36e989610fc403487f11b8764b6650017ff367e45384dff88ffbf"}, 548 | {file = "llvmlite-0.39.1-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1ec3d70b3e507515936e475d9811305f52d049281eaa6c8273448a61c9b5b7e2"}, 549 | {file = "llvmlite-0.39.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:60f8dd1e76f47b3dbdee4b38d9189f3e020d22a173c00f930b52131001d801f9"}, 550 | {file = "llvmlite-0.39.1-cp39-cp39-win32.whl", hash = "sha256:03aee0ccd81735696474dc4f8b6be60774892a2929d6c05d093d17392c237f32"}, 551 | {file = "llvmlite-0.39.1-cp39-cp39-win_amd64.whl", hash = "sha256:3fc14e757bc07a919221f0cbaacb512704ce5774d7fcada793f1996d6bc75f2a"}, 552 | {file = "llvmlite-0.39.1.tar.gz", hash = "sha256:b43abd7c82e805261c425d50335be9a6c4f84264e34d6d6e475207300005d572"}, 553 | ] 554 | matplotlib = [ 555 | {file = "matplotlib-3.6.2-cp310-cp310-macosx_10_12_universal2.whl", hash = "sha256:8d0068e40837c1d0df6e3abf1cdc9a34a6d2611d90e29610fa1d2455aeb4e2e5"}, 556 | {file = "matplotlib-3.6.2-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:252957e208c23db72ca9918cb33e160c7833faebf295aaedb43f5b083832a267"}, 557 | {file = "matplotlib-3.6.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d50e8c1e571ee39b5dfbc295c11ad65988879f68009dd281a6e1edbc2ff6c18c"}, 558 | {file = "matplotlib-3.6.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d840adcad7354be6f2ec28d0706528b0026e4c3934cc6566b84eac18633eab1b"}, 559 | {file = "matplotlib-3.6.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:78ec3c3412cf277e6252764ee4acbdbec6920cc87ad65862272aaa0e24381eee"}, 560 | {file = "matplotlib-3.6.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9347cc6822f38db2b1d1ce992f375289670e595a2d1c15961aacbe0977407dfc"}, 561 | {file = "matplotlib-3.6.2-cp310-cp310-win32.whl", hash = "sha256:e0bbee6c2a5bf2a0017a9b5e397babb88f230e6f07c3cdff4a4c4bc75ed7c617"}, 562 | {file = "matplotlib-3.6.2-cp310-cp310-win_amd64.whl", hash = 
"sha256:8a0ae37576ed444fe853709bdceb2be4c7df6f7acae17b8378765bd28e61b3ae"}, 563 | {file = "matplotlib-3.6.2-cp311-cp311-macosx_10_12_universal2.whl", hash = "sha256:5ecfc6559132116dedfc482d0ad9df8a89dc5909eebffd22f3deb684132d002f"}, 564 | {file = "matplotlib-3.6.2-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:9f335e5625feb90e323d7e3868ec337f7b9ad88b5d633f876e3b778813021dab"}, 565 | {file = "matplotlib-3.6.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b2604c6450f9dd2c42e223b1f5dca9643a23cfecc9fde4a94bb38e0d2693b136"}, 566 | {file = "matplotlib-3.6.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e5afe0a7ea0e3a7a257907060bee6724a6002b7eec55d0db16fd32409795f3e1"}, 567 | {file = "matplotlib-3.6.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ca0e7a658fbafcddcaefaa07ba8dae9384be2343468a8e011061791588d839fa"}, 568 | {file = "matplotlib-3.6.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32d29c8c26362169c80c5718ce367e8c64f4dd068a424e7110df1dd2ed7bd428"}, 569 | {file = "matplotlib-3.6.2-cp311-cp311-win32.whl", hash = "sha256:5024b8ed83d7f8809982d095d8ab0b179bebc07616a9713f86d30cf4944acb73"}, 570 | {file = "matplotlib-3.6.2-cp311-cp311-win_amd64.whl", hash = "sha256:52c2bdd7cd0bf9d5ccdf9c1816568fd4ccd51a4d82419cc5480f548981b47dd0"}, 571 | {file = "matplotlib-3.6.2-cp38-cp38-macosx_10_12_universal2.whl", hash = "sha256:8a8dbe2cb7f33ff54b16bb5c500673502a35f18ac1ed48625e997d40c922f9cc"}, 572 | {file = "matplotlib-3.6.2-cp38-cp38-macosx_10_12_x86_64.whl", hash = "sha256:380d48c15ec41102a2b70858ab1dedfa33eb77b2c0982cb65a200ae67a48e9cb"}, 573 | {file = "matplotlib-3.6.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0844523dfaaff566e39dbfa74e6f6dc42e92f7a365ce80929c5030b84caa563a"}, 574 | {file = "matplotlib-3.6.2-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:7f716b6af94dc1b6b97c46401774472f0867e44595990fe80a8ba390f7a0a028"}, 575 | {file = "matplotlib-3.6.2-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74153008bd24366cf099d1f1e83808d179d618c4e32edb0d489d526523a94d9f"}, 576 | {file = "matplotlib-3.6.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f41e57ad63d336fe50d3a67bb8eaa26c09f6dda6a59f76777a99b8ccd8e26aec"}, 577 | {file = "matplotlib-3.6.2-cp38-cp38-win32.whl", hash = "sha256:d0e9ac04065a814d4cf2c6791a2ad563f739ae3ae830d716d54245c2b96fead6"}, 578 | {file = "matplotlib-3.6.2-cp38-cp38-win_amd64.whl", hash = "sha256:8a9d899953c722b9afd7e88dbefd8fb276c686c3116a43c577cfabf636180558"}, 579 | {file = "matplotlib-3.6.2-cp39-cp39-macosx_10_12_universal2.whl", hash = "sha256:f04f97797df35e442ed09f529ad1235d1f1c0f30878e2fe09a2676b71a8801e0"}, 580 | {file = "matplotlib-3.6.2-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:3964934731fd7a289a91d315919cf757f293969a4244941ab10513d2351b4e83"}, 581 | {file = "matplotlib-3.6.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:168093410b99f647ba61361b208f7b0d64dde1172b5b1796d765cd243cadb501"}, 582 | {file = "matplotlib-3.6.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e16dcaecffd55b955aa5e2b8a804379789c15987e8ebd2f32f01398a81e975b"}, 583 | {file = "matplotlib-3.6.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83dc89c5fd728fdb03b76f122f43b4dcee8c61f1489e232d9ad0f58020523e1c"}, 584 | {file = "matplotlib-3.6.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:795ad83940732b45d39b82571f87af0081c120feff2b12e748d96bb191169e33"}, 585 | {file = "matplotlib-3.6.2-cp39-cp39-win32.whl", hash = "sha256:19d61ee6414c44a04addbe33005ab1f87539d9f395e25afcbe9a3c50ce77c65c"}, 586 | {file = "matplotlib-3.6.2-cp39-cp39-win_amd64.whl", hash = "sha256:5ba73aa3aca35d2981e0b31230d58abb7b5d7ca104e543ae49709208d8ce706a"}, 587 | {file = "matplotlib-3.6.2-pp38-pypy38_pp73-macosx_10_12_x86_64.whl", hash = "sha256:1836f366272b1557a613f8265db220eb8dd883202bbbabe01bad5a4eadfd0c95"}, 588 | {file = "matplotlib-3.6.2-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0eda9d1b43f265da91fb9ae10d6922b5a986e2234470a524e6b18f14095b20d2"}, 589 | {file = "matplotlib-3.6.2-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec9be0f4826cdb3a3a517509dcc5f87f370251b76362051ab59e42b6b765f8c4"}, 590 | {file = "matplotlib-3.6.2-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:3cef89888a466228fc4e4b2954e740ce8e9afde7c4315fdd18caa1b8de58ca17"}, 591 | {file = "matplotlib-3.6.2-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:54fa9fe27f5466b86126ff38123261188bed568c1019e4716af01f97a12fe812"}, 592 | {file = "matplotlib-3.6.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e68be81cd8c22b029924b6d0ee814c337c0e706b8d88495a617319e5dd5441c3"}, 593 | {file = "matplotlib-3.6.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b0ca2c60d3966dfd6608f5f8c49b8a0fcf76de6654f2eda55fc6ef038d5a6f27"}, 594 | {file = "matplotlib-3.6.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:4426c74761790bff46e3d906c14c7aab727543293eed5a924300a952e1a3a3c1"}, 595 | {file = "matplotlib-3.6.2.tar.gz", hash = "sha256:b03fd10a1709d0101c054883b550f7c4c5e974f751e2680318759af005964990"}, 596 | ] 597 | metalift = [] 598 | mypy = [ 599 | {file = "mypy-0.950-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:cf9c261958a769a3bd38c3e133801ebcd284ffb734ea12d01457cb09eacf7d7b"}, 600 | {file = "mypy-0.950-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b5b5bd0ffb11b4aba2bb6d31b8643902c48f990cc92fda4e21afac658044f0c0"}, 601 | {file = "mypy-0.950-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5e7647df0f8fc947388e6251d728189cfadb3b1e558407f93254e35abc026e22"}, 602 | {file = "mypy-0.950-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:eaff8156016487c1af5ffa5304c3e3fd183edcb412f3e9c72db349faf3f6e0eb"}, 603 | {file = "mypy-0.950-cp310-cp310-win_amd64.whl", hash = "sha256:563514c7dc504698fb66bb1cf897657a173a496406f1866afae73ab5b3cdb334"}, 604 | {file = "mypy-0.950-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:dd4d670eee9610bf61c25c940e9ade2d0ed05eb44227275cce88701fee014b1f"}, 605 | {file = "mypy-0.950-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:ca75ecf2783395ca3016a5e455cb322ba26b6d33b4b413fcdedfc632e67941dc"}, 606 | {file = "mypy-0.950-cp36-cp36m-win_amd64.whl", hash = "sha256:6003de687c13196e8a1243a5e4bcce617d79b88f83ee6625437e335d89dfebe2"}, 607 | {file = "mypy-0.950-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4c653e4846f287051599ed8f4b3c044b80e540e88feec76b11044ddc5612ffed"}, 608 | {file = "mypy-0.950-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e19736af56947addedce4674c0971e5dceef1b5ec7d667fe86bcd2b07f8f9075"}, 609 | {file = "mypy-0.950-cp37-cp37m-win_amd64.whl", hash = 
"sha256:ef7beb2a3582eb7a9f37beaf38a28acfd801988cde688760aea9e6cc4832b10b"}, 610 | {file = "mypy-0.950-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:0112752a6ff07230f9ec2f71b0d3d4e088a910fdce454fdb6553e83ed0eced7d"}, 611 | {file = "mypy-0.950-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:ee0a36edd332ed2c5208565ae6e3a7afc0eabb53f5327e281f2ef03a6bc7687a"}, 612 | {file = "mypy-0.950-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:77423570c04aca807508a492037abbd72b12a1fb25a385847d191cd50b2c9605"}, 613 | {file = "mypy-0.950-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:5ce6a09042b6da16d773d2110e44f169683d8cc8687e79ec6d1181a72cb028d2"}, 614 | {file = "mypy-0.950-cp38-cp38-win_amd64.whl", hash = "sha256:5b231afd6a6e951381b9ef09a1223b1feabe13625388db48a8690f8daa9b71ff"}, 615 | {file = "mypy-0.950-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:0384d9f3af49837baa92f559d3fa673e6d2652a16550a9ee07fc08c736f5e6f8"}, 616 | {file = "mypy-0.950-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1fdeb0a0f64f2a874a4c1f5271f06e40e1e9779bf55f9567f149466fc7a55038"}, 617 | {file = "mypy-0.950-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:61504b9a5ae166ba5ecfed9e93357fd51aa693d3d434b582a925338a2ff57fd2"}, 618 | {file = "mypy-0.950-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:a952b8bc0ae278fc6316e6384f67bb9a396eb30aced6ad034d3a76120ebcc519"}, 619 | {file = "mypy-0.950-cp39-cp39-win_amd64.whl", hash = "sha256:eaea21d150fb26d7b4856766e7addcf929119dd19fc832b22e71d942835201ef"}, 620 | {file = "mypy-0.950-py3-none-any.whl", hash = "sha256:a4d9898f46446bfb6405383b57b96737dcfd0a7f25b748e78ef3e8c576bba3cb"}, 621 | {file = "mypy-0.950.tar.gz", hash = "sha256:1b333cfbca1762ff15808a0ef4f71b5d3eed8528b23ea1c3fb50543c867d68de"}, 622 | ] 623 | mypy-extensions = [ 624 | {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, 625 | {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, 626 | ] 627 | numpy = [ 628 | {file = "numpy-1.23.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9c88793f78fca17da0145455f0d7826bcb9f37da4764af27ac945488116efe63"}, 629 | {file = "numpy-1.23.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e9f4c4e51567b616be64e05d517c79a8a22f3606499941d97bb76f2ca59f982d"}, 630 | {file = "numpy-1.23.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7903ba8ab592b82014713c491f6c5d3a1cde5b4a3bf116404e08f5b52f6daf43"}, 631 | {file = "numpy-1.23.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e05b1c973a9f858c74367553e236f287e749465f773328c8ef31abe18f691e1"}, 632 | {file = "numpy-1.23.5-cp310-cp310-win32.whl", hash = "sha256:522e26bbf6377e4d76403826ed689c295b0b238f46c28a7251ab94716da0b280"}, 633 | {file = "numpy-1.23.5-cp310-cp310-win_amd64.whl", hash = "sha256:dbee87b469018961d1ad79b1a5d50c0ae850000b639bcb1b694e9981083243b6"}, 634 | {file = "numpy-1.23.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:ce571367b6dfe60af04e04a1834ca2dc5f46004ac1cc756fb95319f64c095a96"}, 635 | {file = "numpy-1.23.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:56e454c7833e94ec9769fa0f86e6ff8e42ee38ce0ce1fa4cbb747ea7e06d56aa"}, 636 | {file = "numpy-1.23.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5039f55555e1eab31124a5768898c9e22c25a65c1e0037f4d7c495a45778c9f2"}, 637 | {file = "numpy-1.23.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:58f545efd1108e647604a1b5aa809591ccd2540f468a880bedb97247e72db387"}, 638 | {file = "numpy-1.23.5-cp311-cp311-win32.whl", hash = "sha256:b2a9ab7c279c91974f756c84c365a669a887efa287365a8e2c418f8b3ba73fb0"}, 639 | {file = "numpy-1.23.5-cp311-cp311-win_amd64.whl", hash = "sha256:0cbe9848fad08baf71de1a39e12d1b6310f1d5b2d0ea4de051058e6e1076852d"}, 640 | {file = "numpy-1.23.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f063b69b090c9d918f9df0a12116029e274daf0181df392839661c4c7ec9018a"}, 641 | {file = "numpy-1.23.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0aaee12d8883552fadfc41e96b4c82ee7d794949e2a7c3b3a7201e968c7ecab9"}, 642 | {file = "numpy-1.23.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:92c8c1e89a1f5028a4c6d9e3ccbe311b6ba53694811269b992c0b224269e2398"}, 643 | {file = "numpy-1.23.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d208a0f8729f3fb790ed18a003f3a57895b989b40ea4dce4717e9cf4af62c6bb"}, 644 | {file = "numpy-1.23.5-cp38-cp38-win32.whl", hash = "sha256:06005a2ef6014e9956c09ba07654f9837d9e26696a0470e42beedadb78c11b07"}, 645 | {file = "numpy-1.23.5-cp38-cp38-win_amd64.whl", hash = "sha256:ca51fcfcc5f9354c45f400059e88bc09215fb71a48d3768fb80e357f3b457e1e"}, 646 | {file = "numpy-1.23.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8969bfd28e85c81f3f94eb4a66bc2cf1dbdc5c18efc320af34bffc54d6b1e38f"}, 647 | {file = "numpy-1.23.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a7ac231a08bb37f852849bbb387a20a57574a97cfc7b6cabb488a4fc8be176de"}, 648 | {file = "numpy-1.23.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf837dc63ba5c06dc8797c398db1e223a466c7ece27a1f7b5232ba3466aafe3d"}, 649 | {file = "numpy-1.23.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:33161613d2269025873025b33e879825ec7b1d831317e68f4f2f0f84ed14c719"}, 650 | {file = "numpy-1.23.5-cp39-cp39-win32.whl", hash = "sha256:af1da88f6bc3d2338ebbf0e22fe487821ea4d8e89053e25fa59d1d79786e7481"}, 651 | {file = "numpy-1.23.5-cp39-cp39-win_amd64.whl", hash = "sha256:09b7847f7e83ca37c6e627682f145856de331049013853f344f37b0c9690e3df"}, 652 | {file = "numpy-1.23.5-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:abdde9f795cf292fb9651ed48185503a2ff29be87770c3b8e2a14b0cd7aa16f8"}, 653 | {file = "numpy-1.23.5-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f9a909a8bae284d46bbfdefbdd4a262ba19d3bc9921b1e76126b1d21c3c34135"}, 654 | {file = "numpy-1.23.5-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:01dd17cbb340bf0fc23981e52e1d18a9d4050792e8fb8363cecbf066a84b827d"}, 655 | {file = "numpy-1.23.5.tar.gz", hash = "sha256:1b1766d6f397c18153d40015ddfc79ddb715cabadc04d2d228d4e5a8bc4ded1a"}, 656 | ] 657 | packaging = [ 658 | {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, 659 | {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, 660 | ] 661 | pandas = [ 662 | {file = "pandas-1.5.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e9dbacd22555c2d47f262ef96bb4e30880e5956169741400af8b306bbb24a273"}, 663 | {file = "pandas-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e2b83abd292194f350bb04e188f9379d36b8dfac24dd445d5c87575f3beaf789"}, 664 | {file = 
"pandas-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2552bffc808641c6eb471e55aa6899fa002ac94e4eebfa9ec058649122db5824"}, 665 | {file = "pandas-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fc87eac0541a7d24648a001d553406f4256e744d92df1df8ebe41829a915028"}, 666 | {file = "pandas-1.5.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d0d8fd58df5d17ddb8c72a5075d87cd80d71b542571b5f78178fb067fa4e9c72"}, 667 | {file = "pandas-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:4aed257c7484d01c9a194d9a94758b37d3d751849c05a0050c087a358c41ad1f"}, 668 | {file = "pandas-1.5.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:375262829c8c700c3e7cbb336810b94367b9c4889818bbd910d0ecb4e45dc261"}, 669 | {file = "pandas-1.5.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc3cd122bea268998b79adebbb8343b735a5511ec14efb70a39e7acbc11ccbdc"}, 670 | {file = "pandas-1.5.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b4f5a82afa4f1ff482ab8ded2ae8a453a2cdfde2001567b3ca24a4c5c5ca0db3"}, 671 | {file = "pandas-1.5.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8092a368d3eb7116e270525329a3e5c15ae796ccdf7ccb17839a73b4f5084a39"}, 672 | {file = "pandas-1.5.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f6257b314fc14958f8122779e5a1557517b0f8e500cfb2bd53fa1f75a8ad0af2"}, 673 | {file = "pandas-1.5.2-cp311-cp311-win_amd64.whl", hash = "sha256:82ae615826da838a8e5d4d630eb70c993ab8636f0eff13cb28aafc4291b632b5"}, 674 | {file = "pandas-1.5.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:457d8c3d42314ff47cc2d6c54f8fc0d23954b47977b2caed09cd9635cb75388b"}, 675 | {file = "pandas-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:c009a92e81ce836212ce7aa98b219db7961a8b95999b97af566b8dc8c33e9519"}, 676 | {file = "pandas-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:71f510b0efe1629bf2f7c0eadb1ff0b9cf611e87b73cd017e6b7d6adb40e2b3a"}, 677 | {file = "pandas-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a40dd1e9f22e01e66ed534d6a965eb99546b41d4d52dbdb66565608fde48203f"}, 678 | {file = "pandas-1.5.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5ae7e989f12628f41e804847a8cc2943d362440132919a69429d4dea1f164da0"}, 679 | {file = "pandas-1.5.2-cp38-cp38-win32.whl", hash = "sha256:530948945e7b6c95e6fa7aa4be2be25764af53fba93fe76d912e35d1c9ee46f5"}, 680 | {file = "pandas-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:73f219fdc1777cf3c45fde7f0708732ec6950dfc598afc50588d0d285fddaefc"}, 681 | {file = "pandas-1.5.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:9608000a5a45f663be6af5c70c3cbe634fa19243e720eb380c0d378666bc7702"}, 682 | {file = "pandas-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:315e19a3e5c2ab47a67467fc0362cb36c7c60a93b6457f675d7d9615edad2ebe"}, 683 | {file = "pandas-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e18bc3764cbb5e118be139b3b611bc3fbc5d3be42a7e827d1096f46087b395eb"}, 684 | {file = "pandas-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0183cb04a057cc38fde5244909fca9826d5d57c4a5b7390c0cc3fa7acd9fa883"}, 685 | {file = "pandas-1.5.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:344021ed3e639e017b452aa8f5f6bf38a8806f5852e217a7594417fb9bbfa00e"}, 686 | {file = "pandas-1.5.2-cp39-cp39-win32.whl", hash = "sha256:e7469271497960b6a781eaa930cba8af400dd59b62ec9ca2f4d31a19f2f91090"}, 687 | {file = 
"pandas-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:c218796d59d5abd8780170c937b812c9637e84c32f8271bbf9845970f8c1351f"}, 688 | {file = "pandas-1.5.2.tar.gz", hash = "sha256:220b98d15cee0b2cd839a6358bd1f273d0356bf964c1a1aeb32d47db0215488b"}, 689 | ] 690 | pathspec = [ 691 | {file = "pathspec-0.10.2-py3-none-any.whl", hash = "sha256:88c2606f2c1e818b978540f73ecc908e13999c6c3a383daf3705652ae79807a5"}, 692 | {file = "pathspec-0.10.2.tar.gz", hash = "sha256:8f6bf73e5758fd365ef5d58ce09ac7c27d2833a8d7da51712eac6e27e35141b0"}, 693 | ] 694 | pillow = [ 695 | {file = "Pillow-9.3.0-1-cp37-cp37m-win32.whl", hash = "sha256:e6ea6b856a74d560d9326c0f5895ef8050126acfdc7ca08ad703eb0081e82b74"}, 696 | {file = "Pillow-9.3.0-1-cp37-cp37m-win_amd64.whl", hash = "sha256:32a44128c4bdca7f31de5be641187367fe2a450ad83b833ef78910397db491aa"}, 697 | {file = "Pillow-9.3.0-cp310-cp310-macosx_10_10_x86_64.whl", hash = "sha256:0b7257127d646ff8676ec8a15520013a698d1fdc48bc2a79ba4e53df792526f2"}, 698 | {file = "Pillow-9.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b90f7616ea170e92820775ed47e136208e04c967271c9ef615b6fbd08d9af0e3"}, 699 | {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:68943d632f1f9e3dce98908e873b3a090f6cba1cbb1b892a9e8d97c938871fbe"}, 700 | {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:be55f8457cd1eac957af0c3f5ece7bc3f033f89b114ef30f710882717670b2a8"}, 701 | {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5d77adcd56a42d00cc1be30843d3426aa4e660cab4a61021dc84467123f7a00c"}, 702 | {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:829f97c8e258593b9daa80638aee3789b7df9da5cf1336035016d76f03b8860c"}, 703 | {file = "Pillow-9.3.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:801ec82e4188e935c7f5e22e006d01611d6b41661bba9fe45b60e7ac1a8f84de"}, 704 | {file = "Pillow-9.3.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:871b72c3643e516db4ecf20efe735deb27fe30ca17800e661d769faab45a18d7"}, 705 | {file = "Pillow-9.3.0-cp310-cp310-win32.whl", hash = "sha256:655a83b0058ba47c7c52e4e2df5ecf484c1b0b0349805896dd350cbc416bdd91"}, 706 | {file = "Pillow-9.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:9f47eabcd2ded7698106b05c2c338672d16a6f2a485e74481f524e2a23c2794b"}, 707 | {file = "Pillow-9.3.0-cp311-cp311-macosx_10_10_x86_64.whl", hash = "sha256:57751894f6618fd4308ed8e0c36c333e2f5469744c34729a27532b3db106ee20"}, 708 | {file = "Pillow-9.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7db8b751ad307d7cf238f02101e8e36a128a6cb199326e867d1398067381bff4"}, 709 | {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3033fbe1feb1b59394615a1cafaee85e49d01b51d54de0cbf6aa8e64182518a1"}, 710 | {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22b012ea2d065fd163ca096f4e37e47cd8b59cf4b0fd47bfca6abb93df70b34c"}, 711 | {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b9a65733d103311331875c1dca05cb4606997fd33d6acfed695b1232ba1df193"}, 712 | {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:502526a2cbfa431d9fc2a079bdd9061a2397b842bb6bc4239bb176da00993812"}, 713 | {file = "Pillow-9.3.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:90fb88843d3902fe7c9586d439d1e8c05258f41da473952aa8b328d8b907498c"}, 714 | {file = "Pillow-9.3.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = 
"sha256:89dca0ce00a2b49024df6325925555d406b14aa3efc2f752dbb5940c52c56b11"}, 715 | {file = "Pillow-9.3.0-cp311-cp311-win32.whl", hash = "sha256:3168434d303babf495d4ba58fc22d6604f6e2afb97adc6a423e917dab828939c"}, 716 | {file = "Pillow-9.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:18498994b29e1cf86d505edcb7edbe814d133d2232d256db8c7a8ceb34d18cef"}, 717 | {file = "Pillow-9.3.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:772a91fc0e03eaf922c63badeca75e91baa80fe2f5f87bdaed4280662aad25c9"}, 718 | {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:afa4107d1b306cdf8953edde0534562607fe8811b6c4d9a486298ad31de733b2"}, 719 | {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4012d06c846dc2b80651b120e2cdd787b013deb39c09f407727ba90015c684f"}, 720 | {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77ec3e7be99629898c9a6d24a09de089fa5356ee408cdffffe62d67bb75fdd72"}, 721 | {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_aarch64.whl", hash = "sha256:6c738585d7a9961d8c2821a1eb3dcb978d14e238be3d70f0a706f7fa9316946b"}, 722 | {file = "Pillow-9.3.0-cp37-cp37m-manylinux_2_28_x86_64.whl", hash = "sha256:828989c45c245518065a110434246c44a56a8b2b2f6347d1409c787e6e4651ee"}, 723 | {file = "Pillow-9.3.0-cp37-cp37m-win32.whl", hash = "sha256:82409ffe29d70fd733ff3c1025a602abb3e67405d41b9403b00b01debc4c9a29"}, 724 | {file = "Pillow-9.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:41e0051336807468be450d52b8edd12ac60bebaa97fe10c8b660f116e50b30e4"}, 725 | {file = "Pillow-9.3.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:b03ae6f1a1878233ac620c98f3459f79fd77c7e3c2b20d460284e1fb370557d4"}, 726 | {file = "Pillow-9.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4390e9ce199fc1951fcfa65795f239a8a4944117b5935a9317fb320e7767b40f"}, 727 | {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40e1ce476a7804b0fb74bcfa80b0a2206ea6a882938eaba917f7a0f004b42502"}, 728 | {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a0a06a052c5f37b4ed81c613a455a81f9a3a69429b4fd7bb913c3fa98abefc20"}, 729 | {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03150abd92771742d4a8cd6f2fa6246d847dcd2e332a18d0c15cc75bf6703040"}, 730 | {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:15c42fb9dea42465dfd902fb0ecf584b8848ceb28b41ee2b58f866411be33f07"}, 731 | {file = "Pillow-9.3.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:51e0e543a33ed92db9f5ef69a0356e0b1a7a6b6a71b80df99f1d181ae5875636"}, 732 | {file = "Pillow-9.3.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:3dd6caf940756101205dffc5367babf288a30043d35f80936f9bfb37f8355b32"}, 733 | {file = "Pillow-9.3.0-cp38-cp38-win32.whl", hash = "sha256:f1ff2ee69f10f13a9596480335f406dd1f70c3650349e2be67ca3139280cade0"}, 734 | {file = "Pillow-9.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:276a5ca930c913f714e372b2591a22c4bd3b81a418c0f6635ba832daec1cbcfc"}, 735 | {file = "Pillow-9.3.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:73bd195e43f3fadecfc50c682f5055ec32ee2c933243cafbfdec69ab1aa87cad"}, 736 | {file = "Pillow-9.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1c7c8ae3864846fc95f4611c78129301e203aaa2af813b703c55d10cc1628535"}, 737 | {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2e0918e03aa0c72ea56edbb00d4d664294815aa11291a11504a377ea018330d3"}, 738 | {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b0915e734b33a474d76c28e07292f196cdf2a590a0d25bcc06e64e545f2d146c"}, 739 | {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af0372acb5d3598f36ec0914deed2a63f6bcdb7b606da04dc19a88d31bf0c05b"}, 740 | {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:ad58d27a5b0262c0c19b47d54c5802db9b34d38bbf886665b626aff83c74bacd"}, 741 | {file = "Pillow-9.3.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:97aabc5c50312afa5e0a2b07c17d4ac5e865b250986f8afe2b02d772567a380c"}, 742 | {file = "Pillow-9.3.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9aaa107275d8527e9d6e7670b64aabaaa36e5b6bd71a1015ddd21da0d4e06448"}, 743 | {file = "Pillow-9.3.0-cp39-cp39-win32.whl", hash = "sha256:bac18ab8d2d1e6b4ce25e3424f709aceef668347db8637c2296bcf41acb7cf48"}, 744 | {file = "Pillow-9.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:b472b5ea442148d1c3e2209f20f1e0bb0eb556538690fa70b5e1f79fa0ba8dc2"}, 745 | {file = "Pillow-9.3.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:ab388aaa3f6ce52ac1cb8e122c4bd46657c15905904b3120a6248b5b8b0bc228"}, 746 | {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dbb8e7f2abee51cef77673be97760abff1674ed32847ce04b4af90f610144c7b"}, 747 | {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bca31dd6014cb8b0b2db1e46081b0ca7d936f856da3b39744aef499db5d84d02"}, 748 | {file = "Pillow-9.3.0-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:c7025dce65566eb6e89f56c9509d4f628fddcedb131d9465cacd3d8bac337e7e"}, 749 | {file = "Pillow-9.3.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:ebf2029c1f464c59b8bdbe5143c79fa2045a581ac53679733d3a91d400ff9efb"}, 750 | {file = "Pillow-9.3.0-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:b59430236b8e58840a0dfb4099a0e8717ffb779c952426a69ae435ca1f57210c"}, 751 | {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:12ce4932caf2ddf3e41d17fc9c02d67126935a44b86df6a206cf0d7161548627"}, 752 | {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ae5331c23ce118c53b172fa64a4c037eb83c9165aba3a7ba9ddd3ec9fa64a699"}, 753 | {file = "Pillow-9.3.0-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0b07fffc13f474264c336298d1b4ce01d9c5a011415b79d4ee5527bb69ae6f65"}, 754 | {file = "Pillow-9.3.0-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:073adb2ae23431d3b9bcbcff3fe698b62ed47211d0716b067385538a1b0f28b8"}, 755 | {file = "Pillow-9.3.0.tar.gz", hash = "sha256:c935a22a557a560108d780f9a0fc426dd7459940dc54faa49d83249c8d3e760f"}, 756 | ] 757 | platformdirs = [ 758 | {file = "platformdirs-2.5.4-py3-none-any.whl", hash = "sha256:af0276409f9a02373d540bf8480021a048711d572745aef4b7842dad245eba10"}, 759 | {file = "platformdirs-2.5.4.tar.gz", hash = "sha256:1006647646d80f16130f052404c6b901e80ee4ed6bef6792e1f238a8969106f7"}, 760 | ] 761 | pyflakes = [ 762 | {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, 763 | {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, 764 | ] 765 | pyparsing = [ 766 | {file = "pyparsing-3.0.9-py3-none-any.whl", hash = 
"sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, 767 | {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, 768 | ] 769 | python-dateutil = [ 770 | {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, 771 | {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, 772 | ] 773 | pytz = [ 774 | {file = "pytz-2022.6-py2.py3-none-any.whl", hash = "sha256:222439474e9c98fced559f1709d89e6c9cbf8d79c794ff3eb9f8800064291427"}, 775 | {file = "pytz-2022.6.tar.gz", hash = "sha256:e89512406b793ca39f5971bc999cc538ce125c0e51c27941bef4568b460095e2"}, 776 | ] 777 | setuptools-scm = [ 778 | {file = "setuptools_scm-7.0.5-py3-none-any.whl", hash = "sha256:7930f720905e03ccd1e1d821db521bff7ec2ac9cf0ceb6552dd73d24a45d3b02"}, 779 | {file = "setuptools_scm-7.0.5.tar.gz", hash = "sha256:031e13af771d6f892b941adb6ea04545bbf91ebc5ce68c78aaf3fff6e1fb4844"}, 780 | ] 781 | six = [ 782 | {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, 783 | {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, 784 | ] 785 | tomli = [ 786 | {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, 787 | {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, 788 | ] 789 | typing-extensions = [ 790 | {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, 791 | {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, 792 | ] 793 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [tool.poetry] 2 | name = "katara" 3 | version = "0.1.0" 4 | description = "" 5 | authors = ["Shadaj Laddad "] 6 | 7 | [tool.poetry.dependencies] 8 | python = ">=3.8,<3.11" 9 | metalift = { git = "https://github.com/metalift/metalift.git", branch = "main" } 10 | 11 | [tool.poetry.dev-dependencies] 12 | mypy = "^0.950" 13 | black = "22.3.0" # make sure to update ci.yml! 
14 | autoflake = "^1.4" 15 | pandas = "^1.4.3" 16 | matplotlib = "^3.5.2" 17 | 18 | [build-system] 19 | requires = ["poetry-core>=1.0.0"] 20 | build-backend = "poetry.core.masonry.api" 21 | 22 | [tool.black] 23 | line-length = 88 24 | target-version = ['py38'] 25 | extend-exclude = ''' 26 | ^/tests| 27 | ^/rosette-packages 28 | ''' 29 | -------------------------------------------------------------------------------- /rosette-packages-flake.nix: -------------------------------------------------------------------------------- 1 | # copy me to ../rosette-packages/flake.nix 2 | # then run raco pkg install --scope-dir ../rosette-packages/packages rosette 3 | { 4 | description = "rosette-packages"; 5 | 6 | inputs = {}; 7 | 8 | outputs = { self }: { 9 | packages = ./packages; 10 | }; 11 | } 12 | -------------------------------------------------------------------------------- /tests/compile-add-blocks: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | # compiles the benchmark and runs the add-empty-blocks pass and instruction namer on it 4 | 5 | file="$1" 6 | ext="${file##*\.}" 7 | 8 | clang_out=${file/.$ext/.ll} 9 | 10 | echo "output to:" $clang_out 11 | 12 | include_path="/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include" 13 | 14 | #clang -g -I $include_path -c -emit-llvm -S $file -o $clang_out 15 | if [[ $file == *.c ]] 16 | then 17 | clang -O0 -I $include_path -I../headers -c -emit-llvm -S $file -o $clang_out 18 | else 19 | clang++ -std=c++17 -O0 -I $include_path -I../headers -c -emit-llvm -S $file -o $clang_out 20 | fi 21 | 22 | #opt -lowerswitch -instnamer -S $clang_out > tmp.ll 23 | opt -load ../llvm-pass/build/addEmptyBlocks/libAddEmptyBlocksPass.so -addEmptyBlock -lowerinvoke --unreachableblockelim -instnamer -S $clang_out > tmp.ll 24 | 25 | mv tmp.ll $clang_out 26 | 27 | loops_out=${file/.$ext/.loops} 28 | echo "output loops info to:" $loops_out 29 | opt -analyze -loops $clang_out > $loops_out -------------------------------------------------------------------------------- /tests/compile-all: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ls *.c | xargs -n 1 ./compile-add-blocks 4 | -------------------------------------------------------------------------------- /tests/plot_distribution.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import pandas 3 | import matplotlib.pyplot as plt 4 | 5 | if __name__ == "__main__": 6 | bench = sys.argv[1] 7 | first_n = int(sys.argv[2].split("first_")[1]) 8 | 9 | no_pruning = pandas.read_csv(f"benchmarks-{bench}-0-direct-unbounded-first_{first_n}-distribution.csv") 10 | no_pruning["time"] = no_pruning["time"] / 60 11 | bounded_history = pandas.read_csv(f"benchmarks-{bench}-0-bounded-pruning-first_{first_n}-distribution.csv") 12 | bounded_history["time"] = bounded_history["time"] / 60 13 | 14 | plt.figure() 15 | ax = bounded_history.plot(x="time",y="percent",label="bounded history") 16 | no_pruning.plot(x="time",y="percent",label="direct unbounded",ax=ax) 17 | ax.set_xlabel("Synthesis Time (minutes)") 18 | ax.set_ylabel("Candidates Evaluated") 19 | plt.legend(loc='lower right') 20 | 21 | plt.savefig(f"distribution-{bench}-first_{first_n}.png") 22 | plt.show() 23 | -------------------------------------------------------------------------------- /tests/sequential1.c:
-------------------------------------------------------------------------------- 1 | #include "set.h" 2 | 3 | set* test_init_state() { 4 | set* out = set_create(); 5 | return out; 6 | } 7 | 8 | set* test_next_state(set* state, int add, int value) { 9 | if (add == 1) { 10 | state = set_add(state, value); 11 | } else { 12 | state = set_remove(state, value); 13 | } 14 | 15 | return state; 16 | } 17 | 18 | int test_response(set* state, int value) { 19 | return set_contains(state, value); 20 | } 21 | -------------------------------------------------------------------------------- /tests/sequential1_clock.c: -------------------------------------------------------------------------------- 1 | #include "set.h" 2 | 3 | set* test_init_state() { 4 | set* out = set_create(); 5 | return out; 6 | } 7 | 8 | set* test_next_state(set* state, int add, int value, int clock) { 9 | if (add == 1) { 10 | state = set_add(state, value); 11 | } else { 12 | state = set_remove(state, value); 13 | } 14 | 15 | return state; 16 | } 17 | 18 | int test_response(set* state, int value) { 19 | return set_contains(state, value); 20 | } 21 | -------------------------------------------------------------------------------- /tests/sequential2.c: -------------------------------------------------------------------------------- 1 | int test_init_state() { 2 | int out = 0; 3 | return out; 4 | } 5 | 6 | int test_next_state(int state, int add, int node_id) { 7 | if (add == 1) { 8 | state = state + 1; 9 | } else { 10 | state = state - 1; 11 | } 12 | 13 | return state; 14 | } 15 | 16 | int test_response(int state) { 17 | return state; 18 | } 19 | -------------------------------------------------------------------------------- /tests/sequential_flag.c: -------------------------------------------------------------------------------- 1 | int test_init_state() { 2 | return 0; 3 | } 4 | 5 | int test_next_state(int state, int enable, int clock) { 6 | if (enable == 1) { 7 | return 1; 8 | } else { 9 | return 0; 10 | } 11 | } 12 | 13 | int test_response(int state) { 14 | return state; 15 | } 16 | -------------------------------------------------------------------------------- /tests/sequential_register.c: -------------------------------------------------------------------------------- 1 | int test_init_state() { 2 | return 0; 3 | } 4 | 5 | int test_next_state(int state, int value, int clock) { 6 | state = value; 7 | return state; 8 | } 9 | 10 | int test_response(int state) { 11 | return state; 12 | } 13 | -------------------------------------------------------------------------------- /tests/synthesize_crdt.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import contextlib 3 | import csv 4 | from time import time 5 | from typing import List 6 | from katara.search_structures import search_crdt_structures 7 | from metalift.analysis import CodeInfo 8 | from metalift.ir import * 9 | import katara.lattices as lat 10 | from katara.auto_grammar import all_node_id_gets, auto_grammar, expand_lattice_logic 11 | import sys 12 | import multiprocessing as mp 13 | from metalift.maps_lang import mapsLang 14 | 15 | from metalift.synthesize_auto import synthesize 16 | 17 | def fold_conditions(out, conditions): 18 | for c in conditions: 19 | out = Ite(c, out, out) 20 | return out 21 | 22 | # we can make equivalence and state invariant shallower because 23 | # they are augmented with query and lattice validity checks 24 | 25 | def grammarEquivalence(inputState, synthState, queryParams, baseDepth): 26 | 
return auto_grammar( 27 | Bool(), 28 | max(baseDepth - 1, 1), 29 | inputState, synthState, *queryParams 30 | ) 31 | 32 | 33 | def grammarStateInvariant(synthState, synthStateStructure, baseDepth, invariantBoost): 34 | state_valid = And(*[ 35 | synthStateStructure[i].check_is_valid( 36 | TupleGet(synthState, IntLit(i)) 37 | ) 38 | for i in range(len(synthStateStructure)) 39 | ]) 40 | 41 | return And( 42 | state_valid, 43 | auto_grammar(Bool(), max(baseDepth - 1, 1) + invariantBoost, synthState) 44 | ) 45 | 46 | 47 | def grammarSupportedCommand(synthState, args, synthStateStructure, baseDepth, invariantBoost): 48 | conditions = [Eq(a, IntLit(1)) for a in args if a.type == EnumInt()] 49 | 50 | out = auto_grammar( 51 | Bool(), baseDepth + invariantBoost, 52 | synthState, *args, 53 | *expand_lattice_logic(*[ 54 | (TupleGet(synthState, IntLit(i)), synthStateStructure[i]) 55 | for i in range(len(synthStateStructure)) 56 | ]), 57 | enable_ite=True 58 | ) 59 | 60 | return fold_conditions(out, conditions) 61 | 62 | # we make query deeper because it requires more computation 63 | 64 | def grammarQuery(name: str, args: List[Expr], retT: Type, baseDepth): 65 | if retT == EnumInt(): 66 | condition = auto_grammar( 67 | Bool(), 68 | baseDepth + 1, 69 | *args, 70 | allow_node_id_reductions=True, 71 | ) 72 | 73 | summary = Ite(condition, IntLit(1), IntLit(0)) 74 | else: 75 | summary = auto_grammar( 76 | parseTypeRef(retT), baseDepth + 1, 77 | *args, 78 | enable_ite=True, 79 | allow_node_id_reductions=True, 80 | ) 81 | 82 | return Synth(name, summary, *args) 83 | 84 | 85 | def grammar(inputState: Expr, args: List[Expr], synthStateStructure, baseDepth): 86 | conditions = [Eq(a, IntLit(1)) for a in args if a.type == EnumInt()] 87 | 88 | non_associative_data = [] 89 | for a in args: 90 | if a.type == NodeIDInt(): 91 | non_associative_data = all_node_id_gets( 92 | inputState, a, 93 | auto_grammar(None, 0, *args) 94 | ) 95 | break 96 | 97 | return Tuple( 98 | *[ 99 | synthStateStructure[i].merge( 100 | TupleGet(inputState, IntLit(i)), 101 | fold_conditions(auto_grammar( 102 | TupleGet(inputState, IntLit(i)).type, 103 | baseDepth, 104 | *args, 105 | *non_associative_data 106 | ), conditions) 107 | ) 108 | for i in range(len(synthStateStructure)) 109 | ], 110 | ) 111 | 112 | 113 | def initState(synthStateStructure): 114 | return Tuple( 115 | *[auto_grammar(elem.ir_type(), 1, elem.bottom()) for elem in synthStateStructure] 116 | ) 117 | 118 | def targetLang(): 119 | return mapsLang() 120 | 121 | 122 | benchmarks = { 123 | "flag_dw": { 124 | "ll_name": "sequential_flag", 125 | "inOrder": lambda arg1, arg2: Ite( 126 | Eq(arg1[0], IntLit(1)), # if first is enable 127 | BoolLit(True), # second can be anything 128 | Not(Eq(arg2[0], IntLit(1))), # but if remove, must be remove next 129 | ), 130 | "opPrecondition": lambda op: Ge(op[-1], IntLit(1)), 131 | "stateTypeHint": EnumInt(), 132 | "opArgTypeHint": [EnumInt(), ClockInt()], 133 | "queryArgTypeHint": None, 134 | "queryRetTypeHint": EnumInt(), 135 | "fixedLatticeType": (lat.LexicalProduct(lat.MaxInt(ClockInt()), lat.OrBool()),), 136 | }, 137 | "flag_ew": { 138 | "ll_name": "sequential_flag", 139 | "inOrder": lambda arg1, arg2: Ite( 140 | Eq(arg1[0], IntLit(1)), # if first is enable 141 | Eq(arg2[0], IntLit(1)), # second must be enable 142 | BoolLit(True), # but if remove, can be anything next 143 | ), 144 | "opPrecondition": lambda op: Ge(op[-1], IntLit(1)), 145 | "stateTypeHint": EnumInt(), 146 | "opArgTypeHint": [EnumInt(), ClockInt()], 147 | "queryArgTypeHint": None, 
148 | "queryRetTypeHint": EnumInt(), 149 | "fixedLatticeType": (lat.LexicalProduct(lat.MaxInt(ClockInt()), lat.OrBool()),), 150 | }, 151 | "lww_register": { 152 | "ll_name": "sequential_register", 153 | "inOrder": lambda arg1, arg2: Ge(arg2[0], arg1[0]), 154 | "opPrecondition": lambda op: And( 155 | Ge(op[-1], IntLit(1)), 156 | Ge(op[0], IntLit(0)) 157 | ), 158 | "stateTypeHint": OpaqueInt(), 159 | "opArgTypeHint": [OpaqueInt(), ClockInt()], 160 | "queryArgTypeHint": [], 161 | "queryRetTypeHint": OpaqueInt(), 162 | "fixedLatticeType": (lat.LexicalProduct(lat.MaxInt(ClockInt()), lat.MaxInt(OpaqueInt())),), 163 | }, 164 | "g_set": { 165 | "ll_name": "sequential1", 166 | "inOrder": lambda arg1, arg2: Ite( 167 | Eq(arg1[0], IntLit(1)), # if first command is insert 168 | Eq(arg2[0], IntLit(1)), # second must be insert 169 | BoolLit(True), # but if remove, can be anything next 170 | ), 171 | "opPrecondition": lambda op: BoolLit(True), 172 | "stateTypeHint": SetT(OpaqueInt()), 173 | "opArgTypeHint": [EnumInt(), OpaqueInt()], 174 | "queryArgTypeHint": [OpaqueInt()], 175 | "queryRetTypeHint": EnumInt(), 176 | "fixedLatticeType": (lat.Set(OpaqueInt()),), 177 | }, 178 | "2p_set": { 179 | "ll_name": "sequential1", 180 | "inOrder": lambda arg1, arg2: Ite( 181 | Eq(arg1[0], IntLit(1)), # if first command is insert 182 | BoolLit(True), # second can be insert or remove 183 | Not(Eq(arg2[0], IntLit(1))), # but if remove, must be remove next 184 | ), 185 | "opPrecondition": lambda op: BoolLit(True), 186 | "stateTypeHint": SetT(OpaqueInt()), 187 | "opArgTypeHint": [EnumInt(), OpaqueInt()], 188 | "queryArgTypeHint": [OpaqueInt()], 189 | "queryRetTypeHint": EnumInt(), 190 | "fixedLatticeType": (lat.Map(OpaqueInt(), lat.OrBool()),), 191 | }, 192 | "add_wins_set": { 193 | "ll_name": "sequential1_clock", 194 | "inOrder": lambda arg1, arg2: Ite( 195 | Eq(arg1[0], IntLit(1)), # if first command is insert 196 | Eq(arg2[0], IntLit(1)), # second command must be insert 197 | BoolLit(True), # second can be insert or remove 198 | ), 199 | "opPrecondition": lambda op: Ge(op[-1], IntLit(1)), 200 | "stateTypeHint": SetT(OpaqueInt()), 201 | "opArgTypeHint": [EnumInt(), OpaqueInt(), ClockInt()], 202 | "queryArgTypeHint": [OpaqueInt()], 203 | "queryRetTypeHint": EnumInt(), 204 | "fixedLatticeType": (lat.Map(OpaqueInt(), lat.MaxInt(ClockInt())),lat.Map(OpaqueInt(), lat.MaxInt(ClockInt()))), 205 | }, 206 | "remove_wins_set": { 207 | "ll_name": "sequential1_clock", 208 | "inOrder": lambda arg1, arg2: Ite( 209 | Eq(arg1[0], IntLit(1)), # if first command is insert 210 | BoolLit(True), # second can be insert or remove 211 | Not(Eq(arg2[0], IntLit(1))), # but if remove, must be remove next 212 | ), 213 | "opPrecondition": lambda op: Ge(op[-1], IntLit(1)), 214 | "stateTypeHint": SetT(OpaqueInt()), 215 | "opArgTypeHint": [EnumInt(), OpaqueInt(), ClockInt()], 216 | "queryArgTypeHint": [OpaqueInt()], 217 | "queryRetTypeHint": EnumInt(), 218 | "fixedLatticeType": (lat.Map(OpaqueInt(), lat.MaxInt(ClockInt())),lat.Map(OpaqueInt(), lat.MaxInt(ClockInt()))), 219 | }, 220 | "grow_only_counter": { 221 | "ll_name": "sequential2", 222 | "inOrder": lambda arg1, arg2: And( 223 | Eq(arg1[0], IntLit(1)), 224 | Eq(arg2[0], IntLit(1)) 225 | ), 226 | "opPrecondition": lambda op: Eq(op[0], IntLit(1)), 227 | "stateTypeHint": Int(), 228 | "opArgTypeHint": [EnumInt(), NodeIDInt()], 229 | "queryArgTypeHint": [], 230 | "queryRetTypeHint": Int(), 231 | "nonIdempotent": True, 232 | "fixedLatticeType": (lat.Map(NodeIDInt(), lat.MaxInt(Int())),), 233 | }, 234 | 
"general_counter": { 235 | "ll_name": "sequential2", 236 | "inOrder": lambda arg1, arg2: BoolLit(True), 237 | "opPrecondition": lambda op: BoolLit(True), 238 | "stateTypeHint": Int(), 239 | "opArgTypeHint": [EnumInt(), NodeIDInt()], 240 | "queryArgTypeHint": [], 241 | "queryRetTypeHint": Int(), 242 | "nonIdempotent": True, 243 | "fixedLatticeType": (lat.Map(NodeIDInt(), lat.MaxInt(Int())),lat.Map(NodeIDInt(), lat.MaxInt(Int()))), 244 | } 245 | } 246 | 247 | def has_node_id(tup): 248 | for v in tup: 249 | if v.has_node_id(): 250 | return True 251 | return False 252 | 253 | def increasing_depth_structures(underlying, nonIdempotent): 254 | base_depth = 1 255 | while True: 256 | # we synthesize structures of complexity base_depth + 1 257 | all_structures = underlying(base_depth) 258 | filtered_structures = all_structures 259 | if nonIdempotent: 260 | filtered_structures = filter(has_node_id, all_structures) 261 | for struct in filtered_structures: 262 | yield (base_depth, struct) 263 | base_depth += 1 264 | 265 | def main(): 266 | parser = argparse.ArgumentParser(description='Synthesize CRDTs from sequential types.') 267 | parser.add_argument('mode', choices=['synth', 'synth-unbounded'], help='synthesis mode') 268 | parser.add_argument('benchmark', help='benchmark name or "all"') 269 | parser.add_argument('--fixed', action='store_true', help='use fixed lattice structure') 270 | parser.add_argument('--first', type=int, help='synthesize the first N structures') 271 | parser.add_argument('--repeat', type=int, default=1, help='number of repetitions') 272 | 273 | args = parser.parse_args() 274 | 275 | mode = args.mode 276 | bench = args.benchmark 277 | fixed_structure = args.fixed 278 | first_n = args.first 279 | reps = args.repeat 280 | 281 | if bench == "all": 282 | benches = list(benchmarks.keys()) 283 | else: 284 | benches = [bench] 285 | 286 | useOpList = True 287 | if mode == "synth-unbounded": 288 | useOpList = False 289 | 290 | bounded_bench_str = "bounded-pruning" if useOpList else "direct-unbounded" 291 | 292 | bench_file = open(f"results-{bench}-{bounded_bench_str}.csv", "w") if first_n == None else contextlib.nullcontext() 293 | with bench_file as report: 294 | for bench in benches: 295 | bench_data = benchmarks[bench] 296 | 297 | filename = f"tests/{bench_data['ll_name']}.ll" 298 | fnNameBase = "test" 299 | loopsFile = f"tests/{bench_data['ll_name']}.loops" 300 | cvcPath = "cvc5" 301 | 302 | nonIdempotent = "nonIdempotent" in bench_data and bench_data["nonIdempotent"] 303 | 304 | for rep in range(reps): 305 | structure_generator = increasing_depth_structures( 306 | (lambda base_depth: lat.gen_structures(base_depth)) 307 | if not fixed_structure 308 | else 309 | (lambda _: [bench_data["fixedLatticeType"]]), 310 | nonIdempotent 311 | ) 312 | 313 | start_time = time() 314 | report_file = f"search-{bench}-{rep}-{bounded_bench_str}-first_{first_n}.csv" 315 | 316 | clock_augmented_order = bench_data["inOrder"] 317 | if bench_data["opArgTypeHint"] and bench_data["opArgTypeHint"][-1] == ClockInt(): 318 | orig_order = clock_augmented_order 319 | clock_augmented_order = lambda arg1, arg2: Ite( 320 | Lt(arg1[-1], arg2[-1]), # if clocks in order 321 | BoolLit(True), 322 | Ite( 323 | Eq(arg1[-1], arg2[-1]), # if clocks concurrent 324 | orig_order(arg1, arg2), 325 | BoolLit(False), # clocks out of order 326 | ) 327 | ) 328 | 329 | (result_type, result_fns) = search_crdt_structures( 330 | initState, 331 | grammarStateInvariant, 332 | grammarSupportedCommand, 333 | clock_augmented_order, 334 | 
bench_data["opPrecondition"], 335 | grammar, 336 | grammarQuery, 337 | grammarEquivalence, 338 | targetLang, 339 | synthesize, 340 | filename, fnNameBase, loopsFile, cvcPath, useOpList, 341 | structure_generator, 342 | reportFile=report_file, 343 | stateTypeHint=bench_data["stateTypeHint"], 344 | opArgTypeHint=bench_data["opArgTypeHint"], 345 | queryArgTypeHint=bench_data["queryArgTypeHint"], 346 | queryRetTypeHint=bench_data["queryRetTypeHint"], 347 | maxThreads=1 if fixed_structure else mp.cpu_count(), 348 | upToUid=first_n, 349 | exitFirstSuccess=first_n == None, 350 | ) 351 | end_time = time() 352 | 353 | if first_n == None: 354 | print(f"{bench} (repetition {rep}) took {end_time - start_time} seconds\n\n") 355 | results_code = ";".join([c.toRosette().replace("\n", " ") for c in result_fns]) 356 | report.write(f"{bench},{rep},{end_time - start_time},\"{result_type}\",\"{results_code}\"\n") 357 | report.flush() 358 | else: 359 | with open(report_file, newline='') as csvfile: 360 | report_reader = csv.reader(csvfile) 361 | times = sorted([float(row[1]) for row in report_reader]) 362 | with open(f"benchmarks-{bench}-{bounded_bench_str}-first_{first_n}-distribution.csv", "w") as distribution_file: 363 | distribution_file.write(f"time,percent\n") 364 | for (i, measured_time) in enumerate(times): 365 | percent = (i + 1) / len(times) 366 | distribution_file.write(f"{measured_time},{percent}\n") 367 | 368 | if __name__ == "__main__": 369 | main() 370 | --------------------------------------------------------------------------------