├── .github
│   └── workflows
│       └── ci.yml
├── .gitignore
├── .gitmodules
├── LICENSE
├── README.md
├── artifact.nix
├── ci-util
│   └── cache-nix.sh
├── flake.lock
├── flake.nix
├── headers
│   ├── array.h
│   ├── list.h
│   ├── set.h
│   └── tuples.h
├── iso-racket-links.rktd
├── katara-dark.png
├── katara-light.png
├── katara
│   ├── __init__.py
│   ├── aci.py
│   ├── auto_grammar.py
│   ├── lattices.py
│   ├── search_structures.py
│   └── synthesis.py
├── llvm-pass
│   ├── CMakeLists.txt
│   ├── addEmptyBlocks
│   │   ├── AddEmptyBlocks.cpp
│   │   └── CMakeLists.txt
│   └── instructions.txt
├── mypy.ini
├── poetry.lock
├── pyproject.toml
├── rosette-packages-flake.nix
└── tests
    ├── compile-add-blocks
    ├── compile-all
    ├── plot_distribution.py
    ├── sequential1.c
    ├── sequential1_clock.c
    ├── sequential2.c
    ├── sequential_flag.c
    ├── sequential_register.c
    └── synthesize_crdt.py
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: Katara CI
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | pull_request:
8 | release:
9 | types: [published]
10 |
11 | jobs:
12 | lint:
13 | runs-on: ubuntu-latest
14 | steps:
15 | - uses: actions/checkout@v2
16 | - uses: psf/black@stable
17 | with:
18 | black_args: --check --diff .
19 | version: "22.3.0"
20 |
21 | mypy:
22 | runs-on: ubuntu-latest
23 | steps:
24 | - uses: actions/checkout@v2
25 | with:
26 | submodules: "recursive"
27 |
28 | - uses: cachix/install-nix-action@v16
29 | with:
30 | nix_path: nixpkgs=channel:nixos-22.05
31 |
32 | - name: Cache Nix store
33 | id: cache
34 | uses: actions/cache@v3
35 | with:
36 | path: ~/nix-cache
37 | key: nix-flake-cache-${{ hashFiles('flake.nix') }}-${{ hashFiles('flake.lock') }}-${{ hashFiles('poetry.lock') }}
38 | restore-keys: |
39 | nix-flake-cache-
40 |
41 | - name: Load Nix cache
42 | run: |
43 | if [ -f "$HOME/nix-cache/cache.nar" ]; then
44 | nix-store --import < ~/nix-cache/cache.nar
45 | rm -rf ~/nix-cache
46 | fi
47 |
48 | - name: Run mypy
49 | shell: nix develop --command bash -e {0}
50 | run: mypy .
51 |
52 | - name: Save Nix cache
53 | if: steps.cache.outputs.cache-hit != 'true'
54 | run: ./ci-util/cache-nix.sh
55 |
56 | build:
57 | runs-on: ubuntu-latest
58 | needs: [lint, mypy]
59 |
60 | steps:
61 | - uses: actions/checkout@v2
62 | with:
63 | submodules: "recursive"
64 |
65 | - uses: cachix/install-nix-action@v16
66 | with:
67 | nix_path: nixpkgs=channel:nixos-22.05
68 |
69 | - name: Cache Nix store
70 | id: cache
71 | uses: actions/cache@v3
72 | with:
73 | path: ~/nix-cache
74 | key: nix-flake-cache-${{ hashFiles('flake.nix') }}-${{ hashFiles('flake.lock') }}-${{ hashFiles('poetry.lock') }}
75 | restore-keys: |
76 | nix-flake-cache-
77 |
78 | - name: Load Nix cache
79 | run: |
80 | if [ -f "$HOME/nix-cache/cache.nar" ]; then
81 | nix-store --import < ~/nix-cache/cache.nar
82 | rm -rf ~/nix-cache
83 | fi
84 |
85 | # https://github.com/Bogdanp/setup-racket-cache-example
86 | - name: Cache Racket dependencies
87 | id: cache-racket
88 | uses: actions/cache@v3
89 | with:
90 | path: |
91 | ~/.cache/racket
92 | ~/.local/share/racket
93 | key: ${{ runner.os }}-racket-8.4-rosette-4.1
94 |
95 | - name: Install Racket
96 | uses: Bogdanp/setup-racket@v1.7
97 | with:
98 | architecture: 'x64'
99 | distribution: 'full'
100 | variant: 'CS'
101 | version: '8.4'
102 |
103 | - name: Install Rosette (4.1)
104 | run: |
105 | raco pkg install --auto --skip-installed https://github.com/emina/rosette.git#10178550a0a21e6d80598d0f43c33c9228728f14
106 |
107 | - name: Build LLVM Pass
108 | shell: nix develop --command bash -e {0}
109 | run: |
110 | cd llvm-pass
111 | mkdir build
112 | cd build
113 | cmake ..
114 | make
115 | cd ..
116 |
117 | - name: Build Test Inputs
118 | shell: nix develop --command bash -e {0}
119 | run: |
120 | cd tests
121 | ./compile-all
122 | cd ..
123 |
124 | - name: Test CRDT Synthesis (fixed)
125 | shell: nix develop --command bash -e {0}
126 | run: |
127 | python -m tests.synthesize_crdt synth g_set --fixed
128 | python -m tests.synthesize_crdt synth 2p_set --fixed
129 | python -m tests.synthesize_crdt synth flag_ew --fixed
130 | python -m tests.synthesize_crdt synth flag_dw --fixed
131 | python -m tests.synthesize_crdt synth grow_only_counter --fixed
132 |
133 | - name: Save Nix cache
134 | if: steps.cache.outputs.cache-hit != 'true'
135 | run: ./ci-util/cache-nix.sh
136 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | !shell.nix
2 | .DS_Store
3 | *.pyc
4 | llvm-pass/build/
5 | synthesisLogs/
6 | tests/*.ll
7 | tests/*.loops
8 | results*.csv
9 | search*.csv
10 | distribution*.png
11 | dist/
12 | rosette-packages/packages
13 | result
14 | nixos.qcow2
15 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "metalift"]
2 | path = metalift
3 | url = git@github.com:metalift/metalift.git
4 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright 2022 Katara Contributors
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | [logo: katara-light.png (light mode) / katara-dark.png (dark mode)]
2 |
3 | Synthesize CRDTs that mirror your existing data types!
4 |
5 | Katara is a program synthesis engine that can automatically generate CRDT designs that mirror the behavior of a sequential data type annotated with a conflict resolution policy for non-commutative operations. See our [paper](https://arxiv.org/pdf/2205.12425.pdf) for more information!
6 |
7 | ## Setup
8 | ### Install (with Nix)
9 | To get a development environment up and running, one option is to use [Nix](https://nixos.org/), which can automatically pull and build the necessary dependencies. First, you'll need to [install Nix](https://nixos.org/download.html). Note that this _will_ require temporary root access as Nix sets up a daemon to handle builds, and will set up a separate volume for storing build artifacts if on macOS.
10 |
11 | Once you've got Nix installed, you'll need to enable [flakes](https://nixos.wiki/wiki/Flakes).
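
For reference, flakes are enabled through standard Nix configuration rather than anything specific to this repository; on a single-user install this is typically:

```bash
# enable the experimental flakes feature for your user
mkdir -p ~/.config/nix
echo "experimental-features = nix-command flakes" >> ~/.config/nix/nix.conf
```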
12 |
13 | Then, all you have to do is navigate to the Katara directory and run the following command:
14 | ```bash
15 | $ nix develop
16 | ```
17 |
18 | This will build all of Katara's dependencies and drop you into a temporary shell with them available.
19 |
20 | **Note**: you will still need to install Racket and Rosette separately. There _is_ a way to do this through Nix, but it requires [nix-ld](https://github.com/Mic92/nix-ld) to be installed and is generally not recommended unless you run NixOS.
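
If you do install them manually, the CI workflow (`.github/workflows/ci.yml`) is a known-good reference: it uses Racket 8.4 and pins Rosette to a specific commit. With `raco` from your Racket install on the PATH, the same Rosette install is:

```bash
# install Rosette at the commit pinned in CI
raco pkg install --auto --skip-installed https://github.com/emina/rosette.git#10178550a0a21e6d80598d0f43c33c9228728f14
```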
21 |
22 | ### Install (without Nix)
23 | You'll need the following dependencies installed to use Katara:
24 | - Python 3.8 with Poetry
25 | - [Rosette](https://emina.github.io/rosette)
26 | - [CVC5](https://cvc5.github.io)
27 | - [LLVM 11](https://llvm.org/)
28 |
29 | We use [Poetry](https://python-poetry.org/) for dependency management. To set up the environment, simply install Poetry, run `poetry install`, and then `poetry shell` to enter an environment with the dependencies installed.
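
Concretely, assuming Python 3.8 and `pip` are available (Poetry's official installer is an alternative to `pip install poetry`):

```bash
pip install poetry    # install the Poetry CLI
poetry install        # install Katara's dependencies from poetry.lock
poetry shell          # enter a shell with the environment activated
```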
30 |
31 | ## Build the LLVM Pass
32 |
33 | **We currently support LLVM 11**
34 |
35 | Run the following to build the LLVM pass for processing branch instructions (works for LLVM 11):
36 | ````bash
37 | cd llvm-pass
38 | mkdir build
39 | cd build
40 | cmake ..
41 | make
42 | cd ../..
43 | ````
44 |
45 | This pass is called in `tests/compile-add-blocks`.
46 |
47 | ## Synthesizing CRDTs
48 | The first step to synthesizing a CRDT is to compile the sequential reference. We have provided a set of benchmark sequential data types in the `tests/` folder. These can be compiled by entering the folder and running `compile-all`:
49 | ```bash
50 | cd tests
51 | ./compile-all
52 | cd ..
53 | ```
54 |
55 | Then, from the base directory of the project, we can run the synthesis benchmarks defined in `tests/synthesize_crdt.py` (in the `benchmarks` variable). Each benchmark is configured with the sequential data type to process, the ordering constraint as defined in our paper (`opOrder`), flags to enable synthesis of non-idempotent operations, and type hints to optimize the synthesis process. With a benchmark configured, we can run it as
56 | ```bash
57 | python -m tests.synthesize_crdt synth <benchmark>
58 | ```
59 |
60 | For example, we can synthesize for the 2P-Set benchmark with
61 | ```bash
62 | python -m tests.synthesize_crdt synth 2p_set
63 | ```
64 |
65 | In general, you can use the following command structure:
66 | ```bash
67 | python -m tests.synthesize_crdt <mode> <benchmark> [--fixed] [--first <n>] [--repeat <n>]
68 | ```
69 | Where:
70 | - `<mode>` is either `synth` for bounded synthesis with pruning or `synth-unbounded` for direct unbounded synthesis.
71 | - `<benchmark>` is the name of a benchmark or `all` to run all benchmarks.
72 | - `--fixed` (optional) uses a fixed lattice structure instead of exploring all structures.
73 | - `--first <n>` (optional) synthesizes only the first `<n>` structures.
74 | - `--repeat <n>` (optional) specifies the number of repetitions for the synthesis process (see the example below).
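
For example, the following (illustrative) invocation runs every benchmark with a fixed lattice structure, repeating each synthesis three times:

```bash
python -m tests.synthesize_crdt synth all --fixed --repeat 3
```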
75 |
--------------------------------------------------------------------------------
/artifact.nix:
--------------------------------------------------------------------------------
1 | { config, options, lib, pkgs, specialArgs, ... }:
2 |
3 | lib.mkMerge [{
4 | environment.systemPackages = with pkgs; [
5 | (poetry2nix.mkPoetryEnv {
6 | python = python38;
7 | projectDir = ./.;
8 |
9 | overrides = poetry2nix.overrides.withDefaults (_: poetrySuper: {
10 | metalift = poetrySuper.metalift.overrideAttrs(_: super: {
11 | nativeBuildInputs = super.nativeBuildInputs ++ [ poetrySuper.poetry ];
12 | });
13 |
14 | autoflake = poetrySuper.autoflake.overrideAttrs(_: super: {
15 | nativeBuildInputs = super.nativeBuildInputs ++ [ poetrySuper.hatchling ];
16 | });
17 | });
18 | })
19 |
20 | cvc5
21 | gnumake
22 | cmake
23 | llvm_11
24 | llvm_11.dev
25 | clang_11
26 |
27 | racket
28 |
29 | htop
30 | nano
31 | vim
32 | emacs
33 | ];
34 |
35 | users = {
36 | mutableUsers = false;
37 | allowNoPasswordLogin = true;
38 |
39 | users = {
40 | demo = {
41 | home = "/home/demo";
42 | password = "demo";
43 | extraGroups = [ "wheel" ];
44 | isNormalUser = true;
45 | };
46 | };
47 | };
48 |
49 | security.sudo.wheelNeedsPassword = false;
50 |
51 | boot.postBootCommands = let source =
52 | let
53 | inherit (specialArgs.gitignore.lib) gitignoreSource;
54 | in pkgs.lib.cleanSourceWith
55 | { filter = (path: type:
56 | ! (builtins.any
57 | (r: (builtins.match r (builtins.baseNameOf path)) != null)
58 | [])
59 | );
60 | src = gitignoreSource ./.;
61 | } ;
62 | in ''
63 | echo "Loading source code for the artifact"
64 |
65 | ${pkgs.rsync}/bin/rsync -r --owner --group --chown=demo:users --perms --chmod=u+rw ${source}/ /home/demo
66 |
67 | mkdir -p /home/demo/.racket/8.7/pkgs
68 | ln -s ${./iso-racket-links.rktd} /home/demo/.racket/8.7/links.rktd
69 | ln -s ${specialArgs.rosette-packages.packages}/* /home/demo/.racket/8.7/pkgs/
70 |
71 | rm /home/demo/.racket/8.7/pkgs/rosette
72 | ${pkgs.rsync}/bin/rsync -r --owner --group --chown=demo:users --perms --chmod=u+rw ${specialArgs.rosette-packages.packages}/rosette/ /home/demo/.racket/8.7/pkgs/rosette
73 | mkdir /home/demo/.racket/8.7/pkgs/rosette/bin
74 | ln -s ${(pkgs.z3.overrideAttrs(self: {
75 | version = "4.8.8";
76 |
77 | src = pkgs.fetchFromGitHub {
78 | owner = "Z3Prover";
79 | repo = "z3";
80 | rev = "z3-4.8.8";
81 | hash = "sha256-qpmi75I27m89dhKSy8D2zkzqKpLoFBPRBrhzDB8axeY=";
82 | };
83 | }))}/bin/z3 /home/demo/.racket/8.7/pkgs/rosette/bin/z3
84 | '';
85 |
86 | services.getty.autologinUser = "demo";
87 |
88 | services.openssh.enable = true;
89 | networking.firewall.allowedTCPPorts = [ 22 ];
90 |
91 | services.xserver.enable = true;
92 | } (lib.optionalAttrs (builtins.hasAttr "isoImage" options) {
93 | isoImage.appendToMenuLabel = " OOPSLA CRDT Synthesis Artifact";
94 | services.xserver.displayManager.startx.enable = true;
95 | }) (lib.optionalAttrs (builtins.hasAttr "virtualbox" options) {
96 | nixpkgs.config = {
97 | allowUnfree = true;
98 | };
99 |
100 | virtualbox.vmName = "OOPSLA CRDT Synthesis Artifact";
101 | virtualbox.memorySize = 1024 * 8;
102 | virtualbox.params.cpus = 8;
103 | virtualbox.params.usb = "off";
104 | virtualbox.params.usbehci = "off";
105 |
106 | services.xserver.desktopManager.gnome.enable = true;
107 | services.xserver.displayManager.lightdm.enable = true;
108 |
109 | services.getty.autologinUser = pkgs.lib.mkForce null;
110 |
111 | environment.systemPackages = with pkgs; [
112 | vscode
113 | sublime
114 | firefox
115 | ];
116 | })]
117 |
--------------------------------------------------------------------------------
/ci-util/cache-nix.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | cur_dir=$(pwd)
4 |
5 | # Collect every store path the dev shell depends on: the direct references
6 | # of its input derivation, realised and then expanded to their full closure.
7 | paths_to_save=$(nix-store --query --references $(nix path-info --derivation ".#devShell.x86_64-linux.inputDerivation") | \
8 |   xargs nix-store --realise | \
9 |   xargs nix-store --query --requisites)
10 |
11 | mkdir ~/nix-cache
12 |
13 | cd ~/nix-cache
14 |
15 | # Serialize the closure into a single archive for the CI cache to store;
16 | # the CI workflow re-imports it with `nix-store --import`.
17 | nix-store --export $paths_to_save > ~/nix-cache/cache.nar
18 |
19 | cd "$cur_dir"
16 |
--------------------------------------------------------------------------------
/flake.lock:
--------------------------------------------------------------------------------
1 | {
2 | "nodes": {
3 | "flake-utils": {
4 | "locked": {
5 | "lastModified": 1676283394,
6 | "narHash": "sha256-XX2f9c3iySLCw54rJ/CZs+ZK6IQy7GXNY4nSOyu2QG4=",
7 | "owner": "numtide",
8 | "repo": "flake-utils",
9 | "rev": "3db36a8b464d0c4532ba1c7dda728f4576d6d073",
10 | "type": "github"
11 | },
12 | "original": {
13 | "owner": "numtide",
14 | "repo": "flake-utils",
15 | "type": "github"
16 | }
17 | },
18 | "flake-utils_2": {
19 | "locked": {
20 | "lastModified": 1676283394,
21 | "narHash": "sha256-XX2f9c3iySLCw54rJ/CZs+ZK6IQy7GXNY4nSOyu2QG4=",
22 | "owner": "numtide",
23 | "repo": "flake-utils",
24 | "rev": "3db36a8b464d0c4532ba1c7dda728f4576d6d073",
25 | "type": "github"
26 | },
27 | "original": {
28 | "owner": "numtide",
29 | "repo": "flake-utils",
30 | "type": "github"
31 | }
32 | },
33 | "gitignore": {
34 | "inputs": {
35 | "nixpkgs": [
36 | "nixpkgs"
37 | ]
38 | },
39 | "locked": {
40 | "lastModified": 1660459072,
41 | "narHash": "sha256-8DFJjXG8zqoONA1vXtgeKXy68KdJL5UaXR8NtVMUbx8=",
42 | "owner": "hercules-ci",
43 | "repo": "gitignore.nix",
44 | "rev": "a20de23b925fd8264fd7fad6454652e142fd7f73",
45 | "type": "github"
46 | },
47 | "original": {
48 | "owner": "hercules-ci",
49 | "repo": "gitignore.nix",
50 | "type": "github"
51 | }
52 | },
53 | "nixlib": {
54 | "locked": {
55 | "lastModified": 1677373009,
56 | "narHash": "sha256-kxhz4QUP8tXa/yVSpEzDDZSEp9FvhzRqZzb+SeUaekw=",
57 | "owner": "nix-community",
58 | "repo": "nixpkgs.lib",
59 | "rev": "c9d4f2476046c6a7a2ce3c2118c48455bf0272ea",
60 | "type": "github"
61 | },
62 | "original": {
63 | "owner": "nix-community",
64 | "repo": "nixpkgs.lib",
65 | "type": "github"
66 | }
67 | },
68 | "nixos-generators": {
69 | "inputs": {
70 | "nixlib": "nixlib",
71 | "nixpkgs": [
72 | "nixpkgs"
73 | ]
74 | },
75 | "locked": {
76 | "lastModified": 1678351966,
77 | "narHash": "sha256-tRFEU0lu3imZb3dtELBY+UbEhWXbb0xlBrsIlpICb+A=",
78 | "owner": "nix-community",
79 | "repo": "nixos-generators",
80 | "rev": "0c043999b16caba6fb571af2d228775729829943",
81 | "type": "github"
82 | },
83 | "original": {
84 | "owner": "nix-community",
85 | "repo": "nixos-generators",
86 | "type": "github"
87 | }
88 | },
89 | "nixpkgs": {
90 | "locked": {
91 | "lastModified": 1678230755,
92 | "narHash": "sha256-SFAXgNjNTXzcAideXcP0takfUGVft/VR5CACmYHg+Fc=",
93 | "owner": "NixOS",
94 | "repo": "nixpkgs",
95 | "rev": "a7cc81913bb3cd1ef05ed0ece048b773e1839e51",
96 | "type": "github"
97 | },
98 | "original": {
99 | "owner": "NixOS",
100 | "ref": "nixos-22.11",
101 | "repo": "nixpkgs",
102 | "type": "github"
103 | }
104 | },
105 | "poetry2nix-flake": {
106 | "inputs": {
107 | "flake-utils": "flake-utils_2",
108 | "nixpkgs": [
109 | "nixpkgs"
110 | ]
111 | },
112 | "locked": {
113 | "lastModified": 1678135815,
114 | "narHash": "sha256-U9rGLDafi7CcrNuYD1LkM3LSN2rZ5zgF0GeInrAihAE=",
115 | "owner": "nix-community",
116 | "repo": "poetry2nix",
117 | "rev": "4c424edc8a546952e640f2a3017354b05bf15f0b",
118 | "type": "github"
119 | },
120 | "original": {
121 | "owner": "nix-community",
122 | "repo": "poetry2nix",
123 | "type": "github"
124 | }
125 | },
126 | "root": {
127 | "inputs": {
128 | "flake-utils": "flake-utils",
129 | "gitignore": "gitignore",
130 | "nixos-generators": "nixos-generators",
131 | "nixpkgs": "nixpkgs",
132 | "poetry2nix-flake": "poetry2nix-flake",
133 | "rosette-packages": "rosette-packages"
134 | }
135 | },
136 | "rosette-packages": {
137 | "locked": {
138 | "lastModified": 1630400035,
139 | "narHash": "sha256-MWaVOCzuFwp09wZIW9iHq5wWen5C69I940N1swZLEQ0=",
140 | "owner": "input-output-hk",
141 | "repo": "empty-flake",
142 | "rev": "2040a05b67bf9a669ce17eca56beb14b4206a99a",
143 | "type": "github"
144 | },
145 | "original": {
146 | "owner": "input-output-hk",
147 | "repo": "empty-flake",
148 | "rev": "2040a05b67bf9a669ce17eca56beb14b4206a99a",
149 | "type": "github"
150 | }
151 | }
152 | },
153 | "root": "root",
154 | "version": 7
155 | }
156 |
--------------------------------------------------------------------------------
/flake.nix:
--------------------------------------------------------------------------------
1 | {
2 | description = "katara";
3 |
4 | inputs.flake-utils.url = "github:numtide/flake-utils";
5 | inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-22.11";
6 | inputs.poetry2nix-flake = {
7 | url = "github:nix-community/poetry2nix";
8 | inputs.nixpkgs.follows = "nixpkgs";
9 | };
10 |
11 | inputs.nixos-generators = {
12 | url = "github:nix-community/nixos-generators";
13 | inputs.nixpkgs.follows = "nixpkgs";
14 | };
15 |
16 | inputs.gitignore = {
17 | url = "github:hercules-ci/gitignore.nix";
18 | inputs.nixpkgs.follows = "nixpkgs";
19 | };
20 |
21 | inputs.rosette-packages.url = "github:input-output-hk/empty-flake?rev=2040a05b67bf9a669ce17eca56beb14b4206a99a";
22 |
23 | outputs = { self, nixpkgs, flake-utils, poetry2nix-flake, nixos-generators, gitignore, rosette-packages }: (flake-utils.lib.eachDefaultSystem (system:
24 | with import nixpkgs {
25 | inherit system;
26 | overlays = [ poetry2nix-flake.overlay ];
27 | };
28 |
29 | {
30 | devShell = mkShell {
31 | buildInputs = [
32 | (poetry2nix.mkPoetryEnv {
33 | python = python38;
34 | projectDir = ./.;
35 |
36 | overrides = poetry2nix.overrides.withDefaults (_: poetrySuper: {
37 | metalift = poetrySuper.metalift.overrideAttrs(_: super: {
38 | nativeBuildInputs = super.nativeBuildInputs ++ [ poetrySuper.poetry ];
39 | });
40 |
41 | autoflake = poetrySuper.autoflake.overrideAttrs(_: super: {
42 | nativeBuildInputs = super.nativeBuildInputs ++ [ poetrySuper.hatchling ];
43 | });
44 | });
45 | })
46 |
47 | cvc5
48 | cmake
49 | llvm_11
50 | clang_11
51 | ];
52 | };
53 | }
54 | )) // {
55 | packages.x86_64-linux = {
56 | # nix build .#vm-nogui --override-input rosette-packages ../rosette-packages
57 | vm-nogui = nixos-generators.nixosGenerate {
58 | system = "x86_64-linux";
59 | modules = [
60 | ({ pkgs, ... }: {
61 | nixpkgs.overlays = [ poetry2nix-flake.overlay ];
62 | })
63 | ./artifact.nix
64 | ];
65 | format = "vm-nogui";
66 |
67 | specialArgs = {
68 | gitignore = gitignore;
69 | rosette-packages = rosette-packages;
70 | };
71 | };
72 | };
73 | };
74 | }
75 |
--------------------------------------------------------------------------------
/headers/array.h:
--------------------------------------------------------------------------------
1 | #include <vector>
2 |
3 | template <typename T>
4 | struct array
5 | {
6 |     std::vector<T> contents;
7 | };
8 |
9 | template <typename T>
10 | using Array = array<T> *;
11 |
12 |
13 | template <typename T>
14 | int arrayLength (Array<T> l)
15 | {
16 |     return l->contents.size();
17 | }
18 |
19 | template <typename T>
20 | Array<T> newarray()
21 | {
22 |     //return (Array<T>)malloc(sizeof(struct array));
23 |     return new array<T>{std::vector<T>(100)};
24 | }
25 |
26 | template <typename T>
27 | T arrayGet (Array<T> l, int i)
28 | {
29 |     return l->contents[i];
30 | }
31 |
32 | template <typename T>
33 | void arraySet (Array<T> l, int i, T v)
34 | {
35 |     l->contents[i] = v;
36 | }
--------------------------------------------------------------------------------
/headers/list.h:
--------------------------------------------------------------------------------
1 | #include <vector>
2 |
3 | template <typename T>
4 | struct list
5 | {
6 |     std::vector<T> contents;
7 | };
8 |
9 | template <typename T>
10 | using List = list<T> *;
11 |
12 |
13 | template <typename T>
14 | int listLength (List<T> l)
15 | {
16 |     return l->contents.size();
17 | }
18 |
19 | template <typename T>
20 | List<T> newList()
21 | {
22 |     //return (List<T>)malloc(sizeof(struct list));
23 |     return new list<T>();
24 | }
25 |
26 | template <typename T>
27 | T listGet (List<T> l, int i)
28 | {
29 |     return l->contents[i];
30 | }
31 |
32 |
33 | template <typename T>
34 | List<T> listAppend (List<T> in, T e)
35 | {
36 |     List<T> r = newList<T>();
37 |     for (int i = 0; i < listLength(in); ++i)
38 |         r->contents.push_back(listGet(in, i));
39 |     r->contents.push_back(e);
40 |     return r;
41 | }
42 |
43 | template <typename T>
44 | List<T> listConcat (List<T> in, List<T> e)
45 | {
46 |     List<T> r = newList<T>();
47 |     for (int i = 0; i < listLength(in); ++i)
48 |         r->contents.push_back(listGet(in, i));
49 |     for (int i = 0; i < listLength(e); ++i)
50 |         r->contents.push_back(listGet(e, i));
51 |     return r;
52 | }
--------------------------------------------------------------------------------
/headers/set.h:
--------------------------------------------------------------------------------
1 | #include <set>
2 |
3 | typedef struct set {} set;
4 |
5 | set* set_create();
6 | set* set_add(set* s, int x);
7 | set* set_remove(set* s, int x);
8 | int set_contains(set* s, int v);
9 |
--------------------------------------------------------------------------------
/headers/tuples.h:
--------------------------------------------------------------------------------
1 | #include <cstddef>
2 | #include <tuple>
3 | #include <type_traits>
4 |
5 | template <typename... T>
6 | struct tup
7 | {
8 |     std::tuple<T...> contents;
9 | };
10 |
11 | template <typename... T>
12 | using Tuple = tup<T...> *;
13 |
14 | template <typename... T>
15 | Tuple<T...> newTuple()
16 | {
17 |     return new tup<T...>();
18 | }
19 |
20 | template <typename... T>
21 | Tuple<T...> MakeTuple(T... args)
22 | {
23 |     Tuple<T...> r = newTuple<T...>();
24 |     r->contents = std::make_tuple(args...);
25 |     return r;
26 | }
27 |
28 | template <std::size_t I = 0, typename... T>
29 | typename std::enable_if<(I < sizeof...(T)),
30 |                         int>::type
31 | tupleGet(Tuple<T...> t, int i)
32 | {
33 |     return std::get<I>(t->contents);
34 |
35 |     // switch (i) {
36 |     //     case 0: return std::get<0>(t->contents);
37 |     //     case 1: return std::get<1>(t->contents);
38 |     // }
39 | }
--------------------------------------------------------------------------------
/iso-racket-links.rktd:
--------------------------------------------------------------------------------
1 | ((root (#"pkgs" #"rosette"))
2 | ("custom-load" (#"pkgs" #"custom-load"))
3 | (root (#"pkgs" #"rfc6455")))
4 |
--------------------------------------------------------------------------------
/katara-dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hydro-project/katara/b5c0e0cb24e1890651f83d1718bc503dff18405b/katara-dark.png
--------------------------------------------------------------------------------
/katara-light.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hydro-project/katara/b5c0e0cb24e1890651f83d1718bc503dff18405b/katara-light.png
--------------------------------------------------------------------------------
/katara/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/hydro-project/katara/b5c0e0cb24e1890651f83d1718bc503dff18405b/katara/__init__.py
--------------------------------------------------------------------------------
/katara/aci.py:
--------------------------------------------------------------------------------
1 | import sys
2 |
3 | from metalift.analysis_new import VariableTracker, analyze as analyze_new
4 | from metalift.ir import *
5 | from metalift.smt_util import toSMT
6 |
7 | import subprocess
8 | from metalift.synthesize_cvc5 import generateAST, toExpr
9 |
10 | import typing
11 | from typing import Any
12 |
13 |
14 | def check_aci(filename: str, fnNameBase: str, loopsFile: str, cvcPath: str) -> None:
15 | """Check if the actor is commutative and idempotent.
16 |
17 | Args:
18 | filename (str): path to the file containing the actor
19 | fnNameBase (str): name of the actor
20 | loopsFile (str): path to the file containing the loop information
21 | cvcPath (str): path to the cvc5 executable
22 | """
23 | check_c(filename, fnNameBase, loopsFile, cvcPath)
24 | check_i(filename, fnNameBase, loopsFile, cvcPath)
25 |
26 |
27 | def check_c(filename: str, fnNameBase: str, loopsFile: str, cvcPath: str) -> None:
28 | """Check if the actor is commutative.
29 |
30 | Args:
31 | filename (str): path to the file containing the actor
32 | fnNameBase (str): name of the actor
33 | loopsFile (str): path to the file containing the loop information
34 | cvcPath (str): path to the cvc5 executable
35 | """
36 | state_transition_analysis = analyze_new(
37 | filename, fnNameBase + "_next_state", loopsFile
38 | )
39 |
40 | tracker = VariableTracker()
41 |
42 | initial_state = tracker.variable(
43 | "initial_state", state_transition_analysis.arguments[0].type
44 | )
45 |
46 | op1_group = tracker.group("op1")
47 | op1 = [
48 | op1_group.variable(v.name(), v.type)
49 | for v in state_transition_analysis.arguments[1:]
50 | ]
51 |
52 | op2_group = tracker.group("op2")
53 | op2 = [
54 | op2_group.variable(v.name(), v.type)
55 | for v in state_transition_analysis.arguments[1:]
56 | ]
57 |
58 | afterState_0_op1 = tracker.variable(
59 | "afterState_0_op1", state_transition_analysis.arguments[0].type
60 | )
61 | afterState_0_op2 = tracker.variable(
62 | "afterState_0_op2", state_transition_analysis.arguments[0].type
63 | )
64 |
65 | afterState_1_op2 = tracker.variable(
66 | "afterState_1_op2", state_transition_analysis.arguments[0].type
67 | )
68 | afterState_1_op1 = tracker.variable(
69 | "afterState_1_op1", state_transition_analysis.arguments[0].type
70 | )
71 |
72 | vc = state_transition_analysis.call(initial_state, *op1)(
73 | tracker,
74 | lambda obj0_after_op1: Implies(
75 | Eq(obj0_after_op1, afterState_0_op1),
76 | state_transition_analysis.call(obj0_after_op1, *op2)(
77 | tracker,
78 | lambda obj0_after_op2: Implies(
79 | Eq(obj0_after_op2, afterState_0_op2),
80 | state_transition_analysis.call(initial_state, *op2)(
81 | tracker,
82 | lambda obj1_after_op2: Implies(
83 | Eq(obj1_after_op2, afterState_1_op2),
84 | state_transition_analysis.call(obj1_after_op2, *op1)(
85 | tracker,
86 | lambda obj1_after_op1: Implies(
87 | Eq(obj1_after_op1, afterState_1_op1),
88 | Eq(obj0_after_op2, obj1_after_op1),
89 | ),
90 | ),
91 | ),
92 | ),
93 | ),
94 | ),
95 | ),
96 | )
97 |
98 | toSMT(
99 | [],
100 | set(tracker.all()),
101 | [],
102 | [],
103 | vc,
104 | "./synthesisLogs/aci-test.smt",
105 | [],
106 | [],
107 | )
108 |
109 | procVerify = subprocess.run(
110 | [
111 | cvcPath,
112 | "--lang=smt",
113 | "--produce-models",
114 | "--tlimit=100000",
115 | "./synthesisLogs/aci-test.smt",
116 | ],
117 | stdout=subprocess.PIPE,
118 | )
119 |
120 | procOutput = procVerify.stdout
121 | resultVerify = procOutput.decode("utf-8").split("\n")
122 |
123 | if resultVerify[0] == "sat" or resultVerify[0] == "unknown":
124 | print("Counterexample Found for Commutativity Check")
125 | print(f"Operation 1: {[lookup_var(v, resultVerify) for v in op1]}")
126 | print(f"Operation 2: {[lookup_var(v, resultVerify) for v in op2]}")
127 | print(f"Initial State: {lookup_var(initial_state, resultVerify)}")
128 | print()
129 | print(f"Actor 1 (after op 1): {lookup_var(afterState_0_op1, resultVerify)}")
130 | print(f"Actor 1 (after op 1 + 2): {lookup_var(afterState_0_op2, resultVerify)}")
131 | print()
132 | print(f"Actor 2 (after op 2): {lookup_var(afterState_1_op2, resultVerify)}")
133 | print(f"Actor 2 (after op 2 + 1): {lookup_var(afterState_1_op1, resultVerify)}")
134 | else:
135 | print("Actor is commutative")
136 |
137 |
138 | def check_i(filename: str, fnNameBase: str, loopsFile: str, cvcPath: str) -> None:
139 | """Check if the actor is idempotent.
140 |
141 | Args:
142 | filename (str): path to the file containing the actor
143 | fnNameBase (str): name of the actor
144 | loopsFile (str): path to the file containing the loop information
145 | cvcPath (str): path to the cvc5 executable
146 | """
147 |
148 | state_transition_analysis = analyze_new(
149 | filename, fnNameBase + "_next_state", loopsFile
150 | )
151 |
152 | tracker = VariableTracker()
153 |
154 | initial_state = tracker.variable(
155 | "initial_state", state_transition_analysis.arguments[0].type
156 | )
157 |
158 | op_group = tracker.group("op")
159 | op = [
160 | op_group.variable(v.name(), v.type)
161 | for v in state_transition_analysis.arguments[1:]
162 | ]
163 |
164 | afterState_op = tracker.variable(
165 | "afterState_op", state_transition_analysis.arguments[0].type
166 | )
167 | afterState_op_op = tracker.variable(
168 | "afterState_op_op", state_transition_analysis.arguments[0].type
169 | )
170 |
171 | vc = state_transition_analysis.call(initial_state, *op)(
172 | tracker,
173 | lambda obj0_after_op: Implies(
174 | Eq(obj0_after_op, afterState_op),
175 | state_transition_analysis.call(obj0_after_op, *op)(
176 | tracker,
177 | lambda obj0_after_op_op: Implies(
178 | Eq(obj0_after_op_op, afterState_op_op),
179 | Eq(obj0_after_op, obj0_after_op_op),
180 | ),
181 | ),
182 | ),
183 | )
184 |
185 | toSMT(
186 | [],
187 | set(tracker.all()),
188 | [],
189 | [],
190 | vc,
191 | "./synthesisLogs/idempotence-test.smt",
192 | [],
193 | [],
194 | )
195 |
196 | procVerify = subprocess.run(
197 | [
198 | cvcPath,
199 | "--lang=smt",
200 | "--produce-models",
201 | "--tlimit=100000",
202 | "./synthesisLogs/idempotence-test.smt",
203 | ],
204 | stdout=subprocess.PIPE,
205 | )
206 |
207 | procOutput = procVerify.stdout
208 | resultVerify = procOutput.decode("utf-8").split("\n")
209 |
210 | if resultVerify[0] == "sat" or resultVerify[0] == "unknown":
211 | print("Counterexample Found for Idempotence Check")
212 | print(f"Operations: {[lookup_var(v, resultVerify) for v in op]}")
213 | print(f"Initial State: {lookup_var(initial_state, resultVerify)}")
214 | print()
215 | print(f"After 1 operation: {lookup_var(afterState_op, resultVerify)}")
216 | print(
217 | f"After 2 operations (op + op): {lookup_var(afterState_op_op, resultVerify)}"
218 | )
219 | else:
220 | print("Actor is Idempotent")
221 |
222 |
223 |
224 |
225 | def lookup_var(v: Expr, resultVerify: typing.List[Any]) -> Expr:
226 | """Given a variable and a list of lines from the cvc5 output, find the function
227 | which defines the variable and return the value of the variable in the counterexample.
228 |
229 | Args:
230 | v (Expr): variable to look up
231 | resultVerify (typing.List[Any]): list of lines from cvc5 output
232 |
233 | Raises:
234 | Exception: if the variable cannot be found
235 |
236 | Returns:
237 | Expr: value of the variable in the counterexample
238 | """
239 | for line in resultVerify:
240 | if line.startswith("(define-fun " + v.args[0] + " "):
241 | return toExpr(generateAST(line)[0][4], [], [], {}, {})
242 | raise Exception("Could not find variable " + v.args[0])
243 |
244 |
245 | if __name__ == "__main__":
246 | filename = f"tests/{sys.argv[1]}.ll"
247 | fnNameBase = "test"
248 | loopsFile = f"tests/{sys.argv[1]}.loops"
249 | cvcPath = "cvc5"
250 |
251 | if len(sys.argv) > 2:
252 | checkType = sys.argv[2] # "c" for commutativity, "i" for idempotence
253 | if checkType == "i":
254 | check_i(filename, fnNameBase, loopsFile, cvcPath)
255 | elif checkType == "c":
256 | check_c(filename, fnNameBase, loopsFile, cvcPath)
257 | else:
258 | check_aci(filename, fnNameBase, loopsFile, cvcPath)
259 |
--------------------------------------------------------------------------------
/katara/auto_grammar.py:
--------------------------------------------------------------------------------
1 | from katara import lattices
2 | from katara.lattices import Lattice
3 | from metalift.ir import *
4 |
5 | import typing
6 | from typing import Union, Dict
7 | from llvmlite.binding import ValueRef
8 |
9 | equality_supported_types = [Bool(), Int(), ClockInt(), EnumInt(), OpaqueInt()]
10 | comparison_supported_types = [Int(), ClockInt(), OpaqueInt()]
11 |
12 |
13 | def get_expansions(
14 | input_types: typing.List[Type],
15 | available_types: typing.List[Type],
16 | out_types: typing.List[Type],
17 | allow_node_id_reductions: bool = False,
18 | ) -> Dict[Type, typing.List[typing.Callable[[typing.Callable[[Type], Expr]], Expr]]]:
19 | out: Dict[
20 | Type, typing.List[typing.Callable[[typing.Callable[[Type], Expr]], Expr]]
21 | ] = {
22 | Bool(): [
23 | lambda get: BoolLit(False),
24 | lambda get: BoolLit(True),
25 | lambda get: And(get(Bool()), get(Bool())),
26 | lambda get: Or(get(Bool()), get(Bool())),
27 | lambda get: Not(get(Bool())),
28 | *[
29 | (lambda t: lambda get: Eq(get(t), get(t)))(t)
30 | for t in equality_supported_types
31 | ],
32 | *[
33 | (lambda t: lambda get: Gt(get(t), get(t)))(t)
34 | for t in comparison_supported_types
35 | ],
36 | *[
37 | (lambda t: lambda get: Ge(get(t), get(t)))(t)
38 | for t in comparison_supported_types
39 | ],
40 | ],
41 | }
42 |
43 | def gen_set_ops(t: Type) -> None:
44 | out[SetT(t)] = [
45 | lambda get: Call("set-minus", SetT(t), get(SetT(t)), get(SetT(t))),
46 | lambda get: Call("set-union", SetT(t), get(SetT(t)), get(SetT(t))),
47 | lambda get: Call("set-insert", SetT(t), get(t), get(SetT(t))),
48 | ]
49 |
50 | out[Bool()].append(lambda get: Eq(get(SetT(t)), get(SetT(t))))
51 | out[Bool()].append(lambda get: Eq(get(SetT(t)), Call("set-create", SetT(t))))
52 | out[Bool()].append(
53 | lambda get: Call("set-subset", Bool(), get(SetT(t)), get(SetT(t)))
54 | )
55 | out[Bool()].append(lambda get: Call("set-member", Bool(), get(t), get(SetT(t))))
56 |
57 | for t in equality_supported_types:
58 | if t in input_types:
59 | gen_set_ops(t)
60 | else:
61 | out[SetT(t)] = []
62 |
63 | if SetT(t) in out_types:
64 | out[SetT(t)] += [
65 | ((lambda t: lambda get: Call("set-create", SetT(t)))(t))
66 | if t in input_types
67 | else ((lambda t: lambda get: Call("set-create", SetT(get(t).type)))(t)),
68 | (lambda t: lambda get: Call("set-singleton", SetT(t), get(t)))(t),
69 | ]
70 |
71 | def gen_map_ops(k: Type, v: Type, allow_zero_create: bool) -> None:
72 | if MapT(k, v) in out_types:
73 | if MapT(k, v) not in out:
74 | out[MapT(k, v)] = []
75 | out[MapT(k, v)] += [
76 | (lambda get: Call("map-create", MapT(k, v)))
77 | if allow_zero_create
78 | else (lambda get: Call("map-create", MapT(get(k).type, v))),
79 | lambda get: Call("map-singleton", MapT(k, v), get(k), get(v)),
80 | ]
81 |
82 | if v not in out:
83 | out[v] = []
84 |
85 | if MapT(k, v) in input_types:
86 | if v.erase() == Int():
87 | out[v] += [
88 | lambda get: Call("map-get", v, get(MapT(k, v)), get(k), Lit(0, v)),
89 | ]
90 |
91 | if k == NodeIDInt() and allow_node_id_reductions:
92 | merge_a = Var("merge_into", v)
93 | merge_b = Var("merge_v", v)
94 |
95 | if v == Int():
96 | out[v] += [
97 | lambda get: Call(
98 | "reduce_int",
99 | v,
100 | Call("map-values", ListT(v), get(MapT(k, v))),
101 | Lambda(
102 | v,
103 | Add(merge_a, merge_b),
104 | merge_b,
105 | merge_a,
106 | ),
107 | IntLit(0),
108 | )
109 | ]
110 | elif v == Bool():
111 | out[v] += [
112 | lambda get: Call(
113 | "map-get",
114 | v,
115 | get(MapT(k, v)),
116 | get(k),
117 | Choose(BoolLit(False), BoolLit(True)),
118 | ),
119 | ]
120 |
121 | if k == NodeIDInt() and allow_node_id_reductions:
122 | merge_a = Var("merge_into", v)
123 | merge_b = Var("merge_v", v)
124 |
125 | out[v] += [
126 | lambda get: Call(
127 | "reduce_bool",
128 | v,
129 | Call("map-values", ListT(v), get(MapT(k, v))),
130 | Lambda(
131 | v,
132 | Or(merge_a, merge_b),
133 | merge_b,
134 | merge_a,
135 | ),
136 | BoolLit(False),
137 | ),
138 | lambda get: Call(
139 | "reduce_bool",
140 | v,
141 | Call("map-values", ListT(v), get(MapT(k, v))),
142 | Lambda(
143 | v,
144 | And(merge_a, merge_b),
145 | merge_b,
146 | merge_a,
147 | ),
148 | BoolLit(True),
149 | ),
150 | ]
151 | elif v.name == "Map":
152 | out[v] += [
153 | lambda get: Call(
154 | "map-get", v, get(MapT(k, v)), get(k), Call("map-create", v)
155 | ),
156 | ]
157 | else:
158 | raise Exception("NYI")
159 |
160 | for t in available_types:
161 | if t.name == "Map":
162 | gen_map_ops(t.args[0], t.args[1], t.args[0] in input_types)
163 |
164 | if Int() in input_types:
165 | if Int() not in out:
166 | out[Int()] = []
167 | out[Int()] += [
168 | lambda get: IntLit(0),
169 | lambda get: IntLit(1),
170 | lambda get: Add(get(Int()), get(Int())),
171 | lambda get: Sub(get(Int()), get(Int())),
172 | ]
173 |
174 | if EnumInt() in available_types:
175 | if EnumInt() not in out:
176 | out[EnumInt()] = []
177 | out[EnumInt()] += [(lambda i: lambda get: EnumIntLit(i))(i) for i in range(2)]
178 |
179 | if ClockInt() in input_types:
180 | if ClockInt() not in out:
181 | out[ClockInt()] = []
182 | out[ClockInt()] += [lambda get: Lit(0, ClockInt())]
183 |
184 | return out
185 |
186 |
187 | def all_node_id_gets(
188 | input: Expr,
189 | node_id: Expr,
190 | args: Dict[Type, Expr],
191 | ) -> typing.List[Expr]:
192 | if input.type.name == "Map":
193 | v = input.type.args[1]
194 | default: typing.Optional[Expr] = None
195 | if v.erase() == Int():
196 | default = Lit(0, v)
197 | elif v == Bool():
198 | default = Choose(BoolLit(False), BoolLit(True))
199 | elif v.name == "Map":
200 | default = Call("map-create", v)
201 | else:
202 | raise Exception("NYI")
203 |
204 | if input.type.args[0] == NodeIDInt():
205 | return [Call("map-get", v, input, node_id, default)]
206 | elif input.type.args[0] in args:
207 | return all_node_id_gets(
208 | Call("map-get", v, input, args[input.type.args[0]], default),
209 | node_id,
210 | args,
211 | )
212 | else:
213 | return []
214 | elif input.type.name == "Tuple":
215 | out = []
216 | for i in range(len(input.type.args)):
217 | out += all_node_id_gets(TupleGet(input, IntLit(i)), node_id, args)
218 | return out
219 | else:
220 | return []
221 |
222 |
223 | def auto_grammar(
224 | out_type: typing.Optional[Type],
225 | depth: int,
226 | *inputs: Union[Expr, ValueRef],
227 | enable_ite: bool = False,
228 | allow_node_id_reductions: bool = False,
229 | ) -> Expr:
230 | if out_type and out_type.name == "Tuple":
231 | return Tuple(
232 | *[
233 | auto_grammar(
234 | t,
235 | depth,
236 | *inputs,
237 | enable_ite=enable_ite,
238 | allow_node_id_reductions=allow_node_id_reductions,
239 | )
240 | for t in out_type.args
241 | ]
242 | )
243 |
244 | input_pool: Dict[Type, typing.List[Expr]] = {}
245 |
246 | def extract_inputs(input_type: Type, input: typing.Optional[Expr]) -> None:
247 | if input_type.name == "Tuple":
248 | for i, t in enumerate(input_type.args):
249 | if input != None:
250 | extract_inputs(t, TupleGet(input, IntLit(i))) # type: ignore
251 | else:
252 | extract_inputs(t, None)
253 | else:
254 | if not input_type in input_pool:
255 | input_pool[input_type] = []
256 | if input != None:
257 | input_pool[input_type].append(input) # type: ignore
258 | if input_type.name == "Set":
259 | extract_inputs(input_type.args[0], None)
260 | elif input_type.name == "Map":
261 | extract_inputs(input_type.args[0], None)
262 | extract_inputs(input_type.args[1], None)
263 |
264 | for input in inputs:
265 | input_type = parseTypeRef(input.type)
266 | extract_inputs(input_type, input)
267 |
268 | input_types = list(input_pool.keys())
269 |
270 | if out_type and out_type not in input_pool:
271 | extract_inputs(out_type, None)
272 |
273 | out_types = list(set(input_pool.keys()) - set(input_types))
274 |
275 | expansions = get_expansions(
276 | input_types, list(input_pool.keys()), out_types, allow_node_id_reductions
277 | )
278 |
279 | pool: Dict[Type, Expr] = {}
280 | for t, exprs in input_pool.items():
281 | zero_input_expansions = []
282 | if t in expansions:
283 | for e in expansions[t]:
284 | try:
285 | zero_input_expansions.append(e(lambda t: dict()[t])) # type: ignore
286 | except KeyError:
287 | pass
288 | if (len(exprs) + len(zero_input_expansions)) > 0:
289 | pool[t] = Choose(*exprs, *zero_input_expansions)
290 |
291 | for i in range(depth):
292 | next_pool = dict(pool)
293 | for t, expansion_list in expansions.items():
294 | new_elements = []
295 | for expansion in expansion_list:
296 | try:
297 | new_elements.append(expansion(lambda t: pool[t]))
298 | except KeyError:
299 | pass
300 |
301 | if (
302 | t in next_pool
303 | and isinstance(next_pool[t], Expr)
304 | and isinstance(next_pool[t], Choose)
305 | ):
306 | existing_set = set(next_pool[t].args)
307 | new_elements = [e for e in new_elements if e not in existing_set]
308 |
309 | if len(new_elements) > 0:
310 | if t in pool:
311 | next_pool[t] = Choose(next_pool[t], *new_elements)
312 | else:
313 | next_pool[t] = Choose(*new_elements)
314 |
315 | if enable_ite and Bool() in pool:
316 | for t in pool.keys():
317 | if t.name != "Set" and t.name != "Map":
318 | next_pool[t] = Choose(
319 | next_pool[t], Ite(pool[Bool()], pool[t], pool[t])
320 | )
321 |
322 | pool = next_pool
323 |
324 | if out_type:
325 | return pool[out_type]
326 | else:
327 | return pool # type: ignore
328 |
329 |
330 | def expand_lattice_logic(*inputs: typing.Tuple[Expr, Lattice]) -> typing.List[Expr]:
331 | lattice_to_exprs: typing.Dict[Lattice, typing.List[Expr]] = {}
332 | for input, lattice in inputs:
333 | if lattice not in lattice_to_exprs:
334 | lattice_to_exprs[lattice] = []
335 | lattice_to_exprs[lattice].append(input)
336 |
337 | next_pool = dict(lattice_to_exprs)
338 | for lattice in lattice_to_exprs.keys():
339 | if isinstance(lattice, lattices.Map):
340 | merge_a = Var("merge_a", lattice.valueType.ir_type())
341 | merge_b = Var("merge_b", lattice.valueType.ir_type())
342 | for value in lattice_to_exprs[lattice]:
343 | value_max = Call( # does the remove set have any concurrent values?
344 | "reduce_bool"
345 | if lattice.valueType.ir_type() == Bool()
346 | else "reduce_int",
347 | lattice.valueType.ir_type(),
348 | Call("map-values", ListT(lattice.valueType.ir_type()), value),
349 | Lambda(
350 | lattice.valueType.ir_type(),
351 | lattice.valueType.merge(merge_a, merge_b),
352 | merge_a,
353 | merge_b,
354 | ),
355 | lattice.valueType.bottom(),
356 | )
357 |
358 | if lattice.valueType not in next_pool:
359 | next_pool[lattice.valueType] = []
360 | if value_max not in next_pool[lattice.valueType]:
361 | next_pool[lattice.valueType].append(value_max)
362 |
363 | lattice_to_exprs = next_pool
364 | next_pool = dict(lattice_to_exprs)
365 |
366 | for lattice in lattice_to_exprs.keys():
367 | choices = Choose(*lattice_to_exprs[lattice])
368 | lattice_to_exprs[lattice].append(lattice.merge(choices, choices))
369 |
370 | lattice_to_exprs = next_pool
371 |
372 | return [Choose(*lattice_to_exprs[lattice]) for lattice in lattice_to_exprs.keys()]
373 |
--------------------------------------------------------------------------------
/katara/lattices.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from metalift import ir
3 | import typing
4 | import itertools
5 |
6 |
7 | class Lattice:
8 | def ir_type(self) -> ir.Type:
9 | raise NotImplementedError()
10 |
11 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr:
12 | raise NotImplementedError()
13 |
14 | def bottom(self) -> ir.Expr:
15 | raise NotImplementedError()
16 |
17 | def check_is_valid(self, v: ir.Expr) -> ir.Expr:
18 | raise NotImplementedError()
19 |
20 | def has_node_id(self) -> bool:
21 | raise NotImplementedError()
22 |
23 |
24 | @dataclass(frozen=True)
25 | class MaxInt(Lattice):
26 | int_type: ir.Type = ir.Int()
27 |
28 | def ir_type(self) -> ir.Type:
29 | return self.int_type
30 |
31 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr:
32 | a_var = ir.Var("max_merge_a", self.int_type)
33 | b_var = ir.Var("max_merge_b", self.int_type)
34 | return ir.Let(
35 | a_var, a, ir.Let(b_var, b, ir.Ite(ir.Ge(a_var, b_var), a_var, b_var))
36 | )
37 |
38 | def bottom(self) -> ir.Expr:
39 | return ir.Lit(0, self.int_type)
40 |
41 | def check_is_valid(self, v: ir.Expr) -> ir.Expr:
42 | return ir.Ge(v, self.bottom())
43 |
44 | def has_node_id(self) -> bool:
45 | return self.int_type == ir.NodeIDInt()
46 |
47 |
48 | @dataclass(frozen=True)
49 | class OrBool(Lattice):
50 | def ir_type(self) -> ir.Type:
51 | return ir.Bool()
52 |
53 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr:
54 | return ir.Or(a, b)
55 |
56 | def bottom(self) -> ir.Expr:
57 | return ir.BoolLit(False)
58 |
59 | def check_is_valid(self, v: ir.Expr) -> ir.Expr:
60 | return ir.BoolLit(True)
61 |
62 | def has_node_id(self) -> bool:
63 | return False
64 |
65 |
66 | @dataclass(frozen=True)
67 | class Set(Lattice):
68 | innerType: ir.Type
69 |
70 | def ir_type(self) -> ir.Type:
71 | return ir.SetT(self.innerType)
72 |
73 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr:
74 | return ir.Call("set-union", ir.SetT(self.innerType), a, b)
75 |
76 | def bottom(self) -> ir.Expr:
77 | return ir.Call("set-create", ir.SetT(self.innerType))
78 |
79 | def check_is_valid(self, v: ir.Expr) -> ir.Expr:
80 | return ir.BoolLit(True)
81 |
82 | def has_node_id(self) -> bool:
83 | return self.innerType == ir.NodeIDInt()
84 |
85 |
86 | @dataclass(frozen=True)
87 | class Map(Lattice):
88 | keyType: ir.Type
89 | valueType: Lattice
90 |
91 | def ir_type(self) -> ir.Type:
92 | return ir.MapT(self.keyType, self.valueType.ir_type())
93 |
94 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr:
95 | v_a = ir.Var("map_merge_a", self.valueType.ir_type())
96 | v_b = ir.Var("map_merge_b", self.valueType.ir_type())
97 |
98 | return ir.Call(
99 | "map-union",
100 | ir.MapT(self.keyType, self.valueType.ir_type()),
101 | a,
102 | b,
103 | ir.Lambda(
104 | self.valueType.ir_type(), self.valueType.merge(v_a, v_b), v_a, v_b
105 | ),
106 | )
107 |
108 | def bottom(self) -> ir.Expr:
109 | return ir.Call("map-create", self.ir_type())
110 |
111 | def check_is_valid(self, v: ir.Expr) -> ir.Expr:
112 | merge_a = ir.Var("merge_into", ir.Bool())
113 | merge_b = ir.Var("merge_v", self.valueType.ir_type())
114 |
115 | return ir.Call(
116 | "reduce_bool",
117 | ir.Bool(),
118 | ir.Call("map-values", ir.ListT(self.valueType.ir_type()), v),
119 | ir.Lambda(
120 | ir.Bool(),
121 | ir.And(merge_a, self.valueType.check_is_valid(merge_b)),
122 | merge_b,
123 | merge_a,
124 | ),
125 | ir.BoolLit(True),
126 | )
127 |
128 | def has_node_id(self) -> bool:
129 | return self.keyType == ir.NodeIDInt() or self.valueType.has_node_id()
130 |
131 |
132 | @dataclass(frozen=True)
133 | class LexicalProduct(Lattice):
134 | l1: Lattice
135 | l2: Lattice
136 |
137 | def ir_type(self) -> ir.Type:
138 | return ir.TupleT(self.l1.ir_type(), self.l2.ir_type())
139 |
140 | def merge(self, a: ir.Expr, b: ir.Expr) -> ir.Expr:
141 | mergeA = ir.Var("cascade_merge_a", a.type)
142 | mergeB = ir.Var("cascade_merge_b", b.type)
143 |
144 | keyA = ir.TupleGet(mergeA, ir.IntLit(0))
145 | keyB = ir.TupleGet(mergeB, ir.IntLit(0))
146 | valueA = ir.TupleGet(mergeA, ir.IntLit(1))
147 | valueB = ir.TupleGet(mergeB, ir.IntLit(1))
148 |
149 | keyMerged = self.l1.merge(keyA, keyB)
150 | valueMerged = self.l2.merge(valueA, valueB)
151 |
152 | return ir.Let(
153 | mergeA,
154 | a,
155 | ir.Let(
156 | mergeB,
157 | b,
158 | ir.Tuple(
159 | keyMerged,
160 | ir.Ite(
161 | ir.Or(
162 | ir.Eq(keyA, keyB),
163 | ir.And(
164 | ir.Not(ir.Eq(keyA, keyMerged)),
165 | ir.Not(ir.Eq(keyB, keyMerged)),
166 | ),
167 | ),
168 | valueMerged,
169 | self.l2.merge(
170 | ir.Ite(
171 | ir.Eq(keyA, keyMerged),
172 | valueA,
173 | valueB,
174 | ),
175 | self.l2.bottom(),
176 | ),
177 | ),
178 | ),
179 | ),
180 | )
181 |
182 | def bottom(self) -> ir.Expr:
183 | return ir.Tuple(self.l1.bottom(), self.l2.bottom())
184 |
185 | def check_is_valid(self, v: ir.Expr) -> ir.Expr:
186 | return ir.And(
187 | self.l1.check_is_valid(ir.TupleGet(v, ir.IntLit(0))),
188 | self.l2.check_is_valid(ir.TupleGet(v, ir.IntLit(1))),
189 | )
190 |
191 | def has_node_id(self) -> bool:
192 | return self.l1.has_node_id() or self.l2.has_node_id()
193 |
194 |
195 | def gen_types(depth: int) -> typing.Iterator[ir.Type]:
196 | if depth == 1:
197 | yield ir.Int()
198 | yield ir.ClockInt()
199 | yield ir.EnumInt()
200 | yield ir.OpaqueInt()
201 | yield ir.NodeIDInt()
202 | yield ir.Bool()
203 | else:
204 | for innerType in gen_types(depth - 1):
205 | yield innerType
206 | # TODO: anything else?
207 |
208 |
209 | int_like = {ir.Int().name, ir.ClockInt().name, ir.EnumInt().name, ir.OpaqueInt().name}
210 | comparable_int = {ir.Int().name, ir.ClockInt().name, ir.OpaqueInt().name}
211 | set_supported_elem = {ir.Int().name, ir.OpaqueInt().name}
212 | map_supported_elem = {ir.OpaqueInt().name, ir.NodeIDInt().name}
213 |
214 |
215 | def gen_lattice_types(max_depth: int) -> typing.Iterator[Lattice]:
216 | if max_depth == 1:
217 | yield OrBool()
218 |
219 | for innerType in gen_types(max_depth):
220 | if innerType.name in comparable_int:
221 | yield MaxInt(innerType)
222 |
223 | if max_depth > 1:
224 | for innerLatticeType in gen_lattice_types(max_depth - 1):
225 | yield innerLatticeType
226 |
227 | for innerType in gen_types(max_depth - 1):
228 | if innerType.name in set_supported_elem:
229 | yield Set(innerType)
230 |
231 | for keyType in gen_types(max_depth - 1):
232 | if keyType.name in map_supported_elem:
233 | for valueType in gen_lattice_types(max_depth - 1):
234 | yield Map(keyType, valueType)
235 |
236 | for innerTypePair in itertools.permutations(
237 | gen_lattice_types(max_depth - 1), 2
238 | ):
239 | yield LexicalProduct(*innerTypePair)
240 |
241 |
242 | def gen_structures(max_depth: int) -> typing.Iterator[typing.Any]:
243 | cur_type_depth = 1
244 | seen = set()
245 | while cur_type_depth <= max_depth:
246 | print(f"Type depth: {cur_type_depth}")
247 | cur_tuple_size = 1
248 | while cur_tuple_size <= cur_type_depth:
249 | print(f"Tuple size: {cur_tuple_size}")
250 | for lattice_types in itertools.combinations_with_replacement(
251 | gen_lattice_types(cur_type_depth), cur_tuple_size
252 | ):
253 | if tuple(lattice_types) in seen:
254 | continue
255 | else:
256 | seen.add(tuple(lattice_types))
257 | yield lattice_types
258 | cur_tuple_size += 1
259 | cur_type_depth += 1
260 |
--------------------------------------------------------------------------------
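
A hedged usage sketch for the combinators above, assuming metalift is installed and this repo is on the import path (the printed forms depend on metalift's Expr repr):

from metalift import ir

from katara.lattices import LexicalProduct, Map, MaxInt, OrBool, gen_structures

# A map from opaque keys to a lexicographic (clock, flag) value lattice:
# per key, the entry with the larger clock wins, and clock ties OR the flags.
lww_flag = Map(ir.OpaqueInt(), LexicalProduct(MaxInt(ir.ClockInt()), OrBool()))

a = ir.Var("a", lww_flag.ir_type())
b = ir.Var("b", lww_flag.ir_type())
print(lww_flag.merge(a, b))  # IR expression joining the two states
print(lww_flag.bottom())  # empty map
print(lww_flag.has_node_id())  # False: no NodeIDInt in the key or value

# Peek at the first few candidate state structures the search enumerates.
for _, structure in zip(range(5), gen_structures(2)):
    print(structure)
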
/katara/search_structures.py:
--------------------------------------------------------------------------------
1 | from __future__ import annotations
2 |
3 | import multiprocessing as mp
4 | import multiprocessing.pool
5 | import queue
6 | from time import time
7 | import traceback
8 | import typing
9 |
10 | from katara.lattices import Lattice
11 | from metalift import process_tracker
12 | from metalift import ir
13 | from metalift.ir import Expr, FnDecl
14 | from katara.synthesis import SynthesizeFun, synthesize_crdt
15 | from metalift.synthesis_common import SynthesisFailed
16 |
17 | from typing import Any, Callable, Iterator, List, Optional, Tuple
18 |
19 |
20 | def synthesize_crdt_e2e(
21 | queue: queue.Queue[
22 | Tuple[
23 | int,
24 | Any,
25 | int,
26 | Optional[typing.Union[str, List[FnDecl]]],
27 | ]
28 | ],
29 | synthStateStructure: List[Lattice],
30 | initState: Callable[[Any], Expr],
31 | grammarStateInvariant: Callable[[Expr, Any, int, int], Expr],
32 | grammarSupportedCommand: Callable[[Expr, Any, Any, int, int], Expr],
33 | inOrder: Callable[[Any, Any], Expr],
34 | opPrecondition: Callable[[Any], Expr],
35 | grammar: Callable[[Expr, List[ir.Var], Any, int], Expr],
36 | grammarQuery: Callable[[str, List[ir.Var], ir.Type, int], ir.Synth],
37 | grammarEquivalence: Callable[[Expr, Expr, List[ir.Var], int], Expr],
38 | targetLang: Callable[
39 | [], List[typing.Union[FnDecl, ir.FnDeclNonRecursive, ir.Axiom]]
40 | ],
41 | synthesize: SynthesizeFun,
42 | useOpList: bool,
43 | stateTypeHint: Optional[ir.Type],
44 | opArgTypeHint: Optional[List[ir.Type]],
45 | queryArgTypeHint: Optional[List[ir.Type]],
46 | queryRetTypeHint: Optional[ir.Type],
47 | baseDepth: int,
48 | filename: str,
49 | fnNameBase: str,
50 | loopsFile: str,
51 | cvcPath: str,
52 | uid: int,
53 | ) -> None:
54 | synthStateType = ir.TupleT(*[a.ir_type() for a in synthStateStructure])
55 |
56 | try:
57 | queue.put(
58 | (
59 | uid,
60 | synthStateStructure,
61 | baseDepth,
62 | synthesize_crdt(
63 | filename,
64 | fnNameBase,
65 | loopsFile,
66 | cvcPath,
67 | synthStateType,
68 | lambda: initState(synthStateStructure),
69 | lambda s, baseDepth, invariantBoost: grammarStateInvariant(
70 | s, synthStateStructure, baseDepth, invariantBoost
71 | ),
72 | lambda s, a, baseDepth, invariantBoost: grammarSupportedCommand(
73 | s, a, synthStateStructure, baseDepth, invariantBoost
74 | ),
75 | inOrder,
76 | opPrecondition,
77 | lambda inState, args, baseDepth: grammar(
78 | inState, args, synthStateStructure, baseDepth
79 | ),
80 | grammarQuery,
81 | grammarEquivalence,
82 | targetLang,
83 | synthesize,
84 | uid=uid,
85 | useOpList=useOpList,
86 | stateTypeHint=stateTypeHint,
87 | opArgTypeHint=opArgTypeHint,
88 | queryArgTypeHint=queryArgTypeHint,
89 | queryRetTypeHint=queryRetTypeHint,
90 | baseDepth=baseDepth,
91 | log=False,
92 | ),
93 | )
94 | )
95 | except SynthesisFailed:
96 | queue.put((uid, synthStateStructure, baseDepth, None))
97 | except:
98 | queue.put((uid, synthStateStructure, baseDepth, traceback.format_exc()))
99 |
100 |
101 | def search_crdt_structures(
102 | initState: Callable[[Any], Expr],
103 | grammarStateInvariant: Callable[[Expr, Any, int, int], Expr],
104 | grammarSupportedCommand: Callable[[Expr, Any, Any, int, int], Expr],
105 | inOrder: Callable[[Any, Any], Expr],
106 | opPrecondition: Callable[[Any], Expr],
107 | grammar: Callable[[Expr, List[ir.Var], Any, int], Expr],
108 | grammarQuery: Callable[[str, List[ir.Var], ir.Type, int], ir.Synth],
109 | grammarEquivalence: Callable[[Expr, Expr, List[ir.Var], int], Expr],
110 | targetLang: Callable[
111 | [], List[typing.Union[FnDecl, ir.FnDeclNonRecursive, ir.Axiom]]
112 | ],
113 | synthesize: SynthesizeFun,
114 | filename: str,
115 | fnNameBase: str,
116 | loopsFile: str,
117 | cvcPath: str,
118 | useOpList: bool,
119 | structureCandidates: Iterator[Tuple[int, Any]],
120 | reportFile: str,
121 | stateTypeHint: Optional[ir.Type] = None,
122 | opArgTypeHint: Optional[List[ir.Type]] = None,
123 | queryArgTypeHint: Optional[List[ir.Type]] = None,
124 | queryRetTypeHint: Optional[ir.Type] = None,
125 | maxThreads: int = mp.cpu_count(),
126 | upToUid: Optional[int] = None,
127 | exitFirstSuccess: bool = True,
128 | ) -> Tuple[Any, List[ir.Expr]]:
129 | q: queue.Queue[
130 | Tuple[int, Any, int, Optional[typing.Union[str, List[Expr]]]]
131 | ] = queue.Queue()
132 | queue_size = 0
133 | next_uid = 0
134 |
135 | next_res_type = None
136 | next_res = None
137 |
138 | start_times = {}
139 |
140 | try:
141 | with multiprocessing.pool.ThreadPool() as pool:
142 | with open(reportFile, "w") as report:
143 | while True:
144 | while queue_size < (maxThreads // 2 if maxThreads > 1 else 1) and (
145 |                         upToUid is None or next_uid < upToUid  # type: ignore
146 | ):
147 | next_structure_tuple = next(structureCandidates, None)
148 | if next_structure_tuple is None:
149 | break
150 | else:
151 | baseDepth, next_structure_type = next_structure_tuple
152 |
153 | def error_callback(e: BaseException) -> None:
154 | raise e
155 |
156 | try:
157 | synthStateType = ir.TupleT(
158 | *[a.ir_type() for a in next_structure_type]
159 | )
160 | synthesize_crdt(
161 | filename,
162 | fnNameBase,
163 | loopsFile,
164 | cvcPath,
165 | synthStateType,
166 | lambda: initState(next_structure_type),
167 | lambda s, baseDepth, invariantBoost: grammarStateInvariant(
168 | s,
169 | next_structure_type,
170 | baseDepth,
171 | invariantBoost,
172 | ),
173 | lambda s, a, baseDepth, invariantBoost: grammarSupportedCommand(
174 | s,
175 | a,
176 | next_structure_type,
177 | baseDepth,
178 | invariantBoost,
179 | ),
180 | inOrder,
181 | opPrecondition,
182 | lambda inState, args, baseDepth: grammar(
183 | inState,
184 | args,
185 | next_structure_type,
186 | baseDepth,
187 | ),
188 | grammarQuery,
189 | grammarEquivalence,
190 | targetLang,
191 | synthesize,
192 | uid=next_uid,
193 | useOpList=useOpList,
194 | stateTypeHint=stateTypeHint,
195 | opArgTypeHint=opArgTypeHint,
196 | queryArgTypeHint=queryArgTypeHint,
197 | queryRetTypeHint=queryRetTypeHint,
198 | baseDepth=baseDepth,
199 | log=False,
200 | skipSynth=True,
201 | )
202 |                             except KeyError:
203 | # this is due to a grammar not being able to find a value
204 | continue
205 |
206 | print(
207 | f"Enqueueing #{next_uid} (structure: {next_structure_type}, base depth: {baseDepth})"
208 | )
209 | start_times[next_uid] = time()
210 | pool.apply_async(
211 | synthesize_crdt_e2e,
212 | args=(
213 | q,
214 | next_structure_type,
215 | initState,
216 | grammarStateInvariant,
217 | grammarSupportedCommand,
218 | inOrder,
219 | opPrecondition,
220 | grammar,
221 | grammarQuery,
222 | grammarEquivalence,
223 | targetLang,
224 | synthesize,
225 | useOpList,
226 | stateTypeHint,
227 | opArgTypeHint,
228 | queryArgTypeHint,
229 | queryRetTypeHint,
230 | baseDepth,
231 | filename,
232 | fnNameBase,
233 | loopsFile,
234 | cvcPath,
235 | next_uid,
236 | ),
237 | error_callback=error_callback,
238 | )
239 | next_uid += 1
240 | queue_size += 1
241 |
242 | if queue_size == 0:
243 | if exitFirstSuccess:
244 | raise Exception("no more structures")
245 | else:
246 | break
247 | else:
248 | (ret_uid, next_res_type, baseDepth, next_res) = q.get(
249 | block=True, timeout=None
250 | )
251 | time_took = time() - start_times[ret_uid]
252 | report.write(
253 |                             f'{ret_uid},{time_took},"{str(next_res_type)}",{1},{next_res is not None}\n'
254 | )
255 | report.flush()
256 | queue_size -= 1
257 | if isinstance(next_res, str):
258 | raise Exception(
259 | "Synthesis procedure crashed, aborting\n" + next_res
260 | )
261 |                         elif next_res is not None:
262 | if exitFirstSuccess:
263 | break
264 | else:
265 | print(
266 | f"Failed to synthesize #{ret_uid} (structure: {next_res_type}, base depth: {baseDepth})"
267 | )
268 |
269 | if exitFirstSuccess:
270 |                     if next_res is None:
271 | raise Exception("Synthesis failed")
272 | else:
273 | print(
274 | "\n========================= SYNTHESIS COMPLETE =========================\n"
275 | )
276 | print("State Structure:", next_res_type)
277 | print("\nRuntime Logic:")
278 | print("\n\n".join([c.toRosette() for c in next_res])) # type: ignore
279 | return (next_res_type, next_res) # type: ignore
280 | else:
281 | print(f"See report file ({reportFile}) for results")
282 | return (next_res_type, [])
283 | finally:
284 | for p in process_tracker.all_processes:
285 | p.terminate()
286 | process_tracker.all_processes = []
287 |
--------------------------------------------------------------------------------
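
search_crdt_structures consumes structureCandidates as (baseDepth, structure) pairs and fans each candidate out to synthesize_crdt_e2e on a thread pool. Below is a sketch of how a driver might adapt gen_structures into that shape; the grammar callbacks are elided because they are application-specific, so this only shows the candidate stream.

import itertools

from katara.lattices import gen_structures


def candidates_with_depth(max_type_depth, base_depth=2):
    # Pair every enumerated lattice structure with a base grammar depth,
    # matching the (baseDepth, structure) tuples the search loop unpacks.
    for structure in gen_structures(max_type_depth):
        yield (base_depth, structure)


# Peek at the first few candidates the search loop would enqueue.
for depth, structure in itertools.islice(candidates_with_depth(2), 3):
    print(depth, structure)
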
/katara/synthesis.py:
--------------------------------------------------------------------------------
1 | import os
2 |
3 | from metalift.analysis_new import VariableTracker, analyze
4 | from metalift.ir import *
5 |
6 | import typing
7 | from typing import Callable, Union, Protocol
8 |
9 | from metalift.synthesis_common import SynthesisFailed, VerificationFailed
10 |
11 |
12 | def observeEquivalence(
13 | inputState: Expr, synthState: Expr, queryParams: typing.List[Var]
14 | ) -> Expr:
15 | return Call("equivalence", Bool(), inputState, synthState, *queryParams)
16 |
17 |
18 | def opsListInvariant(
19 | fnNameBase: str, synthState: Expr, synthStateType: Type, opType: Type
20 | ) -> Expr:
21 | return And(
22 | Eq(
23 | Call(
24 | "apply_state_transitions",
25 | synthStateType,
26 | TupleGet(synthState, IntLit(len(synthStateType.args) - 1)),
27 | Var(
28 | f"{fnNameBase}_next_state",
29 | FnT(synthStateType, synthStateType, *opType.args),
30 | ),
31 | Var(
32 | f"{fnNameBase}_init_state",
33 | FnT(synthStateType),
34 | ),
35 | ),
36 | synthState,
37 | ),
38 | Call(
39 | "ops_in_order",
40 | Bool(),
41 | TupleGet(synthState, IntLit(len(synthStateType.args) - 1)),
42 | ),
43 | )
44 |
45 |
46 | def supportedCommand(synthState: Expr, args: typing.Any) -> Expr:
47 | return Call("supportedCommand", Bool(), synthState, *args)
48 |
49 |
50 | def unpackOp(op: Expr) -> typing.List[Expr]:
51 | if op.type.name == "Tuple":
52 | return [TupleGet(op, IntLit(i)) for i in range(len(op.type.args))]
53 | else:
54 | return [op]
55 |
56 |
57 | def opListAdditionalFns(
58 | synthStateType: Type,
59 | opType: Type,
60 | initState: Callable[[], Expr],
61 | inOrder: Callable[[typing.Any, typing.Any], Expr],
62 | opPrecondition: Callable[[typing.Any], Expr],
63 | ) -> typing.List[Union[FnDecl, FnDeclNonRecursive, Axiom]]:
64 | def list_length(l: Expr) -> Expr:
65 | return Call("list_length", Int(), l)
66 |
67 | def list_get(l: Expr, i: Expr) -> Expr:
68 | return Call("list_get", opType, l, i)
69 |
70 | def list_tail(l: Expr, i: Expr) -> Expr:
71 | return Call("list_tail", ListT(opType), l, i)
72 |
73 | data = Var("data", ListT(opType))
74 | next_state_fn = Var(
75 | "next_state_fn",
76 | FnT(
77 | synthStateType,
78 | synthStateType,
79 | *(opType.args if opType.name == "Tuple" else [opType]),
80 | ),
81 | )
82 |
83 | init_state_fn = Var("init_state_fn", FnT(synthStateType))
84 |
85 | reduce_fn = FnDecl(
86 | "apply_state_transitions",
87 | synthStateType,
88 | Ite(
89 | Eq(list_length(data), IntLit(0)),
90 | CallValue(init_state_fn),
91 | CallValue(
92 | next_state_fn,
93 | Call(
94 | "apply_state_transitions",
95 | synthStateType,
96 | list_tail(data, IntLit(1)),
97 | next_state_fn,
98 | init_state_fn,
99 | ),
100 | *(
101 | [
102 | # TODO(shadaj): unnecessary cast
103 | typing.cast(
104 | Expr, TupleGet(list_get(data, IntLit(0)), IntLit(i))
105 | )
106 | for i in range(len(opType.args))
107 | ]
108 | if opType.name == "Tuple"
109 | else [list_get(data, IntLit(0))]
110 | ),
111 | ),
112 | ),
113 | data,
114 | next_state_fn,
115 | init_state_fn,
116 | )
117 |
118 | next_op = Var("next_op", opType)
119 | ops_in_order_helper = FnDecl(
120 | "ops_in_order_helper",
121 | Bool(),
122 | And(
123 | opPrecondition(unpackOp(next_op)),
124 | Ite(
125 | Eq(list_length(data), IntLit(0)),
126 | BoolLit(True),
127 | And(
128 | inOrder(unpackOp(list_get(data, IntLit(0))), unpackOp(next_op)),
129 | Call(
130 | "ops_in_order_helper",
131 | Bool(),
132 | list_get(data, IntLit(0)),
133 | list_tail(data, IntLit(1)),
134 | ),
135 | ),
136 | ),
137 | ),
138 | next_op,
139 | data,
140 | )
141 |
142 | ops_in_order = FnDecl(
143 | "ops_in_order",
144 | Bool(),
145 | Ite(
146 | Eq(list_length(data), IntLit(0)),
147 | BoolLit(True),
148 | Call(
149 | "ops_in_order_helper",
150 | Bool(),
151 | list_get(data, IntLit(0)),
152 | list_tail(data, IntLit(1)),
153 | ),
154 | ),
155 | data,
156 | )
157 |
158 | return [reduce_fn, ops_in_order_helper, ops_in_order]
159 |
160 |
161 | class SynthesizeFun(Protocol):
162 | def __call__(
163 | self,
164 | basename: str,
165 | targetLang: typing.List[Union[FnDecl, FnDeclNonRecursive, Axiom]],
166 | vars: typing.Set[Var],
167 | invAndPs: typing.List[Synth],
168 | preds: Union[str, typing.List[Expr]],
169 | vc: Expr,
170 | loopAndPsInfo: typing.List[Expr],
171 | cvcPath: str = "cvc5",
172 | uid: int = 0,
173 | noVerify: bool = False,
174 | unboundedInts: bool = False,
175 | optimize_vc_equality: bool = False,
176 | listBound: int = 2,
177 | log: bool = True,
178 | ) -> typing.List[FnDecl]:
179 | ...
180 |
181 |
182 | def synthesize_crdt(
183 | filename: str,
184 | fnNameBase: str,
185 | loopsFile: str,
186 | cvcPath: str,
187 | synthStateType: Type,
188 | initState: Callable[[], Expr],
189 | grammarStateInvariant: Callable[[Expr, int, int], Expr],
190 | grammarSupportedCommand: Callable[[Expr, typing.Any, int, int], Expr],
191 | inOrder: Callable[[typing.Any, typing.Any], Expr],
192 | opPrecondition: Callable[[typing.Any], Expr],
193 | grammar: Callable[[Expr, typing.List[Var], int], Expr],
194 | grammarQuery: Callable[[str, typing.List[Var], Type, int], Synth],
195 | grammarEquivalence: Callable[[Expr, Expr, typing.List[Var], int], Expr],
196 | targetLang: Callable[[], typing.List[Union[FnDecl, FnDeclNonRecursive, Axiom]]],
197 | synthesize: SynthesizeFun,
198 | stateTypeHint: typing.Optional[Type] = None,
199 | opArgTypeHint: typing.Optional[typing.List[Type]] = None,
200 | queryArgTypeHint: typing.Optional[typing.List[Type]] = None,
201 | queryRetTypeHint: typing.Optional[Type] = None,
202 | uid: int = 0,
203 | unboundedInts: bool = True,
204 | useOpList: bool = False,
205 | listBound: int = 1,
206 | baseDepth: int = 2,
207 | invariantBoost: int = 0,
208 | log: bool = True,
209 | skipSynth: bool = False,
210 | ) -> typing.List[FnDecl]:
211 | basename = os.path.splitext(os.path.basename(filename))[0]
212 |
213 | tracker = VariableTracker()
214 |
215 | state_transition_analysis = analyze(
216 | filename,
217 | fnNameBase + "_next_state",
218 | loopsFile,
219 | )
220 |
221 | query_analysis = analyze(
222 | filename,
223 | fnNameBase + "_response",
224 | loopsFile,
225 | )
226 |
227 | origSynthStateType = synthStateType
228 |
229 | op_arg_types = (
230 | [v.type for v in state_transition_analysis.arguments[1:]]
231 | if opArgTypeHint is None
232 | else opArgTypeHint
233 | )
234 |     opType = TupleT(*op_arg_types) if len(op_arg_types) > 1 else op_arg_types[0]
235 |
236 | if useOpList:
237 | synthStateType = TupleT(*synthStateType.args, ListT(opType))
238 |
239 | queryParameterTypes = (
240 | [v.type for v in query_analysis.arguments[1:]]
241 | if queryArgTypeHint is None
242 | else queryArgTypeHint
243 | )
244 |
245 | def supportedCommandWithList(synthState: Expr, args: typing.Any) -> Expr:
246 | return And(
247 | opPrecondition(args),
248 | Ite(
249 | Eq(
250 | Call(
251 | "list_length",
252 | Int(),
253 | TupleGet(synthState, IntLit(len(synthStateType.args) - 1)),
254 | ),
255 | IntLit(0),
256 | ),
257 | BoolLit(True),
258 | inOrder(
259 | unpackOp(
260 | Call(
261 | "list_get",
262 | opType,
263 | TupleGet(synthState, IntLit(len(synthStateType.args) - 1)),
264 | IntLit(0),
265 | )
266 | ),
267 | args,
268 | ),
269 | ),
270 | )
271 |
272 | seq_start_state = tracker.variable(
273 | "seq_start_state", state_transition_analysis.arguments[0].type
274 | )
275 | synth_start_state = tracker.variable("synth_start_state", synthStateType)
276 | equivalence_query_vars = [
277 | tracker.variable(
278 | f"start_state_query_var_{i}", query_analysis.arguments[i + 1].type
279 | )
280 | for i in range(len(query_analysis.arguments) - 1)
281 | ]
282 |
283 | synth_after_op = tracker.variable("synth_after_op", synthStateType)
284 |
285 | first_op_group = tracker.group("first_op")
286 | first_op_args = [
287 | first_op_group.variable(v.name(), t)
288 | for v, t in zip(state_transition_analysis.arguments[1:], op_arg_types)
289 | ]
290 |
291 | second_op_group = tracker.group("second_op")
292 | second_op_args = [
293 | second_op_group.variable(v.name(), t)
294 | for v, t in zip(state_transition_analysis.arguments[1:], op_arg_types)
295 | ]
296 |
297 | vcStateTransition = state_transition_analysis.call(seq_start_state, *first_op_args)(
298 | tracker,
299 | lambda seq_after_op: Implies(
300 | And(
301 | observeEquivalence(
302 | seq_start_state, synth_start_state, equivalence_query_vars
303 | ),
304 | *(
305 | [
306 | opsListInvariant(
307 | fnNameBase, synth_start_state, synthStateType, opType
308 | ),
309 | supportedCommandWithList(synth_start_state, first_op_args),
310 | ]
311 | if useOpList
312 | else [
313 | opPrecondition(first_op_args),
314 | supportedCommand(synth_start_state, first_op_args),
315 | ]
316 | ),
317 | Eq(
318 | synth_after_op,
319 | Call(
320 | f"{fnNameBase}_next_state",
321 | synthStateType,
322 | synth_start_state,
323 | *first_op_args,
324 | ),
325 | ),
326 | ),
327 | query_analysis.call(seq_start_state, *equivalence_query_vars)(
328 | tracker,
329 | lambda seqQueryResult: Implies(
330 | Eq(
331 | seqQueryResult,
332 | Call(
333 | f"{fnNameBase}_response",
334 | seqQueryResult.type,
335 | synth_start_state,
336 | *equivalence_query_vars,
337 | ),
338 | ),
339 | And(
340 | observeEquivalence(
341 | seq_after_op, synth_after_op, equivalence_query_vars
342 | ),
343 | query_analysis.call(seq_after_op, *equivalence_query_vars)(
344 | tracker,
345 | lambda seqQueryResult: Eq(
346 | seqQueryResult,
347 | Call(
348 | f"{fnNameBase}_response",
349 | seqQueryResult.type,
350 | synth_after_op,
351 | *equivalence_query_vars,
352 | ),
353 | ),
354 | ),
355 | *(
356 | [
357 | Implies(
358 | And(
359 | inOrder(first_op_args, second_op_args),
360 | opPrecondition(second_op_args),
361 | ),
362 | supportedCommand(synth_after_op, second_op_args),
363 | )
364 | ]
365 | if not useOpList
366 | else []
367 | ),
368 | ),
369 | ),
370 | ),
371 | ),
372 | )
373 |
374 | # define synthesis problem for state transition
375 | cur_state_param = Var("cur_state", synthStateType)
376 |
377 | op_arg_vars = [
378 | Var(v.name(), t)
379 | for v, t in zip(state_transition_analysis.arguments[1:], op_arg_types)
380 | ]
381 |
382 | stateTransitionSynthNode = grammar(
383 | cur_state_param,
384 | op_arg_vars,
385 | baseDepth,
386 | )
387 |
388 | invAndPsStateTransition = (
389 | [
390 | Synth(
391 | fnNameBase + "_next_state",
392 | Tuple(
393 | # the grammar directly produces the tupled next state, unpack to tack on the op-list
394 | *stateTransitionSynthNode.args,
395 | Call(
396 | "list_prepend",
397 | ListT(opType),
398 | Tuple(*op_arg_vars) if len(op_arg_vars) > 1 else op_arg_vars[0],
399 | TupleGet(
400 | cur_state_param,
401 | IntLit(len(synthStateType.args) - 1),
402 | ),
403 | ),
404 | ),
405 | cur_state_param,
406 | *op_arg_vars,
407 | )
408 | ]
409 | if useOpList
410 | else [
411 | Synth(
412 | fnNameBase + "_next_state",
413 | stateTransitionSynthNode,
414 | cur_state_param,
415 | *op_arg_vars,
416 | )
417 | ]
418 | )
419 | # end state transition (in order)
420 |
421 | # begin query
422 | invAndPsQuery = [
423 | grammarQuery(
424 | query_analysis.name,
425 | [Var(query_analysis.arguments[0].name(), synthStateType)]
426 | + (
427 | [
428 | Var(query_analysis.arguments[i + 1].name(), queryArgTypeHint[i])
429 | for i in range(len(queryArgTypeHint))
430 | ]
431 | if queryArgTypeHint
432 | else query_analysis.arguments[1:]
433 | ),
434 | query_analysis.return_type
435 | if queryRetTypeHint is None
436 | else queryRetTypeHint,
437 | baseDepth,
438 | )
439 | ]
440 | # end query
441 |
442 | # begin init state
443 | initState_analysis = analyze(
444 | filename,
445 | fnNameBase + "_init_state",
446 | loopsFile,
447 | )
448 |
449 | synthInitState = tracker.variable("synth_init_state", synthStateType)
450 |
451 | init_op_arg_vars = []
452 | for i, typ in enumerate(op_arg_types):
453 | init_op_arg_vars.append(tracker.variable(f"init_op_arg_{i}", typ))
454 |
455 | queryParamVars = [
456 | tracker.variable(
457 | f"init_state_equivalence_query_param_{i}",
458 | query_analysis.arguments[i + 1].type,
459 | )
460 | for i in range(len(query_analysis.arguments) - 1)
461 | ]
462 |
463 | vcInitState = initState_analysis.call()(
464 | tracker,
465 | lambda seqInitialState: Implies(
466 | Eq(synthInitState, Call(f"{fnNameBase}_init_state", synthStateType)),
467 | And(
468 | observeEquivalence(seqInitialState, synthInitState, queryParamVars),
469 | query_analysis.call(seqInitialState, *queryParamVars)(
470 | tracker,
471 | lambda seqQueryResult: Eq(
472 | seqQueryResult,
473 | Call(
474 | f"{fnNameBase}_response",
475 | seqQueryResult.type,
476 | synthInitState,
477 | *queryParamVars,
478 | ),
479 | ),
480 | ),
481 | BoolLit(True)
482 | if useOpList
483 | else Implies(
484 | opPrecondition(init_op_arg_vars),
485 | supportedCommand(synthInitState, init_op_arg_vars),
486 | ),
487 | ),
488 | ),
489 | )
490 |
491 | initStateSynthNode = initState()
492 | invAndPsInitState = [
493 | Synth(
494 | fnNameBase + "_init_state",
495 | Tuple(
496 | *initStateSynthNode.args,
497 | Call("list_empty", ListT(opType)),
498 | )
499 | if useOpList
500 | else Tuple(
501 | *initStateSynthNode.args,
502 | ),
503 | )
504 | ]
505 | # end init state
506 |
507 | # begin equivalence
508 | inputStateForEquivalence = Var(
509 | "inputState",
510 | state_transition_analysis.arguments[0].type
511 | if stateTypeHint is None
512 | else stateTypeHint,
513 | )
514 | synthStateForEquivalence = Var("synthState", synthStateType)
515 |
516 | equivalenceQueryParams = [
517 | Var(f"equivalence_query_param_{i}", queryParameterTypes[i])
518 | for i in range(len(queryParameterTypes))
519 | ]
520 |
521 | invAndPsEquivalence = [
522 | Synth(
523 | "equivalence",
524 | And(
525 | grammarEquivalence(
526 | inputStateForEquivalence,
527 | synthStateForEquivalence,
528 | equivalenceQueryParams,
529 | baseDepth,
530 | ),
531 | *(
532 | [
533 | grammarStateInvariant(
534 | synthStateForEquivalence, baseDepth, invariantBoost
535 | )
536 | ]
537 | if not useOpList
538 | else []
539 | ),
540 | ),
541 | inputStateForEquivalence,
542 | synthStateForEquivalence,
543 | *equivalenceQueryParams,
544 | )
545 | ]
546 |
547 |     synthStateForSupported = Var("supported_synthState", synthStateType)
548 | argList = [
549 | Var(
550 | f"supported_arg_{i}",
551 | op_arg_types[i],
552 | )
553 | for i in range(len(op_arg_types))
554 | ]
555 | invAndPsSupported = (
556 | [
557 | Synth(
558 | "supportedCommand",
559 | grammarSupportedCommand(
560 | synthStateForSupported, argList, baseDepth, invariantBoost
561 | ),
562 | synthStateForSupported,
563 | *argList,
564 | )
565 | ]
566 | if not useOpList
567 | else []
568 | )
569 | # end equivalence
570 |
571 | if log:
572 | print("====== synthesis")
573 |
574 | combinedVCVars = set(tracker.all())
575 |
576 | combinedInvAndPs = (
577 | invAndPsStateTransition
578 | + invAndPsQuery
579 | + invAndPsInitState
580 | + invAndPsEquivalence
581 | + invAndPsSupported
582 | )
583 |
584 | combinedVC = And(vcStateTransition, vcInitState)
585 |
586 | lang = targetLang()
587 | if useOpList:
588 | lang = lang + opListAdditionalFns(
589 | synthStateType, opType, initState, inOrder, opPrecondition
590 | )
591 |
592 | if skipSynth:
593 | return # type: ignore
594 |
595 | try:
596 | out = synthesize(
597 | basename,
598 | lang,
599 | combinedVCVars,
600 | combinedInvAndPs,
601 | [],
602 | combinedVC,
603 | [*combinedInvAndPs],
604 | cvcPath,
605 | uid=uid,
606 | unboundedInts=unboundedInts,
607 | noVerify=useOpList,
608 | listBound=listBound,
609 | log=log,
610 | )
611 | except VerificationFailed:
612 |         # only reachable in direct synthesis mode, since verification is skipped when useOpList
613 | print(
614 | f"#{uid}: CVC5 failed to verify synthesized design, increasing Rosette data structure bounds to",
615 | listBound + 1,
616 | )
617 | return synthesize_crdt(
618 | filename,
619 | fnNameBase,
620 | loopsFile,
621 | cvcPath,
622 | origSynthStateType,
623 | initState,
624 | grammarStateInvariant,
625 | grammarSupportedCommand,
626 | inOrder,
627 | opPrecondition,
628 | grammar,
629 | grammarQuery,
630 | grammarEquivalence,
631 | targetLang,
632 | synthesize,
633 | stateTypeHint=stateTypeHint,
634 | opArgTypeHint=opArgTypeHint,
635 | queryArgTypeHint=queryArgTypeHint,
636 | queryRetTypeHint=queryRetTypeHint,
637 | uid=uid,
638 | unboundedInts=unboundedInts,
639 | useOpList=useOpList,
640 | listBound=listBound + 1,
641 | baseDepth=baseDepth,
642 | invariantBoost=invariantBoost,
643 | log=log,
644 | )
645 |
646 | if useOpList:
647 | print(
648 | f"#{uid}: Synthesizing invariants for unbounded verification (Rosette structure/history bound: {listBound})"
649 | )
650 | equivalence_fn = [x for x in out if x.args[0] == "equivalence"][0]
651 | state_transition_fn = [
652 | x for x in out if x.args[0] == f"{fnNameBase}_next_state"
653 | ][0]
654 | query_fn = [x for x in out if x.args[0] == f"{fnNameBase}_response"][0]
655 | init_state_fn = [x for x in out if x.args[0] == f"{fnNameBase}_init_state"][0]
656 |
657 | equivalence_fn.args[3] = Var(
658 | equivalence_fn.args[3].args[0],
659 | TupleT(*equivalence_fn.args[3].type.args[:-1]),
660 | )
661 |
662 | equivalence_fn.args[1] = equivalence_fn.args[1].rewrite(
663 | {equivalence_fn.args[3].args[0]: equivalence_fn.args[3]}
664 | )
665 |
666 | state_transition_fn.args[2] = Var(
667 | state_transition_fn.args[2].args[0],
668 | TupleT(*state_transition_fn.args[2].type.args[:-1]),
669 | )
670 |
671 | # drop the op-list
672 | state_transition_fn.args[1] = Tuple(
673 | *[
674 | e.rewrite(
675 | {state_transition_fn.args[2].args[0]: state_transition_fn.args[2]}
676 | )
677 | for e in state_transition_fn.args[1].args[:-1]
678 | ]
679 | )
680 |
681 | query_fn.args[2] = Var(
682 | query_fn.args[2].args[0], TupleT(*query_fn.args[2].type.args[:-1])
683 | )
684 |
685 | query_fn.args[1] = query_fn.args[1].rewrite(
686 | {query_fn.args[2].args[0]: query_fn.args[2]}
687 | )
688 |
689 | init_state_fn.args[1] = Tuple(*init_state_fn.args[1].args[:-1])
690 |
691 | try:
692 | # attempt to synthesize the invariants
693 | return synthesize_crdt(
694 | filename,
695 | fnNameBase,
696 | loopsFile,
697 | cvcPath,
698 | origSynthStateType,
699 | lambda: init_state_fn.args[1], # type: ignore
700 | grammarStateInvariant,
701 | grammarSupportedCommand,
702 | inOrder,
703 | opPrecondition,
704 | lambda inState, args, _baseDepth: typing.cast(
705 | Expr, state_transition_fn.args[1]
706 | ).rewrite(
707 | {
708 | cur_state_param.name(): inState,
709 | **{orig.name(): new for orig, new in zip(op_arg_vars, args)},
710 | }
711 | ),
712 | lambda _name, _args, _retT, _baseDepth: Synth(
713 | query_fn.args[0], query_fn.args[1], *query_fn.args[2:]
714 | ),
715 | lambda a, b, _baseDepth, _invariantBoost: equivalence_fn.args[1], # type: ignore
716 | targetLang,
717 | synthesize,
718 | stateTypeHint=stateTypeHint,
719 | opArgTypeHint=opArgTypeHint,
720 | queryArgTypeHint=queryArgTypeHint,
721 | queryRetTypeHint=queryRetTypeHint,
722 | uid=uid,
723 | unboundedInts=unboundedInts,
724 | useOpList=False,
725 | listBound=listBound,
726 | baseDepth=baseDepth,
727 | invariantBoost=invariantBoost,
728 | log=log,
729 | )
730 | except SynthesisFailed:
731 | try:
732 | # try to re-verify with a larger bound
733 | print(
734 | f"#{uid}: re-verifying with history bound {listBound + 1} and attempting to re-synthesize invariants with deeper grammar"
735 | )
736 | return synthesize_crdt(
737 | filename,
738 | fnNameBase,
739 | loopsFile,
740 | cvcPath,
741 | origSynthStateType,
742 | lambda: init_state_fn.args[1], # type: ignore
743 | grammarStateInvariant,
744 | grammarSupportedCommand,
745 | inOrder,
746 | opPrecondition,
747 | lambda inState, args, _baseDepth: typing.cast(
748 | Expr, state_transition_fn.args[1]
749 | ).rewrite(
750 | {
751 | cur_state_param.name(): inState,
752 | **{
753 | orig.name(): new for orig, new in zip(op_arg_vars, args)
754 | },
755 | }
756 | ),
757 | lambda _name, args, _retT, _baseDepth: Synth(
758 | query_fn.args[0], query_fn.args[1], *args
759 | ),
760 | lambda a, b, c, _baseDepth: equivalence_fn.args[1], # type: ignore
761 | targetLang,
762 | synthesize,
763 | stateTypeHint=stateTypeHint,
764 | opArgTypeHint=opArgTypeHint,
765 | queryArgTypeHint=queryArgTypeHint,
766 | queryRetTypeHint=queryRetTypeHint,
767 | uid=uid,
768 | unboundedInts=unboundedInts,
769 | useOpList=useOpList,
770 | listBound=listBound + 1,
771 | baseDepth=baseDepth,
772 | invariantBoost=invariantBoost + 1,
773 | log=log,
774 | )
775 | except SynthesisFailed:
776 | print(
777 | f"#{uid}: could not synthesize invariants, re-synthesizing entire design with history bound {listBound + 1}"
778 | )
779 | return synthesize_crdt(
780 | filename,
781 | fnNameBase,
782 | loopsFile,
783 | cvcPath,
784 | origSynthStateType,
785 | initState,
786 | grammarStateInvariant,
787 | grammarSupportedCommand,
788 | inOrder,
789 | opPrecondition,
790 | grammar,
791 | grammarQuery,
792 | grammarEquivalence,
793 | targetLang,
794 | synthesize,
795 | stateTypeHint=stateTypeHint,
796 | opArgTypeHint=opArgTypeHint,
797 | queryArgTypeHint=queryArgTypeHint,
798 | queryRetTypeHint=queryRetTypeHint,
799 | uid=uid,
800 | unboundedInts=unboundedInts,
801 | useOpList=useOpList,
802 | listBound=listBound + 1,
803 | baseDepth=baseDepth,
804 | invariantBoost=invariantBoost,
805 | log=log,
806 | )
807 | else:
808 | return out
809 |
--------------------------------------------------------------------------------
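
The error handling at the end of synthesize_crdt encodes a retry ladder that is easier to see with the synthesis plumbing stripped away. Below is a schematic, runnable restatement under stubbed calls; run_synthesis and synthesize_invariants are placeholders for the real Rosette/CVC5 invocations, not katara APIs.

class SynthesisFailed(Exception): ...  # stand-ins for metalift.synthesis_common
class VerificationFailed(Exception): ...


def run_synthesis(list_bound):  # stub for the bounded synthesize() call
    return ["design"]


def synthesize_invariants(design, list_bound, boost):  # stub for the useOpList=False re-run
    return design


def retry_ladder(list_bound, use_op_list, boost=0):
    try:
        out = run_synthesis(list_bound)
    except VerificationFailed:
        # CVC5 rejected the bounded-synthesis result: widen the Rosette
        # data-structure bound and start over.
        return retry_ladder(list_bound + 1, use_op_list, boost)
    if not use_op_list:
        return out
    try:
        # Freeze the design found with the op-list and synthesize
        # invariants for unbounded verification.
        return synthesize_invariants(out, list_bound, boost)
    except SynthesisFailed:
        try:
            # Retry with a longer history bound and a deeper invariant grammar.
            return retry_ladder(list_bound + 1, use_op_list, boost + 1)
        except SynthesisFailed:
            # Last resort: re-synthesize the whole design at the larger bound.
            return retry_ladder(list_bound + 1, use_op_list, boost)


print(retry_ladder(1, True))  # ['design']
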
/llvm-pass/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | cmake_minimum_required(VERSION 3.1)
2 | project(AddEmptyBlocks)
3 |
4 | # support C++14 features used by LLVM 10.0.0
5 | set(CMAKE_CXX_STANDARD 14)
6 | set(CMAKE_CXX_FLAGS "-D_GLIBCXX_USE_CXX11_ABI=0 -fno-rtti")
7 |
8 | find_package(LLVM REQUIRED CONFIG)
9 | add_definitions(${LLVM_DEFINITIONS})
10 | include_directories(${LLVM_INCLUDE_DIRS})
11 | link_directories(${LLVM_LIBRARY_DIRS})
12 |
13 | add_subdirectory(addEmptyBlocks) # Use your pass name here.
14 |
--------------------------------------------------------------------------------
/llvm-pass/addEmptyBlocks/AddEmptyBlocks.cpp:
--------------------------------------------------------------------------------
1 | #include