├── .clang-format
├── .devcontainer.json
├── .dockerignore
├── .github
│   ├── CODE_OF_CONDUCT.md
│   ├── CONTRIBUTING.md
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── workflows
│       ├── benchmark.yml
│       ├── ci.yml
│       ├── pr-assignee.yml
│       ├── publish-latest.yml
│       └── publish-release.yml
├── .gitignore
├── .gitmodules
├── .rsync-filter
├── BENCHMARKING.md
├── CHANGELOG.md
├── CMakeLists.txt
├── Dockerfile.ignore
├── Dockerfile.sgx
├── Dockerfile.virtual
├── LICENSE
├── Makefile
├── README.md
├── SECURITY.md
├── benchmark
│   ├── all.py
│   ├── analysis.py
│   ├── certs.py
│   ├── ci.py
│   ├── commit-latency-analysis.ipynb
│   ├── common.py
│   ├── distributed.py
│   ├── distributed
│   │   ├── files
│   │   │   └── ccf-binary-dir.diff
│   │   ├── get_ips.yaml
│   │   ├── setup_nodes.yaml
│   │   └── values.yaml
│   ├── etcd-analysis.ipynb
│   ├── etcd.py
│   ├── etcd_cluster.py
│   ├── generator.py
│   ├── go-ycsb
│   │   └── workloads
│   │       ├── workload_template
│   │       ├── workloada
│   │       ├── workloadb
│   │       ├── workloadc
│   │       ├── workloadd
│   │       ├── workloade
│   │       └── workloadf
│   ├── k6-analysis.ipynb
│   ├── k6.js
│   ├── k6.py
│   ├── local.py
│   ├── lskv_cluster.py
│   ├── perf-analysis.ipynb
│   ├── perf_system.py
│   ├── piccolo-requests-http1.parquet
│   ├── piccolo_generate.py
│   ├── receipt_verify.py
│   ├── stores.py
│   ├── ycsb-analysis.ipynb
│   └── ycsb.py
├── cgmanifest.json
├── cmake
│   └── version.cmake
├── config
│   ├── cchost_config.sgx.json
│   └── cchost_config.virtual.json
├── constitution
│   ├── actions.js
│   ├── apply.js
│   ├── resolve.js
│   └── validate.js
├── docs
│   ├── index.md
│   └── receipts.md
├── etcdctl.sh
├── flake.lock
├── flake.nix
├── integration-tests.sh
├── nix
│   ├── README.md
│   ├── az-dcap.nix
│   ├── ccf-sandbox.nix
│   ├── ccf-sandbox.sh
│   ├── ccf.nix
│   ├── ci-checks.nix
│   ├── default.nix
│   ├── k6.nix
│   ├── lib.nix
│   ├── lskv-sandbox.nix
│   ├── lskv.nix
│   ├── lvi-mitigation.nix
│   ├── openenclave.nix
│   ├── patches
│   │   ├── ccf-ignore-submitter.diff
│   │   ├── ccf-no-python-pb2.diff
│   │   ├── ccf-no-python.diff
│   │   ├── ccf-protoc-binary.diff
│   │   ├── openenclave-pkgconfig.diff
│   │   └── openenclave.diff
│   ├── python
│   │   ├── adtk.nix
│   │   ├── better-exceptions.nix
│   │   ├── ccf_infra_setup.py
│   │   ├── cimetrics.nix
│   │   ├── columnar.nix
│   │   ├── pycose.nix
│   │   ├── pyright.nix
│   │   ├── python-ccf-infra.nix
│   │   ├── python-ccf.nix
│   │   ├── string-color.nix
│   │   └── types-paramiko.nix
│   └── sgx-dcap.nix
├── oe_sign.conf
├── out_parser.py
├── patches
│   ├── 0001-etcd-patches.patch
│   └── k6-micro.diff
├── proto
│   ├── CMakeLists.txt
│   ├── build.sh
│   ├── etcd.proto
│   ├── lskvserver.proto
│   └── status.proto
├── python
│   ├── lskv
│   │   └── governance.py
│   └── pyproject.toml
├── receipts.sh
├── requirements.txt
├── scripts
│   ├── check-cmake-format.sh
│   ├── check-format.sh
│   ├── check-issues.sh
│   ├── check-todo.sh
│   ├── ci-checks.sh
│   └── notice_check.py
├── src
│   └── app
│       ├── app.cpp
│       ├── exceptions.h
│       ├── grpc.h
│       ├── index.cpp
│       ├── index.h
│       ├── json_grpc.h
│       ├── kvstore.cpp
│       ├── kvstore.h
│       ├── leases.cpp
│       ├── leases.h
│       └── node_data.h
└── tests
    ├── test_common.py
    └── test_single.py
/.clang-format:
--------------------------------------------------------------------------------
1 | ---
2 | Language: Cpp
3 | AccessModifierOffset: -2
4 | AlignAfterOpenBracket: AlwaysBreak
5 | AlignConsecutiveMacros: false
6 | AlignConsecutiveAssignments: false
7 | AlignConsecutiveDeclarations: false
8 | AlignEscapedNewlines: DontAlign
9 | AlignOperands: false
10 | AlignTrailingComments: false
11 | AllowAllArgumentsOnNextLine: true
12 | AllowAllConstructorInitializersOnNextLine: false
13 | AllowAllParametersOfDeclarationOnNextLine: true
14 | AllowShortBlocksOnASingleLine: Never
15 | AllowShortCaseLabelsOnASingleLine: false
16 | AllowShortFunctionsOnASingleLine: Empty
17 | AllowShortLambdasOnASingleLine: All
18 | AllowShortIfStatementsOnASingleLine: Never
19 | AllowShortLoopsOnASingleLine: false
20 | AlwaysBreakAfterReturnType: None
21 | AlwaysBreakBeforeMultilineStrings: true
22 | AlwaysBreakTemplateDeclarations: Yes
23 | BinPackArguments: false
24 | BinPackParameters: false
25 | BraceWrapping:
26 | AfterCaseLabel: true
27 | AfterClass: true
28 | AfterControlStatement: true
29 | AfterEnum: true
30 | AfterFunction: true
31 | AfterNamespace: true
32 | AfterObjCDeclaration: true
33 | AfterStruct: true
34 | AfterUnion: true
35 | AfterExternBlock: true
36 | BeforeCatch: true
37 | BeforeElse: true
38 | IndentBraces: false
39 | SplitEmptyFunction: false
40 | SplitEmptyRecord: false
41 | SplitEmptyNamespace: false
42 | BreakBeforeBinaryOperators: None
43 | BreakBeforeBraces: Custom
44 | BreakInheritanceList: BeforeColon
45 | BreakBeforeTernaryOperators: false
46 | BreakConstructorInitializers: AfterColon
47 | BreakStringLiterals: true
48 | ColumnLimit: 80
49 | CommentPragmas: '^ IWYU pragma:'
50 | CompactNamespaces: false
51 | ConstructorInitializerAllOnOneLineOrOnePerLine: true
52 | ConstructorInitializerIndentWidth: 2
53 | ContinuationIndentWidth: 2
54 | Cpp11BracedListStyle: true
55 | DeriveLineEnding: true
56 | DerivePointerAlignment: false
57 | DisableFormat: false
58 | ExperimentalAutoDetectBinPacking: false
59 | FixNamespaceComments: false
60 | ForEachMacros:
61 | - FOREACH
62 | - Q_FOREACH
63 | - BOOST_FOREACH
64 | IncludeBlocks: Regroup
65 | IncludeCategories:
66 | - Regex: '^"(llvm|llvm-c|clang|clang-c)/'
67 | Priority: 2
68 | SortPriority: 0
69 | - Regex: '^(<|"(gtest|gmock|isl|json)/)'
70 | Priority: 3
71 | SortPriority: 0
72 | - Regex: '.*'
73 | Priority: 1
74 | SortPriority: 0
75 | IncludeIsMainRegex: '(Test)?$'
76 | IncludeIsMainSourceRegex: ''
77 | IndentCaseLabels: true
78 | IndentGotoLabels: true
79 | IndentPPDirectives: AfterHash
80 | IndentWidth: 2
81 | IndentWrappedFunctionNames: false
82 | JavaScriptQuotes: Leave
83 | JavaScriptWrapImports: true
84 | KeepEmptyLinesAtTheStartOfBlocks: false
85 | MacroBlockBegin: ''
86 | MacroBlockEnd: ''
87 | MaxEmptyLinesToKeep: 1
88 | NamespaceIndentation: All
89 | ObjCBinPackProtocolList: Auto
90 | ObjCBlockIndentWidth: 2
91 | ObjCSpaceAfterProperty: false
92 | ObjCSpaceBeforeProtocolList: true
93 | PenaltyBreakAssignment: 2
94 | PenaltyBreakBeforeFirstCallParameter: 19
95 | PenaltyBreakComment: 300
96 | PenaltyBreakFirstLessLess: 120
97 | PenaltyBreakString: 1000
98 | PenaltyBreakTemplateDeclaration: 10
99 | PenaltyExcessCharacter: 1000000
100 | PenaltyReturnTypeOnItsOwnLine: 600
101 | PointerAlignment: Left
102 | ReflowComments: true
103 | SortIncludes: true
104 | SortUsingDeclarations: true
105 | SpaceAfterCStyleCast: false
106 | SpaceAfterLogicalNot: false
107 | SpaceAfterTemplateKeyword: true
108 | SpaceBeforeAssignmentOperators: true
109 | SpaceBeforeCpp11BracedList: false
110 | SpaceBeforeCtorInitializerColon: true
111 | SpaceBeforeInheritanceColon: true
112 | SpaceBeforeParens: ControlStatements
113 | SpaceBeforeRangeBasedForLoopColon: true
114 | SpaceInEmptyBlock: false
115 | SpaceInEmptyParentheses: false
116 | SpacesBeforeTrailingComments: 1
117 | SpacesInAngles: false
118 | SpacesInConditionalStatement: false
119 | SpacesInContainerLiterals: false
120 | SpacesInCStyleCastParentheses: false
121 | SpacesInParentheses: false
122 | SpacesInSquareBrackets: false
123 | SpaceBeforeSquareBrackets: false
124 | Standard: c++20
125 | StatementMacros:
126 | - Q_UNUSED
127 | - QT_REQUIRE_VERSION
128 | TabWidth: 2
129 | UseCRLF: false
130 | UseTab: Never
131 | ---
132 | Language: Proto
133 | IndentWidth: 2
134 | UseTab: Never
135 | BreakBeforeBraces: Stroustrup
136 |
--------------------------------------------------------------------------------
/.devcontainer.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "Development Container for CCF C++ Apps",
3 | "context": "..",
4 | "image": "mcr.microsoft.com/ccf/app/dev:4.0.7-virtual",
5 | "runArgs": [],
6 | "extensions": ["ms-vscode.cpptools"]
7 | }
8 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | /*.deb
3 | /*.etcd/
4 | /*.json
5 | /*.md
6 | /*.out
7 | /*.sh
8 | /.cache/
9 | /.clang-format
10 | /.dockerignore
11 | /.github/
12 | /.gitignore
13 | /.gitmodules
14 | /.ipynb_checkpoints/
15 | /.mypy_cache/
16 | /.venv/
17 | /.venv_ccf_sandbox/
18 | /.venv_ccf_sandbox/
19 | /.vscode/
20 | /3rdparty/
21 | /Dockerfile.*
22 | /LICENSE
23 | /Makefile
24 | /analysis_files
25 | /bench/
26 | /benchmark/
27 | /bin/
28 | /build/
29 | /certs-config/
30 | /certs/
31 | /docs
32 | /node_modules/
33 | /patches
34 | /plots/
35 | /scripts/
36 | /scripts/env/
37 | /workspace/
38 | requirements.txt
39 | *.zip
40 | *.bak
41 |
--------------------------------------------------------------------------------
/.github/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | ## Code of Conduct
2 |
3 | This project has adopted the [Microsoft Open Source Code of Conduct](https://opensource.microsoft.com/codeofconduct/).
4 | For more information see the [Code of Conduct FAQ](https://opensource.microsoft.com/codeofconduct/faq/)
5 | or contact [opencode@microsoft.com](mailto:opencode@microsoft.com) with any additional questions or comments.
6 |
--------------------------------------------------------------------------------
/.github/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | ## Contributing
2 |
3 | This project welcomes contributions and suggestions. Most contributions require you to
4 | agree to a Contributor License Agreement (CLA) declaring that you have the right to,
5 | and actually do, grant us the rights to use your contribution. For details, visit
6 | https://cla.microsoft.com.
7 |
8 | When you submit a pull request, a CLA-bot will automatically determine whether you need
9 | to provide a CLA and decorate the PR appropriately (e.g., label, comment). Simply follow the instructions provided by the bot. You will only need to do this once across all repositories using our CLA.
10 |
11 | Note that we only accept pull requests from forks so please fork this repository before making any changes. You should contribute your changes on a branch on that fork and create a pull request on the [microsoft/LSKV repository](https://github.com/microsoft/LSKV/compare) from there.
12 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ""
5 | labels: bug
6 | assignees: ""
7 | ---
8 |
9 | **Describe the bug**
10 | A clear and concise description of what the bug is.
11 |
12 | **To Reproduce**
13 | Steps to reproduce the behavior:
14 |
15 | 1. Go to '...'
16 | 2. Click on '....'
17 | 3. Scroll down to '....'
18 | 4. See error
19 |
20 | **Expected behavior**
21 | A clear and concise description of what you expected to happen.
22 |
23 | **Additional context**
24 | Add any other context about the problem here.
25 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ""
5 | labels: enhancement
6 | assignees: ""
7 | ---
8 |
9 | **Is your feature request related to a problem? Please describe.**
10 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
11 |
12 | **Describe the solution you'd like**
13 | A clear and concise description of what you want to happen.
14 |
15 | **Describe alternatives you've considered**
16 | A clear and concise description of any alternative solutions or features you've considered.
17 |
18 | **Additional context**
19 | Add any other context about the feature request here.
20 |
--------------------------------------------------------------------------------
/.github/workflows/benchmark.yml:
--------------------------------------------------------------------------------
1 | name: "Perf testing"
2 |
3 | on:
4 | pull_request:
5 | branches: [main]
6 |
7 | concurrency:
8 | group: ${{ github.workflow }}-${{ github.ref }}
9 | cancel-in-progress: true
10 |
11 | jobs:
12 | benchmark:
13 | runs-on: ubuntu-20.04
14 | container: mcr.microsoft.com/ccf/app/dev:4.0.7-virtual
15 |
16 | steps:
17 | - name: Checkout repository
18 | uses: actions/checkout@v3
19 | with:
20 | submodules: true
21 | fetch-depth: 0
22 |
23 | - name: Install Go
24 | run: |
25 | curl -L -o go1.19.1.linux-amd64.tar.gz https://go.dev/dl/go1.19.1.linux-amd64.tar.gz
26 | mkdir -p ~/.local/
27 | tar -C ~/.local/ -xzf go1.19.1.linux-amd64.tar.gz
28 |
29 | - name: Get etcd
30 | run: make bin/etcd
31 |
32 | - name: Install gh cli
33 | run: |
34 | type -p curl >/dev/null || sudo apt install curl -y
35 | curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
36 | sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
37 | echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
38 | sudo apt update
39 | sudo apt install gh -y
40 |
41 | - name: Build benchmark
42 | run: export PATH=$HOME/.local/go/bin:$PATH && make bin/benchmark
43 |
44 | - name: Build go-ycsb
45 | run: export PATH=$HOME/.local/go/bin:$PATH && make bin/go-ycsb
46 |
47 | - name: Build k6
48 | run: export PATH=$HOME/.local/go/bin:$PATH && make bin/k6
49 |
50 | - name: Install venv
51 | run: sudo apt install -y python3.8-venv
52 |
53 | - name: Install pip
54 | run: sudo apt install -y python3-pip
55 |
56 | - name: Upgrade pip
57 | run: pip3 install --upgrade pip
58 |
59 | - name: Make .venv
60 | run: make .venv
61 |
62 | - name: Run benchmark
63 | run: export PATH=$HOME/.local/go/bin:$PATH && . .venv/bin/activate && python3 benchmark/ci.py
64 |
65 | # - name: Plot results
66 | # run: make execute-notebook
67 |
68 | # - name: Mark safe directories
69 | # run: git config --global --add safe.directory $(pwd)
70 |
71 | # - name: Set up git author
72 | # run: |
73 | # git config --global user.email "62645686+ccf-bot@users.noreply.github.com"
74 | # git config --global user.name "ccf-bot"
75 |
76 | # - name: Commit and push plots
77 | # run: |
78 | # set -x
79 | # git checkout -- benchmark
80 | # git checkout ci-plots
81 | # git pull origin ci-plots
82 | # mkdir ${{ github.sha }}
83 | # mv plots/* ${{ github.sha }}/.
84 | # rm -f ${{ github.sha }}/**/*.svg
85 | # git add ${{ github.sha }}/**/*.jpg
86 | # git commit -m "add plots for ${{ github.sha }}"
87 | # git push origin ci-plots
88 |
89 | # - name: Set up comment file
90 | # run: |
91 | # repo_url=${{ github.server_url }}/${{ github.repository }}
92 | # echo "## Plots from benchmarking at ${{ github.sha }}" >> comment.md
93 | # echo >> comment.md
94 | # echo "> Available from [here](${repo_url}/tree/ci-plots/${{ github.sha }})" >> comment.md
95 | # echo >> comment.md
96 | # for dir in ${{ github.sha }}/*; do
97 | # for f in $dir/*; do
98 | # short="${f#*/}"
99 | # echo "" >> comment.md
100 | #           echo "$short" >> comment.md
101 | #           echo "" >> comment.md
102 | # echo " " >> comment.md
103 | # done
104 | # done
105 |
106 | # - name: Upload plots to PR
107 | # run: |
108 | # gh pr comment ${{ github.event.number }} --edit-last --body-file comment.md || \
109 | # gh pr comment ${{ github.event.number }} --body-file comment.md
110 | # env:
111 | # GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
112 |
113 | # - name: Upload plots as artifact
114 | # uses: actions/upload-artifact@v3
115 | # with:
116 | # name: plots
117 | # path: ${{ github.sha }}/
118 |
--------------------------------------------------------------------------------
/.github/workflows/ci.yml:
--------------------------------------------------------------------------------
1 | name: "CI"
2 |
3 | on:
4 | push:
5 | branches: [main]
6 | pull_request:
7 |
8 | concurrency:
9 | group: ${{ github.workflow }}-${{ github.ref }}
10 | cancel-in-progress: true
11 |
12 | env:
13 | DOCKER_BUILDKIT: 1 # https://docs.docker.com/develop/develop-images/build_enhancements/
14 |
15 | jobs:
16 | build-and-test:
17 | runs-on: ubuntu-20.04
18 | container: mcr.microsoft.com/ccf/app/dev:4.0.7-virtual
19 |
20 | steps:
21 | - name: Checkout repository
22 | uses: actions/checkout@v3
23 | with:
24 | fetch-depth: 0
25 |
26 | - name: Make sure github workspace is git safe
27 | run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
28 |
29 | - name: Build app
30 | run: make build-virtual
31 |
32 | - name: Install go
33 | run: curl -L -o go1.19.1.linux-amd64.tar.gz https://go.dev/dl/go1.19.1.linux-amd64.tar.gz && tar -C /usr/local -xzf go1.19.1.linux-amd64.tar.gz
34 |
35 | - name: Install pip
36 | run: sudo apt-get update && sudo apt install -y python3-pip
37 |
38 | - name: Upgrade pip
39 | run: pip3 install --upgrade pip
40 |
41 | - name: Test app
42 | run: make tests
43 |
44 | - name: Test app (etcd integration tests)
45 | run: export PATH=$PATH:/usr/local/go/bin && make test-virtual
46 | continue-on-error: true
47 |
48 | build-docker-virtual:
49 | runs-on: ubuntu-20.04
50 |
51 | steps:
52 | - name: Checkout repository
53 | uses: actions/checkout@v3
54 | with:
55 | fetch-depth: 0
56 |
57 | - name: Build container
58 | run: docker build -t lskv:latest-virtual -f Dockerfile.virtual .
59 |
60 | build-docker-sgx:
61 | runs-on: ubuntu-20.04
62 |
63 | steps:
64 | - name: Checkout repository
65 | uses: actions/checkout@v3
66 | with:
67 | fetch-depth: 0
68 |
69 | - name: Build container
70 | run: docker build -t lskv:latest-sgx -f Dockerfile.sgx .
71 |
72 | checks:
73 | runs-on: ubuntu-latest
74 | container: ccfmsrc.azurecr.io/ccf/ci:16-08-2023-1-virtual-clang15
75 |
76 | steps:
77 | - name: Make sure github workspace is git safe
78 | run: git config --global --add safe.directory "$GITHUB_WORKSPACE"
79 |
80 | - name: Checkout repository
81 | uses: actions/checkout@v3
82 | with:
83 | fetch-depth: 0
84 |
85 | - name: Run checks
86 | run: ./scripts/ci-checks.sh
87 |
88 | check-issues:
89 | runs-on: ubuntu-20.04
90 |
91 | steps:
92 | - name: Checkout repository
93 | uses: actions/checkout@v3
94 | with:
95 | fetch-depth: 0
96 |
97 | - name: Check issues
98 | run: ./scripts/check-issues.sh
99 | env:
100 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
101 |
102 | nix:
103 | runs-on: ubuntu-22.04
104 |
105 | steps:
106 | - name: Checkout repository
107 | uses: actions/checkout@v3
108 | with:
109 | fetch-depth: 0
110 |
111 | - name: Install nix
112 | uses: cachix/install-nix-action@v18
113 |
114 | - name: Run ci checks
115 | run: nix build .#ci-check-all -L
116 |
--------------------------------------------------------------------------------
/.github/workflows/pr-assignee.yml:
--------------------------------------------------------------------------------
1 | name: PR Assignee
2 |
3 | on:
4 | pull_request_target:
5 | types:
6 | - opened
7 |
8 | jobs:
9 | assign:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - name: Install gh cli
13 | run: |
14 | type -p curl >/dev/null || sudo apt install curl -y
15 | curl -fsSL https://cli.github.com/packages/githubcli-archive-keyring.gpg | sudo dd of=/usr/share/keyrings/githubcli-archive-keyring.gpg
16 | sudo chmod go+r /usr/share/keyrings/githubcli-archive-keyring.gpg
17 | echo "deb [arch=$(dpkg --print-architecture) signed-by=/usr/share/keyrings/githubcli-archive-keyring.gpg] https://cli.github.com/packages stable main" | sudo tee /etc/apt/sources.list.d/github-cli.list > /dev/null
18 | sudo apt update
19 | sudo apt install gh -y
20 |
21 | - name: Assign PR to author
22 | run: |
23 | gh pr edit ${{ github.event.number }} --add-assignee ${{ github.event.sender.login }} --repo ${{ github.repository }}
24 | env:
25 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
26 |
--------------------------------------------------------------------------------
/.github/workflows/publish-latest.yml:
--------------------------------------------------------------------------------
1 | name: "Publish latest"
2 |
3 | on:
4 | push:
5 | branches: [main]
6 |
7 | concurrency:
8 | group: ${{ github.workflow }}-${{ github.ref }}
9 |
10 | env:
11 | DOCKER_BUILDKIT: 1 # https://docs.docker.com/develop/develop-images/build_enhancements/
12 |
13 | jobs:
14 | publish:
15 | runs-on: ubuntu-20.04
16 |
17 | steps:
18 | - name: Checkout repository
19 | uses: actions/checkout@v3
20 | with:
21 | fetch-depth: 0
22 |
23 | - name: Build containers
24 | run: |
25 | docker build -t lskv:latest-virtual -f Dockerfile.virtual .
26 | docker build -t lskv:latest-sgx -f Dockerfile.sgx .
27 |
28 | - name: Copy files out of images
29 | run: |
30 | docker create --name lskv-virtual lskv:latest-virtual
31 | docker cp lskv-virtual:/app/liblskv.virtual.so liblskv.virtual.so
32 | docker rm lskv-virtual
33 |
34 | docker create --name lskv-sgx lskv:latest-sgx
35 | docker cp lskv-sgx:/app/liblskv.enclave.so.signed liblskv.enclave.so.signed
36 | docker rm lskv-sgx
37 |
38 | - name: Tag latest-main
39 | run: |
40 | git tag latest-main
41 | git push --force origin latest-main
42 |
43 | - name: Publish latest release
44 | uses: softprops/action-gh-release@v1
45 | with:
46 | name: Latest main
47 | tag_name: latest-main
48 | prerelease: true
49 | files: |
50 | liblskv.virtual.so
51 | liblskv.enclave.so.signed
52 |
--------------------------------------------------------------------------------
/.github/workflows/publish-release.yml:
--------------------------------------------------------------------------------
1 | name: "Publish release"
2 |
3 | on:
4 | push:
5 | tags:
6 | - "v[0-9]+.[0-9]+.[0-9]+*"
7 |
8 | env:
9 | DOCKER_BUILDKIT: 1 # https://docs.docker.com/develop/develop-images/build_enhancements/
10 |
11 | jobs:
12 | publish:
13 | runs-on: ubuntu-20.04
14 |
15 | steps:
16 | - name: Checkout repository
17 | uses: actions/checkout@v3
18 | with:
19 | fetch-depth: 0
20 |
21 | - name: Build containers
22 | run: |
23 |           docker build -t lskv:${{ github.ref_name }}-virtual -f Dockerfile.virtual .
24 |           docker build -t lskv:${{ github.ref_name }}-sgx -f Dockerfile.sgx .
25 |
26 | - name: Copy files out of images
27 | run: |
28 |           docker create --name lskv-virtual lskv:${{ github.ref_name }}-virtual
29 | docker cp lskv-virtual:/app/liblskv.virtual.so liblskv.virtual.so
30 | docker rm lskv-virtual
31 |
32 |           docker create --name lskv-sgx lskv:${{ github.ref_name }}-sgx
33 | docker cp lskv-sgx:/app/liblskv.enclave.so.signed liblskv.enclave.so.signed
34 | docker rm lskv-sgx
35 |
36 |       - name: Publish ${{ github.ref_name }} release
37 | uses: softprops/action-gh-release@v1
38 | with:
39 |           name: ${{ github.ref_name }}
40 |           tag_name: ${{ github.ref_name }}
41 | files: |
42 | liblskv.virtual.so
43 | liblskv.enclave.so.signed
44 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | build/
2 | *.deb
3 | /.venv_ccf_sandbox/
4 | workspace/
5 | /.cache/
6 | .venv_ccf_sandbox/
7 |
8 | /scripts/env/
9 | node_modules/
10 | bin/
11 | .vscode/
12 |
13 | /bench/
14 | /*.etcd/
15 |
16 | /.venv/
17 | /certs/
18 |
19 | .ipynb_checkpoints/
20 | /plots/
21 |
22 | *.pyc
23 | *.egg-info
24 |
25 | *.out
26 | *.err
27 |
28 | *_pb2.py
29 | *_pb2.pyi
30 |
31 | /docker-workspace/
32 |
33 | /*.parquet
34 |
35 | /.direnv/
36 | /.envrc
37 |
38 | /result
39 |
40 | /*.parquet
41 |
42 | *.pub
43 | hosts
44 |
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
1 | [submodule "3rdparty/etcd"]
2 | path = 3rdparty/etcd
3 | url = https://github.com/etcd-io/etcd.git
4 | [submodule "3rdparty/go-ycsb"]
5 | path = 3rdparty/go-ycsb
6 | url = https://github.com/pingcap/go-ycsb.git
7 | [submodule "3rdparty/k6"]
8 | path = 3rdparty/k6
9 | url = https://github.com/grafana/k6.git
10 |
--------------------------------------------------------------------------------
/.rsync-filter:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | - /workspace/
4 | - /.direnv/
5 | - /docker-workspace/
6 | - /plots/
7 | - /.git/
8 | - /.venv/
9 | - /.venv_ccf_sandbox/
10 | - /scripts/env/
11 | - /.cache/
12 | - /.mypy_cache/
13 | - /*.deb
14 | - /result/
15 | - /bench.*/
16 |
--------------------------------------------------------------------------------
/BENCHMARKING.md:
--------------------------------------------------------------------------------
1 | # Benchmarking LSKV
2 |
3 | ## Distributed benchmarking
4 |
5 | ### Requirements
6 |
7 | The distributed benchmarks assume a set of machines running Ubuntu 20.04 is available.
8 | Their IP addresses should be listed in a `hosts` file, one per line.
9 | The first node in this file is the _leader_ node and is responsible for launching the benchmarks.
10 |
11 | If using a VM scale set in Azure, the following will write the IP addresses to the `hosts` file for you, provided the instances have public IP addresses:
12 |
13 | ```sh
14 | az vmss list-instance-public-ips --name <vmss-name> --resource-group <resource-group> | jq -r '.[].ipAddress' | tee hosts
15 | ```
16 |
17 | For VMs with private IP addresses, run this instead from the jumpbox you'll be running the commands from:
18 |
19 | ```sh
20 | az vmss nic list --vmss-name <vmss-name> --resource-group <resource-group> | jq -r '.[].ipConfigurations[].privateIpAddress' | tee hosts
21 | ```
22 |
23 | After provisioning the nodes, they need to be set up with the correct dependencies:
24 |
25 | ```sh
26 | ansible-playbook -i hosts benchmark/distributed/setup_nodes.yaml -e @benchmark/distributed/values.yaml
27 | ```
28 |
29 | Then, to run the benchmarks, ssh onto the leader node (`ssh <user>@$(head -n 1 hosts)`), `cd /tmp/lskv`, and run things from there (e.g. `. .venv/bin/activate && python3 benchmark/distributed.py`), as sketched below.
30 |
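A minimal end-to-end session on the leader node (a sketch; `<user>` is the `vm_user` configured in `benchmark/distributed/values.yaml`):

```sh
ssh <user>@$(head -n 1 hosts)
cd /tmp/lskv
. .venv/bin/activate
python3 benchmark/distributed.py
```
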
31 | ## Receipt verification
32 |
33 | The receipt verification benchmark (`benchmark/receipt_verify.py`) uses a hard-coded receipt, so it can be run standalone without a running datastore.
34 | It can be run with `python benchmark/receipt_verify.py`.
35 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 |
3 | All notable changes to this project will be documented in this file.
4 |
5 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
6 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
7 |
8 | ## [Unreleased]
9 |
10 | ## [0.0.1]
11 |
12 | Initial release.
13 |
14 | [unreleased]: https://github.com/microsoft/lskv/compare/v0.0.1...HEAD
15 | [0.0.1]: https://github.com/microsoft/lskv/releases/tag/v0.0.1
16 |
--------------------------------------------------------------------------------
/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the
2 | # MIT License.
3 |
4 | cmake_minimum_required(VERSION 3.16)
5 |
6 | include(${CMAKE_CURRENT_SOURCE_DIR}/cmake/version.cmake)
7 |
8 | project(
9 | lskv
10 | LANGUAGES C CXX
11 | VERSION ${LSKV_VERSION_SHORT})
12 |
13 | option(
14 | COMPILE_TARGET
15 | "Compile target to build for, one of [virtual;sgx;snp], defaults to virtual"
16 | virtual)
17 |
18 | set(CCF "ccf_${COMPILE_TARGET}")
19 |
20 | option(CCF_UNSAFE "build with unsafe options" OFF)
21 | if(${CCF_UNSAFE})
22 | if(${COMPILE_TARGET} STREQUAL "sgx")
23 | message(WARNING "Building with unsafe options")
24 | set(CCF "${CCF}_unsafe")
25 | else()
26 | message(
27 |       FATAL_ERROR "CCF_UNSAFE is not supported for target ${COMPILE_TARGET}")
28 | endif()
29 | endif()
30 |
31 | if(NOT TARGET ${CCF})
32 | find_package(${CCF} REQUIRED)
33 | endif()
34 |
35 | add_subdirectory(proto)
36 |
37 | option(PUBLIC_LEASES
38 | "If enabled, leases are recorded in plaintext in the ledger (insecure!)"
39 | OFF)
40 |
41 | option(VERBOSE_LOGGING "enable verbose logging" OFF)
42 |
43 | add_compile_definitions(LSKV_VERSION="${LSKV_VERSION}")
44 | add_compile_definitions(CCF_LOGGER_NO_DEPRECATE)
45 | # work around an issue in outdated protobuf from CCF
46 | # https://github.com/protocolbuffers/protobuf/issues/10108
47 | add_compile_definitions(GOOGLE_PROTOBUF_INTERNAL_DONATE_STEAL_INLINE=0)
48 |
49 | add_ccf_app(
50 | lskv
51 | SRCS
52 | src/app/app.cpp
53 | src/app/kvstore.cpp
54 | src/app/index.cpp
55 | src/app/leases.cpp
56 | INCLUDE_DIRS
57 | "${CMAKE_BINARY_DIR}/proto"
58 | "${CCF_DIR}/include/ccf/_private"
59 | LINK_LIBS_ENCLAVE
60 | etcd.enclave
61 | lskvserver.enclave
62 | status.enclave
63 | protobuf.enclave
64 | LINK_LIBS_VIRTUAL
65 | etcd.virtual
66 | lskvserver.virtual
67 | status.virtual
68 | protobuf.virtual
69 | INSTALL_LIBS
70 | ON)
71 |
72 | if(VERBOSE_LOGGING)
73 | message(STATUS "Using verbose logging")
74 | add_compile_definitions(VERBOSE_LOGGING)
75 | else()
76 | message(STATUS "Using terse logging")
77 | endif()
78 |
79 | # Generate an ephemeral signing key
80 | add_custom_command(
81 | OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/signing_key.pem
82 | COMMAND openssl genrsa -out ${CMAKE_CURRENT_BINARY_DIR}/signing_key.pem -3
83 | 3072)
84 | add_custom_target(app_signing_key ALL
85 | DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/signing_key.pem)
86 |
87 | sign_app_library(lskv.enclave ${CMAKE_CURRENT_SOURCE_DIR}/oe_sign.conf
88 | ${CMAKE_CURRENT_BINARY_DIR}/signing_key.pem INSTALL_LIBS ON)
89 |
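90 | # A typical local configure-and-build, mirroring Dockerfile.virtual (a sketch;
91 | # the clang-15 toolchain is an assumption, adjust to your environment):
92 | #   mkdir -p build && cd build
93 | #   CC=$(command -v clang-15) CXX=$(command -v clang++-15) cmake -GNinja -DCOMPILE_TARGET=virtual .. && ninja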
--------------------------------------------------------------------------------
/Dockerfile.ignore:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 |
4 | FROM busybox
5 | WORKDIR /build-context
6 | COPY . .
7 | CMD find .
8 |
--------------------------------------------------------------------------------
/Dockerfile.sgx:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 |
4 | # Build
5 | FROM mcr.microsoft.com/ccf/app/dev:4.0.7-sgx as builder
6 |
7 | COPY . /src/
8 | WORKDIR /build
9 | RUN CC=$(command -v clang-11) CXX=$(command -v clang++-11) cmake -GNinja -DCOMPILE_TARGET=sgx /src && ninja
10 |
11 | # Run
12 | FROM mcr.microsoft.com/ccf/app/run:4.0.7-sgx
13 |
14 | LABEL org.opencontainers.image.source=https://github.com/microsoft/lskv
15 | LABEL org.opencontainers.image.description="LSKV SGX node"
16 | LABEL org.opencontainers.image.licenses=MIT
17 |
18 | COPY --from=builder /build/liblskv.enclave.so.signed /app/
19 | WORKDIR /app
20 | RUN mkdir -p /app/certs # somewhere for the cchost to place the service_cert.pem
21 |
22 | CMD ["/usr/bin/cchost", "--config", "/app/config/config.json"]
23 |
--------------------------------------------------------------------------------
/Dockerfile.virtual:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 |
4 | # Build
5 | FROM mcr.microsoft.com/ccf/app/dev:4.0.7-virtual as builder
6 |
7 | COPY . /src/
8 | WORKDIR /build
9 | RUN CC=$(command -v clang-15) CXX=$(command -v clang++-15) cmake -GNinja -DCOMPILE_TARGET=virtual /src && ninja
10 |
11 | # Run
12 | FROM mcr.microsoft.com/ccf/app/run:4.0.7-virtual
13 |
14 | LABEL org.opencontainers.image.source=https://github.com/microsoft/lskv
15 | LABEL org.opencontainers.image.description="LSKV virtual node"
16 | LABEL org.opencontainers.image.licenses=MIT
17 |
18 | COPY --from=builder /build/liblskv.virtual.so /app/
19 | WORKDIR /app
20 | RUN mkdir -p /app/certs # somewhere for the cchost to place the service_cert.pem
21 |
22 | CMD ["/usr/bin/cchost", "--config", "/app/config/config.json"]
23 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) Microsoft Corporation. All rights reserved.
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | ## Security
2 |
3 | Microsoft takes the security of our software products and services seriously, which includes all source code repositories managed through our GitHub organizations, which include [Microsoft](https://github.com/Microsoft), [Azure](https://github.com/Azure), [DotNet](https://github.com/dotnet), [AspNet](https://github.com/aspnet), [Xamarin](https://github.com/xamarin), and [our GitHub organizations](https://opensource.microsoft.com/).
4 |
5 | If you believe you have found a security vulnerability in any Microsoft-owned repository that meets [Microsoft's definition of a security vulnerability](https://aka.ms/opensource/security/definition), please report it to us as described below.
6 |
7 | ## Reporting Security Issues
8 |
9 | **Please do not report security vulnerabilities through public GitHub issues.**
10 |
11 | Instead, please report them to the Microsoft Security Response Center (MSRC) at [https://msrc.microsoft.com/create-report](https://msrc.microsoft.com/create-report).
12 |
13 | If you prefer to submit without logging in, send email to [secure@microsoft.com](mailto:secure@microsoft.com). If possible, encrypt your message with our PGP key; please download it from the [Microsoft Security Response Center PGP Key page](https://www.microsoft.com/en-us/msrc/pgp-key-msrc).
14 |
15 | You should receive a response within 24 hours. If for some reason you do not, please follow up via email to ensure we received your original message. Additional information can be found at [microsoft.com/msrc](https://www.microsoft.com/msrc).
16 |
17 | Please include the requested information listed below (as much as you can provide) to help us better understand the nature and scope of the possible issue:
18 |
19 | - Type of issue (e.g. buffer overflow, SQL injection, cross-site scripting, etc.)
20 | - Full paths of source file(s) related to the manifestation of the issue
21 | - The location of the affected source code (tag/branch/commit or direct URL)
22 | - Any special configuration required to reproduce the issue
23 | - Step-by-step instructions to reproduce the issue
24 | - Proof-of-concept or exploit code (if possible)
25 | - Impact of the issue, including how an attacker might exploit the issue
26 |
27 | This information will help us triage your report more quickly.
28 |
29 | If you are reporting for a bug bounty, more complete reports can contribute to a higher bounty award. Please visit our [Microsoft Bug Bounty Program](https://microsoft.com/msrc/bounty) page for more details about our active programs.
30 |
31 | ## Preferred Languages
32 |
33 | We prefer all communications to be in English.
34 |
35 | ## Policy
36 |
37 | Microsoft follows the principle of [Coordinated Vulnerability Disclosure](https://www.microsoft.com/en-us/msrc/cvd).
38 |
--------------------------------------------------------------------------------
/benchmark/all.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | """
6 | Run all configurations of the benchmarks
7 | """
8 |
9 | import argparse
10 | from typing import List
11 |
12 | import common
13 | import etcd
14 | import perf_system
15 | import ycsb
16 | import k6
17 |
18 |
19 | def all_common_configurations(args: argparse.Namespace):
20 | """
21 | Fill in the args for all common configurations.
22 | """
23 | args.nodes = ["local://127.0.0.1:8000"]
24 | args.worker_threads = [1, 2, 4]
25 | args.enclave = ["virtual", "sgx"]
26 | args.http2 = True
27 | args.etcd = True
28 |
29 | args.sig_tx_intervals = [5000, 10000, 20000]
30 | args.sig_ms_intervals = [100, 1000, 10000]
31 | args.ledger_chunk_bytes = ["20KB", "100KB", "1MB"]
32 | args.snapshot_tx_intervals = [10, 100, 1000]
33 |
34 |
35 | def all_etcd_configurations(args: argparse.Namespace) -> List[etcd.EtcdConfig]:
36 | """
37 | Set args for all etcd configurations.
38 | """
39 | args.bench_args = [
40 | ["put"],
41 | ["range", "0000", "1000"],
42 | ["txn-put"],
43 | ["txn-mixed", "txn-mixed-key"],
44 | ]
45 | args.clients = [1, 10, 100]
46 | args.connections = [1, 10, 100]
47 | args.rate = [100, 200, 300]
48 |
49 | all_common_configurations(args)
50 | return etcd.make_configurations(args)
51 |
52 |
53 | def all_ycsb_configurations(args: argparse.Namespace) -> List[ycsb.YCSBConfig]:
54 | """
55 | Set args for all ycsb configurations.
56 | """
57 | args.workloads = ["a", "b", "c", "d", "e", "f"]
58 | args.rate = [100, 200, 300]
59 | args.threads = [1, 2, 4]
60 |
61 | all_common_configurations(args)
62 | return ycsb.make_configurations(args)
63 |
64 |
65 | def all_perf_configurations(args: argparse.Namespace) -> List[perf_system.PerfConfig]:
66 | """
67 | Set args for all perf configurations.
68 | """
69 | args.http1 = True
70 | args.http2 = True
71 |
72 | all_common_configurations(args)
73 | return perf_system.make_configurations(args)
74 |
75 |
76 | def all_k6_configurations(args: argparse.Namespace) -> List[k6.K6Config]:
77 | """
78 | Set args for all k6 configurations.
79 | """
80 | args.http1 = True
81 | args.http2 = True
82 | args.etcd = False
83 |
84 | all_common_configurations(args)
85 | return k6.make_configurations(args)
86 |
87 |
88 | if __name__ == "__main__":
89 | common.main(
90 | "etcd", etcd.get_arguments, all_etcd_configurations, etcd.execute_config
91 | )
92 | common.main(
93 | "ycsb", ycsb.get_arguments, all_ycsb_configurations, ycsb.execute_config
94 | )
95 | common.main(
96 | "perf",
97 | perf_system.get_arguments,
98 | all_perf_configurations,
99 | perf_system.execute_config,
100 | )
101 | common.main(
102 | "k6",
103 | k6.get_arguments,
104 | all_k6_configurations,
105 | k6.execute_config,
106 | )
107 |
--------------------------------------------------------------------------------
/benchmark/certs.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | """
6 | Generate certificates for use with etcd clusters.
7 | """
8 |
9 | import argparse
10 | import json
11 | import os
12 | import shutil
13 | import subprocess
14 | import sys
15 | from typing import Dict, Any
16 |
17 | from loguru import logger
18 |
19 | CA_CONFIG = {
20 | "signing": {
21 | "default": {"expiry": "8760h"},
22 | "profiles": {
23 | "server": {
24 | "expiry": "8760h",
25 | "usages": ["signing", "key encipherment", "server auth", "client auth"],
26 | },
27 | "client": {
28 | "expiry": "8760h",
29 | "usages": ["signing", "key encipherment", "client auth"],
30 | },
31 | "peer": {
32 | "expiry": "8760h",
33 | "usages": ["signing", "key encipherment", "server auth", "client auth"],
34 | },
35 | },
36 | }
37 | }
38 |
39 | CA_CSR = {
40 | "CN": "auto-ca",
41 | "hosts": ["127.0.0.1"],
42 | "key": {"algo": "ecdsa", "size": 384},
43 | "names": [{"C": "UK", "L": "London", "ST": "London"}],
44 | }
45 |
46 | SERVER_CSR = {
47 | "CN": "etcd",
48 | "hosts": ["127.0.0.1"],
49 | "key": {"algo": "ecdsa", "size": 384},
50 | "names": [{"C": "UK", "L": "London", "ST": "London"}],
51 | }
52 |
53 | PEER_CSR = {
54 | "CN": "node0",
55 | "hosts": ["127.0.0.1"],
56 | "key": {"algo": "ecdsa", "size": 384},
57 | "names": [{"C": "UK", "L": "London", "ST": "London"}],
58 | }
59 |
60 | CLIENT_CSR = {
61 | "CN": "client",
62 | "hosts": [""],
63 | "key": {"algo": "ecdsa", "size": 384},
64 | "names": [{"C": "UK", "L": "London", "ST": "London"}],
65 | }
66 |
67 |
68 | def make_ca(certs: str, cfssl: str, cfssljson: str):
69 | """
70 | Make a CA certificate with cfssl.
71 | """
72 | logger.info("Making CA certificate")
73 | with open(
74 | os.path.join(certs, "ca-config.json"), "w", encoding="utf-8"
75 | ) as config_file:
76 | logger.info("Writing CA config to {}", config_file.name)
77 | config_file.write(json.dumps(CA_CONFIG))
78 | with open(os.path.join(certs, "ca-csr.json"), "w", encoding="utf-8") as csr_file:
79 | logger.info("Writing CA csr to {}", csr_file.name)
80 | csr_file.write(json.dumps(CA_CSR))
81 | logger.info("Running cfssl gencert")
82 | subprocess.run(
83 | f"{cfssl} gencert -initca ca-csr.json | {cfssljson} -bare ca -",
84 | input=json.dumps(CA_CSR).encode("utf-8"),
85 | cwd=certs,
86 | shell=True,
87 | check=True,
88 | )
89 |
90 |
91 | # pylint: disable=too-many-arguments
92 | def make_certs(
93 | certs: str,
94 | cfssl: str,
95 | cfssljson: str,
96 | profile: str,
97 | name: str,
98 | csr_data: Dict[str, Any],
99 | ):
100 | """
101 | Make certs with cfssl
102 | """
103 | logger.info("Making certificates for {}", name)
104 | with open(
105 | os.path.join(certs, f"{name}.json"), "w", encoding="utf-8"
106 | ) as config_file:
107 | logger.info("Writing csr to {}", config_file.name)
108 | config_file.write(json.dumps(csr_data))
109 | logger.info("Running cfssl gencert")
110 | subprocess.run(
111 | f"{cfssl} gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json "
112 | f"-profile={profile} {name}.json | {cfssljson} -bare {name} -",
113 | input=json.dumps(CA_CSR).encode("utf-8"),
114 | cwd=certs,
115 | shell=True,
116 | check=True,
117 | )
118 |
119 |
120 | def main():
121 | """
122 | Main entry point for generating certificates.
123 | """
124 | parser = argparse.ArgumentParser()
125 | parser.add_argument(
126 | "--nodes", type=int, default=3, help="Number of nodes to generate certs for"
127 | )
128 | parser.add_argument(
129 | "--certs-dir", type=str, default="certs", help="Directory to store certs"
130 | )
131 | parser.add_argument(
132 | "--cfssl", type=str, default="bin/cfssl", help="Path to cfssl binary"
133 | )
134 | parser.add_argument(
135 | "--cfssljson",
136 | type=str,
137 | default="bin/cfssljson",
138 | help="Path to cfssljson binary",
139 | )
140 | args = parser.parse_args()
141 |
142 | logger.info("Generating certificates with arguments: {}", args)
143 |
144 | certs_dir = args.certs_dir
145 | logger.info("Removing certs dir: {}", certs_dir)
146 | shutil.rmtree(certs_dir, ignore_errors=True)
147 | logger.info("Creating certs dir: {}", certs_dir)
148 | os.makedirs(certs_dir)
149 |
150 | cfssl = os.path.abspath(args.cfssl)
151 | if os.path.exists(cfssl):
152 | logger.info("Found cfssl: {}", cfssl)
153 | else:
154 | logger.info("Failed to find cfssl: {}", cfssl)
155 | sys.exit(1)
156 |
157 | cfssljson = os.path.abspath(args.cfssljson)
158 | if os.path.exists(cfssljson):
159 | logger.info("Found cfssljson: {}", cfssljson)
160 | else:
161 | logger.info("Failed to find cfssljson: {}", cfssljson)
162 | sys.exit(1)
163 |
164 | make_ca(certs_dir, cfssl, cfssljson)
165 | make_certs(certs_dir, cfssl, cfssljson, "server", "server", SERVER_CSR)
166 | num_nodes = args.nodes
167 | for i in range(num_nodes):
168 |         peer_csr = json.loads(json.dumps(PEER_CSR))  # deep copy so the hostnames appended below don't accumulate across nodes
169 | name = f"node{i}"
170 | peer_csr["CN"] = name
171 | peer_csr["hosts"].append(name)
172 | make_certs(certs_dir, cfssl, cfssljson, "peer", name, peer_csr)
173 | make_certs(certs_dir, cfssl, cfssljson, "client", "client", CLIENT_CSR)
174 |
175 |
176 | if __name__ == "__main__":
177 | main()
178 |
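179 | # Example invocation (a sketch; these are the argparse defaults above):
180 | #   python3 benchmark/certs.py --nodes 3 --certs-dir certs --cfssl bin/cfssl --cfssljson bin/cfssljson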
--------------------------------------------------------------------------------
/benchmark/ci.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | """
6 | Run the CI (pull request) configurations of the benchmarks
7 | """
8 |
9 | import argparse
10 | from typing import List
11 |
12 | # pylint: disable=duplicate-code
13 | from loguru import logger
14 | import common
15 | import etcd
16 | import k6
17 | import perf_system as perf
18 | import ycsb
19 |
20 |
21 | def common_configurations(args: argparse.Namespace):
22 | """
23 | Fill in the args for all common configurations.
24 | """
25 | args.worker_threads = [0]
26 | args.etcd = True
27 | args.enclave = ["virtual"]
28 | args.http2 = True
29 | args.nodes = ["local://127.0.0.1:8000"]
30 |
31 |
32 | def etcd_configurations(args: argparse.Namespace) -> List[etcd.EtcdConfig]:
33 | """
34 | Set args for all etcd configurations.
35 | """
36 | args.bench_args = [["put"]]
37 | args.clients = [10]
38 | args.connections = [10]
39 | args.rate = [200]
40 |
41 | common_configurations(args)
42 |
43 | return etcd.make_configurations(args)
44 |
45 |
46 | def ycsb_configurations(args: argparse.Namespace) -> List[ycsb.YCSBConfig]:
47 | """
48 | Set args for all ycsb configurations.
49 | """
50 | args.workloads = ["a"]
51 | args.rate = [200]
52 | args.threads = [1]
53 |
54 | common_configurations(args)
55 |
56 | return ycsb.make_configurations(args)
57 |
58 |
59 | def perf_configurations(args: argparse.Namespace) -> List[perf.PerfConfig]:
60 | """
61 | Set args for all perf configurations.
62 | """
63 | common_configurations(args)
64 | args.http1 = True
65 | args.http2 = False
66 | args.etcd = False
67 | args.workloads = ["benchmark/piccolo-requests-http1.parquet"]
68 | args.max_inflight_requests = [2]
69 |
70 | return perf.make_configurations(args)
71 |
72 |
73 | def k6_configurations(args: argparse.Namespace) -> List[k6.K6Config]:
74 | """
75 | Set args for all k6 configurations.
76 | """
77 | common_configurations(args)
78 | args.http1 = True
79 | args.http2 = True
80 | args.etcd = False
81 | args.rate = [200]
82 | args.func = [
83 | "put_single",
84 | # "put_single_wait",
85 | # "get_single",
86 | # "get_range",
87 | # "delete_single",
88 | # "delete_single_wait",
89 | # "mixed_single",
90 | # "get_receipt",
91 | ]
92 |
93 | return k6.make_configurations(args)
94 |
95 |
96 | if __name__ == "__main__":
97 | logger.info("Running etcd")
98 | common.main("etcd", etcd.get_arguments, etcd_configurations, etcd.execute_config)
99 | logger.info("Running ycsb")
100 | common.main("ycsb", ycsb.get_arguments, ycsb_configurations, ycsb.execute_config)
101 | logger.info("Running perf")
102 | common.main("perf", perf.get_arguments, perf_configurations, perf.execute_config)
103 | logger.info("Running k6")
104 | common.main("k6", k6.get_arguments, k6_configurations, k6.execute_config)
105 |
--------------------------------------------------------------------------------
/benchmark/commit-latency-analysis.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import json\n",
10 | "import os\n",
11 | "import matplotlib.pyplot as plt"
12 | ]
13 | },
14 | {
15 | "cell_type": "code",
16 | "execution_count": null,
17 | "metadata": {},
18 | "outputs": [],
19 | "source": [
20 | "bench_dir = \"../bench/k6\"\n",
21 | "\n",
22 | "for d in os.listdir(bench_dir):\n",
23 | " log_file = os.path.join(bench_dir, d, \"console.log\")\n",
24 | " config_file = os.path.join(bench_dir, d, \"config.json\")\n",
25 | " with open(config_file, \"r\") as f:\n",
26 | " config = json.loads(f.read())\n",
27 | "\n",
28 | " if \"receipt\" in config[\"func\"]:\n",
29 | " continue\n",
30 | " if config[\"enclave\"] != \"sgx\":\n",
31 | " continue\n",
32 | " if config[\"http_version\"] != 2:\n",
33 | " continue\n",
34 | " if len(config[\"nodes\"]) != 1:\n",
35 | " continue\n",
36 | " if config[\"content_type\"] != \"grpc\":\n",
37 | " continue\n",
38 | "\n",
39 | " print(config)\n",
40 | "\n",
41 | " print(log_file)\n",
42 | " with open(log_file, \"r\") as f:\n",
43 | " times = []\n",
44 | " cterms = []\n",
45 | " crevs = []\n",
46 | " terms = []\n",
47 | " revs = []\n",
48 | "\n",
49 | " for line in f.readlines():\n",
50 | " line = line.strip()\n",
51 | " line = json.loads(line)\n",
52 | " time = line[\"time\"]\n",
53 | " method = line[\"method\"]\n",
54 | " if \"header\" not in line[\"res\"]:\n",
55 | " continue\n",
56 | " cterm = int(line[\"res\"][\"header\"][\"committedRaftTerm\"])\n",
57 | " crev = int(line[\"res\"][\"header\"][\"committedRevision\"])\n",
58 | " term = int(line[\"res\"][\"header\"][\"raftTerm\"])\n",
59 | " rev = int(line[\"res\"][\"header\"][\"revision\"])\n",
60 | " # print(method, cterm, crev, term, rev)\n",
61 | " times.append(time)\n",
62 | " cterms.append(cterm)\n",
63 | " crevs.append(crev)\n",
64 | " terms.append(term)\n",
65 | " revs.append(rev)\n",
66 | "\n",
67 | " start = min(times)\n",
68 | " times = [t - start for t in times]"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": null,
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "fig1 = plt.figure(figsize=(6, 2))\n",
78 | "\n",
79 | "times_s = [t / 1000 for t in times]\n",
80 | "\n",
81 | "ax1 = fig1.add_subplot(111)\n",
82 | "ax1.plot(times_s, revs, label=\"Latest\")\n",
83 | "ax1.plot(times_s, crevs, label=\"Committed\")\n",
84 | "ax1.set_xlabel(\"Time (s)\")\n",
85 | "ax1.set_ylabel(\"Revision\")\n",
86 | "ax1.legend()\n",
87 | "plt.tight_layout()\n",
88 | "\n",
89 | "x = []\n",
90 | "y1 = []\n",
91 | "y2 = []\n",
92 | "\n",
93 | "for t, r, c in zip(times_s, revs, crevs):\n",
94 | " if 30 <= t <= 33:\n",
95 | " x.append(t)\n",
96 | " y1.append(r)\n",
97 | " y2.append(c)\n",
98 | "\n",
99 | "ax2 = plt.axes([0.75, 0.45, 0.2, 0.25])\n",
100 | "ax2.plot(x, y1)\n",
101 | "ax2.plot(x, y2)\n",
102 | "\n",
103 | "plt.savefig(\"../plots/k6/final-commit-latency.pdf\")"
104 | ]
105 | }
106 | ],
107 | "metadata": {
108 | "kernelspec": {
109 | "display_name": "Python 3.8.10 ('.venv': venv)",
110 | "language": "python",
111 | "name": "python3"
112 | },
113 | "language_info": {
114 | "codemirror_mode": {
115 | "name": "ipython",
116 | "version": 3
117 | },
118 | "file_extension": ".py",
119 | "mimetype": "text/x-python",
120 | "name": "python",
121 | "nbconvert_exporter": "python",
122 | "pygments_lexer": "ipython3",
123 | "version": "3.8.10"
124 | },
125 | "vscode": {
126 | "interpreter": {
127 | "hash": "4a93498aa965ac8ed639b230be16e07b1d0996cdf6d66355a89e4f9e95715a96"
128 | }
129 | }
130 | },
131 | "nbformat": 4,
132 | "nbformat_minor": 2
133 | }
134 |
--------------------------------------------------------------------------------
/benchmark/distributed/files/ccf-binary-dir.diff:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | diff --git a/tests/infra/remote.py b/tests/infra/remote.py
4 | index c603920df..8503ba193 100644
5 | --- a/tests/infra/remote.py
6 | +++ b/tests/infra/remote.py
7 | @@ -146,6 +146,7 @@ class SSHRemote(CmdMixin):
8 | common_dir,
9 | env=None,
10 | pid_file=None,
11 | + binary_dir=".",
12 | ):
13 | """
14 | Runs a command on a remote host, through an SSH connection. A temporary
15 |
--------------------------------------------------------------------------------
/benchmark/distributed/get_ips.yaml:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | ---
4 | - hosts: all
5 | remote_user: "{{ vm_user }}"
6 | tasks:
7 | - debug: var=hostvars[inventory_hostname]['ansible_default_ipv4']['address']
8 |
--------------------------------------------------------------------------------
/benchmark/distributed/setup_nodes.yaml:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | ---
4 | - hosts: all
5 | gather_facts: false
6 | tasks:
7 | - name: Accept new ssh fingerprints
8 | shell: ssh-keyscan {{ inventory_hostname }} >> ~/.ssh/known_hosts
9 | delegate_to: localhost
10 |
11 | - hosts: all
12 | remote_user: "{{ vm_user }}"
13 | gather_facts: false
14 | tasks:
15 | - name: Wait for ssh to be up on the nodes
16 | ansible.builtin.wait_for_connection:
17 |
18 | - name: ping
19 | ping:
20 |
21 | - hosts: all[0]
22 | remote_user: "{{ vm_user }}"
23 | gather_facts: false
24 | tasks:
25 | - name: Generate an OpenSSH keypair with the default values (4096 bits, rsa)
26 | community.crypto.openssh_keypair:
27 | path: /home/{{ vm_user }}/.ssh/id_rsa
28 |
29 | - name: Get pub key for the first node
30 | ansible.builtin.fetch:
31 | src: /home/{{ vm_user }}/.ssh/id_rsa.pub
32 | dest: bench_id_rsa.pub
33 | flat: true
34 |
35 | - hosts: all
36 | remote_user: "{{ vm_user }}"
37 | gather_facts: false
38 | tasks:
39 | - name: Set authorized pub key
40 | authorized_key:
41 | state: present
42 | user: "{{ vm_user }}"
43 | key: "{{ lookup('file', 'bench_id_rsa.pub') }}"
44 |
45 | - name: Copy lskv repo over
46 | ansible.posix.synchronize:
47 | src: "{{ playbook_dir }}/../../"
48 | dest: /tmp/lskv
49 | tags: copy
50 |
51 | - name: Clone the CCF repo
52 | ansible.builtin.git:
53 | repo: "https://github.com/microsoft/CCF"
54 | dest: /tmp/CCF
55 | version: ccf-{{ ccf_ver }}
56 |
57 | - name: Run the CCF sgx getting started playbook
58 | shell: ./getting_started/setup_vm/run.sh ./getting_started/setup_vm/app-run.yml --extra-vars 'platform=sgx' --extra-vars 'ccf_ver={{ ccf_ver }}'
59 | args:
60 | chdir: /tmp/CCF
61 |
62 | - name: Run the CCF virtual getting started playbook
63 | shell: ./getting_started/setup_vm/run.sh ./getting_started/setup_vm/app-run.yml --extra-vars 'platform=virtual' --extra-vars 'ccf_ver={{ ccf_ver }}'
64 | args:
65 | chdir: /tmp/CCF
66 |
67 | - name: Patch virtual sandbox infra
68 | ansible.posix.patch:
69 | src: ccf-binary-dir.diff
70 | dest: /opt/ccf_virtual/bin/infra/remote.py
71 | become: true
72 |
73 | - name: Patch sgx sandbox infra
74 | ansible.posix.patch:
75 | src: ccf-binary-dir.diff
76 | dest: /opt/ccf_sgx/bin/infra/remote.py
77 | become: true
78 |
79 | - name: Install apt packages
80 | ansible.builtin.apt:
81 | name:
82 | - make
83 | - python3.8
84 | - python3.8-venv
85 | - gcc
86 | update_cache: true
87 | become: true
88 |
89 | - hosts: all[0]
90 | remote_user: "{{ vm_user }}"
91 | gather_facts: false
92 | tasks:
93 | - name: Install pip for sandbox
94 | ansible.builtin.pip:
95 | name: pip
96 | virtualenv: /tmp/lskv/.venv_ccf_sandbox
97 | virtualenv_command: python3 -m venv
98 |
99 | - name: Install ccf for sandbox
100 | ansible.builtin.pip:
101 | name: ccf=={{ py_ccf_ver }}
102 | virtualenv: /tmp/lskv/.venv_ccf_sandbox
103 | virtualenv_command: python3 -m venv
104 |
105 | - name: Install requirements for sandbox
106 | ansible.builtin.pip:
107 | requirements: /opt/ccf_sgx/bin/requirements.txt
108 | virtualenv: /tmp/lskv/.venv_ccf_sandbox
109 | virtualenv_command: python3 -m venv
110 |
111 | - name: Install requirements for benchmarking
112 | ansible.builtin.pip:
113 | requirements: /tmp/lskv/requirements.txt
114 | virtualenv: /tmp/lskv/.venv
115 | virtualenv_command: python3 -m venv
116 | args:
117 | chdir: /tmp/lskv
118 |
--------------------------------------------------------------------------------
/benchmark/distributed/values.yaml:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | ---
4 | # values for the distributed benchmarking setup
5 | vm_user: "apj39"
6 |
7 | py_ccf_ver: "4.0.7"
8 | ccf_ver: "4.0.7"
9 |
--------------------------------------------------------------------------------
/benchmark/etcd-analysis.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "386f259a",
7 | "metadata": {
8 | "execution": {
9 | "iopub.execute_input": "2022-10-18T15:29:46.082399Z",
10 | "iopub.status.busy": "2022-10-18T15:29:46.081875Z",
11 | "iopub.status.idle": "2022-10-18T15:29:46.697360Z",
12 | "shell.execute_reply": "2022-10-18T15:29:46.696711Z"
13 | }
14 | },
15 | "outputs": [],
16 | "source": [
17 | "import analysis"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": null,
23 | "id": "a76eeb76",
24 | "metadata": {},
25 | "outputs": [],
26 | "source": [
27 | "analyser = analysis.Analyser(\"etcd\")\n",
28 | "all_data = analyser.get_data()\n",
29 | "all_data"
30 | ]
31 | },
32 | {
33 | "cell_type": "code",
34 | "execution_count": null,
35 | "id": "d62e67d7",
36 | "metadata": {
37 | "execution": {
38 | "iopub.execute_input": "2022-10-18T15:29:46.737868Z",
39 | "iopub.status.busy": "2022-10-18T15:29:46.737408Z",
40 | "iopub.status.idle": "2022-10-18T15:29:46.744962Z",
41 | "shell.execute_reply": "2022-10-18T15:29:46.744560Z"
42 | }
43 | },
44 | "outputs": [],
45 | "source": [
46 | "all_data[all_data.latency_ms < 0]"
47 | ]
48 | },
49 | {
50 | "cell_type": "code",
51 | "execution_count": null,
52 | "id": "a2da46f1",
53 | "metadata": {
54 | "execution": {
55 | "iopub.execute_input": "2022-10-18T15:29:46.747087Z",
56 | "iopub.status.busy": "2022-10-18T15:29:46.746780Z",
57 | "iopub.status.idle": "2022-10-18T15:29:46.752023Z",
58 | "shell.execute_reply": "2022-10-18T15:29:46.751616Z"
59 | }
60 | },
61 | "outputs": [],
62 | "source": [
63 | "all_data = all_data[all_data.latency_ms >= 0]"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": null,
69 | "id": "d9d6b524",
70 | "metadata": {
71 | "execution": {
72 | "iopub.execute_input": "2022-10-18T15:29:46.766760Z",
73 | "iopub.status.busy": "2022-10-18T15:29:46.766428Z",
74 | "iopub.status.idle": "2022-10-18T15:29:46.777711Z",
75 | "shell.execute_reply": "2022-10-18T15:29:46.777284Z"
76 | }
77 | },
78 | "outputs": [],
79 | "source": [
80 | "all_data"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": null,
86 | "id": "850cd973",
87 | "metadata": {},
88 | "outputs": [],
89 | "source": [
90 | "ignore_vars = [\n",
91 | " \"ledger_chunk_bytes\",\n",
92 | " \"snapshot_tx_interval\",\n",
93 | " \"sig_ms_interval\",\n",
94 | " \"sig_tx_interval\",\n",
95 | " \"total\",\n",
96 | " \"nodes\",\n",
97 | "]"
98 | ]
99 | },
100 | {
101 | "cell_type": "code",
102 | "execution_count": null,
103 | "id": "9cbbe238",
104 | "metadata": {},
105 | "outputs": [],
106 | "source": [
107 | "# plot_data = all_data.copy(deep=False)\n",
108 | "# plot_data = plot_data[plot_data[\"start_ms\"] > 250]\n",
109 | "# p = analyser.plot_scatter(plot_data, ignore_vars=ignore_vars)"
110 | ]
111 | },
112 | {
113 | "cell_type": "code",
114 | "execution_count": null,
115 | "id": "df402edd",
116 | "metadata": {
117 | "execution": {
118 | "iopub.execute_input": "2022-10-18T15:29:49.262360Z",
119 | "iopub.status.busy": "2022-10-18T15:29:49.261986Z",
120 | "iopub.status.idle": "2022-10-18T15:29:49.727868Z",
121 | "shell.execute_reply": "2022-10-18T15:29:49.727422Z"
122 | }
123 | },
124 | "outputs": [],
125 | "source": [
126 | "plot_data = all_data.copy(deep=False)\n",
127 | "plot_data = plot_data[plot_data[\"start_ms\"] > 250]\n",
128 | "p = analyser.plot_ecdf(plot_data, ignore_vars=ignore_vars)\n",
129 | "p.figure.suptitle(\"\")\n",
130 | "p.set(xlabel=\"latency (ms)\")"
131 | ]
132 | },
133 | {
134 | "cell_type": "code",
135 | "execution_count": null,
136 | "id": "743e9893",
137 | "metadata": {},
138 | "outputs": [],
139 | "source": [
140 | "plot_data = all_data.copy(deep=False)\n",
141 | "plot_data = plot_data[plot_data[\"start_ms\"] > 250]\n",
142 | "p = analyser.plot_percentile_latency_over_time(\n",
143 | " plot_data, col=\"http_version\", ignore_vars=ignore_vars, percentile=0.99\n",
144 | ")\n",
145 | "p.figure.suptitle(\"\")\n",
146 | "p.set(xlabel=\"time (ms)\", ylabel=\"latency (ms)\")"
147 | ]
148 | },
149 | {
150 | "cell_type": "code",
151 | "execution_count": null,
152 | "id": "b88995f5",
153 | "metadata": {},
154 | "outputs": [],
155 | "source": [
156 | "plot_data = all_data.copy(deep=False)\n",
157 | "plot_data = plot_data[plot_data[\"start_ms\"] > 250]\n",
158 | "p = analyser.plot_throughput_over_time(\n",
159 | " plot_data, col=\"http_version\", ignore_vars=ignore_vars + [\"endpoint\"], interval=1000\n",
160 | ")\n",
161 | "p.figure.suptitle(\"\")\n",
162 | "p.set(xlabel=\"time (ms)\", ylabel=\"achieved throughput (req/s)\")"
163 | ]
164 | },
165 | {
166 | "cell_type": "code",
167 | "execution_count": null,
168 | "id": "e4d16042",
169 | "metadata": {},
170 | "outputs": [],
171 | "source": [
172 | "plot_data = all_data.copy(deep=False)\n",
173 | "\n",
174 | "plot_data = plot_data[plot_data[\"start_ms\"] > 250]\n",
175 | "analyser.plot_achieved_throughput_bar(plot_data, col=\"nodes\", ignore_vars=ignore_vars)"
176 | ]
177 | },
178 | {
179 | "cell_type": "code",
180 | "execution_count": null,
181 | "id": "355b55d2",
182 | "metadata": {
183 | "execution": {
184 | "iopub.execute_input": "2022-10-18T15:29:49.730498Z",
185 | "iopub.status.busy": "2022-10-18T15:29:49.730132Z",
186 | "iopub.status.idle": "2022-10-18T15:29:50.097606Z",
187 | "shell.execute_reply": "2022-10-18T15:29:50.097135Z"
188 | }
189 | },
190 | "outputs": [],
191 | "source": [
192 | "plot_data = all_data.copy(deep=False)\n",
193 | "\n",
194 | "plot_data = plot_data[plot_data[\"start_ms\"] > 250]\n",
195 | "analyser.plot_throughput_bar(plot_data, col=\"nodes\", ignore_vars=ignore_vars)"
196 | ]
197 | },
198 | {
199 | "cell_type": "code",
200 | "execution_count": null,
201 | "id": "b95deb49",
202 | "metadata": {},
203 | "outputs": [],
204 | "source": [
205 | "plot_data = all_data.copy(deep=False)\n",
206 | "analyser.plot_target_throughput_latency_line(\n",
207 | " plot_data, col=\"nodes\", ignore_vars=ignore_vars\n",
208 | ")"
209 | ]
210 | }
211 | ],
212 | "metadata": {
213 | "kernelspec": {
214 | "display_name": "Python 3.8.10 64-bit",
215 | "language": "python",
216 | "name": "python3"
217 | },
218 | "language_info": {
219 | "codemirror_mode": {
220 | "name": "ipython",
221 | "version": 3
222 | },
223 | "file_extension": ".py",
224 | "mimetype": "text/x-python",
225 | "name": "python",
226 | "nbconvert_exporter": "python",
227 | "pygments_lexer": "ipython3",
228 | "version": "3.8.10"
229 | },
230 | "vscode": {
231 | "interpreter": {
232 | "hash": "31f2aee4e71d21fbe5cf8b01ff0e069b9275f58929596ceb00d14d90e3e16cd6"
233 | }
234 | }
235 | },
236 | "nbformat": 4,
237 | "nbformat_minor": 5
238 | }
239 |
--------------------------------------------------------------------------------
/benchmark/generator.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | #
4 | # pylint: disable=line-too-long
5 | # From https://github.com/microsoft/CCF/blob/2b6ac3e06d0398b57e1e52293900ad97723fea92/tests/perf-system/generator/generator.py
6 | """
7 | Generate requests
8 | """
9 |
10 | # pylint: disable=import-error
11 | import fastparquet as fp # type: ignore
12 | import pandas as pd # type: ignore
13 |
14 |
15 | class Messages:
16 | """
17 | Messages that will be processed by the submitter.
18 | """
19 |
20 | def __init__(self):
21 | self.requests = pd.DataFrame(columns=["messageID", "request"])
22 |
23 | # pylint: disable=too-many-arguments
24 | def append(
25 | self,
26 | host,
27 | path,
28 | verb,
29 | request_type="HTTP/1.1",
30 | content_type="application/json",
31 | data="",
32 | iterations=1,
33 | ):
34 | """
35 | Create a new df with the contents specified by the arguments,
36 |         append it to self.requests and return the new df
37 | """
38 | batch_df = pd.DataFrame(columns=["messageID", "request"])
39 | data_headers = "\r\n"
40 | if len(data) > 0:
41 | data_headers = "content-length: " + str(len(data)) + "\r\n\r\n" + data
42 |
43 | df_size = len(self.requests.index)
44 |
45 | for ind in range(iterations):
46 | batch_df.loc[ind] = [
47 | str(ind + df_size),
48 | verb.upper()
49 | + " "
50 | + path
51 | + " "
52 | + request_type
53 | + "\r\n"
54 | + "host: "
55 | + host
56 | + "\r\n"
57 | + "content-type: "
58 | + content_type.lower()
59 | + "\r\n"
60 | + data_headers,
61 | ]
62 |
63 | self.requests = pd.concat([self.requests, batch_df])
64 | return batch_df
65 |
66 | def to_parquet_file(self, path):
67 | """
68 | Write out the current set of messages to a parquet file for ingestion by the submitter.
69 | """
70 | fp.write(path, self.requests)
71 |
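72 | # Illustrative usage (values are hypothetical):
73 | #
74 | #   msgs = Messages()
75 | #   msgs.append("127.0.0.1:8000", "/v3/kv/put", "POST",
76 | #               data='{"key": "Zm9v", "value": "YmFy"}', iterations=10)
77 | #   msgs.to_parquet_file("requests.parquet")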
--------------------------------------------------------------------------------
/benchmark/go-ycsb/workloads/workloada:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2010 Yahoo! Inc. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"); you
4 | # may not use this file except in compliance with the License. You
5 | # may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 | # implied. See the License for the specific language governing
13 | # permissions and limitations under the License. See accompanying
14 | # LICENSE file.
15 |
16 |
17 | # Yahoo! Cloud System Benchmark
18 | # Workload A: Update heavy workload
19 | # Application example: Session store recording recent actions
20 | #
21 | # Read/update ratio: 50/50
22 | # Default data size: 1 KB records (10 fields, 100 bytes each, plus key)
23 | # Request distribution: zipfian
24 |
25 | recordcount=1000
26 | operationcount=100000
27 | workload=core
28 |
29 | readallfields=true
30 |
31 | readproportion=0.5
32 | updateproportion=0.5
33 | scanproportion=0
34 | insertproportion=0
35 |
36 | requestdistribution=zipfian
37 |
38 |
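39 | # Example (illustrative): run this workload with the go-ycsb fork referenced in
40 | # cgmanifest.json, assuming its etcd binding:
41 | #
42 | #   ./go-ycsb run etcd -P workloads/workloada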
--------------------------------------------------------------------------------
/benchmark/go-ycsb/workloads/workloadb:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2010 Yahoo! Inc. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"); you
4 | # may not use this file except in compliance with the License. You
5 | # may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 | # implied. See the License for the specific language governing
13 | # permissions and limitations under the License. See accompanying
14 | # LICENSE file.
15 |
16 | # Yahoo! Cloud System Benchmark
17 | # Workload B: Read mostly workload
18 | # Application example: photo tagging; add a tag is an update, but most operations are to read tags
19 | #
20 | # Read/update ratio: 95/5
21 | # Default data size: 1 KB records (10 fields, 100 bytes each, plus key)
22 | # Request distribution: zipfian
23 |
24 | recordcount=1000
25 | operationcount=100000
26 | workload=core
27 |
28 | readallfields=true
29 |
30 | readproportion=0.95
31 | updateproportion=0.05
32 | scanproportion=0
33 | insertproportion=0
34 |
35 | requestdistribution=zipfian
36 |
37 |
--------------------------------------------------------------------------------
/benchmark/go-ycsb/workloads/workloadc:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2010 Yahoo! Inc. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"); you
4 | # may not use this file except in compliance with the License. You
5 | # may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 | # implied. See the License for the specific language governing
13 | # permissions and limitations under the License. See accompanying
14 | # LICENSE file.
15 |
16 | # Yahoo! Cloud System Benchmark
17 | # Workload C: Read only
18 | # Application example: user profile cache, where profiles are constructed elsewhere (e.g., Hadoop)
19 | #
20 | # Read/update ratio: 100/0
21 | # Default data size: 1 KB records (10 fields, 100 bytes each, plus key)
22 | # Request distribution: zipfian
23 |
24 | recordcount=1000
25 | operationcount=100000
26 | workload=core
27 |
28 | readallfields=true
29 |
30 | readproportion=1
31 | updateproportion=0
32 | scanproportion=0
33 | insertproportion=0
34 |
35 | requestdistribution=zipfian
36 |
37 |
38 |
39 |
--------------------------------------------------------------------------------
/benchmark/go-ycsb/workloads/workloadd:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2010 Yahoo! Inc. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"); you
4 | # may not use this file except in compliance with the License. You
5 | # may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 | # implied. See the License for the specific language governing
13 | # permissions and limitations under the License. See accompanying
14 | # LICENSE file.
15 |
16 | # Yahoo! Cloud System Benchmark
17 | # Workload D: Read latest workload
18 | # Application example: user status updates; people want to read the latest
19 | #
20 | # Read/update/insert ratio: 95/0/5
21 | # Default data size: 1 KB records (10 fields, 100 bytes each, plus key)
22 | # Request distribution: latest
23 |
24 | # The insert order for this is hashed, not ordered. The "latest" items may be
25 | # scattered around the keyspace if they are keyed by userid.timestamp. A workload
26 | # which orders items purely by time, and demands the latest, is very different from
27 | # the workload here (which we believe is more typical of how people build systems).
28 |
29 | recordcount=1000
30 | operationcount=100000
31 | workload=core
32 |
33 | readallfields=true
34 |
35 | readproportion=0.95
36 | updateproportion=0
37 | scanproportion=0
38 | insertproportion=0.05
39 |
40 | requestdistribution=latest
41 |
42 |
--------------------------------------------------------------------------------
/benchmark/go-ycsb/workloads/workloade:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2010 Yahoo! Inc. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"); you
4 | # may not use this file except in compliance with the License. You
5 | # may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 | # implied. See the License for the specific language governing
13 | # permissions and limitations under the License. See accompanying
14 | # LICENSE file.
15 |
16 | # Yahoo! Cloud System Benchmark
17 | # Workload E: Short ranges
18 | # Application example: threaded conversations, where each scan is for the posts in a given thread (assumed to be clustered by thread id)
19 | #
20 | # Scan/insert ratio: 95/5
21 | # Default data size: 1 KB records (10 fields, 100 bytes each, plus key)
22 | # Request distribution: zipfian
23 |
24 | # The insert order is hashed, not ordered. Although the scans are ordered, it does not necessarily
25 | # follow that the data is inserted in order. For example, posts for thread 342 may not be inserted contiguously, but
26 | # instead interspersed with posts from lots of other threads. The way the YCSB client works is that it will pick a start
27 | # key, and then request a number of records; this works fine even for hashed insertion.
28 |
29 | recordcount=1000
30 | operationcount=1000
31 | workload=core
32 |
33 | readallfields=true
34 |
35 | readproportion=0
36 | updateproportion=0
37 | scanproportion=0.95
38 | insertproportion=0.05
39 |
40 | requestdistribution=zipfian
41 |
42 | maxscanlength=100
43 |
44 | scanlengthdistribution=zipfian
45 |
46 |
47 |
--------------------------------------------------------------------------------
/benchmark/go-ycsb/workloads/workloadf:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2010 Yahoo! Inc. All rights reserved.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License"); you
4 | # may not use this file except in compliance with the License. You
5 | # may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
12 | # implied. See the License for the specific language governing
13 | # permissions and limitations under the License. See accompanying
14 | # LICENSE file.
15 |
16 | # Yahoo! Cloud System Benchmark
17 | # Workload F: Read-modify-write workload
18 | # Application example: user database, where user records are read and modified by the user or to record user activity.
19 | #
20 | # Read/read-modify-write ratio: 50/50
21 | # Default data size: 1 KB records (10 fields, 100 bytes each, plus key)
22 | # Request distribution: zipfian
23 |
24 | recordcount=1000
25 | operationcount=100000
26 | workload=core
27 |
28 | readallfields=true
29 |
30 | readproportion=0.5
31 | updateproportion=0
32 | scanproportion=0
33 | insertproportion=0
34 | readmodifywriteproportion=0.5
35 |
36 | requestdistribution=zipfian
37 |
38 |
--------------------------------------------------------------------------------
/benchmark/k6.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | """
6 | Run benchmarks in various configurations for each defined datastore.
7 | """
8 |
9 | import argparse
10 | import os
11 | from dataclasses import asdict, dataclass
12 | from typing import List
13 |
14 | import common
15 | from common import Store
16 | from loguru import logger
17 | from stores import LSKVStore
18 |
19 | BENCH_DIR = os.path.join(common.BENCH_DIR, "k6")
20 |
21 |
22 | # pylint: disable=too-many-instance-attributes
23 | @dataclass
24 | class K6Config(common.Config):
25 | """
26 | Config holds the configuration options for a given benchmark run.
27 | """
28 |
29 | rate: int
30 | vus: int
31 | func: str
32 | content_type: str
33 | value_size: int
34 |
35 | def bench_name(self) -> str:
36 | """
37 | Get the name of the benchmark.
38 | """
39 | return "k6"
40 |
41 |
42 | class K6Benchmark(common.Benchmark):
43 | """
44 |     K6 benchmark.
45 | """
46 |
47 | def __init__(self, config: K6Config):
48 | self.config = config
49 |
50 | def run_cmd(self, store: Store) -> List[str]:
51 | """
52 | Return the command to run the benchmark for the given store.
53 | """
54 | timings_file = os.path.join(self.config.output_dir(), "timings.csv")
55 | log_file = os.path.join(self.config.output_dir(), "console.log")
56 | workspace = store.cert()
57 | workspace = os.path.dirname(workspace)
58 | bench = [
59 | "bin/k6",
60 | "run",
61 | "--out",
62 | f"csv={timings_file}",
63 | "--env",
64 | f"RATE={self.config.rate}",
65 | "--env",
66 | f"VALUE_SIZE={self.config.value_size}",
67 | "--env",
68 | f"WORKSPACE={workspace}",
69 | "--env",
70 | f"FUNC={self.config.func}",
71 | "--env",
72 | f"CONTENT_TYPE={self.config.content_type}",
73 | "--env",
74 | f"PRE_ALLOCATED_VUS={self.config.vus}",
75 | "--env",
76 | f"MAX_VUS={self.config.vus}",
77 | "--env",
78 | f"ADDR={store.get_leader_address()}",
79 | "benchmark/k6.js",
80 | "--console-output",
81 | log_file,
82 | "--log-format",
83 | "raw",
84 | ]
85 | logger.debug("run cmd: {}", bench)
86 | return bench
87 |
88 |
89 | # pylint: disable=duplicate-code
90 | def run_benchmark(config: K6Config, store: Store, benchmark: K6Benchmark) -> str:
91 | """
92 | Run the benchmark for the given store with the given bench command.
93 | """
94 | with store:
95 | store.wait_for_ready()
96 |
97 | logger.info("starting benchmark")
98 | run_cmd = benchmark.run_cmd(store)
99 | common.run(run_cmd, "bench", config.output_dir())
100 | logger.info("stopping benchmark")
101 |
102 | timings_file = os.path.join(config.output_dir(), "timings.csv")
103 |
104 | return timings_file
105 |
106 |
107 | # pylint: disable=duplicate-code
108 | # pylint: disable=too-many-locals
109 | def run_metrics(_name: str, _cmd: str, file: str):
110 | """
111 | Run metric gathering.
112 | """
113 | if not os.path.exists(file):
114 | logger.warning("no metrics file found at {}", file)
115 | return
116 | logger.warning("no metrics implemented yet")
117 |
118 |
119 | def get_arguments():
120 | """
121 | Parse command line arguments.
122 | """
123 | parser = common.get_argument_parser()
124 |
125 | parser.add_argument(
126 | "--rate",
127 | action="extend",
128 | nargs="+",
129 | type=int,
130 | default=[],
131 | help="Maximum requests per second",
132 | )
133 | parser.add_argument(
134 | "--vus",
135 | action="extend",
136 | nargs="+",
137 | type=int,
138 | default=[],
139 | help="VUs to use",
140 | )
141 | parser.add_argument(
142 | "--func",
143 | action="extend",
144 | nargs="+",
145 |         type=str,
146 | default=[],
147 | help="Functions to run",
148 | )
149 | parser.add_argument(
150 | "--content-type",
151 | action="extend",
152 | nargs="+",
153 |         type=str,
154 |         choices=["json", "grpc"],
155 |         default=[],
156 |         help="Content type of the payload to use",
157 | )
158 | parser.add_argument(
159 | "--value-sizes",
160 | action="extend",
161 | nargs="+",
162 | type=int,
163 | default=[],
164 | help="Size of values written to the datastore",
165 | )
166 |
167 | args = parser.parse_args()
168 |
169 | if not args.rate:
170 | args.rate = [1000]
171 | if not args.vus:
172 | args.vus = [100]
173 | if not args.func:
174 | args.func = ["put_single"]
175 | if not args.content_type:
176 | args.content_type = ["json"]
177 | if not args.value_sizes:
178 | args.value_sizes = [256]
179 |
180 | return args
181 |
182 |
183 | # pylint: disable=duplicate-code
184 | def execute_config(config: K6Config):
185 | """
186 | Execute the given configuration.
187 | """
188 | if config.store == "etcd":
189 | logger.warning("skipping etcd for k6 benchmark")
190 | return
191 | store = LSKVStore(config)
192 |
193 | benchmark = K6Benchmark(config)
194 |
195 | timings_file = run_benchmark(
196 | config,
197 | store,
198 | benchmark,
199 | )
200 | run_metrics(
201 | config.to_str(),
202 | "todo",
203 | timings_file,
204 | )
205 |
206 |
207 | def make_configurations(args: argparse.Namespace) -> List[K6Config]:
208 | """
209 | Build up a list of configurations to run.
210 | """
211 | configs = []
212 |
213 | # pylint: disable=too-many-nested-blocks
214 | for common_config in common.make_common_configurations(args):
215 | for rate in args.rate:
216 | logger.debug("adding rate: {}", rate)
217 | for vus in args.vus:
218 | logger.debug("adding vus: {}", vus)
219 | for func in args.func:
220 | logger.debug("adding func: {}", func)
221 | for content_type in args.content_type:
222 | logger.debug("adding content_type: {}", content_type)
223 | for value_size in args.value_sizes:
224 | logger.debug("adding value_size: {}", value_size)
225 | conf = K6Config(
226 | **asdict(common_config),
227 | rate=rate,
228 | vus=vus,
229 | func=func,
230 | content_type=content_type,
231 | value_size=value_size,
232 | )
233 | configs.append(conf)
234 |
235 | return configs
236 |
237 |
238 | if __name__ == "__main__":
239 | common.main("k6", get_arguments, make_configurations, execute_config)
240 |
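241 | # Example invocation (illustrative); each flag accepts multiple values to sweep
242 | # over configurations:
243 | #
244 | #   ./benchmark/k6.py --rate 1000 --vus 100 --func put_single \
245 | #       --content-type json --value-sizes 256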
--------------------------------------------------------------------------------
/benchmark/perf-analysis.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "metadata": {},
7 | "outputs": [],
8 | "source": [
9 | "import analysis"
10 | ]
11 | },
12 | {
13 | "cell_type": "code",
14 | "execution_count": null,
15 | "metadata": {},
16 | "outputs": [],
17 | "source": [
18 | "analyser = analysis.Analyser(\"perf\")\n",
19 | "all_data = analyser.get_data()\n",
20 | "all_data"
21 | ]
22 | },
23 | {
24 | "cell_type": "code",
25 | "execution_count": null,
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "# split out the rawResponse\n",
30 | "# raw_data = all_data[\"rawResponse\"]\n",
31 | "# all_data[[\"header\", \"length\", \"type\", \"ignore\", \"data\"]] = raw_data.str.split(\"\\r\\n\", expand=True)"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "metadata": {},
38 | "outputs": [],
39 | "source": [
40 | "# header = all_data[\"header\"]\n",
41 | "# all_data[[\"http\", \"status\", \"status_text\"]] = header.str.split(\" \", expand=True)"
42 | ]
43 | },
44 | {
45 | "cell_type": "code",
46 | "execution_count": null,
47 | "metadata": {},
48 | "outputs": [],
49 | "source": [
50 | "all_data.head()"
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": null,
56 | "metadata": {},
57 | "outputs": [],
58 | "source": [
59 | "ignore_vars = [\"messageID\", \"rawResponse\"]"
60 | ]
61 | },
62 | {
63 | "cell_type": "code",
64 | "execution_count": null,
65 | "metadata": {},
66 | "outputs": [],
67 | "source": [
68 | "all_data.dtypes"
69 | ]
70 | },
71 | {
72 | "cell_type": "code",
73 | "execution_count": null,
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "plot_data = all_data.copy(deep=False)\n",
78 | "analyser.plot_scatter(plot_data, ignore_vars=ignore_vars)"
79 | ]
80 | },
81 | {
82 | "cell_type": "code",
83 | "execution_count": null,
84 | "metadata": {},
85 | "outputs": [],
86 | "source": [
87 | "plot_data = all_data.copy(deep=False)\n",
88 | "p = analyser.plot_ecdf(plot_data, ignore_vars=ignore_vars)\n",
89 | "# p.set(xlabel=\"\")"
90 | ]
91 | },
92 | {
93 | "cell_type": "code",
94 | "execution_count": null,
95 | "metadata": {},
96 | "outputs": [],
97 | "source": [
98 | "plot_data = all_data.copy(deep=False)\n",
99 | "ax = analyser.plot_achieved_throughput_bar(\n",
100 | " plot_data, x_column=\"max_inflight_requests\", ignore_vars=ignore_vars, hue=\"vars\"\n",
101 | ")\n",
102 | "ax.figure.suptitle(\"\")\n",
103 | "ax.set(xlabel=\"max inflight requests\", ylabel=\"achieved throughput (req/s)\")"
104 | ]
105 | },
106 | {
107 | "cell_type": "code",
108 | "execution_count": null,
109 | "metadata": {},
110 | "outputs": [],
111 | "source": [
112 | "plot_data = all_data.copy(deep=False)\n",
113 | "p = analyser.plot_throughput_bar(\n",
114 | " plot_data, x_column=\"max_inflight_requests\", ignore_vars=ignore_vars\n",
115 | ")\n",
116 | "p.figure.suptitle(\"\")"
117 | ]
118 | }
119 | ],
120 | "metadata": {
121 | "kernelspec": {
122 | "display_name": "Python 3.8.10 64-bit",
123 | "language": "python",
124 | "name": "python3"
125 | },
126 | "language_info": {
127 | "codemirror_mode": {
128 | "name": "ipython",
129 | "version": 3
130 | },
131 | "file_extension": ".py",
132 | "mimetype": "text/x-python",
133 | "name": "python",
134 | "nbconvert_exporter": "python",
135 | "pygments_lexer": "ipython3",
136 | "version": "3.8.10"
137 | },
138 | "vscode": {
139 | "interpreter": {
140 | "hash": "916dbcbb3f70747c44a77c7bcd40155683ae19c65e1c03b4aa3499c5328201f1"
141 | }
142 | }
143 | },
144 | "nbformat": 4,
145 | "nbformat_minor": 2
146 | }
147 |
--------------------------------------------------------------------------------
/benchmark/perf_system.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | """
6 | Perf system benchmark.
7 |
8 | This benchmark is primarily for comparing http1 and http2 performance of LSKV over the JSON API.
9 | """
10 |
11 | import argparse
12 | import os
13 | from dataclasses import asdict, dataclass
14 | from typing import List
15 |
16 | import common
17 | from common import Store
18 | from loguru import logger
19 | from stores import LSKVStore
20 |
21 | BENCH_DIR = os.path.join(common.BENCH_DIR, "perf")
22 |
23 |
24 | @dataclass
25 | class PerfConfig(common.Config):
26 | """
27 | Benchmark configuration.
28 | """
29 |
30 | workload: str
31 | max_inflight_requests: int
32 |
33 | def bench_name(self) -> str:
34 | return "perf"
35 |
36 |
37 | class PerfBenchmark(common.Benchmark):
38 | """
39 | Benchmark implementation for serial requests.
40 | """
41 |
42 | def __init__(self, config: PerfConfig):
43 | self.config = config
44 |
45 | # pylint: disable=duplicate-code
46 | def run_cmd(self, store: Store) -> List[str]:
47 | """
48 | Return the command to run the benchmark for the given store.
49 | """
50 | ccf_bin = f"/opt/ccf_{self.config.enclave}/bin"
51 | bench = [
52 | f"{ccf_bin}/submit",
53 | "--send-filepath",
54 | os.path.join(self.config.output_dir(), "requests.parquet"),
55 | "--response-filepath",
56 | os.path.join(self.config.output_dir(), "responses.parquet"),
57 | "--generator-filepath",
58 | self.config.workload,
59 | "--max-inflight-requests",
60 | str(self.config.max_inflight_requests),
61 | "--cacert",
62 | store.cacert(),
63 | "--cert",
64 | store.cert(),
65 | "--key",
66 | store.key(),
67 | ]
68 | return bench
69 |
70 |
71 | def run_benchmark(config: PerfConfig, store: Store, benchmark: PerfBenchmark) -> str:
72 | """
73 | Run the benchmark for the given store with the given bench command.
74 | """
75 | with store:
76 | store.wait_for_ready()
77 |
78 | logger.info("starting benchmark")
79 | run_cmd = benchmark.run_cmd(store)
80 | common.run(run_cmd, "bench", config.output_dir())
81 | logger.info("stopping benchmark")
82 |
83 | # pylint: disable=duplicate-code
84 | return ""
85 |
86 |
87 | # pylint: disable=duplicate-code
88 | # pylint: disable=too-many-locals
89 | def run_metrics(_name: str, _cmd: str, file: str):
90 | """
91 | Run metric gathering.
92 | """
93 | if not os.path.exists(file):
94 | logger.warning("no metrics file found at {}", file)
95 | return
96 | logger.warning("no metrics implemented yet")
97 |
98 |
99 | def get_arguments():
100 | """
101 | Parse command line arguments.
102 | """
103 | parser = common.get_argument_parser()
104 |
105 | parser.add_argument(
106 | "--workloads",
107 | type=str,
108 | action="extend",
109 | nargs="+",
110 | help="The workload files to submit",
111 | )
112 | parser.add_argument(
113 | "--max-inflight-requests",
114 | type=int,
115 | action="extend",
116 | nargs="+",
117 | help="Number of outstanding requests to allow",
118 | )
119 |
120 | args = parser.parse_args()
121 |
122 | if not args.max_inflight_requests:
123 | args.max_inflight_requests = [0]
124 |
125 | return args
126 |
127 |
128 | # pylint: disable=duplicate-code
129 | def execute_config(config: PerfConfig):
130 | """
131 | Execute the given configuration.
132 | """
133 | if config.store == "etcd":
134 | # doesn't work with the etcd API
135 | logger.info("skipping test with etcd store")
136 | return
137 | store = LSKVStore(config)
138 | benchmark = PerfBenchmark(config)
139 |
140 | timings_file = run_benchmark(
141 | config,
142 | store,
143 | benchmark,
144 | )
145 | run_metrics(
146 | config.to_str(),
147 | "todo",
148 | timings_file,
149 | )
150 |
151 |
152 | def make_configurations(args: argparse.Namespace) -> List[PerfConfig]:
153 | """
154 | Build up a list of configurations to run.
155 | """
156 | configs = []
157 |
158 | for workload in args.workloads:
159 | logger.debug("Adding configuration for workload {}", workload)
160 | for max_inflight_requests in args.max_inflight_requests:
161 | logger.debug(
162 | "Adding configuration for max_inflight_requests {}",
163 | max_inflight_requests,
164 | )
165 | for common_config in common.make_common_configurations(args):
166 | conf = PerfConfig(
167 | **asdict(common_config),
168 | workload=workload,
169 | max_inflight_requests=max_inflight_requests,
170 | )
171 | configs.append(conf)
172 |
173 | return configs
174 |
175 |
176 | if __name__ == "__main__":
177 | common.main("perf", get_arguments, make_configurations, execute_config)
178 |
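179 | # Example invocation (illustrative), replaying the pre-generated piccolo
180 | # workload while sweeping the number of inflight requests:
181 | #
182 | #   ./benchmark/perf_system.py \
183 | #       --workloads benchmark/piccolo-requests-http1.parquet \
184 | #       --max-inflight-requests 1 10 100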
--------------------------------------------------------------------------------
/benchmark/piccolo-requests-http1.parquet:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/microsoft/LSKV/1e1214d4efa69e664dc115420d2023feffd31bb4/benchmark/piccolo-requests-http1.parquet
--------------------------------------------------------------------------------
/benchmark/piccolo_generate.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | """
6 | Generate a simple workload to run piccolo with.
7 | """
8 |
9 | import base64
10 | import json
11 | import sys
12 | import time
13 |
14 | from generator import Messages
15 | from loguru import logger
16 |
17 |
18 | # pylint: disable=too-many-arguments
19 | def put(
20 | msgs: Messages,
21 | key: str,
22 | value: str,
23 | host: str = "127.0.0.1:8000",
24 | http_version: int = 1,
25 | count=1,
26 | ):
27 | """
28 | Add put requests to the messages.
29 | """
30 | logger.debug("Adding Put request for key {} and value {}", key, value)
31 | request_type = "HTTP/1.1" if http_version == 1 else "HTTP/2"
32 | msgs.append(
33 | host,
34 | "/v3/kv/put",
35 | "POST",
36 | request_type,
37 | "application/json",
38 | json.dumps({"key": b64encode(key), "value": b64encode(value)}),
39 | count,
40 | )
41 |
42 |
43 | # pylint: disable=too-many-arguments
44 | def get(
45 | msgs: Messages,
46 | key: str,
47 | range_end: str = "",
48 | host: str = "127.0.0.1:8000",
49 | http_version: int = 1,
50 | count=1,
51 | ):
52 | """
53 | Add get requests to the messages.
54 | """
55 | data = {"key": b64encode(key)}
56 | if range_end:
57 | data["range_end"] = b64encode(range_end)
58 | logger.debug("Adding Get request for key {} and range_end {}", key, range_end)
59 | request_type = "HTTP/1.1" if http_version == 1 else "HTTP/2"
60 | msgs.append(
61 | host,
62 | "/v3/kv/range",
63 | "POST",
64 | request_type,
65 | "application/json",
66 | json.dumps(data),
67 | count,
68 | )
69 |
70 |
71 | # pylint: disable=too-many-arguments
72 | def delete(
73 | msgs: Messages,
74 | key: str,
75 | range_end: str = "",
76 | host: str = "127.0.0.1:8000",
77 | http_version: int = 1,
78 | count=1,
79 | ):
80 | """
81 | Add delete requests to the messages.
82 | """
83 | data = {"key": b64encode(key)}
84 | if range_end:
85 | data["range_end"] = b64encode(range_end)
86 | logger.debug("Adding Delete request for key {} and range_end {}", key, range_end)
87 | request_type = "HTTP/1.1" if http_version == 1 else "HTTP/2"
88 | msgs.append(
89 | host,
90 | "/v3/kv/delete_range",
91 | "POST",
92 | request_type,
93 | "application/json",
94 | json.dumps(data),
95 | count,
96 | )
97 |
98 |
99 | def b64encode(string: str) -> str:
100 | """
101 | Base64 encode a string.
102 | """
103 | return base64.b64encode(string.encode()).decode()
104 |
105 |
106 | def generate_scenario(http_version: int):
107 | """
108 | Generate a scenario for a given http version.
109 | """
110 | msgs = Messages()
111 | # this is slow but could be made faster if the indexes in
112 | # the Messages dataframe were added at the end as a batch.
113 | # then we could make one part and repeat it
114 | start = time.time()
115 |     for batch in range(100):
116 |         # use a distinct loop variable so the inner loop does not shadow it
117 |         logger.info("adding batch {}", batch)
118 |         for i in range(100):
118 | key = f"key{i}"
119 | value = f"value{i}"
120 | put(msgs, key, value, http_version=http_version)
121 | get(msgs, key, http_version=http_version)
122 | delete(msgs, key, http_version=http_version)
123 | logger.info("took {}", time.time() - start)
124 |
125 | parquet_file = f"piccolo-requests-http{http_version}.parquet"
126 | logger.info("Writing messages to file {}", parquet_file)
127 | msgs.to_parquet_file(parquet_file)
128 |
129 |
130 | def main():
131 | """
132 | Run the generator.
133 | """
134 | logger.remove()
135 | logger.add(sys.stderr, level="INFO")
136 |
137 | generate_scenario(1)
138 | # http2 is not supported yet
139 | # generate_scenario(2)
140 |
141 |
142 | if __name__ == "__main__":
143 | main()
144 |
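145 | # For reference, each generated put request (the "request" column in the
146 | # parquet file) is a raw HTTP/1.1 message like:
147 | #
148 | #   POST /v3/kv/put HTTP/1.1
149 | #   host: 127.0.0.1:8000
150 | #   content-type: application/json
151 | #   content-length: 40
152 | #
153 | #   {"key": "a2V5MA==", "value": "dmFsdWUw"}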
--------------------------------------------------------------------------------
/benchmark/receipt_verify.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | """
6 | Benchmark verifying a local receipt.
7 | """
8 |
9 | import base64
10 | import hashlib
11 | import json
12 | import timeit
13 |
14 | import ccf.receipt # type: ignore
15 |
16 | # pylint: disable=import-error
17 | import etcd_pb2 # type: ignore
18 |
19 | # pylint: disable=import-error
20 | import lskvserver_pb2 # type: ignore
21 | from cryptography.x509 import load_pem_x509_certificate # type: ignore
22 | from google.protobuf.json_format import ParseDict
23 |
24 |
25 | # pylint: disable=too-many-arguments
26 | # pylint: disable=too-many-locals
27 | def check_receipt(
28 | req_type: str, request, response, receipt, receipt_json, service_cert
29 | ):
30 | """
31 | Check a receipt for a request and response.
32 | """
33 | # pylint: disable=duplicate-code
34 | receipt = receipt.receipt
35 | tx_receipt = receipt.tx_receipt
36 | leaf_components = tx_receipt.leaf_components
37 | claims_digest = leaf_components.claims_digest
38 | write_set_digest = leaf_components.write_set_digest
39 | commit_evidence = leaf_components.commit_evidence
40 |
41 | response.ClearField("header")
42 |
43 | commit_evidence_digest = hashlib.sha256(commit_evidence.encode()).digest()
44 | leaf_parts = [
45 | bytes.fromhex(write_set_digest),
46 | commit_evidence_digest,
47 | bytes.fromhex(claims_digest),
48 | ]
49 | leaf = hashlib.sha256(b"".join(leaf_parts)).hexdigest()
50 |
51 | signature = receipt.signature
52 | cert = receipt.cert
53 | node_cert = load_pem_x509_certificate(cert.encode())
54 |
55 | proof = receipt_json["receipt"]["txReceipt"]["proof"]
56 | root = ccf.receipt.root(leaf, proof)
57 |
58 | signature = base64.b64encode(signature).decode()
59 | ccf.receipt.verify(root, signature, node_cert)
60 |
61 | # receipt is valid, check if it matches our claims too
62 | claims = lskvserver_pb2.ReceiptClaims()
63 | getattr(claims, f"request_{req_type}").CopyFrom(request)
64 | getattr(claims, f"response_{req_type}").CopyFrom(response)
65 | claims_ser = claims.SerializeToString()
66 | claims_digest_calculated = hashlib.sha256(claims_ser).hexdigest()
67 | assert claims_digest == claims_digest_calculated
68 |
69 | ccf.receipt.check_endorsement(node_cert, service_cert)
70 |
71 |
72 | def check_json_receipt(req_type, req, res, receipt, service_cert):
73 | """
74 | Check a receipt obtained in JSON form.
75 | """
76 | req = json.loads(req)
77 | req_pb = ParseDict(req, etcd_pb2.PutRequest())
78 | res = json.loads(res)
79 | res_pb = ParseDict(res, etcd_pb2.PutResponse())
80 | receipt = json.loads(receipt)
81 | receipt_pb = ParseDict(receipt, lskvserver_pb2.GetReceiptResponse())
82 |
83 | check_receipt(req_type, req_pb, res_pb, receipt_pb, receipt, service_cert)
84 |
85 |
86 | def main():
87 | """
88 | Main function to run.
89 | """
90 | iterations = 1000
91 | repeats = 10
92 |
93 | req_type = "put"
94 | req = '{"key":"Zm9v","value":"YmFy"}'
95 | # pylint: disable=line-too-long
96 | res = '{"header":{"cluster_id":1201806628430307423,"member_id":15247274768972111916,"revision":11,"raft_term":2}}'
97 | receipt = '{"header":{"clusterId":"3170697659173810570","memberId":"17617643898592276624","revision":"12","raftTerm":"2","committedRevision":"12","committedRaftTerm":"2"},"receipt":{"cert":"-----BEGIN CERTIFICATE-----\\nMIIBwjCCAUigAwIBAgIQelWMyA4HX9YNRNHbjBB+GjAKBggqhkjOPQQDAzAWMRQw\\nEgYDVQQDDAtDQ0YgTmV0d29yazAeFw0yMzAxMTAxMTA4MDFaFw0yMzA0MTAxMTA4\\nMDBaMBMxETAPBgNVBAMMCENDRiBOb2RlMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAE\\nRcyck/p0JwM2vzGWxZwre3KF6HZWaC1pKDgBXCmOzLQ8FkOpiLiCdbiKum4wXZUB\\ncFRZXhBB0aFqPuLBlJrhF/EsDHZXw3q55cc1vJwQA6B8xqZpI5EeHBHzf/5nhVCt\\no14wXDAJBgNVHRMEAjAAMB0GA1UdDgQWBBSUkeYJQCUK2zRePcyb3ijK4joAzjAf\\nBgNVHSMEGDAWgBS70Mmt6puVhK8amGjMh9N8IHgPMDAPBgNVHREECDAGhwR/AAAB\\nMAoGCCqGSM49BAMDA2gAMGUCMQCr2zslxIrmzZua/6cVjWMMHTQsFWQQWqIEMpnN\\nt1QPC8H8kXmObnUBLnZ7xzPxLAICMAlui+wqJ2T781mlb9srf1ZB+LA4jUTCMa7/\\nRetTCm6wS4NSw2KODmt/awa11e6sUg==\\n-----END CERTIFICATE-----\\n","signature":"MGUCMB/QRsaoEmWwn5lf9E/7Suct4zsUCAoFr06wqL6mMd2+0VfH6WtfAGj0grSOjeheaAIxAPp8pKiQg8m/A9ykTq4JmkrNfgIjHpt5lx+tbBQWq7+v8EwwT2GJw6NdB/HOq1ibWA==","nodeId":"90b0eb92c8717ef4af94f2ad35dad1d0197c8dd9e9c46c2444cd0dced76780db","txReceipt":{"leafComponents":{"claimsDigest":"1e2ce1004f14e362833d5791b3d9702468d4a276665606349e9afb58b45365bd","commitEvidence":"ce:2.11:c296e4e2f3541ad12dd3cc7d90682f06a4ef6bb6fe78c97da227fccadb213ba9","writeSetDigest":"0dcbe265fd4844cf8e191df4edf8b359da18aa25d61a5d8927541f43b5e1851f"},"proof":[{"left":"6bc6a9ca7318c9c5464ad52a23761f2088090ae4632b33472b262e087bdf5e95"},{"left":"14ca017e27abc6e1e7713419c46b902b4d4926bc8639a92a916894ce83090a15"},{"left":"bf45d3693fe0bb96f73532dfe74bb1134a76c89bd6d113c4aaef3afa12e194d5"}]}}}'
98 | service_cert = load_pem_x509_certificate(
99 | """-----BEGIN CERTIFICATE-----
100 | MIIBuDCCAT6gAwIBAgIRALGT6aKJj03G0glrrTxVumAwCgYIKoZIzj0EAwMwFjEU
101 | MBIGA1UEAwwLQ0NGIE5ldHdvcmswHhcNMjMwMTEwMTEwODAxWhcNMjMwNDEwMTEw
102 | ODAwWjAWMRQwEgYDVQQDDAtDQ0YgTmV0d29yazB2MBAGByqGSM49AgEGBSuBBAAi
103 | A2IABBUSXLZ/qHxuDio17jtXUzo0fbi8x0+nbaYRMV1sam48OmMWzKqcXjiPPxht
104 | JlefGs4011X4btVFvK7sJtpC7nj36RdwPSh7dsozjlRKmJo73yeMreSq7DoIWILi
105 | De98G6NQME4wDAYDVR0TBAUwAwEB/zAdBgNVHQ4EFgQUu9DJreqblYSvGphozIfT
106 | fCB4DzAwHwYDVR0jBBgwFoAUu9DJreqblYSvGphozIfTfCB4DzAwCgYIKoZIzj0E
107 | AwMDaAAwZQIxAM2LxMnWaLDtGAMEMqaH03HaZV0CnY+s3uRkp7mCElcfMAFXY2vB
108 | CW1ZOzn7qIUvBgIwYDcYqeN8Ox9y9ktgpLEkvdiRK6OLIF4dxnsQJ/ORjSNLPyYx
109 | RuXUu+yl3EgtEgvw
110 | -----END CERTIFICATE-----""".encode()
111 | )
112 |
113 | print(
114 | f"Timing receipt verification with {iterations} iterations and {repeats} repeats"
115 | )
116 |
117 | durations = timeit.Timer(
118 | lambda: check_json_receipt(req_type, req, res, receipt, service_cert)
119 | ).repeat(number=iterations, repeat=repeats)
120 |
121 | for duration in durations:
122 | print(
123 | f"Took {duration} seconds to complete {iterations} iterations, on average {duration/iterations} s/iter"
124 | )
125 |
126 | print()
127 | print(
128 | f"Average duration to complete {iterations} iterations:",
129 | sum(durations) / len(durations),
130 | "seconds",
131 | )
132 | ms_per_iter = ((sum(durations) / len(durations)) / iterations) * 1000
133 | print(
134 | "Average duration per iteration ",
135 | ms_per_iter,
136 | "milliseconds",
137 | )
138 |     print("Throughput:", 1000 / ms_per_iter, "verifications/s")
139 |
140 |
141 | if __name__ == "__main__":
142 | main()
143 |
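144 | # Summary of the verification chain exercised above:
145 | #   leaf = sha256(write_set_digest || sha256(commit_evidence) || claims_digest)
146 | #   root = Merkle root recomputed from the leaf and the receipt's proof
147 | #   the signature must be a valid signature over root by the node certificate,
148 | #   and the node certificate must be endorsed by the service certificate.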
--------------------------------------------------------------------------------
/cgmanifest.json:
--------------------------------------------------------------------------------
1 | {
2 | "$schema": "https://json.schemastore.org/component-detection-manifest.json",
3 | "Registrations": [
4 | {
5 | "component": {
6 | "type": "git",
7 | "git": {
8 | "repositoryUrl": "https://github.com/etcd-io/etcd",
9 | "commitHash": "b886bbc89f31d7ac59bdfbb3d14a4263dae85a1b"
10 | }
11 | }
12 | },
13 | {
14 | "component": {
15 | "type": "git",
16 | "git": {
17 | "repositoryUrl": "https://github.com/jeffa5/go-ycsb",
18 | "commitHash": "8d81c0f7548b61d7ef10bc205926fe36f83687d1"
19 | }
20 | }
21 | },
22 | {
23 | "component": {
24 | "type": "git",
25 | "git": {
26 | "repositoryUrl": "https://github.com/grafana/k6",
27 | "commitHash": "2fe2dd32b3827eeeeb3959aff63a6b402aab0a5a"
28 | }
29 | }
30 | }
31 | ],
32 | "Version": 1
33 | }
34 |
--------------------------------------------------------------------------------
/cmake/version.cmake:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the
2 | # MIT License.
3 |
4 | # get the current version from git
5 | find_package(Git)
6 | execute_process(
7 | COMMAND ${GIT_EXECUTABLE} describe --tags --dirty --exclude latest-main
8 | WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
9 | OUTPUT_VARIABLE "LSKV_VERSION"
10 | OUTPUT_STRIP_TRAILING_WHITESPACE
11 | RESULT_VARIABLE RETURN_CODE)
12 |
13 | if(NOT RETURN_CODE STREQUAL "0")
14 | if(DEFINED ENV{LSKV_VERSION})
15 | set(LSKV_VERSION $ENV{LSKV_VERSION})
16 | message(
17 | WARNING
18 | "Could not find any tag in repository. Using LSKV_VERSION environment variable: ${LSKV_VERSION}"
19 | )
20 | else()
21 | set(LSKV_VERSION "0.0.0")
22 | message(
23 | WARNING
24 | "Could not find any tag in repository. Defaulting LSKV version to ${LSKV_VERSION}"
25 | )
26 | endif()
27 | endif()
28 |
29 | # strip 'v' prefix from version
30 | string(REGEX REPLACE "^v" "" LSKV_VERSION ${LSKV_VERSION})
31 |
32 | # Convert git description into cmake list, separated at '-'
33 | string(REPLACE "-" ";" LSKV_VERSION_COMPONENTS ${LSKV_VERSION})
34 |
35 | message(STATUS "Got long version ${LSKV_VERSION}")
36 | list(GET LSKV_VERSION_COMPONENTS 0 LSKV_VERSION_SHORT)
37 |
38 | message(STATUS "Got short version ${LSKV_VERSION_SHORT}")
39 |
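40 | # For example, a git description of "v0.1.0-5-gabc1234" yields the long version
41 | # "0.1.0-5-gabc1234" and the short version "0.1.0".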
--------------------------------------------------------------------------------
/config/cchost_config.sgx.json:
--------------------------------------------------------------------------------
1 | {
2 | "enclave": {
3 | "file": "/app/liblskv.enclave.so.signed",
4 | "type": "Release",
5 | "platform": "SGX"
6 | },
7 | "network": {
8 | "node_to_node_interface": {
9 | "bind_address": "127.0.0.1:8001"
10 | },
11 | "rpc_interfaces": {
12 | "main_interface": {
13 | "bind_address": "0.0.0.0:8000",
14 | "app_protocol": "HTTP2"
15 | }
16 | }
17 | },
18 | "node_certificate": {
19 | "subject_alt_names": ["iPAddress:127.0.0.1"]
20 | },
21 | "command": {
22 | "type": "Start",
23 | "service_certificate_file": "/app/certs/service_cert.pem",
24 | "start": {
25 | "constitution_files": [
26 | "/app/validate.js",
27 | "/app/apply.js",
28 | "/app/resolve.js",
29 | "/app/actions.js"
30 | ],
31 | "members": [
32 | {
33 | "certificate_file": "/app/certs/member0_cert.pem",
34 | "encryption_public_key_file": "/app/certs/member0_enc_pubk.pem"
35 | }
36 | ]
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/config/cchost_config.virtual.json:
--------------------------------------------------------------------------------
1 | {
2 | "enclave": {
3 | "file": "/app/liblskv.virtual.so",
4 | "type": "Virtual",
5 | "platform": "Virtual"
6 | },
7 | "network": {
8 | "node_to_node_interface": {
9 | "bind_address": "127.0.0.1:8001"
10 | },
11 | "rpc_interfaces": {
12 | "main_interface": {
13 | "bind_address": "0.0.0.0:8000",
14 | "app_protocol": "HTTP2"
15 | }
16 | }
17 | },
18 | "node_certificate": {
19 | "subject_alt_names": ["iPAddress:127.0.0.1"]
20 | },
21 | "command": {
22 | "type": "Start",
23 | "service_certificate_file": "/app/certs/service_cert.pem",
24 | "start": {
25 | "constitution_files": [
26 | "/app/validate.js",
27 | "/app/apply.js",
28 | "/app/resolve.js",
29 | "/app/actions.js"
30 | ],
31 | "members": [
32 | {
33 | "certificate_file": "/app/certs/member0_cert.pem",
34 | "encryption_public_key_file": "/app/certs/member0_enc_pubk.pem"
35 | }
36 | ]
37 | }
38 | }
39 | }
40 |
--------------------------------------------------------------------------------
/constitution/apply.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | export function apply(proposal, proposalId) {
5 | const proposed_actions = JSON.parse(proposal)["actions"];
6 | for (const proposed_action of proposed_actions) {
7 | const definition = actions.get(proposed_action.name);
8 | definition.apply(proposed_action.args, proposalId);
9 | }
10 | }
11 |
--------------------------------------------------------------------------------
/constitution/resolve.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | export function resolve(proposal, proposerId, votes) {
5 | const memberVoteCount = votes.filter((v) => v.vote).length;
6 |
7 | let activeMemberCount = 0;
8 | ccf.kv["public:ccf.gov.members.info"].forEach((v) => {
9 | const info = ccf.bufToJsonCompatible(v);
10 | if (info.status === "Active") {
11 | activeMemberCount++;
12 | }
13 | });
14 |
15 | // A single member can accept a proposal.
16 | if (memberVoteCount > 0 && activeMemberCount > 0) {
17 | return "Accepted";
18 | }
19 |
20 | return "Open";
21 | }
22 |
--------------------------------------------------------------------------------
/constitution/validate.js:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | export function validate(input) {
5 | let proposal = JSON.parse(input);
6 | let errors = [];
7 | let position = 0;
8 | for (const action of proposal["actions"]) {
9 | const definition = actions.get(action.name);
10 | if (definition) {
11 | try {
12 | definition.validate(action.args);
13 | } catch (e) {
14 | errors.push(
15 | `${action.name} at position ${position} failed validation: ${e}\n${e.stack}`,
16 | );
17 | }
18 | } else {
19 | errors.push(`${action.name}: no such action`);
20 | }
21 | position++;
22 | }
23 | return { valid: errors.length === 0, description: errors.join(", ") };
24 | }
25 |
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # LSKV
2 |
3 | ## Request flows
4 |
5 | ### Put
6 |
7 | ```mermaid
8 | sequenceDiagram
9 | autonumber
10 | participant User
11 | participant App
12 | participant KV
13 | participant Index
14 | participant CCF
15 | participant Nodes as Other Nodes
16 |
17 | User->>App: /etcdserverpb.KV/Put
18 | rect rgba(191, 223, 255, 0.5)
19 | note over App,CCF: Inside single CCF node
20 | App->>App: parse grpc payload
21 | App->>App: create kvstore wrapper
22 | App->>KV: kvstore.put(key, value)
23 | App->>CCF: transaction commit
24 | CCF--)Index: handle_committed_transaction
25 | App->>App: Fill in header with optimistic transaction id and committed transaction id
26 | end
27 | App->>User: send response
28 | CCF--)Nodes: Consensus
29 | ```
30 |
31 | ### Range
32 |
33 | ```mermaid
34 | sequenceDiagram
35 | autonumber
36 | participant User
37 | participant App
38 | participant KV
39 | participant Index
40 | participant CCF
41 |
42 | User->>App: /etcdserverpb.KV/Range
43 | rect rgba(191, 223, 255, 0.5)
44 | note over App,CCF: Inside single CCF node
45 | App->>App: parse grpc payload
46 | App->>App: create kvstore wrapper
47 | alt latest (revision == 0)
48 | App->>KV: kvstore.range(...)/kvstore.get(...)
49 | KV->>App: return KVs
50 | note over App: this reads from the local map so may observe values that have not been committed
51 | else historical (revision > 0)
52 | App->>Index: index.range(...)/index.get(...)
53 | Index->>App: return KVs
54 | note over App: this reads from the index so only observes values that have been committed but may be stale
55 | end
56 | App->>App: Fill in header with optimistic transaction id and committed transaction id
57 | end
58 | App->>User: send response
59 | ```
60 |
61 | ### Receipts
62 |
63 | See [Receipts](./receipts.md) for how to verify the receipt.
64 |
65 | ```mermaid
66 | sequenceDiagram
67 | autonumber
68 | participant User
69 | participant Proxy
70 | participant App
71 |
72 | note over User: Make a mutating request so we have a request and response
73 |
74 | User->>Proxy: /etcdserverpb.Receipt/GetReceipt
75 | Proxy->>App: /etcdserverpb.Receipt/GetReceipt
76 | App->>Proxy: 202 Accepted, retry-after: 3s
77 | Proxy->>User: 202 Accepted, retry-after: 3s
78 | note over Proxy: A smart proxy may instead handle the retry internally
79 | note over User: Retry request
80 | User->>Proxy: /etcdserverpb.Receipt/GetReceipt
81 | Proxy->>App: /etcdserverpb.Receipt/GetReceipt
82 | App-->>App: Get receipt
83 | App->>Proxy: send receipt in response with header
84 | Proxy->>User: send receipt in response with header
85 | User->>User: Verify receipt with given request (and response)
86 | ```
87 |
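88 | For reference, these flows can be exercised over the JSON mapping of the gRPC
89 | API. A minimal put in Python (a sketch; assumes the sandbox defaults and the
90 | certificate paths used by `etcdctl.sh`, with key and value base64-encoded):
91 |
92 | ```python
93 | import requests
94 |
95 | resp = requests.post(
96 |     "https://127.0.0.1:8000/v3/kv/put",
97 |     json={"key": "Zm9v", "value": "YmFy"},  # base64("foo"), base64("bar")
98 |     cert=(
99 |         "workspace/sandbox_common/user0_cert.pem",
100 |         "workspace/sandbox_common/user0_privk.pem",
101 |     ),
102 |     verify="workspace/sandbox_common/service_cert.pem",
103 | )
104 | print(resp.json())
105 | ```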
--------------------------------------------------------------------------------
/docs/receipts.md:
--------------------------------------------------------------------------------
1 | # Receipt verification
2 |
3 | To verify a receipt, follow https://microsoft.github.io/CCF/main/use_apps/verify_tx.html#receipt-verification
4 |
5 | To obtain the custom claims digest:
6 |
7 | 1. Clear the header field in the response.
8 | 2. Build a [`ReceiptClaims`](../proto/lskvserver.proto) protobuf type from the request sent and response received.
9 | 3. Serialize the `ReceiptClaims` and compute the sha256 hex digest.
10 | 4. Compare this hex digest with the `claims_digest` in the receipt (see the sketch below).
11 |
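12 | A minimal sketch of these steps in Python, assuming the generated protobuf
13 | bindings and an already-parsed put request/response pair; `claims_digest` here
14 | stands for the value taken from the receipt's leaf components (see
15 | `benchmark/receipt_verify.py` for a complete example):
16 |
17 | ```python
18 | import hashlib
19 |
20 | response.ClearField("header")  # 1. clear the header field
21 | claims = lskvserver_pb2.ReceiptClaims()  # 2. build the claims type
22 | claims.request_put.CopyFrom(request)
23 | claims.response_put.CopyFrom(response)
24 | digest = hashlib.sha256(claims.SerializeToString()).hexdigest()  # 3. digest
25 | assert digest == claims_digest  # 4. compare with the receipt's claims digest
26 | ```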
--------------------------------------------------------------------------------
/etcdctl.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | set -e
6 |
7 | bindir=bin
8 |
9 | install_etcdctl() {
10 | ETCD_VER="v3.5.4"
11 |
12 | GITHUB_URL=https://github.com/etcd-io/etcd/releases/download
13 | DOWNLOAD_URL=${GITHUB_URL}
14 |
15 | rm -f /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz
16 | rm -rf /tmp/etcd-download-test && mkdir -p /tmp/etcd-download-test
17 |
18 | curl -L ${DOWNLOAD_URL}/${ETCD_VER}/etcd-${ETCD_VER}-linux-amd64.tar.gz -o /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz
19 | tar xzvf /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz -C /tmp/etcd-download-test --strip-components=1
20 | rm -f /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz
21 |
22 | mkdir -p $bindir
23 | mv /tmp/etcd-download-test/etcdctl $bindir/etcdctl
24 | }
25 |
26 | if [ ! -f "$bindir/etcdctl" ]; then
27 | install_etcdctl
28 | fi
29 |
30 | workspace_common=workspace/sandbox_common
31 |
32 | cmd="$bindir/etcdctl --endpoints=https://127.0.0.1:8000 --cacert=$workspace_common/service_cert.pem --cert=$workspace_common/user0_cert.pem --key=$workspace_common/user0_privk.pem $*"
33 | echo "$cmd" >&2
34 | $cmd
35 |
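36 | # Example usage (illustrative), once a local sandbox is serving on
37 | # https://127.0.0.1:8000 with the workspace certificates above:
38 | #
39 | #   ./etcdctl.sh put key1 value1
40 | #   ./etcdctl.sh get key1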
--------------------------------------------------------------------------------
/flake.lock:
--------------------------------------------------------------------------------
1 | {
2 | "nodes": {
3 | "flake-utils": {
4 | "inputs": {
5 | "systems": "systems"
6 | },
7 | "locked": {
8 | "lastModified": 1689068808,
9 | "narHash": "sha256-6ixXo3wt24N/melDWjq70UuHQLxGV8jZvooRanIHXw0=",
10 | "owner": "numtide",
11 | "repo": "flake-utils",
12 | "rev": "919d646de7be200f3bf08cb76ae1f09402b6f9b4",
13 | "type": "github"
14 | },
15 | "original": {
16 | "owner": "numtide",
17 | "repo": "flake-utils",
18 | "type": "github"
19 | }
20 | },
21 | "nix-filter": {
22 | "locked": {
23 | "lastModified": 1687178632,
24 | "narHash": "sha256-HS7YR5erss0JCaUijPeyg2XrisEb959FIct3n2TMGbE=",
25 | "owner": "numtide",
26 | "repo": "nix-filter",
27 | "rev": "d90c75e8319d0dd9be67d933d8eb9d0894ec9174",
28 | "type": "github"
29 | },
30 | "original": {
31 | "owner": "numtide",
32 | "repo": "nix-filter",
33 | "type": "github"
34 | }
35 | },
36 | "nixpkgs": {
37 | "locked": {
38 | "lastModified": 1691368598,
39 | "narHash": "sha256-ia7li22keBBbj02tEdqjVeLtc7ZlSBuhUk+7XTUFr14=",
40 | "owner": "NixOS",
41 | "repo": "nixpkgs",
42 | "rev": "5a8e9243812ba528000995b294292d3b5e120947",
43 | "type": "github"
44 | },
45 | "original": {
46 | "owner": "NixOS",
47 | "ref": "nixos-unstable",
48 | "repo": "nixpkgs",
49 | "type": "github"
50 | }
51 | },
52 | "root": {
53 | "inputs": {
54 | "flake-utils": "flake-utils",
55 | "nix-filter": "nix-filter",
56 | "nixpkgs": "nixpkgs"
57 | }
58 | },
59 | "systems": {
60 | "locked": {
61 | "lastModified": 1681028828,
62 | "narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
63 | "owner": "nix-systems",
64 | "repo": "default",
65 | "rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
66 | "type": "github"
67 | },
68 | "original": {
69 | "owner": "nix-systems",
70 | "repo": "default",
71 | "type": "github"
72 | }
73 | }
74 | },
75 | "root": "root",
76 | "version": 7
77 | }
78 |
--------------------------------------------------------------------------------
/flake.nix:
--------------------------------------------------------------------------------
1 | {
2 | description = "Confidential computing packages";
3 |
4 | inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
5 | inputs.flake-utils.url = "github:numtide/flake-utils";
6 | inputs.nix-filter.url = "github:numtide/nix-filter";
7 |
8 | outputs = {
9 | self,
10 | nixpkgs,
11 | flake-utils,
12 | nix-filter,
13 | }: let
14 | system = "x86_64-linux";
15 | pkgs = import nixpkgs {
16 | inherit system;
17 | overlays = [nix-filter.overlays.default];
18 | };
19 | nix = import ./nix {
20 | inherit pkgs;
21 | };
22 | in {
23 | packages.${system} =
24 | flake-utils.lib.filterPackages system nix;
25 |
26 | overlays.default = _final: _prev: ((nix.lib.forAllPlatforms {
27 | inherit (self.packages.${system}) ccf ccf-sandbox lskv lskv-sandbox;
28 | })
29 | // {
30 | inherit (self.packages.${system}) openenclave az-dcap sgx-dcap;
31 | });
32 |
33 | checks.${system} =
34 | pkgs.lib.attrsets.filterAttrs
35 | (name: _value: name != "override" && name != "overrideDerivation")
36 | nix.ci-checks
37 | // {
38 | inherit (self.packages.${system}) lskv-sandbox-virtual lskv-sandbox-sgx;
39 | };
40 |
41 | lib = nix.lskvlib;
42 |
43 | formatter.${system} = pkgs.alejandra;
44 |
45 | apps.${system} = {
46 | ccf-sandbox-virtual = flake-utils.lib.mkApp {
47 | drv = self.packages.${system}.ccf-sandbox-virtual;
48 | exePath = "/bin/sandbox.sh";
49 | };
50 | ccf-sandbox-sgx = flake-utils.lib.mkApp {
51 | drv = self.packages.${system}.ccf-sandbox-sgx;
52 | exePath = "/bin/sandbox.sh";
53 | };
54 |
55 | lskv-sandbox-virtual = flake-utils.lib.mkApp {
56 | drv = self.packages.${system}.lskv-sandbox-virtual;
57 | exePath = "/bin/lskv-sandbox.sh";
58 | };
59 | lskv-sandbox-sgx = flake-utils.lib.mkApp {
60 | drv = self.packages.${system}.lskv-sandbox-sgx;
61 | exePath = "/bin/lskv-sandbox.sh";
62 | };
63 |
64 | oesign = flake-utils.lib.mkApp {
65 | drv = self.packages.${system}.openenclave;
66 | exePath = "/bin/oesign";
67 | };
68 | };
69 |
70 | devShells.${system}.default =
71 | pkgs.mkShell
72 | {
73 | packages = with pkgs;
74 | [
75 | clang-tools_10
76 | shellcheck
77 | nodePackages.npm
78 | nodejs
79 | cmake-format
80 |
81 | # for benchmarking
82 | python3Packages.pandas
83 | python3Packages.loguru
84 | python3Packages.seaborn
85 | python3Packages.paramiko
86 | jupyter
87 |
88 | # for go-ycsb
89 | go
90 | ]
91 | ++ [
92 | nix.python3.pkgs.cimetrics
93 | ];
94 | inputsFrom = [nix.lskv-virtual];
95 | };
96 | };
97 | }
98 |
--------------------------------------------------------------------------------
/integration-tests.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | parallelism=1
6 |
7 | build_dir=build
8 |
9 | test_dir=$(realpath $build_dir/3rdparty/etcd/tests)
10 | lskv_dir=$(realpath ./.)
11 |
12 | echo "changing dir to $test_dir"
13 | cd "$test_dir" || exit 1
14 |
15 | cmd="env ETCD_VERIFY=all lskv_DIR=$lskv_dir VENV_DIR=$lskv_dir/.venv_ccf_sandbox go test --tags=integration --timeout=10m -p=$parallelism -run=TestTxn|TestKV|TestLease $test_dir/common/... $*"
16 | echo "$cmd"
17 | $cmd
18 |
--------------------------------------------------------------------------------
/nix/README.md:
--------------------------------------------------------------------------------
1 | # Nix configuration for building and running LSKV
2 |
3 | This configuration covers dependencies such as Python packages and C/C++ projects, primarily CCF and the projects it depends on.
4 |
5 | ## Getting nix
6 |
7 | If you don't have nix installed then follow the [official steps](https://nixos.org/download.html).
8 |
9 | You'll also need to [enable flakes](https://nixos.wiki/wiki/Flakes#Enable_flakes).
10 |
11 | ## Listing outputs
12 |
13 | ```sh
14 | nix flake show
15 | ```
16 |
17 | ## Building LSKV
18 |
19 | To build LSKV in virtual mode, run the following from the root of the repo:
20 |
21 | ```sh
22 | nix build .#lskv-virtual
23 | ```
24 |
25 | Replace `virtual` with `sgx` to build for that platform.
26 |
27 | ## Running LSKV
28 |
29 | LSKV comes packaged with a sandbox, based on the CCF sandbox, that uses the nix-built library directly.
30 | To build and run it:
31 |
32 | ```sh
33 | nix run .#lskv-sandbox-virtual
34 | ```
35 |
36 | Again, replace `virtual` with `sgx` to run on that platform.
37 |
38 | **Note**: While SGX targets can be built on non-SGX hardware, running them requires SGX hardware.
39 |
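40 | ## Development shell
41 |
42 | The flake also defines a default dev shell with the build and benchmarking tools; enter it from the root of the repo with:
43 |
44 | ```sh
45 | nix develop
46 | ```
47 |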
--------------------------------------------------------------------------------
/nix/az-dcap.nix:
--------------------------------------------------------------------------------
1 | {
2 | fetchFromGitHub,
3 | stdenv,
4 | curl,
5 | nlohmann_json,
6 | makeWrapper,
7 | fetchurl,
8 | lib,
9 | openssl_1_1,
10 | }: let
11 | fetchFromIntelGitHub = {path, ...} @ attrs:
12 | fetchurl ({
13 | url = "https://raw.githubusercontent.com/intel/${path}";
14 | }
15 | // removeAttrs attrs ["path"]);
16 |
17 | files = [
18 | (fetchFromIntelGitHub {
19 | path = "SGXDataCenterAttestationPrimitives/0436284f12f1bd5da7e7a06f6274d36b4c8d39f9/QuoteGeneration/quote_wrapper/common/inc/sgx_ql_lib_common.h";
20 | hash = "sha256-36oxPBt0SmmRqjwtXgP87wOY2tOlbxQEhMZZgjoh4xI=";
21 | })
22 | (fetchFromIntelGitHub {
23 | path = "linux-sgx/1ccf25b64abd1c2eff05ead9d14b410b3c9ae7be/common/inc/sgx_report.h";
24 | hash = "sha256-NCDH3uhSlRRx0DDA/MKhWlUnA1rJ94O4DLuzqmnfr0I=";
25 | })
26 | (fetchFromIntelGitHub {
27 | path = "linux-sgx/1ccf25b64abd1c2eff05ead9d14b410b3c9ae7be/common/inc/sgx_key.h";
28 | hash = "sha256-3ApIE2QevE8MeU0y5UGvwaKD9OOJ3H9c5ibxsBSr49g=";
29 | })
30 | (fetchFromIntelGitHub {
31 | path = "linux-sgx/1ccf25b64abd1c2eff05ead9d14b410b3c9ae7be/common/inc/sgx_attributes.h";
32 | hash = "sha256-fPuwchUP9L1Zi3BoFfhmRPe7CgjHlafNrKeZDOF2l1k=";
33 | })
34 | ];
35 | in
36 | stdenv.mkDerivation rec {
37 | pname = "az-dcap";
38 | version = "1.11.2";
39 | src = fetchFromGitHub {
40 | owner = "microsoft";
41 | repo = "Azure-DCAP-Client";
42 | rev = version;
43 | hash = "sha256-EYj3jnzTyJRl6N7avNf9VrB8r9U6zIE6wBNeVsMtWCA=";
44 | };
45 | nativeBuildInputs = [makeWrapper];
46 | buildInputs = [
47 | (curl.override {openssl = openssl_1_1;})
48 | nlohmann_json
49 | ];
50 |
51 | configurePhase = ''
52 | cd src/Linux
53 | cat Makefile.in | sed "s|##CURLINC##|${curl.dev}/include/curl|g" > Makefile
54 | ${lib.flip (lib.concatMapStringsSep "\n") files (f: "cp ${f} ${f.name}")}
55 | '';
56 | makeFlags = ["prefix=$(out)"];
57 | }
58 |
--------------------------------------------------------------------------------
/nix/ccf-sandbox.nix:
--------------------------------------------------------------------------------
1 | {
2 | ccf,
3 | openenclave,
4 | python3,
5 | stdenv,
6 | platform ? "virtual",
7 | }: let
8 | infra = python3.pkgs.toPythonApplication python3.pkgs.python-ccf-infra;
9 | c = ccf.override {inherit platform;};
10 | in
11 | stdenv.mkDerivation {
12 | pname = "ccf-sandbox-${platform}";
13 | inherit (c) version src;
14 |
15 | dontBuild = true;
16 |
17 | installPhase = ''
18 | install -m755 -D ${./ccf-sandbox.sh} $out/bin/sandbox.sh
19 | install -m644 -t $out/bin \
20 | samples/constitutions/default/actions.js \
21 | samples/constitutions/default/validate.js \
22 | samples/constitutions/sandbox/resolve.js \
23 | samples/constitutions/default/apply.js
24 |
25 | substituteInPlace $out/bin/sandbox.sh \
26 | --replace CCF_ROOT "${c}" \
27 | --replace OE_ROOT "${openenclave}" \
28 | --replace START_NETWORK_SCRIPT "${infra}/bin/start_network.py"
29 | '';
30 | }
31 |
--------------------------------------------------------------------------------
/nix/ccf-sandbox.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 | is_package_specified=false
6 | is_js_bundle_specified=false
7 |
8 | extra_args=("$@")
9 | while [ "$1" != "" ]; do
10 | case $1 in
11 | -p | --package)
12 | is_package_specified=true
13 | shift
14 | ;;
15 | -p=* | --package=*)
16 | is_package_specified=true
17 | ;;
18 | --js-app-bundle)
19 | is_js_bundle_specified=true
20 | shift
21 | ;;
22 | --js-app-bundle=*)
23 | is_js_bundle_specified=true
24 | ;;
25 | *) ;;
26 |
27 | esac
28 | shift
29 | done
30 |
31 | if [ ${is_package_specified} == false ] && [ ${is_js_bundle_specified} == false ]; then
32 | # Only on install tree, default to installed js logging app
33 | echo "No package/app specified. Defaulting to installed JS logging app"
34 | extra_args+=(--package "CCF_ROOT/lib/libjs_generic")
35 | extra_args+=(--js-app-bundle "CCF_ROOT/samples/logging/js")
36 | fi
37 |
38 | PATH_HERE=$(dirname "$(realpath -s "$0")")
39 |
40 | START_NETWORK_SCRIPT \
41 | --binary-dir CCF_ROOT/bin \
42 | --oe-binary OE_ROOT/bin \
43 | --enclave-type virtual \
44 | --initial-member-count 1 \
45 | --constitution "${PATH_HERE}"/actions.js \
46 | --constitution "${PATH_HERE}"/validate.js \
47 | --constitution "${PATH_HERE}"/resolve.js \
48 | --constitution "${PATH_HERE}"/apply.js \
49 | --ledger-chunk-bytes 5000000 \
50 | --snapshot-tx-interval 10000 \
51 | --initial-node-cert-validity-days 90 \
52 | --initial-service-cert-validity-days 90 \
53 | --label sandbox \
54 | "${extra_args[@]}"
55 |
--------------------------------------------------------------------------------
/nix/ccf.nix:
--------------------------------------------------------------------------------
1 | {
2 | fetchFromGitHub,
3 | pkg-config,
4 | cmake,
5 | ninja,
6 | stdenv,
7 | openenclave,
8 | libuv,
9 | # az-dcap,
10 | # sgx-dcap,
11 | # sgx-psw,
12 | makeWrapper,
13 | protobuf,
14 | openssl,
15 | platform ? "virtual",
16 | }: let
17 | toRemove =
18 | if platform == "sgx"
19 | then ''
20 | # These are signed with a randomly generated key, which makes the build non-reproducible
21 | rm $out/lib/libjs_generic.enclave.so.debuggable \
22 | $out/lib/libjs_generic.enclave.so.signed
23 | ''
24 | else "";
25 | in
26 | stdenv.mkDerivation rec {
27 | pname = "ccf-${platform}";
28 | version = "4.0.7";
29 | src = fetchFromGitHub {
30 | owner = "microsoft";
31 | repo = "CCF";
32 | name = "ccf-${version}";
33 | rev = "ccf-${version}";
34 | hash = "sha256-CofADLExBTo3CH7iACKKNxMsSpy/ZBWBRaXc3ELHAd4=";
35 | };
36 | patches = [
37 | patches/ccf-no-python.diff
38 | patches/ccf-no-python-pb2.diff
39 | patches/ccf-protoc-binary.diff
40 | patches/ccf-ignore-submitter.diff
41 | ];
42 |
43 | nativeBuildInputs = [
44 | cmake
45 | ninja
46 | pkg-config
47 | libuv
48 | protobuf
49 | # arrow-cpp
50 | # sgx-dcap
51 | openenclave
52 | makeWrapper
53 | ];
54 |
55 | cmakeFlags = [
56 | "-DBUILD_TESTS=OFF"
57 | "-DBUILD_UNIT_TESTS=OFF"
58 | "-DLVI_MITIGATIONS=OFF"
59 | "-DCOMPILE_TARGET=${platform}"
60 | ];
61 |
62 | NIX_CFLAGS_COMPILE = "-Wno-unused-command-line-argument";
63 | NIX_NO_SELF_RPATH = "1";
64 |
65 | postInstall = ''
66 | # wrapProgram $out/bin/cchost \
67 | # --suffix LD_LIBRARY_PATH ':' "''${az-dcap}/lib:''${sgx-psw}/lib:''${sgx-dcap}/lib"
68 |
69 | wrapProgram $out/bin/keygenerator.sh \
70 | --prefix PATH ':' "${openssl}/bin"
71 |
72 | ${toRemove}
73 | '';
74 | }
75 |
--------------------------------------------------------------------------------
/nix/ci-checks.nix:
--------------------------------------------------------------------------------
1 | {
2 | runCommand,
3 | writeShellScriptBin,
4 | shellcheck,
5 | nodePackages,
6 | python3Packages,
7 | cpplint,
8 | alejandra,
9 | deadnix,
10 | statix,
11 | shfmt,
12 | cmake-format,
13 | python-ccf,
14 | types-paramiko,
15 | }: let
16 | pythonDeps = with python3Packages; [
17 | loguru
18 | httpx
19 | pandas
20 | seaborn
21 | pytest
22 | typing-extensions
23 | types-protobuf
24 | python-ccf
25 | paramiko
26 | types-paramiko
27 | ];
28 | in {
29 | checks = {
30 | shellcheck =
31 | runCommand "shellcheck"
32 | {
33 | buildInputs = [shellcheck];
34 | } ''
35 | find ${../.} -name '*.sh' ! -name "3rdparty" | xargs shellcheck -s bash -e SC2044,SC2002,SC1091,SC2181
36 | mkdir $out
37 | '';
38 |
39 | cmake-format =
40 | runCommand "cmake-format"
41 | {
42 | buildInputs = [cmake-format];
43 | } ''
44 | find ${../.} \( -name '*.cmake' -o -name 'CMakeLists.txt' \) ! -name "3rdparty" | xargs cmake-format --check
45 | mkdir $out
46 | '';
47 |
48 | prettier =
49 | runCommand "prettier"
50 | {
51 | buildInputs = [nodePackages.prettier];
52 | } ''
53 | for e in ts js md yaml yml json; do
54 | find ${../.} -name "*.$e" ! -name "3rdparty" | xargs prettier --check
55 | done
56 | mkdir $out
57 | '';
58 |
59 | black =
60 | runCommand "black"
61 | {
62 | buildInputs = [python3Packages.black];
63 | } ''
64 | find ${../.} -name '*.py' ! -name "3rdparty" | xargs black --check
65 | mkdir $out
66 | '';
67 |
68 | pylint =
69 | runCommand "pylint"
70 | {
71 | buildInputs = [python3Packages.pylint] ++ pythonDeps;
72 | } ''
73 | find ${../.} -name '*.py' ! -name "3rdparty" | xargs pylint --ignored-modules "*_pb2"
74 | mkdir $out
75 | '';
76 |
77 | mypy =
78 | runCommand "mypy"
79 | {
80 | buildInputs = [python3Packages.mypy] ++ pythonDeps;
81 | } ''
82 | find ${../.} -name '*.py' ! -name "3rdparty" | xargs mypy
83 | mkdir $out
84 | '';
85 |
86 | cpplint =
87 | runCommand "cpplint"
88 | {
89 | buildInputs = [cpplint];
90 | } ''
91 | cpplint --filter=-whitespace/braces,-whitespace/indent,-whitespace/comments,-whitespace/newline,-build/include_order,-build/include_subdir,-runtime/references,-runtime/indentation_namespace ${../.}/src/**/*.cpp ${../.}/src/**/*.h
92 | mkdir $out
93 | '';
94 |
95 | nixfmt =
96 | runCommand "nixfmt"
97 | {
98 | buildInputs = [alejandra];
99 | } ''
100 | alejandra --check ${../.}/**/*.nix
101 | mkdir $out
102 | '';
103 |
104 | deadnix =
105 | runCommand "deadnix"
106 | {
107 | buildInputs = [deadnix];
108 | } ''
109 | deadnix --fail ${../.}
110 | mkdir $out
111 | '';
112 |
113 | statix =
114 | runCommand "statix"
115 | {
116 | buildInputs = [statix];
117 | } ''
118 | statix check ${../.}
119 | mkdir $out
120 | '';
121 |
122 | shfmt =
123 | runCommand "shfmt" {}
124 | ''
125 | find ${./.} -name '*.sh' ! -name "3rdparty" | xargs ${shfmt}/bin/shfmt --diff --simplify --case-indent --indent 2
126 | mkdir $out
127 | '';
128 | };
129 | fixes = {
130 | prettier =
131 | writeShellScriptBin "prettier"
132 | ''
133 | git ls-files -- . ':!:3rdparty/' | grep -e '\.ts$' -e '\.js$' -e '\.md$' -e '\.yaml$' -e '\.yml$' -e '\.json$' | xargs ${nodePackages.prettier}/bin/prettier --write
134 | '';
135 |
136 | black =
137 | writeShellScriptBin "black"
138 | ''
139 | git ls-files -- . ':!:3rdparty/' | grep -e '\.py$' | xargs ${python3Packages.black}/bin/black
140 | '';
141 |
142 | nixfmt =
143 | writeShellScriptBin "nixfmt"
144 | ''
145 | git ls-files -- . ':!:3rdparty/' | grep -e '\.nix$' | xargs ${alejandra}/bin/alejandra
146 | '';
147 |
148 | shfmt =
149 | writeShellScriptBin "shfmt"
150 | ''
151 | git ls-files -- . ':!:3rdparty/' | grep -e '\.sh$'| xargs ${shfmt}/bin/shfmt --write --simplify --case-indent --indent 2
152 | '';
153 |
154 | cmake-format =
155 | writeShellScriptBin "cmake-format"
156 | ''
157 | git ls-files -- . ':!:3rdparty/' | grep -e '\.cmake$' -e '^CMakeLists.txt' | xargs ${cmake-format}/bin/cmake-format --in-place
158 | '';
159 |
160 | statix =
161 | writeShellScriptBin "statix"
162 | ''
163 | ${statix}/bin/statix fix .
164 | '';
165 | };
166 | }
167 |
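168 | # The attributes above are exposed through lib.nix as `ci-check-<name>` and
169 | # `ci-fix-<name>` packages, so a single check should be runnable with e.g.
170 | # `nix build .#ci-check-shellcheck`.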
--------------------------------------------------------------------------------
/nix/default.nix:
--------------------------------------------------------------------------------
1 | {
2 | pkgs,
3 | packageOverrides ? (_self: _super: {}),
4 | }:
5 | pkgs.lib.makeScope pkgs.newScope (
6 | self: let
7 | lskvlib =
8 | pkgs.callPackage ./lib.nix {};
9 |
10 | # A python3 derivation that is extended with some CC related
11 | # packages.
12 | python3 = pkgs.python3.override {
13 | packageOverrides = pself: _psuper: {
14 | # Some generic python packages that are missing from
15 | # nixpkgs.
16 | columnar = pself.callPackage ./python/columnar.nix {};
17 | string-color = pself.callPackage ./python/string-color.nix {};
18 | adtk = pself.callPackage ./python/adtk.nix {};
19 | pycose = pself.callPackage ./python/pycose.nix {};
20 | cimetrics = pself.callPackage ./python/cimetrics.nix {};
21 | better-exceptions = pself.callPackage ./python/better-exceptions.nix {};
22 |
23 | types-paramiko = pself.callPackage ./python/types-paramiko.nix {};
24 |
25 | python-ccf = pself.callPackage ./python/python-ccf.nix {inherit ccf;};
26 | python-ccf-infra = pself.callPackage ./python/python-ccf-infra.nix {};
27 | };
28 | };
29 |
30 | ccf = self.callPackage ./ccf.nix {
31 | stdenv = pkgs.llvmPackages_16.libcxxStdenv;
32 | };
33 | ccf-sandbox = self.callPackage ./ccf-sandbox.nix {inherit ccf;};
34 | lskv = self.callPackage ./lskv.nix {
35 | inherit ccf;
36 | stdenv = pkgs.llvmPackages_16.libcxxStdenv;
37 | };
38 | lskv-sandbox = self.callPackage ./lskv-sandbox.nix {inherit ccf-sandbox lskv;};
39 | packages = lskvlib.forAllPlatforms {
40 | inherit ccf ccf-sandbox lskv lskv-sandbox;
41 | };
42 | ci-checks-pkgs = pkgs.callPackage ./ci-checks.nix {inherit (python3.pkgs) python-ccf types-paramiko;};
43 | ci-checks = lskvlib.ciChecks ci-checks-pkgs.checks;
44 | ci-fixes = lskvlib.ciFixes ci-checks-pkgs.fixes;
45 | in
46 | rec {
47 | inherit lskvlib ci-checks ci-fixes python3;
48 | inherit (python3.pkgs) python-ccf;
49 |
50 | ci-check-all = lskvlib.ciChecksAll ci-checks-pkgs.checks;
51 | ci-fix-all = lskvlib.ciFixesAll ci-checks-pkgs.fixes;
52 |
53 | az-dcap = self.callPackage ./az-dcap.nix {};
54 | sgx-dcap = self.callPackage ./sgx-dcap.nix {};
55 |
56 | openenclave-version = "0.19.3";
57 | openenclave-src = pkgs.fetchFromGitHub {
58 | owner = "openenclave";
59 | repo = "openenclave";
60 | rev = "v${openenclave-version}";
61 | hash = "sha256-RN7Mq6RO09CZOEoi/nYpPfa7TT1I5FYKqET8wRXnIxU=";
62 | fetchSubmodules = true;
63 | };
64 | lvi-mitigation = self.callPackage ./lvi-mitigation.nix {};
65 | openenclave = self.callPackage ./openenclave.nix {
66 | stdenv = pkgs.llvmPackages_11.libcxxStdenv;
67 | openssl = pkgs.openssl_1_1;
68 | };
69 |
70 | k6 = self.callPackage ./k6.nix {};
71 |
72 | mkShell = args:
73 | (pkgs.mkShell.override {
74 | stdenv = pkgs.llvmPackages_16.libcxxStdenv;
75 | }) ({
76 | NIX_CFLAGS_COMPILE = "-Wno-unused-command-line-argument";
77 | NIX_NO_SELF_RPATH = "1";
78 | }
79 | // args);
80 | }
81 | // ci-checks
82 | // ci-fixes
83 | // packages
84 | )
85 |
--------------------------------------------------------------------------------
/nix/k6.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildGoModule,
3 | fetchFromGitHub,
4 | }:
5 | buildGoModule {
6 | name = "k6";
7 | version = "head";
8 | src = fetchFromGitHub {
9 | owner = "grafana";
10 | repo = "k6";
11 | rev = "2fe2dd32b3827eeeeb3959aff63a6b402aab0a5a";
12 | sha256 = "sha256-Y5s4w2yKwGu7nfegQUk14VbQiiU5Iv/GAme9LKhL3i0=";
13 | };
14 |
15 | patches = [
16 | ../patches/k6-micro.diff
17 | ];
18 |
19 | vendorSha256 = null;
20 |
21 | subPackages = ["./"];
22 | }
23 |
--------------------------------------------------------------------------------
/nix/lib.nix:
--------------------------------------------------------------------------------
1 | {
2 | lib,
3 | symlinkJoin,
4 | writeShellScriptBin,
5 | }: rec {
6 | # the list of supported platforms we can build for
7 | platforms = ["virtual" "sgx" "snp"];
8 |
9 | # create a set of per-platform derivations with the name "$pkgname-$platform"
10 | forPlatform = platform: lib.attrsets.mapAttrs' (name: value: lib.attrsets.nameValuePair "${name}-${platform}" (value.override {inherit platform;}));
11 |
12 | # generate a set of derivations for each platform
13 | forPlatforms = platforms: pkgs:
14 | lib.lists.foldl' lib.trivial.mergeAttrs {} (
15 | map (platform: forPlatform platform pkgs) platforms
16 | );
17 |
18 | # generate a set of derivations for all supported platforms
19 | forAllPlatforms = forPlatforms platforms;
20 |
21 | ciChecks =
22 | lib.attrsets.mapAttrs' (name: lib.attrsets.nameValuePair "ci-check-${name}");
23 |
24 | ciFixes =
25 | lib.attrsets.mapAttrs' (name: lib.attrsets.nameValuePair "ci-fix-${name}");
26 |
27 | ciChecksAll = pkgs: let
28 | ci-checks = ciChecks pkgs;
29 | in
30 | symlinkJoin {
31 | name = "ci-check-all";
32 | paths = lib.attrsets.attrValues ci-checks;
33 | };
34 |
35 | ciFixesAll = pkgs: let
36 | ci-fixes = ciFixes pkgs;
37 | ci-fix-all-pkgs = symlinkJoin {
38 | name = "ci-fix-all";
39 | paths = lib.attrsets.attrValues ci-fixes;
40 | };
41 | in
42 | writeShellScriptBin "ci-fix-all" ''
43 | for bin in ${ci-fix-all-pkgs}/bin/*; do
44 | echo "Running $bin"
45 | $bin
46 | done
47 | '';
48 | }
49 |
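50 | # Example: `forAllPlatforms { inherit lskv; }` evaluates to
51 | # { lskv-virtual = ...; lskv-sgx = ...; lskv-snp = ...; }, with each value
52 | # overridden with the matching `platform` argument.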
--------------------------------------------------------------------------------
/nix/lskv-sandbox.nix:
--------------------------------------------------------------------------------
1 | {
2 | writeShellScriptBin,
3 | ccf-sandbox,
4 | lskv,
5 | platform ? "virtual",
6 | }: let
7 | enclave_type =
8 | if platform == "virtual"
9 | then "virtual"
10 | else "release";
11 | l = lskv.override {inherit platform;};
12 | sandbox = ccf-sandbox.override {inherit platform;};
13 | in
14 | writeShellScriptBin "lskv-sandbox.sh" ''
15 | ${sandbox}/bin/sandbox.sh --package ${l}/lib/liblskv --enclave-type ${enclave_type} --enclave-platform ${platform} "$@"
16 | ''
17 |
--------------------------------------------------------------------------------
/nix/lskv.nix:
--------------------------------------------------------------------------------
1 | {
2 | stdenv,
3 | cmake,
4 | # sgx-dcap,
5 | openenclave,
6 | ninja,
7 | protobuf,
8 | ccf,
9 | nix-filter,
10 | platform ? "virtual",
11 | }:
12 | stdenv.mkDerivation rec {
13 | pname = "lskv-${platform}";
14 | version = "0.1.0";
15 | src = nix-filter {
16 | root = ./..;
17 | include = [
18 | "CMakeLists.txt"
19 | "cmake"
20 | "oe_sign.conf"
21 | "src"
22 | "proto"
23 | ];
24 | };
25 |
26 | nativeBuildInputs = [
27 | cmake
28 | ninja
29 | protobuf
30 | # sgx-dcap
31 | (ccf.override {inherit platform;})
32 | openenclave
33 | ];
34 |
35 | cmakeFlags = [
36 | "-DCOMPILE_TARGET=${platform}"
37 | (
38 | if platform == "sgx"
39 | then "-DLVI_MITIGATIONS=OFF"
40 | else null
41 | )
42 | ];
43 |
44 | LSKV_VERSION = version;
45 |
46 | NIX_CFLAGS_COMPILE = "-Wno-unused-command-line-argument";
47 | NIX_NO_SELF_RPATH = "1";
48 | }
49 |
--------------------------------------------------------------------------------
/nix/lvi-mitigation.nix:
--------------------------------------------------------------------------------
1 | {
2 | fetchzip,
3 | stdenv,
4 | openenclave-version,
5 | openenclave-src,
6 | clang_10,
7 | gcc,
8 | }: let
9 | intel-tarball = fetchzip {
10 | url = "https://download.01.org/intel-sgx/sgx-linux/2.13/as.ld.objdump.gold.r3.tar.gz";
11 | sha256 = "sha256-gD0LOLebDHZHrV7MW/ApqzdPxazidmDUDqBEnm1JmdQ=";
12 | };
13 | in
14 | stdenv.mkDerivation {
15 | pname = "lvi-mitigation";
16 | version = openenclave-version;
17 | src = openenclave-src;
18 | patches = [patches/openenclave.diff];
19 |
20 | buildInputs = [clang_10 gcc];
21 |
22 | preConfigure = ''
23 | patchShebangs scripts/lvi-mitigation/*
24 |
25 | ln -s ${intel-tarball} intel-tarball
26 |
27 | ./scripts/lvi-mitigation/install_lvi_mitigation_bindir
28 | '';
29 |
30 | dontBuild = true;
31 | dontInstall = true;
32 | }
33 |
--------------------------------------------------------------------------------
/nix/openenclave.nix:
--------------------------------------------------------------------------------
1 | {
2 | stdenv,
3 | fetchurl,
4 | fetchzip,
5 | openenclave-version,
6 | openenclave-src,
7 | cmake,
8 | ninja,
9 | perl,
10 | openssl,
11 | }: let
12 | sgx-h = fetchurl {
13 | url = "https://raw.githubusercontent.com/torvalds/linux/v5.13/arch/x86/include/uapi/asm/sgx.h";
14 | sha256 = "4764b8ce858579d99f1b66bb1e5f04ba149a38aea15649fff19f65f8d9113fd0";
15 | };
16 | compiler-rt = fetchzip {
17 | url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-11.1.0/compiler-rt-11.1.0.src.tar.xz";
18 | hash = "sha256-jycaXF3wGF85B2cwe+1q5fVPhR+/JnaZ+4A8y/qyBag=";
19 | };
20 | libcxx = fetchzip {
21 | url = "https://github.com/llvm/llvm-project/releases/download/llvmorg-11.1.0/libcxx-11.1.0.src.tar.xz";
22 | sha256 = "sha256-UoRPugdPj0FtKp79V1nljehWyhChxgUo3mb/Wyq/RIA=";
23 | };
24 | symcrypt = fetchzip {
25 | url = "https://github.com/microsoft/SymCrypt/releases/download/v103.0.1/symcrypt-linux-oe_full-AMD64-103.0.1-69dbff3.tar.gz";
26 | sha256 = "sha256-VCJlAOnbY2kYlnNv6SxumD4BinntAvpBFkUs9hBxCY4=";
27 | stripRoot = false;
28 | };
29 | in
30 | stdenv.mkDerivation rec {
31 | pname = "openenclave";
32 | version = openenclave-version;
33 | src = openenclave-src;
34 | patches = [
35 | # patches/openenclave.diff
36 | patches/openenclave-pkgconfig.diff
37 | ];
38 | cmakeFlags = [
39 | "-DCMAKE_BUILD_TYPE=RelWithDebInfo"
40 | "-DFETCHCONTENT_SOURCE_DIR_COMPILER-RT-SOURCES=${compiler-rt}"
41 | "-DFETCHCONTENT_SOURCE_DIR_LIBCXX_SOURCES=${libcxx}"
42 | "-DFETCHCONTENT_SOURCE_DIR_SYMCRYPT_PACKAGE=${symcrypt}"
43 | "-DCLANG_INTRINSIC_HEADERS_DIR=${toString stdenv.cc.cc.lib}/lib/clang/${stdenv.cc.version}/include"
44 | "-DENABLE_REFMAN=OFF"
45 | "-DBUILD_TESTS=OFF"
46 |
47 | # oeutil includes an enclave (oeutil_enc), which is signed with a random key.
48 | # This breaks reproducible builds.
49 | "-DBUILD_OEUTIL_TOOL=OFF"
50 |
51 | # "-DCMAKE_BUILD_WITH_INSTALL_RPATH:BOOL=ON"
52 | # "-DCMAKE_INSTALL_RPATH_USE_LINK_PATH:BOOL=ON"
53 | ];
54 |
55 | preConfigure = ''
56 | mkdir -p build/host
57 | mkdir -p build/3rdparty/symcrypt_engine
58 | cp ${sgx-h} build/host/sgx.h
59 | ln -s ${compiler-rt} 3rdparty/compiler-rt/compiler-rt
60 | ln -s ${libcxx} 3rdparty/libcxx/libcxx
61 | ln -s ${symcrypt} build/3rdparty/symcrypt_engine/SymCrypt
62 |
63 | patchShebangs tools/oeutil/gen_pubkey_header.sh
64 | substituteInPlace tools/oeutil/gen_pubkey_header.sh --replace '/var/tmp/oeutil_lock' '.oeutil_lock'
65 | patchShebangs 3rdparty/openssl/append-unsupported
66 | patchShebangs 3rdparty/musl/append-deprecations
67 | '';
68 |
69 | postFixup = ''
70 | substituteInPlace $out/lib/${pname}/cmake/${pname}-*.cmake \
71 | --replace 'set(_IMPORT_PREFIX' '#set(_IMPORT_PREFIX'
72 | '';
73 |
74 | nativeBuildInputs = [cmake ninja perl];
75 | propagatedBuildInputs = [openssl];
76 |
77 | # Not sure if we want to keep this
78 | dontAutoPatchelf = true;
79 |
80 | NIX_CFLAGS_COMPILE = "-Wno-unused-command-line-argument";
81 | NIX_NO_SELF_RPATH = "1";
82 | }
83 |
--------------------------------------------------------------------------------
/nix/patches/ccf-ignore-submitter.diff:
--------------------------------------------------------------------------------
1 | diff --git a/CMakeLists.txt b/CMakeLists.txt
2 | index 6b2a143ae..4a0cdcec9 100644
3 | --- a/CMakeLists.txt
4 | +++ b/CMakeLists.txt
5 | @@ -1052,6 +1052,3 @@ install(FILES ${CMAKE_BINARY_DIR}/cmake/${CCF_PROJECT}-config.cmake
6 | ${CMAKE_BINARY_DIR}/cmake/${CCF_PROJECT}-config-version.cmake
7 | DESTINATION ${CMAKE_INSTALL_PREFIX}/cmake
8 | )
9 | -
10 | -# Perf tool executable
11 | -include(${CCF_DIR}/tests/perf-system/submitter/CMakeLists.txt)
12 |
--------------------------------------------------------------------------------
/nix/patches/ccf-no-python-pb2.diff:
--------------------------------------------------------------------------------
1 | diff --git a/src/apps/external_executor/protobuf/CMakeLists.txt b/src/apps/external_executor/protobuf/CMakeLists.txt
2 | index d35fe3537..3418d7be5 100644
3 | --- a/src/apps/external_executor/protobuf/CMakeLists.txt
4 | +++ b/src/apps/external_executor/protobuf/CMakeLists.txt
5 | @@ -38,24 +38,6 @@ foreach(proto_file ${PROTO_FILES})
6 | ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.cc
7 | )
8 |
9 | - add_custom_command(
10 | - OUTPUT
11 | - ${CMAKE_SOURCE_DIR}/tests/external_executor/${PROTO_NAME_WE}_pb2.py
12 | - ${CMAKE_SOURCE_DIR}/tests/external_executor/${PROTO_NAME_WE}_pb2_grpc.py
13 | - COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/build.sh ${proto_file}
14 | - ${CMAKE_SOURCE_DIR}/tests/external_executor/
15 | - COMMENT "Generate Python source file from protobuf file ${PROTO_NAME}"
16 | - DEPENDS ${proto_file}
17 | - JOB_POOL one_job
18 | - )
19 | - add_custom_target(
20 | - ${PROTO_NAME_WE}_proto_python ALL
21 | - DEPENDS
22 | - ${CMAKE_SOURCE_DIR}/tests/external_executor/${PROTO_NAME_WE}_pb2.py
23 | - ${CMAKE_SOURCE_DIR}/tests/external_executor/${PROTO_NAME_WE}_pb2_grpc.py
24 | - JOB_POOL one_job
25 | - )
26 | -
27 | # For now, copy .proto files across to external executor test
28 | configure_file(
29 | ${proto_file}
30 |
--------------------------------------------------------------------------------
/nix/patches/ccf-no-python.diff:
--------------------------------------------------------------------------------
1 | diff --git a/CMakeLists.txt b/CMakeLists.txt
2 | index f44c51e5..a4333529 100644
3 | --- a/CMakeLists.txt
4 | +++ b/CMakeLists.txt
5 | @@ -208,24 +208,6 @@ install(
6 | PATTERN "*.h"
7 | )
8 |
9 | -# Install CCF Python infrastructure
10 | -install(
11 | - DIRECTORY tests/infra/
12 | - DESTINATION bin/infra
13 | - FILES_MATCHING
14 | - PATTERN "*.py"
15 | - PATTERN "*/__pycache__*" EXCLUDE
16 | -)
17 | -
18 | -install(PROGRAMS tests/sandbox/sandbox.sh DESTINATION bin)
19 | -install(PROGRAMS tests/docker_wrap.sh DESTINATION bin)
20 | -install(FILES samples/constitutions/default/actions.js DESTINATION bin)
21 | -install(FILES samples/constitutions/default/validate.js DESTINATION bin)
22 | -install(FILES samples/constitutions/sandbox/resolve.js DESTINATION bin)
23 | -install(FILES samples/constitutions/default/apply.js DESTINATION bin)
24 | -install(FILES tests/start_network.py DESTINATION bin)
25 | -install(FILES tests/requirements.txt DESTINATION bin)
26 | -
27 | # Generate an ephemeral signing key
28 | add_custom_command(
29 | OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/signing_key.pem
30 |
--------------------------------------------------------------------------------
/nix/patches/ccf-protoc-binary.diff:
--------------------------------------------------------------------------------
1 | diff --git a/src/apps/external_executor/protobuf/CMakeLists.txt b/src/apps/external_executor/protobuf/CMakeLists.txt
2 | index 9be12ed3..e695e810 100644
3 | --- a/src/apps/external_executor/protobuf/CMakeLists.txt
4 | +++ b/src/apps/external_executor/protobuf/CMakeLists.txt
5 | @@ -2,7 +2,7 @@
6 | # Licensed under the Apache 2.0 License.
7 |
8 | # protoc should be installed under /opt/protoc
9 | -set(PROTOC_BINARY_PATH "/opt/protoc/bin/protoc")
10 | +find_program(PROTOC_BINARY_PATH "protoc")
11 |
12 | if(EXISTS ${PROTOC_BINARY_PATH})
13 | message(STATUS "Found protobuf compiler: ${PROTOC_BINARY_PATH}")
14 |
--------------------------------------------------------------------------------
/nix/patches/openenclave.diff:
--------------------------------------------------------------------------------
1 | diff --git a/3rdparty/CMakeLists.txt b/3rdparty/CMakeLists.txt
2 | index 1031b32ee..aa2b59a35 100644
3 | --- a/3rdparty/CMakeLists.txt
4 | +++ b/3rdparty/CMakeLists.txt
5 | @@ -107,7 +107,7 @@ if (OE_TRUSTZONE)
6 | teec
7 | INTERFACE
8 | $
9 | - $/${CMAKE_INSTALL_LIBDIR}/openenclave/optee/libteec/libteec.a>
10 | + $
11 | )
12 |
13 | install(TARGETS teec EXPORT openenclave-targets)
14 | diff --git a/3rdparty/musl/CMakeLists.txt b/3rdparty/musl/CMakeLists.txt
15 | index 548542535..74ceea39a 100644
16 | --- a/3rdparty/musl/CMakeLists.txt
17 | +++ b/3rdparty/musl/CMakeLists.txt
18 | @@ -117,9 +117,9 @@ target_include_directories(
19 | oelibc_includes
20 | INTERFACE
21 | $:${LIBCXX_INCLUDES}>>
22 | - $:$/${CMAKE_INSTALL_INCLUDEDIR}/openenclave/3rdparty/libcxx>>
23 | + $:${CMAKE_INSTALL_INCLUDEDIR}/openenclave/3rdparty/libcxx>>
24 | $
25 | - $/${CMAKE_INSTALL_INCLUDEDIR}/openenclave/3rdparty/libc>
26 | + $
27 | )
28 |
29 | if (CMAKE_C_COMPILER_ID MATCHES GNU AND CMAKE_C_COMPILER_VERSION
30 | diff --git a/cmake/apply_lvi_mitigation.cmake b/cmake/apply_lvi_mitigation.cmake
31 | index a2ea40dab..057b1e3b7 100644
32 | --- a/cmake/apply_lvi_mitigation.cmake
33 | +++ b/cmake/apply_lvi_mitigation.cmake
34 | @@ -4,12 +4,9 @@
35 | # Helper to obtain the version of glibc.
36 | macro (get_glibc_version)
37 | execute_process(
38 | - COMMAND ${CMAKE_C_COMPILER} -print-file-name=libc.so.6
39 | - OUTPUT_VARIABLE GLIBC_PATH
40 | + COMMAND bash "-c" "ldd --version | awk '/ldd/{print $NF}'"
41 | + OUTPUT_VARIABLE GLIBC_VERSION
42 | OUTPUT_STRIP_TRAILING_WHITESPACE)
43 | - get_filename_component(GLIBC_PATH ${GLIBC_PATH} REALPATH)
44 | - get_filename_component(GLIBC_VERSION ${GLIBC_PATH} NAME)
45 | - string(REGEX REPLACE "libc-(.*).so" \\1 GLIBC_VERSION ${GLIBC_VERSION})
46 | if (NOT GLIBC_VERSION MATCHES "^[0-9]+\.[0-9]+$")
47 | message(FATAL_ERROR "Glibc version is unknown: ${GLIBC_VERSION}")
48 | endif ()
49 | diff --git a/include/CMakeLists.txt b/include/CMakeLists.txt
50 | index 4bb3468ba..5dbfbb1d5 100644
51 | --- a/include/CMakeLists.txt
52 | +++ b/include/CMakeLists.txt
53 | @@ -8,7 +8,7 @@ target_include_directories(
54 | oe_includes
55 | INTERFACE $
56 | $
57 | - $/${CMAKE_INSTALL_INCLUDEDIR}>)
58 | + $)
59 | install(
60 | DIRECTORY openenclave/bits
61 | DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/openenclave/
62 | diff --git a/scripts/lvi-mitigation/install_lvi_mitigation_bindir b/scripts/lvi-mitigation/install_lvi_mitigation_bindir
63 | index 074456027..9fc16db87 100755
64 | --- a/scripts/lvi-mitigation/install_lvi_mitigation_bindir
65 | +++ b/scripts/lvi-mitigation/install_lvi_mitigation_bindir
66 | @@ -15,43 +15,27 @@ trap 'echo "\"${last_command}\" command filed with exit code $?."' ERR
67 | script=$(readlink -f "$0")
68 | script_path=$(dirname "$script")
69 | curr_path=$(pwd)
70 | -bin_name="lvi_mitigation_bin"
71 | -read -rp "Do you want to install in current directory? [yes/no]: " ans
72 | -if [[ "$ans" == "yes" ]]; then
73 | - install_path="$curr_path"
74 | -else
75 | - read -rp "Please input the directory which you want to install in: " install_path
76 | -fi
77 | -
78 | -if [[ "$install_path" == "" ]]; then
79 | - install_path="$curr_path"
80 | -fi
81 | -
82 | -bin_path="$install_path"/"$bin_name"
83 | -bin_path="${bin_path/#\~/$HOME}"
84 | +bin_path="$out/bin"
85 |
86 | mkdir -p "$bin_path"
87 |
88 | cp "$script_path"/invoke_compiler "$bin_path"/invoke_compiler
89 |
90 | -clang_versions=("" "-8" "-9" "-10")
91 | -for version in "${clang_versions[@]}"; do
92 | - clang="clang$version"
93 | - if ! [ -x "$(command -v "$clang")" ]; then
94 | - continue
95 | - fi
96 | - clangcpp="clang++$version"
97 | - clang_path=$(command -v "$clang")
98 | - clangcpp_path=$(command -v "$clangcpp")
99 | - if [ "$clang_path" ] && [ "$clangcpp_path" ]; then
100 | - ln -sf "$clang_path" "$bin_path"/"$clang"_symlink
101 | - ln -sf "$clangcpp_path" "$bin_path"/"$clangcpp"_symlink
102 | - rm -f "$bin_path"/"$clang"
103 | - "$script_path"/generate_wrapper --name="$clang" --path="$bin_path"
104 | - rm -f "$bin_path"/"$clangcpp"
105 | - "$script_path"/generate_wrapper --name="$clangcpp" --path="$bin_path"
106 | - fi
107 | -done
108 | +clang="clang"
109 | +if ! [ -x "$(command -v "$clang")" ]; then
110 | + continue
111 | +fi
112 | +clangcpp="clang++"
113 | +clang_path=$(command -v "$clang")
114 | +clangcpp_path=$(command -v "$clangcpp")
115 | +if [ "$clang_path" ] && [ "$clangcpp_path" ]; then
116 | + ln -sf "$clang_path" "$bin_path"/"$clang"_symlink
117 | + ln -sf "$clangcpp_path" "$bin_path"/"$clangcpp"_symlink
118 | + rm -f "$bin_path"/"$clang"
119 | + "$script_path"/generate_wrapper --name="$clang" --path="$bin_path"
120 | + rm -f "$bin_path"/"$clangcpp"
121 | + "$script_path"/generate_wrapper --name="$clangcpp" --path="$bin_path"
122 | +fi
123 |
124 | gcc_path=$(command -v gcc)
125 | gcpp_path=$(command -v g++)
126 | @@ -64,22 +48,16 @@ if [ "$gcc_path" ] && [ "$gcpp_path" ]; then
127 | "$script_path"/generate_wrapper --name=g++ --path="$bin_path"
128 | fi
129 |
130 | -# Obtain `as` and `ld` from Intel site.
131 | -intel_site="https://download.01.org/intel-sgx/sgx-linux/"
132 | -intel_tool_version="2.13"
133 | -intel_tarball="as.ld.objdump.gold.r3.tar.gz"
134 | -wget "$intel_site"/"$intel_tool_version"/"$intel_tarball" -O /tmp/"$intel_tarball"
135 | -tar -xf /tmp/"$intel_tarball" -C /tmp
136 | -
137 | -intel_extract_path=external/toolset/ubuntu18.04
138 | +intel_dir=intel-tarball
139 | +intel_extract_path=toolset/ubuntu18.04
140 | rm -f "$bin_path"/as
141 | -cp /tmp/"$intel_extract_path"/as "$bin_path"/as
142 | +cp $intel_dir/"$intel_extract_path"/as "$bin_path"/as
143 | # The `ld` depends on glibc version 2.27.
144 | glibc_version=$(ldd --version | awk '/ldd/{print $NF}')
145 | # shellcheck disable=SC2072
146 | if [[ "$glibc_version" > "2.26" ]]; then
147 | rm -f "$bin_path"/ld
148 | - cp /tmp/"$intel_extract_path"/ld "$bin_path"/ld
149 | + cp $intel_dir/"$intel_extract_path"/ld "$bin_path"/ld
150 | fi
151 |
152 | echo "Installed: $bin_path"
153 | diff --git a/scripts/lvi-mitigation/invoke_compiler b/scripts/lvi-mitigation/invoke_compiler
154 | index 5a2ee6ec3..83bdce946 100755
155 | --- a/scripts/lvi-mitigation/invoke_compiler
156 | +++ b/scripts/lvi-mitigation/invoke_compiler
157 | @@ -24,7 +24,7 @@ function call_compiler {
158 | done
159 |
160 | if [ $lvi_mitigation == 0 ]; then
161 | - /usr/bin/"$compiler" $@
162 | + "$compiler" $@
163 | else
164 | # Ensures that the compiler invokes customized
165 | # `as` and `ld` instead of default ones.
166 |
--------------------------------------------------------------------------------
/nix/python/adtk.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildPythonPackage,
3 | fetchPypi,
4 | matplotlib,
5 | scikit-learn,
6 | pandas,
7 | statsmodels,
8 | tabulate,
9 | }:
10 | buildPythonPackage rec {
11 | pname = "adtk";
12 | version = "0.6.2";
13 | src = fetchPypi {
14 | inherit pname version;
15 | hash = "sha256-bPr7RLWtJqL/1kCut52E/FOD0tQsl6R0IGlbrb7ie+g=";
16 | };
17 | propagatedBuildInputs = [
18 | matplotlib
19 | scikit-learn
20 | pandas
21 | statsmodels
22 | tabulate
23 | ];
24 | }
25 |
--------------------------------------------------------------------------------
/nix/python/better-exceptions.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildPythonPackage,
3 | fetchPypi,
4 | }:
5 | buildPythonPackage rec {
6 | pname = "better-exceptions";
7 | version = "0.2.1";
8 | src = fetchPypi {
9 | inherit version;
10 | pname = "better_exceptions";
11 | hash = "sha256-CnPv75a0j4Z+qYAiesOwDTapJ1Tm0xatLuRy8TYBRYA=";
12 | };
13 | doCheck = false;
14 | }
15 |
--------------------------------------------------------------------------------
/nix/python/ccf_infra_setup.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the Apache 2.0 License.
3 |
4 | """
5 | Setup infra for ccf.
6 | """
7 |
8 |
9 | from setuptools import setup # type: ignore
10 |
11 | PACKAGE_NAME = "infra"
12 |
13 | with open("requirements.txt", encoding="utf-8") as f:
14 | requirements = f.read().splitlines()
15 |
16 | setup(
17 | name=PACKAGE_NAME,
18 | version="2.0.0",
19 | description="Set of tools and utilities for the Confidential Consortium Framework (CCF)",
20 | url="https://github.com/microsoft/CCF/tree/main/python",
21 | license="Apache License 2.0",
22 | author="CCF Team",
23 | classifiers=[
24 | "Development Status :: 3 - Alpha",
25 | "Intended Audience :: Developers",
26 | "Programming Language :: Python :: 3",
27 | ],
28 | packages=[PACKAGE_NAME],
29 | python_requires=">=3.8",
30 | install_requires=requirements,
31 | scripts=["start_network.py"],
32 | )
33 |
--------------------------------------------------------------------------------
/nix/python/cimetrics.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildPythonPackage,
3 | fetchFromGitHub,
4 | matplotlib,
5 | GitPython,
6 | requests,
7 | pythonRelaxDepsHook,
8 | adtk,
9 | pyparsing,
10 | pyyaml,
11 | pymongo,
12 | azure-storage-blob,
13 | }:
14 | buildPythonPackage {
15 | pname = "cimetrics";
16 | version = "0.3.12";
17 | src = fetchFromGitHub {
18 | owner = "jumaffre";
19 | repo = "cimetrics";
20 | rev = "d6647e7f1018ff18e9e9c457851d50e555870e8e";
21 | hash = "sha256-h+/Got2InF9Vfv3hXguL8kq3gfR50jcv9u8YFsKMkKA=";
22 | };
23 |
24 | propagatedBuildInputs = [
25 | matplotlib
26 | GitPython
27 | requests
28 | adtk
29 | pyparsing
30 | pyyaml
31 | pymongo
32 | azure-storage-blob
33 | ];
34 |
35 | nativeBuildInputs = [pythonRelaxDepsHook];
36 | pythonRelaxDeps = ["pyparsing"];
37 |
38 | doCheck = false;
39 | }
40 |
--------------------------------------------------------------------------------
/nix/python/columnar.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildPythonPackage,
3 | fetchPypi,
4 | wcwidth,
5 | toolz,
6 | }:
7 | buildPythonPackage rec {
8 | pname = "columnar";
9 | version = "1.4.1";
10 | src = fetchPypi {
11 | inherit version;
12 | pname = "Columnar";
13 | hash = "sha256-w8tXJzMzsv+c+q/IbwkwdBkzDJf6qI3P4j3wXm+7nHI=";
14 | };
15 | propagatedBuildInputs = [
16 | wcwidth
17 | toolz
18 | ];
19 | doCheck = false;
20 | }
21 |
--------------------------------------------------------------------------------
/nix/python/pycose.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildPythonPackage,
3 | fetchFromGitHub,
4 | attrs,
5 | cryptography,
6 | certvalidator,
7 | cbor2,
8 | ecdsa,
9 | pytest,
10 | }:
11 | buildPythonPackage rec {
12 | pname = "pycose";
13 | version = "1.0.1";
14 | src = fetchFromGitHub {
15 | owner = "TimothyClaeys";
16 | repo = "pycose";
17 | rev = "v${version}";
18 | hash = "sha256-8d6HebWlSKgx7dmOnT7ZZ5mrMfg6mNWhz1hHPv75XF4=";
19 | };
20 | propagatedBuildInputs = [
21 | attrs
22 | cryptography
23 | certvalidator
24 | cbor2
25 | ecdsa
26 | pytest
27 | ];
28 | }
29 |
--------------------------------------------------------------------------------
/nix/python/pyright.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildPythonPackage,
3 | fetchFromGitHub,
4 | }:
5 | buildPythonPackage rec {
6 | pname = "pyright";
7 | version = "1.1.267";
8 | src = fetchFromGitHub {
9 | owner = "microsoft";
10 | repo = "pyright";
11 | rev = version;
12 | hash = "sha256-VOdr/S/KbnR6X/6U8GH73yKH+l9CYyJ1e4a+C/Q9mxg=";
13 | };
14 | sourceRoot = "source/packages/pyright";
15 | }
16 |
--------------------------------------------------------------------------------
/nix/python/python-ccf-infra.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildPythonPackage,
3 | GitPython,
4 | better-exceptions,
5 | cryptography,
6 | docker,
7 | docutils,
8 | httpx,
9 | jinja2,
10 | loguru,
11 | matplotlib,
12 | openapi-spec-validator,
13 | pandas,
14 | paramiko,
15 | psutil,
16 | pyasn1,
17 | pyjwt,
18 | pyopenssl,
19 | grpcio-tools,
20 | python-ccf,
21 | cimetrics,
22 | pycose,
23 | jwcrypto,
24 | cbor2,
25 | }:
26 | buildPythonPackage {
27 | inherit (python-ccf) version src;
28 | pname = "python-ccf-infra";
29 | propagatedBuildInputs =
30 | [
31 | python-ccf
32 | cryptography
33 | httpx
34 | psutil
35 | matplotlib
36 | loguru
37 | pandas
38 | pyasn1
39 | pyjwt
40 | paramiko
41 | jinja2
42 | docker
43 | GitPython
44 | openapi-spec-validator
45 | better-exceptions
46 | pyopenssl
47 | docutils
48 | grpcio-tools
49 | cimetrics
50 | pycose
51 | jwcrypto
52 | cbor2
53 | ]
54 | ++ httpx.optional-dependencies.http2;
55 |
56 | preConfigure = ''
57 | cd tests
58 | cp ${./ccf_infra_setup.py} setup.py
59 | sed -i '/python-iptables/d' requirements.txt
60 | sed -i '/py-spy/d' requirements.txt
61 | sed -i '/locust/d' requirements.txt
62 | sed -i 's/grpcio-tools == 1.44.0/grpcio-tools/' requirements.txt
63 |
64 | sed -i '1s|^|#!/usr/bin/env python3\n|' start_network.py
65 | chmod +x start_network.py
66 | '';
67 |
68 | doCheck = false;
69 | }
70 |
--------------------------------------------------------------------------------
/nix/python/python-ccf.nix:
--------------------------------------------------------------------------------
1 | {
2 | buildPythonPackage,
3 | string-color,
4 | loguru,
5 | cryptography,
6 | pycose,
7 | ccf,
8 | pythonRelaxDepsHook,
9 | }: let
10 | ccf-virtual = ccf.override {platform = "virtual";};
11 | in
12 | buildPythonPackage {
13 | inherit (ccf-virtual) version src;
14 | pname = "ccf";
15 |
16 | # ccf wants cryptography 37, but we only have 36.
17 | nativeBuildInputs = [pythonRelaxDepsHook];
18 | pythonRelaxDeps = ["cryptography"];
19 |
20 | preConfigure = ''
21 | cd python
22 | cat > version.py <<EOF
[… remainder of /nix/python/python-ccf.nix and the files that followed it were lost in extraction; the lines below are the tail of /out_parser.py …]
--------------------------------------------------------------------------------
/out_parser.py:
--------------------------------------------------------------------------------
36 |         if diff > 45000:
37 |             violations += 1
38 |             print(f"violation at line {i} diff={diff}us last={last} now={time}")
39 |         last = time
40 |         last_us = time_us
41 |     print("total violations", violations)
42 |
43 |
44 | if __name__ == "__main__":
45 |     main()
46 |
--------------------------------------------------------------------------------
/patches/k6-micro.diff:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | diff --git a/output/csv/output.go b/output/csv/output.go
4 | index 582ad7e5..c4e94ba2 100644
5 | --- a/output/csv/output.go
6 | +++ b/output/csv/output.go
7 | @@ -198,7 +198,7 @@ func SampleToRow(sample *metrics.Sample, resTags []string, ignoredTags []string,
8 | case TimeFormatRFC3339:
9 | row[1] = sample.Time.Format(time.RFC3339)
10 | case TimeFormatUnix:
11 | - row[1] = strconv.FormatInt(sample.Time.Unix(), 10)
12 | + row[1] = strconv.FormatInt(sample.Time.UnixMicro(), 10)
13 | }
14 |
15 | row[2] = fmt.Sprintf("%f", sample.Value)
16 |
--------------------------------------------------------------------------------
/proto/CMakeLists.txt:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the
2 | # MIT License.
3 |
4 | find_program(PROTOC_BINARY_PATH "protoc" HINTS /opt/protoc/bin)
5 |
6 | if(EXISTS ${PROTOC_BINARY_PATH})
7 | message(STATUS "Found protobuf compiler: ${PROTOC_BINARY_PATH}")
8 | else()
9 | message(FATAL_ERROR "Cannot find protobuf compiler: ${PROTOC_BINARY_PATH}")
10 | endif()
11 |
12 | set(PROTOBUF_INCLUDE_DIR ${CCF_DIR}/include/3rdparty/protobuf/src/)
13 |
14 | set(PROTO_FILES
15 | ${CMAKE_CURRENT_SOURCE_DIR}/etcd.proto
16 | ${CMAKE_CURRENT_SOURCE_DIR}/lskvserver.proto
17 | ${CMAKE_CURRENT_SOURCE_DIR}/status.proto)
18 |
19 | option(GENERATE_PYTHON "generate python protobuf and grpc bindings" OFF)
20 |
21 | foreach(proto_file ${PROTO_FILES})
22 | get_filename_component(PROTO_NAME ${proto_file} NAME)
23 | get_filename_component(PROTO_NAME_WE ${proto_file} NAME_WE)
24 | message(TRACE "Generating source files from proto file ${PROTO_NAME}")
25 |
26 | add_custom_command(
27 | OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.h
28 | ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.cc
29 | COMMAND ${PROTOC_BINARY_PATH} --proto_path=${CMAKE_CURRENT_SOURCE_DIR}
30 | --cpp_out=${CMAKE_CURRENT_BINARY_DIR} ${proto_file}
31 | COMMENT "Generate C++ source files from protobuf file ${PROTO_NAME}"
32 | DEPENDS ${proto_file})
33 |
34 | if(${PROTO_NAME_WE} STREQUAL "lskvserver")
35 | set(EXTRA_INCLUDES ${CMAKE_CURRENT_BINARY_DIR}/etcd.pb.h)
36 | endif()
37 |
38 | if(${GENERATE_PYTHON})
39 | # for tests
40 | add_custom_command(
41 | OUTPUT ${CMAKE_SOURCE_DIR}/tests/${PROTO_NAME_WE}_pb2.py
42 | COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/build.sh ${proto_file}
43 | ${CMAKE_SOURCE_DIR}/tests/
44 | COMMENT "Generate Python source file from protobuf file ${PROTO_NAME}"
45 | DEPENDS ${proto_file})
46 | add_custom_target(${PROTO_NAME_WE}_proto_python_tests ALL
47 | DEPENDS ${CMAKE_SOURCE_DIR}/tests/${PROTO_NAME_WE}_pb2.py)
48 |
49 | # for benchmarks
50 | add_custom_command(
51 | OUTPUT ${CMAKE_SOURCE_DIR}/benchmark/${PROTO_NAME_WE}_pb2.py
52 | COMMAND ${CMAKE_CURRENT_SOURCE_DIR}/build.sh ${proto_file}
53 | ${CMAKE_SOURCE_DIR}/benchmark/
54 | COMMENT "Generate Python source file from protobuf file ${PROTO_NAME}"
55 | DEPENDS ${proto_file})
56 | add_custom_target(${PROTO_NAME_WE}_proto_python_benchmark ALL
57 | DEPENDS ${CMAKE_SOURCE_DIR}/benchmark/${PROTO_NAME_WE}_pb2.py)
58 | endif()
59 |
60 | if(${COMPILE_TARGET} STREQUAL "sgx")
61 | add_enclave_library(
62 | ${PROTO_NAME_WE}.enclave
63 | ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.cc
64 | ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.h ${EXTRA_INCLUDES})
65 | target_include_directories(
66 | ${PROTO_NAME_WE}.enclave PUBLIC ${PROTOBUF_INCLUDE_DIR}
67 | ${CMAKE_CURRENT_BINARY_DIR})
68 | elseif(${COMPILE_TARGET} STREQUAL "snp")
69 | add_host_library(
70 | ${PROTO_NAME_WE}.snp
71 | ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.cc
72 | ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.h ${EXTRA_INCLUDES})
73 | target_include_directories(
74 | ${PROTO_NAME_WE}.snp PUBLIC ${PROTOBUF_INCLUDE_DIR}
75 | ${CMAKE_CURRENT_BINARY_DIR})
76 | else()
77 | add_host_library(
78 | ${PROTO_NAME_WE}.virtual
79 | ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.cc
80 | ${CMAKE_CURRENT_BINARY_DIR}/${PROTO_NAME_WE}.pb.h ${EXTRA_INCLUDES})
81 | target_include_directories(
82 | ${PROTO_NAME_WE}.virtual PUBLIC ${PROTOBUF_INCLUDE_DIR}
83 | ${CMAKE_CURRENT_BINARY_DIR})
84 | endif()
85 | endforeach()
86 |
--------------------------------------------------------------------------------
/proto/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | set -e
6 |
7 | if [ "$#" -ne 2 ]; then
8 | echo "Usage: $0 "
9 | fi
10 |
11 | THIS_DIR=$(dirname "${BASH_SOURCE[0]}")
12 | SOURCE_FILE=${1}
13 | GENERATED_DIR=${2}
14 |
15 | mkdir -p "${GENERATED_DIR}"
16 |
17 | echo " -- Building ${SOURCE_FILE} into ${GENERATED_DIR}"
18 | python3 -m grpc_tools.protoc \
19 | -I "${THIS_DIR}" \
20 | --python_out "${GENERATED_DIR}" \
21 | --mypy_out "${GENERATED_DIR}" \
22 | "${SOURCE_FILE}"
23 |
--------------------------------------------------------------------------------
/proto/lskvserver.proto:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 | syntax = "proto3";
4 |
5 | import "etcd.proto";
6 |
7 | package lskvserverpb;
8 |
9 | message ReceiptClaims
10 | {
11 | oneof request
12 | {
13 | etcdserverpb.PutRequest request_put = 1;
14 | etcdserverpb.DeleteRangeRequest request_delete_range = 2;
15 | etcdserverpb.TxnRequest request_txn = 3;
16 | }
17 | oneof response
18 | {
19 | etcdserverpb.PutResponse response_put = 4;
20 | etcdserverpb.DeleteRangeResponse response_delete_range = 5;
21 | etcdserverpb.TxnResponse response_txn = 6;
22 | }
23 | }
24 |
25 | message SignatureReceipt { string leaf = 1; }
26 |
27 | message Proof
28 | {
29 | string left = 1;
30 | string right = 2;
31 | }
32 |
33 | message LeafComponents
34 | {
35 | string claims_digest = 1;
36 | string commit_evidence = 2;
37 | string write_set_digest = 3;
38 | }
39 |
40 | message TxReceipt
41 | {
42 | LeafComponents leaf_components = 1;
43 | repeated Proof proof = 2;
44 | }
45 |
46 | message Receipt
47 | {
48 | string cert = 1;
49 | bytes signature = 2;
50 | string node_id = 3;
51 | oneof receipt_extras
52 | {
53 | TxReceipt tx_receipt = 4;
54 | SignatureReceipt signature_receipt = 5;
55 | }
56 | }
57 |
58 | message GetReceiptRequest
59 | {
60 | int64 revision = 1;
61 | uint64 raft_term = 2;
62 | }
63 |
64 | message GetReceiptResponse
65 | {
66 | etcdserverpb.ResponseHeader header = 1;
67 | Receipt receipt = 2;
68 | }
69 |
70 | // https://microsoft.github.io/CCF/main/use_apps/verify_tx.html#checking-for-commit
71 | message TxStatusRequest
72 | {
73 | int64 revision = 1;
74 | uint64 raft_term = 2;
75 | }
76 |
77 | message TxStatusResponse
78 | {
79 | etcdserverpb.ResponseHeader header = 1;
80 | enum Status {
81 | // This node has not received this transaction, and knows nothing about it
82 | Unknown = 0;
83 | // This node has this transaction locally, but has not yet heard that the
84 | // transaction has been committed by the distributed consensus
85 | Pending = 1;
86 | // This node has seen that this transaction is committed, it is an
87 | // irrevocable and durable part of the service's transaction history
88 | Committed = 2;
89 | // This node knows that the given transaction cannot be committed. This may
90 | // mean there has been a view change, and a previously pending transaction
91 | // has been lost (the original request should be resubmitted and will be
92 | // given a new Transaction ID). This also describes IDs which are known to
93 | // be impossible given the currently committed IDs
94 | Invalid = 3;
95 | }
96 | Status status = 2;
97 | }
98 |
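99 | // The revision and raft_term in GetReceiptRequest and TxStatusRequest are
100 | // typically taken from the ResponseHeader returned by the original write
101 | // request (see the receipt flow in docs/index.md).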
--------------------------------------------------------------------------------
/proto/status.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 |
3 | import "google/protobuf/any.proto";
4 |
5 | package ccf.protobuf;
6 |
7 | // As per https://cloud.google.com/apis/design/errors#error_model
8 | message Status
9 | {
10 | int32 code = 1;
11 | string message = 2;
12 | repeated google.protobuf.Any details = 3;
13 | }
--------------------------------------------------------------------------------
/python/pyproject.toml:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 |
4 | [project]
5 | name = "lskv"
6 | version = "0.1.0"
7 |
8 | [build-system]
9 | requires = ["setuptools >= 64.0"]
10 | build-backend = "setuptools.build_meta"
11 |
--------------------------------------------------------------------------------
/receipts.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | header=$(./etcdctl.sh put a b -w json | jq '.header')
6 | rev=$(echo "$header" | jq '.revision')
7 | raft_term=$(echo "$header" | jq '.raft_term')
8 |
9 | curl -X POST -k https://127.0.0.1:8000/v3/receipt/get_receipt -d '{"revision": "'"$rev"'", "raft_term": "'"$raft_term"'"}' -H 'content-type: application/json'
10 | curl -X POST -k https://127.0.0.1:8000/v3/receipt/get_receipt -d '{"revision": "'"$rev"'", "raft_term": "'"$raft_term"'"}' -H 'content-type: application/json' | jq
11 |
12 | sig_rev=$((rev + 1)) # the next revision, which is expected to hold a signature transaction
13 | curl -X POST -k https://127.0.0.1:8000/v3/receipt/get_receipt -d '{"revision": "'"$sig_rev"'", "raft_term": "'"$raft_term"'"}' -H 'content-type: application/json'
14 | curl -X POST -k https://127.0.0.1:8000/v3/receipt/get_receipt -d '{"revision": "'"$sig_rev"'", "raft_term": "'"$raft_term"'"}' -H 'content-type: application/json' | jq
15 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | ccf==4.0.7
4 | cimetrics==0.3.14
5 | pandas==1.5.0
6 | notebook==6.4.12
7 | seaborn==0.12.0
8 | httpx[http2]==0.23.0
9 | loguru==0.6.0
10 | pytest==7.2.0
11 | types-protobuf==3.20.4.2
12 | mypy-protobuf==3.4.0
13 | grpcio-tools==1.50.0
14 | paramiko==2.12.0
15 | types-paramiko==2.12.0
16 | ./python
17 |
--------------------------------------------------------------------------------
/scripts/check-cmake-format.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | set -u
6 |
7 | if [ "$#" -eq 0 ]; then
8 | echo "No args given - specify dir(s) to be formatted"
9 | exit 1
10 | fi
11 |
12 | fix=false
13 | while getopts ":f:" opt; do
14 | case $opt in
15 | f)
16 | fix=true
17 | shift
18 | ;;
19 | \?)
20 | echo "Invalid option -$OPTARG" >&2
21 | exit
22 | ;;
23 | esac
24 | done
25 |
26 | if $fix; then
27 | echo "Formatting files in" "$@"
28 | else
29 | echo "Checking file format in" "$@"
30 | fi
31 |
32 | if [ ! -f "scripts/env/bin/activate" ]; then
33 | python3.8 -m venv scripts/env
34 | fi
35 |
36 | source scripts/env/bin/activate
37 | pip install -U pip
38 | pip install cmake_format==0.6.11 1>/dev/null
39 |
40 | unformatted_files=""
41 | for file in $(git ls-files "$@" | grep -e '\.cmake$' -e 'CMakeLists\.txt$'); do
42 | cmake-format --check "$file" >/dev/null
43 | d=$?
44 | if $fix; then
45 | cmake-format -i "$file"
46 | fi
47 | if [ $d -ne 0 ]; then
48 | if [ "$unformatted_files" != "" ]; then
49 | unformatted_files+=$'\n'
50 | fi
51 | unformatted_files+="$file"
52 | fi
53 | done
54 |
55 | if [ "$unformatted_files" != "" ]; then
56 | if $fix; then
57 | echo "Formatted files:"
58 | else
59 | echo "Fix formatting:"
60 | fi
61 |
62 | echo "$unformatted_files"
63 | exit 1
64 | else
65 | echo "All files formatted correctly!"
66 | fi
67 |
--------------------------------------------------------------------------------
/scripts/check-format.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | set -u
6 |
7 | if [ "$#" -eq 0 ]; then
8 | echo "No args given - specify dir(s) to be formatted"
9 | exit 1
10 | fi
11 |
12 | fix=false
13 | while getopts ":f:" opt; do
14 | case $opt in
15 | f)
16 | fix=true
17 | shift
18 | ;;
19 | \?)
20 | echo "Invalid option -$OPTARG" >&2
21 | exit
22 | ;;
23 | esac
24 | done
25 |
26 | if $fix; then
27 | echo "Formatting files in" "$@"
28 | else
29 | echo "Checking file format in" "$@"
30 | fi
31 |
32 | file_name_regex="^[[:lower:]0-9_]+$"
33 | unformatted_files=""
34 | badly_named_files=""
35 | clang_fmt=clang-format-11
36 | if [[ ! $(command -v ${clang_fmt}) ]]; then
37 | clang_fmt=clang-format
38 | fi
39 |
40 | echo "Using $clang_fmt"
41 |
42 | for file in $(git ls-files "$@" | grep -e '\.h$' -e '\.hpp$' -e '\.cpp$' -e '\.c$' -e '\.proto$'); do
43 | if ! $clang_fmt -n -Werror -style=file "$file"; then
44 | if $fix; then
45 | $clang_fmt -style=file -i "$file"
46 | fi
47 | if [ "$unformatted_files" != "" ]; then
48 | unformatted_files+=$'\n'
49 | fi
50 | unformatted_files+="$file"
51 | fi
52 | file_base_name=$(basename "${file%.*}")
53 | if ! [[ $file_base_name =~ $file_name_regex ]]; then
54 | if [ "$badly_named_files" != "" ]; then
55 | badly_named_files+=$'\n'
56 | fi
57 | badly_named_files+="$file"
58 | fi
59 | done
60 |
61 | if [ "$unformatted_files" != "" ]; then
62 | if $fix; then
63 | echo "Fixed formatting:"
64 | else
65 | echo "Fix formatting:"
66 | fi
67 | echo "$unformatted_files"
68 | if ! $fix; then
69 | exit 1
70 | fi
71 | else
72 | echo "All files formatted correctly!"
73 | fi
74 |
75 | if [ "$badly_named_files" != "" ]; then
76 | echo "Fix file name:"
77 | echo "$badly_named_files"
78 | exit 2
79 | else
80 | echo "All files named correctly!"
81 | fi
82 |
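The file_name_regex above enforces snake_case source file names. The equivalent check in Python (the POSIX [[:lower:]] class becomes a-z):

    import re

    file_name_regex = re.compile(r"^[a-z0-9_]+$")
    for name in ["kvstore", "node_data", "KVStore", "json-grpc"]:
        print(name, bool(file_name_regex.match(name)))
    # kvstore and node_data pass; KVStore and json-grpc would be flagged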
--------------------------------------------------------------------------------
/scripts/check-issues.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | todos=$(git grep --line-number -o -E 'TODO\(#[0-9]+\)' -- ':!3rdparty/protobuf')
6 |
7 | ex_code=0
8 |
9 | repo="microsoft/LSKV"
10 |
11 | # check can see the issues on the repo
12 | if ! gh issue list --repo $repo --limit 1 >/dev/null 2>&1; then
13 | echo "Failed to authenticate with github. Try 'gh auth login'"
14 | exit 1
15 | fi
16 |
17 | # for each todo we found extract the file and the todo text
18 | # the text is in the format 'TODO(#n)' where 'n' is a number.
19 | # We can check the gh issue with that number and ensure it is open.
20 | for todo in $todos; do
21 | IFS=':' read -ra ADDR <<<"$todo"
22 | todo_text=${ADDR[2]}
23 |
24 | issue_no=$(echo "$todo_text" | grep -o -e '[[:digit:]]*')
25 |
26 | issue_state=$(gh issue view --repo $repo "$issue_no" --json state --jq '.state' 2>/dev/null)
27 | if [[ $issue_state != "OPEN" ]]; then
28 | if [[ $issue_state == "" ]]; then
29 | issue_state="MISSING"
30 | fi
31 | echo "$todo: $issue_state"
32 | ex_code=1
33 | fi
34 | done
35 |
36 | if [[ $ex_code -eq 0 ]]; then
37 | echo "All todo issues are open"
38 | else
39 | echo "Found references to non-open issues"
40 | fi
41 |
42 | exit $ex_code
43 |
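The loop above relies on git grep emitting path:line:TODO(#n) triples; the issue-number extraction is equivalent to this Python:

    import re

    todo = "src/app/app.cpp:42:TODO(#123)"  # example git-grep output line
    match = re.search(r"TODO\(#(\d+)\)", todo)
    if match:
        print(match.group(1))  # -> 123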
--------------------------------------------------------------------------------
/scripts/check-todo.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | set -u
6 |
7 | if [ "$#" -eq 0 ]; then
8 | echo "check-todo.sh takes at least one file or directory"
9 | exit 1
10 | fi
11 |
12 | DENYLIST="TODO: FIXME:"
13 | STATUS=0
14 |
15 | for DENYTERM in $DENYLIST; do
16 | FOUND=$(git ls-files "$@" | xargs grep -n "$DENYTERM")
17 |
18 | if [ "$FOUND" == "" ]; then
19 | echo "No ${DENYTERM}s found"
20 | else
21 | echo "$FOUND"
22 | STATUS=1
23 | fi
24 | done
25 |
26 | exit $STATUS
27 |
--------------------------------------------------------------------------------
/scripts/ci-checks.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # Copyright (c) Microsoft Corporation. All rights reserved.
3 | # Licensed under the MIT License.
4 |
5 | set -e
6 |
7 | if [ "$1" == "-f" ]; then
8 | FIX=1
9 | else
10 | FIX=0
11 | fi
12 |
13 | SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"
14 |
15 | ROOT_DIR=$(dirname "$SCRIPT_DIR")
16 | pushd "$ROOT_DIR" >/dev/null
17 |
18 | # GitHub actions workflow commands: https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions
19 | function group() {
20 | echo "::group::$1"
21 | }
22 | function endgroup() {
23 | echo "::endgroup::"
24 | }
25 |
26 | group "Shell scripts"
27 | git ls-files | grep -e '\.sh$' | grep -E -v "^3rdparty" | xargs shellcheck -s bash -e SC2044,SC2002,SC1091,SC2181
28 | endgroup
29 |
30 | group "TODOs"
31 | "$SCRIPT_DIR"/check-todo.sh include src constitution
32 | endgroup
33 |
34 | group "C/C++/Proto format"
35 | if [ $FIX -ne 0 ]; then
36 | "$SCRIPT_DIR"/check-format.sh -f include src samples proto
37 | else
38 | "$SCRIPT_DIR"/check-format.sh include src samples proto
39 | fi
40 | endgroup
41 |
42 | group "TypeScript, JavaScript, Markdown, YAML and JSON format"
43 | npm install --loglevel=error --no-save prettier 1>/dev/null
44 | if [ $FIX -ne 0 ]; then
45 | git ls-files -- . ':!:3rdparty/' | grep -e '\.ts$' -e '\.js$' -e '\.md$' -e '\.yaml$' -e '\.yml$' -e '\.json$' | xargs npx prettier --write
46 | else
47 | git ls-files -- . ':!:3rdparty/' | grep -e '\.ts$' -e '\.js$' -e '\.md$' -e '\.yaml$' -e '\.yml$' -e '\.json$' | xargs npx prettier --check
48 | fi
49 | endgroup
50 |
51 | group "CMake format"
52 | if [ $FIX -ne 0 ]; then
53 | "$SCRIPT_DIR"/check-cmake-format.sh -f cmake samples src tests CMakeLists.txt
54 | else
55 | "$SCRIPT_DIR"/check-cmake-format.sh cmake samples src tests CMakeLists.txt
56 | fi
57 | endgroup
58 |
59 | group "Python dependencies"
60 | # Virtual Environment w/ dependencies for Python steps
61 | VENV_DIR=.venv
62 | if [ ! -f "${VENV_DIR}/bin/activate" ]; then
63 | python3.8 -m venv ${VENV_DIR}
64 | fi
65 |
66 | # shellcheck source=/dev/null
67 | source ${VENV_DIR}/bin/activate
68 | pip install -U pip
69 | pip install -U wheel black[jupyter] pylint mypy cpplint 1>/dev/null
70 | pip install -r requirements.txt
71 | endgroup
72 |
73 | group "Copyright notice headers"
74 | python3 "$SCRIPT_DIR"/notice_check.py
75 | endgroup
76 |
77 | group "Python format"
78 | if [ $FIX -ne 0 ]; then
79 | git ls-files | grep -e '\.py$' -e '\.ipynb$' | xargs black
80 | else
81 | git ls-files | grep -e '\.py$' -e '\.ipynb$' | xargs black --check
82 | fi
83 | endgroup
84 |
85 | group "Python lint"
86 | git ls-files | grep -e '\.py$' | xargs python -m pylint --ignored-modules "*_pb2" --disable duplicate-code
87 | endgroup
88 |
89 | group "Python types"
90 | git ls-files | grep -e '\.py$' | xargs mypy
91 | endgroup
92 |
93 | group "CPP Lint"
94 | make cpplint
95 | endgroup
96 |
--------------------------------------------------------------------------------
/scripts/notice_check.py:
--------------------------------------------------------------------------------
1 | # Copyright (c) Microsoft Corporation. All rights reserved.
2 | # Licensed under the MIT License.
3 | """
4 | Script to check that source files have license headers.
5 | """
6 |
7 | import subprocess
8 | import sys
9 | from typing import List
10 |
11 | from loguru import logger
12 |
13 | LICENSE_HEADER = (
14 | "Copyright (c) Microsoft Corporation. "
15 | "All rights reserved. "
16 | "Licensed under the MIT License."
17 | )
18 |
19 | COMMENT_PREFIXES = ["//", "#"]
20 |
21 |
22 | def extract_potential_license(lines: List[str]) -> str:
23 | """
24 | Extract the first lines of a file that start with a comment prefix.
25 |
26 | These could contain license lines.
27 | """
28 | license_lines: List[str] = []
29 | for line in lines:
30 | was_comment = False
31 | for comment_prefix in COMMENT_PREFIXES:
32 | if line.startswith(comment_prefix):
33 | line = line.lstrip(comment_prefix)
34 | was_comment = True
35 | if not was_comment:
36 | logger.debug("stopping at line: {}", line.strip())
37 | return " ".join(license_lines)
38 | license_lines.append(line.strip())
39 |
40 | return " ".join(license_lines)
41 |
42 |
43 | def has_notice(path: str) -> bool:
44 | """
45 | Check that the given file has a notice.
46 | """
47 | try:
48 | with open(path, "r", encoding="utf-8") as file:
49 | lines = file.readlines()
50 | license_lines = extract_potential_license(lines)
51 | if LICENSE_HEADER in license_lines:
52 | return True
53 | logger.debug(" found: {}", license_lines)
54 | logger.debug("expected: {}", LICENSE_HEADER)
55 |
56 | return False
57 | except UnicodeDecodeError:
58 | logger.warning("Failed to read file (not utf-8): {}", path)
59 | # treat as ok
60 | return True
61 |
62 |
63 | def git_ls_files() -> List[str]:
64 | """
65 | Get the list of files to check that are tracked by git.
66 | """
67 | excluded = [
68 | "3rdparty/", # these aren't ours
69 | "LICENSE", # don't need a license on the license
70 | "*.json", # can't add comments to these files
71 | "*.ipynb", # can't add comments to these files
72 | "*.md", # just documentation
73 | ".gitmodules", # not a source file
74 | ".gitignore", # not a source file
75 | ".dockerignore", # not a source file
76 | ".clang-format", # not a source file
77 | "proto/etcd.proto", # mostly not ours
78 | "proto/status.proto", # mostly not ours
79 | ".github/workflows", # not source files
80 | "nix/", # just build files
81 | "flake.nix", # just build files
82 | "flake.lock", # just build files
83 | ".envrc", # just build files
84 | "*.parquet", # binary
85 | "benchmark/go-ycsb/workloads",
86 | ]
87 | excluded = [f":!:{e}" for e in excluded]
88 | cmd = ["git", "ls-files", "--", "."] + excluded
89 | res = subprocess.run(cmd, check=True, capture_output=True)
90 | return res.stdout.decode("utf-8").strip().split("\n")
91 |
92 |
93 | def main():
94 | """
95 | Main function.
96 | """
97 | logger.remove()
98 | logger.add(sink=sys.stdout, level="WARNING")
99 |
100 | files = git_ls_files()
101 |
102 | missing = 0
103 | for file in files:
104 | logger.info("Checking {}", file)
105 | if not has_notice(file):
106 | missing += 1
107 | logger.warning("Copyright notice missing from {}", file)
108 | sys.exit(missing)
109 |
110 |
111 | if __name__ == "__main__":
112 | main()
113 |
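For illustration, extract_potential_license applied to a typical header stops at the first non-comment line and joins what it saw, which is what makes the check above match:

    lines = [
        "# Copyright (c) Microsoft Corporation. All rights reserved.",
        "# Licensed under the MIT License.",
        "import sys",
    ]
    extract_potential_license(lines)
    # -> "Copyright (c) Microsoft Corporation. All rights reserved. Licensed under the MIT License."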
--------------------------------------------------------------------------------
/src/app/exceptions.h:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | #pragma once
5 |
6 | #include <ccf/odata_error.h>
7 |
8 | namespace app::exceptions
9 | {
10 | struct BadRequest : public std::exception
11 | {
12 | ccf::ErrorDetails error;
13 |
14 | explicit BadRequest(std::string&& msg) :
15 | error{HTTP_STATUS_BAD_REQUEST, ccf::errors::InvalidInput, msg}
16 | {}
17 |
18 | const char* what() const throw() override
19 | {
20 | return error.msg.c_str();
21 | }
22 | };
23 |
24 | struct WrongMediaType : public std::exception
25 | {
26 | ccf::ErrorDetails error;
27 |
28 | explicit WrongMediaType(std::string&& msg) :
29 | error{
30 | HTTP_STATUS_UNSUPPORTED_MEDIA_TYPE,
31 | ccf::errors::UnsupportedContentType,
32 | msg}
33 | {}
34 |
35 | const char* what() const throw() override
36 | {
37 | return error.msg.c_str();
38 | }
39 | };
40 |
41 | }; // namespace app::exceptions
42 |
--------------------------------------------------------------------------------
/src/app/grpc.h:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | #pragma once
5 |
6 | namespace app::grpc
7 | {
8 |   template <typename In>
9 |   using GrpcReadOnlyEndpointInOnly = std::function<void(
10 |     ccf::endpoints::ReadOnlyEndpointContext& ctx, In&& payload)>;
11 |
12 |   template <typename In>
13 |   ccf::endpoints::ReadOnlyEndpointFunction grpc_read_only_adapter_in_only(
14 |     const GrpcReadOnlyEndpointInOnly<In>& f)
15 |   {
16 |     return [f](ccf::endpoints::ReadOnlyEndpointContext& ctx) {
17 |       f(ctx, ccf::grpc::get_grpc_payload<In>(ctx.rpc_ctx));
18 |     };
19 |   }
20 |
21 |   template <typename In>
22 |   using GrpcEndpointInOnly =
23 |     std::function<void(ccf::endpoints::EndpointContext& ctx, In&& payload)>;
24 |
25 |   template <typename In>
26 |   ccf::endpoints::EndpointFunction grpc_adapter_in_only(
27 |     const GrpcEndpointInOnly<In>& f)
28 |   {
29 |     return [f](ccf::endpoints::EndpointContext& ctx) {
30 |       f(ctx, ccf::grpc::get_grpc_payload<In>(ctx.rpc_ctx));
31 |     };
32 |   }
33 |
34 |   template <typename In, typename Out>
35 |   using HistoricalGrpcReadOnlyEndpoint =
36 |     std::function<ccf::grpc::GrpcAdapterResponse<Out>(
37 |       ccf::endpoints::ReadOnlyEndpointContext& ctx,
38 |       ccf::historical::StatePtr historical_state,
39 |       In&& payload)>;
40 |
41 |   template <typename In, typename Out>
42 |   ccf::historical::HandleReadOnlyHistoricalQuery
43 |   historical_grpc_read_only_adapter(
44 |     const HistoricalGrpcReadOnlyEndpoint<In, Out>& f)
45 |   {
46 |     return [f](
47 |              ccf::endpoints::ReadOnlyEndpointContext& ctx,
48 |              ccf::historical::StatePtr historical_state) {
49 |       ccf::grpc::set_grpc_response(
50 |         f(ctx, historical_state, ccf::grpc::get_grpc_payload<In>(ctx.rpc_ctx)),
51 |         ctx.rpc_ctx);
52 |     };
53 |   }
54 |
55 | }; // namespace app::grpc
56 |
--------------------------------------------------------------------------------
/src/app/index.h:
--------------------------------------------------------------------------------
1 | // Copyright (c) Microsoft Corporation. All rights reserved.
2 | // Licensed under the MIT License.
3 |
4 | #pragma once
5 |
6 | #include "ccf/indexing/strategy.h"
7 | #include "kvstore.h"
8 |
9 | #include