├── .eslintrc.js
├── .gitattributes
├── .github
├── dependabot.yml
└── workflows
│ ├── codeql.yml
│ ├── dependency-review.yml
│ ├── protos.yml
│ └── scorecards.yml
├── .gitignore
├── .pre-commit-config.yaml
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── SECURITY.md
├── buf.gen.yaml
├── buf.work.yaml
├── docker-compose.yaml
├── jasmine.json
├── package-lock.json
├── package.json
├── proto
├── README.md
├── base
│ └── v1
│ │ ├── base.proto
│ │ ├── errors.proto
│ │ ├── openapi.proto
│ │ └── service.proto
├── buf.lock
└── buf.yaml
├── samples
└── create_tenant.ts
├── scripts
└── run-instance.sh
├── src
├── grpc-clients.test.ts
├── grpc
│ ├── clients.ts
│ ├── config.ts
│ ├── generated
│ │ ├── base
│ │ │ └── v1
│ │ │ │ ├── base.ts
│ │ │ │ ├── errors.ts
│ │ │ │ ├── health.ts
│ │ │ │ ├── openapi.ts
│ │ │ │ └── service.ts
│ │ ├── google
│ │ │ ├── api
│ │ │ │ ├── annotations.ts
│ │ │ │ ├── expr
│ │ │ │ │ └── v1alpha1
│ │ │ │ │ │ ├── checked.ts
│ │ │ │ │ │ └── syntax.ts
│ │ │ │ └── http.ts
│ │ │ └── protobuf
│ │ │ │ ├── any.ts
│ │ │ │ ├── descriptor.ts
│ │ │ │ ├── duration.ts
│ │ │ │ ├── empty.ts
│ │ │ │ ├── struct.ts
│ │ │ │ └── timestamp.ts
│ │ ├── protoc-gen-openapiv2
│ │ │ └── options
│ │ │ │ ├── annotations.ts
│ │ │ │ └── openapiv2.ts
│ │ └── validate
│ │ │ └── validate.ts
│ ├── index.ts
│ └── interceptors.ts
└── index.ts
├── tsconfig.json
└── yarn-error.log
/.eslintrc.js:
--------------------------------------------------------------------------------
1 | module.exports = {
2 | "env": {
3 | "browser": true,
4 | "commonjs": true,
5 | "es2021": true
6 | },
7 | "overrides": [
8 | ],
9 | "parser": "@typescript-eslint/parser",
10 | "parserOptions": {
11 | "ecmaVersion": "latest"
12 | },
13 | "plugins": [
14 | "@typescript-eslint"
15 | ],
16 | "extends": "eslint:recommended",
17 | "ignorePatterns": ["src/grpc/generated/**", "src/*.test.ts"],
18 | "rules": {
19 | }
20 | }
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | # Auto detect text files and perform LF normalization
2 | * text=auto
3 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "npm"
4 | directory: "/"
5 | schedule:
6 | interval: "weekly"
7 | open-pull-requests-limit: 15
8 |
9 | - package-ecosystem: github-actions
10 | directory: /
11 | schedule:
12 | interval: daily
13 |
--------------------------------------------------------------------------------
/.github/workflows/codeql.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [ "main" ]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [ "main" ]
20 | schedule:
21 | - cron: '31 21 * * 1'
22 |
23 | permissions:
24 | contents: read
25 |
26 | jobs:
27 | analyze:
28 | name: Analyze
29 | runs-on: ubuntu-latest
30 | permissions:
31 | actions: read
32 | contents: read
33 | security-events: write
34 |
35 | strategy:
36 | fail-fast: false
37 | matrix:
38 | language: [ 'javascript' ]
39 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
40 | # Use only 'java' to analyze code written in Java, Kotlin or both
41 | # Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
42 | # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
43 |
44 | steps:
45 | - name: Harden Runner
46 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
47 | with:
48 | egress-policy: audit
49 |
50 | - name: Checkout repository
51 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
52 |
53 | # Initializes the CodeQL tools for scanning.
54 | - name: Initialize CodeQL
55 | uses: github/codeql-action/init@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19
56 | with:
57 | languages: ${{ matrix.language }}
58 | # If you wish to specify custom queries, you can do so here or in a config file.
59 | # By default, queries listed here will override any specified in a config file.
60 | # Prefix the list here with "+" to use these queries and those in the config file.
61 |
62 | # Details on CodeQL's query packs refer to : https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
63 | # queries: security-extended,security-and-quality
64 |
65 |
66 | # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
67 | # If this step fails, then you should remove it and run the build manually (see below)
68 | - name: Autobuild
69 | uses: github/codeql-action/autobuild@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19
70 |
71 | # ℹ️ Command-line programs to run using the OS shell.
72 | # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
73 |
74 | # If the Autobuild fails above, remove it and uncomment the following three lines.
75 | # Modify them (or add more) to build your code; if needed, refer to the EXAMPLE below for guidance.
76 |
77 | # - run: |
78 | # echo "Run, Build Application using script"
79 | # ./location_of_script_within_repo/buildscript.sh
80 |
81 | - name: Perform CodeQL Analysis
82 | uses: github/codeql-action/analyze@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19
83 | with:
84 | category: "/language:${{matrix.language}}"
85 |
--------------------------------------------------------------------------------
/.github/workflows/dependency-review.yml:
--------------------------------------------------------------------------------
1 | # Dependency Review Action
2 | #
3 | # This Action will scan dependency manifest files that change as part of a Pull Request,
4 | # surfacing known-vulnerable versions of the packages declared or updated in the PR.
5 | # Once installed, if the workflow run is marked as required,
6 | # PRs introducing known-vulnerable packages will be blocked from merging.
7 | #
8 | # Source repository: https://github.com/actions/dependency-review-action
9 | name: 'Dependency Review'
10 | on: [pull_request]
11 |
12 | permissions:
13 | contents: read
14 |
15 | jobs:
16 | dependency-review:
17 | runs-on: ubuntu-latest
18 | steps:
19 | - name: Harden Runner
20 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
21 | with:
22 | egress-policy: audit
23 |
24 | - name: 'Checkout Repository'
25 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
26 | - name: 'Dependency Review'
27 | uses: actions/dependency-review-action@da24556b548a50705dd671f47852072ea4c105d9 # v4.7.1
28 |
--------------------------------------------------------------------------------
/.github/workflows/protos.yml:
--------------------------------------------------------------------------------
1 | name: Overwrite Proto Directory
2 |
3 | on:
4 | push:
5 | branches: [ "main" ]
6 |
7 | permissions:
8 | contents: read
9 |
10 | jobs:
11 | copy-proto:
12 | permissions:
13 | contents: write # for Git to git push
14 | runs-on: ubuntu-latest
15 |
16 | steps:
17 | - name: Harden Runner
18 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
19 | with:
20 | egress-policy: audit
21 |
22 | - name: Checkout Repository
23 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
24 |
25 | - name: Run Bash Script to Overwrite Proto Directory
26 | run: |
27 | #!/bin/bash
28 | REPO_URL="https://github.com/Permify/permify.git"
29 | TEMP_DIR="temp_dir"
30 | TARGET_DIR="proto"
31 |
32 | # Clone the repository to a temporary directory
33 | git clone --depth=1 $REPO_URL $TEMP_DIR
34 |
35 | # Check if the clone was successful
36 | if [ $? -ne 0 ]; then
37 | echo "Failed to clone repository."
38 | exit 1
39 | fi
40 |
41 | # Remove the existing proto directory if it exists
42 | if [ -d "$TARGET_DIR" ]; then
43 | rm -rf $TARGET_DIR
44 | fi
45 |
46 | # Create the target directory and copy the specific directory to the desired location
47 | mkdir -p $TARGET_DIR
48 | cp -r $TEMP_DIR/proto/* $TARGET_DIR
49 |
50 | # Check if the copy was successful
51 | if [ $? -ne 0 ]; then
52 | echo "Failed to copy the directory."
53 | # Clean up the temporary repository directory
54 | rm -rf $TEMP_DIR
55 | exit 1
56 | fi
57 |
58 | # Clean up the temporary repository directory
59 | rm -rf $TEMP_DIR
60 |
61 | echo "Successfully copied the proto directory to $TARGET_DIR."
62 |
63 | - name: Install Node.js and Dependencies
64 | uses: actions/setup-node@49933ea5288caeca8642d1e84afbd3f7d6820020 # v4.4.0
65 | with:
66 | node-version: '18'
67 |
68 | - name: Install Buf
69 | run: |
70 | # Download and install buf
71 | wget https://github.com/bufbuild/buf/releases/download/v1.7.0/buf-Linux-x86_64 -O /usr/local/bin/buf
72 | chmod +x /usr/local/bin/buf
73 | buf --version
74 |
75 | - name: Generate Code with Buf
76 | run: |
77 | # Navigate to the proto directory and run buf generate
78 | npm install ts-proto
79 | buf generate
80 |
81 | - name: Add and Commit Changes
82 | run: |
83 | git config --global user.name 'GitHub Actions Bot'
84 | git config --global user.email '<>'
85 | git add src/grpc/generated package-lock.json package.json proto/
86 | if git diff-index --quiet HEAD; then
87 | echo "No changes to commit"
88 | else
89 | git commit -m "Update generated sdk directory with latest changes"
90 | fi
91 |
92 | - name: Push Changes
93 | env:
94 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
95 | run: |
96 | git push origin main
97 |
--------------------------------------------------------------------------------
/.github/workflows/scorecards.yml:
--------------------------------------------------------------------------------
1 | # This workflow uses actions that are not certified by GitHub. They are provided
2 | # by a third-party and are governed by separate terms of service, privacy
3 | # policy, and support documentation.
4 |
5 | name: Scorecard supply-chain security
6 | on:
7 | # For Branch-Protection check. Only the default branch is supported. See
8 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#branch-protection
9 | branch_protection_rule:
10 | # To guarantee Maintained check is occasionally updated. See
11 | # https://github.com/ossf/scorecard/blob/main/docs/checks.md#maintained
12 | schedule:
13 | - cron: '20 7 * * 2'
14 | push:
15 | branches: ["main"]
16 |
17 | # Declare default permissions as read only.
18 | permissions: read-all
19 |
20 | jobs:
21 | analysis:
22 | name: Scorecard analysis
23 | runs-on: ubuntu-latest
24 | permissions:
25 | # Needed to upload the results to code-scanning dashboard.
26 | security-events: write
27 | # Needed to publish results and get a badge (see publish_results below).
28 | id-token: write
29 | contents: read
30 | actions: read
31 | # To allow GraphQL ListCommits to work
32 | issues: read
33 | pull-requests: read
34 | # To detect SAST tools
35 | checks: read
36 |
37 | steps:
38 | - name: Harden Runner
39 | uses: step-security/harden-runner@0634a2670c59f64b4a01f0f96f84700a4088b9f0 # v2.12.0
40 | with:
41 | egress-policy: audit
42 |
43 | - name: "Checkout code"
44 | uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2
45 | with:
46 | persist-credentials: false
47 |
48 | - name: "Run analysis"
49 | uses: ossf/scorecard-action@05b42c624433fc40578a4040d5cf5e36ddca8cde # v2.4.2
50 | with:
51 | results_file: results.sarif
52 | results_format: sarif
53 | # (Optional) "write" PAT token. Uncomment the `repo_token` line below if:
54 | # - you want to enable the Branch-Protection check on a *public* repository, or
55 | # - you are installing Scorecards on a *private* repository
56 | # To create the PAT, follow the steps in https://github.com/ossf/scorecard-action#authentication-with-pat.
57 | # repo_token: ${{ secrets.SCORECARD_TOKEN }}
58 |
59 | # Public repositories:
60 | # - Publish results to OpenSSF REST API for easy access by consumers
61 | # - Allows the repository to include the Scorecard badge.
62 | # - See https://github.com/ossf/scorecard-action#publishing-results.
63 | # For private repositories:
64 | # - `publish_results` will always be set to `false`, regardless
65 | # of the value entered here.
66 | publish_results: true
67 |
68 | # Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
69 | # format to the repository Actions tab.
70 | - name: "Upload artifact"
71 | uses: actions/upload-artifact@ea165f8d65b6e75b540449e92b4886f43607fa02 # v4.6.2
72 | with:
73 | name: SARIF file
74 | path: results.sarif
75 | retention-days: 5
76 |
77 | # Upload the results to GitHub's code scanning dashboard.
78 | - name: "Upload to code-scanning"
79 | uses: github/codeql-action/upload-sarif@fca7ace96b7d713c7035871441bd52efbe39e27e # v3.28.19
80 | with:
81 | sarif_file: results.sarif
82 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules/
2 | dist/
3 | .idea
4 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/gitleaks/gitleaks
3 | rev: v8.16.3
4 | hooks:
5 | - id: gitleaks
6 | - repo: https://github.com/jumanjihouse/pre-commit-hooks
7 | rev: 3.0.0
8 | hooks:
9 | - id: shellcheck
10 | - repo: https://github.com/pre-commit/mirrors-eslint
11 | rev: v8.38.0
12 | hooks:
13 | - id: eslint
14 | - repo: https://github.com/pre-commit/pre-commit-hooks
15 | rev: v4.4.0
16 | hooks:
17 | - id: end-of-file-fixer
18 | - id: trailing-whitespace
19 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | ege@permify.co.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Welcome to Permify contribution guidelines, happy to see you here :blush:
4 |
5 | Before diving into the contributing flow and the steps for creating good issues and pull requests, we must specify that all contributions must
6 | follow our [Code of Conduct](https://github.com/Permify/permify/blob/master/CODE_OF_CONDUCT.md).
7 | Please read it before you make any contributions.
8 |
9 | If you need any help or want to talk about a specific topic, you can reach out to me. I'm Ege, one of the co-founders of Permify, and here is my email:
10 | ege@permify.co
11 |
12 | You're always more than welcome to our other communication channels.
13 |
14 | ## Communication Channels
15 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
23 |
24 |
25 |
26 |
27 |
28 | ## Issues
29 |
30 | The best way to contribute to Permify is opening an issue. If you find any bug in Permify or a mistake in our documents or content,
31 | you can open an issue about it to let us know. Evaluating problems and fixing them is a high priority for us.
32 |
33 | ### When opening an issue
34 |
35 | - If you plan to work on a problem, please check that the same problem or topic does not already exist.
36 | - If you plan to work on a new feature, our advice is to discuss it with other community members/maintainers who might give you an idea or support.
37 | - If you get stuck anywhere, ask for help in our Discord community.
38 | - Please relate one bug with one issue, do not use issues as bug lists.
39 |
40 | After creating an issue, if you are looking to make your contribution, follow the steps below.
41 |
42 | ## Contribution Steps
43 |
44 | - Fork this repository.
45 | - Clone the repository you forked.
46 | - Create a branch with a descriptive name. It's better if it relates to your issue title.
47 | - Make necessary changes and commit those changes. Make sure to test your changes.
48 | - Push changes to your branch.
49 | - Submit your changes for review.
50 |
51 | You can create an issue and contribute about anything you want, but following the steps above
52 | will definitely ease your work and that of the other maintainers too.
53 |
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 | Permify NodeJS Client
9 |
10 |
11 |
12 |
13 |
14 |
15 |
16 |
17 | This client makes it easy to interact with [Permify](https://github.com/Permify/permify) from your Node.js application.
18 |
19 | # Installation
20 |
21 | Use npm to install:
22 |
23 | ```shell
24 | npm install @permify/permify-node
25 | ```
26 |
27 | Use yarn to install (Please be aware that Yarn versions greater than v1.10.0 and less than v2 are not supported):
28 |
29 | ```shell
30 | yarn add @permify/permify-node
31 | ```
32 |
33 | # How to use
34 |
35 | ### Create a new tenant
36 |
37 | ```typescript
38 | import * as permify from "@permify/permify-node";
39 |
40 | const client = permify.grpc.newClient({
41 | endpoint: "localhost:3478",
42 | cert: undefined,
43 | insecure: true
44 | });
45 |
46 | client.tenancy.create({
47 | id: "t1",
48 | name: "Tenant 1"
49 | }).then((response) => {
50 | console.log(response);
51 | // handle response
52 | })
53 | ```
54 |
55 | ### Write Schema
56 |
57 | ```typescript
58 | import * as permify from "@permify/permify-node";
59 |
60 | const client = permify.grpc.newClient({
61 | endpoint: "localhost:3478",
62 | cert: undefined,
63 | insecure: true
64 | });
65 |
66 | let schema = `
67 | entity user {}
68 |
69 | entity document {
70 | relation viewer @user
71 |
72 | action view = viewer
73 | }
74 | `;
75 |
76 | // Write the schema
77 | client.schema.write({
78 | tenantId: "t1",
79 | schema: schema
80 | }).then((response) => {
81 | // handle response
82 | })
83 | ```
84 |
85 | ### Write Relationships
86 |
87 | ```typescript
88 | import * as permify from "@permify/permify-node";
89 |
90 | const client = permify.grpc.newClient({
91 | endpoint: "localhost:3478",
92 | cert: undefined,
93 | insecure: true
94 | });
95 |
96 | client.relationship.write({
97 | tenantId: "t1",
98 | metadata: {
99 | schemaVersion: ""
100 | },
101 | tuples: [{
102 | entity: {
103 | type: "document",
104 | id: "1"
105 | },
106 | relation: "viewer",
107 | subject: {
108 | type: "user",
109 | id: "1"
110 | }
111 | }]
112 | }).then((response) => {
113 | // handle response
114 | })
115 | ```
116 |
117 | ### Check
118 |
119 | ```typescript
120 | import * as permify from "@permify/permify-node";
121 |
122 | const client = permify.grpc.newClient({
123 | endpoint: "localhost:3478",
124 | cert: undefined,
125 | insecure: true
126 | });
127 |
128 | client.permission.check({
129 | tenantId: "t1",
130 | metadata: {
131 | snapToken: "",
132 | schemaVersion: "",
133 | depth: 20
134 | },
135 | entity: {
136 | type: "document",
137 | id: "1"
138 | },
139 | permission: "view",
140 | subject: {
141 | type: "user",
142 | id: "3"
143 | }
144 | }).then((response) => {
145 | if (response.can === permify.grpc.base.CheckResult.CHECK_RESULT_ALLOWED) {
146 | console.log("RESULT_ALLOWED")
147 | } else {
148 | console.log("RESULT_DENIED")
149 | }
150 | })
151 | ```
152 |
153 | ### Streaming Calls
154 |
155 | ```typescript
156 | import * as permify from "@permify/permify-node";
157 |
158 | function main() {
159 | const client = permify.grpc.newClient({
160 | endpoint: "localhost:3478",
161 | cert: undefined,
162 | insecure: true
163 | });
164 |
165 | let res = client.permission.lookupEntityStream({
166 | tenantId: "t1",
167 | metadata: {
168 | snapToken: "",
169 | schemaVersion: "",
170 | depth: 20
171 | },
172 | entityType: "document",
173 | permission: "view",
174 | subject: {
175 | type: "user",
176 | id: "1"
177 | }
178 | })
179 |
180 | handle(res)
181 | }
182 |
183 | async function handle(res: AsyncIterable<any>) {
184 | for await (const response of res) {
185 | // response.entityId
186 | }
187 | }
188 | ```
189 |
190 | ### Interceptors
191 |
192 | #### Access Token Interceptor
193 |
194 | ```typescript
195 | import * as permify from "@permify/permify-node";
196 |
197 | const client = permify.grpc.newClient({
198 | endpoint: "localhost:3478",
199 | cert: undefined,
200 | insecure: true
201 | }, permify.grpc.newAccessTokenInterceptor("YOUR_TOKEN"))
202 | ```
203 |
204 | ### Certs
205 |
206 | ```typescript
207 | import * as permify from "@permify/permify-node";
208 | import fs from 'fs';
209 |
210 | const cert = fs.readFileSync('path/to/cert.pem');
211 |
212 | const client = permify.grpc.newClient({
213 | endpoint: "localhost:3478",
214 | cert: cert,
215 | insecure: true
216 | }, permify.grpc.newAccessTokenInterceptor("YOUR_TOKEN"))
217 | ```
218 |
219 | Permify is an **open-source authorization service** for creating and maintaining fine-grained authorizations across
220 | your individual applications and services.
221 |
222 | * [Permify website](https://permify.co)
223 | * [Permify documentation](https://docs.permify.co/docs)
224 | * [Permify playground](https://play.permify.co)
225 | * [Permify GitHub Repository](https://github.com/Permify/permify)
226 |
227 | ## Community & Support
228 |
229 | Join our [Discord channel](https://discord.gg/MJbUjwskdH) for issues, feature requests, feedback or anything else. We
230 | love to talk about authorization and access control :heart:
231 |
232 |
233 |
234 |
235 |
236 |
237 |
238 |
239 |
240 |
241 |
242 |
243 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | If you discover any security related issues, please email tolga@permify.co or ege@permify.co instead of using the issue tracker.
4 |
--------------------------------------------------------------------------------
/buf.gen.yaml:
--------------------------------------------------------------------------------
1 | version: "v1"
2 | plugins:
3 | - name: protoc-gen-ts_proto
4 | out: "src/grpc/generated"
5 | opt: oneof=unions,forceLong=long,esModuleInterop=true,env=node,outputServices=nice-grpc,outputServices=generic-definitions,useExactTypes=false
6 | path: ./node_modules/.bin/protoc-gen-ts_proto
--------------------------------------------------------------------------------
/buf.work.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | version: v1
3 | directories:
4 | - proto
5 |
--------------------------------------------------------------------------------
/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | permify:
3 | image: "ghcr.io/permify/permify:latest"
4 | ports: ['3478:3478']
5 | command: "serve"
6 |
--------------------------------------------------------------------------------
/jasmine.json:
--------------------------------------------------------------------------------
1 | {
2 | "spec_dir": "src",
3 | "spec_files": ["*.test.ts"],
4 | "helpers": []
5 | }
6 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "@permify/permify-node",
3 | "version": "1.0.8",
4 | "description": "Permify Node Client",
5 | "main": "dist/src/index.js",
6 | "types": "dist/src/index.d.ts",
7 | "repository": "https://github.com/Permify/permify-node",
8 | "private": false,
9 | "scripts": {
10 | "run-instance": "scripts/run-instance.sh",
11 | "run-test": "ts-node node_modules/jasmine/bin/jasmine --config=jasmine.json",
12 | "lint": "eslint \"src/**/*.ts\"",
13 | "build": "rm -rf ./dist && npx tsc"
14 | },
15 | "keywords": [
16 | "permissions",
17 | "access-control",
18 | "rebac",
19 | "fga",
20 | "fine-grained-authorization",
21 | "zanzibar",
22 | "grpc"
23 | ],
24 | "author": "permify",
25 | "license": "Apache-2.0",
26 | "dependencies": {
27 | "@bufbuild/protobuf": "^2.1.0",
28 | "nice-grpc": "^2.1.9",
29 | "ts-proto": "^2.7.2"
30 | },
31 | "devDependencies": {
32 | "@types/jest": "^29.5.12",
33 | "@typescript-eslint/eslint-plugin": "^8.25.0",
34 | "@typescript-eslint/parser": "^8.25.0",
35 | "eslint": "^9.21.0",
36 | "globals": "^16.0.0",
37 | "jasmine": "^5.2.0",
38 | "jest": "^29.7.0",
39 | "ts-jest": "^29.2.4"
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/proto/README.md:
--------------------------------------------------------------------------------
1 | # Permify Protocol Buffers
2 |
3 | This folder contains Protocol Buffers used by Permify.
4 |
5 | [Protocol Buffers]: https://developers.google.com/protocol-buffers/
6 | [Buf]: https://github.com/bufbuild/buf
--------------------------------------------------------------------------------
/proto/base/v1/base.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 | package base.v1;
3 |
4 | import "google/api/expr/v1alpha1/checked.proto";
5 | import "google/protobuf/any.proto";
6 | import "google/protobuf/struct.proto";
7 | import "google/protobuf/timestamp.proto";
8 | import "validate/validate.proto";
9 |
10 | option go_package = "github.com/Permify/permify/pkg/pb/base/v1";
11 |
12 | // Enumerates results of a check operation.
13 | enum CheckResult {
14 | // Not specified check result. This is the default value.
15 | CHECK_RESULT_UNSPECIFIED = 0;
16 |
17 | // Represents a successful check (the check allowed the operation).
18 | CHECK_RESULT_ALLOWED = 1;
19 |
20 | // Represents a failed check (the check denied the operation).
21 | CHECK_RESULT_DENIED = 2;
22 | }
23 |
24 | // Enumerates the types of attribute.
25 | enum AttributeType {
26 | // Not specified attribute type. This is the default value.
27 | ATTRIBUTE_TYPE_UNSPECIFIED = 0;
28 |
29 | // A boolean attribute type.
30 | ATTRIBUTE_TYPE_BOOLEAN = 1;
31 | // A boolean array attribute type.
32 | ATTRIBUTE_TYPE_BOOLEAN_ARRAY = 2;
33 |
34 | // A string attribute type.
35 | ATTRIBUTE_TYPE_STRING = 3;
36 | // A string array attribute type.
37 | ATTRIBUTE_TYPE_STRING_ARRAY = 4;
38 |
39 | // An integer attribute type.
40 | ATTRIBUTE_TYPE_INTEGER = 5;
41 | // An integer array attribute type.
42 | ATTRIBUTE_TYPE_INTEGER_ARRAY = 6;
43 |
44 | // A double attribute type.
45 | ATTRIBUTE_TYPE_DOUBLE = 7;
46 | // A double array attribute type.
47 | ATTRIBUTE_TYPE_DOUBLE_ARRAY = 8;
48 | }
49 |
50 | // Context encapsulates the information related to a single operation,
51 | // including the tuples involved and the associated attributes.
52 | message Context {
53 | // A repeated field of tuples involved in the operation.
54 | repeated Tuple tuples = 1 [json_name = "tuples"];
55 |
56 | // A repeated field of attributes associated with the operation.
57 | repeated Attribute attributes = 2 [json_name = "attributes"];
58 |
59 | // Additional data associated with the context.
60 | google.protobuf.Struct data = 3;
61 | }
62 |
63 | // Child represents a node in the permission tree.
64 | message Child {
65 | // Child node can be either a leaf or a rewrite operation.
66 | oneof type {
67 | // Either leaf or rewrite operation is required.
68 | option (validate.required) = true;
69 |
70 | // Leaf node in the permission tree.
71 | Leaf leaf = 1 [(validate.rules).message.required = true];
72 |
73 | // Rewrite operation in the permission tree.
74 | Rewrite rewrite = 2 [(validate.rules).message.required = true];
75 | }
76 | }
77 |
78 | // Leaf represents a leaf node in the permission tree.
79 | message Leaf {
80 | // Leaf node can be one of several types.
81 | oneof type {
82 | // One type is required.
83 | option (validate.required) = true;
84 |
85 | // A computed set of users.
86 | ComputedUserSet computed_user_set = 1 [(validate.rules).message.required = true];
87 |
88 | // A tuple to user set conversion.
89 | TupleToUserSet tuple_to_user_set = 2 [(validate.rules).message.required = true];
90 |
91 | // A computed attribute.
92 | ComputedAttribute computed_attribute = 3 [(validate.rules).message.required = true];
93 |
94 | // A call to a function or method.
95 | Call call = 4 [(validate.rules).message.required = true];
96 | }
97 | }
98 |
99 | // The Rewrite message represents a specific rewrite operation.
100 | // This operation could be one of the following: union, intersection, or exclusion.
101 | message Rewrite {
102 | // Operation enum includes potential rewrite operations.
103 | // OPERATION_UNION: Represents a union operation.
104 | // OPERATION_INTERSECTION: Represents an intersection operation.
105 | // OPERATION_EXCLUSION: Represents an exclusion operation.
106 | enum Operation {
107 | OPERATION_UNSPECIFIED = 0; // Default, unspecified operation.
108 | OPERATION_UNION = 1; // Represents a union operation.
109 | OPERATION_INTERSECTION = 2; // Represents an intersection operation.
110 | OPERATION_EXCLUSION = 3; // Represents an exclusion operation.
111 | }
112 |
113 | // The type of rewrite operation to be performed.
114 | Operation rewrite_operation = 1;
115 |
116 | // A list of children that are operated upon by the rewrite operation.
117 | repeated Child children = 2;
118 | }
119 |
120 | // The SchemaDefinition message provides definitions for entities and rules,
121 | // and includes references to clarify whether a name refers to an entity or a rule.
122 | message SchemaDefinition {
123 | // The Reference enum helps distinguish whether a name corresponds to an entity or a rule.
124 | enum Reference {
125 | REFERENCE_UNSPECIFIED = 0; // Default, unspecified reference.
126 | REFERENCE_ENTITY = 1; // Indicates that the name refers to an entity.
127 | REFERENCE_RULE = 2; // Indicates that the name refers to a rule.
128 | }
129 |
130 | // Map of entity definitions. The key is the entity name, and the value is the corresponding EntityDefinition.
131 | map<string, EntityDefinition> entity_definitions = 1;
132 |
133 | // Map of rule definitions. The key is the rule name, and the value is the corresponding RuleDefinition.
134 | map<string, RuleDefinition> rule_definitions = 2;
135 |
136 | // Map of references to signify whether a string refers to an entity or a rule.
137 | map<string, Reference> references = 3;
138 | }
139 |
140 | // The EntityDefinition message provides detailed information about a specific entity.
141 | message EntityDefinition {
142 | // The Reference enum specifies whether a name pertains to a relation, permission, or attribute.
143 | enum Reference {
144 | REFERENCE_UNSPECIFIED = 0; // Default, unspecified reference.
145 | REFERENCE_RELATION = 1; // Indicates that the name refers to a relation.
146 | REFERENCE_PERMISSION = 2; // Indicates that the name refers to a permission.
147 | REFERENCE_ATTRIBUTE = 3; // Indicates that the name refers to an attribute.
148 | }
149 |
150 | // The name of the entity, which follows a specific string pattern and has a maximum byte size.
151 | string name = 1 [(validate.rules).string = {
152 | pattern: "^[a-zA-Z_]{1,64}$"
153 | max_bytes: 64
154 | }];
155 |
156 | // Map of relation definitions within this entity. The key is the relation name, and the value is the RelationDefinition.
157 | map<string, RelationDefinition> relations = 2;
158 |
159 | // Map of permission definitions within this entity. The key is the permission name, and the value is the PermissionDefinition.
160 | map<string, PermissionDefinition> permissions = 3;
161 |
162 | // Map of attribute definitions within this entity. The key is the attribute name, and the value is the AttributeDefinition.
163 | map<string, AttributeDefinition> attributes = 4;
164 |
165 | // Map of references indicating whether a string pertains to a relation, permission, or attribute.
166 | map<string, Reference> references = 5;
167 | }
168 |
169 | // The RuleDefinition message provides detailed information about a specific rule.
170 | message RuleDefinition {
171 | // The name of the rule, which follows a specific string pattern and has a maximum byte size.
172 | string name = 1 [(validate.rules).string = {
173 | pattern: "^[a-zA-Z_]{1,64}$"
174 | max_bytes: 64
175 | }];
176 |
177 | // Map of arguments for this rule. The key is the attribute name, and the value is the AttributeType.
178 | map<string, AttributeType> arguments = 2;
179 |
180 | // The expression for this rule in the form of a google.api.expr.v1alpha1.CheckedExpr.
181 | google.api.expr.v1alpha1.CheckedExpr expression = 3;
182 | }
183 |
184 | // The AttributeDefinition message provides detailed information about a specific attribute.
185 | message AttributeDefinition {
186 | // The name of the attribute, which follows a specific string pattern and has a maximum byte size.
187 | string name = 1 [(validate.rules).string = {
188 | pattern: "^[a-zA-Z_]{1,64}$"
189 | max_bytes: 64
190 | }];
191 |
192 | // The type of the attribute.
193 | AttributeType type = 2;
194 | }
195 |
196 | // The RelationDefinition message provides detailed information about a specific relation.
197 | message RelationDefinition {
198 | // The name of the relation, which follows a specific string pattern and has a maximum byte size.
199 | string name = 1 [(validate.rules).string = {
200 | pattern: "^[a-zA-Z_]{1,64}$"
201 | max_bytes: 64
202 | }];
203 |
204 | // A list of references to other relations.
205 | repeated RelationReference relation_references = 2;
206 | }
207 |
208 | // The PermissionDefinition message provides detailed information about a specific permission.
209 | message PermissionDefinition {
210 | // The name of the permission, which follows a specific string pattern and has a maximum byte size.
211 | string name = 1 [(validate.rules).string = {
212 | pattern: "^[a-zA-Z_]{1,64}$"
213 | max_bytes: 64
214 | }];
215 |
216 | // The child related to this permission.
217 | Child child = 2;
218 | }
219 |
220 | // The RelationReference message provides a reference to a specific relation.
221 | message RelationReference {
222 | // The type of the referenced entity, which follows a specific string pattern and has a maximum byte size.
223 | string type = 1 [(validate.rules).string = {
224 | pattern: "^[a-zA-Z_]{1,64}$"
225 | max_bytes: 64
226 | }];
227 |
228 | // The name of the referenced relation, which follows a specific string pattern and has a maximum byte size.
229 | string relation = 2 [(validate.rules).string = {
230 | pattern: "^[a-zA-Z_]{1,64}$"
231 | max_bytes: 64
232 | ignore_empty: true
233 | }];
234 | }
235 |
236 | message Entrance {
237 | // The type of the entrance entity, which follows a specific string pattern and has a maximum byte size.
238 | string type = 1 [(validate.rules).string = {
239 | pattern: "^[a-zA-Z_]{1,64}$"
240 | max_bytes: 64
241 | }];
242 |
243 | // The value associated with the entrance, which follows a specific string pattern and has a maximum byte size.
244 | string value = 2 [(validate.rules).string = {
245 | pattern: "^[a-zA-Z_]{1,64}$"
246 | max_bytes: 64
247 | }];
248 | }
249 |
250 | // Argument defines the type of argument in a Call. It can be either a ComputedAttribute or a ContextAttribute.
251 | message Argument {
252 | oneof type {
253 | ComputedAttribute computed_attribute = 1;
254 | }
255 | }
256 |
257 | // Call represents a call to a rule. It includes the name of the rule and the arguments passed to it.
258 | message Call {
259 | string rule_name = 1; // Name of the rule
260 | repeated Argument arguments = 2; // Arguments passed to the rule
261 | }
262 |
263 | // ComputedAttribute defines a computed attribute which includes its name.
264 | message ComputedAttribute {
265 | string name = 1 [(validate.rules).string = {
266 | pattern: "^[a-zA-Z_]{1,64}$"
267 | max_bytes: 64
268 | }]; // Name of the computed attribute
269 | }
270 |
271 | // ComputedUserSet defines a set of computed users which includes the relation name.
272 | message ComputedUserSet {
273 | string relation = 1 [(validate.rules).string = {
274 | pattern: "^[a-zA-Z_]{1,64}$"
275 | max_bytes: 64
276 | }]; // Relation name
277 | }
278 |
279 | // TupleToUserSet defines a mapping from tuple sets to computed user sets.
280 | message TupleToUserSet {
281 | TupleSet tupleSet = 1; // The tuple set
282 | ComputedUserSet computed = 2; // The computed user set
283 | }
284 |
285 | // TupleSet represents a set of tuples associated with a specific relation.
286 | message TupleSet {
287 | string relation = 1 [(validate.rules).string = {
288 | pattern: "^[a-zA-Z_]{1,64}$"
289 | max_bytes: 64
290 | }];
291 | }
292 |
293 | // Tuple is a structure that includes an entity, a relation, and a subject.
294 | message Tuple {
295 | Entity entity = 1 [
296 | json_name = "entity",
297 | (validate.rules).message.required = true
298 | ];
299 |
300 | string relation = 2 [
301 | json_name = "relation",
302 | (validate.rules).string = {
303 | pattern: "^[a-zA-Z_]{1,64}$"
304 | max_bytes: 64
305 | }
306 | ];
307 |
308 | Subject subject = 3 [
309 | json_name = "subject",
310 | (validate.rules).message.required = true
311 | ];
312 | }
313 |
314 | // Attribute represents an attribute of an entity with a specific type and value.
315 | message Attribute {
316 | Entity entity = 1 [
317 | json_name = "entity",
318 | (validate.rules).message.required = true
319 | ];
320 |
321 | string attribute = 2 [json_name = "attribute"]; // Name of the attribute
322 |
323 | google.protobuf.Any value = 3 [json_name = "value"];
324 | }
325 |
326 | // Tuples is a collection of tuples.
327 | message Tuples {
328 | repeated Tuple tuples = 1 [json_name = "tuples"];
329 | }
330 |
331 | // Attributes is a collection of attributes.
332 | message Attributes {
333 | repeated Attribute attributes = 1 [json_name = "attributes"];
334 | }
335 |
336 | // Entity represents an entity with a type and an identifier.
337 | message Entity {
338 | string type = 1 [
339 | json_name = "type",
340 | (validate.rules).string = {
341 | pattern: "^[a-zA-Z_]{1,64}$"
342 | max_bytes: 64
343 | }
344 | ];
345 |
346 | string id = 2 [
347 | json_name = "id",
348 | (validate.rules).string = {
349 | pattern: "^([a-zA-Z0-9_\\-@\\.:+]{1,128}|\\*)$"
350 | max_bytes: 128
351 | }
352 | ];
353 | }
354 |
355 | // EntityAndRelation represents an entity along with a relation.
356 | message EntityAndRelation {
357 | Entity entity = 1 [
358 | json_name = "entity",
359 | (validate.rules).message.required = true
360 | ];
361 |
362 | string relation = 2 [
363 | json_name = "relation",
364 | (validate.rules).string = {
365 | pattern: "^[a-zA-Z_]{1,64}$"
366 | max_bytes: 64
367 | }
368 | ];
369 | }
370 |
371 | // Subject represents an entity subject with a type, an identifier, and a relation.
372 | message Subject {
373 | string type = 1 [
374 | json_name = "type",
375 | (validate.rules).string = {
376 | pattern: "^[a-zA-Z_]{1,64}$"
377 | max_bytes: 64
378 | }
379 | ];
380 |
381 | string id = 2 [
382 | json_name = "id",
383 | (validate.rules).string = {
384 | pattern: "^([a-zA-Z0-9_\\-@\\.:+]{1,128}|\\*)$"
385 | max_bytes: 128
386 | }
387 | ];
388 |
389 | string relation = 3 [
390 | json_name = "relation",
391 | (validate.rules).string = {
392 | pattern: "^[a-zA-Z_]{1,64}$"
393 | max_bytes: 64
394 | ignore_empty: true
395 | }
396 | ];
397 | }
398 |
399 | // AttributeFilter is used to filter attributes based on the entity and attribute names.
400 | message AttributeFilter {
401 | EntityFilter entity = 1 [json_name = "entity"];
402 |
403 | repeated string attributes = 2 [json_name = "attributes"]; // Names of the attributes to be filtered
404 | }
405 |
406 | // TupleFilter is used to filter tuples based on the entity, relation and the subject.
407 | message TupleFilter {
408 | EntityFilter entity = 1 [json_name = "entity"];
409 |
410 | string relation = 2 [
411 | json_name = "relation",
412 | (validate.rules).string = {
413 | pattern: "^[a-zA-Z_]{1,64}$"
414 | max_bytes: 64
415 | ignore_empty: true
416 | }
417 | ];
418 |
419 | SubjectFilter subject = 3 [json_name = "subject"]; // The subject filter
420 | }
421 |
422 | // EntityFilter is used to filter entities based on the type and ids.
423 | message EntityFilter {
424 | string type = 1 [json_name = "type"]; // Type of the entity
425 |
426 | repeated string ids = 2 [json_name = "ids"]; // List of entity IDs
427 | }
428 |
429 | // SubjectFilter is used to filter subjects based on the type, ids and relation.
430 | message SubjectFilter {
431 | string type = 1 [json_name = "type"]; // Type of the subject
432 |
433 | repeated string ids = 2 [json_name = "ids"]; // List of subject IDs
434 |
435 | string relation = 3 [
436 | json_name = "relation",
437 | (validate.rules).string = {
438 | pattern: "^[a-zA-Z_]{1,64}$"
439 | max_bytes: 64
440 | ignore_empty: true
441 | }
442 | ];
443 | }
444 |
445 | // ExpandTreeNode represents a node in an expansion tree with a specific operation and its children.
446 | message ExpandTreeNode {
447 | // Operation is an enum representing the type of operation to be applied on the tree node.
448 | enum Operation {
449 | OPERATION_UNSPECIFIED = 0;
450 | OPERATION_UNION = 1;
451 | OPERATION_INTERSECTION = 2;
452 | OPERATION_EXCLUSION = 3;
453 | }
454 |
455 | Operation operation = 1; // Operation to be applied on this tree node
456 |
457 | repeated Expand children = 2; // The children of this tree node
458 | }
459 |
460 | // Expand is used to define a hierarchical structure for permissions.
461 | // It has an entity, permission, and arguments. The node can be either another hierarchical structure or a set of subjects.
462 | message Expand {
463 | // entity is the entity for which the hierarchical structure is defined.
464 | Entity entity = 1;
465 |
466 | // permission is the permission applied to the entity.
467 | string permission = 2;
468 |
469 | // arguments are the additional information or context used to evaluate permissions.
470 | repeated Argument arguments = 3;
471 |
472 | // The node can either be an ExpandTreeNode or a set of Subjects.
473 | oneof node {
474 | // expand contains another hierarchical structure.
475 | ExpandTreeNode expand = 4;
476 |
477 | // leaf contains a set of subjects.
478 | ExpandLeaf leaf = 5;
479 | }
480 | }
481 |
482 | // ExpandLeaf is the leaf node of an Expand tree and can be either a set of Subjects or a set of Values.
483 | message ExpandLeaf {
484 | oneof type { // type can be either Subjects or Values.
485 | option (validate.required) = true;
486 |
487 | // subjects are used when the leaf is a set of subjects.
488 | Subjects subjects = 1 [json_name = "subjects"];
489 |
490 | // values are used when the leaf node is a set of values.
491 | Values values = 2 [json_name = "values"];
492 |
493 | // value is used when the leaf node is a single value.
494 | google.protobuf.Any value = 3 [json_name = "value"];
495 | }
496 | }
497 |
498 | message Values {
499 | map<string, google.protobuf.Any> values = 1 [json_name = "values"];
500 | }
501 |
502 | // Subjects holds a repeated field of Subject type.
503 | message Subjects {
504 | repeated Subject subjects = 1 [json_name = "subjects"]; // A list of subjects.
505 | }
506 |
507 | // Tenant represents a tenant with an id, a name, and a timestamp indicating when it was created.
508 | message Tenant {
509 | string id = 1 [json_name = "id"]; // The ID of the tenant.
510 | string name = 2 [json_name = "name"]; // The name of the tenant.
511 | google.protobuf.Timestamp created_at = 3 [json_name = "created_at"]; // The time at which the tenant was created.
512 | }
513 |
514 | // DataChanges represent changes in data with a snap token and a list of data change objects.
515 | message DataChanges {
516 | string snap_token = 1 [json_name = "snap_token"]; // The snapshot token.
517 |
518 | repeated DataChange data_changes = 2 [json_name = "data_changes"]; // The list of data changes.
519 | }
520 |
521 | // DataChange represents a single change in data, with an operation type and the actual change which could be a tuple or an attribute.
522 | message DataChange {
523 | enum Operation {
524 | OPERATION_UNSPECIFIED = 0; // Default operation, not specified.
525 | OPERATION_CREATE = 1; // Creation operation.
526 | OPERATION_DELETE = 2; // Deletion operation.
527 | }
528 |
529 | Operation operation = 1 [json_name = "operation"]; // The operation type.
530 |
531 | oneof type { // The type of the change which can be either a tuple or an attribute.
532 | option (validate.required) = true;
533 | Tuple tuple = 2 [json_name = "tuple"]; // If the change is a tuple.
534 | Attribute attribute = 3 [json_name = "attribute"]; // If the change is an attribute.
535 | }
536 | }
537 |
538 | // Wrapper for a single string value.
539 | message StringValue {
540 | string data = 1; // The string value.
541 | }
542 |
543 | // Wrapper for a single integer value.
544 | message IntegerValue {
545 | int32 data = 1; // The integer value.
546 | }
547 |
548 | // Wrapper for a single double precision floating point value.
549 | message DoubleValue {
550 | double data = 1; // The double value.
551 | }
552 |
553 | // Wrapper for a single boolean value.
554 | message BooleanValue {
555 | bool data = 1; // The boolean value.
556 | }
557 |
558 | // Wrapper for an array of strings.
559 | message StringArrayValue {
560 | repeated string data = 1; // The array of strings.
561 | }
562 |
563 | // Wrapper for an array of integers.
564 | message IntegerArrayValue {
565 | repeated int32 data = 1; // The array of integers.
566 | }
567 |
568 | // Wrapper for an array of double precision floating point values.
569 | message DoubleArrayValue {
570 | repeated double data = 1; // The array of doubles.
571 | }
572 |
573 | // Wrapper for an array of booleans.
574 | message BooleanArrayValue {
575 | repeated bool data = 1; // The array of booleans.
576 | }
577 |
// DataBundle is a message representing a bundle of data, which includes a name,
// a list of arguments, and a series of operations.
message DataBundle {
  // 'name' is a simple string field representing the name of the DataBundle.
  string name = 1 [json_name = "name"];

  // 'arguments' is a repeated field, which means it can contain multiple strings.
  // These are used to store a list of arguments related to the DataBundle.
  repeated string arguments = 2 [json_name = "arguments"];

  // 'operations' is a repeated field containing multiple Operation messages.
  // Each Operation represents a specific action or set of actions to be performed.
  repeated Operation operations = 3 [json_name = "operations"];
}

// Operation is a message representing a series of operations that can be performed.
// It includes fields for writing and deleting relationships and attributes.
message Operation {
  // 'relationships_write' is a repeated string field for storing relationship keys
  // that are to be written or created.
  repeated string relationships_write = 1 [json_name = "relationships_write"];

  // 'relationships_delete' is a repeated string field for storing relationship keys
  // that are to be deleted or removed.
  repeated string relationships_delete = 2 [json_name = "relationships_delete"];

  // 'attributes_write' is a repeated string field for storing attribute keys
  // that are to be written or created.
  repeated string attributes_write = 3 [json_name = "attributes_write"];

  // 'attributes_delete' is a repeated string field for storing attribute keys
  // that are to be deleted or removed.
  repeated string attributes_delete = 4 [json_name = "attributes_delete"];
}

// Partials contains the write, update and delete definitions
message Partials {
  repeated string write = 1 [json_name = "write"];   // Definitions to be written (created).
  repeated string delete = 2 [json_name = "delete"]; // Definitions to be deleted (removed).
  repeated string update = 3 [json_name = "update"]; // Definitions to be updated.
}
619 |
--------------------------------------------------------------------------------
/proto/base/v1/errors.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 | package base.v1;
3 |
4 | option go_package = "github.com/Permify/permify/pkg/pb/base/v1";
5 |
// ErrorCode enumerates every error the Permify API can return.
// Values are grouped by range: 1xxx authn, 2xxx validation,
// 4xxx not-found, 5xxx internal.
// NOTE(review): 2001, 2004 and 2005 are unassigned — presumably retired
// codes left unused for wire compatibility; confirm before reusing them.
enum ErrorCode {
  ERROR_CODE_UNSPECIFIED = 0;

  // authn
  ERROR_CODE_MISSING_BEARER_TOKEN = 1001;
  ERROR_CODE_UNAUTHENTICATED = 1002;
  ERROR_CODE_MISSING_TENANT_ID = 1003;
  ERROR_CODE_INVALID_AUDIENCE = 1004;
  ERROR_CODE_INVALID_CLAIMS = 1005;
  ERROR_CODE_INVALID_ISSUER = 1006;
  ERROR_CODE_INVALID_BEARER_TOKEN = 1007;

  // validation
  ERROR_CODE_VALIDATION = 2000;
  ERROR_CODE_UNDEFINED_CHILD_TYPE = 2002;
  ERROR_CODE_UNDEFINED_CHILD_KIND = 2003;
  ERROR_CODE_UNDEFINED_RELATION_REFERENCE = 2006;
  ERROR_CODE_NOT_SUPPORTED_RELATION_WALK = 2007;
  ERROR_CODE_ENTITY_AND_SUBJECT_CANNOT_BE_EQUAL = 2008;
  ERROR_CODE_DEPTH_NOT_ENOUGH = 2009;
  ERROR_CODE_RELATION_REFERENCE_NOT_FOUND_IN_ENTITY_REFERENCES = 2010;
  ERROR_CODE_RELATION_REFERENCE_MUST_HAVE_ONE_ENTITY_REFERENCE = 2011;
  ERROR_CODE_DUPLICATED_ENTITY_REFERENCE = 2012;
  ERROR_CODE_DUPLICATED_RELATION_REFERENCE = 2013;
  ERROR_CODE_DUPLICATED_PERMISSION_REFERENCE = 2014;
  ERROR_CODE_SCHEMA_PARSE = 2015;
  ERROR_CODE_SCHEMA_COMPILE = 2016;
  ERROR_CODE_SUBJECT_RELATION_MUST_BE_EMPTY = 2017;
  ERROR_CODE_SUBJECT_RELATION_CANNOT_BE_EMPTY = 2018;
  ERROR_CODE_SCHEMA_MUST_HAVE_USER_ENTITY_DEFINITION = 2019;
  ERROR_CODE_UNIQUE_CONSTRAINT = 2020;
  ERROR_CODE_INVALID_CONTINUOUS_TOKEN = 2021;
  ERROR_CODE_INVALID_KEY = 2022;
  ERROR_CODE_ENTITY_TYPE_REQUIRED = 2023;
  ERROR_CODE_NO_ENTITY_REFERENCES_FOUND_IN_SCHEMA = 2024;
  ERROR_CODE_INVALID_ARGUMENT = 2025;
  ERROR_CODE_INVALID_RULE_REFERENCE = 2026;
  ERROR_CODE_NOT_SUPPORTED_WALK = 2027;
  ERROR_CODE_MISSING_ARGUMENT = 2028;
  ERROR_CODE_ALREADY_EXIST = 2029;
  ERROR_CODE_MAX_DATA_PER_WRITE_EXCEEDED = 2030;

  // not found
  ERROR_CODE_NOT_FOUND = 4000;
  ERROR_CODE_ENTITY_TYPE_NOT_FOUND = 4001;
  ERROR_CODE_PERMISSION_NOT_FOUND = 4002;
  ERROR_CODE_SCHEMA_NOT_FOUND = 4003;
  ERROR_CODE_SUBJECT_TYPE_NOT_FOUND = 4004;
  ERROR_CODE_ENTITY_DEFINITION_NOT_FOUND = 4005;
  ERROR_CODE_PERMISSION_DEFINITION_NOT_FOUND = 4006;
  ERROR_CODE_RELATION_DEFINITION_NOT_FOUND = 4007;
  ERROR_CODE_RECORD_NOT_FOUND = 4008;
  ERROR_CODE_TENANT_NOT_FOUND = 4009;
  ERROR_CODE_ATTRIBUTE_DEFINITION_NOT_FOUND = 4010;
  ERROR_CODE_ATTRIBUTE_TYPE_MISMATCH = 4011;
  ERROR_CODE_BUNDLE_NOT_FOUND = 4012;
  ERROR_CODE_RULE_DEFINITION_NOT_FOUND = 4013;
  ERROR_CODE_ENTITY_STATEMENT_NOT_FOUND = 4014;
  ERROR_CODE_REFERENCE_NOT_FOUND = 4015;

  // internal
  ERROR_CODE_INTERNAL = 5000;
  ERROR_CODE_CANCELLED = 5001;
  ERROR_CODE_SQL_BUILDER = 5002;
  ERROR_CODE_CIRCUIT_BREAKER = 5003;
  ERROR_CODE_EXECUTION = 5005;
  ERROR_CODE_SCAN = 5006;
  ERROR_CODE_MIGRATION = 5007;
  ERROR_CODE_TYPE_CONVERSATION = 5008;
  ERROR_CODE_ERROR_MAX_RETRIES = 5009;
  ERROR_CODE_ROLLBACK = 5010;
  ERROR_CODE_EXCLUSION_REQUIRES_MORE_THAN_ONE_FUNCTION = 5011;
  ERROR_CODE_NOT_IMPLEMENTED = 5012;
  ERROR_CODE_DATASTORE = 5013;
  ERROR_CODE_UNKNOWN_STATEMENT_TYPE = 5014;
  ERROR_CODE_UNKNOWN_REFERENCE_TYPE = 5015;
  ERROR_CODE_CANNOT_CONVERT_TO_ENTITY_STATEMENT = 5016;
  ERROR_CODE_CANNOT_CONVERT_TO_RELATION_STATEMENT = 5017;
  ERROR_CODE_CANNOT_CONVERT_TO_ATTRIBUTE_STATEMENT = 5018;
  ERROR_CODE_SERIALIZATION = 5019;
}
87 |
// ErrorResponse is the standard error payload returned by the API:
// a machine-readable code plus a human-readable message.
message ErrorResponse {
  ErrorCode code = 1;    // Machine-readable error classification (see ErrorCode).
  string message = 2;    // Human-readable description of the failure.
}
--------------------------------------------------------------------------------
/proto/base/v1/openapi.proto:
--------------------------------------------------------------------------------
1 | syntax = "proto3";
2 | package base.v1;
3 |
4 | option go_package = "github.com/Permify/permify/pkg/pb/base/v1";
5 |
6 | import "protoc-gen-openapiv2/options/annotations.proto";
7 |
// File-level OpenAPI (Swagger) metadata consumed by grpc-gateway's
// protoc-gen-openapiv2 generator: API info (title/version/contact/license),
// JSON request/response content types, HTTPS-only scheme, and an
// "ApiKeyAuth" security definition read from the "Authorization" header.
option (grpc.gateway.protoc_gen_openapiv2.options.openapiv2_swagger) = {
  info: {
    title: "Permify API";
    description: "Permify is an open source authorization service for creating fine-grained and scalable authorization systems.";
    version: "v1.3.9";
    contact: {
      name: "API Support";
      url: "https://github.com/Permify/permify/issues";
      email: "hello@permify.co";
    };
    license: {
      name: "Apache-2.0 license";
      url: "https://github.com/Permify/permify/blob/master/LICENSE";
    }
  };
  consumes: "application/json";
  produces: "application/json";
  schemes: HTTPS;
  security_definitions: {
    security: {
      key: "ApiKeyAuth";
      value: {
        type: TYPE_API_KEY;
        in: IN_HEADER;
        name: "Authorization";
      }
    }
  }
};
--------------------------------------------------------------------------------
/proto/buf.lock:
--------------------------------------------------------------------------------
1 | # Generated by buf. DO NOT EDIT.
2 | version: v1
3 | deps:
4 | - remote: buf.build
5 | owner: envoyproxy
6 | repository: protoc-gen-validate
7 | commit: 6607b10f00ed4a3d98f906807131c44a
8 | digest: shake256:acc7b2ededb2f88d296862943a003b157bdb68ec93ed13dcd8566b2d06e47993ea6daf12013b9655658aaf6bbdb141cf65bfe400ce2870f4654b0a5b45e57c09
9 | - remote: buf.build
10 | owner: googleapis
11 | repository: googleapis
12 | commit: 75b4300737fb4efca0831636be94e517
13 | digest: shake256:d865f55b8ceb838c90c28b09894ab43d07f42551108c23760004a6a4e28fe24d3a1f7380a3c9278edb329a338a9cc5db8ad9f394de548e70d534e98504972d67
14 | - remote: buf.build
15 | owner: grpc-ecosystem
16 | repository: grpc-gateway
17 | commit: a1ecdc58eccd49aa8bea2a7a9022dc27
18 | digest: shake256:efdd86fbdc42e8b7259fe461a49656827a03fb7cba0b3b9eb622ca10654ec6beccb9a051229c1553ccd89ed3e95d69ad4d7c799f1da3f3f1bd447b7947a4893e
19 |
--------------------------------------------------------------------------------
/proto/buf.yaml:
--------------------------------------------------------------------------------
1 | version: v1
2 | name: buf.build/permifyco/permify
3 | deps:
4 | - buf.build/envoyproxy/protoc-gen-validate:6607b10f00ed4a3d98f906807131c44a
5 | - buf.build/grpc-ecosystem/grpc-gateway:a1ecdc58eccd49aa8bea2a7a9022dc27
6 | - buf.build/googleapis/googleapis:75b4300737fb4efca0831636be94e517
7 | breaking:
8 | ignore_unstable_packages: true
9 | lint:
10 | allow_comment_ignores: true
11 | except:
12 | - PACKAGE_VERSION_SUFFIX
13 | - FIELD_LOWER_SNAKE_CASE
14 | - SERVICE_SUFFIX
--------------------------------------------------------------------------------
/samples/create_tenant.ts:
--------------------------------------------------------------------------------
1 | import * as permify from "@permify/permify-node";
2 |
3 |
4 | const request = new permify.grpc.payload.TenantCreateRequest();
5 | request.setId("t1");
6 | request.setName("Tenant 1");
7 |
8 | const client = permify.grpc.newClient({
9 | endpoint: "localhost:3478",
10 | cert: undefined
11 | });
12 |
13 | client.tenancy.create(request).then((response) => {
14 | console.log(response);
15 | // handle response
16 | })
--------------------------------------------------------------------------------
/scripts/run-instance.sh:
--------------------------------------------------------------------------------
1 | #! /bin/sh
2 |
3 | set -e
4 |
5 | exit_code=0
6 | docker compose up -d --wait
7 | yarn run || exit_code=$?
8 | docker compose down
9 | exit $exit_code
10 |
--------------------------------------------------------------------------------
/src/grpc-clients.test.ts:
--------------------------------------------------------------------------------
1 | import * as permify from ".";
2 | import {Any} from "./grpc/generated/google/protobuf/any";
3 |
// Integration tests: these expect a live Permify instance on
// localhost:3478 (started via scripts/run-instance.sh / docker-compose.yaml).
describe("clients test", () => {

    it("permission client check", (done) => {
        // Initialize the Permify gRPC client
        let client = permify.grpc.newClient({
            endpoint: "localhost:3478",
            cert: undefined,
            pk: undefined,
            certChain: undefined,
            insecure: true
        });

        // Define the schema
        let schema = `
        entity user {}

        entity document {
            relation viewer @user

            action view = viewer
        }
        `;

        // Write the schema
        client.schema.write({
            tenantId: "t1",
            schema: schema
        }).then((response1_1: permify.grpc.payload.SchemaWriteResponse) => {
            // Perform the permission check
            // NOTE(review): neither .then chain has a .catch, so an RPC
            // failure surfaces as a jasmine timeout rather than a test failure.
            client.permission.check({
                tenantId: "t1",
                metadata: {
                    snapToken: "",
                    schemaVersion: response1_1.schemaVersion,
                    depth: 20
                },
                entity: {
                    type: "document",
                    id: "1"
                },
                permission: "view",
                subject: {
                    type: "user",
                    id: "3"
                }
            }).then((response1_2: permify.grpc.payload.PermissionCheckResponse) => {
                // Verify the response
                // No viewer tuple was written for user 3, so the check is denied.
                expect(response1_2.can).toBe(permify.grpc.base.CheckResult.CHECK_RESULT_DENIED);
                done();
            });
        });
    });

    it("permission client lookup entity", (done) => {
        // Initialize the Permify gRPC client
        let client = permify.grpc.newClient({
            endpoint: "localhost:3478",
            cert: undefined,
            pk: undefined,
            certChain: undefined,
            insecure: true
        });

        // Create a BooleanValue message
        const booleanValue = permify.grpc.base.BooleanValue.fromJSON({ data: true });

        // Create an Any message to wrap the BooleanValue
        // (attribute values travel as google.protobuf.Any carrying the
        // wrapper message's type URL plus its encoded bytes)
        const anyMessage = Any.fromJSON({
            typeUrl: 'type.googleapis.com/base.v1.BooleanValue',
            value: permify.grpc.base.BooleanValue.encode(booleanValue).finish()
        });

        // Define the schema
        let schema = `
        entity user {}

        entity document {
            relation viewer @user

            attribute public boolean

            action view = viewer
        }
        `;

        // Write the schema
        client.schema.write({
            tenantId: "t1",
            schema: schema
        }).then((response2_1: permify.grpc.payload.SchemaWriteResponse) => {

            // Write the data
            client.data.write({
                tenantId: "t1",
                metadata: {
                    schemaVersion: response2_1.schemaVersion
                },
                attributes: [{
                    entity: {
                        type: "document",
                        id: "1"
                    },
                    attribute: "public",
                    value: anyMessage,
                }],
                tuples: [{
                    entity: {
                        type: "document",
                        id: "1"
                    },
                    relation: "viewer",
                    subject: {
                        type: "user",
                        id: "1"
                    }
                }, {
                    entity: {
                        type: "document",
                        id: "3"
                    },
                    relation: "viewer",
                    subject: {
                        type: "user",
                        id: "1"
                    }
                }, {
                    entity: {
                        type: "document",
                        id: "4"
                    },
                    relation: "viewer",
                    subject: {
                        type: "user",
                        id: "1"
                    }
                }]
            }).then((response2_2: permify.grpc.payload.DataWriteResponse) => {
                // Perform Lookup Entity Stream
                const response2_3 = client.permission.lookupEntityStream({
                    tenantId: "t1",
                    metadata: {
                        snapToken: response2_2.snapToken,
                        schemaVersion: response2_1.schemaVersion,
                        depth: 20
                    },
                    entityType: "document",
                    permission: "view",
                    subject: {
                        type: "user",
                        id: "1"
                    }
                })

                // Handle the stream response
                handle(response2_3, ["1", "3", "4"]);

                // Wait for the stream to complete
                // NOTE(review): a fixed 1s sleep races against stream completion;
                // awaiting handle() before calling done() would be deterministic.
                setTimeout(() => {
                    done();
                }, 1000);
            });
        });
    });
});
168 |
169 | // Helper function to handle the stream response
170 | async function handle(res: AsyncIterable, expected: string[]) {
171 | for await (const response of res) {
172 | expect(expected.includes(response.entityId)).toBe(true);
173 | }
174 | }
175 |
--------------------------------------------------------------------------------
/src/grpc/clients.ts:
--------------------------------------------------------------------------------
1 | import {ClientMiddleware, createChannel, createClientFactory, ChannelCredentials} from 'nice-grpc';
2 |
3 | import {
4 | PermissionDefinition,
5 | SchemaDefinition,
6 | DataDefinition,
7 | TenancyDefinition,
8 | WatchDefinition,
9 | BundleDefinition
10 | } from './generated/base/v1/service';
11 |
12 | import {Config} from "./config";
13 |
14 | /**
15 | * Create a new gRPC service client for of Permify.
16 | * The client can be configured with multiple client interceptors. For authentication interceptors,
17 | * see the interceptors in this package.
18 | *
19 | * @param conf A configuration object for bootstrap connection
20 | * @param interceptors A list of interceptors that should be used for the client.
21 | *
22 | * @returns A new gRPC service client for the Permission API of Permify.
23 | */
24 | export function newClient(conf: Config, ...interceptors: ClientMiddleware[]) {
25 | const channel = (conf.insecure)
26 | ? createChannel(conf.endpoint, ChannelCredentials.createInsecure())
27 | : createChannel(conf.endpoint, ChannelCredentials.createSsl(conf.cert, conf.pk, conf.certChain));
28 |
29 | let factory = createClientFactory();
30 | for (const interceptor of interceptors) {
31 | factory = factory.use(interceptor);
32 | }
33 | return {
34 | permission: factory.create(PermissionDefinition, channel),
35 | schema: factory.create(SchemaDefinition, channel),
36 | data: factory.create(DataDefinition, channel),
37 | bundle: factory.create(BundleDefinition, channel),
38 | tenancy: factory.create(TenancyDefinition, channel),
39 | watch: factory.create(WatchDefinition, channel)
40 | };
41 | }
--------------------------------------------------------------------------------
/src/grpc/config.ts:
--------------------------------------------------------------------------------
1 | /** Config */
2 | export interface Config {
3 | endpoint: string;
4 | cert: Buffer | null;
5 | pk: Buffer | null;
6 | certChain: Buffer | null;
7 | insecure: boolean | null;
8 | }
9 |
10 |
--------------------------------------------------------------------------------
/src/grpc/generated/base/v1/errors.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: base/v1/errors.proto
6 |
7 | /* eslint-disable */
8 | import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
9 | import Long from "long";
10 |
11 | export const protobufPackage = "base.v1";
12 |
/**
 * TypeScript mirror of the `base.v1.ErrorCode` proto enum
 * (proto/base/v1/errors.proto). Values are grouped by range:
 * 1xxx authn, 2xxx validation, 4xxx not-found, 5xxx internal.
 * UNRECOGNIZED (-1) is ts-proto's sentinel for wire values unknown
 * to this build. Generated code — regenerate from the proto; do not edit.
 */
export enum ErrorCode {
  ERROR_CODE_UNSPECIFIED = 0,
  /** ERROR_CODE_MISSING_BEARER_TOKEN - authn */
  ERROR_CODE_MISSING_BEARER_TOKEN = 1001,
  ERROR_CODE_UNAUTHENTICATED = 1002,
  ERROR_CODE_MISSING_TENANT_ID = 1003,
  ERROR_CODE_INVALID_AUDIENCE = 1004,
  ERROR_CODE_INVALID_CLAIMS = 1005,
  ERROR_CODE_INVALID_ISSUER = 1006,
  ERROR_CODE_INVALID_BEARER_TOKEN = 1007,
  /** ERROR_CODE_VALIDATION - validation */
  ERROR_CODE_VALIDATION = 2000,
  ERROR_CODE_UNDEFINED_CHILD_TYPE = 2002,
  ERROR_CODE_UNDEFINED_CHILD_KIND = 2003,
  ERROR_CODE_UNDEFINED_RELATION_REFERENCE = 2006,
  ERROR_CODE_NOT_SUPPORTED_RELATION_WALK = 2007,
  ERROR_CODE_ENTITY_AND_SUBJECT_CANNOT_BE_EQUAL = 2008,
  ERROR_CODE_DEPTH_NOT_ENOUGH = 2009,
  ERROR_CODE_RELATION_REFERENCE_NOT_FOUND_IN_ENTITY_REFERENCES = 2010,
  ERROR_CODE_RELATION_REFERENCE_MUST_HAVE_ONE_ENTITY_REFERENCE = 2011,
  ERROR_CODE_DUPLICATED_ENTITY_REFERENCE = 2012,
  ERROR_CODE_DUPLICATED_RELATION_REFERENCE = 2013,
  ERROR_CODE_DUPLICATED_PERMISSION_REFERENCE = 2014,
  ERROR_CODE_SCHEMA_PARSE = 2015,
  ERROR_CODE_SCHEMA_COMPILE = 2016,
  ERROR_CODE_SUBJECT_RELATION_MUST_BE_EMPTY = 2017,
  ERROR_CODE_SUBJECT_RELATION_CANNOT_BE_EMPTY = 2018,
  ERROR_CODE_SCHEMA_MUST_HAVE_USER_ENTITY_DEFINITION = 2019,
  ERROR_CODE_UNIQUE_CONSTRAINT = 2020,
  ERROR_CODE_INVALID_CONTINUOUS_TOKEN = 2021,
  ERROR_CODE_INVALID_KEY = 2022,
  ERROR_CODE_ENTITY_TYPE_REQUIRED = 2023,
  ERROR_CODE_NO_ENTITY_REFERENCES_FOUND_IN_SCHEMA = 2024,
  ERROR_CODE_INVALID_ARGUMENT = 2025,
  ERROR_CODE_INVALID_RULE_REFERENCE = 2026,
  ERROR_CODE_NOT_SUPPORTED_WALK = 2027,
  ERROR_CODE_MISSING_ARGUMENT = 2028,
  ERROR_CODE_ALREADY_EXIST = 2029,
  ERROR_CODE_MAX_DATA_PER_WRITE_EXCEEDED = 2030,
  /** ERROR_CODE_NOT_FOUND - not found */
  ERROR_CODE_NOT_FOUND = 4000,
  ERROR_CODE_ENTITY_TYPE_NOT_FOUND = 4001,
  ERROR_CODE_PERMISSION_NOT_FOUND = 4002,
  ERROR_CODE_SCHEMA_NOT_FOUND = 4003,
  ERROR_CODE_SUBJECT_TYPE_NOT_FOUND = 4004,
  ERROR_CODE_ENTITY_DEFINITION_NOT_FOUND = 4005,
  ERROR_CODE_PERMISSION_DEFINITION_NOT_FOUND = 4006,
  ERROR_CODE_RELATION_DEFINITION_NOT_FOUND = 4007,
  ERROR_CODE_RECORD_NOT_FOUND = 4008,
  ERROR_CODE_TENANT_NOT_FOUND = 4009,
  ERROR_CODE_ATTRIBUTE_DEFINITION_NOT_FOUND = 4010,
  ERROR_CODE_ATTRIBUTE_TYPE_MISMATCH = 4011,
  ERROR_CODE_BUNDLE_NOT_FOUND = 4012,
  ERROR_CODE_RULE_DEFINITION_NOT_FOUND = 4013,
  ERROR_CODE_ENTITY_STATEMENT_NOT_FOUND = 4014,
  ERROR_CODE_REFERENCE_NOT_FOUND = 4015,
  /** ERROR_CODE_INTERNAL - internal */
  ERROR_CODE_INTERNAL = 5000,
  ERROR_CODE_CANCELLED = 5001,
  ERROR_CODE_SQL_BUILDER = 5002,
  ERROR_CODE_CIRCUIT_BREAKER = 5003,
  ERROR_CODE_EXECUTION = 5005,
  ERROR_CODE_SCAN = 5006,
  ERROR_CODE_MIGRATION = 5007,
  ERROR_CODE_TYPE_CONVERSATION = 5008,
  ERROR_CODE_ERROR_MAX_RETRIES = 5009,
  ERROR_CODE_ROLLBACK = 5010,
  ERROR_CODE_EXCLUSION_REQUIRES_MORE_THAN_ONE_FUNCTION = 5011,
  ERROR_CODE_NOT_IMPLEMENTED = 5012,
  ERROR_CODE_DATASTORE = 5013,
  ERROR_CODE_UNKNOWN_STATEMENT_TYPE = 5014,
  ERROR_CODE_UNKNOWN_REFERENCE_TYPE = 5015,
  ERROR_CODE_CANNOT_CONVERT_TO_ENTITY_STATEMENT = 5016,
  ERROR_CODE_CANNOT_CONVERT_TO_RELATION_STATEMENT = 5017,
  ERROR_CODE_CANNOT_CONVERT_TO_ATTRIBUTE_STATEMENT = 5018,
  ERROR_CODE_SERIALIZATION = 5019,
  UNRECOGNIZED = -1,
}
91 |
/**
 * Decode a JSON representation of an ErrorCode — either the numeric value
 * or the enum-name string — into the ErrorCode enum. Any unknown input
 * maps to ErrorCode.UNRECOGNIZED.
 * Generated code — regenerate from the proto; do not edit by hand.
 */
export function errorCodeFromJSON(object: any): ErrorCode {
  switch (object) {
    case 0:
    case "ERROR_CODE_UNSPECIFIED":
      return ErrorCode.ERROR_CODE_UNSPECIFIED;
    case 1001:
    case "ERROR_CODE_MISSING_BEARER_TOKEN":
      return ErrorCode.ERROR_CODE_MISSING_BEARER_TOKEN;
    case 1002:
    case "ERROR_CODE_UNAUTHENTICATED":
      return ErrorCode.ERROR_CODE_UNAUTHENTICATED;
    case 1003:
    case "ERROR_CODE_MISSING_TENANT_ID":
      return ErrorCode.ERROR_CODE_MISSING_TENANT_ID;
    case 1004:
    case "ERROR_CODE_INVALID_AUDIENCE":
      return ErrorCode.ERROR_CODE_INVALID_AUDIENCE;
    case 1005:
    case "ERROR_CODE_INVALID_CLAIMS":
      return ErrorCode.ERROR_CODE_INVALID_CLAIMS;
    case 1006:
    case "ERROR_CODE_INVALID_ISSUER":
      return ErrorCode.ERROR_CODE_INVALID_ISSUER;
    case 1007:
    case "ERROR_CODE_INVALID_BEARER_TOKEN":
      return ErrorCode.ERROR_CODE_INVALID_BEARER_TOKEN;
    case 2000:
    case "ERROR_CODE_VALIDATION":
      return ErrorCode.ERROR_CODE_VALIDATION;
    case 2002:
    case "ERROR_CODE_UNDEFINED_CHILD_TYPE":
      return ErrorCode.ERROR_CODE_UNDEFINED_CHILD_TYPE;
    case 2003:
    case "ERROR_CODE_UNDEFINED_CHILD_KIND":
      return ErrorCode.ERROR_CODE_UNDEFINED_CHILD_KIND;
    case 2006:
    case "ERROR_CODE_UNDEFINED_RELATION_REFERENCE":
      return ErrorCode.ERROR_CODE_UNDEFINED_RELATION_REFERENCE;
    case 2007:
    case "ERROR_CODE_NOT_SUPPORTED_RELATION_WALK":
      return ErrorCode.ERROR_CODE_NOT_SUPPORTED_RELATION_WALK;
    case 2008:
    case "ERROR_CODE_ENTITY_AND_SUBJECT_CANNOT_BE_EQUAL":
      return ErrorCode.ERROR_CODE_ENTITY_AND_SUBJECT_CANNOT_BE_EQUAL;
    case 2009:
    case "ERROR_CODE_DEPTH_NOT_ENOUGH":
      return ErrorCode.ERROR_CODE_DEPTH_NOT_ENOUGH;
    case 2010:
    case "ERROR_CODE_RELATION_REFERENCE_NOT_FOUND_IN_ENTITY_REFERENCES":
      return ErrorCode.ERROR_CODE_RELATION_REFERENCE_NOT_FOUND_IN_ENTITY_REFERENCES;
    case 2011:
    case "ERROR_CODE_RELATION_REFERENCE_MUST_HAVE_ONE_ENTITY_REFERENCE":
      return ErrorCode.ERROR_CODE_RELATION_REFERENCE_MUST_HAVE_ONE_ENTITY_REFERENCE;
    case 2012:
    case "ERROR_CODE_DUPLICATED_ENTITY_REFERENCE":
      return ErrorCode.ERROR_CODE_DUPLICATED_ENTITY_REFERENCE;
    case 2013:
    case "ERROR_CODE_DUPLICATED_RELATION_REFERENCE":
      return ErrorCode.ERROR_CODE_DUPLICATED_RELATION_REFERENCE;
    case 2014:
    case "ERROR_CODE_DUPLICATED_PERMISSION_REFERENCE":
      return ErrorCode.ERROR_CODE_DUPLICATED_PERMISSION_REFERENCE;
    case 2015:
    case "ERROR_CODE_SCHEMA_PARSE":
      return ErrorCode.ERROR_CODE_SCHEMA_PARSE;
    case 2016:
    case "ERROR_CODE_SCHEMA_COMPILE":
      return ErrorCode.ERROR_CODE_SCHEMA_COMPILE;
    case 2017:
    case "ERROR_CODE_SUBJECT_RELATION_MUST_BE_EMPTY":
      return ErrorCode.ERROR_CODE_SUBJECT_RELATION_MUST_BE_EMPTY;
    case 2018:
    case "ERROR_CODE_SUBJECT_RELATION_CANNOT_BE_EMPTY":
      return ErrorCode.ERROR_CODE_SUBJECT_RELATION_CANNOT_BE_EMPTY;
    case 2019:
    case "ERROR_CODE_SCHEMA_MUST_HAVE_USER_ENTITY_DEFINITION":
      return ErrorCode.ERROR_CODE_SCHEMA_MUST_HAVE_USER_ENTITY_DEFINITION;
    case 2020:
    case "ERROR_CODE_UNIQUE_CONSTRAINT":
      return ErrorCode.ERROR_CODE_UNIQUE_CONSTRAINT;
    case 2021:
    case "ERROR_CODE_INVALID_CONTINUOUS_TOKEN":
      return ErrorCode.ERROR_CODE_INVALID_CONTINUOUS_TOKEN;
    case 2022:
    case "ERROR_CODE_INVALID_KEY":
      return ErrorCode.ERROR_CODE_INVALID_KEY;
    case 2023:
    case "ERROR_CODE_ENTITY_TYPE_REQUIRED":
      return ErrorCode.ERROR_CODE_ENTITY_TYPE_REQUIRED;
    case 2024:
    case "ERROR_CODE_NO_ENTITY_REFERENCES_FOUND_IN_SCHEMA":
      return ErrorCode.ERROR_CODE_NO_ENTITY_REFERENCES_FOUND_IN_SCHEMA;
    case 2025:
    case "ERROR_CODE_INVALID_ARGUMENT":
      return ErrorCode.ERROR_CODE_INVALID_ARGUMENT;
    case 2026:
    case "ERROR_CODE_INVALID_RULE_REFERENCE":
      return ErrorCode.ERROR_CODE_INVALID_RULE_REFERENCE;
    case 2027:
    case "ERROR_CODE_NOT_SUPPORTED_WALK":
      return ErrorCode.ERROR_CODE_NOT_SUPPORTED_WALK;
    case 2028:
    case "ERROR_CODE_MISSING_ARGUMENT":
      return ErrorCode.ERROR_CODE_MISSING_ARGUMENT;
    case 2029:
    case "ERROR_CODE_ALREADY_EXIST":
      return ErrorCode.ERROR_CODE_ALREADY_EXIST;
    case 2030:
    case "ERROR_CODE_MAX_DATA_PER_WRITE_EXCEEDED":
      return ErrorCode.ERROR_CODE_MAX_DATA_PER_WRITE_EXCEEDED;
    case 4000:
    case "ERROR_CODE_NOT_FOUND":
      return ErrorCode.ERROR_CODE_NOT_FOUND;
    case 4001:
    case "ERROR_CODE_ENTITY_TYPE_NOT_FOUND":
      return ErrorCode.ERROR_CODE_ENTITY_TYPE_NOT_FOUND;
    case 4002:
    case "ERROR_CODE_PERMISSION_NOT_FOUND":
      return ErrorCode.ERROR_CODE_PERMISSION_NOT_FOUND;
    case 4003:
    case "ERROR_CODE_SCHEMA_NOT_FOUND":
      return ErrorCode.ERROR_CODE_SCHEMA_NOT_FOUND;
    case 4004:
    case "ERROR_CODE_SUBJECT_TYPE_NOT_FOUND":
      return ErrorCode.ERROR_CODE_SUBJECT_TYPE_NOT_FOUND;
    case 4005:
    case "ERROR_CODE_ENTITY_DEFINITION_NOT_FOUND":
      return ErrorCode.ERROR_CODE_ENTITY_DEFINITION_NOT_FOUND;
    case 4006:
    case "ERROR_CODE_PERMISSION_DEFINITION_NOT_FOUND":
      return ErrorCode.ERROR_CODE_PERMISSION_DEFINITION_NOT_FOUND;
    case 4007:
    case "ERROR_CODE_RELATION_DEFINITION_NOT_FOUND":
      return ErrorCode.ERROR_CODE_RELATION_DEFINITION_NOT_FOUND;
    case 4008:
    case "ERROR_CODE_RECORD_NOT_FOUND":
      return ErrorCode.ERROR_CODE_RECORD_NOT_FOUND;
    case 4009:
    case "ERROR_CODE_TENANT_NOT_FOUND":
      return ErrorCode.ERROR_CODE_TENANT_NOT_FOUND;
    case 4010:
    case "ERROR_CODE_ATTRIBUTE_DEFINITION_NOT_FOUND":
      return ErrorCode.ERROR_CODE_ATTRIBUTE_DEFINITION_NOT_FOUND;
    case 4011:
    case "ERROR_CODE_ATTRIBUTE_TYPE_MISMATCH":
      return ErrorCode.ERROR_CODE_ATTRIBUTE_TYPE_MISMATCH;
    case 4012:
    case "ERROR_CODE_BUNDLE_NOT_FOUND":
      return ErrorCode.ERROR_CODE_BUNDLE_NOT_FOUND;
    case 4013:
    case "ERROR_CODE_RULE_DEFINITION_NOT_FOUND":
      return ErrorCode.ERROR_CODE_RULE_DEFINITION_NOT_FOUND;
    case 4014:
    case "ERROR_CODE_ENTITY_STATEMENT_NOT_FOUND":
      return ErrorCode.ERROR_CODE_ENTITY_STATEMENT_NOT_FOUND;
    case 4015:
    case "ERROR_CODE_REFERENCE_NOT_FOUND":
      return ErrorCode.ERROR_CODE_REFERENCE_NOT_FOUND;
    case 5000:
    case "ERROR_CODE_INTERNAL":
      return ErrorCode.ERROR_CODE_INTERNAL;
    case 5001:
    case "ERROR_CODE_CANCELLED":
      return ErrorCode.ERROR_CODE_CANCELLED;
    case 5002:
    case "ERROR_CODE_SQL_BUILDER":
      return ErrorCode.ERROR_CODE_SQL_BUILDER;
    case 5003:
    case "ERROR_CODE_CIRCUIT_BREAKER":
      return ErrorCode.ERROR_CODE_CIRCUIT_BREAKER;
    case 5005:
    case "ERROR_CODE_EXECUTION":
      return ErrorCode.ERROR_CODE_EXECUTION;
    case 5006:
    case "ERROR_CODE_SCAN":
      return ErrorCode.ERROR_CODE_SCAN;
    case 5007:
    case "ERROR_CODE_MIGRATION":
      return ErrorCode.ERROR_CODE_MIGRATION;
    case 5008:
    case "ERROR_CODE_TYPE_CONVERSATION":
      return ErrorCode.ERROR_CODE_TYPE_CONVERSATION;
    case 5009:
    case "ERROR_CODE_ERROR_MAX_RETRIES":
      return ErrorCode.ERROR_CODE_ERROR_MAX_RETRIES;
    case 5010:
    case "ERROR_CODE_ROLLBACK":
      return ErrorCode.ERROR_CODE_ROLLBACK;
    case 5011:
    case "ERROR_CODE_EXCLUSION_REQUIRES_MORE_THAN_ONE_FUNCTION":
      return ErrorCode.ERROR_CODE_EXCLUSION_REQUIRES_MORE_THAN_ONE_FUNCTION;
    case 5012:
    case "ERROR_CODE_NOT_IMPLEMENTED":
      return ErrorCode.ERROR_CODE_NOT_IMPLEMENTED;
    case 5013:
    case "ERROR_CODE_DATASTORE":
      return ErrorCode.ERROR_CODE_DATASTORE;
    case 5014:
    case "ERROR_CODE_UNKNOWN_STATEMENT_TYPE":
      return ErrorCode.ERROR_CODE_UNKNOWN_STATEMENT_TYPE;
    case 5015:
    case "ERROR_CODE_UNKNOWN_REFERENCE_TYPE":
      return ErrorCode.ERROR_CODE_UNKNOWN_REFERENCE_TYPE;
    case 5016:
    case "ERROR_CODE_CANNOT_CONVERT_TO_ENTITY_STATEMENT":
      return ErrorCode.ERROR_CODE_CANNOT_CONVERT_TO_ENTITY_STATEMENT;
    case 5017:
    case "ERROR_CODE_CANNOT_CONVERT_TO_RELATION_STATEMENT":
      return ErrorCode.ERROR_CODE_CANNOT_CONVERT_TO_RELATION_STATEMENT;
    case 5018:
    case "ERROR_CODE_CANNOT_CONVERT_TO_ATTRIBUTE_STATEMENT":
      return ErrorCode.ERROR_CODE_CANNOT_CONVERT_TO_ATTRIBUTE_STATEMENT;
    case 5019:
    case "ERROR_CODE_SERIALIZATION":
      return ErrorCode.ERROR_CODE_SERIALIZATION;
    case -1:
    case "UNRECOGNIZED":
    default:
      return ErrorCode.UNRECOGNIZED;
  }
}
313 |
314 | export function errorCodeToJSON(object: ErrorCode): string {
315 | switch (object) {
316 | case ErrorCode.ERROR_CODE_UNSPECIFIED:
317 | return "ERROR_CODE_UNSPECIFIED";
318 | case ErrorCode.ERROR_CODE_MISSING_BEARER_TOKEN:
319 | return "ERROR_CODE_MISSING_BEARER_TOKEN";
320 | case ErrorCode.ERROR_CODE_UNAUTHENTICATED:
321 | return "ERROR_CODE_UNAUTHENTICATED";
322 | case ErrorCode.ERROR_CODE_MISSING_TENANT_ID:
323 | return "ERROR_CODE_MISSING_TENANT_ID";
324 | case ErrorCode.ERROR_CODE_INVALID_AUDIENCE:
325 | return "ERROR_CODE_INVALID_AUDIENCE";
326 | case ErrorCode.ERROR_CODE_INVALID_CLAIMS:
327 | return "ERROR_CODE_INVALID_CLAIMS";
328 | case ErrorCode.ERROR_CODE_INVALID_ISSUER:
329 | return "ERROR_CODE_INVALID_ISSUER";
330 | case ErrorCode.ERROR_CODE_INVALID_BEARER_TOKEN:
331 | return "ERROR_CODE_INVALID_BEARER_TOKEN";
332 | case ErrorCode.ERROR_CODE_VALIDATION:
333 | return "ERROR_CODE_VALIDATION";
334 | case ErrorCode.ERROR_CODE_UNDEFINED_CHILD_TYPE:
335 | return "ERROR_CODE_UNDEFINED_CHILD_TYPE";
336 | case ErrorCode.ERROR_CODE_UNDEFINED_CHILD_KIND:
337 | return "ERROR_CODE_UNDEFINED_CHILD_KIND";
338 | case ErrorCode.ERROR_CODE_UNDEFINED_RELATION_REFERENCE:
339 | return "ERROR_CODE_UNDEFINED_RELATION_REFERENCE";
340 | case ErrorCode.ERROR_CODE_NOT_SUPPORTED_RELATION_WALK:
341 | return "ERROR_CODE_NOT_SUPPORTED_RELATION_WALK";
342 | case ErrorCode.ERROR_CODE_ENTITY_AND_SUBJECT_CANNOT_BE_EQUAL:
343 | return "ERROR_CODE_ENTITY_AND_SUBJECT_CANNOT_BE_EQUAL";
344 | case ErrorCode.ERROR_CODE_DEPTH_NOT_ENOUGH:
345 | return "ERROR_CODE_DEPTH_NOT_ENOUGH";
346 | case ErrorCode.ERROR_CODE_RELATION_REFERENCE_NOT_FOUND_IN_ENTITY_REFERENCES:
347 | return "ERROR_CODE_RELATION_REFERENCE_NOT_FOUND_IN_ENTITY_REFERENCES";
348 | case ErrorCode.ERROR_CODE_RELATION_REFERENCE_MUST_HAVE_ONE_ENTITY_REFERENCE:
349 | return "ERROR_CODE_RELATION_REFERENCE_MUST_HAVE_ONE_ENTITY_REFERENCE";
350 | case ErrorCode.ERROR_CODE_DUPLICATED_ENTITY_REFERENCE:
351 | return "ERROR_CODE_DUPLICATED_ENTITY_REFERENCE";
352 | case ErrorCode.ERROR_CODE_DUPLICATED_RELATION_REFERENCE:
353 | return "ERROR_CODE_DUPLICATED_RELATION_REFERENCE";
354 | case ErrorCode.ERROR_CODE_DUPLICATED_PERMISSION_REFERENCE:
355 | return "ERROR_CODE_DUPLICATED_PERMISSION_REFERENCE";
356 | case ErrorCode.ERROR_CODE_SCHEMA_PARSE:
357 | return "ERROR_CODE_SCHEMA_PARSE";
358 | case ErrorCode.ERROR_CODE_SCHEMA_COMPILE:
359 | return "ERROR_CODE_SCHEMA_COMPILE";
360 | case ErrorCode.ERROR_CODE_SUBJECT_RELATION_MUST_BE_EMPTY:
361 | return "ERROR_CODE_SUBJECT_RELATION_MUST_BE_EMPTY";
362 | case ErrorCode.ERROR_CODE_SUBJECT_RELATION_CANNOT_BE_EMPTY:
363 | return "ERROR_CODE_SUBJECT_RELATION_CANNOT_BE_EMPTY";
364 | case ErrorCode.ERROR_CODE_SCHEMA_MUST_HAVE_USER_ENTITY_DEFINITION:
365 | return "ERROR_CODE_SCHEMA_MUST_HAVE_USER_ENTITY_DEFINITION";
366 | case ErrorCode.ERROR_CODE_UNIQUE_CONSTRAINT:
367 | return "ERROR_CODE_UNIQUE_CONSTRAINT";
368 | case ErrorCode.ERROR_CODE_INVALID_CONTINUOUS_TOKEN:
369 | return "ERROR_CODE_INVALID_CONTINUOUS_TOKEN";
370 | case ErrorCode.ERROR_CODE_INVALID_KEY:
371 | return "ERROR_CODE_INVALID_KEY";
372 | case ErrorCode.ERROR_CODE_ENTITY_TYPE_REQUIRED:
373 | return "ERROR_CODE_ENTITY_TYPE_REQUIRED";
374 | case ErrorCode.ERROR_CODE_NO_ENTITY_REFERENCES_FOUND_IN_SCHEMA:
375 | return "ERROR_CODE_NO_ENTITY_REFERENCES_FOUND_IN_SCHEMA";
376 | case ErrorCode.ERROR_CODE_INVALID_ARGUMENT:
377 | return "ERROR_CODE_INVALID_ARGUMENT";
378 | case ErrorCode.ERROR_CODE_INVALID_RULE_REFERENCE:
379 | return "ERROR_CODE_INVALID_RULE_REFERENCE";
380 | case ErrorCode.ERROR_CODE_NOT_SUPPORTED_WALK:
381 | return "ERROR_CODE_NOT_SUPPORTED_WALK";
382 | case ErrorCode.ERROR_CODE_MISSING_ARGUMENT:
383 | return "ERROR_CODE_MISSING_ARGUMENT";
384 | case ErrorCode.ERROR_CODE_ALREADY_EXIST:
385 | return "ERROR_CODE_ALREADY_EXIST";
386 | case ErrorCode.ERROR_CODE_MAX_DATA_PER_WRITE_EXCEEDED:
387 | return "ERROR_CODE_MAX_DATA_PER_WRITE_EXCEEDED";
388 | case ErrorCode.ERROR_CODE_NOT_FOUND:
389 | return "ERROR_CODE_NOT_FOUND";
390 | case ErrorCode.ERROR_CODE_ENTITY_TYPE_NOT_FOUND:
391 | return "ERROR_CODE_ENTITY_TYPE_NOT_FOUND";
392 | case ErrorCode.ERROR_CODE_PERMISSION_NOT_FOUND:
393 | return "ERROR_CODE_PERMISSION_NOT_FOUND";
394 | case ErrorCode.ERROR_CODE_SCHEMA_NOT_FOUND:
395 | return "ERROR_CODE_SCHEMA_NOT_FOUND";
396 | case ErrorCode.ERROR_CODE_SUBJECT_TYPE_NOT_FOUND:
397 | return "ERROR_CODE_SUBJECT_TYPE_NOT_FOUND";
398 | case ErrorCode.ERROR_CODE_ENTITY_DEFINITION_NOT_FOUND:
399 | return "ERROR_CODE_ENTITY_DEFINITION_NOT_FOUND";
400 | case ErrorCode.ERROR_CODE_PERMISSION_DEFINITION_NOT_FOUND:
401 | return "ERROR_CODE_PERMISSION_DEFINITION_NOT_FOUND";
402 | case ErrorCode.ERROR_CODE_RELATION_DEFINITION_NOT_FOUND:
403 | return "ERROR_CODE_RELATION_DEFINITION_NOT_FOUND";
404 | case ErrorCode.ERROR_CODE_RECORD_NOT_FOUND:
405 | return "ERROR_CODE_RECORD_NOT_FOUND";
406 | case ErrorCode.ERROR_CODE_TENANT_NOT_FOUND:
407 | return "ERROR_CODE_TENANT_NOT_FOUND";
408 | case ErrorCode.ERROR_CODE_ATTRIBUTE_DEFINITION_NOT_FOUND:
409 | return "ERROR_CODE_ATTRIBUTE_DEFINITION_NOT_FOUND";
410 | case ErrorCode.ERROR_CODE_ATTRIBUTE_TYPE_MISMATCH:
411 | return "ERROR_CODE_ATTRIBUTE_TYPE_MISMATCH";
412 | case ErrorCode.ERROR_CODE_BUNDLE_NOT_FOUND:
413 | return "ERROR_CODE_BUNDLE_NOT_FOUND";
414 | case ErrorCode.ERROR_CODE_RULE_DEFINITION_NOT_FOUND:
415 | return "ERROR_CODE_RULE_DEFINITION_NOT_FOUND";
416 | case ErrorCode.ERROR_CODE_ENTITY_STATEMENT_NOT_FOUND:
417 | return "ERROR_CODE_ENTITY_STATEMENT_NOT_FOUND";
418 | case ErrorCode.ERROR_CODE_REFERENCE_NOT_FOUND:
419 | return "ERROR_CODE_REFERENCE_NOT_FOUND";
420 | case ErrorCode.ERROR_CODE_INTERNAL:
421 | return "ERROR_CODE_INTERNAL";
422 | case ErrorCode.ERROR_CODE_CANCELLED:
423 | return "ERROR_CODE_CANCELLED";
424 | case ErrorCode.ERROR_CODE_SQL_BUILDER:
425 | return "ERROR_CODE_SQL_BUILDER";
426 | case ErrorCode.ERROR_CODE_CIRCUIT_BREAKER:
427 | return "ERROR_CODE_CIRCUIT_BREAKER";
428 | case ErrorCode.ERROR_CODE_EXECUTION:
429 | return "ERROR_CODE_EXECUTION";
430 | case ErrorCode.ERROR_CODE_SCAN:
431 | return "ERROR_CODE_SCAN";
432 | case ErrorCode.ERROR_CODE_MIGRATION:
433 | return "ERROR_CODE_MIGRATION";
434 | case ErrorCode.ERROR_CODE_TYPE_CONVERSATION:
435 | return "ERROR_CODE_TYPE_CONVERSATION";
436 | case ErrorCode.ERROR_CODE_ERROR_MAX_RETRIES:
437 | return "ERROR_CODE_ERROR_MAX_RETRIES";
438 | case ErrorCode.ERROR_CODE_ROLLBACK:
439 | return "ERROR_CODE_ROLLBACK";
440 | case ErrorCode.ERROR_CODE_EXCLUSION_REQUIRES_MORE_THAN_ONE_FUNCTION:
441 | return "ERROR_CODE_EXCLUSION_REQUIRES_MORE_THAN_ONE_FUNCTION";
442 | case ErrorCode.ERROR_CODE_NOT_IMPLEMENTED:
443 | return "ERROR_CODE_NOT_IMPLEMENTED";
444 | case ErrorCode.ERROR_CODE_DATASTORE:
445 | return "ERROR_CODE_DATASTORE";
446 | case ErrorCode.ERROR_CODE_UNKNOWN_STATEMENT_TYPE:
447 | return "ERROR_CODE_UNKNOWN_STATEMENT_TYPE";
448 | case ErrorCode.ERROR_CODE_UNKNOWN_REFERENCE_TYPE:
449 | return "ERROR_CODE_UNKNOWN_REFERENCE_TYPE";
450 | case ErrorCode.ERROR_CODE_CANNOT_CONVERT_TO_ENTITY_STATEMENT:
451 | return "ERROR_CODE_CANNOT_CONVERT_TO_ENTITY_STATEMENT";
452 | case ErrorCode.ERROR_CODE_CANNOT_CONVERT_TO_RELATION_STATEMENT:
453 | return "ERROR_CODE_CANNOT_CONVERT_TO_RELATION_STATEMENT";
454 | case ErrorCode.ERROR_CODE_CANNOT_CONVERT_TO_ATTRIBUTE_STATEMENT:
455 | return "ERROR_CODE_CANNOT_CONVERT_TO_ATTRIBUTE_STATEMENT";
456 | case ErrorCode.ERROR_CODE_SERIALIZATION:
457 | return "ERROR_CODE_SERIALIZATION";
458 | case ErrorCode.UNRECOGNIZED:
459 | default:
460 | return "UNRECOGNIZED";
461 | }
462 | }
463 |
/**
 * ErrorResponse — error payload returned by the API: a machine-readable
 * ErrorCode plus a human-readable description.
 */
export interface ErrorResponse {
  code: ErrorCode; // enum value identifying the failure class (0 = unspecified)
  message: string; // human-readable detail for logs/debugging
}
469 |
/** Returns a fresh ErrorResponse populated with proto3 defaults (code 0, empty message). */
function createBaseErrorResponse(): ErrorResponse {
  return { code: 0, message: "" };
}
473 |
474 | export const ErrorResponse: MessageFns = {
475 | encode(message: ErrorResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
476 | if (message.code !== 0) {
477 | writer.uint32(8).int32(message.code);
478 | }
479 | if (message.message !== "") {
480 | writer.uint32(18).string(message.message);
481 | }
482 | return writer;
483 | },
484 |
485 | decode(input: BinaryReader | Uint8Array, length?: number): ErrorResponse {
486 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
487 | let end = length === undefined ? reader.len : reader.pos + length;
488 | const message = createBaseErrorResponse();
489 | while (reader.pos < end) {
490 | const tag = reader.uint32();
491 | switch (tag >>> 3) {
492 | case 1: {
493 | if (tag !== 8) {
494 | break;
495 | }
496 |
497 | message.code = reader.int32() as any;
498 | continue;
499 | }
500 | case 2: {
501 | if (tag !== 18) {
502 | break;
503 | }
504 |
505 | message.message = reader.string();
506 | continue;
507 | }
508 | }
509 | if ((tag & 7) === 4 || tag === 0) {
510 | break;
511 | }
512 | reader.skip(tag & 7);
513 | }
514 | return message;
515 | },
516 |
517 | fromJSON(object: any): ErrorResponse {
518 | return {
519 | code: isSet(object.code) ? errorCodeFromJSON(object.code) : 0,
520 | message: isSet(object.message) ? globalThis.String(object.message) : "",
521 | };
522 | },
523 |
524 | toJSON(message: ErrorResponse): unknown {
525 | const obj: any = {};
526 | if (message.code !== 0) {
527 | obj.code = errorCodeToJSON(message.code);
528 | }
529 | if (message.message !== "") {
530 | obj.message = message.message;
531 | }
532 | return obj;
533 | },
534 |
535 | create(base?: DeepPartial): ErrorResponse {
536 | return ErrorResponse.fromPartial(base ?? {});
537 | },
538 | fromPartial(object: DeepPartial): ErrorResponse {
539 | const message = createBaseErrorResponse();
540 | message.code = object.code ?? 0;
541 | message.message = object.message ?? "";
542 | return message;
543 | },
544 | };
545 |
546 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
547 |
548 | export type DeepPartial = T extends Builtin ? T
549 | : T extends Long ? string | number | Long : T extends globalThis.Array ? globalThis.Array>
550 | : T extends ReadonlyArray ? ReadonlyArray>
551 | : T extends { $case: string } ? { [K in keyof Omit]?: DeepPartial } & { $case: T["$case"] }
552 | : T extends {} ? { [K in keyof T]?: DeepPartial }
553 | : Partial;
554 |
555 | function isSet(value: any): boolean {
556 | return value !== null && value !== undefined;
557 | }
558 |
559 | export interface MessageFns {
560 | encode(message: T, writer?: BinaryWriter): BinaryWriter;
561 | decode(input: BinaryReader | Uint8Array, length?: number): T;
562 | fromJSON(object: any): T;
563 | toJSON(message: T): unknown;
564 | create(base?: DeepPartial): T;
565 | fromPartial(object: DeepPartial): T;
566 | }
567 |
--------------------------------------------------------------------------------
/src/grpc/generated/base/v1/health.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.2.0
4 | // protoc unknown
5 | // source: base/v1/health.proto
6 |
7 | /* eslint-disable */
8 | import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
9 | import Long from "long";
10 | import { type CallContext, type CallOptions } from "nice-grpc-common";
11 |
/** Fully-qualified proto package these definitions were generated from. */
export const protobufPackage = "base.v1";
13 |
/** Request for a health probe; `service` names the service to check ("" = whole server). */
export interface HealthCheckRequest {
  service: string;
}
17 |
/** Health probe result carrying the current serving status. */
export interface HealthCheckResponse {
  status: HealthCheckResponse_ServingStatus;
}
21 |
/**
 * Serving states reported by the Health service — matches the shape of the
 * standard gRPC health-checking protocol (NOTE(review): verify against
 * base/v1/health.proto).
 */
export enum HealthCheckResponse_ServingStatus {
  UNKNOWN = 0,
  SERVING = 1,
  NOT_SERVING = 2,
  /** SERVICE_UNKNOWN - Used only by the Watch method. */
  SERVICE_UNKNOWN = 3,
  /** UNRECOGNIZED - client-side sentinel for enum numbers unknown to this generated code. */
  UNRECOGNIZED = -1,
}
30 |
/**
 * Converts a JSON value (enum number or string name) to ServingStatus;
 * anything unknown maps to UNRECOGNIZED.
 */
export function healthCheckResponse_ServingStatusFromJSON(object: any): HealthCheckResponse_ServingStatus {
  switch (object) {
    case 0:
    case "UNKNOWN":
      return HealthCheckResponse_ServingStatus.UNKNOWN;
    case 1:
    case "SERVING":
      return HealthCheckResponse_ServingStatus.SERVING;
    case 2:
    case "NOT_SERVING":
      return HealthCheckResponse_ServingStatus.NOT_SERVING;
    case 3:
    case "SERVICE_UNKNOWN":
      return HealthCheckResponse_ServingStatus.SERVICE_UNKNOWN;
    case -1:
    case "UNRECOGNIZED":
    default:
      return HealthCheckResponse_ServingStatus.UNRECOGNIZED;
  }
}
51 |
/** Converts a ServingStatus enum value to its JSON string name (inverse of ...FromJSON). */
export function healthCheckResponse_ServingStatusToJSON(object: HealthCheckResponse_ServingStatus): string {
  switch (object) {
    case HealthCheckResponse_ServingStatus.UNKNOWN:
      return "UNKNOWN";
    case HealthCheckResponse_ServingStatus.SERVING:
      return "SERVING";
    case HealthCheckResponse_ServingStatus.NOT_SERVING:
      return "NOT_SERVING";
    case HealthCheckResponse_ServingStatus.SERVICE_UNKNOWN:
      return "SERVICE_UNKNOWN";
    case HealthCheckResponse_ServingStatus.UNRECOGNIZED:
    default:
      return "UNRECOGNIZED";
  }
}
67 |
/** Returns a fresh HealthCheckRequest with proto3 defaults (empty service name). */
function createBaseHealthCheckRequest(): HealthCheckRequest {
  return { service: "" };
}
71 |
72 | export const HealthCheckRequest: MessageFns = {
73 | encode(message: HealthCheckRequest, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
74 | if (message.service !== "") {
75 | writer.uint32(10).string(message.service);
76 | }
77 | return writer;
78 | },
79 |
80 | decode(input: BinaryReader | Uint8Array, length?: number): HealthCheckRequest {
81 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
82 | let end = length === undefined ? reader.len : reader.pos + length;
83 | const message = createBaseHealthCheckRequest();
84 | while (reader.pos < end) {
85 | const tag = reader.uint32();
86 | switch (tag >>> 3) {
87 | case 1:
88 | if (tag !== 10) {
89 | break;
90 | }
91 |
92 | message.service = reader.string();
93 | continue;
94 | }
95 | if ((tag & 7) === 4 || tag === 0) {
96 | break;
97 | }
98 | reader.skip(tag & 7);
99 | }
100 | return message;
101 | },
102 |
103 | fromJSON(object: any): HealthCheckRequest {
104 | return { service: isSet(object.service) ? globalThis.String(object.service) : "" };
105 | },
106 |
107 | toJSON(message: HealthCheckRequest): unknown {
108 | const obj: any = {};
109 | if (message.service !== "") {
110 | obj.service = message.service;
111 | }
112 | return obj;
113 | },
114 |
115 | create(base?: DeepPartial): HealthCheckRequest {
116 | return HealthCheckRequest.fromPartial(base ?? {});
117 | },
118 | fromPartial(object: DeepPartial): HealthCheckRequest {
119 | const message = createBaseHealthCheckRequest();
120 | message.service = object.service ?? "";
121 | return message;
122 | },
123 | };
124 |
/** Returns a fresh HealthCheckResponse with proto3 defaults (status 0 = UNKNOWN). */
function createBaseHealthCheckResponse(): HealthCheckResponse {
  return { status: 0 };
}
128 |
129 | export const HealthCheckResponse: MessageFns = {
130 | encode(message: HealthCheckResponse, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
131 | if (message.status !== 0) {
132 | writer.uint32(8).int32(message.status);
133 | }
134 | return writer;
135 | },
136 |
137 | decode(input: BinaryReader | Uint8Array, length?: number): HealthCheckResponse {
138 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
139 | let end = length === undefined ? reader.len : reader.pos + length;
140 | const message = createBaseHealthCheckResponse();
141 | while (reader.pos < end) {
142 | const tag = reader.uint32();
143 | switch (tag >>> 3) {
144 | case 1:
145 | if (tag !== 8) {
146 | break;
147 | }
148 |
149 | message.status = reader.int32() as any;
150 | continue;
151 | }
152 | if ((tag & 7) === 4 || tag === 0) {
153 | break;
154 | }
155 | reader.skip(tag & 7);
156 | }
157 | return message;
158 | },
159 |
160 | fromJSON(object: any): HealthCheckResponse {
161 | return { status: isSet(object.status) ? healthCheckResponse_ServingStatusFromJSON(object.status) : 0 };
162 | },
163 |
164 | toJSON(message: HealthCheckResponse): unknown {
165 | const obj: any = {};
166 | if (message.status !== 0) {
167 | obj.status = healthCheckResponse_ServingStatusToJSON(message.status);
168 | }
169 | return obj;
170 | },
171 |
172 | create(base?: DeepPartial): HealthCheckResponse {
173 | return HealthCheckResponse.fromPartial(base ?? {});
174 | },
175 | fromPartial(object: DeepPartial): HealthCheckResponse {
176 | const message = createBaseHealthCheckResponse();
177 | message.status = object.status ?? 0;
178 | return message;
179 | },
180 | };
181 |
export type HealthDefinition = typeof HealthDefinition;
/**
 * Service descriptor for base.v1.Health, consumed by nice-grpc to build
 * clients and servers. Method options are carried as raw pre-serialized
 * protobuf bytes in `_unknownFields` (emitted verbatim by protoc-gen-ts_proto).
 */
export const HealthDefinition = {
  name: "Health",
  fullName: "base.v1.Health",
  methods: {
    /**
     * If the requested service is unknown, the call will fail with status
     * NOT_FOUND.
     */
    check: {
      name: "Check",
      requestType: HealthCheckRequest,
      requestStream: false,
      responseType: HealthCheckResponse,
      responseStream: false,
      options: {
        _unknownFields: {
          // Key 8338 = (1042 << 3) | 2 — the grpc-gateway protoc-gen-openapiv2
          // `openapiv2_operation` MethodOptions extension (field 1042,
          // length-delimited). NOTE(review): decoded contents (tags/summary/
          // operation-id) not verified here; bytes are kept exactly as generated.
          8338: [
            Buffer.from([
              187, 1, 10, 6, 72, 101, 97, 108, 116, 104,
              18, 10, 104, 101, 97, 108, 116, 104, 32, 97,
              112, 105, 42, 12, 104, 101, 97, 108, 116, 104,
              46, 99, 104, 101, 99, 107, 106, 150, 1, 10,
              13, 120, 45, 99, 111, 100, 101, 83, 97, 109,
              112, 108, 101, 115, 18, 132, 1, 50, 129, 1,
              10, 41, 42, 39, 10, 11, 10, 5, 108, 97,
              98, 101, 108, 18, 2, 26, 0, 10, 10, 10,
              4, 108, 97, 110, 103, 18, 2, 26, 0, 10,
              12, 10, 6, 115, 111, 117, 114, 99, 101, 18,
              2, 26, 0, 10, 41, 42, 39, 10, 11, 10,
              5, 108, 97, 98, 101, 108, 18, 2, 26, 0,
              10, 10, 10, 4, 108, 97, 110, 103, 18, 2,
              26, 0, 10, 12, 10, 6, 115, 111, 117, 114,
              99, 101, 18, 2, 26, 0, 10, 41, 42, 39,
              10, 11, 10, 5, 108, 97, 98, 101, 108, 18,
              2, 26, 0, 10, 10, 10, 4, 108, 97, 110,
              103, 18, 2, 26, 0, 10, 12, 10, 6, 115,
              111, 117, 114, 99, 101, 18, 2, 26, 0,
            ]),
          ],
          // Key 578365826 = (72295728 << 3) | 2 — the google.api.http
          // MethodOptions extension; the payload contains the ASCII bytes of
          // "/healthz", presumably the HTTP GET route for this RPC —
          // NOTE(review): confirm against base/v1/health.proto.
          578365826: [Buffer.from([10, 18, 8, 47, 104, 101, 97, 108, 116, 104, 122])],
        },
      },
    },
    watch: {
      name: "Watch",
      requestType: HealthCheckRequest,
      requestStream: false,
      responseType: HealthCheckResponse,
      responseStream: true, // server-streaming: status updates are pushed as they change
      options: {},
    },
  },
} as const;
406 |
407 | export interface HealthServiceImplementation {
408 | /**
409 | * If the requested service is unknown, the call will fail with status
410 | * NOT_FOUND.
411 | */
412 | check(request: HealthCheckRequest, context: CallContext & CallContextExt): Promise>;
413 | watch(
414 | request: HealthCheckRequest,
415 | context: CallContext & CallContextExt,
416 | ): ServerStreamingMethodResult>;
417 | }
418 |
419 | export interface HealthClient {
420 | /**
421 | * If the requested service is unknown, the call will fail with status
422 | * NOT_FOUND.
423 | */
424 | check(request: DeepPartial, options?: CallOptions & CallOptionsExt): Promise;
425 | watch(
426 | request: DeepPartial,
427 | options?: CallOptions & CallOptionsExt,
428 | ): AsyncIterable;
429 | }
430 |
431 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
432 |
433 | export type DeepPartial = T extends Builtin ? T
434 | : T extends Long ? string | number | Long : T extends globalThis.Array ? globalThis.Array>
435 | : T extends ReadonlyArray ? ReadonlyArray>
436 | : T extends { $case: string } ? { [K in keyof Omit]?: DeepPartial } & { $case: T["$case"] }
437 | : T extends {} ? { [K in keyof T]?: DeepPartial }
438 | : Partial;
439 |
440 | function isSet(value: any): boolean {
441 | return value !== null && value !== undefined;
442 | }
443 |
444 | export type ServerStreamingMethodResult = { [Symbol.asyncIterator](): AsyncIterator };
445 |
446 | export interface MessageFns {
447 | encode(message: T, writer?: BinaryWriter): BinaryWriter;
448 | decode(input: BinaryReader | Uint8Array, length?: number): T;
449 | fromJSON(object: any): T;
450 | toJSON(message: T): unknown;
451 | create(base?: DeepPartial): T;
452 | fromPartial(object: DeepPartial): T;
453 | }
454 |
--------------------------------------------------------------------------------
/src/grpc/generated/base/v1/openapi.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: base/v1/openapi.proto
6 |
7 | /* eslint-disable */
8 |
/** Proto package marker — base/v1/openapi.proto contributes no runtime types, only this constant. */
export const protobufPackage = "base.v1";
10 |
--------------------------------------------------------------------------------
/src/grpc/generated/google/api/annotations.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: google/api/annotations.proto
6 |
7 | /* eslint-disable */
8 |
/** Proto package marker — google/api/annotations.proto contributes no runtime types, only this constant. */
export const protobufPackage = "google.api";
10 |
--------------------------------------------------------------------------------
/src/grpc/generated/google/api/http.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: google/api/http.proto
6 |
7 | /* eslint-disable */
8 | import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
9 | import Long from "long";
10 |
/** Fully-qualified proto package these definitions were generated from. */
export const protobufPackage = "google.api";
12 |
/**
 * Defines the HTTP configuration for an API service. It contains a list of
 * [HttpRule][google.api.HttpRule], each specifying the mapping of an RPC method
 * to one or more HTTP REST API methods.
 *
 * Generated from google/api/http.proto — change the proto, not this file.
 */
export interface Http {
  /**
   * A list of HTTP configuration rules that apply to individual API methods.
   *
   * **NOTE:** All service configuration rules follow "last one wins" order.
   */
  rules: HttpRule[];
  /**
   * When set to true, URL path parameters will be fully URI-decoded except in
   * cases of single segment matches in reserved expansion, where "%2F" will be
   * left encoded.
   *
   * The default behavior is to not decode RFC 6570 reserved characters in multi
   * segment matches.
   */
  fullyDecodeReservedExpansion: boolean;
}
35 |
36 | /**
37 | * # gRPC Transcoding
38 | *
39 | * gRPC Transcoding is a feature for mapping between a gRPC method and one or
40 | * more HTTP REST endpoints. It allows developers to build a single API service
41 | * that supports both gRPC APIs and REST APIs. Many systems, including [Google
42 | * APIs](https://github.com/googleapis/googleapis),
43 | * [Cloud Endpoints](https://cloud.google.com/endpoints), [gRPC
44 | * Gateway](https://github.com/grpc-ecosystem/grpc-gateway),
45 | * and [Envoy](https://github.com/envoyproxy/envoy) proxy support this feature
46 | * and use it for large scale production services.
47 | *
48 | * `HttpRule` defines the schema of the gRPC/REST mapping. The mapping specifies
49 | * how different portions of the gRPC request message are mapped to the URL
50 | * path, URL query parameters, and HTTP request body. It also controls how the
51 | * gRPC response message is mapped to the HTTP response body. `HttpRule` is
52 | * typically specified as an `google.api.http` annotation on the gRPC method.
53 | *
54 | * Each mapping specifies a URL path template and an HTTP method. The path
55 | * template may refer to one or more fields in the gRPC request message, as long
56 | * as each field is a non-repeated field with a primitive (non-message) type.
57 | * The path template controls how fields of the request message are mapped to
58 | * the URL path.
59 | *
60 | * Example:
61 | *
62 | * service Messaging {
63 | * rpc GetMessage(GetMessageRequest) returns (Message) {
64 | * option (google.api.http) = {
65 | * get: "/v1/{name=messages/*}"
66 | * };
67 | * }
68 | * }
69 | * message GetMessageRequest {
70 | * string name = 1; // Mapped to URL path.
71 | * }
72 | * message Message {
73 | * string text = 1; // The resource content.
74 | * }
75 | *
76 | * This enables an HTTP REST to gRPC mapping as below:
77 | *
78 | * HTTP | gRPC
79 | * -----|-----
80 | * `GET /v1/messages/123456` | `GetMessage(name: "messages/123456")`
81 | *
82 | * Any fields in the request message which are not bound by the path template
83 | * automatically become HTTP query parameters if there is no HTTP request body.
84 | * For example:
85 | *
86 | * service Messaging {
87 | * rpc GetMessage(GetMessageRequest) returns (Message) {
88 | * option (google.api.http) = {
89 | * get:"/v1/messages/{message_id}"
90 | * };
91 | * }
92 | * }
93 | * message GetMessageRequest {
94 | * message SubMessage {
95 | * string subfield = 1;
96 | * }
97 | * string message_id = 1; // Mapped to URL path.
98 | * int64 revision = 2; // Mapped to URL query parameter `revision`.
99 | * SubMessage sub = 3; // Mapped to URL query parameter `sub.subfield`.
100 | * }
101 | *
102 | * This enables a HTTP JSON to RPC mapping as below:
103 | *
104 | * HTTP | gRPC
105 | * -----|-----
106 | * `GET /v1/messages/123456?revision=2&sub.subfield=foo` |
107 | * `GetMessage(message_id: "123456" revision: 2 sub: SubMessage(subfield:
108 | * "foo"))`
109 | *
110 | * Note that fields which are mapped to URL query parameters must have a
111 | * primitive type or a repeated primitive type or a non-repeated message type.
112 | * In the case of a repeated type, the parameter can be repeated in the URL
 * as `...?param=A&param=B`. In the case of a message type, each field of the
114 | * message is mapped to a separate parameter, such as
115 | * `...?foo.a=A&foo.b=B&foo.c=C`.
116 | *
117 | * For HTTP methods that allow a request body, the `body` field
118 | * specifies the mapping. Consider a REST update method on the
119 | * message resource collection:
120 | *
121 | * service Messaging {
122 | * rpc UpdateMessage(UpdateMessageRequest) returns (Message) {
123 | * option (google.api.http) = {
124 | * patch: "/v1/messages/{message_id}"
125 | * body: "message"
126 | * };
127 | * }
128 | * }
129 | * message UpdateMessageRequest {
130 | * string message_id = 1; // mapped to the URL
131 | * Message message = 2; // mapped to the body
132 | * }
133 | *
134 | * The following HTTP JSON to RPC mapping is enabled, where the
135 | * representation of the JSON in the request body is determined by
136 | * protos JSON encoding:
137 | *
138 | * HTTP | gRPC
139 | * -----|-----
140 | * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id:
141 | * "123456" message { text: "Hi!" })`
142 | *
143 | * The special name `*` can be used in the body mapping to define that
144 | * every field not bound by the path template should be mapped to the
145 | * request body. This enables the following alternative definition of
146 | * the update method:
147 | *
148 | * service Messaging {
149 | * rpc UpdateMessage(Message) returns (Message) {
150 | * option (google.api.http) = {
151 | * patch: "/v1/messages/{message_id}"
152 | * body: "*"
153 | * };
154 | * }
155 | * }
156 | * message Message {
157 | * string message_id = 1;
158 | * string text = 2;
159 | * }
160 | *
161 | * The following HTTP JSON to RPC mapping is enabled:
162 | *
163 | * HTTP | gRPC
164 | * -----|-----
165 | * `PATCH /v1/messages/123456 { "text": "Hi!" }` | `UpdateMessage(message_id:
166 | * "123456" text: "Hi!")`
167 | *
168 | * Note that when using `*` in the body mapping, it is not possible to
169 | * have HTTP parameters, as all fields not bound by the path end in
170 | * the body. This makes this option more rarely used in practice when
171 | * defining REST APIs. The common usage of `*` is in custom methods
172 | * which don't use the URL at all for transferring data.
173 | *
174 | * It is possible to define multiple HTTP methods for one RPC by using
175 | * the `additional_bindings` option. Example:
176 | *
177 | * service Messaging {
178 | * rpc GetMessage(GetMessageRequest) returns (Message) {
179 | * option (google.api.http) = {
180 | * get: "/v1/messages/{message_id}"
181 | * additional_bindings {
182 | * get: "/v1/users/{user_id}/messages/{message_id}"
183 | * }
184 | * };
185 | * }
186 | * }
187 | * message GetMessageRequest {
188 | * string message_id = 1;
189 | * string user_id = 2;
190 | * }
191 | *
192 | * This enables the following two alternative HTTP JSON to RPC mappings:
193 | *
194 | * HTTP | gRPC
195 | * -----|-----
196 | * `GET /v1/messages/123456` | `GetMessage(message_id: "123456")`
197 | * `GET /v1/users/me/messages/123456` | `GetMessage(user_id: "me" message_id:
198 | * "123456")`
199 | *
200 | * ## Rules for HTTP mapping
201 | *
202 | * 1. Leaf request fields (recursive expansion nested messages in the request
203 | * message) are classified into three categories:
204 | * - Fields referred by the path template. They are passed via the URL path.
205 | * - Fields referred by the [HttpRule.body][google.api.HttpRule.body]. They are passed via the HTTP
206 | * request body.
207 | * - All other fields are passed via the URL query parameters, and the
208 | * parameter name is the field path in the request message. A repeated
209 | * field can be represented as multiple query parameters under the same
210 | * name.
211 | * 2. If [HttpRule.body][google.api.HttpRule.body] is "*", there is no URL query parameter, all fields
212 | * are passed via URL path and HTTP request body.
213 | * 3. If [HttpRule.body][google.api.HttpRule.body] is omitted, there is no HTTP request body, all
214 | * fields are passed via URL path and URL query parameters.
215 | *
216 | * ### Path template syntax
217 | *
218 | * Template = "/" Segments [ Verb ] ;
219 | * Segments = Segment { "/" Segment } ;
220 | * Segment = "*" | "**" | LITERAL | Variable ;
221 | * Variable = "{" FieldPath [ "=" Segments ] "}" ;
222 | * FieldPath = IDENT { "." IDENT } ;
223 | * Verb = ":" LITERAL ;
224 | *
225 | * The syntax `*` matches a single URL path segment. The syntax `**` matches
226 | * zero or more URL path segments, which must be the last part of the URL path
227 | * except the `Verb`.
228 | *
229 | * The syntax `Variable` matches part of the URL path as specified by its
230 | * template. A variable template must not contain other variables. If a variable
231 | * matches a single path segment, its template may be omitted, e.g. `{var}`
232 | * is equivalent to `{var=*}`.
233 | *
234 | * The syntax `LITERAL` matches literal text in the URL path. If the `LITERAL`
235 | * contains any reserved character, such characters should be percent-encoded
236 | * before the matching.
237 | *
238 | * If a variable contains exactly one path segment, such as `"{var}"` or
239 | * `"{var=*}"`, when such a variable is expanded into a URL path on the client
240 | * side, all characters except `[-_.~0-9a-zA-Z]` are percent-encoded. The
241 | * server side does the reverse decoding. Such variables show up in the
242 | * [Discovery
243 | * Document](https://developers.google.com/discovery/v1/reference/apis) as
244 | * `{var}`.
245 | *
246 | * If a variable contains multiple path segments, such as `"{var=foo/*}"`
247 | * or `"{var=**}"`, when such a variable is expanded into a URL path on the
248 | * client side, all characters except `[-_.~/0-9a-zA-Z]` are percent-encoded.
249 | * The server side does the reverse decoding, except "%2F" and "%2f" are left
250 | * unchanged. Such variables show up in the
251 | * [Discovery
252 | * Document](https://developers.google.com/discovery/v1/reference/apis) as
253 | * `{+var}`.
254 | *
255 | * ## Using gRPC API Service Configuration
256 | *
257 | * gRPC API Service Configuration (service config) is a configuration language
258 | * for configuring a gRPC service to become a user-facing product. The
259 | * service config is simply the YAML representation of the `google.api.Service`
260 | * proto message.
261 | *
262 | * As an alternative to annotating your proto file, you can configure gRPC
263 | * transcoding in your service config YAML files. You do this by specifying a
264 | * `HttpRule` that maps the gRPC method to a REST endpoint, achieving the same
265 | * effect as the proto annotation. This can be particularly useful if you
266 | * have a proto that is reused in multiple services. Note that any transcoding
267 | * specified in the service config will override any matching transcoding
268 | * configuration in the proto.
269 | *
270 | * Example:
271 | *
272 | * http:
273 | * rules:
274 | * # Selects a gRPC method and applies HttpRule to it.
275 | * - selector: example.v1.Messaging.GetMessage
276 | * get: /v1/messages/{message_id}/{sub.subfield}
277 | *
278 | * ## Special notes
279 | *
280 | * When gRPC Transcoding is used to map a gRPC to JSON REST endpoints, the
281 | * proto to JSON conversion must follow the [proto3
282 | * specification](https://developers.google.com/protocol-buffers/docs/proto3#json).
283 | *
284 | * While the single segment variable follows the semantics of
285 | * [RFC 6570](https://tools.ietf.org/html/rfc6570) Section 3.2.2 Simple String
286 | * Expansion, the multi segment variable **does not** follow RFC 6570 Section
287 | * 3.2.3 Reserved Expansion. The reason is that the Reserved Expansion
288 | * does not expand special characters like `?` and `#`, which would lead
289 | * to invalid URLs. As the result, gRPC Transcoding uses a custom encoding
290 | * for multi segment variables.
291 | *
292 | * The path variables **must not** refer to any repeated or mapped field,
293 | * because client libraries are not capable of handling such variable expansion.
294 | *
295 | * The path variables **must not** capture the leading "/" character. The reason
296 | * is that the most common use case "{var}" does not capture the leading "/"
297 | * character. For consistency, all path variables must share the same behavior.
298 | *
299 | * Repeated message fields must not be mapped to URL query parameters, because
300 | * no client library can support such complicated mapping.
301 | *
302 | * If an API needs to use a JSON array for request or response body, it can map
303 | * the request or response body to a repeated field. However, some gRPC
304 | * Transcoding implementations may not support this feature.
305 | */
306 | export interface HttpRule {
307 | /**
308 | * Selects a method to which this rule applies.
309 | *
310 | * Refer to [selector][google.api.DocumentationRule.selector] for syntax details.
311 | */
312 | selector: string;
313 | /**
314 | * Determines the URL pattern is matched by this rules. This pattern can be
315 | * used with any of the {get|put|post|delete|patch} methods. A custom method
316 | * can be defined using the 'custom' field.
317 | */
318 | pattern?:
319 | | //
320 | /**
321 | * Maps to HTTP GET. Used for listing and getting information about
322 | * resources.
323 | */
324 | { $case: "get"; get: string }
325 | | //
326 | /** Maps to HTTP PUT. Used for replacing a resource. */
327 | { $case: "put"; put: string }
328 | | //
329 | /** Maps to HTTP POST. Used for creating a resource or performing an action. */
330 | { $case: "post"; post: string }
331 | | //
332 | /** Maps to HTTP DELETE. Used for deleting a resource. */
333 | { $case: "delete"; delete: string }
334 | | //
335 | /** Maps to HTTP PATCH. Used for updating a resource. */
336 | { $case: "patch"; patch: string }
337 | | //
338 | /**
339 | * The custom pattern is used for specifying an HTTP method that is not
340 | * included in the `pattern` field, such as HEAD, or "*" to leave the
341 | * HTTP method unspecified for this rule. The wild-card rule is useful
342 | * for services that provide content to Web (HTML) clients.
343 | */
344 | { $case: "custom"; custom: CustomHttpPattern }
345 | | undefined;
346 | /**
347 | * The name of the request field whose value is mapped to the HTTP request
348 | * body, or `*` for mapping all request fields not captured by the path
349 | * pattern to the HTTP body, or omitted for not having any HTTP request body.
350 | *
351 | * NOTE: the referred field must be present at the top-level of the request
352 | * message type.
353 | */
354 | body: string;
355 | /**
356 | * Optional. The name of the response field whose value is mapped to the HTTP
357 | * response body. When omitted, the entire response message will be used
358 | * as the HTTP response body.
359 | *
360 | * NOTE: The referred field must be present at the top-level of the response
361 | * message type.
362 | */
363 | responseBody: string;
364 | /**
365 | * Additional HTTP bindings for the selector. Nested bindings must
366 | * not contain an `additional_bindings` field themselves (that is,
367 | * the nesting may only be one level deep).
368 | */
369 | additionalBindings: HttpRule[];
370 | }
371 |
/**
 * A custom pattern is used for defining custom HTTP verb
 * (referenced from the `custom` member of the `HttpRule.pattern` oneof).
 */
export interface CustomHttpPattern {
  /** The name of this custom HTTP verb, e.g. "HEAD", or "*" for any verb. */
  kind: string;
  /** The path matched by this custom verb. */
  path: string;
}
379 |
380 | function createBaseHttp(): Http {
381 | return { rules: [], fullyDecodeReservedExpansion: false };
382 | }
383 |
384 | export const Http: MessageFns = {
385 | encode(message: Http, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
386 | for (const v of message.rules) {
387 | HttpRule.encode(v!, writer.uint32(10).fork()).join();
388 | }
389 | if (message.fullyDecodeReservedExpansion !== false) {
390 | writer.uint32(16).bool(message.fullyDecodeReservedExpansion);
391 | }
392 | return writer;
393 | },
394 |
395 | decode(input: BinaryReader | Uint8Array, length?: number): Http {
396 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
397 | let end = length === undefined ? reader.len : reader.pos + length;
398 | const message = createBaseHttp();
399 | while (reader.pos < end) {
400 | const tag = reader.uint32();
401 | switch (tag >>> 3) {
402 | case 1: {
403 | if (tag !== 10) {
404 | break;
405 | }
406 |
407 | message.rules.push(HttpRule.decode(reader, reader.uint32()));
408 | continue;
409 | }
410 | case 2: {
411 | if (tag !== 16) {
412 | break;
413 | }
414 |
415 | message.fullyDecodeReservedExpansion = reader.bool();
416 | continue;
417 | }
418 | }
419 | if ((tag & 7) === 4 || tag === 0) {
420 | break;
421 | }
422 | reader.skip(tag & 7);
423 | }
424 | return message;
425 | },
426 |
427 | fromJSON(object: any): Http {
428 | return {
429 | rules: globalThis.Array.isArray(object?.rules) ? object.rules.map((e: any) => HttpRule.fromJSON(e)) : [],
430 | fullyDecodeReservedExpansion: isSet(object.fullyDecodeReservedExpansion)
431 | ? globalThis.Boolean(object.fullyDecodeReservedExpansion)
432 | : false,
433 | };
434 | },
435 |
436 | toJSON(message: Http): unknown {
437 | const obj: any = {};
438 | if (message.rules?.length) {
439 | obj.rules = message.rules.map((e) => HttpRule.toJSON(e));
440 | }
441 | if (message.fullyDecodeReservedExpansion !== false) {
442 | obj.fullyDecodeReservedExpansion = message.fullyDecodeReservedExpansion;
443 | }
444 | return obj;
445 | },
446 |
447 | create(base?: DeepPartial): Http {
448 | return Http.fromPartial(base ?? {});
449 | },
450 | fromPartial(object: DeepPartial): Http {
451 | const message = createBaseHttp();
452 | message.rules = object.rules?.map((e) => HttpRule.fromPartial(e)) || [];
453 | message.fullyDecodeReservedExpansion = object.fullyDecodeReservedExpansion ?? false;
454 | return message;
455 | },
456 | };
457 |
458 | function createBaseHttpRule(): HttpRule {
459 | return { selector: "", pattern: undefined, body: "", responseBody: "", additionalBindings: [] };
460 | }
461 |
462 | export const HttpRule: MessageFns = {
463 | encode(message: HttpRule, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
464 | if (message.selector !== "") {
465 | writer.uint32(10).string(message.selector);
466 | }
467 | switch (message.pattern?.$case) {
468 | case "get":
469 | writer.uint32(18).string(message.pattern.get);
470 | break;
471 | case "put":
472 | writer.uint32(26).string(message.pattern.put);
473 | break;
474 | case "post":
475 | writer.uint32(34).string(message.pattern.post);
476 | break;
477 | case "delete":
478 | writer.uint32(42).string(message.pattern.delete);
479 | break;
480 | case "patch":
481 | writer.uint32(50).string(message.pattern.patch);
482 | break;
483 | case "custom":
484 | CustomHttpPattern.encode(message.pattern.custom, writer.uint32(66).fork()).join();
485 | break;
486 | }
487 | if (message.body !== "") {
488 | writer.uint32(58).string(message.body);
489 | }
490 | if (message.responseBody !== "") {
491 | writer.uint32(98).string(message.responseBody);
492 | }
493 | for (const v of message.additionalBindings) {
494 | HttpRule.encode(v!, writer.uint32(90).fork()).join();
495 | }
496 | return writer;
497 | },
498 |
499 | decode(input: BinaryReader | Uint8Array, length?: number): HttpRule {
500 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
501 | let end = length === undefined ? reader.len : reader.pos + length;
502 | const message = createBaseHttpRule();
503 | while (reader.pos < end) {
504 | const tag = reader.uint32();
505 | switch (tag >>> 3) {
506 | case 1: {
507 | if (tag !== 10) {
508 | break;
509 | }
510 |
511 | message.selector = reader.string();
512 | continue;
513 | }
514 | case 2: {
515 | if (tag !== 18) {
516 | break;
517 | }
518 |
519 | message.pattern = { $case: "get", get: reader.string() };
520 | continue;
521 | }
522 | case 3: {
523 | if (tag !== 26) {
524 | break;
525 | }
526 |
527 | message.pattern = { $case: "put", put: reader.string() };
528 | continue;
529 | }
530 | case 4: {
531 | if (tag !== 34) {
532 | break;
533 | }
534 |
535 | message.pattern = { $case: "post", post: reader.string() };
536 | continue;
537 | }
538 | case 5: {
539 | if (tag !== 42) {
540 | break;
541 | }
542 |
543 | message.pattern = { $case: "delete", delete: reader.string() };
544 | continue;
545 | }
546 | case 6: {
547 | if (tag !== 50) {
548 | break;
549 | }
550 |
551 | message.pattern = { $case: "patch", patch: reader.string() };
552 | continue;
553 | }
554 | case 8: {
555 | if (tag !== 66) {
556 | break;
557 | }
558 |
559 | message.pattern = { $case: "custom", custom: CustomHttpPattern.decode(reader, reader.uint32()) };
560 | continue;
561 | }
562 | case 7: {
563 | if (tag !== 58) {
564 | break;
565 | }
566 |
567 | message.body = reader.string();
568 | continue;
569 | }
570 | case 12: {
571 | if (tag !== 98) {
572 | break;
573 | }
574 |
575 | message.responseBody = reader.string();
576 | continue;
577 | }
578 | case 11: {
579 | if (tag !== 90) {
580 | break;
581 | }
582 |
583 | message.additionalBindings.push(HttpRule.decode(reader, reader.uint32()));
584 | continue;
585 | }
586 | }
587 | if ((tag & 7) === 4 || tag === 0) {
588 | break;
589 | }
590 | reader.skip(tag & 7);
591 | }
592 | return message;
593 | },
594 |
595 | fromJSON(object: any): HttpRule {
596 | return {
597 | selector: isSet(object.selector) ? globalThis.String(object.selector) : "",
598 | pattern: isSet(object.get)
599 | ? { $case: "get", get: globalThis.String(object.get) }
600 | : isSet(object.put)
601 | ? { $case: "put", put: globalThis.String(object.put) }
602 | : isSet(object.post)
603 | ? { $case: "post", post: globalThis.String(object.post) }
604 | : isSet(object.delete)
605 | ? { $case: "delete", delete: globalThis.String(object.delete) }
606 | : isSet(object.patch)
607 | ? { $case: "patch", patch: globalThis.String(object.patch) }
608 | : isSet(object.custom)
609 | ? { $case: "custom", custom: CustomHttpPattern.fromJSON(object.custom) }
610 | : undefined,
611 | body: isSet(object.body) ? globalThis.String(object.body) : "",
612 | responseBody: isSet(object.responseBody) ? globalThis.String(object.responseBody) : "",
613 | additionalBindings: globalThis.Array.isArray(object?.additionalBindings)
614 | ? object.additionalBindings.map((e: any) => HttpRule.fromJSON(e))
615 | : [],
616 | };
617 | },
618 |
619 | toJSON(message: HttpRule): unknown {
620 | const obj: any = {};
621 | if (message.selector !== "") {
622 | obj.selector = message.selector;
623 | }
624 | if (message.pattern?.$case === "get") {
625 | obj.get = message.pattern.get;
626 | } else if (message.pattern?.$case === "put") {
627 | obj.put = message.pattern.put;
628 | } else if (message.pattern?.$case === "post") {
629 | obj.post = message.pattern.post;
630 | } else if (message.pattern?.$case === "delete") {
631 | obj.delete = message.pattern.delete;
632 | } else if (message.pattern?.$case === "patch") {
633 | obj.patch = message.pattern.patch;
634 | } else if (message.pattern?.$case === "custom") {
635 | obj.custom = CustomHttpPattern.toJSON(message.pattern.custom);
636 | }
637 | if (message.body !== "") {
638 | obj.body = message.body;
639 | }
640 | if (message.responseBody !== "") {
641 | obj.responseBody = message.responseBody;
642 | }
643 | if (message.additionalBindings?.length) {
644 | obj.additionalBindings = message.additionalBindings.map((e) => HttpRule.toJSON(e));
645 | }
646 | return obj;
647 | },
648 |
649 | create(base?: DeepPartial): HttpRule {
650 | return HttpRule.fromPartial(base ?? {});
651 | },
652 | fromPartial(object: DeepPartial): HttpRule {
653 | const message = createBaseHttpRule();
654 | message.selector = object.selector ?? "";
655 | switch (object.pattern?.$case) {
656 | case "get": {
657 | if (object.pattern?.get !== undefined && object.pattern?.get !== null) {
658 | message.pattern = { $case: "get", get: object.pattern.get };
659 | }
660 | break;
661 | }
662 | case "put": {
663 | if (object.pattern?.put !== undefined && object.pattern?.put !== null) {
664 | message.pattern = { $case: "put", put: object.pattern.put };
665 | }
666 | break;
667 | }
668 | case "post": {
669 | if (object.pattern?.post !== undefined && object.pattern?.post !== null) {
670 | message.pattern = { $case: "post", post: object.pattern.post };
671 | }
672 | break;
673 | }
674 | case "delete": {
675 | if (object.pattern?.delete !== undefined && object.pattern?.delete !== null) {
676 | message.pattern = { $case: "delete", delete: object.pattern.delete };
677 | }
678 | break;
679 | }
680 | case "patch": {
681 | if (object.pattern?.patch !== undefined && object.pattern?.patch !== null) {
682 | message.pattern = { $case: "patch", patch: object.pattern.patch };
683 | }
684 | break;
685 | }
686 | case "custom": {
687 | if (object.pattern?.custom !== undefined && object.pattern?.custom !== null) {
688 | message.pattern = { $case: "custom", custom: CustomHttpPattern.fromPartial(object.pattern.custom) };
689 | }
690 | break;
691 | }
692 | }
693 | message.body = object.body ?? "";
694 | message.responseBody = object.responseBody ?? "";
695 | message.additionalBindings = object.additionalBindings?.map((e) => HttpRule.fromPartial(e)) || [];
696 | return message;
697 | },
698 | };
699 |
700 | function createBaseCustomHttpPattern(): CustomHttpPattern {
701 | return { kind: "", path: "" };
702 | }
703 |
704 | export const CustomHttpPattern: MessageFns = {
705 | encode(message: CustomHttpPattern, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
706 | if (message.kind !== "") {
707 | writer.uint32(10).string(message.kind);
708 | }
709 | if (message.path !== "") {
710 | writer.uint32(18).string(message.path);
711 | }
712 | return writer;
713 | },
714 |
715 | decode(input: BinaryReader | Uint8Array, length?: number): CustomHttpPattern {
716 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
717 | let end = length === undefined ? reader.len : reader.pos + length;
718 | const message = createBaseCustomHttpPattern();
719 | while (reader.pos < end) {
720 | const tag = reader.uint32();
721 | switch (tag >>> 3) {
722 | case 1: {
723 | if (tag !== 10) {
724 | break;
725 | }
726 |
727 | message.kind = reader.string();
728 | continue;
729 | }
730 | case 2: {
731 | if (tag !== 18) {
732 | break;
733 | }
734 |
735 | message.path = reader.string();
736 | continue;
737 | }
738 | }
739 | if ((tag & 7) === 4 || tag === 0) {
740 | break;
741 | }
742 | reader.skip(tag & 7);
743 | }
744 | return message;
745 | },
746 |
747 | fromJSON(object: any): CustomHttpPattern {
748 | return {
749 | kind: isSet(object.kind) ? globalThis.String(object.kind) : "",
750 | path: isSet(object.path) ? globalThis.String(object.path) : "",
751 | };
752 | },
753 |
754 | toJSON(message: CustomHttpPattern): unknown {
755 | const obj: any = {};
756 | if (message.kind !== "") {
757 | obj.kind = message.kind;
758 | }
759 | if (message.path !== "") {
760 | obj.path = message.path;
761 | }
762 | return obj;
763 | },
764 |
765 | create(base?: DeepPartial): CustomHttpPattern {
766 | return CustomHttpPattern.fromPartial(base ?? {});
767 | },
768 | fromPartial(object: DeepPartial): CustomHttpPattern {
769 | const message = createBaseCustomHttpPattern();
770 | message.kind = object.kind ?? "";
771 | message.path = object.path ?? "";
772 | return message;
773 | },
774 | };
775 |
776 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
777 |
778 | export type DeepPartial = T extends Builtin ? T
779 | : T extends Long ? string | number | Long : T extends globalThis.Array ? globalThis.Array>
780 | : T extends ReadonlyArray ? ReadonlyArray>
781 | : T extends { $case: string } ? { [K in keyof Omit]?: DeepPartial } & { $case: T["$case"] }
782 | : T extends {} ? { [K in keyof T]?: DeepPartial }
783 | : Partial;
784 |
785 | function isSet(value: any): boolean {
786 | return value !== null && value !== undefined;
787 | }
788 |
789 | export interface MessageFns {
790 | encode(message: T, writer?: BinaryWriter): BinaryWriter;
791 | decode(input: BinaryReader | Uint8Array, length?: number): T;
792 | fromJSON(object: any): T;
793 | toJSON(message: T): unknown;
794 | create(base?: DeepPartial): T;
795 | fromPartial(object: DeepPartial): T;
796 | }
797 |
--------------------------------------------------------------------------------
/src/grpc/generated/google/protobuf/any.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: google/protobuf/any.proto
6 |
7 | /* eslint-disable */
8 | import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
9 | import Long from "long";
10 |
11 | export const protobufPackage = "google.protobuf";
12 |
13 | /**
14 | * `Any` contains an arbitrary serialized protocol buffer message along with a
15 | * URL that describes the type of the serialized message.
16 | *
17 | * Protobuf library provides support to pack/unpack Any values in the form
18 | * of utility functions or additional generated methods of the Any type.
19 | *
20 | * Example 1: Pack and unpack a message in C++.
21 | *
22 | * Foo foo = ...;
23 | * Any any;
24 | * any.PackFrom(foo);
25 | * ...
26 | * if (any.UnpackTo(&foo)) {
27 | * ...
28 | * }
29 | *
30 | * Example 2: Pack and unpack a message in Java.
31 | *
32 | * Foo foo = ...;
33 | * Any any = Any.pack(foo);
34 | * ...
35 | * if (any.is(Foo.class)) {
36 | * foo = any.unpack(Foo.class);
37 | * }
38 | *
39 | * Example 3: Pack and unpack a message in Python.
40 | *
41 | * foo = Foo(...)
42 | * any = Any()
43 | * any.Pack(foo)
44 | * ...
45 | * if any.Is(Foo.DESCRIPTOR):
46 | * any.Unpack(foo)
47 | * ...
48 | *
49 | * Example 4: Pack and unpack a message in Go
50 | *
51 | * foo := &pb.Foo{...}
52 | * any, err := anypb.New(foo)
53 | * if err != nil {
54 | * ...
55 | * }
56 | * ...
57 | * foo := &pb.Foo{}
58 | * if err := any.UnmarshalTo(foo); err != nil {
59 | * ...
60 | * }
61 | *
62 | * The pack methods provided by protobuf library will by default use
63 | * 'type.googleapis.com/full.type.name' as the type URL and the unpack
64 | * methods only use the fully qualified type name after the last '/'
65 | * in the type URL, for example "foo.bar.com/x/y.z" will yield type
66 | * name "y.z".
67 | *
68 | * JSON
69 | *
70 | * The JSON representation of an `Any` value uses the regular
71 | * representation of the deserialized, embedded message, with an
72 | * additional field `@type` which contains the type URL. Example:
73 | *
74 | * package google.profile;
75 | * message Person {
76 | * string first_name = 1;
77 | * string last_name = 2;
78 | * }
79 | *
80 | * {
81 | * "@type": "type.googleapis.com/google.profile.Person",
 *       "firstName": <string>,
 *       "lastName": <string>
84 | * }
85 | *
86 | * If the embedded message type is well-known and has a custom JSON
87 | * representation, that representation will be embedded adding a field
88 | * `value` which holds the custom JSON in addition to the `@type`
89 | * field. Example (for message [google.protobuf.Duration][]):
90 | *
91 | * {
92 | * "@type": "type.googleapis.com/google.protobuf.Duration",
93 | * "value": "1.212s"
94 | * }
95 | */
export interface Any {
  /**
   * A URL/resource name that uniquely identifies the type of the serialized
   * protocol buffer message. This string must contain at least
   * one "/" character. The last segment of the URL's path must represent
   * the fully qualified name of the type (as in
   * `path/google.protobuf.Duration`). The name should be in a canonical form
   * (e.g., leading "." is not accepted).
   *
   * In practice, teams usually precompile into the binary all types that they
   * expect it to use in the context of Any. However, for URLs which use the
   * scheme `http`, `https`, or no scheme, one can optionally set up a type
   * server that maps type URLs to message definitions as follows:
   *
   * * If no scheme is provided, `https` is assumed.
   * * An HTTP GET on the URL must yield a [google.protobuf.Type][]
   *   value in binary format, or produce an error.
   * * Applications are allowed to cache lookup results based on the
   *   URL, or have them precompiled into a binary to avoid any
   *   lookup. Therefore, binary compatibility needs to be preserved
   *   on changes to types. (Use versioned type names to manage
   *   breaking changes.)
   *
   * Note: this functionality is not currently available in the official
   * protobuf release, and it is not used for type URLs beginning with
   * type.googleapis.com.
   *
   * Schemes other than `http`, `https` (or the empty scheme) might be
   * used with implementation specific semantics.
   */
  typeUrl: string;
  /**
   * Must be a valid serialized protocol buffer of the above specified type.
   * Represented as a Node.js `Buffer` in this generated code.
   */
  value: Buffer;
}
130 |
131 | function createBaseAny(): Any {
132 | return { typeUrl: "", value: Buffer.alloc(0) };
133 | }
134 |
135 | export const Any: MessageFns = {
136 | encode(message: Any, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
137 | if (message.typeUrl !== "") {
138 | writer.uint32(10).string(message.typeUrl);
139 | }
140 | if (message.value.length !== 0) {
141 | writer.uint32(18).bytes(message.value);
142 | }
143 | return writer;
144 | },
145 |
146 | decode(input: BinaryReader | Uint8Array, length?: number): Any {
147 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
148 | let end = length === undefined ? reader.len : reader.pos + length;
149 | const message = createBaseAny();
150 | while (reader.pos < end) {
151 | const tag = reader.uint32();
152 | switch (tag >>> 3) {
153 | case 1: {
154 | if (tag !== 10) {
155 | break;
156 | }
157 |
158 | message.typeUrl = reader.string();
159 | continue;
160 | }
161 | case 2: {
162 | if (tag !== 18) {
163 | break;
164 | }
165 |
166 | message.value = Buffer.from(reader.bytes());
167 | continue;
168 | }
169 | }
170 | if ((tag & 7) === 4 || tag === 0) {
171 | break;
172 | }
173 | reader.skip(tag & 7);
174 | }
175 | return message;
176 | },
177 |
178 | fromJSON(object: any): Any {
179 | return {
180 | typeUrl: isSet(object.typeUrl) ? globalThis.String(object.typeUrl) : "",
181 | value: isSet(object.value) ? Buffer.from(bytesFromBase64(object.value)) : Buffer.alloc(0),
182 | };
183 | },
184 |
185 | toJSON(message: Any): unknown {
186 | const obj: any = {};
187 | if (message.typeUrl !== "") {
188 | obj.typeUrl = message.typeUrl;
189 | }
190 | if (message.value.length !== 0) {
191 | obj.value = base64FromBytes(message.value);
192 | }
193 | return obj;
194 | },
195 |
196 | create(base?: DeepPartial): Any {
197 | return Any.fromPartial(base ?? {});
198 | },
199 | fromPartial(object: DeepPartial): Any {
200 | const message = createBaseAny();
201 | message.typeUrl = object.typeUrl ?? "";
202 | message.value = object.value ?? Buffer.alloc(0);
203 | return message;
204 | },
205 | };
206 |
207 | function bytesFromBase64(b64: string): Uint8Array {
208 | return Uint8Array.from(globalThis.Buffer.from(b64, "base64"));
209 | }
210 |
211 | function base64FromBytes(arr: Uint8Array): string {
212 | return globalThis.Buffer.from(arr).toString("base64");
213 | }
214 |
215 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
216 |
217 | export type DeepPartial = T extends Builtin ? T
218 | : T extends Long ? string | number | Long : T extends globalThis.Array ? globalThis.Array>
219 | : T extends ReadonlyArray ? ReadonlyArray>
220 | : T extends { $case: string } ? { [K in keyof Omit]?: DeepPartial } & { $case: T["$case"] }
221 | : T extends {} ? { [K in keyof T]?: DeepPartial }
222 | : Partial;
223 |
224 | function isSet(value: any): boolean {
225 | return value !== null && value !== undefined;
226 | }
227 |
228 | export interface MessageFns {
229 | encode(message: T, writer?: BinaryWriter): BinaryWriter;
230 | decode(input: BinaryReader | Uint8Array, length?: number): T;
231 | fromJSON(object: any): T;
232 | toJSON(message: T): unknown;
233 | create(base?: DeepPartial): T;
234 | fromPartial(object: DeepPartial): T;
235 | }
236 |
--------------------------------------------------------------------------------
/src/grpc/generated/google/protobuf/duration.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: google/protobuf/duration.proto
6 |
7 | /* eslint-disable */
8 | import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
9 | import Long from "long";
10 |
11 | export const protobufPackage = "google.protobuf";
12 |
13 | /**
14 | * A Duration represents a signed, fixed-length span of time represented
15 | * as a count of seconds and fractions of seconds at nanosecond
16 | * resolution. It is independent of any calendar and concepts like "day"
17 | * or "month". It is related to Timestamp in that the difference between
18 | * two Timestamp values is a Duration and it can be added or subtracted
19 | * from a Timestamp. Range is approximately +-10,000 years.
20 | *
21 | * # Examples
22 | *
23 | * Example 1: Compute Duration from two Timestamps in pseudo code.
24 | *
25 | * Timestamp start = ...;
26 | * Timestamp end = ...;
27 | * Duration duration = ...;
28 | *
29 | * duration.seconds = end.seconds - start.seconds;
30 | * duration.nanos = end.nanos - start.nanos;
31 | *
32 | * if (duration.seconds < 0 && duration.nanos > 0) {
33 | * duration.seconds += 1;
34 | * duration.nanos -= 1000000000;
35 | * } else if (duration.seconds > 0 && duration.nanos < 0) {
36 | * duration.seconds -= 1;
37 | * duration.nanos += 1000000000;
38 | * }
39 | *
40 | * Example 2: Compute Timestamp from Timestamp + Duration in pseudo code.
41 | *
42 | * Timestamp start = ...;
43 | * Duration duration = ...;
44 | * Timestamp end = ...;
45 | *
46 | * end.seconds = start.seconds + duration.seconds;
47 | * end.nanos = start.nanos + duration.nanos;
48 | *
49 | * if (end.nanos < 0) {
50 | * end.seconds -= 1;
51 | * end.nanos += 1000000000;
52 | * } else if (end.nanos >= 1000000000) {
53 | * end.seconds += 1;
54 | * end.nanos -= 1000000000;
55 | * }
56 | *
57 | * Example 3: Compute Duration from datetime.timedelta in Python.
58 | *
59 | * td = datetime.timedelta(days=3, minutes=10)
60 | * duration = Duration()
61 | * duration.FromTimedelta(td)
62 | *
63 | * # JSON Mapping
64 | *
65 | * In JSON format, the Duration type is encoded as a string rather than an
66 | * object, where the string ends in the suffix "s" (indicating seconds) and
67 | * is preceded by the number of seconds, with nanoseconds expressed as
68 | * fractional seconds. For example, 3 seconds with 0 nanoseconds should be
69 | * encoded in JSON format as "3s", while 3 seconds and 1 nanosecond should
70 | * be expressed in JSON format as "3.000000001s", and 3 seconds and 1
71 | * microsecond should be expressed in JSON format as "3.000001s".
72 | */
export interface Duration {
  /**
   * Signed seconds of the span of time. Must be from -315,576,000,000
   * to +315,576,000,000 inclusive. Note: these bounds are computed from:
   * 60 sec/min * 60 min/hr * 24 hr/day * 365.25 days/year * 10000 years
   * Represented as a `Long` (from the "long" package) in this generated code.
   */
  seconds: Long;
  /**
   * Signed fractions of a second at nanosecond resolution of the span
   * of time. Durations less than one second are represented with a 0
   * `seconds` field and a positive or negative `nanos` field. For durations
   * of one second or more, a non-zero value for the `nanos` field must be
   * of the same sign as the `seconds` field. Must be from -999,999,999
   * to +999,999,999 inclusive.
   */
  nanos: number;
}
90 |
91 | function createBaseDuration(): Duration {
92 | return { seconds: Long.ZERO, nanos: 0 };
93 | }
94 |
95 | export const Duration: MessageFns = {
96 | encode(message: Duration, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
97 | if (!message.seconds.equals(Long.ZERO)) {
98 | writer.uint32(8).int64(message.seconds.toString());
99 | }
100 | if (message.nanos !== 0) {
101 | writer.uint32(16).int32(message.nanos);
102 | }
103 | return writer;
104 | },
105 |
106 | decode(input: BinaryReader | Uint8Array, length?: number): Duration {
107 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
108 | let end = length === undefined ? reader.len : reader.pos + length;
109 | const message = createBaseDuration();
110 | while (reader.pos < end) {
111 | const tag = reader.uint32();
112 | switch (tag >>> 3) {
113 | case 1: {
114 | if (tag !== 8) {
115 | break;
116 | }
117 |
118 | message.seconds = Long.fromString(reader.int64().toString());
119 | continue;
120 | }
121 | case 2: {
122 | if (tag !== 16) {
123 | break;
124 | }
125 |
126 | message.nanos = reader.int32();
127 | continue;
128 | }
129 | }
130 | if ((tag & 7) === 4 || tag === 0) {
131 | break;
132 | }
133 | reader.skip(tag & 7);
134 | }
135 | return message;
136 | },
137 |
138 | fromJSON(object: any): Duration {
139 | return {
140 | seconds: isSet(object.seconds) ? Long.fromValue(object.seconds) : Long.ZERO,
141 | nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
142 | };
143 | },
144 |
145 | toJSON(message: Duration): unknown {
146 | const obj: any = {};
147 | if (!message.seconds.equals(Long.ZERO)) {
148 | obj.seconds = (message.seconds || Long.ZERO).toString();
149 | }
150 | if (message.nanos !== 0) {
151 | obj.nanos = Math.round(message.nanos);
152 | }
153 | return obj;
154 | },
155 |
156 | create(base?: DeepPartial): Duration {
157 | return Duration.fromPartial(base ?? {});
158 | },
159 | fromPartial(object: DeepPartial): Duration {
160 | const message = createBaseDuration();
161 | message.seconds = (object.seconds !== undefined && object.seconds !== null)
162 | ? Long.fromValue(object.seconds)
163 | : Long.ZERO;
164 | message.nanos = object.nanos ?? 0;
165 | return message;
166 | },
167 | };
168 |
169 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
170 |
171 | export type DeepPartial = T extends Builtin ? T
172 | : T extends Long ? string | number | Long : T extends globalThis.Array ? globalThis.Array>
173 | : T extends ReadonlyArray ? ReadonlyArray>
174 | : T extends { $case: string } ? { [K in keyof Omit]?: DeepPartial } & { $case: T["$case"] }
175 | : T extends {} ? { [K in keyof T]?: DeepPartial }
176 | : Partial;
177 |
178 | function isSet(value: any): boolean {
179 | return value !== null && value !== undefined;
180 | }
181 |
182 | export interface MessageFns {
183 | encode(message: T, writer?: BinaryWriter): BinaryWriter;
184 | decode(input: BinaryReader | Uint8Array, length?: number): T;
185 | fromJSON(object: any): T;
186 | toJSON(message: T): unknown;
187 | create(base?: DeepPartial): T;
188 | fromPartial(object: DeepPartial): T;
189 | }
190 |
--------------------------------------------------------------------------------
/src/grpc/generated/google/protobuf/empty.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: google/protobuf/empty.proto
6 |
7 | /* eslint-disable */
8 | import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
9 | import Long from "long";
10 |
11 | export const protobufPackage = "google.protobuf";
12 |
13 | /**
14 | * A generic empty message that you can re-use to avoid defining duplicated
15 | * empty messages in your APIs. A typical example is to use it as the request
16 | * or the response type of an API method. For instance:
17 | *
18 | * service Foo {
19 | * rpc Bar(google.protobuf.Empty) returns (google.protobuf.Empty);
20 | * }
21 | */
export interface Empty {
  // Intentionally has no fields; proto `google.protobuf.Empty` carries no data.
}
24 |
25 | function createBaseEmpty(): Empty {
26 | return {};
27 | }
28 |
29 | export const Empty: MessageFns = {
30 | encode(_: Empty, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
31 | return writer;
32 | },
33 |
34 | decode(input: BinaryReader | Uint8Array, length?: number): Empty {
35 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
36 | let end = length === undefined ? reader.len : reader.pos + length;
37 | const message = createBaseEmpty();
38 | while (reader.pos < end) {
39 | const tag = reader.uint32();
40 | switch (tag >>> 3) {
41 | }
42 | if ((tag & 7) === 4 || tag === 0) {
43 | break;
44 | }
45 | reader.skip(tag & 7);
46 | }
47 | return message;
48 | },
49 |
50 | fromJSON(_: any): Empty {
51 | return {};
52 | },
53 |
54 | toJSON(_: Empty): unknown {
55 | const obj: any = {};
56 | return obj;
57 | },
58 |
59 | create(base?: DeepPartial): Empty {
60 | return Empty.fromPartial(base ?? {});
61 | },
62 | fromPartial(_: DeepPartial): Empty {
63 | const message = createBaseEmpty();
64 | return message;
65 | },
66 | };
67 |
68 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
69 |
70 | export type DeepPartial = T extends Builtin ? T
71 | : T extends Long ? string | number | Long : T extends globalThis.Array ? globalThis.Array>
72 | : T extends ReadonlyArray ? ReadonlyArray>
73 | : T extends { $case: string } ? { [K in keyof Omit]?: DeepPartial } & { $case: T["$case"] }
74 | : T extends {} ? { [K in keyof T]?: DeepPartial }
75 | : Partial;
76 |
77 | export interface MessageFns {
78 | encode(message: T, writer?: BinaryWriter): BinaryWriter;
79 | decode(input: BinaryReader | Uint8Array, length?: number): T;
80 | fromJSON(object: any): T;
81 | toJSON(message: T): unknown;
82 | create(base?: DeepPartial): T;
83 | fromPartial(object: DeepPartial): T;
84 | }
85 |
--------------------------------------------------------------------------------
/src/grpc/generated/google/protobuf/struct.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: google/protobuf/struct.proto
6 |
7 | /* eslint-disable */
8 | import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
9 | import Long from "long";
10 |
11 | export const protobufPackage = "google.protobuf";
12 |
13 | /**
14 | * `NullValue` is a singleton enumeration to represent the null value for the
15 | * `Value` type union.
16 | *
17 | * The JSON representation for `NullValue` is JSON `null`.
18 | */
19 | export enum NullValue {
20 | /** NULL_VALUE - Null value. */
21 | NULL_VALUE = 0,
22 | UNRECOGNIZED = -1,
23 | }
24 |
25 | export function nullValueFromJSON(object: any): NullValue {
26 | switch (object) {
27 | case 0:
28 | case "NULL_VALUE":
29 | return NullValue.NULL_VALUE;
30 | case -1:
31 | case "UNRECOGNIZED":
32 | default:
33 | return NullValue.UNRECOGNIZED;
34 | }
35 | }
36 |
37 | export function nullValueToJSON(object: NullValue): string {
38 | switch (object) {
39 | case NullValue.NULL_VALUE:
40 | return "NULL_VALUE";
41 | case NullValue.UNRECOGNIZED:
42 | default:
43 | return "UNRECOGNIZED";
44 | }
45 | }
46 |
47 | /**
48 | * `Struct` represents a structured data value, consisting of fields
49 | * which map to dynamically typed values. In some languages, `Struct`
50 | * might be supported by a native representation. For example, in
51 | * scripting languages like JS a struct is represented as an
52 | * object. The details of that representation are described together
53 | * with the proto support for the language.
54 | *
55 | * The JSON representation for `Struct` is JSON object.
56 | */
57 | export interface Struct {
58 | /** Unordered map of dynamically typed values. */
59 | fields: { [key: string]: any | undefined };
60 | }
61 |
62 | export interface Struct_FieldsEntry {
63 | key: string;
64 | value: any | undefined;
65 | }
66 |
67 | /**
68 | * `Value` represents a dynamically typed value which can be either
69 | * null, a number, a string, a boolean, a recursive struct value, or a
70 | * list of values. A producer of value is expected to set one of these
71 | * variants. Absence of any variant indicates an error.
72 | *
73 | * The JSON representation for `Value` is JSON value.
74 | */
75 | export interface Value {
76 | /** The kind of value. */
77 | kind?:
78 | | //
79 | /** Represents a null value. */
80 | { $case: "nullValue"; nullValue: NullValue }
81 | | //
82 | /** Represents a double value. */
83 | { $case: "numberValue"; numberValue: number }
84 | | //
85 | /** Represents a string value. */
86 | { $case: "stringValue"; stringValue: string }
87 | | //
88 | /** Represents a boolean value. */
89 | { $case: "boolValue"; boolValue: boolean }
90 | | //
91 | /** Represents a structured value. */
92 | { $case: "structValue"; structValue: { [key: string]: any } | undefined }
93 | | //
94 | /** Represents a repeated `Value`. */
95 | { $case: "listValue"; listValue: Array | undefined }
96 | | undefined;
97 | }
98 |
99 | /**
100 | * `ListValue` is a wrapper around a repeated field of values.
101 | *
102 | * The JSON representation for `ListValue` is JSON array.
103 | */
104 | export interface ListValue {
105 | /** Repeated field of dynamically typed values. */
106 | values: any[];
107 | }
108 |
109 | function createBaseStruct(): Struct {
110 | return { fields: {} };
111 | }
112 |
113 | export const Struct: MessageFns & StructWrapperFns = {
114 | encode(message: Struct, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
115 | Object.entries(message.fields).forEach(([key, value]) => {
116 | if (value !== undefined) {
117 | Struct_FieldsEntry.encode({ key: key as any, value }, writer.uint32(10).fork()).join();
118 | }
119 | });
120 | return writer;
121 | },
122 |
123 | decode(input: BinaryReader | Uint8Array, length?: number): Struct {
124 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
125 | let end = length === undefined ? reader.len : reader.pos + length;
126 | const message = createBaseStruct();
127 | while (reader.pos < end) {
128 | const tag = reader.uint32();
129 | switch (tag >>> 3) {
130 | case 1: {
131 | if (tag !== 10) {
132 | break;
133 | }
134 |
135 | const entry1 = Struct_FieldsEntry.decode(reader, reader.uint32());
136 | if (entry1.value !== undefined) {
137 | message.fields[entry1.key] = entry1.value;
138 | }
139 | continue;
140 | }
141 | }
142 | if ((tag & 7) === 4 || tag === 0) {
143 | break;
144 | }
145 | reader.skip(tag & 7);
146 | }
147 | return message;
148 | },
149 |
150 | fromJSON(object: any): Struct {
151 | return {
152 | fields: isObject(object.fields)
153 | ? Object.entries(object.fields).reduce<{ [key: string]: any | undefined }>((acc, [key, value]) => {
154 | acc[key] = value as any | undefined;
155 | return acc;
156 | }, {})
157 | : {},
158 | };
159 | },
160 |
161 | toJSON(message: Struct): unknown {
162 | const obj: any = {};
163 | if (message.fields) {
164 | const entries = Object.entries(message.fields);
165 | if (entries.length > 0) {
166 | obj.fields = {};
167 | entries.forEach(([k, v]) => {
168 | obj.fields[k] = v;
169 | });
170 | }
171 | }
172 | return obj;
173 | },
174 |
175 | create(base?: DeepPartial): Struct {
176 | return Struct.fromPartial(base ?? {});
177 | },
178 | fromPartial(object: DeepPartial): Struct {
179 | const message = createBaseStruct();
180 | message.fields = Object.entries(object.fields ?? {}).reduce<{ [key: string]: any | undefined }>(
181 | (acc, [key, value]) => {
182 | if (value !== undefined) {
183 | acc[key] = value;
184 | }
185 | return acc;
186 | },
187 | {},
188 | );
189 | return message;
190 | },
191 |
192 | wrap(object: { [key: string]: any } | undefined): Struct {
193 | const struct = createBaseStruct();
194 |
195 | if (object !== undefined) {
196 | for (const key of Object.keys(object)) {
197 | struct.fields[key] = object[key];
198 | }
199 | }
200 | return struct;
201 | },
202 |
203 | unwrap(message: Struct): { [key: string]: any } {
204 | const object: { [key: string]: any } = {};
205 | if (message.fields) {
206 | for (const key of Object.keys(message.fields)) {
207 | object[key] = message.fields[key];
208 | }
209 | }
210 | return object;
211 | },
212 | };
213 |
214 | function createBaseStruct_FieldsEntry(): Struct_FieldsEntry {
215 | return { key: "", value: undefined };
216 | }
217 |
218 | export const Struct_FieldsEntry: MessageFns = {
219 | encode(message: Struct_FieldsEntry, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
220 | if (message.key !== "") {
221 | writer.uint32(10).string(message.key);
222 | }
223 | if (message.value !== undefined) {
224 | Value.encode(Value.wrap(message.value), writer.uint32(18).fork()).join();
225 | }
226 | return writer;
227 | },
228 |
229 | decode(input: BinaryReader | Uint8Array, length?: number): Struct_FieldsEntry {
230 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
231 | let end = length === undefined ? reader.len : reader.pos + length;
232 | const message = createBaseStruct_FieldsEntry();
233 | while (reader.pos < end) {
234 | const tag = reader.uint32();
235 | switch (tag >>> 3) {
236 | case 1: {
237 | if (tag !== 10) {
238 | break;
239 | }
240 |
241 | message.key = reader.string();
242 | continue;
243 | }
244 | case 2: {
245 | if (tag !== 18) {
246 | break;
247 | }
248 |
249 | message.value = Value.unwrap(Value.decode(reader, reader.uint32()));
250 | continue;
251 | }
252 | }
253 | if ((tag & 7) === 4 || tag === 0) {
254 | break;
255 | }
256 | reader.skip(tag & 7);
257 | }
258 | return message;
259 | },
260 |
261 | fromJSON(object: any): Struct_FieldsEntry {
262 | return {
263 | key: isSet(object.key) ? globalThis.String(object.key) : "",
264 | value: isSet(object?.value) ? object.value : undefined,
265 | };
266 | },
267 |
268 | toJSON(message: Struct_FieldsEntry): unknown {
269 | const obj: any = {};
270 | if (message.key !== "") {
271 | obj.key = message.key;
272 | }
273 | if (message.value !== undefined) {
274 | obj.value = message.value;
275 | }
276 | return obj;
277 | },
278 |
279 | create(base?: DeepPartial): Struct_FieldsEntry {
280 | return Struct_FieldsEntry.fromPartial(base ?? {});
281 | },
282 | fromPartial(object: DeepPartial): Struct_FieldsEntry {
283 | const message = createBaseStruct_FieldsEntry();
284 | message.key = object.key ?? "";
285 | message.value = object.value ?? undefined;
286 | return message;
287 | },
288 | };
289 |
290 | function createBaseValue(): Value {
291 | return { kind: undefined };
292 | }
293 |
294 | export const Value: MessageFns & AnyValueWrapperFns = {
295 | encode(message: Value, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
296 | switch (message.kind?.$case) {
297 | case "nullValue":
298 | writer.uint32(8).int32(message.kind.nullValue);
299 | break;
300 | case "numberValue":
301 | writer.uint32(17).double(message.kind.numberValue);
302 | break;
303 | case "stringValue":
304 | writer.uint32(26).string(message.kind.stringValue);
305 | break;
306 | case "boolValue":
307 | writer.uint32(32).bool(message.kind.boolValue);
308 | break;
309 | case "structValue":
310 | Struct.encode(Struct.wrap(message.kind.structValue), writer.uint32(42).fork()).join();
311 | break;
312 | case "listValue":
313 | ListValue.encode(ListValue.wrap(message.kind.listValue), writer.uint32(50).fork()).join();
314 | break;
315 | }
316 | return writer;
317 | },
318 |
319 | decode(input: BinaryReader | Uint8Array, length?: number): Value {
320 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
321 | let end = length === undefined ? reader.len : reader.pos + length;
322 | const message = createBaseValue();
323 | while (reader.pos < end) {
324 | const tag = reader.uint32();
325 | switch (tag >>> 3) {
326 | case 1: {
327 | if (tag !== 8) {
328 | break;
329 | }
330 |
331 | message.kind = { $case: "nullValue", nullValue: reader.int32() as any };
332 | continue;
333 | }
334 | case 2: {
335 | if (tag !== 17) {
336 | break;
337 | }
338 |
339 | message.kind = { $case: "numberValue", numberValue: reader.double() };
340 | continue;
341 | }
342 | case 3: {
343 | if (tag !== 26) {
344 | break;
345 | }
346 |
347 | message.kind = { $case: "stringValue", stringValue: reader.string() };
348 | continue;
349 | }
350 | case 4: {
351 | if (tag !== 32) {
352 | break;
353 | }
354 |
355 | message.kind = { $case: "boolValue", boolValue: reader.bool() };
356 | continue;
357 | }
358 | case 5: {
359 | if (tag !== 42) {
360 | break;
361 | }
362 |
363 | message.kind = { $case: "structValue", structValue: Struct.unwrap(Struct.decode(reader, reader.uint32())) };
364 | continue;
365 | }
366 | case 6: {
367 | if (tag !== 50) {
368 | break;
369 | }
370 |
371 | message.kind = { $case: "listValue", listValue: ListValue.unwrap(ListValue.decode(reader, reader.uint32())) };
372 | continue;
373 | }
374 | }
375 | if ((tag & 7) === 4 || tag === 0) {
376 | break;
377 | }
378 | reader.skip(tag & 7);
379 | }
380 | return message;
381 | },
382 |
383 | fromJSON(object: any): Value {
384 | return {
385 | kind: isSet(object.nullValue)
386 | ? { $case: "nullValue", nullValue: nullValueFromJSON(object.nullValue) }
387 | : isSet(object.numberValue)
388 | ? { $case: "numberValue", numberValue: globalThis.Number(object.numberValue) }
389 | : isSet(object.stringValue)
390 | ? { $case: "stringValue", stringValue: globalThis.String(object.stringValue) }
391 | : isSet(object.boolValue)
392 | ? { $case: "boolValue", boolValue: globalThis.Boolean(object.boolValue) }
393 | : isSet(object.structValue)
394 | ? { $case: "structValue", structValue: object.structValue }
395 | : isSet(object.listValue)
396 | ? { $case: "listValue", listValue: [...object.listValue] }
397 | : undefined,
398 | };
399 | },
400 |
401 | toJSON(message: Value): unknown {
402 | const obj: any = {};
403 | if (message.kind?.$case === "nullValue") {
404 | obj.nullValue = nullValueToJSON(message.kind.nullValue);
405 | } else if (message.kind?.$case === "numberValue") {
406 | obj.numberValue = message.kind.numberValue;
407 | } else if (message.kind?.$case === "stringValue") {
408 | obj.stringValue = message.kind.stringValue;
409 | } else if (message.kind?.$case === "boolValue") {
410 | obj.boolValue = message.kind.boolValue;
411 | } else if (message.kind?.$case === "structValue") {
412 | obj.structValue = message.kind.structValue;
413 | } else if (message.kind?.$case === "listValue") {
414 | obj.listValue = message.kind.listValue;
415 | }
416 | return obj;
417 | },
418 |
419 | create(base?: DeepPartial): Value {
420 | return Value.fromPartial(base ?? {});
421 | },
422 | fromPartial(object: DeepPartial): Value {
423 | const message = createBaseValue();
424 | switch (object.kind?.$case) {
425 | case "nullValue": {
426 | if (object.kind?.nullValue !== undefined && object.kind?.nullValue !== null) {
427 | message.kind = { $case: "nullValue", nullValue: object.kind.nullValue };
428 | }
429 | break;
430 | }
431 | case "numberValue": {
432 | if (object.kind?.numberValue !== undefined && object.kind?.numberValue !== null) {
433 | message.kind = { $case: "numberValue", numberValue: object.kind.numberValue };
434 | }
435 | break;
436 | }
437 | case "stringValue": {
438 | if (object.kind?.stringValue !== undefined && object.kind?.stringValue !== null) {
439 | message.kind = { $case: "stringValue", stringValue: object.kind.stringValue };
440 | }
441 | break;
442 | }
443 | case "boolValue": {
444 | if (object.kind?.boolValue !== undefined && object.kind?.boolValue !== null) {
445 | message.kind = { $case: "boolValue", boolValue: object.kind.boolValue };
446 | }
447 | break;
448 | }
449 | case "structValue": {
450 | if (object.kind?.structValue !== undefined && object.kind?.structValue !== null) {
451 | message.kind = { $case: "structValue", structValue: object.kind.structValue };
452 | }
453 | break;
454 | }
455 | case "listValue": {
456 | if (object.kind?.listValue !== undefined && object.kind?.listValue !== null) {
457 | message.kind = { $case: "listValue", listValue: object.kind.listValue };
458 | }
459 | break;
460 | }
461 | }
462 | return message;
463 | },
464 |
465 | wrap(value: any): Value {
466 | const result = createBaseValue();
467 | if (value === null) {
468 | result.kind = { $case: "nullValue", nullValue: NullValue.NULL_VALUE };
469 | } else if (typeof value === "boolean") {
470 | result.kind = { $case: "boolValue", boolValue: value };
471 | } else if (typeof value === "number") {
472 | result.kind = { $case: "numberValue", numberValue: value };
473 | } else if (typeof value === "string") {
474 | result.kind = { $case: "stringValue", stringValue: value };
475 | } else if (globalThis.Array.isArray(value)) {
476 | result.kind = { $case: "listValue", listValue: value };
477 | } else if (typeof value === "object") {
478 | result.kind = { $case: "structValue", structValue: value };
479 | } else if (typeof value !== "undefined") {
480 | throw new globalThis.Error("Unsupported any value type: " + typeof value);
481 | }
482 | return result;
483 | },
484 |
485 | unwrap(message: Value): string | number | boolean | Object | null | Array | undefined {
486 | if (message.kind?.$case === "nullValue") {
487 | return null;
488 | } else if (message.kind?.$case === "numberValue") {
489 | return message.kind?.numberValue;
490 | } else if (message.kind?.$case === "stringValue") {
491 | return message.kind?.stringValue;
492 | } else if (message.kind?.$case === "boolValue") {
493 | return message.kind?.boolValue;
494 | } else if (message.kind?.$case === "structValue") {
495 | return message.kind?.structValue;
496 | } else if (message.kind?.$case === "listValue") {
497 | return message.kind?.listValue;
498 | } else {
499 | return undefined;
500 | }
501 | },
502 | };
503 |
504 | function createBaseListValue(): ListValue {
505 | return { values: [] };
506 | }
507 |
508 | export const ListValue: MessageFns & ListValueWrapperFns = {
509 | encode(message: ListValue, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
510 | for (const v of message.values) {
511 | Value.encode(Value.wrap(v!), writer.uint32(10).fork()).join();
512 | }
513 | return writer;
514 | },
515 |
516 | decode(input: BinaryReader | Uint8Array, length?: number): ListValue {
517 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
518 | let end = length === undefined ? reader.len : reader.pos + length;
519 | const message = createBaseListValue();
520 | while (reader.pos < end) {
521 | const tag = reader.uint32();
522 | switch (tag >>> 3) {
523 | case 1: {
524 | if (tag !== 10) {
525 | break;
526 | }
527 |
528 | message.values.push(Value.unwrap(Value.decode(reader, reader.uint32())));
529 | continue;
530 | }
531 | }
532 | if ((tag & 7) === 4 || tag === 0) {
533 | break;
534 | }
535 | reader.skip(tag & 7);
536 | }
537 | return message;
538 | },
539 |
540 | fromJSON(object: any): ListValue {
541 | return { values: globalThis.Array.isArray(object?.values) ? [...object.values] : [] };
542 | },
543 |
544 | toJSON(message: ListValue): unknown {
545 | const obj: any = {};
546 | if (message.values?.length) {
547 | obj.values = message.values;
548 | }
549 | return obj;
550 | },
551 |
552 | create(base?: DeepPartial): ListValue {
553 | return ListValue.fromPartial(base ?? {});
554 | },
555 | fromPartial(object: DeepPartial): ListValue {
556 | const message = createBaseListValue();
557 | message.values = object.values?.map((e) => e) || [];
558 | return message;
559 | },
560 |
561 | wrap(array: Array | undefined): ListValue {
562 | const result = createBaseListValue();
563 | result.values = array ?? [];
564 | return result;
565 | },
566 |
567 | unwrap(message: ListValue): Array {
568 | if (message?.hasOwnProperty("values") && globalThis.Array.isArray(message.values)) {
569 | return message.values;
570 | } else {
571 | return message as any;
572 | }
573 | },
574 | };
575 |
576 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
577 |
578 | export type DeepPartial = T extends Builtin ? T
579 | : T extends Long ? string | number | Long : T extends globalThis.Array ? globalThis.Array>
580 | : T extends ReadonlyArray ? ReadonlyArray>
581 | : T extends { $case: string } ? { [K in keyof Omit]?: DeepPartial } & { $case: T["$case"] }
582 | : T extends {} ? { [K in keyof T]?: DeepPartial }
583 | : Partial;
584 |
585 | function isObject(value: any): boolean {
586 | return typeof value === "object" && value !== null;
587 | }
588 |
589 | function isSet(value: any): boolean {
590 | return value !== null && value !== undefined;
591 | }
592 |
593 | export interface MessageFns {
594 | encode(message: T, writer?: BinaryWriter): BinaryWriter;
595 | decode(input: BinaryReader | Uint8Array, length?: number): T;
596 | fromJSON(object: any): T;
597 | toJSON(message: T): unknown;
598 | create(base?: DeepPartial): T;
599 | fromPartial(object: DeepPartial): T;
600 | }
601 |
602 | export interface StructWrapperFns {
603 | wrap(object: { [key: string]: any } | undefined): Struct;
604 | unwrap(message: Struct): { [key: string]: any };
605 | }
606 |
607 | export interface AnyValueWrapperFns {
608 | wrap(value: any): Value;
609 | unwrap(message: any): string | number | boolean | Object | null | Array | undefined;
610 | }
611 |
612 | export interface ListValueWrapperFns {
613 | wrap(array: Array | undefined): ListValue;
614 | unwrap(message: ListValue): Array;
615 | }
616 |
--------------------------------------------------------------------------------
/src/grpc/generated/google/protobuf/timestamp.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: google/protobuf/timestamp.proto
6 |
7 | /* eslint-disable */
8 | import { BinaryReader, BinaryWriter } from "@bufbuild/protobuf/wire";
9 | import Long from "long";
10 |
11 | export const protobufPackage = "google.protobuf";
12 |
13 | /**
14 | * A Timestamp represents a point in time independent of any time zone or local
15 | * calendar, encoded as a count of seconds and fractions of seconds at
16 | * nanosecond resolution. The count is relative to an epoch at UTC midnight on
17 | * January 1, 1970, in the proleptic Gregorian calendar which extends the
18 | * Gregorian calendar backwards to year one.
19 | *
20 | * All minutes are 60 seconds long. Leap seconds are "smeared" so that no leap
21 | * second table is needed for interpretation, using a [24-hour linear
22 | * smear](https://developers.google.com/time/smear).
23 | *
24 | * The range is from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59.999999999Z. By
25 | * restricting to that range, we ensure that we can convert to and from [RFC
26 | * 3339](https://www.ietf.org/rfc/rfc3339.txt) date strings.
27 | *
28 | * # Examples
29 | *
30 | * Example 1: Compute Timestamp from POSIX `time()`.
31 | *
32 | * Timestamp timestamp;
33 | * timestamp.set_seconds(time(NULL));
34 | * timestamp.set_nanos(0);
35 | *
36 | * Example 2: Compute Timestamp from POSIX `gettimeofday()`.
37 | *
38 | * struct timeval tv;
39 | * gettimeofday(&tv, NULL);
40 | *
41 | * Timestamp timestamp;
42 | * timestamp.set_seconds(tv.tv_sec);
43 | * timestamp.set_nanos(tv.tv_usec * 1000);
44 | *
45 | * Example 3: Compute Timestamp from Win32 `GetSystemTimeAsFileTime()`.
46 | *
47 | * FILETIME ft;
48 | * GetSystemTimeAsFileTime(&ft);
49 | * UINT64 ticks = (((UINT64)ft.dwHighDateTime) << 32) | ft.dwLowDateTime;
50 | *
51 | * // A Windows tick is 100 nanoseconds. Windows epoch 1601-01-01T00:00:00Z
52 | * // is 11644473600 seconds before Unix epoch 1970-01-01T00:00:00Z.
53 | * Timestamp timestamp;
54 | * timestamp.set_seconds((INT64) ((ticks / 10000000) - 11644473600LL));
55 | * timestamp.set_nanos((INT32) ((ticks % 10000000) * 100));
56 | *
57 | * Example 4: Compute Timestamp from Java `System.currentTimeMillis()`.
58 | *
59 | * long millis = System.currentTimeMillis();
60 | *
61 | * Timestamp timestamp = Timestamp.newBuilder().setSeconds(millis / 1000)
62 | * .setNanos((int) ((millis % 1000) * 1000000)).build();
63 | *
64 | * Example 5: Compute Timestamp from Java `Instant.now()`.
65 | *
66 | * Instant now = Instant.now();
67 | *
68 | * Timestamp timestamp =
69 | * Timestamp.newBuilder().setSeconds(now.getEpochSecond())
70 | * .setNanos(now.getNano()).build();
71 | *
72 | * Example 6: Compute Timestamp from current time in Python.
73 | *
74 | * timestamp = Timestamp()
75 | * timestamp.GetCurrentTime()
76 | *
77 | * # JSON Mapping
78 | *
79 | * In JSON format, the Timestamp type is encoded as a string in the
80 | * [RFC 3339](https://www.ietf.org/rfc/rfc3339.txt) format. That is, the
81 | * format is "{year}-{month}-{day}T{hour}:{min}:{sec}[.{frac_sec}]Z"
82 | * where {year} is always expressed using four digits while {month}, {day},
83 | * {hour}, {min}, and {sec} are zero-padded to two digits each. The fractional
84 | * seconds, which can go up to 9 digits (i.e. up to 1 nanosecond resolution),
85 | * are optional. The "Z" suffix indicates the timezone ("UTC"); the timezone
86 | * is required. A proto3 JSON serializer should always use UTC (as indicated by
87 | * "Z") when printing the Timestamp type and a proto3 JSON parser should be
88 | * able to accept both UTC and other timezones (as indicated by an offset).
89 | *
90 | * For example, "2017-01-15T01:30:15.01Z" encodes 15.01 seconds past
91 | * 01:30 UTC on January 15, 2017.
92 | *
93 | * In JavaScript, one can convert a Date object to this format using the
94 | * standard
95 | * [toISOString()](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Date/toISOString)
96 | * method. In Python, a standard `datetime.datetime` object can be converted
97 | * to this format using
98 | * [`strftime`](https://docs.python.org/2/library/time.html#time.strftime) with
99 | * the time format spec '%Y-%m-%dT%H:%M:%S.%fZ'. Likewise, in Java, one can use
100 | * the Joda Time's [`ISODateTimeFormat.dateTime()`](
101 | * http://www.joda.org/joda-time/apidocs/org/joda/time/format/ISODateTimeFormat.html#dateTime%2D%2D
102 | * ) to obtain a formatter capable of generating timestamps in this format.
103 | */
export interface Timestamp {
  /**
   * Represents seconds of UTC time since Unix epoch
   * 1970-01-01T00:00:00Z. Must be from 0001-01-01T00:00:00Z to
   * 9999-12-31T23:59:59Z inclusive.
   */
  // Long (from the `long` package) is used because a JS `number` cannot
  // represent the full int64 range losslessly.
  seconds: Long;
  /**
   * Non-negative fractions of a second at nanosecond resolution. Negative
   * second values with fractions must still have non-negative nanos values
   * that count forward in time. Must be from 0 to 999,999,999
   * inclusive.
   */
  nanos: number;
}
119 |
120 | function createBaseTimestamp(): Timestamp {
121 | return { seconds: Long.ZERO, nanos: 0 };
122 | }
123 |
124 | export const Timestamp: MessageFns = {
125 | encode(message: Timestamp, writer: BinaryWriter = new BinaryWriter()): BinaryWriter {
126 | if (!message.seconds.equals(Long.ZERO)) {
127 | writer.uint32(8).int64(message.seconds.toString());
128 | }
129 | if (message.nanos !== 0) {
130 | writer.uint32(16).int32(message.nanos);
131 | }
132 | return writer;
133 | },
134 |
135 | decode(input: BinaryReader | Uint8Array, length?: number): Timestamp {
136 | const reader = input instanceof BinaryReader ? input : new BinaryReader(input);
137 | let end = length === undefined ? reader.len : reader.pos + length;
138 | const message = createBaseTimestamp();
139 | while (reader.pos < end) {
140 | const tag = reader.uint32();
141 | switch (tag >>> 3) {
142 | case 1: {
143 | if (tag !== 8) {
144 | break;
145 | }
146 |
147 | message.seconds = Long.fromString(reader.int64().toString());
148 | continue;
149 | }
150 | case 2: {
151 | if (tag !== 16) {
152 | break;
153 | }
154 |
155 | message.nanos = reader.int32();
156 | continue;
157 | }
158 | }
159 | if ((tag & 7) === 4 || tag === 0) {
160 | break;
161 | }
162 | reader.skip(tag & 7);
163 | }
164 | return message;
165 | },
166 |
167 | fromJSON(object: any): Timestamp {
168 | return {
169 | seconds: isSet(object.seconds) ? Long.fromValue(object.seconds) : Long.ZERO,
170 | nanos: isSet(object.nanos) ? globalThis.Number(object.nanos) : 0,
171 | };
172 | },
173 |
174 | toJSON(message: Timestamp): unknown {
175 | const obj: any = {};
176 | if (!message.seconds.equals(Long.ZERO)) {
177 | obj.seconds = (message.seconds || Long.ZERO).toString();
178 | }
179 | if (message.nanos !== 0) {
180 | obj.nanos = Math.round(message.nanos);
181 | }
182 | return obj;
183 | },
184 |
185 | create(base?: DeepPartial): Timestamp {
186 | return Timestamp.fromPartial(base ?? {});
187 | },
188 | fromPartial(object: DeepPartial): Timestamp {
189 | const message = createBaseTimestamp();
190 | message.seconds = (object.seconds !== undefined && object.seconds !== null)
191 | ? Long.fromValue(object.seconds)
192 | : Long.ZERO;
193 | message.nanos = object.nanos ?? 0;
194 | return message;
195 | },
196 | };
197 |
198 | type Builtin = Date | Function | Uint8Array | string | number | boolean | undefined;
199 |
200 | export type DeepPartial = T extends Builtin ? T
201 | : T extends Long ? string | number | Long : T extends globalThis.Array ? globalThis.Array>
202 | : T extends ReadonlyArray ? ReadonlyArray>
203 | : T extends { $case: string } ? { [K in keyof Omit]?: DeepPartial } & { $case: T["$case"] }
204 | : T extends {} ? { [K in keyof T]?: DeepPartial }
205 | : Partial;
206 |
207 | function isSet(value: any): boolean {
208 | return value !== null && value !== undefined;
209 | }
210 |
211 | export interface MessageFns {
212 | encode(message: T, writer?: BinaryWriter): BinaryWriter;
213 | decode(input: BinaryReader | Uint8Array, length?: number): T;
214 | fromJSON(object: any): T;
215 | toJSON(message: T): unknown;
216 | create(base?: DeepPartial): T;
217 | fromPartial(object: DeepPartial): T;
218 | }
219 |
--------------------------------------------------------------------------------
/src/grpc/generated/protoc-gen-openapiv2/options/annotations.ts:
--------------------------------------------------------------------------------
1 | // Code generated by protoc-gen-ts_proto. DO NOT EDIT.
2 | // versions:
3 | // protoc-gen-ts_proto v2.7.2
4 | // protoc unknown
5 | // source: protoc-gen-openapiv2/options/annotations.proto
6 |
7 | /* eslint-disable */
8 |
// Fully-qualified proto package of the source file this module was generated from.
export const protobufPackage = "grpc.gateway.protoc_gen_openapiv2.options";
10 |
--------------------------------------------------------------------------------
/src/grpc/index.ts:
--------------------------------------------------------------------------------
// Generated protobuf message modules, re-exported under stable namespaces.
export * as payload from './generated/base/v1/service';
export * as base from './generated/base/v1/base';
// Hand-written client, interceptor, and configuration helpers.
export * from './clients';
export * from './interceptors';
export * from './config';
--------------------------------------------------------------------------------
/src/grpc/interceptors.ts:
--------------------------------------------------------------------------------
1 | import {CallOptions, ClientMiddleware, ClientMiddlewareCall} from 'nice-grpc';
2 | import {Metadata} from 'nice-grpc-common';
3 |
4 | /**
5 | * Create a simple gRPC `Interceptor` that attaches a given access token to any request
6 | * a client sends. The token is attached with the `Bearer` auth-scheme.
7 | *
8 | * The interceptor does not insert the access token if the intercepted call
9 | * already has an `Authorization` header.
10 | *
11 | * @param token The access token that should be added to the gRPC request.
12 | *
13 | * @returns A gRPC client middleware (interceptor) that attaches the given token to each request, if no other authorization header is present.
14 | */
15 | export const newAccessTokenInterceptor = (token: string): ClientMiddleware =>
16 | async function* (call: ClientMiddlewareCall, options: CallOptions) {
17 | options.metadata ??= new Metadata();
18 | if (!options.metadata.has('authorization')) {
19 | options.metadata.set('authorization', `Bearer ${token}`);
20 | }
21 | return yield* call.next(call.request, options);
22 | };
23 |
--------------------------------------------------------------------------------
/src/index.ts:
--------------------------------------------------------------------------------
// Public package entry point: the SDK surface is exposed under the `grpc` namespace.
export * as grpc from './grpc';
2 |
--------------------------------------------------------------------------------
/tsconfig.json:
--------------------------------------------------------------------------------
1 | {
2 | "compilerOptions": {
3 | "module": "commonjs",
4 | "target": "ES2020",
5 | "declaration": true,
6 | "outDir": "./dist",
7 | "baseUrl": "./src",
8 | "incremental": true,
9 | "skipLibCheck": true,
10 | "allowJs": true,
11 | "forceConsistentCasingInFileNames": true,
12 | "allowSyntheticDefaultImports": true,
13 | "esModuleInterop": true,
14 | "rootDir": "./",
15 | "typeRoots": [
16 | "node_modules/@types"
17 | ]
18 | },
19 | "include": [
20 | "src"
21 | ],
22 | "exclude": [
23 | "node_modules",
    "src/**/*.test.*"
25 | ]
26 | }
--------------------------------------------------------------------------------