├── .github
│   └── workflows
│       ├── build.yaml
│       └── release.yaml
├── .gitignore
├── .pre-commit-config.yaml
├── CHANGELOG.md
├── LICENSE
├── README.md
├── build.gradle
├── example
│   ├── README.md
│   ├── clear_cert.sh
│   ├── create_cert.sh
│   ├── docker-compose.yaml
│   ├── opa_tutorial
│   │   ├── README.md
│   │   ├── create_cert.sh
│   │   └── docker-compose.yaml
│   └── policy
│       └── .gitkeep
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── settings.gradle
└── src
    ├── main
    │   ├── rego
    │   │   ├── README.md
    │   │   └── policy.rego
    │   └── scala
    │       └── org
    │           └── openpolicyagent
    │               └── kafka
    │                   ├── MetricsLabel.scala
    │                   └── OpaAuthorizer.scala
    └── test
        ├── rego
        │   ├── README.md
        │   └── policy_test.rego
        ├── resources
        │   └── log4j2.xml
        └── scala
            └── org
                └── openpolicyagent
                    └── kafka
                        ├── AzRequestContext.scala
                        ├── OpaAuthorizerBenchmark.scala
                        └── OpaAuthorizerSpec.scala
/.github/workflows/build.yaml:
--------------------------------------------------------------------------------
1 | name: build
2 | on: [push]
3 |
4 | jobs:
5 | build:
6 | name: Build plugin
7 | runs-on: ubuntu-latest
8 | steps:
9 | - uses: actions/checkout@v4
10 | - name: Set up JDK 17
11 | uses: actions/setup-java@v4
12 | with:
13 | distribution: 'temurin'
14 | java-version: '17'
15 | cache: 'gradle'
16 | - name: Download opa
17 | run: wget -O opa https://openpolicyagent.org/downloads/latest/opa_linux_amd64
18 | - name: Install opa
19 | run: sudo mv opa /usr/local/bin/ && sudo chmod +x /usr/local/bin/opa
20 | - name: Test example rego
21 | run: opa test ./src/main/rego/policy.rego ./src/test/rego/policy_test.rego
22 | - name: Build plugin
23 | run: ./gradlew check shadowJar
24 | - name: Jacoco test report
25 | run: ./gradlew jacocoTestReport
26 | - uses: actions/upload-artifact@v4
27 | with:
28 | name: opa-authorizer
29 | path: build/libs
30 | - uses: codecov/codecov-action@v3
31 | with:
32 | token: ${{secrets.CODECOV_TOKEN}}
33 | files: ./build/reports/jacoco/test/jacocoTestReport.xml
34 | name: opa-kafka-plugin
35 | - uses: codecov/codecov-action@v3
36 | with:
37 | token: ${{secrets.CODECOV_TOKEN}}
38 | files: ./build/reports/opa/opa-codecov-coverage.json
39 | name: opa-policies
40 |
--------------------------------------------------------------------------------
/.github/workflows/release.yaml:
--------------------------------------------------------------------------------
1 | #name: release
2 | #on:
3 | # push:
4 | # tags:
5 | # - 'v[0-9]+.[0-9]+.[0-9]+'
6 | # workflow_dispatch:
7 | #jobs:
8 | # buildAndRelease:
9 | # runs-on: ubuntu-latest
10 | # steps:
11 | # - uses: actions/checkout@v2
12 | # - uses: actions/setup-java@v1
13 | # with:
14 | # java-version: 11
15 | # - uses: eskatos/gradle-command-action@v1
16 | # env:
17 | # SIGNING_KEY: ${{ secrets.SIGNING_KEY }}
18 | # SIGNING_PASSWORD: ${{ secrets.SIGNING_PASSWORD }}
19 | # OSSRH_USERNAME: ${{ secrets.OSSRH_USERNAME }}
20 | # OSSRH_PASSWORD: ${{ secrets.OSSRH_PASSWORD }}
21 | # with:
22 | # arguments: publishAllPublicationsToOSSRHRepository -PsigningKey=$SIGNING_KEY -PsigningPassword=$SIGNING_PASSWORD -PossrhUsername=$OSSRH_USERNAME -PossrhPassword=$OSSRH_PASSWORD -Dorg.gradle.internal.publish.checksums.insecure=true
23 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .gradle/
2 | .idea/
3 | .idea_modules/
4 | /build/
5 | !gradle-wrapper.jar
6 | .gradletasknamecache
7 | out/
8 | atlassian-ide-plugin.xml
9 | *.class
10 | *.log
11 | *.jar
12 | *.war
13 | *.nar
14 | *.ear
15 | *.zip
16 | *.tar.gz
17 | *.rar
18 | hs_err_pid*
19 |
20 | .metals
21 | .project
22 | .bloop
23 |
24 | example/cert/
25 | example/policy/bundle.tar.gz
26 |
27 |
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | # To use in local development:
2 | # - Run `pip3 install pre-commit` (or `brew install pre-commit`)
3 | # - In the project root directory (where this file resides) run: `pre-commit install`
4 | # - Done! The below hooks should now automatically run before commit (where applicable).
5 |
6 | repos:
7 | - repo: https://github.com/pre-commit/pre-commit-hooks
8 | rev: v3.3.0
9 | hooks:
10 | - id: no-commit-to-branch # Refuses commits to master by default
11 | - id: trailing-whitespace
12 | - id: check-added-large-files
13 | - id: check-merge-conflict
14 | - id: check-json
15 | - id: check-xml
16 | - id: check-yaml
17 | - id: detect-private-key
18 | - id: detect-aws-credentials
19 | - id: mixed-line-ending
20 | - id: end-of-file-fixer
21 |
22 | - repo: https://github.com/anderseknert/pre-commit-opa
23 | rev: v1.4.0
24 | hooks:
25 | - id: opa-fmt
26 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | All notable changes to this project will be documented in this file.
3 |
4 | The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
5 | and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
6 |
7 | ## [1.5.1] - 2023-03-09
8 |
9 | - Move construction of request builder to `AllowCallable`'s `call` method to avoid it being shared between threads [#47](https://github.com/StyraInc/opa-kafka-plugin/pull/47) ([@xhl1988](https://github.com/xhl1988))
10 |
11 | ## [1.5.0] - 2022-10-10
12 |
13 | - Add configuration properties (`opa.authorizer.truststore.*`) for using a truststore for HTTPS connections to OPA ([@iamatwork](https://github.com/iamatwork))
14 |
15 | ## [1.4.0] - 2022-01-11
16 |
17 | - Collect and expose JMX metrics from OPA authorizer ([@quangminhtran94](https://github.com/quangminhtran94))
18 |
21 | ## [1.3.0] - 2021-11-24
22 |
23 | ### Changes
24 |
25 | - Fix issue where the unimplemented `acls` method of the authorizer would be called under certain conditions ([@iamatwork](https://github.com/iamatwork))
26 | - Change package group from com.bisnode.kafka.authorization to org.openpolicyagent.kafka
27 |
28 | ## [1.2.0] - 2021-10-12
29 |
30 | ### Changes
31 |
32 | - Ensure compatibility with Kafka 3.0.0 (@scholzj)
33 |
34 | ## [1.1.0] - 2021-06-11
35 |
36 | ### Changes
37 |
38 | - Update to Kafka library 2.8.0
39 | - Tested on Kafka 2.7.0 & 2.8.0
40 |
41 | ## [1.0.0] - 2021-03-29
42 |
43 | ### Changes
44 |
45 | #### Breaking changes:
46 |
47 | - Update to use Scala 2.13
48 |   - Requires a Kafka cluster built with Scala 2.13
49 | - Update to Kafka library 2.7.0
50 |   - Requires Kafka 2.7.X
51 | - New input structure to OPA
52 |   - You will need to adjust policies to work with the new input structure. See an example of the new structure down below. We suggest first updating your policies to work with both the old and the new structure, then upgrading the plugin, and finally removing the old policies.
53 |
54 | New input structure:
55 | ```json
56 | {
57 | "action": {
58 | "logIfAllowed": true,
59 | "logIfDenied": true,
60 | "operation": "DESCRIBE",
61 | "resourcePattern": {
62 | "name": "alice-topic",
63 | "patternType": "LITERAL",
64 | "resourceType": "TOPIC",
65 | "unknown": false
66 | },
67 | "resourceReferenceCount": 1
68 | },
69 | "requestContext": {
70 | "clientAddress": "192.168.64.1",
71 | "clientInformation": {
72 | "softwareName": "unknown",
73 | "softwareVersion": "unknown"
74 | },
75 | "connectionId": "192.168.64.4:9092-192.168.64.1:58864-0",
76 | "header": {
77 | "data": {
78 | "clientId": "rdkafka",
79 | "correlationId": 5,
80 | "requestApiKey": 3,
81 | "requestApiVersion": 2
82 | },
83 | "headerVersion": 1
84 | },
85 | "listenerName": "SASL_PLAINTEXT",
86 | "principal": {
87 | "name": "alice-consumer",
88 | "principalType": "User"
89 | },
90 | "securityProtocol": "SASL_PLAINTEXT"
91 | }
92 | }
93 | ```
94 |
95 | #### Other changes
96 |
97 | - Include `guava` and `paranamer` in the shadowJar since they have been excluded from the Kafka installation
98 | - Update to use the new Kafka libraries to use the new API
99 | - Update OPA policy and tests to work with the new input structure
100 | - Update version on various dependencies
101 | - Add Maven information to README
102 | - Update changelog
103 |
104 | ## [0.4.2] - 2020-10-20
105 | - Update Guava to 30.0-jre
106 | - Update OPA Gradle plugin to 0.3.0
107 | - Update GitHub release script to properly use username and password
108 |
109 | ## [0.4.1] - 2020-04-29
110 | - Release to Maven Central under com.bisnode.kafka.authorization group. No code changes.
111 |
112 | ## [0.4.0] - 2020-04-23
113 | - Allow `super.users` to bypass OPA authorizer checks - [@scholzj](https://github.com/scholzj)
114 | - Fix wrong unit provided in docs on cache expiry - [@kelvk](https://github.com/kelvk)
115 |
116 | ## [0.3.0] - 2019-11-28
117 | - Default cache size increased from 500 to 50000, based on real-world usage metrics.
118 | - Don't cache decisions on errors, to avoid locking out a client that is actually authorized.
119 |
120 | ## [0.2.0] - 2019-11-21
121 | - Fix connection leak in authorization call.
122 |
123 | ## [0.1.0] - 2019-11-14
124 | ### Added
125 | - First release!
126 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Open Policy Agent plugin for Kafka authorization
2 | [![Maven Central](https://maven-badges.herokuapp.com/maven-central/org.openpolicyagent.kafka/opa-authorizer/badge.svg)](https://maven-badges.herokuapp.com/maven-central/org.openpolicyagent.kafka/opa-authorizer)
3 | 
4 | [![codecov](https://codecov.io/gh/StyraInc/opa-kafka-plugin/branch/main/graph/badge.svg)](https://codecov.io/gh/StyraInc/opa-kafka-plugin)
5 |
6 | Open Policy Agent (OPA) plugin for Kafka authorization.
7 |
8 | ### Prerequisites
9 |
10 | * Kafka 3.8.0+ (for older Kafka versions, please use a previous release)
11 | * Java 17 or above
12 | * OPA installed and running on the brokers
13 |
14 | ## Installation
15 |
18 | Download the latest OPA authorizer plugin jar from [Releases](https://github.com/anderseknert/opa-kafka-plugin/releases/) (or [Maven Central](https://search.maven.org/artifact/org.openpolicyagent.kafka/opa-authorizer)) and put the
19 | file (`opa-authorizer-${VERSION}.jar`) somewhere Kafka can find it - either directly in Kafka's `libs` directory
20 | or in a separate plugin directory provided to Kafka at startup, e.g.:
21 |
22 | `CLASSPATH=/usr/local/share/kafka/plugins/*`
23 |
24 | To activate the opa-kafka-plugin, add `authorizer.class.name` to `server.properties`:\
25 | `authorizer.class.name=org.openpolicyagent.kafka.OpaAuthorizer`
26 |
27 |
28 | The plugin supports the following properties:
29 |
30 | | Property Key | Example | Default | Description |
31 | | --- | --- | --- | --- |
32 | | `opa.authorizer.url` | `http://opa:8181/v1/data/kafka/authz/allow` | | URL of the OPA rule to query for authorization decisions. [required] |
33 | | `opa.authorizer.allow.on.error` | `false` | `false` | Fail-closed or fail-open if OPA call fails. |
34 | | `opa.authorizer.cache.initial.capacity` | `5000` | `5000` | Initial decision cache size. |
35 | | `opa.authorizer.cache.maximum.size` | `50000` | `50000` | Max decision cache size. |
36 | | `opa.authorizer.cache.expire.after.seconds` | `3600` | `3600` | Decision cache expiry in seconds. |
37 | | `opa.authorizer.metrics.enabled` | `true` | `false` | Whether or not to expose JMX metrics for monitoring. |
38 | | `super.users` | `User:alice;User:bob` | | Super users which are always allowed. |
39 | | `opa.authorizer.truststore.path` | `/path/to/mytruststore.p12` | | Path to the PKCS12 truststore for HTTPS requests to OPA. |
40 | | `opa.authorizer.truststore.password` | `ichangedit` | `changeit` | Password for the truststore. |
41 | | `opa.authorizer.truststore.type` | `PKCS12`, `JKS` or whatever your JVM supports | `PKCS12` | Type of the truststore. |
42 |
43 | ## Usage
44 |
45 | Example structure of input data provided from opa-kafka-plugin to Open Policy Agent.
46 | ```json
47 | {
48 | "action": {
49 | "logIfAllowed": true,
50 | "logIfDenied": true,
51 | "operation": "DESCRIBE",
52 | "resourcePattern": {
53 | "name": "alice-topic",
54 | "patternType": "LITERAL",
55 | "resourceType": "TOPIC",
56 | "unknown": false
57 | },
58 | "resourceReferenceCount": 1
59 | },
60 | "requestContext": {
61 | "clientAddress": "192.168.64.1",
62 | "clientInformation": {
63 | "softwareName": "unknown",
64 | "softwareVersion": "unknown"
65 | },
66 | "connectionId": "192.168.64.4:9092-192.168.64.1:58864-0",
67 | "header": {
68 | "data": {
69 | "clientId": "rdkafka",
70 | "correlationId": 5,
71 | "requestApiKey": 3,
72 | "requestApiVersion": 2
73 | },
74 | "headerVersion": 1
75 | },
76 | "listenerName": "SASL_PLAINTEXT",
77 | "principal": {
78 | "name": "alice-consumer",
79 | "principalType": "User"
80 | },
81 | "securityProtocol": "SASL_PLAINTEXT"
82 | }
83 | }
84 | ```
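
Policies reference these fields under `input`. As a small sketch in the same `kafka.authz` package, the rule below would let principals following the sample policy's `-consumer` naming convention read from topics (the suffix check is an assumption borrowed from that sample, not something the plugin enforces):

```rego
package kafka.authz

# Allow principals named "<something>-consumer" to read from topics.
allow if {
    input.action.operation == "READ"
    input.action.resourcePattern.resourceType == "TOPIC"
    endswith(input.requestContext.principal.name, "-consumer")
}
```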
85 |
86 | The following table summarizes the supported resource types and operation names.
87 |
88 | | `input.action.resourcePattern.resourceType` | `input.action.operation` |
89 | | --- | --- |
90 | | `CLUSTER` | `CLUSTER_ACTION` |
91 | | `CLUSTER` | `CREATE` |
92 | | `CLUSTER` | `DESCRIBE` |
93 | | `GROUP` | `READ` |
94 | | `GROUP` | `DESCRIBE` |
95 | | `TOPIC` | `CREATE` |
96 | | `TOPIC` | `ALTER` |
97 | | `TOPIC` | `DELETE` |
98 | | `TOPIC` | `DESCRIBE` |
99 | | `TOPIC` | `READ` |
100 | | `TOPIC` | `WRITE` |
101 | | `TRANSACTIONAL_ID` | `DESCRIBE` |
102 | | `TRANSACTIONAL_ID` | `WRITE` |
103 |
104 | These are handled by the method _authorizeAction_ and passed to OPA as an _action_ that identifies
105 | the accessed resource and the performed operation. _patternType_ is always _LITERAL_.
106 |
107 | Creating a topic first checks CLUSTER + CREATE. If that is denied, TOPIC with the topic's name + CREATE is checked.
108 |
109 | When doing an idempotent write to a topic, and the first request for operation=IDEMPOTENT_WRITE on resourceType=CLUSTER is denied,
110 | Kafka calls the method _authorizeByResourceType_ to check whether the user has the right to write to any topic at all.
111 | If so, Kafka's ACL implementation grants the idempotent write. To allow a similar check in OPA,
112 | the request is mapped to an _action_ with _patternType=PREFIXED_, _resourceType=TOPIC_, and _name=""_:
113 | ```json
114 | {
115 | "action": {
116 | "logIfAllowed": true,
117 | "logIfDenied": true,
118 | "operation": "DESCRIBE",
119 | "resourcePattern": {
120 | "name": "",
121 | "patternType": "PREFIXED",
122 | "resourceType": "TOPIC",
123 | "unknown": false
124 | },
125 | "resourceReferenceCount": 1
126 | },
127 | ...
128 | }
129 | ```
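
A policy can grant this by matching the synthetic action directly. A minimal sketch, again assuming the sample policy's `-producer` naming convention (the `endswith` check is illustrative):

```rego
package kafka.authz

# Grant IDEMPOTENT_WRITE checks - which the plugin maps to PREFIXED/TOPIC
# with an empty name - to any producer principal.
allow if {
    input.action.resourcePattern.patternType == "PREFIXED"
    input.action.resourcePattern.resourceType == "TOPIC"
    input.action.resourcePattern.name == ""
    endswith(input.requestContext.principal.name, "-producer")
}
```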
130 |
131 | It should be possible to use all resource types and operations described in the Kafka API docs:
132 | https://kafka.apache.org/24/javadoc/org/apache/kafka/common/acl/AclOperation.html
133 | https://kafka.apache.org/24/javadoc/org/apache/kafka/common/resource/ResourceType.html
134 |
135 | ### Security protocols
136 |
137 | | Protocol | Description |
138 | |---|---|
139 | | `PLAINTEXT` | Unauthenticated, non-encrypted channel |
140 | | `SASL_PLAINTEXT` | SASL-authenticated, non-encrypted channel |
141 | | `SASL_SSL` | SASL-authenticated, SSL channel |
142 | | `SSL` | SSL channel |
143 |
144 | More info:
145 |
146 | https://kafka.apache.org/24/javadoc/org/apache/kafka/common/security/auth/SecurityProtocol.html
147 |
148 | ### Policy sample
149 |
150 | With the [sample policy rego](src/main/rego/README.md) you will, out of the box, get
151 | a structure where an "owner" can have one user per type (`consumer`, `producer`, `mgmt`). The owner and the user type are separated by `-`.
152 | * Username structure: `<owner>-<type>`
153 | * Topic name structure: `<owner>.*`
154 |
155 | Example:
156 |
157 | User `alice-consumer` will be...
158 | * allowed to consume on topic `alice-topic1`
159 | * allowed to consume on topic `alice-topic-test`
160 | * denied to produce on any topic
161 | * denied to consume on topic `bob-topic`
162 |
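The ownership check behind this behaves roughly like the sketch below; the real `on_own_topic` rule lives in the sample policy, and this simplified version is for illustration only:

```rego
package kafka.authz

# Simplified sketch: a principal owns a topic when the topic name starts
# with the part of the username before the first "-".
on_own_topic(action) if {
    owner := split(input.requestContext.principal.name, "-")[0]
    startswith(action.resourcePattern.name, owner)
}
```
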
163 | [See sample rego](src/main/rego/README.md)
164 |
165 | ## Build from source
166 |
167 | Using gradle wrapper: `./gradlew clean test shadowJar`
168 |
169 | The resulting jar (with dependencies embedded) will be named `opa-authorizer-${VERSION}-all.jar` and stored in
170 | `build/libs`.
171 |
172 | ## Logging
173 |
174 | Set the log level with `log4j.logger.org.openpolicyagent=INFO` in `config/log4j.properties`.
175 | Use DEBUG or TRACE for debugging.
176 |
177 | In a busy Kafka cluster it can pay off to tune the cache, since each uncached decision results in a request to, and typically a log entry in, Open Policy Agent, especially if decision logs are turned on. If the policy isn't updated very often, caching aggressively is recommended to improve performance and reduce the amount of log entries.
178 |
179 | ## Monitoring
180 | The plugin exposes some metrics that can be useful in operation.
181 | * `opa.authorizer:type=authorization-result`
182 | * `authorized-request-count`: number of allowed requests
183 | * `unauthorized-request-count`: number of denied requests
184 | * `opa.authorizer:type=request-handle`
185 | * `request-to-opa-count`: number of HTTP requests sent to OPA to get authorization results
186 | * `cache-hit-rate`: cache hit rate (the cache miss rate is `1 - cache-hit-rate`)
187 | * `cache-usage-percentage`: the ratio of current cache size to maximum cache capacity
188 |
189 | ## Community
190 |
191 | For questions, discussions and announcements related to Styra products, services and open source projects, please join the Styra community on [Slack](https://communityinviter.com/apps/styracommunity/signup)!
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | plugins {
2 | id 'scala'
3 | id 'jacoco'
4 | id 'signing'
5 | id 'maven-publish'
6 | id 'com.github.johnrengelman.shadow' version '8.1.1'
7 | id 'com.bisnode.opa' version '0.3.2'
8 | }
9 |
10 | group 'org.openpolicyagent.kafka'
11 | version '1.5.1'
12 |
13 | java {
14 | sourceCompatibility = JavaVersion.VERSION_17
15 | targetCompatibility = JavaVersion.VERSION_17
16 | withJavadocJar()
17 | withSourcesJar()
18 | }
19 |
20 | repositories {
21 | mavenCentral()
22 | }
23 |
24 | // See versions used in Kafka here https://github.com/apache/kafka/blob/4.0.0/gradle/dependencies.gradle
25 | dependencies {
26 | compileOnly group: 'org.apache.kafka', name: 'kafka_2.13', version: '4.0.0'
27 | compileOnly group: 'com.typesafe.scala-logging', name: 'scala-logging_2.13', version: '3.9.5'
28 | implementation group: 'com.fasterxml.jackson.module', name: 'jackson-module-scala_2.13', version: '2.16.2'
29 | implementation group: 'com.github.ben-manes.caffeine', name: 'caffeine', version: '3.2.0'
30 |
31 | testImplementation group: 'org.scalatest', name: 'scalatest_2.13', version: '3.2.17'
32 | testImplementation group: 'org.scalatestplus', name: 'junit-4-13_2.13', version: '3.2.17.0'
33 | testImplementation group: 'junit', name: 'junit', version: '4.12'
34 | testImplementation group: 'org.apache.logging.log4j', name: 'log4j-slf4j-impl', version: '2.14.0'
35 | testImplementation group: 'org.apache.kafka', name: 'kafka_2.13', version: '4.0.0'
36 | testImplementation group: 'org.apache.kafka', name: 'kafka-server', version: '4.0.0'
37 | testImplementation group: 'com.typesafe.scala-logging', name: 'scala-logging_2.13', version: '3.9.5'
38 | }
39 |
40 | shadowJar {
41 | dependencies {
42 | exclude(dependency {
43 | !(it.moduleGroup in ['org.openpolicyagent.kafka', 'com.github.ben-manes.caffeine']
44 | || (it.moduleGroup == 'com.fasterxml.jackson.module' && it.moduleName == 'jackson-module-scala_2.13')
45 | || (it.moduleGroup == 'com.thoughtworks.paranamer' && it.moduleName == 'paranamer'))
46 | })
47 | }
48 | }
49 |
50 | jacocoTestReport {
51 | reports {
52 | xml.required = true
53 | html.required = false
54 | }
55 | }
56 |
57 | test {
58 | testLogging {
59 | events "passed", "skipped", "failed"
60 | }
61 | }
62 |
63 | publishing {
64 | publications {
65 | mavenJava(MavenPublication) {
66 | groupId = 'org.openpolicyagent.kafka'
67 | artifactId = 'opa-authorizer'
68 | version = '1.5.1'
69 |
70 | from components.java
71 |
72 | pom {
73 | name = 'Open Policy Agent plugin for Kafka authorization'
74 | description = 'Open Policy Agent (OPA) plugin for Kafka authorization.'
75 | url = 'https://github.com/anderseknert/opa-kafka-plugin'
76 | licenses {
77 | license {
78 | name = 'The Apache License, Version 2.0'
79 | url = 'https://www.apache.org/licenses/LICENSE-2.0.txt'
80 | }
81 | }
82 | developers {
83 | developer {
84 | name = 'Anders Eknert'
85 | email = 'anders@eknert.com'
86 | organization = 'Styra'
87 | organizationUrl = 'https://www.styra.com'
88 | }
89 | developer {
90 | name = 'Jakub Scholz'
91 | email = 'jakub@scholz.cz'
92 | organization = 'Red Hat'
93 | organizationUrl = 'https://www.redhat.com'
94 | }
95 | }
96 | scm {
97 | connection = 'scm:git:git://github.com/anderseknert/opa-kafka-plugin.git'
98 | developerConnection = 'scm:git:ssh://github.com/anderseknert/opa-kafka-plugin.git'
99 | url = 'https://github.com/anderseknert/opa-kafka-plugin.git'
100 | }
101 | }
102 | }
103 | }
104 | repositories {
105 | maven {
106 | name = 'OSSRH'
107 | credentials {
108 | username = findProperty('ossrhUsername')
109 | password = findProperty('ossrhPassword')
110 | }
111 | def releasesRepoUrl = 'https://s01.oss.sonatype.org/service/local/staging/deploy/maven2/'
112 | def snapshotsRepoUrl = 'https://s01.oss.sonatype.org/content/repositories/snapshots'
113 | url = version.endsWith('SNAPSHOT') ? snapshotsRepoUrl : releasesRepoUrl
114 | }
115 | }
116 | }
117 |
118 | signing {
119 | def signingKey = findProperty("signingKey")
120 | def signingPassword = findProperty("signingPassword")
121 | useInMemoryPgpKeys(signingKey, signingPassword)
122 | sign publishing.publications.mavenJava
123 | }
124 |
125 | check.dependsOn(testRego, testRegoCoverage)
126 | test.dependsOn startOpa
127 | test.finalizedBy stopOpa
128 | test.outputs.upToDateWhen {
129 | // Consider alternatives to this: https://stackoverflow.com/a/52484259
130 | false
131 | }
132 |
--------------------------------------------------------------------------------
/example/README.md:
--------------------------------------------------------------------------------
1 | # Open Policy Agent and Kafka with Docker Compose
2 |
3 | Example code for running Kafka with client certificates for authentication, and using OPA for authorization decisions.
4 |
5 | ## Setup
6 |
7 | 1. Build the OPA Kafka authorizer plugin. From the project root directory, run: `./gradlew shadowJar`.
8 | 2. Build an OPA bundle. From this directory, run: `opa build --bundle --output policy/bundle.tar.gz ../src/main/rego/`
9 | 3. Run the `create_cert.sh` script to create server and client certificates. These will be found in the `cert` directory.
10 | 4. `docker compose up`
11 |
12 | ## Updating policy
13 |
14 | Simply rebuild the policy bundle: `opa build --bundle --output policy/bundle.tar.gz ../src/main/rego/`
15 |
16 | ## Querying Kafka
17 |
18 | Three different users (represented by client certificates) are created by the `create_cert.sh` script:
19 |
20 | * `alice-mgmt` - can produce to and consume from any topic named `alice-*`
21 | * `alice-producer` - can produce to any topic named `alice-*`
22 | * `alice-consumer` - can consume from any topic named `alice-*`
23 |
24 | The following example commands use the CLI client tools provided with Kafka,
25 | and assume the Kafka root directory as the current working directory.
26 |
27 | ### Producing to a topic
28 |
29 | Using `alice-mgmt` or `alice-producer`:
30 |
31 | ```shell
32 | bin/kafka-console-producer.sh --bootstrap-server localhost:9093 --topic alice-topic1 --producer.config path/to/cert/client/alice-mgmt.properties
33 | > My first message
34 | > My second message
35 | ...
36 | Ctrl+c
37 | ```
38 |
39 | `alice-consumer` should however not be authorized:
40 |
41 | ```shell
42 | bin/kafka-console-producer.sh --bootstrap-server localhost:9093 --topic alice-topic1 --producer.config path/to/cert/client/alice-consumer.properties
43 | > My first message
44 | >[2021-12-01 09:43:45,437] ERROR Error when sending message to topic alice-topic1 with key: null, value: 8 bytes with error: (org.apache.kafka.clients.producer.internals.ErrorLoggingCallback)
45 | org.apache.kafka.common.errors.TopicAuthorizationException: Not authorized to access topics: [alice-topic1]
47 | ```
48 |
49 | ### Consuming from a topic
50 |
51 | Using `alice-mgmt` or `alice-consumer`:
52 |
53 | ```shell
54 | bin/kafka-console-consumer.sh --bootstrap-server localhost:9093 --topic alice-topic1 --consumer.config path/to/cert/client/alice-consumer.properties --from-beginning
55 | My first message
56 | My second message
57 | ...
58 | Ctrl+c
59 |
60 | Processed a total of 4 messages
61 | ```
62 |
63 | `alice-producer` should however not be authorized:
64 |
65 | ```shell
66 | bin/kafka-console-consumer.sh --bootstrap-server localhost:9093 --topic alice-topic1 --consumer.config path/to/cert/client/alice-producer.properties --from-beginning
67 | org.apache.kafka.common.errors.TopicAuthorizationException: Not authorized to access topics: [alice-topic1]
71 | ```
--------------------------------------------------------------------------------
/example/clear_cert.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | rm -rf cert
4 | mkdir cert
5 | touch cert/.gitkeep
6 |
--------------------------------------------------------------------------------
/example/create_cert.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | SCRIPT_DIR=$(cd $(dirname "${BASH_SOURCE[0]}") && pwd)
4 |
5 | mkdir -p cert/ca
6 | mkdir -p cert/server
7 | mkdir -p cert/client
8 |
9 | # Create CA certificate
10 | openssl req -new -x509 -days 3650 -keyout cert/ca/ca.key -out cert/ca/ca.crt -subj "/C=SE/L=Stockholm/CN=Kafka CA" -passout pass:1234
11 |
12 | # Server certificate
13 | keytool -genkey -keystore cert/server/server.keystore -alias localhost -dname CN=localhost -keyalg RSA -validity 3650 -ext san=dns:localhost -storepass 123456
14 | keytool -certreq -keystore cert/server/server.keystore -alias localhost -file cert/server/server.unsigned.crt -storepass 123456
15 | openssl x509 -req -sha256 -CA cert/ca/ca.crt -CAkey cert/ca/ca.key -in cert/server/server.unsigned.crt -out cert/server/server.crt -days 3650 -CAcreateserial -passin pass:1234
16 |
17 | # Broker truststore
18 | keytool -import -keystore cert/server/server.truststore -alias ca -file cert/ca/ca.crt -storepass 123456 -noprompt
19 |
20 | # Broker keystore
21 | keytool -import -file cert/ca/ca.crt -keystore cert/server/server.keystore -alias ca -storepass 123456 -noprompt
22 | keytool -import -file cert/server/server.crt -keystore cert/server/server.keystore -alias localhost -storepass 123456 -noprompt
23 |
24 | echo "123456" > cert/server/credentials.txt
25 |
26 | # Client truststore
27 | keytool -import -file cert/ca/ca.crt -keystore cert/client/client.truststore -alias ca -storepass 123456 -noprompt
28 |
29 | declare -a clients=("alice-mgmt" "alice-producer" "alice-consumer")
30 | for client in "${clients[@]}" ; do
31 | keytool -genkey -keystore cert/client/"${client}".keystore -alias "${client}" -dname "CN=${client}, OU=developers" -keyalg RSA -validity 3650 -storepass 123456
32 | keytool -certreq -keystore cert/client/"${client}".keystore -alias "${client}" -file cert/client/"${client}".unsigned.crt -storepass 123456
33 | openssl x509 -req -sha256 -CA cert/ca/ca.crt -CAkey cert/ca/ca.key -in cert/client/"${client}".unsigned.crt -out cert/client/"${client}".crt -days 3650 -CAcreateserial -passin pass:1234
34 | keytool -import -file cert/ca/ca.crt -keystore cert/client/"${client}".keystore -alias ca -storepass 123456 -noprompt
35 | keytool -import -file cert/client/"${client}".crt -keystore cert/client/"${client}".keystore -alias "${client}" -storepass 123456 -noprompt
36 |
37 | cat << EOF > cert/client/"${client}".properties
38 | security.protocol=SSL
39 | ssl.truststore.location=${SCRIPT_DIR}/cert/client/client.truststore
40 | ssl.truststore.password=123456
41 | ssl.keystore.location=${SCRIPT_DIR}/cert/client/${client}.keystore
42 | ssl.keystore.password=123456
43 | ssl.key.password=123456
44 | EOF
45 |
46 | rm cert/client/"${client}".unsigned.crt cert/client/"${client}".crt
47 | done
48 |
49 | # Cleanup
50 | rm cert/server/server.unsigned.crt cert/ca/ca.srl
51 |
--------------------------------------------------------------------------------
/example/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | nginx:
3 | image: nginx:1.21.4
4 | volumes:
5 | - "./policy:/usr/share/nginx/html"
6 | ports:
7 | - "80:80"
8 | opa:
9 | image: openpolicyagent/opa:1.2.0
10 | ports:
11 | - "8181:8181"
12 | command:
13 | - "run"
14 | - "--server"
15 | - "--addr=0.0.0.0:8181"
16 | - "--set=decision_logs.console=true"
17 | - "--set=services.authz.url=http://nginx"
18 | - "--set=bundles.authz.service=authz"
19 | - "--set=bundles.authz.resource=bundle.tar.gz"
20 | depends_on:
21 | - nginx
22 | broker:
23 | # If experiencing hangs on darwin/arm64, explicitly setting the platform here seems to help
24 | # platform: linux/amd64
25 | image: apache/kafka:4.0.0
26 | ports:
27 | - "9093:9093"
28 | environment:
29 | CLASSPATH: "/plugin/*"
30 | KAFKA_AUTHORIZER_CLASS_NAME: org.openpolicyagent.kafka.OpaAuthorizer
31 | KAFKA_OPA_AUTHORIZER_URL: http://opa:8181/v1/data/kafka/authz/allow
32 | KAFKA_OPA_AUTHORIZER_CACHE_EXPIRE_AFTER_SECONDS: 10 # For development only
33 | KAFKA_NODE_ID: 1
34 | KAFKA_PROCESS_ROLES: broker,controller
35 | KAFKA_LISTENERS: CONTROLLER://broker:9092,SSL://broker:9093
36 | KAFKA_ADVERTISED_LISTENERS: SSL://localhost:9093
37 | KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
38 | KAFKA_INTER_BROKER_LISTENER_NAME: SSL
39 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,SSL:SSL
40 | KAFKA_CONTROLLER_QUORUM_VOTERS: 1@broker:9092
41 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
42 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
43 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
44 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
45 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
46 | KAFKA_SSL_KEYSTORE_FILENAME: server.keystore
47 | KAFKA_SSL_KEYSTORE_CREDENTIALS: credentials.txt
48 | KAFKA_SSL_KEY_CREDENTIALS: credentials.txt
49 | KAFKA_SSL_TRUSTSTORE_FILENAME: server.truststore
50 | KAFKA_SSL_TRUSTSTORE_CREDENTIALS: credentials.txt
51 | KAFKA_SSL_CLIENT_AUTH: required
52 | volumes:
53 | - "../build/libs:/plugin"
54 | - "./cert/server:/etc/kafka/secrets"
55 | depends_on:
56 | - opa
57 |
--------------------------------------------------------------------------------
/example/opa_tutorial/README.md:
--------------------------------------------------------------------------------
1 | # OPA Tutorial Companion Scripts
2 |
3 | This directory contains companion scripts for the
4 | [OPA Kafka tutorial](https://www.openpolicyagent.org/docs/latest/kafka-authorization/) in the OPA docs.
5 |
6 | The `create_cert.sh` script will create a server certificate for TLS, along with four client certificates representing
7 | the four different users used in the tutorial, namely:
8 |
9 | * `anon_producer`
10 | * `anon_consumer`
11 | * `pii_consumer`
12 | * `fanout_producer`
13 |
14 | These certificates will be stored in the `cert` directory, which is automatically mounted into the Kafka container by
15 | the Docker Compose file.
16 |
--------------------------------------------------------------------------------
/example/opa_tutorial/create_cert.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | mkdir -p cert/ca
4 | mkdir -p cert/server
5 | mkdir -p cert/client
6 |
7 | # Create CA certificate
8 | openssl req -new -x509 -days 3650 -keyout cert/ca/ca.key -out cert/ca/ca.crt -subj "/C=SE/L=Stockholm/CN=Kafka CA" -passout pass:1234
9 |
10 | # Server certificate
11 | keytool -genkey -keystore cert/server/server.keystore -alias broker -dname CN=broker -keyalg RSA -validity 3650 -ext san=dns:broker -storepass 123456
12 | keytool -certreq -keystore cert/server/server.keystore -alias broker -file cert/server/server.unsigned.crt -storepass 123456
13 | openssl x509 -req -sha256 -CA cert/ca/ca.crt -CAkey cert/ca/ca.key -in cert/server/server.unsigned.crt -out cert/server/server.crt -days 3650 -CAcreateserial -passin pass:1234
14 |
15 | # Broker truststore
16 | keytool -import -keystore cert/server/server.truststore -alias ca -file cert/ca/ca.crt -storepass 123456 -noprompt
17 |
18 | # Broker keystore
19 | keytool -import -file cert/ca/ca.crt -keystore cert/server/server.keystore -alias ca -storepass 123456 -noprompt
20 | keytool -import -file cert/server/server.crt -keystore cert/server/server.keystore -alias broker -storepass 123456 -noprompt
21 |
22 | echo "123456" > cert/server/credentials.txt
23 |
24 | # Client truststore
25 | keytool -import -file cert/ca/ca.crt -keystore cert/client/client.truststore -alias ca -storepass 123456 -noprompt
26 |
27 | declare -a clients=("pii_consumer" "anon_producer" "anon_consumer" "fanout_producer")
28 | for client in "${clients[@]}" ; do
29 | keytool -genkey -keystore cert/client/"${client}".keystore -alias "${client}" -dname "CN=${client}, OU=Developers" -keyalg RSA -validity 3650 -storepass 123456
30 | keytool -certreq -keystore cert/client/"${client}".keystore -alias "${client}" -file cert/client/"${client}".unsigned.crt -storepass 123456
31 | openssl x509 -req -sha256 -CA cert/ca/ca.crt -CAkey cert/ca/ca.key -in cert/client/"${client}".unsigned.crt -out cert/client/"${client}".crt -days 3650 -CAcreateserial -passin pass:1234
32 | keytool -import -file cert/ca/ca.crt -keystore cert/client/"${client}".keystore -alias ca -storepass 123456 -noprompt
33 | keytool -import -file cert/client/"${client}".crt -keystore cert/client/"${client}".keystore -alias "${client}" -storepass 123456 -noprompt
34 |
35 | cat << EOF > cert/client/"${client}".properties
36 | security.protocol=SSL
37 | ssl.truststore.location=/tmp/client/client.truststore
38 | ssl.truststore.password=123456
39 | ssl.keystore.location=/tmp/client/${client}.keystore
40 | ssl.keystore.password=123456
41 | ssl.key.password=123456
42 | EOF
43 |
44 | rm cert/client/"${client}".unsigned.crt cert/client/"${client}".crt
45 | done
46 |
47 | # Cleanup
48 | rm cert/server/server.unsigned.crt cert/ca/ca.srl
49 |
--------------------------------------------------------------------------------
/example/opa_tutorial/docker-compose.yaml:
--------------------------------------------------------------------------------
1 | services:
2 | nginx:
3 | image: nginx:1.21.4
4 | volumes:
5 | - "./bundles:/usr/share/nginx/html"
6 | ports:
7 | - "80:80"
8 | opa:
9 | image: openpolicyagent/opa:latest-rootless
10 | ports:
11 | - "8181:8181"
12 | command:
13 | - "run"
14 | - "--server"
15 | - "--set=decision_logs.console=true"
16 | - "--set=services.authz.url=http://nginx"
17 | - "--set=bundles.authz.service=authz"
18 | - "--set=bundles.authz.resource=bundle.tar.gz"
19 | depends_on:
20 | - nginx
21 | broker:
22 | image: confluentinc/cp-kafka:6.2.1
23 | ports:
24 | - "9093:9093"
25 | environment:
26 | CLASSPATH: "/plugin/*"
27 | KAFKA_AUTHORIZER_CLASS_NAME: org.openpolicyagent.kafka.OpaAuthorizer
28 | KAFKA_OPA_AUTHORIZER_URL: http://opa:8181/v1/data/kafka/authz/allow
29 | KAFKA_OPA_AUTHORIZER_CACHE_EXPIRE_AFTER_SECONDS: 10 # For development only
30 | KAFKA_NODE_ID: 1
31 | KAFKA_PROCESS_ROLES: broker,controller
32 | KAFKA_LISTENERS: CONTROLLER://broker:9092,SSL://broker:9093
33 | KAFKA_ADVERTISED_LISTENERS: SSL://localhost:9093
34 | KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
35 | KAFKA_INTER_BROKER_LISTENER_NAME: SSL
36 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,SSL:SSL
37 | KAFKA_CONTROLLER_QUORUM_VOTERS: 1@broker:9092
38 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
39 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
40 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
41 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
42 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
43 | KAFKA_SSL_KEYSTORE_FILENAME: server.keystore
44 | KAFKA_SSL_KEYSTORE_CREDENTIALS: credentials.txt
45 | KAFKA_SSL_KEY_CREDENTIALS: credentials.txt
46 | KAFKA_SSL_TRUSTSTORE_FILENAME: server.truststore
47 | KAFKA_SSL_TRUSTSTORE_CREDENTIALS: credentials.txt
48 | KAFKA_SSL_CLIENT_AUTH: required
49 | volumes:
50 | - "./plugin:/plugin"
51 | - "./cert/server:/etc/kafka/secrets"
52 | depends_on:
53 | - opa
--------------------------------------------------------------------------------
/example/policy/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/StyraInc/opa-kafka-plugin/5f457096293e3ae1c881647caaf78bdefb66f1ba/example/policy/.gitkeep
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/StyraInc/opa-kafka-plugin/5f457096293e3ae1c881647caaf78bdefb66f1ba/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.5-bin.zip
4 | zipStoreBase=GRADLE_USER_HOME
5 | zipStorePath=wrapper/dists
6 |
--------------------------------------------------------------------------------
/gradlew:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env sh
2 |
3 | #
4 | # Copyright 2015 the original author or authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | ##
21 | ## Gradle start up script for UN*X
22 | ##
23 | ##############################################################################
24 |
25 | # Attempt to set APP_HOME
26 | # Resolve links: $0 may be a link
27 | PRG="$0"
28 | # Need this for relative symlinks.
29 | while [ -h "$PRG" ] ; do
30 | ls=`ls -ld "$PRG"`
31 | link=`expr "$ls" : '.*-> \(.*\)$'`
32 | if expr "$link" : '/.*' > /dev/null; then
33 | PRG="$link"
34 | else
35 | PRG=`dirname "$PRG"`"/$link"
36 | fi
37 | done
38 | SAVED="`pwd`"
39 | cd "`dirname \"$PRG\"`/" >/dev/null
40 | APP_HOME="`pwd -P`"
41 | cd "$SAVED" >/dev/null
42 |
43 | APP_NAME="Gradle"
44 | APP_BASE_NAME=`basename "$0"`
45 |
46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
48 |
49 | # Use the maximum available, or set MAX_FD != -1 to use that value.
50 | MAX_FD="maximum"
51 |
52 | warn () {
53 | echo "$*"
54 | }
55 |
56 | die () {
57 | echo
58 | echo "$*"
59 | echo
60 | exit 1
61 | }
62 |
63 | # OS specific support (must be 'true' or 'false').
64 | cygwin=false
65 | msys=false
66 | darwin=false
67 | nonstop=false
68 | case "`uname`" in
69 | CYGWIN* )
70 | cygwin=true
71 | ;;
72 | Darwin* )
73 | darwin=true
74 | ;;
75 | MINGW* )
76 | msys=true
77 | ;;
78 | NONSTOP* )
79 | nonstop=true
80 | ;;
81 | esac
82 |
83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
84 |
85 |
86 | # Determine the Java command to use to start the JVM.
87 | if [ -n "$JAVA_HOME" ] ; then
88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
89 | # IBM's JDK on AIX uses strange locations for the executables
90 | JAVACMD="$JAVA_HOME/jre/sh/java"
91 | else
92 | JAVACMD="$JAVA_HOME/bin/java"
93 | fi
94 | if [ ! -x "$JAVACMD" ] ; then
95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
96 |
97 | Please set the JAVA_HOME variable in your environment to match the
98 | location of your Java installation."
99 | fi
100 | else
101 | JAVACMD="java"
102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
103 |
104 | Please set the JAVA_HOME variable in your environment to match the
105 | location of your Java installation."
106 | fi
107 |
108 | # Increase the maximum file descriptors if we can.
109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then
110 | MAX_FD_LIMIT=`ulimit -H -n`
111 | if [ $? -eq 0 ] ; then
112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then
113 | MAX_FD="$MAX_FD_LIMIT"
114 | fi
115 | ulimit -n $MAX_FD
116 | if [ $? -ne 0 ] ; then
117 | warn "Could not set maximum file descriptor limit: $MAX_FD"
118 | fi
119 | else
120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT"
121 | fi
122 | fi
123 |
124 | # For Darwin, add options to specify how the application appears in the dock
125 | if $darwin; then
126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\""
127 | fi
128 |
129 | # For Cygwin or MSYS, switch paths to Windows format before running java
130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then
131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"`
132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"`
133 |
134 | JAVACMD=`cygpath --unix "$JAVACMD"`
135 |
136 | # We build the pattern for arguments to be converted via cygpath
137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null`
138 | SEP=""
139 | for dir in $ROOTDIRSRAW ; do
140 | ROOTDIRS="$ROOTDIRS$SEP$dir"
141 | SEP="|"
142 | done
143 | OURCYGPATTERN="(^($ROOTDIRS))"
144 | # Add a user-defined pattern to the cygpath arguments
145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then
146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)"
147 | fi
148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
149 | i=0
150 | for arg in "$@" ; do
151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -`
152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option
153 |
154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition
155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"`
156 | else
157 | eval `echo args$i`="\"$arg\""
158 | fi
159 | i=`expr $i + 1`
160 | done
161 | case $i in
162 | 0) set -- ;;
163 | 1) set -- "$args0" ;;
164 | 2) set -- "$args0" "$args1" ;;
165 | 3) set -- "$args0" "$args1" "$args2" ;;
166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;;
167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;;
168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;;
169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;;
170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;;
171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;;
172 | esac
173 | fi
174 |
175 | # Escape application args
176 | save () {
177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done
178 | echo " "
179 | }
180 | APP_ARGS=`save "$@"`
181 |
182 | # Collect all arguments for the java command, following the shell quoting and substitution rules
183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS"
184 |
185 | exec "$JAVACMD" "$@"
186 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%" == "" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%" == "" set DIRNAME=.
29 | set APP_BASE_NAME=%~n0
30 | set APP_HOME=%DIRNAME%
31 |
32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
34 |
35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
37 |
38 | @rem Find java.exe
39 | if defined JAVA_HOME goto findJavaFromJavaHome
40 |
41 | set JAVA_EXE=java.exe
42 | %JAVA_EXE% -version >NUL 2>&1
43 | if "%ERRORLEVEL%" == "0" goto execute
44 |
45 | echo.
46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
47 | echo.
48 | echo Please set the JAVA_HOME variable in your environment to match the
49 | echo location of your Java installation.
50 |
51 | goto fail
52 |
53 | :findJavaFromJavaHome
54 | set JAVA_HOME=%JAVA_HOME:"=%
55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
56 |
57 | if exist "%JAVA_EXE%" goto execute
58 |
59 | echo.
60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
61 | echo.
62 | echo Please set the JAVA_HOME variable in your environment to match the
63 | echo location of your Java installation.
64 |
65 | goto fail
66 |
67 | :execute
68 | @rem Setup the command line
69 |
70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
71 |
72 |
73 | @rem Execute Gradle
74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
75 |
76 | :end
77 | @rem End local scope for the variables with windows NT shell
78 | if "%ERRORLEVEL%"=="0" goto mainEnd
79 |
80 | :fail
81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
82 | rem the _cmd.exe /c_ return code!
83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
84 | exit /b 1
85 |
86 | :mainEnd
87 | if "%OS%"=="Windows_NT" endlocal
88 |
89 | :omega
90 |
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | rootProject.name = 'opa-authorizer'
2 |
--------------------------------------------------------------------------------
/src/main/rego/README.md:
--------------------------------------------------------------------------------
1 | # Policy sample
2 |
3 | This sample assumes that usernames are prefixed with the owner name and
4 | suffixed with the type of user (`-consumer`, `-producer`, `-mgmt`), and that
5 | inter-broker communication is unauthenticated (or over SSL as user `localhost`).
6 | 
7 | For example, only users prefixed with `alice` will be able to produce to or
8 | consume from topic `alice-topic1`, depending on the user type.
9 | 
10 | See the [policy tests](../../test/rego/README.md).
11 |
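12 | ## Example
13 | 
14 | As a rough sketch (assuming the sample policy is loaded under `kafka.authz`,
15 | mirroring the tests in `src/test/rego`), the following Rego test illustrates
16 | a request that the policy allows:
17 | 
18 | ```rego
19 | package kafka.authz
20 | 
21 | # alice-consumer may READ topics prefixed with "alice"
22 | test_readme_example if {
23 | 	allow with input as {
24 | 		"requestContext": {"principal": {"name": "alice-consumer"}},
25 | 		"action": {
26 | 			"operation": "READ",
27 | 			"resourcePattern": {"name": "alice-topic1", "resourceType": "TOPIC"},
28 | 		},
29 | 	}
30 | }
31 | ```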
--------------------------------------------------------------------------------
/src/main/rego/policy.rego:
--------------------------------------------------------------------------------
1 | package kafka.authz
2 |
3 | # ----------------------------------------------------
4 | # Policies
5 | # ----------------------------------------------------
6 |
7 | default allow = false
8 |
9 | allow if {
10 | inter_broker_communication
11 | }
12 |
13 | allow if {
14 | consume(input.action)
15 | on_own_topic(input.action)
16 | as_consumer
17 | }
18 |
19 | allow if {
20 | produce(input.action)
21 | on_own_topic(input.action)
22 | as_producer
23 | }
24 |
25 | allow if {
26 | create(input.action)
27 | on_own_topic(input.action)
28 | }
29 |
30 | allow if {
31 | any_operation(input.action)
32 | on_own_topic(input.action)
33 | as_mgmt_user
34 | }
35 |
36 | allow if {
37 | input.action.operation == "READ"
38 | input.action.resourcePattern.resourceType == "GROUP"
39 | }
40 |
41 | allow if {
42 | describe(input.action)
43 | }
44 |
45 | allow if {
46 | idempotent_produce(input.action)
47 | }
48 |
49 | # ----------------------------------------------------
50 | # Functions
51 | # ----------------------------------------------------
52 |
53 | inter_broker_communication if {
54 | input.requestContext.principal.name == "ANONYMOUS"
55 | }
56 |
57 | inter_broker_communication if {
58 | input.requestContext.securityProtocol == "SSL"
59 | input.requestContext.principal.principalType == "User"
60 | username == "localhost"
61 | }
62 |
63 | consume(action) if {
64 | action.operation == "READ"
65 | }
66 |
67 | produce(action) if {
68 | action.operation == "WRITE"
69 | }
70 |
71 | idempotent_produce(action) if {
72 | action.operation == "IDEMPOTENT_WRITE"
73 | }
74 |
75 | create(action) if {
76 | action.operation == "CREATE"
77 | }
78 |
79 | describe(action) if {
80 | action.operation == "DESCRIBE"
81 | }
82 |
83 | any_operation(action) if {
84 | action.operation in ["READ", "WRITE", "CREATE", "ALTER", "DESCRIBE", "DELETE"]
85 | }
86 |
87 | as_consumer if {
88 | regex.match(".*-consumer", username)
89 | }
90 |
91 | as_producer if {
92 | regex.match(".*-producer", username)
93 | }
94 |
95 | as_mgmt_user if {
96 | regex.match(".*-mgmt", username)
97 | }
98 |
99 | on_own_topic(action) if {
100 | 	owner := trim_suffix(username, "-consumer")
101 | regex.match(owner, action.resourcePattern.name)
102 | }
103 |
104 | on_own_topic(action) if {
105 | 	owner := trim_suffix(username, "-producer")
106 | regex.match(owner, action.resourcePattern.name)
107 | }
108 |
109 | on_own_topic(action) if {
110 | 	owner := trim_suffix(username, "-mgmt")
111 | regex.match(owner, action.resourcePattern.name)
112 | }
113 |
114 | username := cn_parts[0] if {
115 | name := input.requestContext.principal.name
116 | startswith(name, "CN=")
117 | parsed := parse_user(name)
118 | cn_parts := split(parsed.CN, ".")
119 | }
120 | # If client certificates aren't used for authentication
121 | else := input.requestContext.principal.name if {
122 | true
123 | }
124 |
125 | parse_user(user) := {key: value |
126 | parts := split(user, ",")
127 | [key, value] := split(parts[_], "=")
128 | }
129 |
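130 | # Illustrative note (not evaluated as policy): for a TLS principal name of
131 | # "CN=alice-mgmt,O=AcmeCorp", parse_user yields {"CN": "alice-mgmt",
132 | # "O": "AcmeCorp"}, and username resolves to "alice-mgmt", the first
133 | # dot-separated segment of the CN.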
--------------------------------------------------------------------------------
/src/main/scala/org/openpolicyagent/kafka/MetricsLabel.scala:
--------------------------------------------------------------------------------
1 | package org.openpolicyagent.kafka
2 |
3 | object MetricsLabel {
4 | val NAMESPACE = "opa.authorizer"
5 |
6 | val RESULT_GROUP = "authorization-result"
7 | val AUTHORIZED_REQUEST_COUNT = "authorized-request-count"
8 | val UNAUTHORIZED_REQUEST_COUNT = "unauthorized-request-count"
9 |
10 | val REQUEST_HANDLE_GROUP = "request-handle"
11 | val REQUEST_TO_OPA_COUNT = "request-to-opa-count"
12 | val CACHE_HIT_RATE = "cache-hit-rate"
13 | val CACHE_USAGE_PERCENTAGE = "cache-usage-percentage"
14 | }
15 |
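16 | // With opa.authorizer.metrics.enabled=true, these labels surface as JMX
17 | // MBeans such as "opa.authorizer:type=authorization-result" and
18 | // "opa.authorizer:type=request-handle" (see OpaAuthorizerSpec).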
--------------------------------------------------------------------------------
/src/main/scala/org/openpolicyagent/kafka/OpaAuthorizer.scala:
--------------------------------------------------------------------------------
1 | package org.openpolicyagent.kafka
2 |
3 | import com.fasterxml.jackson.core.JsonGenerator
4 | import com.fasterxml.jackson.databind.json.JsonMapper
5 | import com.fasterxml.jackson.databind.module.SimpleModule
6 | import com.fasterxml.jackson.databind.{JsonSerializer, SerializerProvider}
7 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
8 | import com.github.benmanes.caffeine.cache.Caffeine
9 | import com.typesafe.scalalogging.LazyLogging
10 | import org.apache.kafka.common.Endpoint
11 | import org.apache.kafka.common.acl.{AclBinding, AclBindingFilter, AclOperation}
12 | import org.apache.kafka.common.message.RequestHeaderData
13 | import org.apache.kafka.common.metrics.stats.{CumulativeCount, Value}
14 | import org.apache.kafka.common.metrics.{JmxReporter, KafkaMetricsContext, Metrics, MetricsContext}
15 | import org.apache.kafka.common.network.ClientInformation
16 | import org.apache.kafka.common.requests.{RequestContext, RequestHeader}
17 | import org.apache.kafka.common.resource.{PatternType, ResourcePattern, ResourceType}
18 | import org.apache.kafka.common.security.auth.KafkaPrincipal
19 | import org.apache.kafka.server.authorizer._
20 |
21 | import java.io.{File, FileInputStream, IOException}
22 | import java.net.http.HttpRequest.BodyPublishers
23 | import java.net.http.HttpResponse.BodyHandlers
24 | import java.net.http.{HttpClient, HttpRequest}
25 | import java.net.{URI, URL}
26 | import java.security.KeyStore
27 | import java.time.Duration.ofSeconds
28 | import java.util.concurrent._
29 | import java.util.function.{Function => JavaFunction}
30 | import javax.net.ssl.{KeyManagerFactory, SSLContext, TrustManagerFactory}
31 | import scala.jdk.CollectionConverters._
32 |
33 | //noinspection NotImplementedCode
34 | class OpaAuthorizer extends Authorizer with LazyLogging {
35 | private var config: Map[String, String] = Map.empty
36 | private lazy val opaUrl = new URL(config("opa.authorizer.url")).toURI
37 | private lazy val allowOnError = config.getOrElse("opa.authorizer.allow.on.error", "false").toBoolean
38 | private lazy val superUsers = config.getOrElse("super.users", "").split(";").toList
39 | private lazy val maxCacheCapacity = config.getOrElse("opa.authorizer.cache.maximum.size", "50000").toInt
40 | private lazy val trustStorePath = config.get("opa.authorizer.truststore.path")
41 | private lazy val trustStorePassword = config.get("opa.authorizer.truststore.password")
42 | private lazy val trustStoreType = config.get("opa.authorizer.truststore.type")
43 |
44 | private var metrics: Option[Metrics] = None
45 |
46 | private lazy val cache = Caffeine
47 | .newBuilder()
48 | .initialCapacity(config.getOrElse("opa.authorizer.cache.initial.capacity", "5000").toInt)
49 | .maximumSize(maxCacheCapacity)
50 | .expireAfterWrite(config.getOrElse("opa.authorizer.cache.expire.after.seconds", "3600").toInt, TimeUnit.SECONDS)
51 | .recordStats()
52 | .build[CacheableRequest, Boolean]
53 |
54 | override def authorize(
55 | requestContext: AuthorizableRequestContext,
56 | actions: java.util.List[Action]
57 | ): java.util.List[AuthorizationResult] =
58 | actions.asScala.map(action => authorizeAction(requestContext, action)).asJava
59 |
60 | override def configure(configs: java.util.Map[String, _]): Unit = {
61 | logger.debug(s"Call to configure() with config $configs")
62 | config = configs.asScala.view.mapValues(_.asInstanceOf[String]).toMap
63 |
64 | if (trustStorePath.isDefined) {
65 | logger.info(s"Enabling TLS truststore")
66 |
67 | if (trustStorePassword.isEmpty) {
68 |         logger.info("Property 'opa.authorizer.truststore.password' not set, using default")
69 | }
70 |
71 | try {
72 | val ks = KeyStore.getInstance(trustStoreType.getOrElse("PKCS12"))
73 |
74 | val inputStream = new FileInputStream(new File(trustStorePath.getOrElse("")))
75 | ks.load(inputStream, trustStorePassword.getOrElse("changeit").toArray)
76 |
77 |         inputStream.close()
78 |
79 |         val tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm)
80 | tmf.init(ks)
81 |
82 | val trustManager = tmf.getTrustManagers
83 |
84 | val sslContext = SSLContext.getInstance("TLS")
85 | sslContext.init(Array(), trustManager, null)
86 |
87 | // replaces the HttpClient initialized in AllowCallable
88 | AllowCallable.client = HttpClient.newBuilder.sslContext(sslContext).connectTimeout(ofSeconds(5)).build
89 | } catch {
90 |         case e: Throwable => logger.error("Failed to load truststore", e)
91 | }
92 | }
93 | }
94 |
95 |   // Not used for authorization decisions, but required by the Authorizer interface; could be extended to check OPA connectivity.
96 |   // Mirrors what the ACL authorizer does here: https://github.com/apache/kafka/blob/trunk/core/src/main/scala/kafka/security/authorizer/AclAuthorizer.scala#L185
97 | override def start(
98 | authorizerServerInfo: AuthorizerServerInfo
99 | ): java.util.Map[Endpoint, _ <: CompletionStage[Void]] = {
100 | maybeSetupMetrics(authorizerServerInfo.clusterResource().clusterId(), authorizerServerInfo.brokerId())
101 | authorizerServerInfo.endpoints.asScala
102 | .map { endpoint =>
103 | endpoint -> CompletableFuture.completedFuture[Void](null)
104 | }
105 | .toMap
106 | .asJava
107 | }
108 |
109 | private[kafka] def maybeSetupMetrics(clusterId: String, brokerId: Int): Unit = {
110 | val isEnabled = config.getOrElse("opa.authorizer.metrics.enabled", "false").toBoolean
111 | if (isEnabled) {
112 | metrics = Option(new Metrics())
113 | val jmxReporter = new JmxReporter()
114 | jmxReporter.contextChange(createMetricsContext(clusterId, brokerId))
115 | metrics.get.addReporter(jmxReporter)
116 |
117 | val authorizedRequestName =
118 | metrics.get.metricName(MetricsLabel.AUTHORIZED_REQUEST_COUNT, MetricsLabel.RESULT_GROUP)
119 | val authorizedRequestSensor = metrics.get.sensor(MetricsLabel.AUTHORIZED_REQUEST_COUNT)
120 | authorizedRequestSensor.add(authorizedRequestName, new CumulativeCount())
121 |
122 | val unauthorizedRequestName =
123 | metrics.get.metricName(MetricsLabel.UNAUTHORIZED_REQUEST_COUNT, MetricsLabel.RESULT_GROUP)
124 | val unauthorizedRequestSensor = metrics.get.sensor(MetricsLabel.UNAUTHORIZED_REQUEST_COUNT)
125 | unauthorizedRequestSensor.add(unauthorizedRequestName, new CumulativeCount())
126 |
127 | val requestToOPAName =
128 | metrics.get.metricName(MetricsLabel.REQUEST_TO_OPA_COUNT, MetricsLabel.REQUEST_HANDLE_GROUP)
129 | val requestToOPASensor = metrics.get.sensor(MetricsLabel.REQUEST_TO_OPA_COUNT)
130 | requestToOPASensor.add(requestToOPAName, new CumulativeCount())
131 |
132 | val cacheHitName = metrics.get.metricName(MetricsLabel.CACHE_HIT_RATE, MetricsLabel.REQUEST_HANDLE_GROUP)
133 | val cacheHitSensor = metrics.get.sensor(MetricsLabel.CACHE_HIT_RATE)
134 | cacheHitSensor.add(cacheHitName, new Value())
135 |
136 | val cacheUsageName =
137 | metrics.get.metricName(MetricsLabel.CACHE_USAGE_PERCENTAGE, MetricsLabel.REQUEST_HANDLE_GROUP)
138 | val cacheUsageSensor = metrics.get.sensor(MetricsLabel.CACHE_USAGE_PERCENTAGE)
139 | cacheUsageSensor.add(cacheUsageName, new Value())
140 |
141 | }
142 | }
143 |
144 | private def createMetricsContext(clusterId: String, brokerId: Int): MetricsContext = {
145 | val contextLabels = Map(
146 | "kafka.cluster.id" -> clusterId,
147 | "kafka.broker.id" -> brokerId.toString
148 | ).asJava
149 | val prefix = MetricsLabel.NAMESPACE
150 | new KafkaMetricsContext(prefix, contextLabels)
151 | }
152 |
153 | private[kafka] def getCache = cache
154 |
155 |   // The methods below need no implementation: ACL management is handled in OPA, not by this plugin
156 | override def close(): Unit = { }
157 | override def acls(acls: AclBindingFilter): java.lang.Iterable[AclBinding] = ???
158 |
159 | override def deleteAcls(
160 | requestContext: AuthorizableRequestContext,
161 | aclBindingFilters: java.util.List[AclBindingFilter]
162 | ): java.util.List[_ <: CompletionStage[AclDeleteResult]] = ???
163 |
164 | override def createAcls(
165 | requestContext: AuthorizableRequestContext,
166 | aclBindings: java.util.List[AclBinding]
167 | ): java.util.List[_ <: CompletionStage[AclCreateResult]] = ???
168 |
169 | private def authorizeAction(requestContext: AuthorizableRequestContext, action: Action): AuthorizationResult = {
170 | val resource = action.resourcePattern
171 | if (resource.patternType != PatternType.LITERAL) {
172 | throw new IllegalArgumentException("Only literal resources are supported. Got: " + resource.patternType)
173 | }
174 |
175 | val result = doAuthorize(requestContext, action)
176 |
177 | if(metrics.isDefined){
178 | metrics.get.sensor(MetricsLabel.CACHE_HIT_RATE).record(cache.stats().hitRate())
179 | metrics.get.sensor(MetricsLabel.CACHE_USAGE_PERCENTAGE).record(cache.estimatedSize() / maxCacheCapacity.toDouble)
180 | result match {
181 | case AuthorizationResult.DENIED => metrics.get.sensor(MetricsLabel.UNAUTHORIZED_REQUEST_COUNT).record()
182 | case AuthorizationResult.ALLOWED => metrics.get.sensor(MetricsLabel.AUTHORIZED_REQUEST_COUNT).record()
183 | }
184 | }
185 |
186 | result
187 | }
188 |
189 | override def authorizeByResourceType(
190 | requestContext: AuthorizableRequestContext,
191 | op: AclOperation,
192 | resourceType: ResourceType
193 | ): AuthorizationResult =
194 | doAuthorize(
195 | requestContext,
196 | new Action(op, new ResourcePattern(resourceType, "", PatternType.PREFIXED), 0, true, true)
197 | )
198 |
199 | private def doAuthorize(requestContext: AuthorizableRequestContext, action: Action) = {
200 | // ensure we compare identical classes
201 | val sessionPrincipal = requestContext.principal
202 | val principal =
203 | if (classOf[KafkaPrincipal] != sessionPrincipal.getClass)
204 | new KafkaPrincipal(sessionPrincipal.getPrincipalType, sessionPrincipal.getName)
205 | else
206 | sessionPrincipal
207 |
208 | val host = requestContext.clientAddress.getHostAddress
209 |
210 |     val cacheableRequest = CacheableRequest(principal, action, host)
211 |     val request = Request(Input(requestContext, action))
212 | 
213 |     def allowAccess =
214 |       try cache.get(cacheableRequest, new AllowCallable(request, opaUrl, allowOnError, metrics))
215 | catch {
216 | case e: Exception =>
217 | logger.warn(s"Exception in decision retrieval: ${e.getMessage}")
218 | logger.trace("Exception trace", e)
219 | allowOnError
220 | }
221 |
222 | // Evaluate if operation is allowed
223 | val authorized = isSuperUser(principal) || allowAccess
224 |
225 | if (authorized) AuthorizationResult.ALLOWED else AuthorizationResult.DENIED
226 | }
227 |
228 |   def isSuperUser(principal: KafkaPrincipal): Boolean = {
229 |     val isSuper = superUsers.contains(principal.toString)
230 |     if (isSuper) logger.trace(s"User $principal is super user")
231 |     isSuper
232 |   }
233 | }
234 |
235 | class ResourcePatternSerializer() extends JsonSerializer[ResourcePattern] {
236 | override def serialize(value: ResourcePattern, gen: JsonGenerator, provider: SerializerProvider): Unit = {
237 | gen.writeStartObject()
238 | gen.writeStringField("resourceType", value.resourceType().name())
239 | gen.writeStringField("name", value.name())
240 | gen.writeStringField("patternType", value.patternType().name())
241 | gen.writeBooleanField("unknown", value.isUnknown())
242 | gen.writeEndObject()
243 | }
244 | }
245 |
246 | class ActionSerializer() extends JsonSerializer[Action] {
247 | override def serialize(value: Action, gen: JsonGenerator, provider: SerializerProvider): Unit = {
248 | gen.writeStartObject()
249 | gen.writeObjectField("resourcePattern", value.resourcePattern())
250 | gen.writeStringField("operation", value.operation().name())
251 | gen.writeNumberField("resourceReferenceCount", value.resourceReferenceCount())
252 | gen.writeBooleanField("logIfAllowed", value.logIfAllowed())
253 | gen.writeBooleanField("logIfDenied", value.logIfDenied())
254 | gen.writeEndObject()
255 | }
256 | }
257 |
258 | class RequestContextSerializer() extends JsonSerializer[RequestContext] {
259 | override def serialize(value: RequestContext, gen: JsonGenerator, provider: SerializerProvider): Unit = {
260 | gen.writeStartObject()
261 | gen.writeStringField("clientAddress", value.clientAddress().toString)
262 | gen.writeObjectField("clientInformation", value.clientInformation)
263 | gen.writeStringField("connectionId", value.connectionId)
264 |     gen.writeObjectField("header", value.header)
265 | gen.writeStringField("listenerName", value.listenerName())
266 | gen.writeObjectField("principal", value.principal())
267 | gen.writeStringField("securityProtocol", value.securityProtocol().name())
268 | gen.writeEndObject()
269 | }
270 | }
271 |
272 | class ClientInformationSerializer() extends JsonSerializer[ClientInformation] {
273 | override def serialize(value: ClientInformation, gen: JsonGenerator, provider: SerializerProvider): Unit = {
274 | gen.writeStartObject()
275 | gen.writeStringField("softwareName", value.softwareName())
276 | gen.writeStringField("softwareVersion", value.softwareVersion())
277 | gen.writeEndObject()
278 | }
279 | }
280 |
281 | class KafkaPrincipalSerializer() extends JsonSerializer[KafkaPrincipal] {
282 | override def serialize(value: KafkaPrincipal, gen: JsonGenerator, provider: SerializerProvider): Unit = {
283 | gen.writeStartObject()
284 | gen.writeStringField("principalType", value.getPrincipalType())
285 | gen.writeStringField("name", value.getName())
286 | gen.writeEndObject()
287 | }
288 | }
289 |
290 | class RequestHeaderSerializer() extends JsonSerializer[RequestHeader] {
291 | override def serialize(value: RequestHeader, gen: JsonGenerator, provider: SerializerProvider): Unit = {
292 | gen.writeStartObject()
293 | gen.writeObjectField("name", value.data())
294 | // Jackson 2.10 does not support writeNumberField for shorts
295 | gen.writeFieldName("headerVersion")
296 | gen.writeNumber(value.headerVersion())
297 | gen.writeEndObject()
298 | }
299 | }
300 |
301 | class RequestHeaderDataSerializer() extends JsonSerializer[RequestHeaderData] {
302 | override def serialize(value: RequestHeaderData, gen: JsonGenerator, provider: SerializerProvider): Unit = {
303 | gen.writeStartObject()
304 | gen.writeStringField("clientId", value.clientId())
305 | gen.writeNumberField("correlationId", value.correlationId())
306 | gen.writeNumberField("requestApiKey", value.requestApiKey())
307 | gen.writeNumberField("requestApiVersion", value.requestApiVersion())
308 | gen.writeEndObject()
309 | }
310 | }
311 |
312 | object AllowCallable {
313 | private val requestSerializerModule = new SimpleModule()
314 | .addSerializer(classOf[ResourcePattern], new ResourcePatternSerializer)
315 | .addSerializer(classOf[Action], new ActionSerializer)
316 | .addSerializer(classOf[RequestContext], new RequestContextSerializer)
317 | .addSerializer(classOf[ClientInformation], new ClientInformationSerializer)
318 | .addSerializer(classOf[KafkaPrincipal], new KafkaPrincipalSerializer)
319 | .addSerializer(classOf[RequestHeader], new RequestHeaderSerializer)
320 | .addSerializer(classOf[RequestHeaderData], new RequestHeaderDataSerializer)
321 |
322 | private val objectMapper =
323 | JsonMapper.builder().addModule(requestSerializerModule).addModule(DefaultScalaModule).build()
324 |
325 |   // If a truststore is configured by the user, this HttpClient
326 |   // is replaced by OpaAuthorizer.configure()
327 | var client = HttpClient.newBuilder.connectTimeout(ofSeconds(5)).build
328 | }
329 |
330 | class AllowCallable(
331 | request: Request,
332 | opaUrl: URI,
333 | allowOnError: Boolean,
334 | metrics: Option[Metrics]
335 | ) extends JavaFunction[CacheableRequest, Boolean]
336 | with LazyLogging {
337 |
338 | override def apply(key: CacheableRequest): Boolean = {
339 | logger.debug(s"Cache miss, querying OPA for decision")
340 | val reqJson = AllowCallable.objectMapper.writeValueAsString(request)
341 | val requestBuilder = HttpRequest.newBuilder.timeout(ofSeconds(5)).header("Content-Type", "application/json")
342 | val req = requestBuilder.uri(opaUrl).POST(BodyPublishers.ofString(reqJson)).build
343 | logger.debug(s"Querying OPA with object: $reqJson")
344 | if(metrics.isDefined){
345 | metrics.get.sensor(MetricsLabel.REQUEST_TO_OPA_COUNT).record()
346 | }
347 | val resp = AllowCallable.client.send(req, BodyHandlers.ofString)
348 | logger.trace(s"Response code: ${resp.statusCode}, body: ${resp.body}")
349 |
350 | AllowCallable.objectMapper.readTree(resp.body()).at("/result").asBoolean
351 | }
352 | }
353 |
354 | case class Input(requestContext: AuthorizableRequestContext, action: Action)
355 | case class Request(input: Input)
356 | case class CacheableRequest(principal: KafkaPrincipal, action: Action, host: String)
357 |
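358 | // Example broker configuration (illustrative values; authorizer.class.name
359 | // selects the plugin, the remaining keys are read in configure() above or
360 | // by the cache builder):
361 | //
362 | //   authorizer.class.name=org.openpolicyagent.kafka.OpaAuthorizer
363 | //   opa.authorizer.url=http://localhost:8181/v1/data/kafka/authz/allow
364 | //   opa.authorizer.allow.on.error=false
365 | //   opa.authorizer.cache.initial.capacity=5000
366 | //   opa.authorizer.cache.maximum.size=50000
367 | //   opa.authorizer.cache.expire.after.seconds=3600
368 | //   opa.authorizer.metrics.enabled=false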
--------------------------------------------------------------------------------
/src/test/rego/README.md:
--------------------------------------------------------------------------------
1 | # Test rego
2 |
3 | Rego tests for the sample policy.
4 | 
5 | ## Manual run
6 | 
7 | `opa test ../../{test,main}/rego/`
8 |
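9 | Add `-v` for verbose per-test output:
10 | 
11 | `opa test -v ../../{test,main}/rego/`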
--------------------------------------------------------------------------------
/src/test/rego/policy_test.rego:
--------------------------------------------------------------------------------
1 | package kafka.authz
2 |
3 | # --------------------------------------------------
4 | # Positive test
5 | # --------------------------------------------------
6 |
7 | # Brokers
8 | test_inter_broker_communication if {
9 | allow with input.requestContext.principal.name as "ANONYMOUS"
10 | }
11 |
12 | # Consumers
13 | test_consume_own_topic_as_consumer if {
14 | allow with input.requestContext.principal.name as "alice-consumer"
15 | with input.action as {
16 | "operation": "READ",
17 | "resourcePattern": {
18 | "name": "alice-mytopic",
19 | "resourceType": "TOPIC",
20 | },
21 | }
22 | }
23 |
24 | test_create_own_topic_as_consumer if {
25 | allow with input.requestContext.principal.name as "alice-consumer"
26 | with input.action as {
27 | "operation": "CREATE",
28 | "resourcePattern": {
29 | "name": "alice-topic1",
30 | "resourceType": "TOPIC",
31 | },
32 | }
33 | }
34 |
35 | # Producers
36 | test_produce_own_topic_as_producer if {
37 | allow with input.requestContext.principal.name as "CN=alice-producer, OU=Developers"
38 | with input.action as {
39 | "operation": "WRITE",
40 | "resourcePattern": {
41 | "name": "alice-mytopic",
42 | "resourceType": "TOPIC",
43 | },
44 | }
45 | }
46 |
47 | test_create_own_topic_as_producer if {
48 | allow with input.requestContext.principal.name as "alice-producer"
49 | with input.action as {
50 | "operation": "CREATE",
51 | "resourcePattern": {
52 | "name": "alice-topic1",
53 | "resourceType": "TOPIC",
54 | },
55 | }
56 | }
57 |
58 | # Global access
59 | test_anyone_describe_some_topic if {
60 | allow with input.requestContext.principal.name as "alice-producer"
61 | with input.action as {
62 | "operation": "DESCRIBE",
63 | "resourcePattern": {
64 | "name": "some-mytopic",
65 | "resourceType": "TOPIC",
66 | },
67 | }
68 | }
69 |
70 | test_anyone_describe_own_topic if {
71 | allow with input.requestContext.principal.name as "alice-producer"
72 | with input.action as {
73 | "operation": "DESCRIBE",
74 | "resourcePattern": {
75 | "name": "alice-mytopic",
76 | "resourceType": "TOPIC",
77 | },
78 | }
79 | }
80 |
81 | # MGMT User tests
82 | test_mgmt_user_own_topic_read if {
83 | allow with input.requestContext.principal.name as "CN=alice-mgmt, O=AcmeCorp"
84 | with input.action as {
85 | "operation": "READ",
86 | "resourcePattern": {
87 | "name": "alice-topic1",
88 | "resourceType": "TOPIC",
89 | },
90 | }
91 | }
92 |
93 | test_mgmt_user_own_topic_write if {
94 | allow with input.requestContext.principal.name as "alice-mgmt"
95 | with input.action as {
96 | "operation": "WRITE",
97 | "resourcePattern": {
98 | "name": "alice-topic1",
99 | "resourceType": "TOPIC",
100 | },
101 | }
102 | }
103 |
104 | test_mgmt_user_own_topic_create if {
105 | allow with input.requestContext.principal.name as "alice-mgmt"
106 | with input.action as {
107 | "operation": "CREATE",
108 | "resourcePattern": {
109 | "name": "alice-topic1",
110 | "resourceType": "TOPIC",
111 | },
112 | }
113 | }
114 |
115 | test_mgmt_user_own_topic_delete if {
116 | allow with input.requestContext.principal.name as "alice-mgmt"
117 | with input.action as {
118 | "operation": "DELETE",
119 | "resourcePattern": {
120 | "name": "alice-topic1",
121 | "resourceType": "TOPIC",
122 | },
123 | }
124 | }
125 |
126 | test_mgmt_user_own_topic_describe if {
127 | allow with input.requestContext.principal.name as "alice-mgmt"
128 | with input.action as {
129 | "operation": "DESCRIBE",
130 | "resourcePattern": {
131 | "name": "alice-topic1",
132 | "resourceType": "TOPIC",
133 | },
134 | }
135 | }
136 |
137 | test_mgmt_user_own_topic_alter if {
138 | allow with input.requestContext.principal.name as "alice-mgmt"
139 | with input.action as {
140 | "operation": "ALTER",
141 | "resourcePattern": {
142 | "name": "alice-topic1",
143 | "resourceType": "TOPIC",
144 | },
145 | }
146 | }
147 |
148 | # Anyone can do idempotent write
149 | test_anyone_idempotent_write if {
150 | allow with input.requestContext.principal.name as "alice-producer"
151 | with input.action as {
152 | "operation": "IDEMPOTENT_WRITE",
153 | "resourcePattern": {
154 | "name": "kafka-cluster",
155 | "patternType": "LITERAL",
156 | "resourceType": "CLUSTER",
157 | },
158 | }
159 | }
160 |
161 | # --------------------------------------------------
162 | # Negative test
163 | # --------------------------------------------------
164 |
165 | test_consume_own_topic_as_producer if {
166 | not allow with input.requestContext.principal.name as "alice-producer"
167 | with input.action as {
168 | "operation": "READ",
169 | "resourcePattern": {
170 | "name": "alice-mytopic",
171 | "resourceType": "TOPIC",
172 | },
173 | }
174 | }
175 |
176 | test_consume_someone_elses_topic_as_producer if {
177 | not allow with input.requestContext.principal.name as "alice-producer"
178 | with input.action as {
179 | "operation": "READ",
180 | "resourcePattern": {
181 | "name": "someone-mytopic",
182 | "resourceType": "TOPIC",
183 | },
184 | }
185 | }
186 |
187 | test_consume_someone_elses_topic_as_consumer if {
188 | not allow with input.requestContext.principal.name as "alice-consumer"
189 | with input.action as {
190 | "operation": "READ",
191 | "resourcePattern": {
192 | "name": "someone-mytopic",
193 | "resourceType": "TOPIC",
194 | },
195 | }
196 | }
197 |
198 | test_produce_own_topic_as_consumer if {
199 | not allow with input.requestContext.principal.name as "alice-consumer"
200 | with input.action as {
201 | "operation": "WRITE",
202 | "resourcePattern": {
203 | "name": "alice-mytopic",
204 | "resourceType": "TOPIC",
205 | },
206 | }
207 | }
208 |
209 | test_produce_someone_elses_topic_as_consumer if {
210 | not allow with input.requestContext.principal.name as "alice-consumer"
211 | with input.action as {
212 | "operation": "WRITE",
213 | "resourcePattern": {
214 | "name": "someone-mytopic",
215 | "resourceType": "TOPIC",
216 | },
217 | }
218 | }
219 |
220 | test_produce_someone_elses_topic_as_producer if {
221 | not allow with input.requestContext.principal.name as "alice-producer"
222 | with input.action as {
223 | "operation": "WRITE",
224 | "resourcePattern": {
225 | "name": "someone-mytopic",
226 | "resourceType": "TOPIC",
227 | },
228 | }
229 | }
230 |
231 | test_create_someone_elses_topic_as_producer if {
232 | not allow with input.requestContext.principal.name as "alice-producer"
233 | with input.action as {
234 | "operation": "CREATE",
235 | "resourcePattern": {
236 | "name": "someone-topic1",
237 | "resourceType": "TOPIC",
238 | },
239 | }
240 | }
241 |
242 | test_create_someone_elses_topic_as_consumer if {
243 | 	not allow with input.requestContext.principal.name as "alice-consumer"
244 | with input.action as {
245 | "operation": "CREATE",
246 | "resourcePattern": {
247 | "name": "someone-topic1",
248 | "resourceType": "TOPIC",
249 | },
250 | }
251 | }
252 |
253 | # MGMT User tests
254 | test_mgmt_user_other_topic_read if {
255 | not allow with input.requestContext.principal.name as "alice-mgmt"
256 | with input.action as {
257 | "operation": "READ",
258 | "resourcePattern": {
259 | "name": "some-topic1",
260 | "resourceType": "TOPIC",
261 | },
262 | }
263 | }
264 |
265 | test_mgmt_user_other_topic_write if {
266 | not allow with input.requestContext.principal.name as "alice-mgmt"
267 | with input.action as {
268 | "operation": "WRITE",
269 | "resourcePattern": {
270 | "name": "some-topic1",
271 | "resourceType": "TOPIC",
272 | },
273 | }
274 | }
275 |
276 | test_mgmt_user_other_topic_create if {
277 | not allow with input.requestContext.principal.name as "alice-mgmt"
278 | with input.action as {
279 | "operation": "CREATE",
280 | "resourcePattern": {
281 | "name": "some-topic1",
282 | "resourceType": "TOPIC",
283 | },
284 | }
285 | }
286 |
287 | test_mgmt_user_other_topic_delete if {
288 | not allow with input.requestContext.principal.name as "alice-mgmt"
289 | with input.action as {
290 | "operation": "DELETE",
291 | "resourcePattern": {
292 | "name": "some-topic1",
293 | "resourceType": "TOPIC",
294 | },
295 | }
296 | }
297 |
298 | test_mgmt_user_other_topic_alter if {
299 | not allow with input.requestContext.principal.name as "alice-mgmt"
300 | with input.action as {
301 | "operation": "ALTER",
302 | "resourcePattern": {
303 | "name": "some-topic1",
304 | "resourceType": "TOPIC",
305 | },
306 | }
307 | }
308 |
--------------------------------------------------------------------------------
/src/test/resources/log4j2.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <!-- Minimal console logging configuration for tests -->
3 | <Configuration status="WARN">
4 |     <Appenders>
5 |         <Console name="Console" target="SYSTEM_OUT">
6 |             <PatternLayout pattern="%d{HH:mm:ss.SSS} [%t] %-5level %logger{36} - %msg%n"/>
7 |         </Console>
8 |     </Appenders>
9 |     <Loggers>
10 |         <Root level="info">
11 |             <AppenderRef ref="Console"/>
12 |         </Root>
13 |     </Loggers>
14 | </Configuration>
15 | 
--------------------------------------------------------------------------------
/src/test/scala/org/openpolicyagent/kafka/AzRequestContext.scala:
--------------------------------------------------------------------------------
1 | package org.openpolicyagent.kafka
2 |
3 | import java.net.InetAddress
4 | import org.apache.kafka.common.security.auth.{KafkaPrincipal, SecurityProtocol}
5 | import org.apache.kafka.server.authorizer.{AuthorizableRequestContext, AuthorizerServerInfo}
6 | import org.apache.kafka.common.{Endpoint, ClusterResource}
7 | import java.util.Collection
8 |
9 | case class AzRequestContext(
10 | clientId: String,
11 | requestType: Int,
12 | listenerName: String,
13 | clientAddress: InetAddress,
14 | principal: KafkaPrincipal,
15 | securityProtocol: SecurityProtocol,
16 | correlationId: Int,
17 | requestVersion: Int) extends AuthorizableRequestContext
18 |
19 | case class AzServerInfo(
20 | brokerId: Int,
21 | clusterResource: ClusterResource,
22 | endpoints: Collection[Endpoint],
23 | interBrokerEndpoint: Endpoint,
24 | earlyStartListeners: Collection[String]) extends AuthorizerServerInfo
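25 | 
26 | // These case classes are minimal stand-ins for Kafka's authorizer SPI
27 | // contexts, so the tests and benchmark can build requests without a broker.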
--------------------------------------------------------------------------------
/src/test/scala/org/openpolicyagent/kafka/OpaAuthorizerBenchmark.scala:
--------------------------------------------------------------------------------
1 | package org.openpolicyagent.kafka
2 |
3 | import java.net.InetAddress
4 | import java.util
5 | import java.util.concurrent.TimeUnit
6 | import org.apache.kafka.common.acl.AclOperation
7 | import org.apache.kafka.common.resource.ResourcePattern
8 | import org.apache.kafka.common.resource.PatternType
9 | import org.apache.kafka.common.resource.ResourceType.TOPIC
10 | import org.apache.kafka.common.security.auth.{KafkaPrincipal, SecurityProtocol}
11 | import org.apache.kafka.network.Session
12 | import org.apache.kafka.server.authorizer.Action
13 |
14 | import scala.jdk.CollectionConverters._
15 |
16 | object OpaAuthorizerBenchmark {
17 | private val opaUrl = "http://localhost:8181/v1/data/kafka/authz/allow"
18 |
19 | def main(args: Array[String]): Unit = {
20 | val startTime = System.nanoTime
21 | val benchmark = new OpaAuthorizerBenchmark
22 | val numCalls = 10000
23 |     for (_ <- 1 to numCalls) {
24 | val input = benchmark.createRequest
25 | benchmark.getAuthorizer.authorize(input.requestContext, input.actions.asJava)
26 | }
27 |
28 | val endTime = System.nanoTime
29 | val totalTime = endTime - startTime
30 | val perCallTime = totalTime / numCalls
31 | val totalTimeMs = TimeUnit.MILLISECONDS.convert(totalTime, TimeUnit.NANOSECONDS)
32 | val perCallMs = TimeUnit.MILLISECONDS.convert(perCallTime, TimeUnit.NANOSECONDS)
33 |
34 | println("Tests run in " + totalTimeMs + " milliseconds")
35 | println("Time per call is " + perCallMs + " ms")
36 | }
37 | }
38 | class OpaAuthorizerBenchmark {
39 | private val authorizer = new OpaAuthorizer
40 |
41 | val config: util.HashMap[String, String] = new util.HashMap[String, String](2)
42 | config.put("opa.authorizer.url", OpaAuthorizerBenchmark.opaUrl)
43 | config.put("opa.authorizer.allow.on.error", "false")
44 | authorizer.configure(config)
45 |
46 | def createRequest = {
47 | val principal = new KafkaPrincipal("User", "user-" + new scala.util.Random().nextInt())
48 | val session = new Session(principal, InetAddress.getLoopbackAddress)
49 | val resource = new ResourcePattern(TOPIC, "my-topic", PatternType.LITERAL)
50 | val authzReqContext = new AzRequestContext(
51 | clientId = "rdkafka",
52 | requestType = 1,
53 | listenerName = "SASL_PLAINTEXT",
54 | clientAddress = session.clientAddress,
55 | principal = session.principal,
56 | securityProtocol = SecurityProtocol.SASL_PLAINTEXT,
57 | correlationId = new scala.util.Random().nextInt(1000),
58 | requestVersion = 4)
59 | val actions = List(new Action(AclOperation.WRITE, resource, 1, true, true))
60 |
61 | FullRequest(authzReqContext, actions)
62 | }
63 |
64 | private def getAuthorizer = authorizer
65 | }
66 |
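67 | // Note: like OpaAuthorizerSpec, this benchmark assumes a running OPA
68 | // instance serving the sample policy at http://localhost:8181.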
--------------------------------------------------------------------------------
/src/test/scala/org/openpolicyagent/kafka/OpaAuthorizerSpec.scala:
--------------------------------------------------------------------------------
1 | package org.openpolicyagent.kafka
2 |
3 | import com.fasterxml.jackson.databind.json.JsonMapper
4 | import com.fasterxml.jackson.databind.module.SimpleModule
5 |
6 | import java.net.{InetAddress, URI}
7 | import java.net.http.HttpRequest.BodyPublishers
8 | import java.net.http.HttpResponse.BodyHandlers
9 | import java.net.http.{HttpClient, HttpRequest, HttpResponse}
10 | import org.junit.runner.RunWith
11 | import org.scalatestplus.junit.JUnitRunner
12 | import com.typesafe.scalalogging.LazyLogging
13 | import com.fasterxml.jackson.module.scala.DefaultScalaModule
14 | import org.apache.kafka.common.acl.AclOperation
15 | import org.apache.kafka.common.network.{ClientInformation, ListenerName}
16 | import org.apache.kafka.common.protocol.ApiKeys
17 | import org.apache.kafka.common.resource.PatternType
18 | import org.apache.kafka.common.security.auth.{KafkaPrincipal, SecurityProtocol}
19 | import org.apache.kafka.common.resource.ResourcePattern
20 | import org.apache.kafka.common.resource.ResourceType
21 | import org.apache.kafka.common.requests.{RequestContext, RequestHeader}
22 | import org.apache.kafka.network.Session
23 | import org.apache.kafka.server.authorizer.{Action, AuthorizationResult}
24 | import org.scalatest._
25 | import matchers.should._
26 | import flatspec._
27 | import org.apache.kafka.common.message.RequestHeaderData
28 |
29 | import java.lang.management.ManagementFactory
30 | import javax.management.ObjectName
31 | import scala.jdk.CollectionConverters._
32 |
33 | /**
34 |  * Integration-level tests of the Kafka policy described in src/main/rego/README.md.
35 |  * NOTE: requires a running OPA instance with the provided policy loaded.
36 | */
37 | @RunWith(classOf[JUnitRunner])
38 | class OpaAuthorizerSpec extends AnyFlatSpec with Matchers with PrivateMethodTester with LazyLogging {
39 |
40 | private val opaUrl = "http://localhost:8181/v1/data/kafka/authz/allow"
41 | private val requestSerializerModule = new SimpleModule()
42 | .addSerializer(classOf[ResourcePattern], new ResourcePatternSerializer)
43 | .addSerializer(classOf[Action], new ActionSerializer)
44 | .addSerializer(classOf[RequestContext], new RequestContextSerializer)
45 | .addSerializer(classOf[ClientInformation], new ClientInformationSerializer)
46 | .addSerializer(classOf[KafkaPrincipal], new KafkaPrincipalSerializer)
47 | .addSerializer(classOf[RequestHeader], new RequestHeaderSerializer)
48 | .addSerializer(classOf[RequestHeaderData], new RequestHeaderDataSerializer)
49 | private val objectMapper = JsonMapper.builder().addModule(requestSerializerModule).addModule(DefaultScalaModule).build()
50 | private val defaultCacheCapacity = 50000
51 | private lazy val opaResponse = testOpaConnection()
52 |
53 | override def withFixture(test: NoArgTest): Outcome = {
54 | assume(opaResponse.isDefined, s"Assumed OPA would respond to request at $opaUrl")
55 |
56 | val resp = opaResponse.get
57 | assume(resp.statusCode() == 200, "Assumed OPA would respond with status code 200")
58 | assume(!objectMapper.readTree(resp.body()).at("/result").asBoolean, "Assumed OPA would return negative result")
59 |
60 | super.withFixture(test)
61 | }
62 |
63 | "Request object" should "serialize to JSON" in {
64 | val actions = List(
65 | createAction("bob-topic", AclOperation.READ),
66 | )
67 | val request = createRequest("bob", actions)
68 |
69 | objectMapper.writeValueAsString(request) should not be "{}"
70 | objectMapper.writeValueAsString(request.actions.asJava) should not be "{}"
71 | objectMapper.writeValueAsString(request.requestContext) should not be "{}"
72 | }
73 |
74 | "OpaAuthorizer" should "authorize when username matches name of topic" in {
75 | val opaAuthorizer = setupAuthorizer()
76 | val actions = List(
77 | createAction("alice-topic", AclOperation.WRITE),
78 | )
79 | val request = createRequest("alice-producer", actions)
80 |
81 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava) should be (List(AuthorizationResult.ALLOWED).asJava)
82 | opaAuthorizer.getCache.estimatedSize() should be (1)
83 | }
84 |
85 | "OpaAuthorizer" should "return authorization results for multiple actions in the same request in right order" in {
86 | val opaAuthorizer = setupAuthorizer()
87 | val actions = List(
88 | createAction("alice-topic", AclOperation.WRITE),
89 | createAction("alice-topic", AclOperation.WRITE),
90 | createAction("alice-topic", AclOperation.READ),
91 | createAction("alice-topic", AclOperation.READ),
92 | createAction("alice-topic", AclOperation.WRITE),
93 | createAction("alice-topic", AclOperation.DESCRIBE),
94 | createAction("alice-topic", AclOperation.CREATE),
95 | )
96 | val request = createRequest("alice-producer", actions)
97 |
98 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava) should be (List(
99 | AuthorizationResult.ALLOWED,
100 | AuthorizationResult.ALLOWED,
101 | AuthorizationResult.DENIED,
102 | AuthorizationResult.DENIED,
103 | AuthorizationResult.ALLOWED,
104 | AuthorizationResult.ALLOWED,
105 | AuthorizationResult.ALLOWED).asJava)
106 | opaAuthorizer.getCache.estimatedSize() should be (4)
107 | }
108 |
109 | "OpaAuthorizer" should "not authorize when username does not match name of topic" in {
110 | val opaAuthorizer = setupAuthorizer()
111 | val actions = List(
112 | createAction("bob-topic", AclOperation.WRITE),
113 | )
114 | val request = createRequest("alice-producer", actions)
115 |
116 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava) should be (List(AuthorizationResult.DENIED).asJava)
117 | opaAuthorizer.getCache.estimatedSize() should be (1)
118 | }
119 |
120 | "OpaAuthorizer" should "not authorize read request for producer" in {
121 | val opaAuthorizer = setupAuthorizer()
122 | val actions = List(
123 | createAction("alice-topic", AclOperation.READ),
124 | )
125 | val request = createRequest("alice-producer", actions)
126 |
127 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava) should be (List(AuthorizationResult.DENIED).asJava)
128 | opaAuthorizer.getCache.estimatedSize() should be (1)
129 | }
130 |
131 | "OpaAuthorizer" should "not authorize write request for consumer" in {
132 | val opaAuthorizer = setupAuthorizer()
133 | val actions = List(
134 | createAction("alice-topic", AclOperation.WRITE),
135 | )
136 | val request = createRequest("alice-consumer", actions)
137 |
138 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava) should be (List(AuthorizationResult.DENIED).asJava)
139 | opaAuthorizer.getCache.estimatedSize() should be (1)
140 | }
141 |
142 | "OpaAuthorizer" should "cache the first request" in {
143 | val opaAuthorizer = setupAuthorizer()
144 | val actions = List(
145 | createAction("alice-topic", AclOperation.READ),
146 | )
147 | val request = createRequest("alice-consumer", actions)
148 |
149 | for (_ <- 1 until 5) {
150 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava) should be (List(AuthorizationResult.ALLOWED).asJava)
151 | }
152 |
153 | opaAuthorizer.getCache.estimatedSize() should be (1)
154 |
155 | val otherActions = List(
156 | createAction("bob-topic", AclOperation.READ),
157 | )
158 | val otherRequest = createRequest("bob-consumer", otherActions)
159 |
160 | for (_ <- 1 until 5) {
161 | opaAuthorizer.authorize(otherRequest.requestContext, otherRequest.actions.asJava) should be (List(AuthorizationResult.ALLOWED).asJava)
162 | }
163 |
164 | opaAuthorizer.getCache.estimatedSize() should be (2)
165 | }
166 |
167 | "OpaAuthorizer" should "not cache decisions while errors occur" in {
168 | val opaAuthorizer = setupAuthorizer("http://localhost/broken")
169 | val actions = List(
170 | createAction("alice-topic", AclOperation.WRITE),
171 | )
172 | val request = createRequest("alice-consumer", actions)
173 |
174 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava) should be (List(AuthorizationResult.DENIED).asJava)
175 | opaAuthorizer.getCache.estimatedSize() should be (0)
176 | }
177 |
178 | "OpaAuthorizer" should "authorize super users without checking with OPA" in {
179 | val opaAuthorizer = setupAuthorizer(opaUrl)
180 |
181 | val actions1 = List(
182 | createAction("alice-topic", AclOperation.WRITE),
183 | )
184 | val request1 = createRequest("CN=my-user", actions1)
185 | opaAuthorizer.authorize(request1.requestContext, request1.actions.asJava) should be (List(AuthorizationResult.ALLOWED).asJava)
186 |
187 | val actions2 = List(
188 | createAction("alice-topic", AclOperation.WRITE),
189 | )
190 | val request2 = createRequest("CN=my-user2,O=my-org", actions2)
191 | opaAuthorizer.authorize(request2.requestContext, request2.actions.asJava) should be (List(AuthorizationResult.ALLOWED).asJava)
192 |
193 | val actions3 = List(
194 | createAction("alice-topic", AclOperation.WRITE),
195 | )
196 | val request3 = createRequest("CN=my-user3", actions3)
197 | opaAuthorizer.authorize(request3.requestContext, request3.actions.asJava) should be (List(AuthorizationResult.DENIED).asJava)
198 | }
199 |
200 | "OpaAuthorizer" should "authorize when RequestContext is used" in {
201 | val opaAuthorizer = setupAuthorizer()
202 | val actions = List(
203 | createAction("alice-topic", AclOperation.WRITE),
204 | )
205 | val requestContext = new RequestContext(new RequestHeader(ApiKeys.PRODUCE, 2, "rdkafka", 5), "192.168.64.4:9092-192.168.64.1:58864-0", InetAddress.getLoopbackAddress, new KafkaPrincipal("User", "alice-producer"),
206 | new ListenerName("SASL_PLAINTEXT"), SecurityProtocol.SASL_PLAINTEXT, new ClientInformation("rdkafka", "1.0.0"), false)
207 |
208 | opaAuthorizer.authorize(requestContext, actions.asJava) should be (List(AuthorizationResult.ALLOWED).asJava)
209 | opaAuthorizer.getCache.estimatedSize() should be (1)
210 | }
211 |
212 | "OpaAuthorizer" should "set up metrics system if enabled" in {
213 | val opaAuthorizer = setupAuthorizer(metricsEnabled = true)
214 | opaAuthorizer.maybeSetupMetrics("dummy_cluster", 1)
215 | val server = ManagementFactory.getPlatformMBeanServer
216 | assert(server.isRegistered(new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.REQUEST_HANDLE_GROUP)))
217 | assert(server.isRegistered(new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.RESULT_GROUP)))
218 | }
219 |
220 | "OpaAuthorizer" should "record correct number of authorized requests" in {
221 | val opaAuthorizer = setupAuthorizer(metricsEnabled = true)
222 | opaAuthorizer.maybeSetupMetrics("dummy_cluster", 1)
223 |
224 | val actions = List(
225 | createAction("alice-topic", AclOperation.WRITE),
226 | )
227 | val request = createRequest("alice-producer", actions)
228 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava)
229 |
230 | val server = ManagementFactory.getPlatformMBeanServer
231 | val authorizedRequestCountActual = server.getAttribute(
232 | new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.RESULT_GROUP),
233 | MetricsLabel.AUTHORIZED_REQUEST_COUNT)
234 | assert(authorizedRequestCountActual == 1.0)
235 |
236 | val unauthorizedRequestCountActual = server.getAttribute(
237 | new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.RESULT_GROUP),
238 | MetricsLabel.UNAUTHORIZED_REQUEST_COUNT)
239 | assert(unauthorizedRequestCountActual == 0.0)
240 | }
241 |
242 | "OpaAuthorizer" should "record correct number of unauthorized requests" in {
243 | val opaAuthorizer = setupAuthorizer(metricsEnabled = true)
244 | opaAuthorizer.maybeSetupMetrics("dummy_cluster", 1)
245 |
246 | val actions = List(
247 | createAction("bob-topic", AclOperation.WRITE),
248 | )
249 | val request = createRequest("alice-producer", actions)
250 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava)
251 |
252 | val server = ManagementFactory.getPlatformMBeanServer
253 | val authorizedRequestCountActual = server.getAttribute(
254 | new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.RESULT_GROUP),
255 | MetricsLabel.AUTHORIZED_REQUEST_COUNT)
256 | assert(authorizedRequestCountActual == 0.0)
257 |
258 | val unauthorizedRequestCountActual = server.getAttribute(
259 | new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.RESULT_GROUP),
260 | MetricsLabel.UNAUTHORIZED_REQUEST_COUNT)
261 | assert(unauthorizedRequestCountActual == 1.0)
262 | }
263 |
264 | "OpaAuthorizer" should "record correct usage statistics of the cache and requests to OPA" in {
265 | val opaAuthorizer = setupAuthorizer(metricsEnabled = true)
266 | opaAuthorizer.maybeSetupMetrics("dummy_cluster", 1)
267 |
268 | val actions = List(
269 | createAction("alice-topic", AclOperation.WRITE),
270 | )
271 | val request = createRequest("alice-producer", actions)
272 | for (_ <- 1 until 5) {
273 | opaAuthorizer.authorize(request.requestContext, request.actions.asJava)
274 | }
275 |
276 | val server = ManagementFactory.getPlatformMBeanServer
277 | val requestToOPACountActual = server.getAttribute(
278 | new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.REQUEST_HANDLE_GROUP),
279 | MetricsLabel.REQUEST_TO_OPA_COUNT)
280 | assert(requestToOPACountActual == 1.0)
281 |
282 | val cacheHitRateActual = server.getAttribute(
283 | new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.REQUEST_HANDLE_GROUP),
284 | MetricsLabel.CACHE_HIT_RATE)
285 | assert(cacheHitRateActual == opaAuthorizer.getCache.stats().hitRate())
286 |
287 | val cacheUsagePercentageActual = server.getAttribute(
288 | new ObjectName(MetricsLabel.NAMESPACE + ":type=" + MetricsLabel.REQUEST_HANDLE_GROUP),
289 | MetricsLabel.CACHE_USAGE_PERCENTAGE)
290 | assert(cacheUsagePercentageActual == (opaAuthorizer.getCache.estimatedSize() / defaultCacheCapacity.toDouble))
291 | }
292 |
293 |
294 | def setupAuthorizer(url: String = opaUrl, metricsEnabled: Boolean = false): OpaAuthorizer = {
295 | val opaAuthorizer = new OpaAuthorizer()
296 | val config = new java.util.HashMap[String, String]
297 | config.put("opa.authorizer.url", url)
298 | config.put("opa.authorizer.allow.on.error", "false")
299 | config.put("super.users", "User:CN=my-user;User:CN=my-user2,O=my-org")
300 | if(metricsEnabled) {
301 | config.put("opa.authorizer.metrics.enabled", "true")
302 | }
303 | opaAuthorizer.configure(config)
304 | opaAuthorizer
305 | }
306 |
307 | def createRequest(username: String, actions: List[Action]): FullRequest = {
308 | val principal = new KafkaPrincipal("User", username)
309 | val session = new Session(principal, InetAddress.getLoopbackAddress)
310 | val authzReqContext = AzRequestContext(
311 | clientId = "rdkafka",
312 | requestType = 1,
313 | listenerName = "SASL_PLAINTEXT",
314 | clientAddress = session.clientAddress,
315 | principal = session.principal,
316 | securityProtocol = SecurityProtocol.SASL_PLAINTEXT,
317 | correlationId = new scala.util.Random().nextInt(1000),
318 | requestVersion = 4)
319 |
320 | FullRequest(authzReqContext, actions)
321 | }
322 |
323 | def createAction(topic: String, operation: AclOperation): Action = {
324 | val resource = new ResourcePattern(ResourceType.TOPIC, topic, PatternType.LITERAL)
325 | new Action(operation, resource, 1, true, true)
326 | }
327 |
328 | def testOpaConnection(): Option[HttpResponse[String]] = {
329 | try {
330 | val req = HttpRequest.newBuilder.uri(new URI(opaUrl)).POST(BodyPublishers.ofString("{}")).build
331 | Option(HttpClient.newBuilder.build.send(req, BodyHandlers.ofString))
332 | } catch {
333 | case _: Exception => Option.empty
334 | }
335 | }
336 | }
337 |
338 | case class FullRequest(requestContext: AzRequestContext, actions: List[Action])
339 |
--------------------------------------------------------------------------------