├── .codecov.yml
├── .editorconfig
├── .flake8
├── .github
│   ├── ISSUE_TEMPLATE
│   │   ├── bug_report.md
│   │   └── feature_request.md
│   └── PULL_REQUEST_TEMPLATE.md
├── .gitignore
├── .travis.yml
├── CONFIGURATION.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── kafkashell
│   ├── __init__.py
│   ├── bindings.py
│   ├── completer.py
│   ├── config.py
│   ├── constants.py
│   ├── data
│   │   ├── completer-acks.json
│   │   ├── completer-acks.schema
│   │   ├── completer-booleans.json
│   │   ├── completer-booleans.schema
│   │   ├── completer-cleanup-policy.json
│   │   ├── completer-cleanup-policy.schema
│   │   ├── completer-compression-codecs.json
│   │   ├── completer-compression-codecs.schema
│   │   ├── completer-entity-types.json
│   │   ├── completer-entity-types.schema
│   │   ├── completer-kafka-configs.json
│   │   ├── completer-kafka-configs.schema
│   │   ├── completer-ksql-output.json
│   │   ├── completer-ksql-output.schema
│   │   ├── completer-reset-policies.json
│   │   ├── completer-reset-policies.schema
│   │   ├── completer-resource-pattern-types.json
│   │   ├── completer-resource-pattern-types.schema
│   │   ├── completer-timestamp-types.json
│   │   ├── completer-timestamp-types.schema
│   │   ├── completer.json
│   │   ├── completer.schema
│   │   ├── shell-config.schema
│   │   └── shell-config.yaml
│   ├── executor.py
│   ├── helpers.py
│   ├── main.py
│   ├── settings.py
│   ├── style.py
│   ├── toolbar.py
│   └── version.py
├── manifest.in
├── requirements-dev.txt
├── scripts
│   ├── build.sh
│   ├── lint.sh
│   └── test.sh
├── setup.cfg
├── setup.py
├── tests
│   ├── __init__.py
│   ├── context.py
│   ├── data
│   │   ├── test-admin-client-settings-config.yaml
│   │   ├── test-completer.json
│   │   ├── test-config.yaml
│   │   ├── test-consumer-settings-config.yaml
│   │   ├── test-consumer-settings-without-properties-config.yaml
│   │   ├── test-environment-variables-config.yaml
│   │   ├── test-file-extension-bat-config.yaml
│   │   ├── test-file-extension-sh-config.yaml
│   │   ├── test-history-off-config.yaml
│   │   ├── test-invalid-file-extension-config.yaml
│   │   ├── test-invalid-ksql-config.yaml
│   │   ├── test-invalid-schema-registry-config.yaml
│   │   ├── test-modified-config.yaml
│   │   ├── test-no-zookeeper-config.yaml
│   │   ├── test-prefix-config.yaml
│   │   ├── test-prefix-none-config.yaml
│   │   ├── test-producer-settings-config.yaml
│   │   └── test-producer-settings-without-properties-config.yaml
│   ├── test_bindings.py
│   ├── test_cli.py
│   ├── test_completer.py
│   ├── test_completer_data.py
│   ├── test_config.py
│   ├── test_constants.py
│   ├── test_executor.py
│   ├── test_helpers.py
│   ├── test_schemas.py
│   ├── test_settings.py
│   ├── test_toolbar.py
│   ├── test_version.py
│   └── utilities.py
└── tox.ini
/.codecov.yml:
--------------------------------------------------------------------------------
1 | coverage:
2 | precision: 2
3 | round: down
4 | range: "60...90"
--------------------------------------------------------------------------------
/.editorconfig:
--------------------------------------------------------------------------------
1 | root = true
2 |
3 | [*]
4 | charset = utf-8
5 | end_of_line = lf
6 | insert_final_newline = true
7 | trim_trailing_whitespace = true
8 |
9 | [{*.py,*.json}]
10 | indent_style = space
11 | indent_size = 4
12 |
13 | [*.yaml]
14 | indent_style = space
15 | indent_size = 2
16 |
--------------------------------------------------------------------------------
/.flake8:
--------------------------------------------------------------------------------
1 | [flake8]
2 | select = Q0
3 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a bug report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Steps to reproduce the behavior:
15 | 1. Run `kafka-shell`
16 | 2. ...
17 |
18 | **Expected behavior**
19 | A clear and concise description of what you expected to happen.
20 |
21 | **Screenshots**
22 | If applicable, add screenshots to help explain your problem.
23 |
24 | **Desktop (please complete the following information):**
25 | - OS: [e.g. macOS Mojave]
26 | - Version: [e.g. 0.1.0]
27 |
28 | **Additional context**
29 | Add any other context about the problem here.
30 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: ''
5 | labels: enhancement
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Example: I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | ## Description
2 |
3 |
4 | ## Types of Changes
5 |
6 | - [ ] Bug fix (non-breaking change which fixes an issue)
7 | - [ ] New feature (non-breaking change which adds functionality)
8 | - [ ] Breaking change (fix or feature that would cause existing functionality to change)
9 |
10 | ## Checklist
11 |
12 |
13 | - [ ] My code follows the code style of this project.
14 | - [ ] I have read the **CONTRIBUTING.md** document.
15 | - [ ] I have added tests to cover my changes.
16 | - [ ] All new and existing tests have passed.
17 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | .idea/
2 | venv/
3 | dist/
4 | build/
5 | htmlcov/
6 | .pytest_cache/
7 | /*.egg-info
8 | __pycache__
9 | *.iml
10 | *.pyc
11 | .coverage
12 | .tox/
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | sudo: false
2 | cache: pip
3 | language: python
4 |
5 | matrix:
6 | include:
7 | - python: 3.7
8 | dist: xenial
9 | sudo: required
10 | - python: 3.6
11 | - python: 3.5
12 | - python: 2.7
13 |
14 | install:
15 | - pip install -r requirements-dev.txt
16 | - pip install tox-travis codecov
17 | - pip install -e .
18 |
19 | script:
20 | - tox
21 | - coverage run --source kafkashell -m pytest
22 | - coverage report -m
23 |
24 | after_success:
25 | - codecov
--------------------------------------------------------------------------------
/CONFIGURATION.md:
--------------------------------------------------------------------------------
1 | # Configuration
2 |
3 | Kafka shell can be configured via a YAML file. It is generated on initialization and located in `~/.kafka-shell`.
4 |
5 | The user configuration file is defined by a JSON schema, which can be viewed at [shell-config.schema][schema]. The user configuration is parsed and validated on initialization of `kafka-shell`. If your modified configuration is incorrect, the shell will exit and print an error message displaying the validation error, such as a missing required field.
6 |
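Under the hood this is a plain `jsonschema` check of the parsed YAML against the bundled schema (see `kafkashell/config.py`). A minimal sketch of the same flow, using hypothetical local paths in place of the installed package data files:

```python
import json

import oyaml as yaml
from jsonschema import validate, ValidationError

# Hypothetical paths; kafka-shell resolves these inside its package data.
with open("shell-config.schema") as f:
    schema = json.load(f)

with open("config.yaml") as f:
    config = yaml.safe_load(f)

try:
    validate(instance=config, schema=schema)  # raises on an invalid config
except ValidationError as ex:
    print("Invalid user configuration: {0}".format(ex.message))
```
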
7 | ## Default Configuration
8 | By default, the generated `~/.kafka-shell/config.yaml` file looks like this:
9 |
10 | ```yaml
11 | version: 1
12 | enable:
13 | history: true
14 | save_on_exit: true
15 | auto_complete: true
16 | auto_suggest: true
17 | inline_help: true
18 | fuzzy_search: true
19 | cluster: local
20 | clusters:
21 | local:
22 | bootstrap_servers: localhost:9092
23 | zookeeper_connect: localhost:2181
24 | schema_registry_url: http://localhost:8081
25 | ksql_server_url: http://localhost:8088
26 | ```
27 |
28 | ## Customization
29 | One of the main benefits of utilizing `kafka-shell` instead of directly using the Kafka command-line tools is the ability to define clusters that commands are run against by default. This means you no longer have to pass flags such as `--bootstrap-server`; they'll be added automatically when you run the command. The `~/.kafka-shell/config.yaml` file is where you define clusters to run against, as well as other settings for `kafka-shell`.
30 |
31 | In the future, we plan to add configuration via commands within the shell. For now, configuration must be done manually by editing the YAML file.
32 |
33 | ### Shell Settings
34 | There are a few top-level settings that define how the shell works. Most of these can also be changed via function keys while using `kafka-shell`. Defaults for each property are shown above.
35 |
36 | | key | type | description |
37 | |------------|---------|----------------------------------------------------------------|
38 | | `version` | int | Version of the configuration schema. Must be 1. |
39 | | `enable` | Enable | Enable various features within kafka-shell. |
40 | | `cluster` | string | Default cluster to be selected on initialization. |
41 | | `clusters` | Cluster | Clusters that commands can run against. See below for details. |
42 |
43 | ### Enable Features
44 | Features for `kafka-shell` can be enabled through feature flags defined in the `enable` root-level object.
45 |
46 | | key | type | description |
47 | |-----------------|---------|------------------------------------------------------------------|
48 | | `history` | boolean | Save command history between `kafka-shell` sessions. |
49 | | `save_on_exit` | boolean | Save any settings changed by key bindings on exit. |
50 | | `auto_complete` | boolean | Show the command autocomplete dropdown when typing. |
51 | | `auto_suggest` | boolean | Show suggestions from past commands like the fish shell. |
52 | | `inline_help` | boolean | Show command and flag descriptions on the autocomplete dropdown. |
53 | | `fuzzy_search` | boolean | Allow fuzzy searching of autocomplete dropdown selections. |
54 |
55 |
56 | ### Clusters
57 | Clusters can be defined to run commands against. Flags such as `--bootstrap-server` are automatically added to a command based on the selected `cluster`, which can be changed by pressing `F2`.
58 |
59 | Each cluster should have a unique name (a key in the `clusters` root-level object). By default, the cluster `local` is added.
60 |
61 | | key | type | description |
62 | |--------------------------|--------------|---------------------------------------------------------------|
63 | | `bootstrap_servers` | string | Comma-separated `host:port` Kafka brokers to connect to. |
64 | | `zookeeper_connect` | string | Comma-separated `host:port` Zookeeper nodes to connect to. |
65 | | `schema_registry_url` | string | Schema Registry URL used when working with avro schemas. |
66 | | `ksql_server_url` | string | KSQL Server URL used when utilizing the `ksql` command. |
67 | | `command_prefix` | string | Prefix all commands with another command, e.g. `docker exec`. |
68 | | `command_file_extension` | string | Add a file extension such as `sh` to commands. |
69 | | `consumer_settings` | ToolSettings | Pass config and default property settings to consumer CLIs. |
70 | | `producer_settings` | ToolSettings | Pass config and default property settings to producer CLIs. |
71 | | `admin_client_settings` | ToolSettings | Pass config to admin clients through `--command-config`. |
72 |
73 |
74 | #### Tool Settings
75 | Settings, such as a configuration properties file or default property settings, can be set for each cluster.
76 |
77 | See the full configuration example below for how to use tool settings.
78 |
79 | | key | type | description |
80 | |--------------|--------|------------------------------------------------------------|
81 | | `config` | string | A configuration properties file to be passed to CLI tools. |
82 | | `properties` | object | Set default `--property` options to be passed. |
83 |
84 |
85 | ## Full Configuration Example
86 | The below example shows how to use some of the non-default settings.
87 |
88 | For example, when using cluster `docker-cluster`:
89 | - All commands will be prefixed with `docker exec -it kafka-tools`
90 | - All consumer commands will add `--consumer.config consumer.properties` and `--property print.key=true`
91 | - All producer commands will add `--producer.config producer.properties` and `--property key.separator=,`
92 |
93 | ```yaml
94 | version: 1
95 | enable:
96 | history: true
97 | save_on_exit: true
98 | auto_complete: true
99 | auto_suggest: true
100 | inline_help: true
101 | fuzzy_search: true
102 | cluster: docker-cluster
103 | clusters:
104 | docker-cluster:
105 | bootstrap_servers: docker:9092
106 | zookeeper_connect: docker:2181
107 | schema_registry_url: http://docker:8081
108 | ksql_server_url: http://docker:8088
109 | command_prefix: docker exec -it kafka-tools
110 | consumer_settings:
111 | config: consumer.properties
112 | properties:
113 | print.key: true
114 | producer_settings:
115 | config: producer.properties
116 | properties:
117 | key.separator: ","
118 | admin_client_settings:
119 | config: admin.properties
120 | ```
121 |
122 | ### Example Commands
123 |
124 | These examples show what commands would *actually* be run based on what was typed, using the full configuration from above.
125 |
126 | For example, if you typed the command `kafka-console-consumer --topic test`:
127 |
128 | ```bash
129 | docker exec -it kafka-tools kafka-console-consumer --bootstrap-server docker:9092 --consumer.config consumer.properties --property print.key=true
130 | ```
131 |
132 | For example, if you typed the command `kafka-avro-console-producer --topic test`:
133 |
134 | ```bash
135 | docker exec -it kafka-tools kafka-avro-console-producer --broker-list docker:9092 --property schema.registry.url=http://docker:8081 --producer.config producer.properties --property key.separator=,
136 | ```
137 |
138 | For example, if you typed the command `kafka-broker-api-versions`:
139 |
140 | ```bash
141 | docker exec -it kafka-tools kafka-broker-api-versions --bootstrap-server docker:9092 --command-config admin.properties
142 | ```
143 |
144 | As you can see, you can save a ton of typing by utilizing `kafka-shell`!
145 |
146 | ### Command File Extension
147 |
148 | The file extension for commands such as `kafka-topics` is `null` by default. Depending on how you installed the Kafka command-line tools, they may have the extension `sh` or `bat`. They may also be set this way in pre-built Docker images.
149 |
150 | You can change this, per cluster, by setting the `command_file_extension` property in the cluster config. For example:
151 |
152 | ```yaml
153 | ...
154 | clusters:
155 | local:
156 | bootstrap_servers: localhost:9092
157 | zookeeper_connect: localhost:2181
158 | schema_registry_url: http://localhost:8081
159 | ksql_server_url: http://localhost:8088
160 | command_file_extension: sh
161 | ```
162 |
163 | If you run `kafka-topics --list` with the above configuration, the following would be run:
164 |
165 | ```bash
166 | kafka-topics.sh --list --zookeeper localhost:2181
167 | ```
168 |
169 | Without the file extension config set, the following would be run:
170 |
171 | ```bash
172 | kafka-topics --list --zookeeper localhost:2181
173 | ```
174 |
175 | ## Support
176 | If you have a question on how to configure `kafka-shell`, feel free to [open a support issue][support].
177 |
178 | [schema]: kafkashell/data/shell-config.schema
179 | [support]: https://github.com/devshawn/kafka-shell/issues/new?assignees=&labels=support&title=
180 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing
2 |
3 | Contributions are very welcome! Any existing issues labeled ["help wanted"](https://github.com/devshawn/kafka-shell/labels/help%20wanted) or ["good first issue"](https://github.com/devshawn/kafka-shell/labels/good%20first%20issue) are free to be pursued.
4 |
5 | ## Feature Requests & Bug Reports
6 | For feature requests and bug reports, [submit an issue][issues].
7 |
8 | ## Style Guide
9 | We follow [PEP8][pep8] as the general style guide with the following changes:
10 |
11 | - Line length can be up to 120 characters
12 | - Double quotes for all strings except when avoiding backslashes
13 |
14 | We use `flake8` for linting and `pytest` with `tox` for testing.
15 |
16 | ## Pull Requests
17 | The preferred way to contribute is to fork the [main repository][repository] on GitHub.
18 |
19 | 1. Discuss your proposed change in a GitHub issue before spending time implementing a feature or fix.
20 |
21 | 2. Ensure all changes are relevant to the pull request. Keep pull requests as small and to-the-point as possible.
22 |
23 | 3. Add & modify tests as necessary. Also, ensure the code meets our style standards.
24 |
25 | 4. Once changes are completed, open a pull request for review against the master branch.
26 |
27 |
28 | [repository]: https://github.com/devshawn/kafka-shell
29 | [issues]: https://github.com/devshawn/kafka-shell/issues
30 | [pep8]: https://www.python.org/dev/peps/pep-0008/
31 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright 2019 Shawn Seymour
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # kafka-shell
2 |
3 | [](https://travis-ci.org/devshawn/kafka-shell) [](https://codecov.io/gh/devshawn/kafka-shell)   [](LICENSE)
4 |
5 | A supercharged, interactive Kafka shell built on top of the existing Kafka CLI tools.
6 |
7 |
8 |
9 |
10 |
11 | Kafka shell allows you to configure a list of clusters, and properties such as `--bootstrap-server` and `--zookeeper` for the currently selected cluster will automatically be added when the command is run. No more remembering long server addresses or ports!
12 |
13 | ## Installation
14 | Kafka shell requires `python` and `pip`. It is a wrapper over the existing Kafka command-line tools, so
15 | those must exist within your `PATH`.
16 |
17 | You can install kafka-shell using pip:
18 |
19 | ```bash
20 | pip install kafka-shell
21 | ```
22 |
23 | ## Usage
24 | Kafka shell is an interactive shell. You can run it from the terminal by:
25 |
26 | ```bash
27 | kafka-shell
28 | ```
29 |
30 | From here, you can start typing `kafka` and the autocomplete will kick in.
31 |
32 | **Key Commands**
33 | - **Change Cluster**: The cluster that commands are run against can be cycled through by pressing `F2`.
34 | - **Fuzzy Search**: By default, fuzzy search of commands is enabled (see the sketch after this list). This can be toggled on & off by pressing `F3`.
35 | - **In-line Help**: By default, in-line help is shown alongside the drop-down suggestion list. This can be toggled on & off by pressing `F9`.
36 | - **Exit**: The shell can be exited by pressing `F10` or by typing `exit`.
37 |
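The fuzzy matching itself is delegated to the `fuzzyfinder` package (see `kafkashell/completer.py`). A quick sketch of the behavior; the candidate list here is made up for illustration:

```python
from fuzzyfinder import fuzzyfinder

# Matches candidates containing the query's characters in order,
# not necessarily contiguously ("ktp" matches "kafka-topics").
candidates = ["kafka-topics", "kafka-console-producer", "ksql"]
print(list(fuzzyfinder("ktp", candidates)))  # ["kafka-topics"]
```
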
38 | ## Configuration
39 | Kafka shell allows you to configure settings and Kafka clusters to run commands against through a configuration file.
40 |
41 | By default, when `kafka-shell` is first run, a directory in your home directory is generated at `~/.kafka-shell`. A configuration file called `config.yaml` is generated in that new folder. It is a YAML file containing details about clusters, `kafka-shell` configuration, and more.
42 |
43 | See [CONFIGURATION.md][configuration] to add a cluster to your configuration or to set other `kafka-shell` settings.
44 |
45 | ## Features
46 | Kafka shell simplifies running complex Kafka CLI commands and provides smart auto-completion of commands, options, and more.
47 |
48 | - Auto-completion of commands, options, and configurations
49 | - Configure clusters to run commands against and switch between them
50 | - Fish-style auto suggestions
51 | - Command history
52 | - Contextual help
53 | - Toolbar options
54 |
55 | **Completion of Configurations**
56 |
57 | Auto completion of Kafka configuration keys and their values.
58 |
59 |
60 |
61 |
62 |
63 | **Configure Clusters, Schema Registries, & More**
64 |
65 | Configure clusters, and their properties will automatically be added to the commands being run.
66 |
67 |
68 |
69 |
70 |
71 | ## Supported Commands
72 | Currently, the following commands are supported:
73 |
74 | * `kafka-topics`
75 | * `kafka-configs`
76 | * `kafka-console-consumer`
77 | * `kafka-console-producer`
78 | * `kafka-avro-console-consumer`
79 | * `kafka-avro-console-producer`
80 | * `kafka-verifiable-consumer`
81 | * `kafka-verifiable-producer`
82 | * `kafka-preferred-replica-election`
83 | * `kafka-replica-verification`
84 | * `kafka-reassign-partitions`
85 | * `kafka-broker-api-versions`
86 | * `kafka-consumer-groups`
87 | * `kafka-delete-records`
88 | * `kafka-log-dirs`
89 | * `kafka-dump-log`
90 | * `kafka-acls`
91 | * `ksql`
92 |
93 | **Helper Commands**
94 |
95 | Currently, kafka-shell has the following helper commands:
96 |
97 | * `exit`: exit the shell
98 | * `clear`: clear the shell
99 | * `cluster-select`: select a cluster
100 | * `cluster-describe`: describe a cluster config
101 |
102 | In-line help for each command and option is shown by default. This can be toggled by `F9`.
103 |
104 | ## Contributing
105 | Contributions are very welcome. See [CONTRIBUTING.md][contributing] for details.
106 |
107 | ## Acknowledgement
108 | This project was inspired by multiple amazing shells & prompts, such as [saws][saws], [kube-shell][kube-shell], [kube-prompt][kube-prompt], [http-prompt][http-prompt], and [wharfee][wharfee]. It was built using [prompt-toolkit][prompt-toolkit]. Much ❤️ to [Apache Kafka][kafka] and [Confluent][confluent] for their helpful CLI tools.
109 |
110 | ## License
111 | Copyright (c) 2019 Shawn Seymour.
112 |
113 | Licensed under the [Apache 2.0 license][license].
114 |
115 | [saws]: https://github.com/donnemartin/saws
116 | [kube-shell]: https://github.com/cloudnativelabs/kube-shell
117 | [kube-prompt]: https://github.com/c-bata/kube-prompt
118 | [http-prompt]: https://github.com/eliangcs/http-prompt
119 | [wharfee]: https://github.com/j-bennet/wharfee
120 | [prompt-toolkit]: https://github.com/prompt-toolkit/python-prompt-toolkit
121 | [kafka]: https://kafka.apache.org
122 | [confluent]: https://www.confluent.io/
123 | [configuration]: CONFIGURATION.md
124 | [contributing]: CONTRIBUTING.md
125 | [license]: LICENSE
126 |
--------------------------------------------------------------------------------
/kafkashell/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from kafkashell import bindings
17 | from kafkashell import completer
17 | from kafkashell import config
18 | from kafkashell import constants
19 | from kafkashell import executor
20 | from kafkashell import helpers
21 | from kafkashell import main
22 | from kafkashell import settings
23 | from kafkashell import style
24 | from kafkashell import toolbar
25 | from kafkashell import version
26 |
27 | name = "kafkashell"
28 |
29 | __all__ = [
30 | "bindings",
31 | "completer",
32 | "config",
33 | "constants",
34 | "executor",
35 | "helpers",
36 | "main",
37 | "settings",
38 | "style",
39 | "toolbar",
40 | "version"
41 | ]
42 |
--------------------------------------------------------------------------------
/kafkashell/bindings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from prompt_toolkit.application import current
17 | from prompt_toolkit.key_binding import KeyBindings
18 | from prompt_toolkit.keys import Keys
19 |
20 |
21 | def get_bindings(settings):
22 | bindings = KeyBindings()
23 |
24 | @bindings.add(Keys.F2)
25 | def _(event):
26 | settings.set_next_cluster()
27 |
28 | @bindings.add(Keys.F3)
29 | def _(event):
30 | settings.set_enable_fuzzy_search(not settings.enable_fuzzy_search)
31 |
32 | @bindings.add(Keys.F9)
33 | def _(event):
34 | settings.set_enable_help(not settings.enable_help)
35 |
36 | @bindings.add(Keys.F10)
37 | def _(event):
38 | current.get_app().exit(exception=EOFError)
39 |
40 | return bindings
41 |
--------------------------------------------------------------------------------
/kafkashell/completer.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import unicode_literals
17 |
18 | from fuzzyfinder import fuzzyfinder
19 | from prompt_toolkit.completion import Completer, Completion
20 |
21 | from kafkashell.config import get_completer
22 | from kafkashell.helpers import exclude_options_from_removal
23 |
24 |
25 | class KafkaCompleter(Completer):
26 |
27 | def __init__(self, settings):
28 | self.settings = settings
29 | self.commands = self.settings.commands
30 |
31 | def get_completions(self, document, complete_event):
32 | word_before_cursor = document.get_word_before_cursor(WORD=True)
33 | text_before_cursor = document.text_before_cursor
34 | text_list = text_before_cursor.split(" ")
35 | command = text_list[0]
36 |
37 | if self.is_not_command(document.text):
38 | option = text_list[-2]
39 | possible_option_value = text_list[-1]
40 | value_completer_name = self.get_completer_name_for_option(command, option)
41 | if self.has_option_value_completer(value_completer_name):
42 | for completion in self.yield_extra_completers(word_before_cursor, document, possible_option_value,
43 | value_completer_name):
44 | yield completion
45 | elif self.is_cluster_command(document.text):
46 | for completion in self.yield_clusters(word_before_cursor):
47 | yield completion
48 | else:
49 | for completion in self.yield_options(word_before_cursor, document):
50 | yield completion
51 | else:
52 | for completion in self.yield_commands(word_before_cursor):
53 | yield completion
54 |
55 | # yields
56 | def yield_commands(self, word_before_cursor):
57 | valid_keys = self.fuzzy(word_before_cursor, self.commands.keys())
58 | descriptions = self.get_command_descriptions()
59 | for key in valid_keys:
60 | display_meta = self.get_display_meta(descriptions, key)
61 | yield Completion(key, start_position=-len(word_before_cursor), display_meta=display_meta)
62 |
63 | def yield_options(self, word_before_cursor, document):
64 | valid_keys = self.get_valid_options(document, word_before_cursor)
65 | descriptions = self.get_option_descriptions(document.text.split(" ")[0]) if len(valid_keys) > 0 else {}
66 | for key in valid_keys:
67 | display_meta = self.get_display_meta(descriptions, key)
68 | yield Completion(key, start_position=-len(word_before_cursor), display_meta=display_meta)
69 |
70 | def yield_extra_completers(self, word_before_cursor, document, possible_option_value, value_completer_name):
71 | value_completer = get_completer(value_completer_name)["values"]
72 |
73 | if self.is_config_value(possible_option_value):
74 | key_value_array = possible_option_value.split("=")
75 | config_completer_name = self.get_completer_name_for_config(value_completer, key_value_array[0])
76 | if config_completer_name is not None:
77 | for completion in self.yield_config_completer(key_value_array[1], document, config_completer_name):
78 | yield completion
79 | else:
80 | for completion in self.yield_option_value_completer(word_before_cursor, document, value_completer_name):
81 | yield completion
82 |
83 | def yield_option_value_completer(self, word_before_cursor, document, value_completer_name):
84 | option_values = get_completer(value_completer_name)["values"]
85 | valid_keys = self.get_valid_option_values(document, word_before_cursor, option_values, value_completer_name)
86 | descriptions = self.get_completer_descriptions(option_values)
87 | for key in valid_keys:
88 | display_meta = self.get_display_meta(descriptions, key)
89 | yield Completion(key, start_position=-len(word_before_cursor), display_meta=display_meta)
90 |
91 | def yield_config_completer(self, word_after_key, document, completer_name):
92 | config_completer = get_completer(completer_name)["values"]
93 | valid_keys = self.get_valid_config_values(document, word_after_key, config_completer)
94 | descriptions = self.get_completer_descriptions(config_completer)
95 | for key in valid_keys:
96 | display_meta = self.get_display_meta(descriptions, key)
97 | yield Completion(key, start_position=-len(word_after_key), display_meta=display_meta)
98 |
99 | def yield_clusters(self, word_before_cursor):
100 | clusters = self.settings.user_config["clusters"].keys()
101 | valid_keys = self.fuzzy(word_before_cursor, clusters)
102 | for key in valid_keys:
103 | yield Completion(u"{0}".format(key), start_position=-len(word_before_cursor), display_meta=None)
104 |
105 | # descriptions
106 | def get_display_meta(self, descriptions, key):
107 | return descriptions.get(key, "") if self.settings.enable_help else None
108 |
109 | def get_command_descriptions(self):
110 | return dict((x, self.commands[x]["description"]) for x in self.commands.keys())
111 |
112 | def get_option_descriptions(self, command):
113 | options_list = self.commands[command]["options"].keys()
114 | return dict((x, self.commands[command]["options"][x]["description"]) for x in options_list)
115 |
116 | @staticmethod
117 | def get_completer_descriptions(completer):
118 | return dict((x, completer[x]["description"]) for x in completer.keys())
119 |
120 | # helpers
121 | def get_valid_options(self, document, word_before_cursor):
122 | split_string = document.text.split(" ")
123 | command = split_string[0]
124 | try:
125 | valid_keys = self.fuzzy(word_before_cursor, self.commands[command]["options"].keys())
126 | modified_command_list = exclude_options_from_removal(split_string)
127 | return [elem for elem in valid_keys if elem not in modified_command_list]
128 | except KeyError:
129 | return []
130 |
131 | def get_valid_option_values(self, document, word_before_cursor, option_values, completer_name):
132 | split_string = document.text.split(" ")
133 | updated_keys = self.exclude_option_value_keys(document, option_values, completer_name)
134 | valid_keys = self.fuzzy(word_before_cursor, updated_keys)
135 | return [elem for elem in valid_keys if elem not in split_string]
136 |
137 | def get_valid_config_values(self, document, word_after_key, config_completer):
138 | split_string = document.text.split(" ")
139 | valid_keys = self.fuzzy(word_after_key, config_completer.keys())
140 | return [elem for elem in valid_keys if elem not in split_string]
141 |
142 | def exclude_option_value_keys(self, document, option_values, completer_name):
143 | if completer_name == "kafka-configs":
144 | if document.text.split(" ", 1)[0] == "kafka-topics":
145 | return self.handle_kafka_topics_configs(option_values)
146 | return self.handle_kafka_configs_completer(document, option_values)
147 | return option_values.keys()
148 |
149 | @staticmethod
150 | def handle_kafka_configs_completer(document, option_values):
151 | for entity_type in ["broker", "topic", "user", "client"]:
152 | if "--entity-type {0}".format(entity_type) in document.text:
153 | return [i for i in option_values.keys() if "{0}s".format(entity_type) in option_values[i]["types"]]
154 | return option_values.keys()
155 |
156 | @staticmethod
157 | def handle_kafka_topics_configs(option_values):
158 | return [i for i in option_values.keys() if "topics" in option_values[i]["types"]]
159 |
160 | def get_completer_name_for_option(self, command, option):
161 | try:
162 | return self.commands[command]["options"][option]["completer"]
163 | except KeyError:
164 | return None
165 |
166 | @staticmethod
167 | def get_completer_name_for_config(config_completer, config):
168 | try:
169 | return config_completer[config]["completer"]
170 | except KeyError:
171 | return None
172 |
173 | def fuzzy(self, word_before_cursor, completion_list):
174 | if self.settings.enable_fuzzy_search:
175 | return fuzzyfinder(word_before_cursor, completion_list)
176 | else:
177 | return [elem for elem in completion_list if elem.startswith(word_before_cursor)]
178 |
179 | @staticmethod
180 | def has_option_value_completer(value_completer_name):
181 | return value_completer_name is not None
182 |
183 | @staticmethod
184 | def is_config_value(value):
185 | return "=" in value
186 |
187 | @staticmethod
188 | def is_cluster_command(text):
189 | split_text = text.split(" ")
190 | return split_text[0].strip() in ["cluster-select", "cluster-describe"] and len(split_text) <= 2
191 |
192 | @staticmethod
193 | def is_not_command(text):
194 | return len(text.split(" ", 1)) > 1
195 |
--------------------------------------------------------------------------------
/kafkashell/config.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import print_function
17 | from __future__ import unicode_literals
18 |
19 | import json
20 | import os
21 | import sys
22 |
23 | import oyaml as yaml
24 | from jsonschema import validate, ValidationError
25 |
26 |
27 | def get_completer(name="completer"):
28 | final_name = name if name == "completer" else "completer-{0}".format(name)
29 | data_dir = os.path.dirname(os.path.realpath(__file__))
30 | data_path = os.path.join(data_dir, "data/{0}.json".format(final_name))
31 | with open(data_path) as f:
32 | return json.load(f)
33 |
34 |
35 | def init_config():
36 | shell_dir = get_kafka_shell_dir()
37 | config_file = get_user_config_path()
38 |
39 | if not os.path.exists(shell_dir):
40 | os.makedirs(shell_dir)
41 |
42 | if not os.path.isfile(config_file):
43 | with open(config_file, "w") as f:
44 | default_config = get_default_config()
45 | save_yaml(default_config, f)
46 |
47 |
48 | def init_history():
49 | history_file = get_user_history_path()
50 |
51 | if not os.path.isfile(history_file):
52 | open(history_file, "a").close()
53 |
54 |
55 | def get_config():
56 | config_file = get_user_config_path()
57 |
58 | with open(config_file) as f:
59 | return yaml.safe_load(f)
60 |
61 |
62 | def validate_config(config):
63 | config_schema = get_config_schema()
64 | try:
65 | validate(instance=config, schema=config_schema)
66 | return config
67 | except ValidationError as ex:
68 | error_type = ", ".join(ex.path) if len(ex.path) > 0 else "root"
69 | print("Invalid user configuration ({0}): {1}".format(error_type, ex.message))
70 | sys.exit(1)
71 |
72 |
73 | def save_config(updated_config):
74 | config_file = get_user_config_path()
75 |
76 | with open(config_file, "w") as f:
77 | save_yaml(updated_config, f)
78 |
79 |
80 | def get_default_config():
81 | data_dir = os.path.dirname(os.path.realpath(__file__))
82 | data_path = os.path.join(data_dir, "data/shell-config.yaml")
83 |
84 | with open(data_path) as f:
85 | return yaml.safe_load(f)
86 |
87 |
88 | def get_config_schema():
89 | data_dir = os.path.dirname(os.path.realpath(__file__))
90 | data_path = os.path.join(data_dir, "data/shell-config.schema")
91 |
92 | with open(data_path) as f:
93 | return json.load(f)
94 |
95 |
96 | def save_yaml(config, f):
97 | yaml.dump(config, f, default_flow_style=False, sort_keys=False)
98 |
99 |
100 | def get_kafka_shell_dir():
101 | return os.path.expanduser("~/.kafka-shell")
102 |
103 |
104 | def get_user_config_path():
105 | return os.path.expanduser("~/.kafka-shell/config.yaml")
106 |
107 |
108 | def get_user_history_path():
109 | return os.path.expanduser("~/.kafka-shell/history")
110 |
--------------------------------------------------------------------------------
/kafkashell/constants.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import unicode_literals
17 |
18 | COMMAND_KAFKA_TOPICS = "kafka-topics"
19 | COMMAND_KAFKA_CONFIGS = "kafka-configs"
20 | COMMAND_KAFKA_CONSOLE_CONSUMER = "kafka-console-consumer"
21 | COMMAND_KAFKA_CONSOLE_PRODUCER = "kafka-console-producer"
22 | COMMAND_KAFKA_AVRO_CONSOLE_CONSUMER = "kafka-avro-console-consumer"
23 | COMMAND_KAFKA_AVRO_CONSOLE_PRODUCER = "kafka-avro-console-producer"
24 | COMMAND_KAFKA_VERIFIABLE_CONSUMER = "kafka-verifiable-consumer"
25 | COMMAND_KAFKA_VERIFIABLE_PRODUCER = "kafka-verifiable-producer"
26 | COMMAND_KAFKA_CONSUMER_GROUPS = "kafka-consumer-groups"
27 | COMMAND_KAFKA_PREFERRED_REPLICA_ELECTION = "kafka-preferred-replica-election"
28 | COMMAND_KAFKA_REPLICA_VERIFICATION = "kafka-replica-verification"
29 | COMMAND_KAFKA_REASSIGN_PARTITIONS = "kafka-reassign-partitions"
30 | COMMAND_KAFKA_BROKER_API_VERSIONS = "kafka-broker-api-versions"
31 | COMMAND_KAFKA_DELETE_RECORDS = "kafka-delete-records"
32 | COMMAND_KAFKA_LOG_DIRS = "kafka-log-dirs"
33 | COMMAND_KAFKA_ACLS = "kafka-acls"
34 | COMMAND_KSQL = "ksql"
35 | COMMAND_ZOOKEEPER_SHELL = "zookeeper-shell"
36 | FLAG_ZOOKEEPER = "--zookeeper"
37 | FLAG_BOOTSTRAP_SERVER = "--bootstrap-server"
38 | FLAG_BROKER_LIST = "--broker-list"
39 | FLAG_SCHEMA_REGISTRY_URL = "--property schema.registry.url"
40 | FLAG_COMMAND_CONFIG = "--command-config"
41 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-acks.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "all": {
4 | "name": "all",
5 | "description": "The leader will wait for the full set of in-sync replicas to acknowledge the record."
6 | },
7 | "-1": {
8 | "name": "-1",
9 | "description": "The leader will wait for the full set of in-sync replicas to acknowledge the record."
10 | },
11 | "0": {
12 | "name": "0",
13 | "description": "The producer will not wait for any acknowledgment from the server at all."
14 | },
15 | "1": {
16 | "name": "1",
17 | "description": "The leader will write the record to itself but will not await acknowledgement from replicas."
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-acks.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for kafka ack values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of ack values.",
12 | "patternProperties": {
13 | "^(all|-1|0|1)$": {
14 | "type": "object",
15 | "title": "Ack Value",
16 | "description": "An ack value.",
17 | "required": [
18 | "name",
19 | "description"
20 | ],
21 | "properties": {
22 | "name": {
23 | "type": "string",
24 | "title": "Name",
25 | "description": "The ack value.",
26 | "examples": [
27 | "-1",
28 | "0",
29 | "all"
30 | ],
31 | "enum": [
32 | "all",
33 | "-1",
34 | "0",
35 | "1"
36 | ]
37 | },
38 | "description": {
39 | "type": "string",
40 | "title": "Description",
41 | "description": "The description of the ack value. This is shown when inline help is enabled.",
42 | "default": "",
43 | "examples": [
44 | "The producer will not wait for any acknowledgment from the server at all."
45 | ],
46 | "pattern": "^[^\\s].+[.][^\\s]*$"
47 | }
48 | },
49 | "additionalProperties": false
50 | }
51 | },
52 | "additionalProperties": false
53 | }
54 | },
55 | "additionalProperties": false
56 | }
57 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-booleans.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "true": {
4 | "name": "true",
5 | "description": "Sets the value to truthy."
6 | },
7 | "false": {
8 | "name": "false",
9 | "description": "Sets the value to falsy."
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-booleans.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for boolean values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Booleans",
11 | "description": "The list of boolean values.",
12 | "required": [
13 | "true",
14 | "false"
15 | ],
16 | "patternProperties": {
17 | "^(true|false)$": {
18 | "type": "object",
19 | "title": "Boolean",
20 | "description": "A boolean.",
21 | "required": [
22 | "name",
23 | "description"
24 | ],
25 | "properties": {
26 | "name": {
27 | "type": "string",
28 | "title": "Name",
29 | "description": "The name of the boolean.",
30 | "default": "",
31 | "examples": [
32 | "true",
33 | "false"
34 | ],
35 | "enum": [
36 | "true",
37 | "false"
38 | ]
39 | },
40 | "description": {
41 | "type": "string",
42 | "title": "Description",
43 | "description": "The description of the boolean. This is shown when inline help is enabled.",
44 | "default": "",
45 | "examples": [
46 | "Sets the value to truthy."
47 | ],
48 | "pattern": "^[^\\s].+[.][^\\s]*$"
49 | }
50 | },
51 | "additionalProperties": false
52 | }
53 | },
54 | "additionalProperties": false
55 | }
56 | },
57 | "additionalProperties": false
58 | }
59 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-cleanup-policy.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "delete": {
4 | "name": "delete",
5 | "description": "Discard old segments when their retention time or size limit has been reached."
6 | },
7 | "compact": {
8 | "name": "compact",
9 | "description": "Enable log compaction which retains at least the last message for each key for a single topic partition."
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-cleanup-policy.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for cleanup.policy values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of cleanup policy values.",
12 | "required": [
13 | "compact",
14 | "delete"
15 | ],
16 | "patternProperties": {
17 | "^(compact|delete)$": {
18 | "type": "object",
19 | "title": "Cleanup Policy",
20 | "description": "A cleanup policy.",
21 | "required": [
22 | "name",
23 | "description"
24 | ],
25 | "properties": {
26 | "name": {
27 | "type": "string",
28 | "title": "Name",
29 | "description": "The name of the cleanup policy.",
30 | "default": "delete",
31 | "examples": [
32 | "delete",
33 | "compact"
34 | ],
35 | "enum": [
36 | "delete",
37 | "compact"
38 | ]
39 | },
40 | "description": {
41 | "type": "string",
42 | "title": "Description",
43 | "description": "The description of the cleanup policy. This is shown when inline help is enabled.",
44 | "default": "",
45 | "examples": [
46 | "Discard old segments when their retention time or size limit has been reached."
47 | ],
48 | "pattern": "^[^\\s].+[.][^\\s]*$"
49 | }
50 | },
51 | "additionalProperties": false
52 | }
53 | },
54 | "additionalProperties": false
55 | }
56 | },
57 | "additionalProperties": false
58 | }
59 |
--------------------------------------------------------------------------------
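
Every schema in this directory constrains descriptions with the same regex, ^[^\s].+[.][^\s]*$: a non-whitespace first character, at least one period, and no whitespace after that period. A quick illustration (plain re, not project code):

import re

DESCRIPTION_PATTERN = re.compile(r"^[^\s].+[.][^\s]*$")

print(bool(DESCRIPTION_PATTERN.match("Discard old segments.")))  # True
print(bool(DESCRIPTION_PATTERN.match(" leading space fails.")))  # False
print(bool(DESCRIPTION_PATTERN.match("no trailing period")))     # False
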
/kafkashell/data/completer-compression-codecs.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "none": {
4 | "name": "none",
5 | "description": "Use no compression codec."
6 | },
7 | "gzip": {
8 | "name": "gzip",
9 | "description": "Use the gzip compression codec."
10 | },
11 | "snappy": {
12 | "name": "snappy",
13 | "description": "Use the snappy compression codec."
14 | },
15 | "lz4": {
16 | "name": "lz4",
17 | "description": "Use the lz4 compression codec."
18 | },
19 | "zstd": {
20 | "name": "zstd",
21 | "description": "Use the zstd compression codec."
22 | }
23 | }
24 | }
25 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-compression-codecs.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for compression codec values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of compression codecs.",
12 | "required": [
13 | "none",
14 | "gzip",
15 | "snappy",
16 | "lz4",
17 | "zstd"
18 | ],
19 | "patternProperties": {
20 | "^(none|gzip|snappy|lz4|zstd)$": {
21 | "type": "object",
22 | "title": "Compression Codec",
23 | "description": "A compression codec.",
24 | "required": [
25 | "name",
26 | "description"
27 | ],
28 | "properties": {
29 | "name": {
30 | "type": "string",
31 | "title": "Name",
32 | "description": "The name of the compression codec.",
33 | "default": "gzip",
34 | "examples": [
35 | "gzip",
36 | "none"
37 | ],
38 | "enum": [
39 | "none",
40 | "gzip",
41 | "snappy",
42 | "lz4",
43 | "zstd"
44 | ]
45 | },
46 | "description": {
47 | "type": "string",
48 | "title": "Description",
49 | "description": "The description of the compression codec. This is shown when inline help is enabled.",
50 | "default": "",
51 | "examples": [
52 | "Use the gzip compression codec."
53 | ],
54 | "pattern": "^[^\\s].+[.][^\\s]*$"
55 | }
56 | },
57 | "additionalProperties": false
58 | }
59 | },
60 | "additionalProperties": false
61 | }
62 | },
63 | "additionalProperties": false
64 | }
65 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-entity-types.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "topic": {
4 | "name": "topic",
5 | "description": "Add, modify, or remove configurations for a topic."
6 | },
7 | "broker": {
8 | "name": "broker",
9 | "description": "Add, modify, or remove configurations for a broker."
10 | },
11 | "client": {
12 | "name": "client",
13 | "description": "Add, modify, or remove configurations for a client."
14 | },
15 | "user": {
16 | "name": "user",
17 | "description": "Add, modify, or remove configurations for a user."
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-entity-types.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for kafka config entity type values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of entity type values.",
12 | "patternProperties": {
13 | "^(topic|broker|client|user)$": {
14 | "type": "object",
15 | "title": "Entity Type",
16 | "description": "An entity type value",
17 | "required": [
18 | "name",
19 | "description"
20 | ],
21 | "properties": {
22 | "name": {
23 | "type": "string",
24 | "title": "Name",
25 | "description": "The name of the entity type.",
26 | "default": "",
27 | "examples": [
28 | "topic",
29 | "broker"
30 | ],
31 | "enum": [
32 | "topic",
33 | "broker",
34 | "user",
35 | "client"
36 | ]
37 | },
38 | "description": {
39 | "type": "string",
40 | "title": "Description",
41 | "description": "The description of the entity type. This is shown when inline help is enabled.",
42 | "default": "",
43 | "examples": [
44 | "Add, modify, or remove configurations for a topic."
45 | ],
46 | "pattern": "^[^\\s].+[.][^\\s]*$"
47 | }
48 | },
49 | "additionalProperties": false
50 | }
51 | },
52 | "additionalProperties": false
53 | }
54 | },
55 | "additionalProperties": false
56 | }
57 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-kafka-configs.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "advertised.listeners": {
4 | "name": "advertised.listeners",
5 | "description": "Listeners to publish to ZooKeeper for clients to use.",
6 | "types": [
7 | "brokers"
8 | ]
9 | },
10 | "background.threads": {
11 | "name": "background.threads",
12 | "description": "The number of threads to use for various background processing tasks.",
13 | "types": [
14 | "brokers"
15 | ]
16 | },
17 | "cleanup.policy": {
18 | "name": "cleanup.policy",
19 | "description": "Designate the retention policy to use on old log segments.",
20 | "types": [
21 | "topics"
22 | ],
23 | "completer": "cleanup-policy"
24 | },
25 | "compression.type": {
26 | "name": "compression.type",
27 | "description": "Specify the final compression type for a given topic.",
28 | "types": [
29 | "brokers",
30 | "topics"
31 | ],
32 | "completer": "compression-codecs"
33 | },
34 | "consumer_byte_rate": {
35 | "name": "consumer_byte_rate",
36 | "description": "Specify the consumer byte rate quota.",
37 | "types": [
38 | "clients",
39 | "users"
40 | ]
41 | },
42 | "delete.retention.ms": {
43 | "name": "delete.retention.ms",
44 | "description": "The amount of time to retain delete tombstone markers for log compacted topics.",
45 | "types": [
46 | "topics"
47 | ]
48 | },
49 | "file.delete.delay.ms": {
50 | "name": "file.delete.delay.ms",
51 | "description": "The time to wait before deleting a file from the filesystem.",
52 | "types": [
53 | "topics"
54 | ]
55 | },
56 | "flush.messages": {
57 | "name": "flush.messages",
58 | "description": "Specify an interval at which we will force an fsync of data written to the log.",
59 | "types": [
60 | "topics"
61 | ]
62 | },
63 | "flush.ms": {
64 | "name": "flush.ms",
65 | "description": "Specify a time interval at which we will force an fsync of data written to the log.",
66 | "types": [
67 | "topics"
68 | ]
69 | },
70 | "follower.replication.throttled.rate": {
71 | "name": "follower.replication.throttled.rate",
72 | "description": "Throttle replication rate for followers.",
73 | "types": [
74 | "brokers"
75 | ]
76 | },
77 | "follower.replication.throttled.replicas": {
78 | "name": "follower.replication.throttled.replicas",
79 | "description": "Throttle replication rate for specific replicas for followers.",
80 | "types": [
81 | "topics"
82 | ]
83 | },
84 | "index.interval.bytes": {
85 | "name": "index.interval.bytes",
86 | "description": "Specify how frequently Kafka adds an index entry to its offset index.",
87 | "types": [
88 | "topics"
89 | ]
90 | },
91 | "leader.replication.throttled.rate": {
92 | "name": "leader.replication.throttled.rate",
93 | "description": "Throttle replication rate for leaders.",
94 | "types": [
95 | "brokers"
96 | ]
97 | },
98 | "leader.replication.throttled.replicas": {
99 | "name": "leader.replication.throttled.replicas",
100 | "description": "Throttle replication rate for specific replicas for leaders.",
101 | "types": [
102 | "topics"
103 | ]
104 | },
105 | "listener.security.protocol.map": {
106 | "name": "listener.security.protocol.map",
107 | "description": "Map between listener names and security protocols.",
108 | "types": [
109 | "brokers"
110 | ]
111 | },
112 | "listeners": {
113 | "name": "listeners",
114 | "description": "List of comma-separated URIs the REST API will listen on.",
115 | "types": [
116 | "brokers"
117 | ]
118 | },
119 | "log.cleaner.backoff.ms": {
120 | "name": "log.cleaner.backoff.ms",
121 | "description": "The amount of time to sleep when there are no logs to clean.",
122 | "types": [
123 | "brokers"
124 | ]
125 | },
126 | "log.cleaner.dedupe.buffer.size": {
127 | "name": "log.cleaner.dedupe.buffer.size",
128 | "description": "The total memory used for log deduplication across all cleaner threads.",
129 | "types": [
130 | "brokers"
131 | ]
132 | },
133 | "log.cleaner.delete.retention.ms": {
134 | "name": "log.cleaner.delete.retention.ms",
135 | "description": "Specify how long delete records are retained.",
136 | "types": [
137 | "brokers"
138 | ]
139 | },
140 | "log.cleaner.io.buffer.load.factor": {
141 | "name": "log.cleaner.io.buffer.load.factor",
142 | "description": "Specify the log cleaner dedupe buffer load factor.",
143 | "types": [
144 | "brokers"
145 | ]
146 | },
147 | "log.cleaner.io.buffer.size": {
148 | "name": "log.cleaner.io.buffer.size",
149 | "description": "The total memory used for log cleaner I/O buffers across all cleaner threads.",
150 | "types": [
151 | "brokers"
152 | ]
153 | },
154 | "log.cleaner.io.max.bytes.per.second": {
155 | "name": "log.cleaner.io.max.bytes.per.second",
156 | "description": "Throttle the log cleaner's max bytes per second to be less than this value.",
157 | "types": [
158 | "brokers"
159 | ]
160 | },
161 | "log.cleaner.min.cleanable.ratio": {
162 | "name": "log.cleaner.min.cleanable.ratio",
163 | "description": "The minimum ratio of dirty log to total log for a log to eligible for cleaning.",
164 | "types": [
165 | "brokers"
166 | ]
167 | },
168 | "log.cleaner.min.compaction.lag.ms": {
169 | "name": "log.cleaner.min.compaction.lag.ms",
170 | "description": "The minimum time a message in a compacted topic will remain uncompacted in the log.",
171 | "types": [
172 | "brokers"
173 | ]
174 | },
175 | "log.cleaner.threads": {
176 | "name": "log.cleaner.threads",
177 | "description": "The number of background threads to use for log cleaning.",
178 | "types": [
179 | "brokers"
180 | ]
181 | },
182 | "log.cleanup.policy": {
183 | "name": "log.cleanup.policy",
184 | "description": "The default cleanup policy for segments beyond the retention window.",
185 | "types": [
186 | "brokers"
187 | ],
188 | "completer": "cleanup-policy"
189 | },
190 | "log.flush.interval.messages": {
191 | "name": "log.flush.interval.messages",
192 | "description": "The number of messages accumulated on a log partition before messages are flushed to disk.",
193 | "types": [
194 | "brokers"
195 | ]
196 | },
197 | "log.flush.interval.ms": {
198 | "name": "log.flush.interval.ms",
199 | "description": "The maximum time in ms that a message in any topic is kept in memory before flushed to disk.",
200 | "types": [
201 | "brokers"
202 | ]
203 | },
204 | "log.index.interval.bytes": {
205 | "name": "log.index.interval.bytes",
206 | "description": "The interval with which we add an entry to the offset index.",
207 | "types": [
208 | "brokers"
209 | ]
210 | },
211 | "log.index.size.max.bytes": {
212 | "name": "log.index.size.max.bytes",
213 | "description": "The maximum size in bytes of the offset index.",
214 | "types": [
215 | "brokers"
216 | ]
217 | },
218 | "log.message.downconversion.enable": {
219 | "name": "log.message.downconversion.enable",
220 | "description": "Speicy whether down-conversion of message formats is enabled to satisfy consume requests.",
221 | "types": [
222 | "brokers"
223 | ],
224 | "completer": "booleans"
225 | },
226 | "log.message.timestamp.difference.max.ms": {
227 | "name": "log.message.timestamp.difference.max.ms",
228 | "description": "The max difference allowed between the timestamp when a broker receives a message and the timestamp in the message.",
229 | "types": [
230 | "brokers"
231 | ]
232 | },
233 | "log.message.timestamp.type": {
234 | "name": "log.message.timestamp.type",
235 | "description": "Specify whether the timestamp in the message is message create time or log append time.",
236 | "types": [
237 | "brokers"
238 | ],
239 | "completer": "timestamp-types"
240 | },
241 | "log.preallocate": {
242 | "name": "log.preallocate",
243 | "description": "Specify whether Kafka should pre allocate files when creating new segment.",
244 | "types": [
245 | "brokers"
246 | ],
247 | "completer": "booleans"
248 | },
249 | "log.retention.bytes": {
250 | "name": "log.retention.bytes",
251 | "description": "The maximum size of the log before deleting it.",
252 | "types": [
253 | "brokers"
254 | ]
255 | },
256 | "log.retention.ms": {
257 | "name": "log.retention.ms",
258 | "description": "The number of milliseconds to keep a log file before deleting it.",
259 | "types": [
260 | "brokers"
261 | ]
262 | },
263 | "log.roll.jitter.ms": {
264 | "name": "log.roll.jitter.ms",
265 | "description": "The maximum jitter to subtract from logRollTimeMillis.",
266 | "types": [
267 | "brokers"
268 | ]
269 | },
270 | "log.roll.ms": {
271 | "name": "log.roll.ms",
272 | "description": "The maximum time before a new log segment is rolled out.",
273 | "types": [
274 | "brokers"
275 | ]
276 | },
277 | "log.segment.bytes": {
278 | "name": "log.segment.bytes",
279 | "description": "The maximum size of a single log file.",
280 | "types": [
281 | "brokers"
282 | ]
283 | },
284 | "log.segment.delete.delay.ms": {
285 | "name": "log.segment.delete.delay.ms",
286 | "description": "The amount of time to wait before deleting a file from the filesystem.",
287 | "types": [
288 | "brokers"
289 | ]
290 | },
291 | "max.connections.per.ip": {
292 | "name": "max.connections.per.ip",
293 | "description": "The maximum number of connections we allow from each ip address.",
294 | "types": [
295 | "brokers"
296 | ]
297 | },
298 | "max.connections.per.ip.overrides": {
299 | "name": "max.connections.per.ip.overrides",
300 | "description": "A comma-separated list of per-ip overrides to the default maximum number of connections.",
301 | "types": [
302 | "brokers"
303 | ]
304 | },
305 | "max.message.bytes": {
306 | "name": "max.message.bytes",
307 | "description": "The largest record batch size allowed by Kafka.",
308 | "types": [
309 | "topics"
310 | ]
311 | },
312 | "message.downconversion.enable": {
313 | "name": "message.downconversion.enable",
314 | "description": "Specify whether down-conversion of message formats is enabled to satisfy consume requests.",
315 | "types": [
316 | "topics"
317 | ],
318 | "completer": "booleans"
319 | },
320 | "message.format.version": {
321 | "name": "message.format.version",
322 | "description": "Specify the message format version the broker will use to append messages to the logs.",
323 | "types": [
324 | "topics"
325 | ]
326 | },
327 | "message.max.bytes": {
328 | "name": "message.max.bytes",
329 | "description": "The largest record batch size allowed by Kafka.",
330 | "types": [
331 | "brokers"
332 | ]
333 | },
334 | "message.timestamp.difference.max.ms": {
335 | "name": "message.timestamp.difference.max.ms",
336 | "description": "The max difference allowed between the timestamp when a broker receives a message and the timestamp in the message.",
337 | "types": [
338 | "topics"
339 | ]
340 | },
341 | "message.timestamp.type": {
342 | "name": "message.timestamp.type",
343 | "description": "Specify whether the timestamp in the message is message create time or log append time.",
344 | "types": [
345 | "topics"
346 | ],
347 | "completer": "timestamp-types"
348 | },
349 | "metric.reporters": {
350 | "name": "metric.reporters",
351 | "description": "A list of classes to use as metrics reporters.",
352 | "types": [
353 | "brokers"
354 | ]
355 | },
356 | "min.cleanable.dirty.ratio": {
357 | "name": "min.cleanable.dirty.ratio",
358 | "description": "Specify how frequently the log compactor will attempt to clean the log.",
359 | "types": [
360 | "topics"
361 | ]
362 | },
363 | "min.compaction.lag.ms": {
364 | "name": "min.compaction.lag.ms",
365 | "description": "The minimum time a message will remain uncompacted in the log.",
366 | "types": [
367 | "topics"
368 | ]
369 | },
370 | "min.insync.replicas": {
371 | "name": "min.insync.replicas",
372 | "description": "Minimum number of replicas that must acknowledge a write for the write to be considered successful.",
373 | "types": [
374 | "brokers",
375 | "topics"
376 | ]
377 | },
378 | "num.io.threads": {
379 | "name": "num.io.threads",
380 | "description": "The number of threads that the server uses for processing requests.",
381 | "types": [
382 | "brokers"
383 | ]
384 | },
385 | "num.network.threads": {
386 | "name": "num.network.threads",
387 | "description": "The number of threads that are used for sending and receiving requests.",
388 | "types": [
389 | "brokers"
390 | ]
391 | },
392 | "num.recovery.threads.per.data.dir": {
393 | "name": "num.recovery.threads.per.data.dir",
394 | "description": "The number of threads per data directory used for log recovery at startup and flushing at shutdown.",
395 | "types": [
396 | "brokers"
397 | ]
398 | },
399 | "num.replica.fetchers": {
400 | "name": "num.replica.fetchers",
401 | "description": "Number of fetcher threads used to replicate messages from a source broker.",
402 | "types": [
403 | "brokers"
404 | ]
405 | },
406 | "preallocate": {
407 | "name": "preallocate",
408 | "description": "Specify whether Kafka should pre allocate files when creating new segment.",
409 | "types": [
410 | "topics"
411 | ],
412 | "completer": "booleans"
413 | },
414 | "principal.builder.class": {
415 | "name": "principal.builder.class",
416 | "description": "The fully qualified name of a class used to build the KafkaPrincipal object for authorization.",
417 | "types": [
418 | "brokers"
419 | ]
420 | },
421 | "producer_byte_rate": {
422 | "name": "producer_byte_rate",
423 | "description": "Specify the producer byte rate quota.",
424 | "types": [
425 | "clients",
426 | "users"
427 | ]
428 | },
429 | "request_percentage": {
430 | "name": "request_percentage",
431 | "description": "The percentage per quota window, above which the request may be throttled.",
432 | "types": [
433 | "clients",
434 | "users"
435 | ]
436 | },
437 | "retention.bytes": {
438 | "name": "retention.bytes",
439 | "description": "The maximum size a partition can grow before old log segments will be discarded.",
440 | "types": [
441 | "topics"
442 | ]
443 | },
444 | "retention.ms": {
445 | "name": "retention.ms",
446 | "description": "The maximum time a log will be retained old log segments will be discarded.",
447 | "types": [
448 | "topics"
449 | ]
450 | },
451 | "sasl.enabled.mechanisms": {
452 | "name": "sasl.enabled.mechanisms",
453 | "description": "The list of SASL mechanisms enabled for the Kafka server.",
454 | "types": [
455 | "brokers"
456 | ]
457 | },
458 | "sasl.jaas.config": {
459 | "name": "sasl.jaas.config",
460 | "description": "JAAS login context parameters for SASL connections in the format used by JAAS config files.",
461 | "types": [
462 | "brokers"
463 | ]
464 | },
465 | "sasl.kerberos.kinit.cmd": {
466 | "name": "sasl.kerberos.kinit.cmd",
467 | "description": "Kerberos kinit command path.",
468 | "types": [
469 | "brokers"
470 | ]
471 | },
472 | "sasl.kerberos.min.time.before.relogin": {
473 | "name": "sasl.kerberos.min.time.before.relogin",
474 | "description": "Login thread sleep time between refresh attempts.",
475 | "types": [
476 | "brokers"
477 | ]
478 | },
479 | "sasl.kerberos.principal.to.local.rules": {
480 | "name": "sasl.kerberos.principal.to.local.rules",
481 | "description": "A list of rules for mapping from principal names to short names.",
482 | "types": [
483 | "brokers"
484 | ]
485 | },
486 | "sasl.kerberos.service.name": {
487 | "name": "sasl.kerberos.service.name",
488 | "description": "The Kerberos principal name that Kafka runs as.",
489 | "types": [
490 | "brokers"
491 | ]
492 | },
493 | "sasl.kerberos.ticket.renew.jitter": {
494 | "name": "sasl.kerberos.ticket.renew.jitter",
495 | "description": "Percentage of random jitter added to the renewal time.",
496 | "types": [
497 | "brokers"
498 | ]
499 | },
500 | "sasl.kerberos.ticket.renew.window.factor": {
501 | "name": "sasl.kerberos.ticket.renew.window.factor",
502 | "description": "Login thread will sleep until the specified window factor has been reached.",
503 | "types": [
504 | "brokers"
505 | ]
506 | },
507 | "sasl.login.refresh.buffer.seconds": {
508 | "name": "sasl.login.refresh.buffer.seconds",
509 | "description": "The amount of buffer time before credential expiration to maintain when refreshing a credential.",
510 | "types": [
511 | "brokers"
512 | ]
513 | },
514 | "sasl.login.refresh.min.period.seconds": {
515 | "name": "sasl.login.refresh.min.period.seconds",
516 | "description": "The minimum time for the login refresh thread to wait before refreshing a credential.",
517 | "types": [
518 | "brokers"
519 | ]
520 | },
521 | "sasl.login.refresh.window.factor": {
522 | "name": "sasl.login.refresh.window.factor",
523 | "description": "Login refresh thread will sleep until the specified window factor has been reached.",
524 | "types": [
525 | "brokers"
526 | ]
527 | },
528 | "sasl.login.refresh.window.jitter": {
529 | "name": "sasl.login.refresh.window.jitter",
530 | "description": "The maximum amount of random jitter that is added to the login refresh thread's sleep time.",
531 | "types": [
532 | "brokers"
533 | ]
534 | },
535 | "sasl.mechanism.inter.broker.protocol": {
536 | "name": "sasl.mechanism.inter.broker.protocol",
537 | "description": "SASL mechanism used for inter-broker communication.",
538 | "types": [
539 | "brokers"
540 | ]
541 | },
542 | "SCRAM-SHA-256": {
543 | "name": "SCRAM-SHA-256",
544 | "description": "SCRAM-SHA-256 SASL mechanism.",
545 | "types": [
546 | "users"
547 | ]
548 | },
549 | "SCRAM-SHA-512": {
550 | "name": "SCRAM-SHA-512",
551 | "description": "SCRAM-SHA-512 SASL mechanism.",
552 | "types": [
553 | "users"
554 | ]
555 | },
556 | "segment.bytes": {
557 | "name": "segment.bytes",
558 | "description": "The maximum size of a single log file.",
559 | "types": [
560 | "topics"
561 | ]
562 | },
563 | "segment.index.bytes": {
564 | "name": "segment.index.bytes",
565 | "description": "The size of the index that maps offsets to file positions.",
566 | "types": [
567 | "topics"
568 | ]
569 | },
570 | "segment.jitter.ms": {
571 | "name": "segment.jitter.ms",
572 | "description": "The maximum random jitter subtracted from the scheduled segment roll time.",
573 | "types": [
574 | "topics"
575 | ]
576 | },
577 | "segment.ms": {
578 | "name": "segment.ms",
579 | "description": "The period of time after which Kafka will force the log to roll even if the segment file isn't full.",
580 | "types": [
581 | "topics"
582 | ]
583 | },
584 | "ssl.cipher.suites": {
585 | "name": "ssl.cipher.suites",
586 | "description": "A list of cipher suites.",
587 | "types": [
588 | "brokers"
589 | ]
590 | },
591 | "ssl.client.auth": {
592 | "name": "ssl.client.auth",
593 | "description": "Configures Kafka to request client authentication.",
594 | "types": [
595 | "brokers"
596 | ]
597 | },
598 | "ssl.enabled.protocols": {
599 | "name": "ssl.enabled.protocols",
600 | "description": "The list of protocols enabled for SSL connections.",
601 | "types": [
602 | "brokers"
603 | ]
604 | },
605 | "ssl.endpoint.identification.algorithm": {
606 | "name": "ssl.endpoint.identification.algorithm",
607 | "description": "The endpoint identification algorithm to validate server hostname using server certificate.",
608 | "types": [
609 | "brokers"
610 | ]
611 | },
612 | "ssl.key.password": {
613 | "name": "ssl.key.password",
614 | "description": "The password of the private key in the key store file.",
615 | "types": [
616 | "brokers"
617 | ]
618 | },
619 | "ssl.keymanager.algorithm": {
620 | "name": "ssl.keymanager.algorithm",
621 | "description": "The algorithm used by key manager factory for SSL connections.",
622 | "types": [
623 | "brokers"
624 | ]
625 | },
626 | "ssl.keystore.location": {
627 | "name": "ssl.keystore.location",
628 | "description": "The location of the key store file.",
629 | "types": [
630 | "brokers"
631 | ]
632 | },
633 | "ssl.keystore.password": {
634 | "name": "ssl.keystore.password",
635 | "description": "The store password for the key store file.",
636 | "types": [
637 | "brokers"
638 | ]
639 | },
640 | "ssl.keystore.type": {
641 | "name": "ssl.keystore.type",
642 | "description": "The file format of the key store file.",
643 | "types": [
644 | "brokers"
645 | ]
646 | },
647 | "ssl.protocol": {
648 | "name": "ssl.protocol",
649 | "description": "The SSL protocol used to generate the SSLContext.",
650 | "types": [
651 | "brokers"
652 | ]
653 | },
654 | "ssl.provider": {
655 | "name": "ssl.provider",
656 | "description": "The name of the security provider used for SSL connections.",
657 | "types": [
658 | "brokers"
659 | ]
660 | },
661 | "ssl.secure.random.implementation": {
662 | "name": "ssl.secure.random.implementation",
663 | "description": "The SecureRandom PRNG implementation to use for SSL cryptography operations.",
664 | "types": [
665 | "brokers"
666 | ]
667 | },
668 | "ssl.trustmanager.algorithm": {
669 | "name": "ssl.trustmanager.algorithm",
670 | "description": "The algorithm used by trust manager factory for SSL connections.",
671 | "types": [
672 | "brokers"
673 | ]
674 | },
675 | "ssl.truststore.location": {
676 | "name": "ssl.truststore.location",
677 | "description": "The location of the trust store file.",
678 | "types": [
679 | "brokers"
680 | ]
681 | },
682 | "ssl.truststore.password": {
683 | "name": "ssl.truststore.password",
684 | "description": "The password for the trust store file.",
685 | "types": [
686 | "brokers"
687 | ]
688 | },
689 | "ssl.truststore.type": {
690 | "name": "ssl.truststore.type",
691 | "description": "The file format of the trust store file.",
692 | "types": [
693 | "brokers"
694 | ]
695 | },
696 | "unclean.leader.election.enable": {
697 | "name": "unclean.leader.election.enable",
698 | "description": "Enable replicas not in the ISR set to be elected as leader (DATA LOSS CAN OCCUR).",
699 | "types": [
700 | "brokers",
701 | "topics"
702 | ],
703 | "completer": "booleans"
704 | }
705 | }
706 | }
707 |
--------------------------------------------------------------------------------
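
The "types" array scopes each key to the entity types it applies to, and the optional "completer" field chains to one of the value completers above (booleans, cleanup-policy, compression-codecs, timestamp-types). A hypothetical helper (not the project's actual API) showing how those fields could drive context-aware suggestions:

import json


def configs_for_entity(path, entity_type):
    """Return the config names tagged with one entity type, e.g. 'topics'."""
    with open(path) as f:
        values = json.load(f)["values"]
    return sorted(name for name, meta in values.items()
                  if entity_type in meta["types"])


topic_configs = configs_for_entity("kafkashell/data/completer-kafka-configs.json", "topics")
print(topic_configs[:3])  # e.g. ['cleanup.policy', 'compression.type', 'delete.retention.ms']
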
/kafkashell/data/completer-kafka-configs.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for kafka configuration values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of kafka configuration keys.",
12 | "patternProperties": {
13 | "^(.*)$": {
14 | "type": "object",
15 | "title": "Kafka Configuration",
16 | "description": "A kafka configuration key.",
17 | "required": [
18 | "name",
19 | "description",
20 | "types"
21 | ],
22 | "properties": {
23 | "name": {
24 | "type": "string",
25 | "title": "Name",
26 | "description": "The name (key) of the Kafka configuration. This should match the object's key.",
27 | "default": "",
28 | "examples": [
29 | "min.insync.replicas",
30 | "cleanup.policy"
31 | ],
32 | "pattern": "^[a-zA-Z]+((\\.|\\_|\\-)[a-zA-Z0-9]+)*$"
33 | },
34 | "description": {
35 | "type": "string",
36 | "title": "Description",
37 | "description": "The description of the configuration. This is shown when inline help is enabled.",
38 | "default": "",
39 | "examples": [
40 | "Minimum number of replicas that must acknowledge a write for the write to be considered successful.",
41 | "A string that is either 'delete' or 'compact'."
42 | ],
43 | "pattern": "^[^\\s].+[.][^\\s]*$"
44 | },
45 | "types": {
46 | "type": "array",
47 | "title": "Types",
48 | "description": "The entity type(s) this configuration can be used for (i.e. brokers, topics).",
49 | "default": [],
50 | "examples": [
51 | "brokers",
52 | "topics"
53 | ],
54 | "items": {
55 | "type": "string",
56 | "enum": [
57 | "brokers",
58 | "topics",
59 | "users",
60 | "clients"
61 | ]
62 | },
63 | "uniqueItems": true,
64 | "minItems": 1,
65 | "maxItems": 4
66 | },
67 | "completer": {
68 | "type": "string",
69 | "title": "Completer",
70 | "description": "The completer JSON file to use for auto completion of values for this config.",
71 | "default": "",
72 | "examples": [
73 | "cleanup-policy"
74 | ],
75 | "enum": [
76 | "cleanup-policy",
77 | "compression-codecs",
78 | "timestamp-types",
79 | "booleans"
80 | ]
81 | }
82 | },
83 | "additionalProperties": false
84 | }
85 | },
86 | "additionalProperties": false
87 | }
88 | },
89 | "additionalProperties": false
90 | }
91 |
--------------------------------------------------------------------------------
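
One invariant this schema cannot express is that each entry's "name" must equal its object key ("This should match the object's key"). A standalone sketch of that check, separate from any code in the repo:

import json

with open("kafkashell/data/completer-kafka-configs.json") as f:
    values = json.load(f)["values"]

# patternProperties validates each value, but only code can compare key to name.
mismatched = [key for key, meta in values.items() if meta["name"] != key]
assert not mismatched, "key/name mismatch for: {0}".format(mismatched)
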
/kafkashell/data/completer-ksql-output.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "TABULAR": {
4 | "name": "TABULAR",
5 | "description": "Output in TABULAR format."
6 | },
7 | "JSON": {
8 | "name": "JSON",
9 | "description": "Output in JSON format."
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-ksql-output.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for KSQL output values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of KSQL output values.",
12 | "patternProperties": {
13 | "^(TABULAR|JSON)$": {
14 | "type": "object",
15 | "title": "Output Value",
16 | "description": "An output value",
17 | "required": [
18 | "name",
19 | "description"
20 | ],
21 | "properties": {
22 | "name": {
23 | "type": "string",
24 | "title": "Name",
25 | "description": "The name of the output value.",
26 | "default": "TABULAR",
27 | "examples": [
28 | "TABULAR",
29 | "JSON"
30 | ],
31 | "enum": [
32 | "TABULAR",
33 | "JSON"
34 | ]
35 | },
36 | "description": {
37 | "type": "string",
38 | "title": "Description",
39 | "description": "The description of the KSQL output value. This is shown when inline help is enabled.",
40 | "default": "",
41 | "examples": [
42 | "Output in JSON format."
43 | ],
44 | "pattern": "^[^\\s].+[.][^\\s]*$"
45 | }
46 | },
47 | "additionalProperties": false
48 | }
49 | },
50 | "additionalProperties": false
51 | }
52 | },
53 | "additionalProperties": false
54 | }
55 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-reset-policies.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "earliest": {
4 | "name": "earliest",
5 | "description": "Reset from the earliest offset."
6 | },
7 | "latest": {
8 | "name": "latest",
9 | "description": "Reset from the latest offset."
10 | },
11 | "none": {
12 | "name": "none",
13 | "description": "No reset policy."
14 | }
15 | }
16 | }
17 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-reset-policies.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for kafka reset policy values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of kafka configuration keys.",
12 | "patternProperties": {
13 | "^(earliest|latest|none)$": {
14 | "type": "object",
15 | "title": "Reset Policies",
16 | "description": "A reset policy.",
17 | "required": [
18 | "name",
19 | "description"
20 | ],
21 | "properties": {
22 | "name": {
23 | "type": "string",
24 | "title": "Name",
25 | "description": "The name of the reset policy.",
26 | "default": "earliest",
27 | "examples": [
28 | "earliest",
29 | "latest",
30 | "none"
31 | ],
32 | "enum": [
33 | "earliest",
34 | "latest",
35 | "none"
36 | ]
37 | },
38 | "description": {
39 | "type": "string",
40 | "title": "Description",
41 | "description": "The description of the reset policy. This is shown when inline help is enabled.",
42 | "default": "",
43 | "examples": [
44 | "Reset from the earliest offset."
45 | ],
46 | "pattern": "^[^\\s].+[.][^\\s]*$"
47 | }
48 | },
49 | "additionalProperties": false
50 | }
51 | },
52 | "additionalProperties": false
53 | }
54 | },
55 | "additionalProperties": false
56 | }
57 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-resource-pattern-types.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "ANY": {
4 | "name": "ANY",
5 | "description": "Match any pattern type."
6 | },
7 | "MATCH": {
8 | "name": "MATCH",
9 | "description": "Perform pattern matching for the pattern type."
10 | },
11 | "LITERAL": {
12 | "name": "LITERAL",
13 | "description": "Give a specific pattern type."
14 | },
15 | "PREFIXED": {
16 | "name": "PREFIXED",
17 | "description": "Prefixed pattern type."
18 | }
19 | }
20 | }
21 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-resource-pattern-types.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for kafka resource pattern type values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of resource pattern types.",
12 | "patternProperties": {
13 | "^(ANY|LITERAL|MATCH|PREFIXED)$": {
14 | "type": "object",
15 | "title": "Resource Pattern Type",
16 | "description": "A resource pattern type.",
17 | "required": [
18 | "name",
19 | "description"
20 | ],
21 | "properties": {
22 | "name": {
23 | "type": "string",
24 | "title": "Name",
25 | "description": "The name of the resource pattern type.",
26 | "default": "LITERAL",
27 | "examples": [
28 | "ANY",
29 | "LITERAL"
30 | ],
31 | "enum": [
32 | "ANY",
33 | "LITERAL",
34 | "MATCH",
35 | "PREFIXED"
36 | ]
37 | },
38 | "description": {
39 | "type": "string",
40 | "title": "Description",
41 | "description": "The description of the resource pattern type. This is shown when inline help is enabled.",
42 | "default": "",
43 | "examples": [
44 | "Match any pattern type."
45 | ],
46 | "pattern": "^[^\\s].+[.][^\\s]*$"
47 | }
48 | },
49 | "additionalProperties": false
50 | }
51 | },
52 | "additionalProperties": false
53 | }
54 | },
55 | "additionalProperties": false
56 | }
57 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-timestamp-types.json:
--------------------------------------------------------------------------------
1 | {
2 | "values": {
3 | "CreateTime": {
4 | "name": "CreateTime",
5 | "description": "The timestamp in the message will be the message create time."
6 | },
7 | "LogAppendTime": {
8 | "name": "LogAppendTime",
9 | "description": "The timestamp in the message will be the log append time."
10 | }
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/kafkashell/data/completer-timestamp-types.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell JSON schema for log message timestamp type values",
4 | "required": [
5 | "values"
6 | ],
7 | "properties": {
8 | "values": {
9 | "type": "object",
10 | "title": "Values",
11 | "description": "The list of timestamp types.",
12 | "patternProperties": {
13 | "^(CreateTime|LogAppendTime)$": {
14 | "type": "object",
15 | "title": "Timestamp Type",
16 | "description": "A log message timestamp type.",
17 | "required": [
18 | "name",
19 | "description"
20 | ],
21 | "properties": {
22 | "name": {
23 | "type": "string",
24 | "title": "Name",
25 | "description": "The name of the timestamp type.",
26 | "default": "CreateTime",
27 | "examples": [
28 | "CreateTime",
29 | "LogAppendTime"
30 | ],
31 | "enum": [
32 | "CreateTime",
33 | "LogAppendTime"
34 | ]
35 | },
36 | "description": {
37 | "type": "string",
38 | "title": "Description",
39 | "description": "The description of the timestamp type. This is shown when inline help is enabled.",
40 | "default": "",
41 | "examples": [
42 | "The timestamp in the message will be the message create time."
43 | ],
44 | "pattern": "^[^\\s].+[.][^\\s]*$"
45 | }
46 | },
47 | "additionalProperties": false
48 | }
49 | },
50 | "additionalProperties": false
51 | }
52 | },
53 | "additionalProperties": false
54 | }
55 |
--------------------------------------------------------------------------------
/kafkashell/data/completer.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell autocomplete commands JSON schema",
4 | "required": [
5 | "commands"
6 | ],
7 | "properties": {
8 | "commands": {
9 | "type": "object",
10 | "title": "Commands",
11 | "description": "The list of supported commands.",
12 | "patternProperties": {
13 | "^[a-z]+(-[a-z]+)*$": {
14 | "type": "object",
15 | "title": "Command",
16 | "description": "A command that can be autocompleted.",
17 | "required": [
18 | "name",
19 | "description"
20 | ],
21 | "properties": {
22 | "name": {
23 | "type": "string",
24 | "title": "Name",
25 | "description": "The name of the command. This should match the object's key.",
26 | "default": "",
27 | "examples": [
28 | "kafka-topics",
29 | "kafka-console-consumer"
30 | ],
31 | "pattern": "^[a-z]+(-[a-z]+)*$"
32 | },
33 | "description": {
34 | "type": "string",
35 | "title": "Description",
36 | "description": "The description of the command. This is shown when inline help is enabled.",
37 | "default": "",
38 | "examples": [
39 | "Manage topics within a cluster."
40 | ],
41 | "pattern": "^[^\\s].+[.][^\\s]*$"
42 | },
43 | "options": {
44 | "type": "object",
45 | "title": "Options",
46 | "description": "List of options, arguments, and flags for the command.",
47 | "patternProperties": {
48 | "^(--|--[a-z.\\-]*[^\\s])$": {
49 | "type": "object",
50 | "title": "Option",
51 | "description": "An option, argument, or flag for the command.",
52 | "required": [
53 | "name",
54 | "description"
55 | ],
56 | "properties": {
57 | "name": {
58 | "type": "string",
59 | "title": "Name",
60 | "description": "The name of the option. This should match the object's key.",
61 | "default": "",
62 | "examples": [
63 | "--config",
64 | "--query-timeout"
65 | ],
66 | "pattern": "^(--|--[a-z.\\-]*[^\\s])$"
67 | },
68 | "description": {
69 | "type": "string",
70 | "title": "Description",
71 | "description": "The description of the option. This is shown when inline help is enabled.",
72 | "default": "",
73 | "examples": [
74 | "Create a new topic.",
75 | "List details for the given topics."
76 | ],
77 | "pattern": "^[^\\s].+[.][^\\s]*$"
78 | },
79 | "completer": {
80 | "type": "string",
81 | "title": "Completer",
82 | "description": "The completer JSON file to use for auto completion of values for this option.",
83 | "default": "",
84 | "examples": [
85 | "kafka-configs",
86 | "reset-policies"
87 | ],
88 | "enum": [
89 | "kafka-configs",
90 | "reset-policies",
91 | "resource-pattern-types",
92 | "ksql-output",
93 | "acks",
94 | "compression-codecs",
95 | "entity-types"
96 | ]
97 | }
98 | },
99 | "additionalProperties": false
100 | }
101 | },
102 | "additionalProperties": false
103 | }
104 | },
105 | "additionalProperties": false
106 | }
107 | },
108 | "additionalProperties": false
109 | }
110 | },
111 | "additionalProperties": false
112 | }
113 |
--------------------------------------------------------------------------------
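
The "completer" enum here names the sibling data files: the filenames above suggest a convention of completer-<value>.json. A sketch of resolving an option's value completer under that assumed convention:

import json
import os

DATA_DIR = "kafkashell/data"  # assumed location of the bundled data files


def load_value_completer(name):
    path = os.path.join(DATA_DIR, "completer-{0}.json".format(name))
    with open(path) as f:
        return json.load(f)["values"]


# e.g. the values offered for: kafka-configs --add-config cleanup.policy=
print(sorted(load_value_completer("cleanup-policy")))  # ['compact', 'delete']
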
/kafkashell/data/shell-config.schema:
--------------------------------------------------------------------------------
1 | {
2 | "type": "object",
3 | "title": "kafka-shell user configuration JSON schema",
4 | "required": [
5 | "version",
6 | "enable",
7 | "cluster",
8 | "clusters"
9 | ],
10 | "properties": {
11 | "version": {
12 | "type": "integer",
13 | "title": "Version",
14 | "description": "The version of the configuration schema. Currently, it must be 1.",
15 | "default": 1,
16 | "enum": [
17 | 1
18 | ]
19 | },
20 | "enable": {
21 | "type": "object",
22 | "title": "Feature Toggles",
23 | "description": "Enable or disable features within the shell.",
24 | "required": [
25 | "history",
26 | "save_on_exit",
27 | "auto_complete",
28 | "auto_suggest",
29 | "inline_help",
30 | "fuzzy_search"
31 | ],
32 | "properties": {
33 | "history": {
34 | "type": "boolean",
35 | "title": "Enable History",
36 | "description": "If enabled, history will be saved between sessions to ~/.kafka-shell-history.",
37 | "default": true,
38 | "examples": [
39 | true,
40 | false
41 | ]
42 | },
43 | "save_on_exit": {
44 | "type": "boolean",
45 | "title": "Enable Save On Exit",
46 | "description": "If enabled, user configuration changes will be saved on exit from kafka-shell.",
47 | "default": true,
48 | "examples": [
49 | true,
50 | false
51 | ]
52 | },
53 | "auto_complete": {
54 | "type": "boolean",
55 | "title": "Enable Auto Complete",
56 | "description": "If enabled, the autocomplete dropdown will be shown as you type.",
57 | "default": true,
58 | "examples": [
59 | true,
60 | false
61 | ]
62 | },
63 | "auto_suggest": {
64 | "type": "boolean",
65 | "title": "Enable Auto Suggest",
66 | "description": "If enabled, suggestions from history will be shown as gray text (like the fish shell).",
67 | "default": true,
68 | "examples": [
69 | true,
70 | false
71 | ]
72 | },
73 | "inline_help": {
74 | "type": "boolean",
75 | "title": "Enable Inline Help",
76 | "description": "Enable inline help and descriptions of commands within the autocomplete dropdown.",
77 | "default": true,
78 | "examples": [
79 | true,
80 | false
81 | ]
82 | },
83 | "fuzzy_search": {
84 | "type": "boolean",
85 | "title": "Enable Fuzzy Search",
86 | "description": "Enable fuzzy searching of autocomplete dropdown selections.",
87 | "default": true,
88 | "examples": [
89 | true,
90 | false
91 | ]
92 | }
93 | },
94 | "additionalProperties": false
95 | },
96 | "cluster": {
97 | "type": "string",
98 | "title": "Selected Cluster",
99 | "description": "The default cluster to be selected on initialization of kafka-shell.",
100 | "default": "local",
101 | "examples": [
102 | "local",
103 | "test-cluster"
104 | ],
105 | "pattern": "^(.*)$"
106 | },
107 | "clusters": {
108 | "type": "object",
109 | "title": "List of clusters to be used",
110 | "patternProperties": {
111 | "^(.*)$": {
112 | "type": "object",
113 | "title": "Cluster",
114 | "description": "Properties for a Kafka cluster",
115 | "properties": {
116 | "bootstrap_servers": {
117 | "type": "string",
118 | "title": "Kafka Bootstrap Servers",
119 | "description": "The Kafka broker(s) to connect to.",
120 | "default": "",
121 | "examples": [
122 | "localhost:9092"
123 | ],
124 | "pattern": "^(.*)$"
125 | },
126 | "zookeeper_connect": {
127 | "type": "string",
128 | "title": "Zookeeper Connect URL",
129 | "description": "The Zookeeper nodes to connect to.",
130 | "default": "",
131 | "examples": [
132 | "localhost:2181"
133 | ],
134 | "pattern": "^(.*)$"
135 | },
136 | "schema_registry_url": {
137 | "type": "string",
138 | "title": "Schema Registry URL",
139 | "description": "The Schema Registry URL to connect to.",
140 | "default": "",
141 | "examples": [
142 | "http://localhost:8081"
143 | ],
144 | "pattern": "^https?://(.*)$|^\\$.*$"
145 | },
146 | "ksql_server_url": {
147 | "type": "string",
148 | "title": "KSQL Server URL",
149 | "description": "The KSQL Server URL to connect to.",
150 | "default": "",
151 | "examples": [
152 | "http://localhost:8088"
153 | ],
154 | "pattern": "^https?://(.*)$|^\\$.*$"
155 | },
156 | "command_prefix": {
157 | "type": "string",
158 | "title": "Command Prefix",
159 | "description": "Prefix all commands with another command, such as a 'docker exec'.",
160 | "default": "",
161 | "examples": [
162 | "docker run --net=host -it --entrypoint run devshawn/confluent-tools"
163 | ],
164 | "pattern": "^(.*)$"
165 | },
166 | "command_file_extension": {
167 | "type": "string",
168 | "title": "Command File Extension",
169 | "description": "Add a file extension such as '.sh' or .bat' to the commands.",
170 | "default": "",
171 | "examples": [
172 | "sh",
173 | "bat"
174 | ],
175 | "pattern": "^sh$|^bat$"
176 | },
177 | "consumer_settings": {
178 | "type": "object",
179 | "title": "Consumer Settings",
180 | "description": "Settings for configuring consumer CLI tools.",
181 | "properties": {
182 | "config": {
183 | "type": "string",
184 | "title": "Consumer Config",
185 | "description": "Configuration path for properties file passed to --consumer.config.",
186 | "default": "",
187 | "examples": [
188 | "/tmp/consumer.properties"
189 | ],
190 | "pattern": "^(.*).properties$"
191 | },
192 | "properties": {
193 | "type": "object",
194 | "title": "Consumer Properties",
195 | "description": "Properties to send to the consumer CLIs by default.",
196 | "patternProperties": {
197 | "^(.*)$": {
198 | "title": "Consumer Property",
199 | "description": "Property to send to the consumer CLIs by default."
200 | }
201 | }
202 | }
203 | },
204 | "additionalProperties": false
205 | },
206 | "producer_settings": {
207 | "type": "object",
208 | "title": "Producer Settings",
209 | "description": "Settings for configuring producer CLI tools.",
210 | "properties": {
211 | "config": {
212 | "type": "string",
213 | "title": "Consumer Config",
214 | "description": "Configuration path for properties file passed to --producer.config.",
215 | "default": "",
216 | "examples": [
217 | "/tmp/producer.properties"
218 | ],
219 | "pattern": "^(.*).properties$"
220 | },
221 | "properties": {
222 | "type": "object",
223 | "title": "Producer Properties",
224 | "description": "Properties to send to the producer CLIs by default.",
225 | "patternProperties": {
226 | "^[a-z]+(\\.[a-z]+)*$": {
227 | "title": "Producer Property",
228 | "description": "Property to send to the producer CLIs by default."
229 | }
230 | },
231 | "additionalProperties": true
232 | }
233 | },
234 | "additionalProperties": false
235 | },
236 | "admin_client_settings": {
237 | "type": "object",
238 | "title": "Admin Client Settings",
239 | "description": "Settings for configuring admin client within the CLI tools.",
240 | "properties": {
241 | "config": {
242 | "type": "string",
243 | "title": "Admin Client Config",
244 | "description": "Configuration path for properties file passed to --command-config.",
245 | "default": "",
246 | "examples": [
247 | "/tmp/admin.properties"
248 | ],
249 | "pattern": "^(.*).properties$"
250 | }
251 | },
252 | "additionalProperties": false
253 | }
254 | },
255 | "additionalProperties": false
256 | }
257 | },
258 | "additionalProperties": false
259 | }
260 | },
261 | "additionalProperties": false
262 | }
263 |
--------------------------------------------------------------------------------
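
Since user configuration is YAML while this schema is JSON Schema, validating a config means parsing the YAML into a plain dict first. A minimal sketch, assuming PyYAML and jsonschema are installed (the project's own loading presumably lives in kafkashell/config.py):

import json

import yaml
from jsonschema import validate

with open("kafkashell/data/shell-config.yaml") as f:
    user_config = yaml.safe_load(f)  # YAML parses to the dict the schema expects

with open("kafkashell/data/shell-config.schema") as f:
    schema = json.load(f)

validate(instance=user_config, schema=schema)
print("config ok; default cluster: {0}".format(user_config["cluster"]))
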
/kafkashell/data/shell-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 |
--------------------------------------------------------------------------------
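
This default config is what calls like settings.get_cluster_details()["bootstrap_servers"] in executor.py below ultimately resolve against. A toy stand-in for the real settings object, shown only to make that lookup concrete:

import yaml


class ToySettings(object):
    """Hypothetical stand-in; the real class lives in kafkashell/settings.py."""

    def __init__(self, path):
        with open(path) as f:
            self.user_config = yaml.safe_load(f)
        self.cluster = self.user_config["cluster"]  # e.g. "local"

    def get_cluster_details(self):
        return self.user_config["clusters"][self.cluster]


settings = ToySettings("kafkashell/data/shell-config.yaml")
print(settings.get_cluster_details()["bootstrap_servers"])  # localhost:9092
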
/kafkashell/executor.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import print_function
17 | from __future__ import unicode_literals
18 |
19 | import sys
20 |
21 | import os
22 | from prompt_toolkit import shortcuts
23 |
24 | from kafkashell import constants
25 | from kafkashell import helpers
26 | from kafkashell import version
27 |
28 | valid_command_prefixes = (
29 | "kafka",
30 | "zookeeper",
31 | "ksql"
32 | )
33 |
34 |
35 | class Executor:
36 |
37 | def __init__(self, settings):
38 | self.settings = settings
39 |
40 | def execute(self, command):
41 | command = command.strip()
42 | if command == "exit":
43 | self.settings.save_settings()
44 | sys.exit(0)
45 |
46 | elif command == "clear":
47 | shortcuts.clear()
48 |
49 | elif command == "save":
50 | self.settings.save_settings()
51 | print("Saved settings!")
52 |
53 | elif command == "version":
54 | print(version.get_version())
55 |
56 | elif command.startswith("cluster-"):
57 | self.execute_cluster_command(command)
58 |
59 | elif command.startswith(valid_command_prefixes):
60 | self.execute_valid_command(command)
61 |
62 | def execute_valid_command(self, command):
63 | if command.startswith(constants.COMMAND_KAFKA_TOPICS):
64 | final_command = self.handle_kafka_topics_command(command)
65 |
66 | elif command.startswith(constants.COMMAND_KAFKA_CONFIGS):
67 | final_command = self.handle_kafka_configs_command(command)
68 |
69 | elif command.startswith(constants.COMMAND_KAFKA_CONSOLE_CONSUMER):
70 | final_command = self.handle_kafka_console_consumer_command(command)
71 |
72 | elif command.startswith(constants.COMMAND_KAFKA_CONSOLE_PRODUCER):
73 | final_command = self.handle_kafka_console_producer_command(command)
74 |
75 | elif command.startswith(constants.COMMAND_KAFKA_AVRO_CONSOLE_CONSUMER):
76 | final_command = self.handle_kafka_avro_console_consumer_command(command)
77 |
78 | elif command.startswith(constants.COMMAND_KAFKA_AVRO_CONSOLE_PRODUCER):
79 | final_command = self.handle_kafka_avro_console_producer_command(command)
80 |
81 | elif command.startswith(constants.COMMAND_KAFKA_VERIFIABLE_CONSUMER):
82 | final_command = self.handle_kafka_verifiable_consumer(command)
83 |
84 | elif command.startswith(constants.COMMAND_KAFKA_VERIFIABLE_PRODUCER):
85 | final_command = self.handle_kafka_verifiable_producer(command)
86 |
87 | elif command.startswith(constants.COMMAND_KAFKA_CONSUMER_GROUPS):
88 | final_command = self.handle_kafka_consumer_groups_command(command)
89 |
90 | elif command.startswith(constants.COMMAND_KAFKA_PREFERRED_REPLICA_ELECTION):
91 | final_command = self.handle_kafka_preferred_replica_election(command)
92 |
93 | elif command.startswith(constants.COMMAND_KAFKA_REPLICA_VERIFICATION):
94 | final_command = self.handle_kafka_replica_verification_command(command)
95 |
96 | elif command.startswith(constants.COMMAND_KAFKA_REASSIGN_PARTITIONS):
97 | final_command = self.handle_kafka_reassign_partitions_command(command)
98 |
99 | elif command.startswith(constants.COMMAND_KAFKA_BROKER_API_VERSIONS):
100 | final_command = self.handle_kafka_broker_api_versions_command(command)
101 |
102 | elif command.startswith(constants.COMMAND_KAFKA_DELETE_RECORDS):
103 | final_command = self.handle_kafka_delete_records_command(command)
104 |
105 | elif command.startswith(constants.COMMAND_KAFKA_LOG_DIRS):
106 | final_command = self.handle_kafka_log_dirs_command(command)
107 |
108 | elif command.startswith(constants.COMMAND_KAFKA_ACLS):
109 | final_command = self.handle_kafka_acls_command(command)
110 |
111 | elif command.startswith(constants.COMMAND_KSQL):
112 | final_command = self.handle_ksql_command(command)
113 |
114 | elif command.startswith(constants.COMMAND_ZOOKEEPER_SHELL):
115 | final_command = self.handle_zookeeper_shell_command(command)
116 |
117 | else:
118 | final_command = command
119 |
120 | final_command = self.handle_file_extension(final_command)
121 |
122 | if self.check_for_valid_command_prefix():
123 | command_prefix = self.settings.get_cluster_details()["command_prefix"].strip()
124 | final_command = "{0} {1}".format(command_prefix, final_command)
125 |
126 | os.system(final_command)
127 |
128 | def handle_file_extension(self, command):
129 | file_extension = self.get_file_extension()
130 | if file_extension is not None:
131 | split_command = command.split(" ")
132 | split_command[0] = "{}.{}".format(split_command[0], file_extension)
133 | return " ".join(split_command)
134 | return command
135 |
136 | def execute_cluster_command(self, command):
137 | split_text = command.split(" ")
138 | if len(split_text) > 1:
139 | if len(split_text) > 2:
140 | print("Too many arguments!")
141 | else:
142 | if split_text[0] == "cluster-select":
143 | self.handle_cluster_select(split_text)
144 |
145 | elif split_text[0] == "cluster-describe":
146 | self.handle_cluster_describe(split_text)
147 | else:
148 | print("Please enter a cluster name.")
149 |
150 | # Handlers
151 |
152 | def handle_cluster_select(self, split_text):
153 | if split_text[1] in self.settings.user_config["clusters"]:
154 | self.settings.cluster = split_text[1]
155 | print("Selected cluster: {0}".format(split_text[1]))
156 | else:
157 | print("Unknown cluster!")
158 |
159 | def handle_cluster_describe(self, split_text):
160 | try:
161 | cluster = self.settings.user_config["clusters"][split_text[1]]
162 | helpers.print_cluster_config(cluster)
163 | except KeyError:
164 | print("Unknown cluster!")
165 |
166 | def handle_kafka_topics_command(self, command):
167 | command += self.handle_bootstrap_or_zookeeper_flag(command)
168 | command += self.handle_admin_client_settings(command)
169 | return command
170 |
171 | def handle_kafka_configs_command(self, command):
172 | command += self.handle_zookeeper_flag(command)
173 | command += self.handle_admin_client_settings(command)
174 | return command
175 |
176 | def handle_kafka_console_consumer_command(self, command):
177 | command += self.handle_bootstrap_server_flag(command)
178 | command += self.handle_cli_settings(command, "consumer")
179 | return command
180 |
181 | def handle_kafka_console_producer_command(self, command):
182 | command += self.handle_broker_list_flag(command)
183 | command += self.handle_cli_settings(command, "producer")
184 | return command
185 |
186 | def handle_kafka_avro_console_consumer_command(self, command):
187 | command += self.handle_bootstrap_server_flag(command)
188 | command += self.handle_schema_registry_url_property(command)
189 | command += self.handle_cli_settings(command, "consumer")
190 | return command
191 |
192 | def handle_kafka_avro_console_producer_command(self, command):
193 | command += self.handle_broker_list_flag(command)
194 | command += self.handle_schema_registry_url_property(command)
195 | command += self.handle_cli_settings(command, "producer")
196 | return command
197 |
198 | def handle_kafka_verifiable_consumer(self, command):
199 | command += self.handle_broker_list_flag(command)
200 | command += self.handle_config(command, "consumer.config", "consumer")
201 | return command
202 |
203 | def handle_kafka_verifiable_producer(self, command):
204 | command += self.handle_broker_list_flag(command)
205 | command += self.handle_config(command, "producer.config", "producer")
206 | return command
207 |
208 | def handle_kafka_consumer_groups_command(self, command):
209 | command += self.handle_bootstrap_server_flag(command)
210 | command += self.handle_admin_client_settings(command)
211 | return command
212 |
213 | def handle_kafka_preferred_replica_election(self, command):
214 | command += self.handle_bootstrap_server_flag(command)
215 | command += self.handle_admin_client_settings(command)
216 | return command
217 |
218 | def handle_kafka_replica_verification_command(self, command):
219 | command += self.handle_broker_list_flag(command)
220 | return command
221 |
222 | def handle_kafka_reassign_partitions_command(self, command):
223 | command += self.handle_bootstrap_server_flag(command)
224 | command += self.handle_zookeeper_flag(command)
225 | command += self.handle_admin_client_settings(command)
226 | return command
227 |
228 | def handle_kafka_broker_api_versions_command(self, command):
229 | command += self.handle_bootstrap_server_flag(command)
230 | command += self.handle_admin_client_settings(command)
231 | return command
232 |
233 | def handle_kafka_delete_records_command(self, command):
234 | command += self.handle_bootstrap_server_flag(command)
235 | command += self.handle_admin_client_settings(command)
236 | return command
237 |
238 | def handle_kafka_log_dirs_command(self, command):
239 | command += self.handle_bootstrap_server_flag(command)
240 | command += self.handle_admin_client_settings(command)
241 | return command
242 |
243 | def handle_kafka_acls_command(self, command):
244 | command += self.handle_bootstrap_server_flag(command)
245 | command += self.handle_admin_client_settings(command)
246 | return command
247 |
248 | def handle_ksql_command(self, command):
249 | command += self.handle_ksql_input(command)
250 | return command
251 |
252 | def handle_zookeeper_shell_command(self, command):
253 | command += self.handle_zookeeper_shell_input(command)
254 | return command
255 |
256 | # Helpers
257 |
258 | def handle_bootstrap_or_zookeeper_flag(self, command):
259 | if self.settings.get_cluster_details().get("zookeeper_connect") is not None:
260 | return self.handle_zookeeper_flag(command)
261 |
262 | return self.handle_bootstrap_server_flag(command)
263 |
264 | def handle_zookeeper_flag(self, command):
265 | if constants.FLAG_ZOOKEEPER not in command:
266 | zookeeper_flag = self.wrap_with_spaces(constants.FLAG_ZOOKEEPER)
267 | return zookeeper_flag + self.settings.get_cluster_details()["zookeeper_connect"]
268 | else:
269 | return ""
270 |
271 | def handle_bootstrap_server_flag(self, command):
272 | if constants.FLAG_BOOTSTRAP_SERVER not in command:
273 | bootstrap_server_flag = self.wrap_with_spaces(constants.FLAG_BOOTSTRAP_SERVER)
274 | return bootstrap_server_flag + self.settings.get_cluster_details()["bootstrap_servers"]
275 | else:
276 | return ""
277 |
278 | def handle_broker_list_flag(self, command):
279 | if constants.FLAG_BROKER_LIST not in command:
280 | broker_list_flag = self.wrap_with_spaces(constants.FLAG_BROKER_LIST)
281 | return broker_list_flag + self.settings.get_cluster_details()["bootstrap_servers"]
282 | else:
283 | return ""
284 |
285 | def handle_schema_registry_url_property(self, command):
286 | if constants.FLAG_SCHEMA_REGISTRY_URL not in command:
287 | return " {0}={1}".format(constants.FLAG_SCHEMA_REGISTRY_URL,
288 | self.settings.get_cluster_details()["schema_registry_url"])
289 | else:
290 | return ""
291 |
292 | def handle_ksql_input(self, command):
293 | return " -- " + self.settings.get_cluster_details()["ksql_server_url"] if " -- " not in command else ""
294 |
295 | def handle_zookeeper_shell_input(self, command):
296 | return " " + self.settings.get_cluster_details()["zookeeper_connect"] if len(command.split()) == 1 else ""
297 |
298 | def handle_admin_client_settings(self, command):
299 | if "admin_client_settings" in self.settings.get_cluster_details().keys():
300 | admin_client_option = "command-config" if constants.COMMAND_KAFKA_PREFERRED_REPLICA_ELECTION \
301 | not in command else "admin.config"
302 | return self.handle_config(command, admin_client_option, "admin_client")
303 | else:
304 | return ""
305 |
306 | def handle_cli_settings(self, command, settings_type):
307 | if "{0}_settings".format(settings_type) in self.settings.get_cluster_details().keys():
308 | return "".join([
309 | self.handle_config(command, "{0}.config".format(settings_type), settings_type),
310 | self.handle_properties(settings_type)
311 | ])
312 | else:
313 | return ""
314 |
315 | def handle_config(self, command, config_prefix, settings_type):
316 | if "--{}".format(config_prefix) not in command:
317 | config = self.get_config_from_settings(settings_type)
318 | return " --{0} {1}".format(config_prefix, config) if config is not None else ""
319 | else:
320 | return ""
321 |
322 | def handle_properties(self, settings_type):
323 | properties = self.get_properties_from_settings(settings_type)
324 | return "".join([self.format_property(key, properties) for key in sorted(list(properties.keys()))])
325 |
326 | @staticmethod
327 | def format_property(key, properties):
328 | value = str(properties[key]).lower() if isinstance(properties[key], bool) else properties[key]
329 | return " --property {0}={1}".format(key, value)
330 |
331 | def get_properties_from_settings(self, settings_type):
332 | try:
333 | return self.settings.get_cluster_details()["{0}_settings".format(settings_type)]["properties"]
334 | except KeyError:
335 | return {}
336 |
337 | def get_config_from_settings(self, settings_type):
338 | try:
339 | return self.settings.get_cluster_details()["{0}_settings".format(settings_type)]["config"]
340 | except KeyError:
341 | return None
342 |
343 | def check_for_valid_command_prefix(self):
344 | return "command_prefix" in self.settings.get_cluster_details() \
345 | and \
346 | len(self.settings.get_cluster_details()["command_prefix"]) > 0
347 |
348 | def get_file_extension(self):
349 | try:
350 | return self.settings.get_cluster_details()["command_file_extension"]
351 | except KeyError:
352 | return None
353 |
354 | @staticmethod
355 | def wrap_with_spaces(string):
356 | return " {0} ".format(string)
357 |
--------------------------------------------------------------------------------
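A quick standalone sketch (names copied from the executor above) of how a cluster's
consumer/producer "properties" map becomes repeated --property flags; note that
booleans are lowercased and keys are emitted in sorted order:

    def format_property(key, properties):
        value = str(properties[key]).lower() if isinstance(properties[key], bool) else properties[key]
        return " --property {0}={1}".format(key, value)

    props = {"print.key": True, "key.separator": ","}
    print("".join(format_property(key, props) for key in sorted(props)))
    # -> " --property key.separator=, --property print.key=true"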
/kafkashell/helpers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import print_function
17 | from __future__ import unicode_literals
18 |
19 | options_that_can_be_duplicated = [
20 | "--add-config",
21 | "--config",
22 | "--consumer-property",
23 | "--delete-config",
24 | "--producer-property",
25 | "--property",
26 | "--principal"
27 | ]
28 |
29 |
30 | def exclude_options_from_removal(command_list):
31 | return [elem for elem in command_list if elem not in options_that_can_be_duplicated]
32 |
33 |
34 | def cycle(my_list, start_at=None):
35 |     index = None if start_at is None else my_list.index(start_at)
36 |     start_at = 0 if index is None else index
37 | while True:
38 | start_at = (start_at + 1) % len(my_list)
39 | yield my_list[start_at]
40 |
41 |
42 | def print_cluster_config(cluster):
43 | output = ""
44 | for key in cluster.keys():
45 | output += "{0}: {1}\n".format(key, cluster[key])
46 | print(output)
47 |
--------------------------------------------------------------------------------
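cycle() yields the entry after start_at and wraps around, which is what lets the F2
binding rotate through the configured clusters. A usage sketch:

    from kafkashell.helpers import cycle

    clusters = cycle(["local", "staging", "prod"], start_at="local")
    print(next(clusters))  # staging
    print(next(clusters))  # prod
    print(next(clusters))  # local (wrapped around)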
/kafkashell/main.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import unicode_literals
17 |
18 | from prompt_toolkit import PromptSession
19 | from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
20 | from prompt_toolkit.history import FileHistory, InMemoryHistory, ThreadedHistory
21 |
22 | from kafkashell.bindings import get_bindings
23 | from kafkashell.completer import KafkaCompleter
24 | from kafkashell.config import get_user_history_path
25 | from kafkashell.executor import Executor
26 | from kafkashell.settings import Settings
27 | from kafkashell.style import style
28 | from kafkashell.toolbar import Toolbar
29 |
30 |
31 | def main():
32 | settings = Settings()
33 | bindings = get_bindings(settings)
34 | executor = Executor(settings)
35 | toolbar = Toolbar(settings)
36 | completer = KafkaCompleter(settings) if settings.enable_auto_complete else None
37 | suggester = AutoSuggestFromHistory() if settings.enable_auto_suggest else None
38 | history = ThreadedHistory(FileHistory(get_user_history_path())) if settings.enable_history else InMemoryHistory()
39 | session = PromptSession(completer=completer, style=style, bottom_toolbar=toolbar.handler,
40 | key_bindings=bindings, history=history, include_default_pygments_style=False)
41 | while True:
42 | try:
43 | command = session.prompt([("class:operator", "> ")], auto_suggest=suggester)
44 | except KeyboardInterrupt:
45 | continue
46 | except EOFError:
47 | break
48 | else:
49 | executor.execute(command)
50 |
51 | settings.save_settings()
52 |
53 |
54 | if __name__ == "__main__":
55 | main()
56 |
--------------------------------------------------------------------------------
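The loop above is the standard prompt_toolkit REPL pattern: KeyboardInterrupt (Ctrl+C)
discards the current line, while EOFError (Ctrl+D, or the F10 binding, which exits the
app with exception=EOFError) breaks the loop so save_settings() still runs. A
stripped-down sketch of the same pattern:

    from prompt_toolkit import PromptSession

    session = PromptSession()
    while True:
        try:
            command = session.prompt("> ")
        except KeyboardInterrupt:
            continue  # Ctrl+C: drop the line, keep the shell running
        except EOFError:
            break     # Ctrl+D: leave the loop, then clean up
        else:
            print(command)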
/kafkashell/settings.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import unicode_literals
17 |
18 | import copy
19 |
20 | from kafkashell import config
21 | from kafkashell.helpers import cycle
22 |
23 |
24 | class Settings:
25 |
26 | def __init__(self):
27 | config.init_config()
28 | self.user_config = config.validate_config(config.get_config())
29 | self.commands = config.get_completer()["commands"]
30 | self.enable = self.user_config["enable"]
31 | self.enable_history = self.user_config["enable"]["history"]
32 | self.enable_save_on_exit = self.user_config["enable"]["save_on_exit"]
33 | self.enable_auto_complete = self.user_config["enable"]["auto_complete"]
34 | self.enable_auto_suggest = self.user_config["enable"]["auto_suggest"]
35 | self.enable_help = self.user_config["enable"]["inline_help"]
36 | self.enable_fuzzy_search = self.user_config["enable"]["fuzzy_search"]
37 | self.cluster = self.user_config["cluster"]
38 | self.cluster_iterator = cycle(list(self.user_config["clusters"].keys()), self.cluster)
39 | self.init_history()
40 |
41 | def set_enable_help(self, value):
42 | self.enable_help = value
43 |
44 | def set_enable_fuzzy_search(self, value):
45 | self.enable_fuzzy_search = value
46 |
47 | def set_next_cluster(self):
48 | self.cluster = next(self.cluster_iterator)
49 |
50 | def get_cluster_details(self):
51 | return self.user_config["clusters"][self.cluster]
52 |
53 | def init_history(self):
54 | if self.enable_history:
55 | config.init_history()
56 |
57 | def save_settings(self):
58 | if self.enable_save_on_exit:
59 | settings = copy.deepcopy(self.user_config)
60 | settings["enable"]["inline_help"] = self.enable_help
61 | settings["enable"]["fuzzy_search"] = self.enable_fuzzy_search
62 | settings["cluster"] = self.cluster
63 | config.save_config(settings)
64 |
--------------------------------------------------------------------------------
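A hypothetical session showing how set_next_cluster() walks the configured clusters
(assumes a user config like tests/data/test-config.yaml, with clusters "local" and
"test" and "local" active):

    from kafkashell.settings import Settings

    settings = Settings()        # reads and validates the user config from disk
    print(settings.cluster)      # local
    settings.set_next_cluster()  # what the F2 binding calls
    print(settings.cluster)      # test
    settings.set_next_cluster()
    print(settings.cluster)      # local again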
/kafkashell/style.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import unicode_literals
17 |
18 | from prompt_toolkit.styles import Style
19 |
20 | style = Style.from_dict({
21 | "operator": "#2196f3",
22 | "completion-menu.completion": "bg:#1769aa #ffffff",
23 | "completion-menu.completion.current": "bg:#2196f3 #000000",
24 | "completion-menu.meta.completion": "bg:#2196f3 #ffffff",
25 | "completion-menu.meta.completion.current": "bg:#4dabf5 #000000",
26 | "scrollbar.background": "bg:#1769aa",
27 | "scrollbar.button": "bg:#003333",
28 | "bottom-toolbar": "bg:#ffffff #1769aa",
29 | "bottom-toolbar-yellow": "bg:#ffff10 #1769aa",
30 | })
31 |
--------------------------------------------------------------------------------
/kafkashell/toolbar.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | from __future__ import unicode_literals
17 |
18 |
19 | class Toolbar(object):
20 |
21 | def __init__(self, settings):
22 | self.handler = self._create_handler(settings)
23 |
24 | @staticmethod
25 | def _create_handler(settings):
26 | def get_toolbar_items():
27 | help_text = "ON" if settings.enable_help else "OFF"
28 | fuzzy_text = "ON" if settings.enable_fuzzy_search else "OFF"
29 | return [
30 | ("class:bottom-toolbar", " [F2] Cluster: "),
31 | ("class:bottom-toolbar-yellow", settings.cluster),
32 | ("class:bottom-toolbar", " "),
33 | ("class:bottom-toolbar", "[F3] Fuzzy: "),
34 | ("class:bottom-toolbar-yellow", fuzzy_text),
35 | ("class:bottom-toolbar", " "),
36 | ("class:bottom-toolbar", "[F9] In-Line Help: "),
37 | ("class:bottom-toolbar-yellow", help_text),
38 | ("class:bottom-toolbar", " "),
39 | ("class:bottom-toolbar", "[F10] Exit")
40 | ]
41 |
42 | return get_toolbar_items
43 |
--------------------------------------------------------------------------------
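Toolbar.handler is a zero-argument callable returning (style, text) fragments for
prompt_toolkit's bottom_toolbar. A sketch with a stand-in settings object (a
hypothetical stub, not part of the package):

    from kafkashell.toolbar import Toolbar

    class StubSettings(object):  # hypothetical stand-in for Settings
        enable_help = True
        enable_fuzzy_search = False
        cluster = "local"

    toolbar = Toolbar(StubSettings())
    for style_class, text in toolbar.handler():
        print(style_class, repr(text))
    # the cluster renders as ("class:bottom-toolbar-yellow", "local"); fuzzy shows "OFF"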
/kafkashell/version.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | #
3 | # Copyright 2019 Shawn Seymour. All Rights Reserved.
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License"). You
6 | # may not use this file except in compliance with the License. A copy of
7 | # the License is located at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # or in the "license" file accompanying this file. This file is
12 | # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
13 | # ANY KIND, either express or implied. See the License for the specific
14 | # language governing permissions and limitations under the License.
15 |
16 | __version__ = "0.1.4"
17 |
18 |
19 | def get_version():
20 | return __version__
21 |
--------------------------------------------------------------------------------
/manifest.in:
--------------------------------------------------------------------------------
1 | include README.md
2 | include LICENSE
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | pytest>=2.7.0
2 | pytest-cov>=2.4.0,<2.6
3 | mock>=1.0.1
4 | tox>=1.9.2
5 | prompt_toolkit>=2.0.9
6 | fuzzyfinder>=2.1.0
7 | pygments>=2.3.1
8 | flake8>=3.7.7
9 | flake8-quotes>=1.0.0
10 | pexpect>=4.6.0
11 | jsonschema>=3.0.1
12 | oyaml>=0.8
13 |
--------------------------------------------------------------------------------
/scripts/build.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | rm -rf dist/ build/
4 | python3 setup.py sdist bdist_wheel
5 |
--------------------------------------------------------------------------------
/scripts/lint.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | flake8 --max-line-length=120 --inline-quotes '"' --exclude=.tox,htmlcov,build,tests,scratch,docs,venv .
4 |
--------------------------------------------------------------------------------
/scripts/test.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | pytest
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 | [metadata]
2 | description-file = README.md
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 |
3 | meta = {}
4 |
5 | with open("README.md", "r") as fh:
6 | long_description = fh.read()
7 |
8 | with open("kafkashell/version.py") as f:
9 | exec(f.read(), meta)
10 |
11 | requires = [
12 | "prompt-toolkit>=2.0.9",
13 | "pygments>=2.1.3,<3.0.0",
14 | "fuzzyfinder>=2.0.0",
15 | "jsonschema>=3.0.1",
16 | "oyaml>=0.8"
17 | ]
18 |
19 | setuptools.setup(
20 | name="kafka-shell",
21 | version=meta["__version__"],
22 | author="Shawn Seymour",
23 | author_email="shawn@devshawn.com",
24 | description="A supercharged, interactive Kafka shell built on top of the existing Kafka CLI tools.",
25 | long_description=long_description,
26 | long_description_content_type="text/markdown",
27 | url="https://github.com/devshawn/kafka-shell",
28 | license="Apache License 2.0",
29 | packages=["kafkashell"],
30 | package_data={"kafkashell": ["data/*.json", "data/*.yaml", "data/*.schema"]},
31 | install_requires=requires,
32 | entry_points={
33 | "console_scripts": ["kafka-shell=kafkashell.main:main"],
34 | },
35 | keywords=("kafka", "shell", "prompt", "apache", "autocomplete", "streams", "cli"),
36 | classifiers=[
37 | "Development Status :: 4 - Beta",
38 | "Intended Audience :: Developers",
39 | "Intended Audience :: System Administrators",
40 | "License :: OSI Approved :: Apache Software License",
41 | "Natural Language :: English",
42 | "Operating System :: MacOS",
43 | "Operating System :: Unix",
44 | "Programming Language :: Python :: 2",
45 | "Programming Language :: Python :: 2.7",
46 | "Programming Language :: Python :: 3",
47 | "Programming Language :: Python :: 3.5",
48 | "Programming Language :: Python :: 3.6",
49 | "Programming Language :: Python :: 3.7",
50 | "Topic :: Software Development",
51 | "Topic :: Software Development :: Libraries :: Python Modules",
52 | ],
53 | )
54 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/devshawn/kafka-shell/36158950ad3251956cfa9ace8451feee7f16338d/tests/__init__.py
--------------------------------------------------------------------------------
/tests/context.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 |
4 | sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../kafkashell')))
5 |
6 | import kafkashell
7 |
--------------------------------------------------------------------------------
/tests/data/test-admin-client-settings-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | admin_client_settings:
18 | config: admin.properties
19 |
--------------------------------------------------------------------------------
/tests/data/test-completer.json:
--------------------------------------------------------------------------------
1 | {
2 | "commands": {
3 | "clear": {
4 | "name": "clear",
5 | "description": "Clear the screen"
6 | },
7 | "kafka-topics": {
8 | "name": "kafka-topics",
9 | "description": "Manage topics within a cluster.",
10 | "options": {
11 | "--alter": {
12 | "name": "alter",
13 | "description": "Alter the number of partitions, replica assignment, and/or configuration for a topic."
14 | },
15 | "--config": {
16 | "name": "--config",
17 | "description": "A topic configuration override for the topic being created or altered."
18 | }
19 | }
20 | }
21 | }
22 | }
--------------------------------------------------------------------------------
/tests/data/test-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | test:
18 | bootstrap_servers: test:9092
19 | zookeeper_connect: test:2181
20 | schema_registry_url: https://test:8081
21 | ksql_server_url: https://test:8088
22 | command_prefix: ''
23 |
--------------------------------------------------------------------------------
/tests/data/test-consumer-settings-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | consumer_settings:
18 | config: consumer.properties
19 | properties:
20 | print.key: true
21 | key.separator: ","
22 |
--------------------------------------------------------------------------------
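Given this config, the executor's consumer path above (handle_kafka_console_consumer_command
plus handle_cli_settings) would expand a bare command roughly as:

    kafka-console-consumer --topic test
    # becomes:
    kafka-console-consumer --topic test --bootstrap-server localhost:9092 --consumer.config consumer.properties --property key.separator=, --property print.key=true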
/tests/data/test-consumer-settings-without-properties-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | consumer_settings:
18 | config: consumer.properties
19 |
--------------------------------------------------------------------------------
/tests/data/test-environment-variables-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: $BOOTSTRAP_SERVERS
13 | zookeeper_connect: $ZOOKEEPER_CONNECT
14 | schema_registry_url: $SCHEMA_REGISTRY_URL
15 | ksql_server_url: $KSQL_SERVER_URL
16 | command_prefix: ''
17 | test:
18 | bootstrap_servers: test:9092
19 | zookeeper_connect: test:2181
20 | schema_registry_url: https://test:8081
21 | ksql_server_url: https://test:8088
22 | command_prefix: ''
23 |
--------------------------------------------------------------------------------
/tests/data/test-file-extension-bat-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | command_file_extension: bat
18 |
--------------------------------------------------------------------------------
/tests/data/test-file-extension-sh-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | command_file_extension: sh
18 |
--------------------------------------------------------------------------------
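With command_file_extension set, handle_file_extension suffixes only the first token,
so commands map onto the raw Kafka distribution scripts, e.g.:

    kafka-topics --list  ->  kafka-topics.sh --list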
/tests/data/test-history-off-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: false
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 |
--------------------------------------------------------------------------------
/tests/data/test-invalid-file-extension-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | command_file_extension: asdf
18 |
--------------------------------------------------------------------------------
/tests/data/test-invalid-ksql-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: localhost:8088
16 | command_prefix: ''
17 |
--------------------------------------------------------------------------------
/tests/data/test-invalid-schema-registry-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: localhost:8081
15 | ksql_server_url: https://localhost:8088
16 | command_prefix: ''
17 |
--------------------------------------------------------------------------------
/tests/data/test-modified-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: false
8 | fuzzy_search: false
9 | cluster: test
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | test:
18 | bootstrap_servers: test:9092
19 | zookeeper_connect: test:2181
20 | schema_registry_url: https://test:8081
21 | ksql_server_url: https://test:8088
22 | command_prefix: ''
23 |
--------------------------------------------------------------------------------
/tests/data/test-no-zookeeper-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 |
--------------------------------------------------------------------------------
/tests/data/test-prefix-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: docker exec -it container-name
17 |
--------------------------------------------------------------------------------
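With a non-empty command_prefix, the executor prepends it to the fully expanded
command before handing it to os.system, so everything runs inside the container;
roughly:

    kafka-topics --list  ->  docker exec -it container-name kafka-topics --list --zookeeper localhost:2181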
/tests/data/test-prefix-none-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 |
--------------------------------------------------------------------------------
/tests/data/test-producer-settings-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | producer_settings:
18 | config: producer.properties
19 | properties:
20 | print.key: true
21 | key.separator: ","
22 |
--------------------------------------------------------------------------------
/tests/data/test-producer-settings-without-properties-config.yaml:
--------------------------------------------------------------------------------
1 | version: 1
2 | enable:
3 | history: true
4 | save_on_exit: true
5 | auto_complete: true
6 | auto_suggest: true
7 | inline_help: true
8 | fuzzy_search: true
9 | cluster: local
10 | clusters:
11 | local:
12 | bootstrap_servers: localhost:9092
13 | zookeeper_connect: localhost:2181
14 | schema_registry_url: http://localhost:8081
15 | ksql_server_url: http://localhost:8088
16 | command_prefix: ''
17 | producer_settings:
18 | config: producer.properties
19 |
--------------------------------------------------------------------------------
/tests/test_bindings.py:
--------------------------------------------------------------------------------
1 | import mock
2 |
3 | from tests.context import kafkashell
4 | from tests.utilities import setup_config_path_for_test
5 |
6 |
7 | @mock.patch("prompt_toolkit.application.current.get_app")
8 | @mock.patch("kafkashell.config.get_user_config_path")
9 | @mock.patch("kafkashell.settings.Settings.set_enable_help")
10 | @mock.patch("kafkashell.settings.Settings.set_enable_fuzzy_search")
11 | @mock.patch("kafkashell.settings.Settings.set_next_cluster")
12 | def test_toolbar(mock_set_next_cluster, mock_set_enable_fuzzy_search, mock_set_enable_help, mock_config_path,
13 | mock_get_app):
14 | mock_config_path.return_value = setup_config_path_for_test()
15 | bindings = kafkashell.bindings.get_bindings(kafkashell.settings.Settings())
16 |
17 | assert bindings.bindings is not None
18 | for binding in bindings.bindings:
19 | binding.handler({})
20 | key = binding.keys[0]
21 |
22 | if key == "f2":
23 | mock_set_next_cluster.assert_called_once()
24 |
25 | elif key == "f3":
26 | mock_set_enable_fuzzy_search.assert_called_once_with(False)
27 |
28 | elif key == "f9":
29 | mock_set_enable_help.assert_called_once_with(False)
30 |
31 | elif key == "f10":
32 | mock_get_app.assert_called_once()
33 | mock_get_app.return_value.exit.assert_called_once_with(exception=EOFError)
34 |
--------------------------------------------------------------------------------
/tests/test_cli.py:
--------------------------------------------------------------------------------
1 | # import pexpect
2 | #
3 | #
4 | # def test_run_cli():
5 | # child = pexpect.spawn('python ./kafkashell/main.py', [], 5)
6 | # child.expect("> ")
7 | # child.sendline('exit')
8 |
--------------------------------------------------------------------------------
/tests/test_completer.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | import mock
4 | import pytest
5 | from prompt_toolkit.document import Document
6 |
7 | from tests.context import kafkashell
8 | from tests.test_completer_data import command_test_data, option_test_data, option_value_test_data
9 | from tests.utilities import setup_settings_for_test, setup_settings_with_real_completer_for_test
10 |
11 | fuzzy_test_data = [
12 | ("kf", ["kafka"], ["kafka"], True),
13 | ("kaf", ["kafka"], ["kafka"], True),
14 | ("kf", ["kafka"], [], False),
15 | ("kaf", ["kafka"], ["kafka"], False),
16 | ]
17 |
18 |
19 | def get_completions(completer, command):
20 | position = len(command)
21 | result = list(completer.get_completions(Document(text=command, cursor_position=position), None))
22 | return result
23 |
24 |
25 | def verify_completions(completions, expected):
26 | actual = []
27 | for completion in completions:
28 | actual.append(completion.text)
29 | print("[\"" + "\",\"".join(actual) + "\"]")
30 | actual.sort()
31 | expected.sort()
32 | assert actual == expected
33 |
34 |
35 | @mock.patch("kafkashell.settings.Settings")
36 | @pytest.mark.parametrize("test_input,expected", command_test_data)
37 | def test_get_completions_for_commands(mock_settings, test_input, expected):
38 | settings = setup_settings_with_real_completer_for_test(mock_settings)
39 | completer = kafkashell.completer.KafkaCompleter(settings)
40 | completions = get_completions(completer, test_input)
41 | verify_completions(completions, expected)
42 |
43 |
44 | @mock.patch("kafkashell.settings.Settings")
45 | @pytest.mark.parametrize("test_input,expected", option_test_data)
46 | def test_get_completions_for_options(mock_settings, test_input, expected):
47 | settings = setup_settings_with_real_completer_for_test(mock_settings)
48 | completer = kafkashell.completer.KafkaCompleter(settings)
49 | completions = get_completions(completer, test_input)
50 | verify_completions(completions, expected)
51 |
52 |
53 | @mock.patch("kafkashell.settings.Settings")
54 | @pytest.mark.parametrize("test_input,expected", option_value_test_data)
55 | def test_get_completions_for_option_values(mock_settings, test_input, expected):
56 | settings = setup_settings_with_real_completer_for_test(mock_settings)
57 | completer = kafkashell.completer.KafkaCompleter(settings)
58 | completions = get_completions(completer, test_input)
59 | verify_completions(completions, expected)
60 |
61 |
62 | @mock.patch("kafkashell.settings.Settings")
63 | def test_get_command_descriptions(mock_settings):
64 | settings = setup_settings_for_test(mock_settings)
65 |
66 | completer = kafkashell.completer.KafkaCompleter(settings)
67 |
68 | assert completer.get_command_descriptions() == {
69 | "clear": "Clear the screen",
70 | "kafka-topics": "Manage topics within a cluster."
71 | }
72 |
73 |
74 | @mock.patch("kafkashell.settings.Settings")
75 | def test_get_option_descriptions(mock_settings):
76 | settings = setup_settings_for_test(mock_settings)
77 |
78 | completer = kafkashell.completer.KafkaCompleter(settings)
79 |
80 | assert completer.get_option_descriptions("kafka-topics") == {
81 | "--alter": "Alter the number of partitions, replica assignment, and/or configuration for a topic.",
82 | "--config": "A topic configuration override for the topic being created or altered."
83 | }
84 |
85 |
86 | @mock.patch("kafkashell.settings.Settings")
87 | @pytest.mark.parametrize("wbc,commands,expected,enabled", fuzzy_test_data)
88 | def test_fuzzy_enabled(mock_settings, wbc, commands, expected, enabled):
89 | settings = setup_settings_for_test(mock_settings)
90 | settings.enable_fuzzy_search = enabled
91 |
92 | completer = kafkashell.completer.KafkaCompleter(settings)
93 |
94 | assert list(completer.fuzzy(wbc, commands)) == expected
95 |
96 |
97 | @mock.patch("kafkashell.settings.Settings")
98 | @pytest.mark.parametrize("test_input,expected", [({}, True), (None, False), ("", True)])
99 | def test_has_option_value_completer(mock_settings, test_input, expected):
100 | settings = setup_settings_for_test(mock_settings)
101 | completer = kafkashell.completer.KafkaCompleter(settings)
102 | assert completer.has_option_value_completer(test_input) == expected
103 |
104 |
105 | @mock.patch("kafkashell.settings.Settings")
106 | @pytest.mark.parametrize("test_input,expected",
107 | [("kafka-topics ", True), ("kafka-topics --list", True), ("kafka-topics", False), ("", False)]
108 | )
109 | def test_is_not_command(mock_settings, test_input, expected):
110 | settings = setup_settings_for_test(mock_settings)
111 | completer = kafkashell.completer.KafkaCompleter(settings)
112 | assert completer.is_not_command(test_input) == expected
113 |
--------------------------------------------------------------------------------
/tests/test_completer_data.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | command_test_data = [
4 | (
5 | "",
6 | ["version", "cluster-select", "cluster-describe", "exit", "clear", "kafka-acls", "kafka-avro-console-consumer",
7 | "kafka-avro-console-producer", "kafka-replica-verification", "kafka-preferred-replica-election",
8 | "kafka-broker-api-versions", "kafka-configs", "kafka-console-consumer", "kafka-console-producer", "kafka-reassign-partitions",
9 | "kafka-consumer-groups", "kafka-delete-records", "kafka-dump-log", "kafka-log-dirs", "kafka-topics",
10 | "kafka-verifiable-consumer", "kafka-verifiable-producer", "ksql", "zookeeper-shell"]
11 | ),
12 | (
13 | "kafka",
14 | ["kafka-acls", "kafka-avro-console-consumer", "kafka-avro-console-producer", "kafka-broker-api-versions",
15 | "kafka-configs", "kafka-console-consumer", "kafka-console-producer", "kafka-consumer-groups", "kafka-reassign-partitions",
16 | "kafka-delete-records", "kafka-dump-log", "kafka-log-dirs", "kafka-topics", "kafka-verifiable-consumer",
17 | "kafka-verifiable-producer", "kafka-replica-verification", "kafka-preferred-replica-election"]
18 | ),
19 | (
20 | "k",
21 | ["ksql", "kafka-acls", "kafka-avro-console-consumer", "kafka-avro-console-producer",
22 | "kafka-broker-api-versions", "kafka-replica-verification", "kafka-preferred-replica-election",
23 | "kafka-configs", "kafka-console-consumer", "kafka-console-producer", "kafka-consumer-groups", "kafka-reassign-partitions",
24 | "kafka-delete-records", "kafka-dump-log", "kafka-log-dirs", "kafka-topics", "kafka-verifiable-consumer",
25 | "kafka-verifiable-producer", "zookeeper-shell"]
26 | ),
27 | (
28 | "ksq",
29 | ["ksql"]
30 | ),
31 | (
32 | "zookeeper",
33 | ["zookeeper-shell"]
34 | ),
35 | (
36 | "kafka-topics",
37 | ["kafka-topics"]
38 | ),
39 | (
40 | "cluster-",
41 | ["cluster-select", "cluster-describe"]
42 | ),
43 | (
44 | "this-command-does-not-exist",
45 | []
46 | )
47 | ]
48 |
49 | option_test_data = [
50 | (
51 | "kafka-topics ",
52 | ["--alter", "--config", "--create", "--delete", "--delete-config", "--describe", "--disable-rack-aware",
53 | "--exclude-internal", "--force", "--help", "--if-exists", "--if-not-exists", "--list", "--partitions",
54 | "--replica-assignment", "--replication-factor", "--topic", "--topics-with-overrides",
55 | "--unavailable-partitions", "--under-replicated-partitions", "--zookeeper"]
56 | ),
57 | (
58 | "kafka-configs ",
59 | ["--add-config", "--alter", "--bootstrap-server", "--command-config", "--delete-config", "--describe",
60 | "--entity-default", "--entity-name", "--entity-type", "--force", "--help", "--zookeeper"]
61 | ),
62 | (
63 | "kafka-console-consumer ",
64 | ["--bootstrap-server", "--consumer-property", "--consumer.config", "--enable-systest-events", "--formatter",
65 | "--from-beginning", "--group", "--isolation-level", "--key-deserializer", "--max-messages", "--offset",
66 | "--partition", "--property", "--skip-message-on-error", "--timeout-ms", "--topic", "--value-deserializer",
67 | "--whitelist"]
68 | ),
69 | (
70 | "kafka-console-consumer --group test ",
71 | ["--bootstrap-server", "--consumer-property", "--consumer.config", "--enable-systest-events", "--formatter",
72 | "--from-beginning", "--isolation-level", "--key-deserializer", "--max-messages", "--offset",
73 | "--partition", "--property", "--skip-message-on-error", "--timeout-ms", "--topic", "--value-deserializer",
74 | "--whitelist"]
75 | ),
76 | (
77 | "kafka-console-consumer --group test --consumer-property print.key=true ",
78 | ["--bootstrap-server", "--consumer-property", "--consumer.config", "--enable-systest-events", "--formatter",
79 | "--from-beginning", "--isolation-level", "--key-deserializer", "--max-messages", "--offset",
80 | "--partition", "--property", "--skip-message-on-error", "--timeout-ms", "--topic", "--value-deserializer",
81 | "--whitelist"]
82 | ),
83 | (
84 | "kafka-console-consumer --group test --consumer-property print.key=true --for",
85 | ["--formatter"]
86 | ),
87 | (
88 | "ksql ",
89 | ["--", "--config-file", "--help", "--output", "--query-row-limit", "--query-timeout"]
90 | ),
91 | (
92 | "zookeeper-shell ",
93 | []
94 | ),
95 | (
96 | "cluster-select l",
97 | ["local"]
98 | ),
99 | (
100 | "cluster-describe ",
101 | ["local"]
102 | )
103 | ]
104 |
105 | option_value_test_data = [
106 | (
107 | "kafka-configs --add-config ",
108 | ["SCRAM-SHA-256", "SCRAM-SHA-512", "advertised.listeners", "background.threads", "cleanup.policy",
109 | "compression.type", "consumer_byte_rate", "delete.retention.ms", "file.delete.delay.ms", "flush.messages",
110 | "flush.ms", "follower.replication.throttled.rate", "follower.replication.throttled.replicas",
111 | "index.interval.bytes", "leader.replication.throttled.rate", "leader.replication.throttled.replicas",
112 | "listener.security.protocol.map", "listeners", "log.cleaner.backoff.ms", "log.cleaner.dedupe.buffer.size",
113 | "log.cleaner.delete.retention.ms", "log.cleaner.io.buffer.load.factor", "log.cleaner.io.buffer.size",
114 | "log.cleaner.io.max.bytes.per.second", "log.cleaner.min.cleanable.ratio", "log.cleaner.min.compaction.lag.ms",
115 | "log.cleaner.threads", "log.cleanup.policy", "log.flush.interval.messages", "log.flush.interval.ms",
116 | "log.index.interval.bytes", "log.index.size.max.bytes", "log.message.downconversion.enable",
117 | "log.message.timestamp.difference.max.ms", "log.message.timestamp.type", "log.preallocate",
118 | "log.retention.bytes", "log.retention.ms", "log.roll.jitter.ms", "log.roll.ms", "log.segment.bytes",
119 | "log.segment.delete.delay.ms", "max.connections.per.ip", "max.connections.per.ip.overrides",
120 | "max.message.bytes", "message.downconversion.enable", "message.format.version", "message.max.bytes",
121 | "message.timestamp.difference.max.ms", "message.timestamp.type", "metric.reporters",
122 | "min.cleanable.dirty.ratio", "min.compaction.lag.ms", "min.insync.replicas", "num.io.threads",
123 | "num.network.threads", "num.recovery.threads.per.data.dir", "num.replica.fetchers", "preallocate",
124 | "principal.builder.class", "producer_byte_rate", "request_percentage", "retention.bytes", "retention.ms",
125 | "sasl.enabled.mechanisms", "sasl.jaas.config", "sasl.kerberos.kinit.cmd",
126 | "sasl.kerberos.min.time.before.relogin", "sasl.kerberos.principal.to.local.rules",
127 | "sasl.kerberos.service.name", "sasl.kerberos.ticket.renew.jitter", "sasl.kerberos.ticket.renew.window.factor",
128 | "sasl.login.refresh.buffer.seconds", "sasl.login.refresh.min.period.seconds",
129 | "sasl.login.refresh.window.factor", "sasl.login.refresh.window.jitter", "sasl.mechanism.inter.broker.protocol",
130 | "segment.bytes", "segment.index.bytes", "segment.jitter.ms", "segment.ms", "ssl.cipher.suites",
131 | "ssl.client.auth", "ssl.enabled.protocols", "ssl.endpoint.identification.algorithm", "ssl.key.password",
132 | "ssl.keymanager.algorithm", "ssl.keystore.location", "ssl.keystore.password", "ssl.keystore.type",
133 | "ssl.protocol", "ssl.provider", "ssl.secure.random.implementation", "ssl.trustmanager.algorithm",
134 | "ssl.truststore.location", "ssl.truststore.password", "ssl.truststore.type", "unclean.leader.election.enable"]
135 | ),
136 | (
137 | "kafka-configs --entity-type ",
138 | ["broker", "client", "topic", "user"]
139 | ),
140 | (
141 | "kafka-configs --entity-type broker --add-config ",
142 | ["advertised.listeners", "background.threads", "compression.type", "follower.replication.throttled.rate",
143 | "leader.replication.throttled.rate", "listener.security.protocol.map", "listeners", "log.cleaner.backoff.ms",
144 | "log.cleaner.dedupe.buffer.size", "log.cleaner.delete.retention.ms", "log.cleaner.io.buffer.load.factor",
145 | "log.cleaner.io.buffer.size", "log.cleaner.io.max.bytes.per.second", "log.cleaner.min.cleanable.ratio",
146 | "log.cleaner.min.compaction.lag.ms", "log.cleaner.threads", "log.cleanup.policy",
147 | "log.flush.interval.messages", "log.flush.interval.ms", "log.index.interval.bytes", "log.index.size.max.bytes",
148 | "log.message.downconversion.enable", "log.message.timestamp.difference.max.ms", "log.message.timestamp.type",
149 | "log.preallocate", "log.retention.bytes", "log.retention.ms", "log.roll.jitter.ms", "log.roll.ms",
150 | "log.segment.bytes", "log.segment.delete.delay.ms", "max.connections.per.ip",
151 | "max.connections.per.ip.overrides", "message.max.bytes", "metric.reporters", "min.insync.replicas",
152 | "num.io.threads", "num.network.threads", "num.recovery.threads.per.data.dir", "num.replica.fetchers",
153 | "principal.builder.class", "sasl.enabled.mechanisms", "sasl.jaas.config", "sasl.kerberos.kinit.cmd",
154 | "sasl.kerberos.min.time.before.relogin", "sasl.kerberos.principal.to.local.rules",
155 | "sasl.kerberos.service.name", "sasl.kerberos.ticket.renew.jitter", "sasl.kerberos.ticket.renew.window.factor",
156 | "sasl.login.refresh.buffer.seconds", "sasl.login.refresh.min.period.seconds",
157 | "sasl.login.refresh.window.factor", "sasl.login.refresh.window.jitter", "sasl.mechanism.inter.broker.protocol",
158 | "ssl.cipher.suites", "ssl.client.auth", "ssl.enabled.protocols", "ssl.endpoint.identification.algorithm",
159 | "ssl.key.password", "ssl.keymanager.algorithm", "ssl.keystore.location", "ssl.keystore.password",
160 | "ssl.keystore.type", "ssl.protocol", "ssl.provider", "ssl.secure.random.implementation",
161 | "ssl.trustmanager.algorithm", "ssl.truststore.location", "ssl.truststore.password", "ssl.truststore.type",
162 | "unclean.leader.election.enable"]
163 | ),
164 | (
165 | "kafka-configs --entity-type broker --delete-config ",
166 | ["advertised.listeners", "background.threads", "compression.type", "follower.replication.throttled.rate",
167 | "leader.replication.throttled.rate", "listener.security.protocol.map", "listeners", "log.cleaner.backoff.ms",
168 | "log.cleaner.dedupe.buffer.size", "log.cleaner.delete.retention.ms", "log.cleaner.io.buffer.load.factor",
169 | "log.cleaner.io.buffer.size", "log.cleaner.io.max.bytes.per.second", "log.cleaner.min.cleanable.ratio",
170 | "log.cleaner.min.compaction.lag.ms", "log.cleaner.threads", "log.cleanup.policy",
171 | "log.flush.interval.messages", "log.flush.interval.ms", "log.index.interval.bytes", "log.index.size.max.bytes",
172 | "log.message.downconversion.enable", "log.message.timestamp.difference.max.ms", "log.message.timestamp.type",
173 | "log.preallocate", "log.retention.bytes", "log.retention.ms", "log.roll.jitter.ms", "log.roll.ms",
174 | "log.segment.bytes", "log.segment.delete.delay.ms", "max.connections.per.ip",
175 | "max.connections.per.ip.overrides", "message.max.bytes", "metric.reporters", "min.insync.replicas",
176 | "num.io.threads", "num.network.threads", "num.recovery.threads.per.data.dir", "num.replica.fetchers",
177 | "principal.builder.class", "sasl.enabled.mechanisms", "sasl.jaas.config", "sasl.kerberos.kinit.cmd",
178 | "sasl.kerberos.min.time.before.relogin", "sasl.kerberos.principal.to.local.rules",
179 | "sasl.kerberos.service.name", "sasl.kerberos.ticket.renew.jitter", "sasl.kerberos.ticket.renew.window.factor",
180 | "sasl.login.refresh.buffer.seconds", "sasl.login.refresh.min.period.seconds",
181 | "sasl.login.refresh.window.factor", "sasl.login.refresh.window.jitter", "sasl.mechanism.inter.broker.protocol",
182 | "ssl.cipher.suites", "ssl.client.auth", "ssl.enabled.protocols", "ssl.endpoint.identification.algorithm",
183 | "ssl.key.password", "ssl.keymanager.algorithm", "ssl.keystore.location", "ssl.keystore.password",
184 | "ssl.keystore.type", "ssl.protocol", "ssl.provider", "ssl.secure.random.implementation",
185 | "ssl.trustmanager.algorithm", "ssl.truststore.location", "ssl.truststore.password", "ssl.truststore.type",
186 | "unclean.leader.election.enable"]
187 | ),
188 | (
189 | "kafka-configs --entity-type topic --add-config ",
190 | ["cleanup.policy", "compression.type", "delete.retention.ms", "file.delete.delay.ms", "flush.messages",
191 | "flush.ms", "follower.replication.throttled.replicas", "index.interval.bytes",
192 | "leader.replication.throttled.replicas", "max.message.bytes", "message.downconversion.enable",
193 | "message.format.version", "message.timestamp.difference.max.ms", "message.timestamp.type",
194 | "min.cleanable.dirty.ratio", "min.compaction.lag.ms", "min.insync.replicas", "preallocate", "retention.bytes",
195 | "retention.ms", "segment.bytes", "segment.index.bytes", "segment.jitter.ms", "segment.ms",
196 | "unclean.leader.election.enable"]
197 | ),
198 | (
199 | "kafka-configs --entity-type user --add-config ",
200 | ["SCRAM-SHA-256", "SCRAM-SHA-512", "consumer_byte_rate", "producer_byte_rate", "request_percentage"]
201 | ),
202 | (
203 | "kafka-configs --entity-type client --add-config ",
204 | ["consumer_byte_rate", "producer_byte_rate", "request_percentage"]
205 | ),
206 | (
207 | "kafka-configs --entity-type client --delete-config ",
208 | ["consumer_byte_rate", "producer_byte_rate", "request_percentage"]
209 | ),
210 | (
211 | "kafka-topics --config ",
212 | ["cleanup.policy", "compression.type", "delete.retention.ms", "file.delete.delay.ms", "flush.messages",
213 | "flush.ms", "follower.replication.throttled.replicas", "index.interval.bytes",
214 | "leader.replication.throttled.replicas", "max.message.bytes", "message.downconversion.enable",
215 | "message.format.version", "message.timestamp.difference.max.ms", "message.timestamp.type",
216 | "min.cleanable.dirty.ratio", "min.compaction.lag.ms", "min.insync.replicas", "preallocate", "retention.bytes",
217 | "retention.ms", "segment.bytes", "segment.index.bytes", "segment.jitter.ms", "segment.ms",
218 | "unclean.leader.election.enable"]
219 | ),
220 | (
221 | "kafka-topics --delete-config ",
222 | ["cleanup.policy", "compression.type", "delete.retention.ms", "file.delete.delay.ms", "flush.messages",
223 | "flush.ms", "follower.replication.throttled.replicas", "index.interval.bytes",
224 | "leader.replication.throttled.replicas", "max.message.bytes", "message.downconversion.enable",
225 | "message.format.version", "message.timestamp.difference.max.ms", "message.timestamp.type",
226 | "min.cleanable.dirty.ratio", "min.compaction.lag.ms", "min.insync.replicas", "preallocate", "retention.bytes",
227 | "retention.ms", "segment.bytes", "segment.index.bytes", "segment.jitter.ms", "segment.ms",
228 | "unclean.leader.election.enable"]
229 | ),
230 | (
231 | "kafka-configs --add-config cleanup.polic",
232 | ["cleanup.policy", "log.cleanup.policy"]
233 | ),
234 | (
235 | "kafka-configs --add-config cleanup.policy=",
236 | ["compact", "delete"]
237 | ),
238 | (
239 | "kafka-configs --add-config log.cleanup.policy=",
240 | ["compact", "delete"]
241 | ),
242 | (
243 | "kafka-configs --add-config log.cleanup.policy=comp",
244 | ["compact"]
245 | ),
246 | (
247 | "kafka-configs --add-config ssl.protocol=",
248 | []
249 | ),
250 | (
251 | "kafka-configs --add-config log.message.timestamp.type=",
252 | ["CreateTime", "LogAppendTime"]
253 | ),
254 | (
255 | "kafka-configs --add-config log.message.timestamp.type=Create",
256 | ["CreateTime"]
257 | ),
258 | (
259 | "kafka-configs --add-config log.message.timestamp.type=asdf",
260 | []
261 | ),
262 | (
263 | "kafka-configs --add-config compression.type=",
264 | ["gzip", "lz4", "none", "snappy", "zstd"]
265 | ),
266 | (
267 | "kafka-configs --delete-config compression.type=",
268 | ["gzip", "lz4", "none", "snappy", "zstd"]
269 | ),
270 | (
271 | "kafka-configs --add-config compression.type=z",
272 | ["zstd", "gzip", "lz4"]
273 | ),
274 | (
275 | "kafka-topics --config compression.type=",
276 | ["gzip", "lz4", "none", "snappy", "zstd"]
277 | ),
278 | (
279 | "kafka-topics --config message.timestamp.type=",
280 | ["CreateTime", "LogAppendTime"]
281 | ),
282 | (
283 | "kafka-topics --delete-config message.timestamp.type=",
284 | ["CreateTime", "LogAppendTime"]
285 | ),
286 | (
287 | "ksql --output ",
288 | ["JSON", "TABULAR"]
289 | ),
290 | (
291 | "ksql --output JS",
292 | ["JSON"]
293 | ),
294 | (
295 | "kafka-console-producer --request-required-acks ",
296 | ["-1", "0", "1", "all"]
297 | ),
298 | (
299 | "kafka-avro-console-producer --request-required-acks ",
300 | ["-1", "0", "1", "all"]
301 | ),
302 | (
303 | "kafka-verifiable-producer --acks ",
304 | ["-1", "0", "1", "all"]
305 | ),
306 | (
307 | "kafka-verifiable-consumer --reset-policy ",
308 | ["earliest", "latest", "none"]
309 | ),
310 | (
311 | "kafka-acls --resource-pattern-type ",
312 | ["ANY", "LITERAL", "MATCH", "PREFIXED"]
313 | ),
314 | (
315 | "kafka-console-producer --compression-codec ",
316 | ["gzip", "lz4", "none", "snappy", "zstd"]
317 | ),
318 | (
319 | "kafka-configs --add-config unclean.leader.election.enable=",
320 | ["true", "false"]
321 | ),
322 | (
323 | "kafka-configs --delete-config unclean.leader.election.enable=fal",
324 | ["false"]
325 | ),
326 | (
327 | "kafka-topics --config unclean.leader.election.enable=",
328 | ["true", "false"]
329 | ),
330 | (
331 | "kafka-topics --delete-config unclean.leader.election.enable=",
332 | ["true", "false"]
333 | ),
334 | (
335 | "kafka-configs --add-config log.preallocate=",
336 | ["true", "false"]
337 | ),
338 | (
339 | "kafka-configs --delete-config log.preallocate=tr",
340 | ["true"]
341 | ),
342 | (
343 | "kafka-topics --config preallocate=",
344 | ["true", "false"]
345 | ),
346 | (
347 | "kafka-topics --delete-config preallocate=",
348 | ["true", "false"]
349 | ),
350 | (
351 | "kafka-configs --add-config log.message.downconversion.enable=",
352 | ["true", "false"]
353 | ),
354 | (
355 | "kafka-topics --config message.downconversion.enable=",
356 | ["true", "false"]
357 | )
358 | ]
359 |
--------------------------------------------------------------------------------
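The `compression.type=z` case above is the one spot where the expected suggestions are not alphabetical: suggestions are ranked by fuzzy-match quality rather than sorted. A minimal sketch of that ordering using the fuzzyfinder package (a test dependency declared in tox.ini); the ranking rules shown here are fuzzyfinder's own, not kafka-shell code:

    from fuzzyfinder import fuzzyfinder

    codecs = ["gzip", "lz4", "none", "snappy", "zstd"]

    # "z" matches at index 0 in "zstd" but at index 1 in "gzip" and "lz4";
    # earlier match positions rank first, giving the order the test expects.
    assert list(fuzzyfinder("z", codecs)) == ["zstd", "gzip", "lz4"]
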
/tests/test_config.py:
--------------------------------------------------------------------------------
1 | import os
2 | import sys
3 |
4 | import mock
5 | import oyaml as yaml
6 | import pytest
7 |
8 | from tests.context import kafkashell
9 |
10 | open_patch_name = "__builtin__.open" if sys.version_info[0] < 3 else "builtins.open"
11 | print_patch_name = "__builtin__.print" if sys.version_info[0] < 3 else "builtins.print"
12 |
13 | test_config_data = [
14 | ("{}", {}),
15 | ("{\"test\": \"testing\"}", {"test": "testing"})
16 | ]
17 |
18 | test_completer_data = [
19 | ("completer", "{}", {}),
20 | ("kafka-configs", "{\"test\": \"testing\"}", {"test": "testing"})
21 | ]
22 |
23 |
24 | @mock.patch(open_patch_name, create=True)
25 | @mock.patch("os.makedirs")
26 | @mock.patch("os.path.exists")
27 | @mock.patch("os.path.isfile")
28 | @mock.patch("os.path.expanduser")
29 | def test_init_config_file_exists(mock_expanduser, mock_isfile, mock_exists, mock_makedirs, mock_open):
30 | mock_config_file = "/tmp/.kafka-shell/config.yaml"
31 | mock_dir = "/tmp/.kafka-shell"
32 | mock_expanduser.side_effect = [mock_dir, mock_config_file]
33 | mock_isfile.return_value = True
34 | mock_exists.return_value = True
35 |
36 | kafkashell.config.init_config()
37 |
38 | mock_exists.assert_called_once_with(mock_dir)
39 | mock_makedirs.assert_not_called()
40 | mock_isfile.assert_called_once_with(mock_config_file)
41 | mock_open.assert_not_called()
42 |
43 |
44 | @mock.patch("os.makedirs")
45 | @mock.patch("os.path.exists")
46 | @mock.patch("os.path.isfile")
47 | @mock.patch("os.path.expanduser")
48 | @mock.patch("oyaml.dump")
49 | def test_init_config_dir_exists_but_file_does_not_exist(mock_dump, mock_expanduser, mock_isfile, mock_exists,
50 | mock_makedirs):
51 | with mock.patch(open_patch_name, mock.mock_open(read_data="{}")) as mock_open:
52 | mock_dir = "/tmp/.kafka-shell"
53 | mock_config_file = "/tmp/.kafka-shell/config.yaml"
54 | mock_expanduser.side_effect = [mock_dir, mock_config_file]
55 | mock_isfile.return_value = False
56 | mock_exists.return_value = True
57 | data_dir = os.path.dirname(os.path.realpath(__file__))
58 | expected_path = os.path.realpath(os.path.join(data_dir, "../kafkashell/data/shell-config.yaml"))
59 |
60 | kafkashell.config.init_config()
61 |
62 | mock_exists.assert_called_once_with(mock_dir)
63 | mock_makedirs.assert_not_called()
64 | mock_isfile.assert_called_once_with(mock_config_file)
65 | mock_open.assert_any_call(mock_config_file, "w")
66 | mock_open.assert_any_call(expected_path)
67 | mock_dump.assert_called_once_with({}, mock_open.return_value, default_flow_style=False, sort_keys=False)
68 |
69 |
70 | @mock.patch("os.makedirs")
71 | @mock.patch("os.path.exists")
72 | @mock.patch("os.path.isfile")
73 | @mock.patch("os.path.expanduser")
74 | @mock.patch("oyaml.dump")
 75 | def test_init_config_dir_and_file_do_not_exist(mock_dump, mock_expanduser, mock_isfile, mock_exists,
76 | mock_makedirs):
77 | with mock.patch(open_patch_name, mock.mock_open(read_data="{}")) as mock_open:
78 | mock_dir = "/tmp/.kafka-shell"
79 | mock_config_file = "/tmp/.kafka-shell/config.yaml"
80 | mock_expanduser.side_effect = [mock_dir, mock_config_file]
81 | mock_isfile.return_value = False
82 | mock_exists.return_value = False
83 | data_dir = os.path.dirname(os.path.realpath(__file__))
84 | expected_path = os.path.realpath(os.path.join(data_dir, "../kafkashell/data/shell-config.yaml"))
85 |
86 | kafkashell.config.init_config()
87 |
88 | mock_exists.assert_called_once_with(mock_dir)
89 | mock_makedirs.assert_called_once_with(mock_dir)
90 | mock_isfile.assert_called_once_with(mock_config_file)
91 | mock_open.assert_any_call(mock_config_file, "w")
92 | mock_open.assert_any_call(expected_path)
93 | mock_dump.assert_called_once_with({}, mock_open.return_value, default_flow_style=False, sort_keys=False)
94 |
95 |
96 | @mock.patch(open_patch_name, create=True)
97 | @mock.patch("os.path.isfile")
98 | @mock.patch("os.path.expanduser")
 99 | def test_init_history_file_exists(mock_expanduser, mock_isfile, mock_open):
100 | mock_history_file = "/tmp/.kafka-shell-history"
101 | mock_expanduser.return_value = mock_history_file
102 | mock_isfile.return_value = True
103 |
104 | kafkashell.config.init_history()
105 |
106 | mock_isfile.assert_called_once_with(mock_history_file)
107 | assert not mock_open.called
108 |
109 |
110 | @mock.patch("os.path.isfile")
111 | @mock.patch("os.path.expanduser")
112 | def test_init_history_file_does_not_exist(mock_expanduser, mock_isfile):
113 | with mock.patch(open_patch_name, mock.mock_open(read_data="{}")) as mock_open:
114 | mock_history_file = "/tmp/.kafka-shell-history"
115 | mock_expanduser.return_value = mock_history_file
116 | mock_isfile.return_value = False
117 |
118 | kafkashell.config.init_history()
119 |
120 | mock_isfile.assert_called_once_with(mock_history_file)
121 | mock_open.assert_any_call(mock_history_file, "a")
122 |
123 |
124 | @mock.patch("os.path.expanduser")
125 | @pytest.mark.parametrize("test_input,expected", test_config_data)
126 | def test_get_config(mock_expanduser, test_input, expected):
127 | with mock.patch(open_patch_name, mock.mock_open(read_data=test_input)) as mock_open:
128 | mock_config_file = "/tmp/.kafka-shell"
129 | mock_expanduser.return_value = mock_config_file
130 |
131 | config = kafkashell.config.get_config()
132 |
133 | assert config == expected
134 | mock_open.assert_called_once_with(mock_config_file)
135 |
136 |
137 | @mock.patch(print_patch_name)
138 | @mock.patch("sys.exit")
139 | @pytest.mark.parametrize("test_input,expected", test_config_data)
140 | def test_validate_config_valid(mock_exit, mock_print, test_input, expected):
141 | with open("tests/data/test-config.yaml") as f:
142 | config = yaml.safe_load(f)
143 | returned_config = kafkashell.config.validate_config(config)
144 |
145 | assert not mock_print.called
146 | assert not mock_exit.called
147 | assert config == returned_config
148 |
149 |
150 | @mock.patch(print_patch_name)
151 | @mock.patch("sys.exit")
152 | @pytest.mark.parametrize("test_input,expected", test_config_data)
153 | def test_validate_config_is_valid_with_environment_variables(mock_exit, mock_print, test_input, expected):
154 | with open("tests/data/test-environment-variables-config.yaml") as f:
155 | config = yaml.safe_load(f)
156 | returned_config = kafkashell.config.validate_config(config)
157 |
158 | assert not mock_print.called
159 | assert not mock_exit.called
160 | assert config == returned_config
161 |
162 |
163 | @mock.patch(print_patch_name)
164 | @mock.patch("sys.exit")
165 | @pytest.mark.parametrize("test_input,expected", test_config_data)
166 | def test_validate_config_invalid(mock_exit, mock_print, test_input, expected):
167 | config = {}
168 | kafkashell.config.validate_config(config)
169 |
170 | error_type = "root"
171 | error_message = "'version' is a required property"
172 | compatible_error_message = "u{0}".format(error_message) if sys.version_info[0] < 3 else error_message
173 | final_error_message = "Invalid user configuration ({0}): {1}".format(error_type, compatible_error_message)
174 |
175 | mock_print.assert_called_once_with(final_error_message)
176 | mock_exit.assert_called_once_with(1)
177 |
178 |
179 | @mock.patch("oyaml.dump")
180 | @mock.patch("os.path.expanduser")
181 | @pytest.mark.parametrize("test_input,expected", test_config_data)
182 | def test_save_config(mock_expanduser, mock_dump, test_input, expected):
183 | with mock.patch(open_patch_name, mock.mock_open(read_data=test_input)) as mock_open:
184 | mock_config_file = "/tmp/.kafka-shell"
185 | mock_expanduser.return_value = mock_config_file
186 |
187 | kafkashell.config.save_config(expected)
188 |
189 | mock_open.assert_called_once_with(mock_config_file, "w")
190 | mock_dump.assert_called_once_with(expected, mock_open.return_value, default_flow_style=False, sort_keys=False)
191 |
192 |
193 | @pytest.mark.parametrize("test_input,expected", test_config_data)
194 | def test_get_default_config(test_input, expected):
195 | with mock.patch(open_patch_name, mock.mock_open(read_data=test_input)) as mock_open:
196 | data_dir = os.path.dirname(os.path.realpath(__file__))
197 | expected_path = os.path.realpath(os.path.join(data_dir, "../kafkashell/data/shell-config.yaml"))
198 |
199 | assert kafkashell.config.get_default_config() == expected
200 | mock_open.assert_called_once_with(expected_path)
201 |
202 |
203 | @pytest.mark.parametrize("test_input,expected", test_config_data)
204 | def test_get_config_schema(test_input, expected):
205 | with mock.patch(open_patch_name, mock.mock_open(read_data=test_input)) as mock_open:
206 | data_dir = os.path.dirname(os.path.realpath(__file__))
207 | expected_path = os.path.realpath(os.path.join(data_dir, "../kafkashell/data/shell-config.schema"))
208 |
209 | assert kafkashell.config.get_config_schema() == expected
210 | mock_open.assert_called_once_with(expected_path)
211 |
212 |
213 | @pytest.mark.parametrize("name,test_input,expected", test_completer_data)
214 | def test_get_completer(name, test_input, expected):
215 | with mock.patch(open_patch_name, mock.mock_open(read_data=test_input)) as mock_open:
216 | data_dir = os.path.dirname(os.path.realpath(__file__))
217 | expected_name = name if name == "completer" or name is None else "completer-{0}".format(name)
218 | expected_path = os.path.realpath(os.path.join(data_dir, "../kafkashell/data/{0}.json".format(expected_name)))
219 |
220 | assert kafkashell.config.get_completer(name) == expected
221 | mock_open.assert_called_once_with(expected_path)
222 |
223 |
224 | def test_get_kafka_shell_dir():
225 | assert kafkashell.config.get_kafka_shell_dir() == os.path.expanduser("~/.kafka-shell")
226 |
227 |
228 | def test_get_user_config_path():
229 | assert kafkashell.config.get_user_config_path() == os.path.expanduser("~/.kafka-shell/config.yaml")
230 |
231 |
232 | def test_get_user_history_path():
233 | assert kafkashell.config.get_user_history_path() == os.path.expanduser("~/.kafka-shell/history")
234 |
--------------------------------------------------------------------------------
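Two idioms above are easy to misread. First, `open` must be patched under its Python 2 name (`__builtin__.open`) or its Python 3 name (`builtins.open`), which is what the `open_patch_name` switch at the top of the file handles. Second, `mock.mock_open(read_data=...)` stands in for the file object so no real file is touched. A self-contained sketch of the pattern, using the Python 3 name only:

    import mock  # interchangeable with unittest.mock on Python 3

    with mock.patch("builtins.open", mock.mock_open(read_data="version: 1")) as mock_open:
        with open("/tmp/.kafka-shell/config.yaml") as f:
            # mock_open's file handle returns read_data instead of disk contents.
            assert f.read() == "version: 1"
        mock_open.assert_called_once_with("/tmp/.kafka-shell/config.yaml")
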
/tests/test_constants.py:
--------------------------------------------------------------------------------
1 | from .context import kafkashell
2 |
3 |
4 | def test_constants():
5 | assert kafkashell.constants.COMMAND_KAFKA_TOPICS == "kafka-topics"
6 | assert kafkashell.constants.COMMAND_KAFKA_CONFIGS == "kafka-configs"
7 | assert kafkashell.constants.COMMAND_KAFKA_CONSOLE_CONSUMER == "kafka-console-consumer"
8 | assert kafkashell.constants.COMMAND_KAFKA_CONSOLE_PRODUCER == "kafka-console-producer"
9 | assert kafkashell.constants.COMMAND_KAFKA_AVRO_CONSOLE_CONSUMER == "kafka-avro-console-consumer"
10 | assert kafkashell.constants.COMMAND_KAFKA_AVRO_CONSOLE_PRODUCER == "kafka-avro-console-producer"
11 | assert kafkashell.constants.COMMAND_KAFKA_VERIFIABLE_CONSUMER == "kafka-verifiable-consumer"
12 | assert kafkashell.constants.COMMAND_KAFKA_VERIFIABLE_PRODUCER == "kafka-verifiable-producer"
13 | assert kafkashell.constants.COMMAND_KAFKA_PREFERRED_REPLICA_ELECTION == "kafka-preferred-replica-election"
14 | assert kafkashell.constants.COMMAND_KAFKA_REPLICA_VERIFICATION == "kafka-replica-verification"
15 | assert kafkashell.constants.COMMAND_KAFKA_REASSIGN_PARTITIONS == "kafka-reassign-partitions"
16 | assert kafkashell.constants.COMMAND_KAFKA_BROKER_API_VERSIONS == "kafka-broker-api-versions"
17 | assert kafkashell.constants.COMMAND_KAFKA_DELETE_RECORDS == "kafka-delete-records"
18 | assert kafkashell.constants.COMMAND_KAFKA_LOG_DIRS == "kafka-log-dirs"
19 | assert kafkashell.constants.COMMAND_KAFKA_ACLS == "kafka-acls"
20 | assert kafkashell.constants.COMMAND_KSQL == "ksql"
21 | assert kafkashell.constants.COMMAND_ZOOKEEPER_SHELL == "zookeeper-shell"
22 | assert kafkashell.constants.FLAG_ZOOKEEPER == "--zookeeper"
23 | assert kafkashell.constants.FLAG_BOOTSTRAP_SERVER == "--bootstrap-server"
24 | assert kafkashell.constants.FLAG_BROKER_LIST == "--broker-list"
25 | assert kafkashell.constants.FLAG_SCHEMA_REGISTRY_URL == "--property schema.registry.url"
26 |
--------------------------------------------------------------------------------
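These flag constants exist because the Kafka CLI tools split across two connection styles: older ZooKeeper-based admin tools take --zookeeper, while newer ones take --bootstrap-server. A purely hypothetical sketch of how such constants might drive flag selection; the real dispatch lives in kafkashell/executor.py (not shown here), and the command grouping and cluster keys below are illustrative assumptions:

    FLAG_ZOOKEEPER = "--zookeeper"
    FLAG_BOOTSTRAP_SERVER = "--bootstrap-server"

    def server_flag(command, cluster):
        # Hypothetical grouping: these admin tools historically went through ZooKeeper.
        if command in ("kafka-topics", "kafka-configs", "kafka-acls"):
            return "{0} {1}".format(FLAG_ZOOKEEPER, cluster["zookeeper"])
        return "{0} {1}".format(FLAG_BOOTSTRAP_SERVER, cluster["bootstrap_servers"])

    assert server_flag("kafka-topics", {"zookeeper": "localhost:2181"}) == "--zookeeper localhost:2181"
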
/tests/test_helpers.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | import sys
4 |
5 | import mock
6 | import pytest
7 |
8 | from .context import kafkashell
9 |
10 | patch_name = "__builtin__.print" if sys.version_info[0] < 3 else "builtins.print"
11 |
12 | exclude_options_test_data = [
13 | (["kafka-topics", "--property"], ["kafka-topics"]),
14 | (["kafka-topics", "--test"], ["kafka-topics", "--test"]),
15 | (["kafka-topics", "--config", "--property"], ["kafka-topics"]),
16 | (["kafka-topics", "--config", "--property", "--test", "--add-config"], ["kafka-topics", "--test"]),
17 | (["--property", "kafka-topics"], ["kafka-topics"]),
18 | ]
19 |
20 | print_cluster_config_test_data = [
21 | ({"test": "test"}, "test: test\n"),
22 | ({"a": "a"}, "a: a\n"),
23 | ]
24 |
25 |
26 | def test_options_that_can_be_duplicated():
27 | actual = kafkashell.helpers.options_that_can_be_duplicated
28 | expected = [
29 | "--add-config",
30 | "--config",
31 | "--consumer-property",
32 | "--delete-config",
33 | "--producer-property",
34 | "--property",
35 | "--principal"
36 | ]
37 | assert actual == expected
38 |
39 |
40 | @pytest.mark.parametrize("test_input,expected", exclude_options_test_data)
41 | def test_exclude_options_from_removal(test_input, expected):
42 | actual = kafkashell.helpers.exclude_options_from_removal(test_input)
43 | assert actual == expected
44 |
45 |
46 | @mock.patch(patch_name, create=True)
47 | @pytest.mark.parametrize("test_input,expected", print_cluster_config_test_data)
48 | def test_print_cluster_config(mock_print, test_input, expected):
49 | kafkashell.helpers.print_cluster_config(test_input)
50 | mock_print.assert_called_once_with(expected)
51 |
--------------------------------------------------------------------------------
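The parametrized cases pin down the contract of exclude_options_from_removal: options that may legitimately appear more than once are filtered out of the token list (so the completer keeps suggesting them), while everything else passes through in its original order. One implementation consistent with those cases; the real one lives in kafkashell/helpers.py and may differ:

    OPTIONS_THAT_CAN_BE_DUPLICATED = [
        "--add-config", "--config", "--consumer-property",
        "--delete-config", "--producer-property", "--property", "--principal",
    ]

    def exclude_options_from_removal(tokens):
        # Drop repeatable options; keep all other tokens in order.
        return [t for t in tokens if t not in OPTIONS_THAT_CAN_BE_DUPLICATED]

    assert exclude_options_from_removal(["kafka-topics", "--property"]) == ["kafka-topics"]
    assert exclude_options_from_removal(["kafka-topics", "--test"]) == ["kafka-topics", "--test"]
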
/tests/test_schemas.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | from tests.context import kafkashell
4 | from tests.utilities import validate_schema, get_test_config, validate_invalid_schema
5 |
6 |
7 | def test_completer_commands():
8 | json_value = kafkashell.config.get_completer()
9 | validate_schema(json_value, "completer", "Commands (completer.json)")
10 |
11 |
12 | def test_completer_kafka_configs():
13 | json_value = kafkashell.config.get_completer("kafka-configs")
14 | validate_schema(json_value, "completer-kafka-configs", "Kafka configs (completer-kafka-configs.json)")
15 |
16 |
17 | def test_completer_reset_policies():
18 | json_value = kafkashell.config.get_completer("reset-policies")
19 | validate_schema(json_value, "completer-reset-policies", "Reset Policies (completer-reset-policies.json)")
20 |
21 |
22 | def test_completer_resource_pattern_type():
23 | json_value = kafkashell.config.get_completer("resource-pattern-types")
24 | validate_schema(json_value, "completer-resource-pattern-types",
25 | "Resource Pattern Types (completer-resource-pattern-types.json)")
26 |
27 |
28 | def test_completer_acks():
29 | json_value = kafkashell.config.get_completer("acks")
30 | validate_schema(json_value, "completer-acks", "Acks (completer-acks.json)")
31 |
32 |
33 | def test_completer_compression_codecs():
34 | json_value = kafkashell.config.get_completer("compression-codecs")
35 | validate_schema(json_value, "completer-compression-codecs",
36 | "Compression Codecs (completer-compression-codecs.json)")
37 |
38 |
39 | def test_completer_entity_types():
40 | json_value = kafkashell.config.get_completer("entity-types")
41 | validate_schema(json_value, "completer-entity-types", "Entity Types (completer-entity-types.json)")
42 |
43 |
44 | def test_completer_ksql_output():
45 | json_value = kafkashell.config.get_completer("ksql-output")
46 | validate_schema(json_value, "completer-ksql-output", "KSQL Output (completer-ksql-output.json)")
47 |
48 |
49 | def test_completer_cleanup_policy():
50 | json_value = kafkashell.config.get_completer("cleanup-policy")
51 | validate_schema(json_value, "completer-cleanup-policy", "Cleanup Policy (completer-cleanup-policy.json)")
52 |
53 |
54 | def test_completer_booleans():
55 | json_value = kafkashell.config.get_completer("booleans")
56 | validate_schema(json_value, "completer-booleans", "Booleans (completer-booleans.json)")
57 |
58 |
59 | def test_completer_timestamp_types():
60 | json_value = kafkashell.config.get_completer("timestamp-types")
61 | validate_schema(json_value, "completer-timestamp-types", "Timestamp Types (completer-timestamp-types.json)")
62 |
63 |
64 | def test_default_config_schema():
65 | json_value = kafkashell.config.get_default_config()
66 | validate_schema(json_value, "shell-config", "Default user config")
67 |
68 |
69 | def test_environment_variable_config_schema():
70 | json_value = get_test_config("test-environment-variables")
71 | validate_schema(json_value, "shell-config", "Environment variable user config")
72 |
73 |
74 | def test_invalid_configs():
75 | json_value = get_test_config("test-invalid-ksql")
76 | message = validate_invalid_schema(json_value, "shell-config")
77 | assert message is not None
78 |
79 | json_value = get_test_config("test-invalid-schema-registry")
80 | message = validate_invalid_schema(json_value, "shell-config")
81 | assert message is not None
82 |
83 |
84 | def test_invalid_file_extension():
85 | json_value = get_test_config("test-invalid-file-extension")
86 | message = validate_invalid_schema(json_value, "shell-config")
87 | assert message is not None
88 |
--------------------------------------------------------------------------------
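The invalid-config tests here, like test_validate_config_invalid in test_config.py, depend on jsonschema's stock error text for a missing required property. A standalone reproduction of that behavior:

    from jsonschema import validate, ValidationError

    schema = {"type": "object", "required": ["version"]}

    try:
        validate(instance={}, schema=schema)
    except ValidationError as ex:
        assert ex.message == "'version' is a required property"
        assert len(ex.path) == 0  # the failure sits at the document root
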
/tests/test_settings.py:
--------------------------------------------------------------------------------
1 | import mock
2 | import oyaml as yaml
3 |
4 | from .context import kafkashell
5 |
6 |
7 | @mock.patch("kafkashell.config.save_config")
8 | @mock.patch("kafkashell.config.get_completer")
9 | @mock.patch("kafkashell.config.get_config")
10 | @mock.patch("kafkashell.config.init_history")
11 | @mock.patch("kafkashell.config.init_config")
12 | def test_settings(mock_init_config, mock_init_history, mock_get_config, mock_get_completer, mock_save_config):
13 | with open("tests/data/test-config.yaml") as f:
14 | with open("tests/data/test-modified-config.yaml") as fm:
15 | config_json = yaml.safe_load(f)
16 | modified_json = yaml.safe_load(fm)
17 | mock_get_completer.return_value = {"commands": {}}
18 | mock_get_config.return_value = config_json
19 |
20 | # test init
21 | settings = kafkashell.settings.Settings()
22 |
23 | mock_init_config.assert_called_once()
24 | mock_init_history.assert_called_once()
25 | mock_get_config.assert_called_once()
26 | mock_get_completer.assert_called_once()
27 | assert settings.enable_help is True
28 | assert settings.enable_auto_complete is True
29 | assert settings.enable_auto_suggest is True
30 | assert settings.cluster == "local"
31 |
32 | # test set_enable_help
33 | settings.set_enable_help(False)
34 | assert settings.enable_help is False
35 |
36 | # test set_enable_fuzzy_search
37 | settings.set_enable_fuzzy_search(False)
38 | assert settings.enable_fuzzy_search is False
39 |
40 | # test set_next_cluster & get_cluster_details
41 | settings.set_next_cluster()
42 | assert settings.cluster == "test"
43 | assert settings.get_cluster_details() == config_json["clusters"]["test"]
44 |
45 | # test save_settings
46 | settings.save_settings()
47 | mock_save_config.assert_called_once_with(modified_json)
48 |
49 | # test save_settings when enableSaveOnExit is false
50 | mock_save_config.reset_mock()
51 | settings.enable_save_on_exit = False
52 | settings.save_settings()
53 | assert not mock_save_config.called
54 |
55 | # test things can change back
56 | settings.set_enable_help(True)
57 | assert settings.enable_help is True
58 |
59 | settings.set_enable_fuzzy_search(True)
60 | assert settings.enable_fuzzy_search is True
61 |
62 | settings.set_next_cluster()
63 | assert settings.cluster == "local"
64 | assert settings.get_cluster_details() == config_json["clusters"]["local"]
65 |
66 |
67 | @mock.patch("kafkashell.config.get_completer")
68 | @mock.patch("kafkashell.config.get_config")
69 | @mock.patch("kafkashell.config.init_history")
70 | @mock.patch("kafkashell.config.init_config")
71 | def test_settings_init_history_off(mock_init_config, mock_init_history, mock_get_config, mock_get_completer):
72 | with open("tests/data/test-history-off-config.yaml") as f:
73 | config_json = yaml.safe_load(f)
74 | mock_get_completer.return_value = {"commands": {}}
75 | mock_get_config.return_value = config_json
76 |
77 | # test init
78 | kafkashell.settings.Settings()
79 |
80 | mock_init_config.assert_called_once()
81 | mock_init_history.assert_not_called()
82 |
--------------------------------------------------------------------------------
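set_next_cluster cycling local -> test -> local only works deterministically because oyaml loads the clusters mapping in file order. A hypothetical sketch of that cycling logic; the actual implementation is in kafkashell/settings.py and may differ:

    def next_cluster(clusters, current):
        # Relies on insertion-ordered mappings, which is why the project
        # loads YAML with oyaml rather than plain PyYAML.
        names = list(clusters)
        return names[(names.index(current) + 1) % len(names)]

    clusters = {"local": {}, "test": {}}
    assert next_cluster(clusters, "local") == "test"
    assert next_cluster(clusters, "test") == "local"
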
/tests/test_toolbar.py:
--------------------------------------------------------------------------------
1 | import mock
2 | import pytest
3 |
4 | from .context import kafkashell
5 |
6 | test_data = [
7 | ("local", True, True),
8 | ("local", False, False),
9 | ("test-cluster", True, True),
10 | ("123-cluster", False, False),
11 | ]
12 |
13 |
14 | @mock.patch("kafkashell.settings.Settings")
15 | @pytest.mark.parametrize("cluster,enable_help,enable_fuzzy", test_data)
16 | def test_toolbar(mock_settings, cluster, enable_help, enable_fuzzy):
17 | settings = mock_settings.return_value
18 | settings.cluster = cluster
19 | settings.enable_help = enable_help
20 | settings.enable_fuzzy_search = enable_fuzzy
21 |
22 | toolbar = kafkashell.toolbar.Toolbar(settings)
23 |
24 | assert toolbar.handler() == [
25 | ('class:bottom-toolbar', " [F2] Cluster: "),
26 | ('class:bottom-toolbar-yellow', cluster),
27 | ('class:bottom-toolbar', " "),
28 | ('class:bottom-toolbar', "[F3] Fuzzy: "),
29 | ('class:bottom-toolbar-yellow', "ON" if enable_fuzzy else "OFF"),
30 | ('class:bottom-toolbar', " "),
31 | ('class:bottom-toolbar', "[F9] In-Line Help: "),
32 | ('class:bottom-toolbar-yellow', "ON" if enable_help else "OFF"),
33 | ('class:bottom-toolbar', " "),
34 | ('class:bottom-toolbar', "[F10] Exit")
35 | ]
36 |
--------------------------------------------------------------------------------
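The (style, text) tuples that Toolbar.handler returns are prompt_toolkit formatted text, so such a handler can be passed directly as a bottom toolbar. A minimal sketch, assuming a prompt_toolkit release with PromptSession and bottom_toolbar support (2.0+):

    from prompt_toolkit import PromptSession

    def bottom_toolbar():
        # Same shape as Toolbar.handler() above: (style class, text) pairs.
        return [("class:bottom-toolbar", " [F2] Cluster: "),
                ("class:bottom-toolbar-yellow", "local")]

    session = PromptSession(bottom_toolbar=bottom_toolbar)
    # session.prompt("> ")  # commented out: rendering requires a real terminal
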
/tests/test_version.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 | from .context import kafkashell
4 |
5 | test_data = [
6 | "0.0.1",
7 | "1.0.1",
8 | "2.3.4"
9 | ]
10 |
11 |
12 | @pytest.mark.parametrize("test_input", test_data)
13 | def test_get_version(test_input):
14 | old_version = kafkashell.version.__version__
15 | kafkashell.version.__version__ = test_input
16 | assert kafkashell.version.get_version() == test_input
17 | kafkashell.version.__version__ = old_version
18 |
--------------------------------------------------------------------------------
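The save/patch/restore dance above leaks the patched version if the assertion fails before the restore line runs. An equivalent sketch using pytest's built-in monkeypatch fixture, which restores the attribute on teardown whether the test passes or fails:

    import pytest

    from .context import kafkashell

    @pytest.mark.parametrize("test_input", ["0.0.1", "1.0.1", "2.3.4"])
    def test_get_version(monkeypatch, test_input):
        # setattr is undone automatically when the test finishes.
        monkeypatch.setattr(kafkashell.version, "__version__", test_input)
        assert kafkashell.version.get_version() == test_input
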
/tests/utilities.py:
--------------------------------------------------------------------------------
1 | from __future__ import unicode_literals
2 |
3 | import json
4 | import os
5 |
6 | import oyaml as yaml
7 | import pytest
8 | from jsonschema import validate, ValidationError
9 |
10 |
11 | def setup_settings_for_test(mock_settings):
12 | with open("tests/data/test-completer.json") as f:
13 | settings = mock_settings.return_value
14 | settings.selected_cluster = "local"
15 | settings.enable_help = True
16 | settings.enable_fuzzy_search = True
17 | settings.commands = json.load(f)["commands"]
18 | return settings
19 |
20 |
21 | def setup_settings_with_real_completer_for_test(mock_settings):
22 | with open("kafkashell/data/completer.json") as f:
23 | with open("kafkashell/data/shell-config.yaml") as fc:
24 | settings = mock_settings.return_value
25 | settings.selected_cluster = "local"
26 | settings.enable_help = True
27 | settings.enable_fuzzy_search = True
28 | settings.user_config = yaml.safe_load(fc)
29 | settings.commands = json.load(f)["commands"]
30 | return settings
31 |
32 |
33 | def setup_config_path_for_test(config_name="test"):
34 | data_dir = os.path.dirname(os.path.realpath(__file__))
35 | return os.path.realpath(os.path.join(data_dir, "../tests/data/{0}-config.yaml".format(config_name)))
36 |
37 |
38 | def get_schema(schema_name):
39 | data_dir = os.path.dirname(os.path.realpath(__file__))
40 | data_path = os.path.realpath(os.path.join(data_dir, "../kafkashell/data/{0}.schema".format(schema_name)))
41 | with open(data_path) as f:
42 | return json.load(f)
43 |
44 |
45 | def get_test_config(config_name="test"):
46 | data_dir = os.path.dirname(os.path.realpath(__file__))
47 | data_path = os.path.realpath(os.path.join(data_dir, "../tests/data/{0}-config.yaml".format(config_name)))
48 | with open(data_path) as f:
49 | return yaml.safe_load(f)
50 |
51 |
52 | def validate_schema(actual, schema_name, message):
53 | schema = get_schema(schema_name)
54 | assert actual is not None
55 | try:
56 | validate(instance=actual, schema=schema)
57 | except ValidationError as ex:
58 | pytest.fail(make_schema_error_message(ex, message))
59 |
60 |
61 | def validate_invalid_schema(actual, schema_name):
62 | schema = get_schema(schema_name)
63 | assert actual is not None
64 | try:
65 | validate(instance=actual, schema=schema)
66 | except ValidationError as ex:
67 | return make_schema_error_message(ex)
68 |
69 |
70 | def make_schema_error_message(ex, message="Schema"):
71 | error_type = ", ".join(str(p) for p in ex.path) if len(ex.path) > 0 else "root"
72 | return "{0} does not conform to schema ({1}): {2}".format(message, error_type, ex.message)
73 |
--------------------------------------------------------------------------------
/tox.ini:
--------------------------------------------------------------------------------
1 | [tox]
2 | envlist = py27,py35,py36,py37
3 |
4 | [testenv]
5 | deps =
6 | pytest
7 | mock
8 | prompt_toolkit
9 | fuzzyfinder
10 | pygments
11 | flake8
12 | flake8-quotes
13 | pexpect
14 | jsonschema
15 | oyaml
16 | commands =
17 | flake8 --max-line-length=120 --inline-quotes '"' --exclude=.tox,.git,htmlcov,build,dist,tests,docs,venv .
18 | pytest
19 |
--------------------------------------------------------------------------------