├── .coveragerc ├── .flake8 ├── .gitignore ├── .travis.yml ├── Dockerfile ├── LICENSE ├── MANIFEST.in ├── README.rst ├── docker-compose.yaml ├── ksql-server ├── Dockerfile ├── conf │ ├── ksql-server.conf │ └── ksqlserver.properties └── startup.sh ├── ksql ├── __init__.py ├── api.py ├── builder.py ├── client.py ├── errors.py ├── upload.py └── utils.py ├── requirements-dev.txt ├── requirements.txt ├── setup.cfg ├── setup.py ├── test-requirements.txt ├── tests ├── test_builder.py ├── test_client.py ├── test_utils.py ├── unit-tests │ ├── test_api.py │ └── utils_tests.py └── vcr_cassettes │ ├── bad_requests.yml │ ├── get_ksql_server.yml │ ├── get_properties.yml │ ├── healthcheck.yml │ ├── ksql_create_stream.yml │ ├── ksql_create_stream_as_with_conditions_with_startwith.yml │ ├── ksql_create_stream_as_with_conditions_with_startwith_with_and.yml │ ├── ksql_create_stream_as_with_conditions_without_startwith.yml │ ├── ksql_create_stream_as_with_wrong_timestamp.yml │ ├── ksql_create_stream_as_without_conditions.yml │ ├── ksql_create_stream_by_builder.yml │ ├── ksql_create_stream_by_builder_api.yml │ ├── ksql_show_table.yml │ ├── ksql_show_table_with_api_key.yml │ ├── ksql_topic_already_registered.yml │ ├── raise_create_error_no_topic.yml │ ├── utils_test_drop_all_streams.yml │ ├── utils_test_drop_stream.yml │ ├── utils_test_drop_stream_create_as_stream.yml │ ├── utils_test_get_all_streams.yml │ ├── utils_test_get_dependent_queries.yml │ └── utils_test_get_stream_info.yml └── validate.sh /.coveragerc: -------------------------------------------------------------------------------- 1 | [run] 2 | branch = True 3 | source = ksql 4 | 5 | [report] 6 | include = 7 | ksql/* 8 | tests/* 9 | -------------------------------------------------------------------------------- /.flake8: -------------------------------------------------------------------------------- 1 | [flake8] 2 | max-line-length = 120 3 | ignore = E731,W504,E231 4 | exclude = 
.svn,CVS,.bzr,.hg,.git,__pycache__,.tox,.eggs,*.egg,*/_vendor/*,node_modules 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | env/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | downloads/ 16 | eggs/ 17 | .eggs/ 18 | lib/ 19 | lib64/ 20 | parts/ 21 | sdist/ 22 | var/ 23 | wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | 28 | # PyInstaller 29 | # Usually these files are written by a python script from a template 30 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 31 | *.manifest 32 | *.spec 33 | 34 | # Installer logs 35 | pip-log.txt 36 | pip-delete-this-directory.txt 37 | 38 | # Unit test / coverage reports 39 | htmlcov/ 40 | .tox/ 41 | .coverage 42 | .coverage.* 43 | .cache 44 | nosetests.xml 45 | coverage.xml 46 | *.cover 47 | .hypothesis/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | 57 | # Flask stuff: 58 | instance/ 59 | .webassets-cache 60 | 61 | # Scrapy stuff: 62 | .scrapy 63 | 64 | # Sphinx documentation 65 | docs/_build/ 66 | 67 | # PyBuilder 68 | target/ 69 | 70 | # Jupyter Notebook 71 | .ipynb_checkpoints 72 | 73 | # pyenv 74 | .python-version 75 | 76 | # celery beat schedule file 77 | celerybeat-schedule 78 | 79 | # SageMath parsed files 80 | *.sage.py 81 | 82 | # dotenv 83 | .env 84 | 85 | # virtualenv 86 | .venv 87 | venv/ 88 | ENV/ 89 | 90 | # Spyder project settings 91 | .spyderproject 92 | .spyproject 93 | 94 | # Rope project settings 95 | .ropeproject 96 | 97 | # mkdocs documentation 98 | /site 99 | 100 | # mypy 101 | .mypy_cache/ 102 | 103 | # jetbrains 104 | .idea 
-------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | matrix: 3 | fast_finish: true 4 | include: 5 | - python: '3.10.2' 6 | dist: bionic 7 | - python: '3.9' 8 | - python: '3.8' 9 | - python: '3.7.13' 10 | 11 | # command to install dependencies 12 | install: 13 | - pip install .[dev] 14 | # command to run tests 15 | script: nosetests --with-coverage --cover-erase --cover-package=ksql 16 | 17 | after_success: 18 | - codecov 19 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM frolvlad/alpine-python3 2 | WORKDIR /app 3 | COPY *requirements* /app/ 4 | RUN sed -i -e 's/v3\.8/edge/g' /etc/apk/repositories \ 5 | && apk upgrade --update-cache --available \ 6 | && apk add --no-cache librdkafka librdkafka-dev 7 | RUN apk add --no-cache alpine-sdk python3-dev 8 | RUN pip install -r requirements.txt 9 | RUN pip install -r test-requirements.txt 10 | COPY . /app 11 | RUN pip install -e . 
12 | 13 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2017 Bryan Yang 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include requirements.txt test-requirements.txt -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ksql-python 2 | =========== 3 | 4 | A python wrapper for the KSQL REST API. Easily interact with the KSQL REST API using this library. 5 | 6 | Supported KSQLDB version: 0.10.1+ 7 | Supported Python version: 3.5+ 8 | 9 | .. 
image:: https://travis-ci.org/bryanyang0528/ksql-python.svg?branch=master 10 | :target: https://travis-ci.org/bryanyang0528/ksql-python 11 | 12 | .. image:: https://codecov.io/gh/bryanyang0528/ksql-python/branch/master/graph/badge.svg 13 | :target: https://codecov.io/gh/bryanyang0528/ksql-python 14 | 15 | .. image:: https://pepy.tech/badge/ksql 16 | :target: https://pepy.tech/project/ksql 17 | 18 | .. image:: https://pepy.tech/badge/ksql/month 19 | :target: https://pepy.tech/project/ksql/month 20 | 21 | .. image:: https://img.shields.io/badge/license-MIT-yellow.svg 22 | :target: https://github.com/bryanyang0528/ksql-python/blob/master/LICENSE 23 | 24 | Installation 25 | ------------ 26 | 27 | .. code:: bash 28 | 29 | pip install ksql 30 | 31 | Or 32 | 33 | .. code:: bash 34 | 35 | git clone https://github.com/bryanyang0528/ksql-python 36 | cd ksql-python 37 | python setup.py install 38 | 39 | Getting Started 40 | --------------- 41 | 42 | Setup for KSQL 43 | ~~~~~~~~~~~~~~~ 44 | 45 | This is the GITHUB page of KSQL. https://github.com/confluentinc/ksql 46 | 47 | If you have installed open source Confluent CLI (e.g. by installing Confluent Open Source or Enterprise Platform), you can start KSQL and its dependencies with one single command: 48 | 49 | .. code:: bash 50 | 51 | confluent start ksql-server 52 | 53 | Setup for ksql-python API 54 | ~~~~~~~~~~~~~~~~~~~~~~~~~ 55 | 56 | - Setup for the KSQL API: 57 | 58 | .. code:: python 59 | 60 | from ksql import KSQLAPI 61 | client = KSQLAPI('http://ksql-server:8088') 62 | 63 | - Setup for KSQl API with logging enabled: 64 | 65 | .. code:: python 66 | 67 | import logging 68 | from ksql import KSQLAPI 69 | logging.basicConfig(level=logging.DEBUG) 70 | client = KSQLAPI('http://ksql-server:8088') 71 | 72 | - Setup for KSQL API with Basic Authentication 73 | 74 | .. 
code:: python 75 | 76 | from ksql import KSQLAPI 77 | client = KSQLAPI('http://ksql-server:8088', api_key="your_key", secret="your_secret") 78 | 79 | Options 80 | ~~~~~~~ 81 | 82 | +---------------+-----------+------------+--------------------------------------------------------------+ 83 | | Option | Type | Required | Description | 84 | +===============+===========+============+==============================================================+ 85 | | ``url`` | string | yes | Your ksql-server url. Example: ``http://ksql-server:8080`` | 86 | +---------------+-----------+------------+--------------------------------------------------------------+ 87 | | ``timeout`` | integer | no | Timout for Requests. Default: ``5`` | 88 | +---------------+-----------+------------+--------------------------------------------------------------+ 89 | | ``api_key`` | string | no | API Key to use on the requests | 90 | +---------------+-----------+------------+--------------------------------------------------------------+ 91 | | ``secret`` | string | no | Secret to use on the requests | 92 | +---------------+-----------+------------+--------------------------------------------------------------+ 93 | 94 | Main Methods 95 | ~~~~~~~~~~~~ 96 | 97 | ksql 98 | ^^^^ 99 | 100 | This method can be used for some KSQL features which are not supported via other specific methods like ``query``, ``create_stream`` or ``create_stream_as``. 101 | The following example shows how to execute the ``show tables`` statement: 102 | 103 | .. code:: python 104 | 105 | client.ksql('show tables') 106 | 107 | - Example Response ``[{'tables': {'statementText': 'show tables;', 'tables': []}}]`` 108 | 109 | query 110 | ^^^^^ 111 | 112 | It will execute sql query and keep listening streaming data. 113 | 114 | .. code:: python 115 | 116 | client.query('select * from table1') 117 | 118 | This command returns a generator. It can be printed e.g. by reading its values via `next(query)` or a for loop. 
Here is a complete example: 119 | 120 | .. code:: python 121 | 122 | from ksql import KSQLAPI 123 | client = KSQLAPI('http://localhost:8088') 124 | query = client.query('select * from table1') 125 | for item in query: print(item) 126 | 127 | - Example Response 128 | 129 | :: 130 | 131 | {"row":{"columns":[1512787743388,"key1",1,2,3]},"errorMessage":null} 132 | {"row":{"columns":[1512787753200,"key1",1,2,3]},"errorMessage":null} 133 | {"row":{"columns":[1512787753488,"key1",1,2,3]},"errorMessage":null} 134 | {"row":{"columns":[1512787753888,"key1",1,2,3]},"errorMessage":null} 135 | 136 | Query with HTTP/2 137 | ^^^^^^^^^^^^^^^^^ 138 | Execute queries with the new ``/query-stream`` endpoint. Documented `here `_ 139 | 140 | To execute a sql query use the same syntax as the regular query, with the additional ``use_http2=True`` parameter. 141 | 142 | .. code:: python 143 | 144 | client.query('select * from table1', use_http2=True) 145 | 146 | A generator is returned with the following example response 147 | 148 | :: 149 | 150 | {"queryId":"44d8413c-0018-423d-b58f-3f2064b9a312","columnNames":["ORDER_ID","TOTAL_AMOUNT","CUSTOMER_NAME"],"columnTypes":["INTEGER","DOUBLE","STRING"]} 151 | [3,43.0,"Palo Alto"] 152 | [3,43.0,"Palo Alto"] 153 | [3,43.0,"Palo Alto"] 154 | 155 | To terminate the query above use the ``close_query`` call. 156 | Provide the ``queryId`` returned from the ``query`` call. 157 | 158 | .. code:: python 159 | 160 | client.close_query("44d8413c-0018-423d-b58f-3f2064b9a312") 161 | 162 | Insert rows into a Stream with HTTP/2 163 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ 164 | 165 | Uses the new ``/inserts-stream`` endpoint. See `documentation `_ 166 | 167 | .. 
code:: python 168 | 169 | rows = [ 170 | {"ORDER_ID": 1, "TOTAL_AMOUNT": 23.5, "CUSTOMER_NAME": "abc"}, 171 | {"ORDER_ID": 2, "TOTAL_AMOUNT": 3.7, "CUSTOMER_NAME": "xyz"} 172 | ] 173 | 174 | results = self.api_client.inserts_stream("my_stream_name", rows) 175 | 176 | An array of object will be returned on success, with the status of each row inserted. 177 | 178 | 179 | Simplified API 180 | ~~~~~~~~~~~~~~ 181 | 182 | create_stream/ create_table 183 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 184 | 185 | .. code:: python 186 | 187 | client.create_stream(table_name=table_name, 188 | columns_type=columns_type, 189 | topic=topic, 190 | value_format=value_format) 191 | 192 | Options 193 | ^^^^^^^ 194 | 195 | +-----------------+-----------+----------+--------------------------------------------------------------+ 196 | | Option | Type | Required | Description | 197 | +=================+===========+==========+==============================================================+ 198 | | ``table_name`` | string | yes | name of stream/table | 199 | +-----------------+-----------+----------+--------------------------------------------------------------+ 200 | | ``columns_type``| list | yes | ex:``['viewtime bigint','userid varchar','pageid varchar']`` | 201 | +-----------------+-----------+----------+--------------------------------------------------------------+ 202 | | ``topic`` | string | yes | Kafka topic | 203 | +-----------------+-----------+----------+--------------------------------------------------------------+ 204 | | ``value_format``| string | no | ``JSON`` (Default) or ``DELIMITED`` or ``AVRO`` | 205 | +-----------------+-----------+----------+--------------------------------------------------------------+ 206 | | ``key`` | string | for Table| Key (used for JOINs) | 207 | +-----------------+-----------+----------+--------------------------------------------------------------+ 208 | 209 | 210 | - Responses 211 | 212 | :If create table/stream succeed: 213 | return True 214 | 215 | :If 
failed: 216 | raise a CreateError(respose_from_ksql_server) 217 | 218 | create_stream_as 219 | ^^^^^^^^^^^^^^^^ 220 | 221 | a simplified api for creating stream as select 222 | 223 | .. code:: python 224 | 225 | client.create_stream_as(table_name=table_name, 226 | select_columns=select_columns, 227 | src_table=src_table, 228 | kafka_topic=kafka_topic, 229 | value_format=value_format, 230 | conditions=conditions, 231 | partition_by=partition_by, 232 | **kwargs) 233 | 234 | 235 | .. code:: sql 236 | 237 | CREATE STREAM 238 | [WITH ( kafka_topic=, value_format=, property_name=expression ... )] 239 | AS SELECT 240 | FROM 241 | [WHERE ] 242 | PARTITION BY ]; 243 | 244 | Options 245 | ^^^^^^^ 246 | 247 | +-------------------+-----------+----------+--------------------------------------------------------------+ 248 | | Option | Type | Required | Description | 249 | +===================+===========+==========+==============================================================+ 250 | | ``table_name`` | string | yes | name of stream/table | 251 | +-------------------+-----------+----------+--------------------------------------------------------------+ 252 | | ``select_columns``| list | yes | you can select ``[*]`` or ``['columnA', 'columnB']`` | 253 | +-------------------+-----------+----------+--------------------------------------------------------------+ 254 | | ``src_table`` | string | yes | name of source table | 255 | +-------------------+-----------+----------+--------------------------------------------------------------+ 256 | | ``kafka_topic`` | string | no | The name of the Kafka topic of this new stream(table). 
| 257 | +-------------------+-----------+----------+--------------------------------------------------------------+ 258 | | ``value_format`` | string | no | ``DELIMITED``, ``JSON``(Default) or ``AVRO`` | 259 | +-------------------+-----------+----------+--------------------------------------------------------------+ 260 | | ``conditions`` | string | no | The conditions in the where clause. | 261 | +-------------------+-----------+----------+--------------------------------------------------------------+ 262 | | ``partition_by`` | string | no | Data will be distributed across partitions by this column. | 263 | +-------------------+-----------+----------+--------------------------------------------------------------+ 264 | | ``kwargs`` | pair | no | please provide ``key=value`` pairs. Please see more options. | 265 | +-------------------+-----------+----------+--------------------------------------------------------------+ 266 | 267 | KSQL JOINs 268 | ~~~~~~~~~~~~~~ 269 | 270 | KSQL JOINs between Streams and Tables are not supported yet via explicit methods, but you can use the ``ksql`` method for this like the following: 271 | 272 | .. code:: python 273 | 274 | client.ksql("CREATE STREAM join_per_user WITH (VALUE_FORMAT='AVRO', KAFKA_TOPIC='join_per_user') AS SELECT Time, Amount FROM source c INNER JOIN users u on c.user = u.userid WHERE u.USERID = 1") 275 | 276 | FileUpload 277 | ~~~~~~~~~~~~~~ 278 | 279 | upload 280 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ 281 | Run commands from a .ksql file. Can only support ksql commands and not streaming queries. 282 | 283 | .. 
code:: python 284 | 285 | from ksql.upload import FileUpload 286 | pointer = FileUpload('http://ksql-server:8080') 287 | pointer.upload('rules.ksql') 288 | 289 | 290 | Options 291 | ^^^^^^^ 292 | 293 | +-----------------+-----------+----------+--------------------------------------------------------------+ 294 | | Option | Type | Required | Description | 295 | +=================+===========+==========+==============================================================+ 296 | | ``ksqlfile`` | string | yes | name of file containing the rules | 297 | +-----------------+-----------+----------+--------------------------------------------------------------+ 298 | 299 | 300 | - Responses 301 | 302 | :If ksql-commands succesfully executed: 303 | return (List of server response for all commands) 304 | 305 | :If failed: 306 | raise the appropriate error 307 | 308 | More Options 309 | ^^^^^^^^^^^^ 310 | 311 | There are more properties (partitions, replicas, etc...) in the official document. 312 | 313 | `KSQL Syntax Reference `_ 314 | 315 | - Responses 316 | 317 | :If create table/stream succeed: 318 | return True 319 | 320 | :If failed: 321 | raise a CreatError(respose_from_ksql_server) 322 | -------------------------------------------------------------------------------- /docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | zookeeper: 4 | image: confluentinc/cp-zookeeper:5.5.0 5 | hostname: zookeeper 6 | container_name: zookeeper 7 | ports: 8 | - "2181:2181" 9 | environment: 10 | ZOOKEEPER_CLIENT_PORT: 2181 11 | ZOOKEEPER_TICK_TIME: 2000 12 | healthcheck: 13 | test: nc -z localhost 2181 || exit -1 14 | interval: 30s 15 | timeout: 5s 16 | retries: 5 17 | start_period: 20s 18 | 19 | broker: 20 | image: confluentinc/cp-enterprise-kafka:5.5.0 21 | hostname: broker 22 | container_name: broker 23 | depends_on: 24 | zookeeper: 25 | condition: service_healthy 26 | ports: 27 | - "29092:29092" 28 | 
environment: 29 | KAFKA_BROKER_ID: 1 30 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181' 31 | KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT 32 | KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:9092,PLAINTEXT_HOST://localhost:29092 33 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1 34 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0 35 | KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1 36 | KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1 37 | healthcheck: 38 | test: kafka-topics --bootstrap-server broker:9092 --list 39 | interval: 30s 40 | timeout: 20s 41 | retries: 3 42 | start_period: 20s 43 | 44 | schema-registry: 45 | image: confluentinc/cp-schema-registry:5.5.0 46 | hostname: schema-registry 47 | container_name: schema-registry 48 | depends_on: 49 | zookeeper: 50 | condition: service_healthy 51 | broker: 52 | condition: service_healthy 53 | ports: 54 | - "8081:8081" 55 | environment: 56 | SCHEMA_REGISTRY_HOST_NAME: schema-registry 57 | SCHEMA_REGISTRY_KAFKASTORE_CONNECTION_URL: 'zookeeper:2181' 58 | healthcheck: 59 | test: curl --output /dev/null --silent --head --fail http://schema-registry:8081/subjects 60 | interval: 30s 61 | timeout: 10s 62 | retries: 3 63 | 64 | ksqldb-server: 65 | image: confluentinc/ksqldb-server:0.10.1 66 | hostname: ksqldb-server 67 | container_name: ksqldb-server 68 | depends_on: 69 | broker: 70 | condition: service_healthy 71 | schema-registry: 72 | condition: service_healthy 73 | ports: 74 | - "8088:8088" 75 | volumes: 76 | - "./confluent-hub-components/:/usr/share/kafka/plugins/" 77 | environment: 78 | KSQL_LISTENERS: "http://0.0.0.0:8088" 79 | KSQL_BOOTSTRAP_SERVERS: "broker:9092" 80 | KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081" 81 | KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true" 82 | KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true" 83 | KSQL_CONNECT_GROUP_ID: "ksql-connect-cluster" 84 | KSQL_CONNECT_BOOTSTRAP_SERVERS: "broker:9092" 85 | KSQL_CONNECT_KEY_CONVERTER: 
"io.confluent.connect.avro.AvroConverter" 86 | KSQL_CONNECT_VALUE_CONVERTER: "io.confluent.connect.avro.AvroConverter" 87 | KSQL_CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081" 88 | KSQL_CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: "http://schema-registry:8081" 89 | KSQL_CONNECT_VALUE_CONVERTER_SCHEMAS_ENABLE: "false" 90 | KSQL_CONNECT_CONFIG_STORAGE_TOPIC: "ksql-connect-configs" 91 | KSQL_CONNECT_OFFSET_STORAGE_TOPIC: "ksql-connect-offsets" 92 | KSQL_CONNECT_STATUS_STORAGE_TOPIC: "ksql-connect-statuses" 93 | KSQL_CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1 94 | KSQL_CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1 95 | KSQL_CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1 96 | KSQL_CONNECT_PLUGIN_PATH: "/usr/share/kafka/plugins" 97 | healthcheck: 98 | test: nc -z localhost 8088 || exit -1 99 | interval: 10s 100 | timeout: 5s 101 | retries: 3 102 | start_period: 20s 103 | 104 | ksqldb-cli: 105 | image: confluentinc/ksqldb-cli:0.10.1 106 | container_name: ksqldb-cli 107 | depends_on: 108 | - broker 109 | - ksqldb-server 110 | entrypoint: /bin/sh 111 | tty: true 112 | 113 | ksql-python: 114 | build: 115 | context: . 116 | network_mode: host 117 | depends_on: 118 | - ksqldb-server 119 | - broker 120 | - schema-registry 121 | environment: 122 | KSQL_SERVER: localhost:8088 123 | STREAMS_BOOTSTRAP_SERVERS: localhost:29092 124 | volumes: 125 | - ./:/app 126 | command: tail -f /dev/null 127 | -------------------------------------------------------------------------------- /ksql-server/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM confluentinc/ksql-cli:5.0.0-beta1 2 | LABEL maintainer="bryan.yang@vpon.com" 3 | 4 | RUN apt update && apt install -y supervisor &&\ 5 | mkdir /var/log/ksql 6 | 7 | COPY ./conf/ksql-server.conf /etc/supervisor/conf.d 8 | COPY ./conf/ksqlserver.properties /etc/ksql/ 9 | COPY ./startup.sh . 
10 | 11 | ENTRYPOINT ["./startup.sh"] 12 | -------------------------------------------------------------------------------- /ksql-server/conf/ksql-server.conf: -------------------------------------------------------------------------------- 1 | [program:ksql-server] 2 | command=/usr/bin/ksql-server-start /etc/ksql/ksqlserver.properties 3 | directory=/usr/bin 4 | autostart=true 5 | autorestart=true 6 | startretries=3 7 | stderr_logfile=/var/log/ksql/ksql-server.err.log 8 | stdout_logfile=/var/log/ksql/ksql-server.out.log 9 | user=root 10 | -------------------------------------------------------------------------------- /ksql-server/conf/ksqlserver.properties: -------------------------------------------------------------------------------- 1 | bootstrap.servers=kafka:29092 2 | ksql.command.topic.suffix=commands 3 | 4 | listeners=http://0.0.0.0:8080 5 | -------------------------------------------------------------------------------- /ksql-server/startup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | 3 | /bin/sleep 10 4 | /usr/bin/supervisord -n 5 | -------------------------------------------------------------------------------- /ksql/__init__.py: -------------------------------------------------------------------------------- 1 | __package_name__ = "ksql" 2 | __ksql_server_version__ = "0.10.1" 3 | __ksql_api_version__ = "0.1.2" 4 | __version__ = __ksql_server_version__ + "." 
+ __ksql_api_version__ 5 | 6 | from ksql.client import KSQLAPI # noqa 7 | from ksql.builder import SQLBuilder # noqa 8 | from ksql.api import SimplifiedAPI # noqa 9 | -------------------------------------------------------------------------------- /ksql/api.py: -------------------------------------------------------------------------------- 1 | import time 2 | 3 | import base64 4 | import functools 5 | import json 6 | import logging 7 | import requests 8 | import urllib 9 | from copy import deepcopy 10 | from requests import Timeout 11 | from urllib.parse import urlparse 12 | from hyper import HTTPConnection 13 | 14 | 15 | from ksql.builder import SQLBuilder 16 | from ksql.errors import CreateError, InvalidQueryError, KSQLError 17 | 18 | 19 | class BaseAPI(object): 20 | def __init__(self, url, **kwargs): 21 | self.url = url 22 | self.max_retries = kwargs.get("max_retries", 3) 23 | self.delay = kwargs.get("delay", 0) 24 | self.timeout = kwargs.get("timeout", 15) 25 | self.api_key = kwargs.get("api_key") 26 | self.secret = kwargs.get("secret") 27 | self.headers = { 28 | 'Content-Type': 'application/vnd.ksql.v1+json; charset=utf-8', 29 | } 30 | self.cert = kwargs.get("cert") 31 | 32 | def get_timout(self): 33 | return self.timeout 34 | 35 | @staticmethod 36 | def _validate_sql_string(sql_string): 37 | if len(sql_string) > 0: 38 | if sql_string[-1] != ";": 39 | sql_string = sql_string + ";" 40 | else: 41 | raise InvalidQueryError(sql_string) 42 | return sql_string 43 | 44 | @staticmethod 45 | def _raise_for_status(r, response): 46 | r_json = json.loads(response) 47 | if r.getcode() != 200: 48 | # seems to be the new API behavior 49 | if r_json.get("@type") == "statement_error" or r_json.get("@type") == "generic_error": 50 | error_message = r_json["message"] 51 | error_code = r_json["error_code"] 52 | stackTrace = r_json["stack_trace"] 53 | raise KSQLError(error_message, error_code, stackTrace) 54 | else: 55 | raise KSQLError("Unknown Error: {}".format(r.content)) 56 | 
else: 57 | # seems to be the old API behavior, so some errors have status 200, bug?? 58 | if r_json and r_json[0]["@type"] == "currentStatus" and r_json[0]["commandStatus"]["status"] == "ERROR": 59 | error_message = r_json[0]["commandStatus"]["message"] 60 | error_code = None 61 | stackTrace = None 62 | raise KSQLError(error_message, error_code, stackTrace) 63 | return True 64 | 65 | def ksql(self, ksql_string, stream_properties=None): 66 | r = self._request(endpoint="ksql", sql_string=ksql_string, stream_properties=stream_properties) 67 | response = r.read().decode("utf-8") 68 | self._raise_for_status(r, response) 69 | res = json.loads(response) 70 | return res 71 | 72 | def query2(self, query_string, encoding="utf-8", chunk_size=128, stream_properties=None, idle_timeout=None): 73 | """ 74 | Process streaming incoming data with HTTP/2. 75 | 76 | """ 77 | parsed_uri = urlparse(self.url) 78 | 79 | logging.debug("KSQL generated: {}".format(query_string)) 80 | sql_string = self._validate_sql_string(query_string) 81 | body = {"sql": sql_string} 82 | if stream_properties: 83 | body["properties"] = stream_properties 84 | else: 85 | body["properties"] = {} 86 | 87 | with HTTPConnection(parsed_uri.netloc) as connection: 88 | streaming_response = self._request2( 89 | endpoint="query-stream", body=body, connection=connection 90 | ) 91 | start_idle = None 92 | 93 | if streaming_response.status == 200: 94 | for chunk in streaming_response.read_chunked(): 95 | if chunk != b"\n": 96 | start_idle = None 97 | yield chunk.decode(encoding) 98 | 99 | else: 100 | if not start_idle: 101 | start_idle = time.time() 102 | if idle_timeout and time.time() - start_idle > idle_timeout: 103 | print("Ending query because of time out! 
({} seconds)".format(idle_timeout)) 104 | return 105 | else: 106 | raise ValueError("Return code is {}.".format(streaming_response.status)) 107 | 108 | def query(self, query_string, encoding="utf-8", chunk_size=128, stream_properties=None, idle_timeout=None): 109 | """ 110 | Process streaming incoming data. 111 | 112 | """ 113 | 114 | streaming_response = self._request( 115 | endpoint="query", sql_string=query_string, stream_properties=stream_properties 116 | ) 117 | 118 | start_idle = None 119 | 120 | if streaming_response.code == 200: 121 | for chunk in streaming_response: 122 | if chunk != b"\n": 123 | start_idle = None 124 | yield chunk.decode(encoding) 125 | else: 126 | if not start_idle: 127 | start_idle = time.time() 128 | if idle_timeout and time.time() - start_idle > idle_timeout: 129 | print("Ending query because of time out! ({} seconds)".format(idle_timeout)) 130 | return 131 | else: 132 | raise ValueError("Return code is {}.".format(streaming_response.status_code)) 133 | 134 | def get_request(self, endpoint): 135 | auth = (self.api_key, self.secret) if self.api_key or self.secret else None 136 | return requests.get(endpoint, headers=self.headers, auth=auth, verify=self.cert) 137 | 138 | def _request2(self, endpoint, connection, body, method="POST", encoding="utf-8"): 139 | url = "{}/{}".format(self.url, endpoint) 140 | data = json.dumps(body).encode(encoding) 141 | 142 | headers = deepcopy(self.headers) 143 | if self.api_key and self.secret: 144 | base64string = base64.b64encode(bytes("{}:{}".format(self.api_key, self.secret), "utf-8")).decode("utf-8") 145 | headers["Authorization"] = "Basic %s" % base64string 146 | 147 | connection.request(method=method.upper(), url=url, headers=headers, body=data) 148 | resp = connection.get_response() 149 | 150 | return resp 151 | 152 | def _request(self, endpoint, method="POST", sql_string="", stream_properties=None, encoding="utf-8"): 153 | url = "{}/{}".format(self.url, endpoint) 154 | 155 | logging.debug("KSQL 
generated: {}".format(sql_string)) 156 | 157 | sql_string = self._validate_sql_string(sql_string) 158 | body = {"ksql": sql_string} 159 | if stream_properties: 160 | body["streamsProperties"] = stream_properties 161 | else: 162 | body["streamsProperties"] = {} 163 | data = json.dumps(body).encode(encoding) 164 | 165 | headers = deepcopy(self.headers) 166 | if self.api_key and self.secret: 167 | base64string = base64.b64encode(bytes("{}:{}".format(self.api_key, self.secret), "utf-8")).decode("utf-8") 168 | headers["Authorization"] = "Basic %s" % base64string 169 | 170 | req = urllib.request.Request(url=url, data=data, headers=headers, method=method.upper()) 171 | 172 | try: 173 | r = urllib.request.urlopen(req, timeout=self.timeout, cafile=self.cert) 174 | except urllib.error.HTTPError as http_error: 175 | try: 176 | content = json.loads(http_error.read().decode(encoding)) 177 | except Exception as e: 178 | raise http_error 179 | else: 180 | logging.debug("content: {}".format(content)) 181 | raise KSQLError(content.get("message"), content.get("error_code"), content.get("stackTrace")) 182 | else: 183 | return r 184 | 185 | def close_query(self, query_id): 186 | body = {"queryId": query_id} 187 | data = json.dumps(body).encode("utf-8") 188 | url = "{}/{}".format(self.url, "close-query") 189 | 190 | response = requests.post(url=url, data=data, verify=self.cert) 191 | 192 | if response.status_code == 200: 193 | logging.debug("Successfully canceled Query ID: {}".format(query_id)) 194 | return True 195 | elif response.status_code == 400: 196 | message = json.loads(response.content)["message"] 197 | logging.debug("Failed canceling Query ID: {}: {}".format(query_id, message)) 198 | return False 199 | else: 200 | raise ValueError("Return code is {}.".format(response.status_code)) 201 | 202 | def inserts_stream(self, stream_name, rows): 203 | body = '{{"target":"{}"}}'.format(stream_name) 204 | for row in rows: 205 | body += '\n{}'.format(json.dumps(row)) 206 | 207 | 
parsed_uri = urlparse(self.url) 208 | url = "{}/{}".format(self.url, "inserts-stream") 209 | headers = deepcopy(self.headers) 210 | with HTTPConnection(parsed_uri.netloc) as connection: 211 | connection.request("POST", url, bytes(body, "utf-8"), headers) 212 | response = connection.get_response() 213 | result = response.read() 214 | 215 | result_str = result.decode("utf-8") 216 | result_chunks = result_str.split("\n") 217 | return_arr = [] 218 | for chunk in result_chunks: 219 | try: 220 | return_arr.append(json.loads(chunk)) 221 | except: 222 | pass 223 | 224 | return return_arr 225 | 226 | @ staticmethod 227 | def retry(exceptions, delay=1, max_retries=5): 228 | """ 229 | A decorator for retrying a function call with a specified delay in case of a set of exceptions 230 | 231 | Parameter List 232 | ------------- 233 | :param exceptions: A tuple of all exceptions that need to be caught for retry 234 | e.g. retry(exception_list = (Timeout, Readtimeout)) 235 | :param delay: Amount of delay (seconds) needed between successive retries. 
236 | :param times: no of times the function should be retried 237 | 238 | """ 239 | 240 | def outer_wrapper(function): 241 | @ functools.wraps(function) 242 | def inner_wrapper(*args, **kwargs): 243 | final_excep = None 244 | for counter in range(max_retries): 245 | if counter > 0: 246 | time.sleep(delay) 247 | final_excep = None 248 | try: 249 | value = function(*args, **kwargs) 250 | return value 251 | except (exceptions) as e: 252 | final_excep = e 253 | pass # or log it 254 | 255 | if final_excep is not None: 256 | raise final_excep 257 | 258 | return inner_wrapper 259 | 260 | return outer_wrapper 261 | 262 | 263 | class SimplifiedAPI(BaseAPI): 264 | def __init__(self, url, **kwargs): 265 | super(SimplifiedAPI, self).__init__(url, **kwargs) 266 | 267 | def create_stream(self, table_name, columns_type, topic, value_format="JSON"): 268 | return self._create( 269 | table_type="stream", 270 | table_name=table_name, 271 | columns_type=columns_type, 272 | topic=topic, 273 | value_format=value_format, 274 | ) 275 | 276 | def create_table(self, table_name, columns_type, topic, value_format, key): 277 | if not key: 278 | raise ValueError("key is required for creating a table.") 279 | return self._create( 280 | table_type="table", 281 | table_name=table_name, 282 | columns_type=columns_type, 283 | topic=topic, 284 | value_format=value_format, 285 | key=key, 286 | ) 287 | 288 | def create_stream_as( 289 | self, 290 | table_name, 291 | select_columns, 292 | src_table, 293 | kafka_topic=None, 294 | value_format="JSON", 295 | conditions=[], 296 | partition_by=None, 297 | **kwargs 298 | ): 299 | return self._create_as( 300 | table_type="stream", 301 | table_name=table_name, 302 | select_columns=select_columns, 303 | src_table=src_table, 304 | kafka_topic=kafka_topic, 305 | value_format=value_format, 306 | conditions=conditions, 307 | partition_by=partition_by, 308 | **kwargs, 309 | ) 310 | 311 | def _create(self, table_type, table_name, columns_type, topic, 
value_format="JSON", key=None): 312 | ksql_string = SQLBuilder.build( 313 | sql_type="create", 314 | table_type=table_type, 315 | table_name=table_name, 316 | columns_type=columns_type, 317 | topic=topic, 318 | value_format=value_format, 319 | key=key, 320 | ) 321 | self.ksql(ksql_string) 322 | return True 323 | 324 | @BaseAPI.retry(exceptions=(Timeout, CreateError)) 325 | def _create_as( 326 | self, 327 | table_type, 328 | table_name, 329 | select_columns, 330 | src_table, 331 | kafka_topic=None, 332 | value_format="JSON", 333 | conditions=[], 334 | partition_by=None, 335 | **kwargs 336 | ): 337 | ksql_string = SQLBuilder.build( 338 | sql_type="create_as", 339 | table_type=table_type, 340 | table_name=table_name, 341 | select_columns=select_columns, 342 | src_table=src_table, 343 | kafka_topic=kafka_topic, 344 | value_format=value_format, 345 | conditions=conditions, 346 | partition_by=partition_by, 347 | **kwargs, 348 | ) 349 | self.ksql(ksql_string) 350 | return True 351 | -------------------------------------------------------------------------------- /ksql/builder.py: -------------------------------------------------------------------------------- 1 | import re 2 | 3 | from six import string_types 4 | 5 | from ksql.errors import ( 6 | SQLTypeNotImplementYetError, 7 | IllegalTableTypeError, 8 | IllegalValueFormatError, 9 | SQLFormatNotImplementError, 10 | BuildNotImplmentError, 11 | ) 12 | 13 | 14 | class SQLBuilder(object): 15 | @classmethod 16 | def build(self, sql_type, **kwargs): 17 | if sql_type == "create": 18 | sql_builder = CreateBuilder(kwargs.pop("table_type")) 19 | sql_str = sql_builder.build( 20 | kwargs.pop("table_name"), 21 | kwargs.pop("columns_type"), 22 | kwargs.pop("topic"), 23 | kwargs.pop("value_format", "JSON"), 24 | kwargs.pop("key", None), 25 | ) 26 | 27 | elif sql_type == "create_as": 28 | sql_builder = CreateAsBuilder(kwargs.pop("table_type")) 29 | sql_str = sql_builder.build( 30 | table_name=kwargs.pop("table_name"), 31 | 
class BaseCreateBuilder(object):
    """Shared validation and WITH-clause rendering for the CREATE builders."""

    def __init__(self, table_type, sql_format=None):
        self.table_types = ["table", "stream"]
        self.value_formats = ["delimited", "json", "avro"]
        self.table_type = table_type
        self.sql_format = sql_format

        if table_type.lower() not in self.table_types:
            raise IllegalTableTypeError(table_type)
        if not sql_format:
            raise SQLFormatNotImplementError()

    def build(self):
        """Subclasses must render the concrete SQL statement."""
        raise BuildNotImplmentError()

    def _parsed_with_properties(self, **kwargs):
        """Render ``prop=value`` pairs for every known property found in kwargs.

        String values are single-quoted; the empty first element yields the
        leading ", " separator expected by the WITH-clause format string.
        """
        rendered = [""]
        for prop in self.properties:
            if prop not in kwargs:
                continue
            value = kwargs[prop]
            if isinstance(value, string_types):
                rendered.append("{}='{}'".format(prop, value))
            else:
                rendered.append("{}={}".format(prop, value))

        return ", ".join(rendered)
return sql_str 91 | 92 | @staticmethod 93 | def _build_colums_type(columns_type): 94 | built_columns_type = ", ".join(columns_type) 95 | return built_columns_type 96 | 97 | @staticmethod 98 | def _build_key(key): 99 | if key: 100 | built_key = ", key='{}'".format(key) 101 | return built_key 102 | else: 103 | return "" 104 | 105 | 106 | class CreateAsBuilder(BaseCreateBuilder): 107 | def __init__(self, table_type): 108 | str_format = "CREATE {} {} WITH (kafka_topic='{}', value_format='{}'{}) AS SELECT {} FROM {} {} {}" 109 | super(CreateAsBuilder, self).__init__(table_type, str_format) 110 | self.properties = ["kafka_topic", "value_format", "partitions", "replicas", "timestamp"] 111 | 112 | def build( 113 | self, 114 | table_name, 115 | select_columns, 116 | src_table, 117 | kafka_topic=None, 118 | value_format="JSON", 119 | conditions=[], 120 | partition_by=None, 121 | **kwargs 122 | ): 123 | 124 | if value_format.lower() not in self.value_formats: 125 | raise IllegalValueFormatError(value_format) 126 | 127 | if not kafka_topic: 128 | kafka_topic = table_name 129 | 130 | select_clause, where_clause, partition_by_clause, properties = self._build_clauses( 131 | select_columns, conditions, partition_by, **kwargs 132 | ) 133 | 134 | sql_str = self.sql_format.format( 135 | self.table_type, 136 | table_name, 137 | kafka_topic, 138 | value_format, 139 | properties, 140 | select_clause, 141 | src_table, 142 | where_clause, 143 | partition_by_clause, 144 | ) 145 | 146 | cleaned_sql_str = re.sub(r"\s+", " ", sql_str).strip() 147 | 148 | return cleaned_sql_str 149 | 150 | def _build_clauses(self, select_columns, conditions, partition_by, **kwargs): 151 | select_clause = self._build_select_clause(select_columns) 152 | where_clause = self._build_where_clause(conditions) 153 | partition_by_clause = self._build_partition_by_clause(partition_by) 154 | properties = self._parsed_with_properties(**kwargs) 155 | 156 | return select_clause, where_clause, partition_by_clause, properties 
157 | 158 | @staticmethod 159 | def _build_where_clause(conditions): 160 | if len(conditions) > 0: 161 | where_clause = "where {}".format(conditions).replace('"', "'") 162 | return where_clause 163 | else: 164 | return "" 165 | 166 | @staticmethod 167 | def _build_select_clause(select_columns): 168 | select_clause = "*" 169 | if select_columns: 170 | if len(select_columns) > 0: 171 | select_clause = ", ".join(select_columns) 172 | 173 | return select_clause 174 | 175 | @staticmethod 176 | def _build_partition_by_clause(partition_by): 177 | if partition_by: 178 | return "PARTITION BY {}".format(partition_by) 179 | else: 180 | return "" 181 | -------------------------------------------------------------------------------- /ksql/client.py: -------------------------------------------------------------------------------- 1 | from __future__ import absolute_import 2 | from __future__ import print_function 3 | 4 | from ksql.api import SimplifiedAPI 5 | from ksql.utils import process_query_result 6 | 7 | 8 | class KSQLAPI(object): 9 | """ API Class """ 10 | 11 | def __init__(self, url, max_retries=3, check_version=True, ** kwargs): 12 | """ 13 | You can use a Basic Authentication with this API, for now we accept the api_key/secret based on the Confluent 14 | Cloud implementation. So you just need to put on the kwargs the api_key and secret. 
15 | """ 16 | self.url = url 17 | 18 | self.sa = SimplifiedAPI(url, max_retries=max_retries, **kwargs) 19 | 20 | self.check_version = check_version 21 | if check_version is True: 22 | self.get_ksql_version() 23 | 24 | def get_url(self): 25 | return self.url 26 | 27 | @property 28 | def timeout(self): 29 | return self.sa.get_timout() 30 | 31 | def get_ksql_version(self): 32 | r = self.sa.get_request(self.url + "/info") 33 | if r.status_code == 200: 34 | info = r.json().get("KsqlServerInfo") 35 | version = info.get("version") 36 | return version 37 | 38 | else: 39 | raise ValueError("Status Code: {}.\nMessage: {}".format(r.status_code, r.content)) 40 | 41 | def get_properties(self): 42 | properties = self.sa.ksql("show properties;") 43 | return properties[0]["properties"] 44 | 45 | def ksql(self, ksql_string, stream_properties=None): 46 | return self.sa.ksql(ksql_string, stream_properties=stream_properties) 47 | 48 | def query(self, query_string, encoding="utf-8", chunk_size=128, stream_properties=None, idle_timeout=None, use_http2=None, return_objects=None): 49 | if use_http2: 50 | yield from self.sa.query2( 51 | query_string=query_string, 52 | encoding=encoding, 53 | chunk_size=chunk_size, 54 | stream_properties=stream_properties, 55 | idle_timeout=idle_timeout, 56 | ) 57 | else: 58 | results = self.sa.query( 59 | query_string=query_string, 60 | encoding=encoding, 61 | chunk_size=chunk_size, 62 | stream_properties=stream_properties, 63 | idle_timeout=idle_timeout 64 | ) 65 | 66 | yield from process_query_result(results, return_objects) 67 | 68 | def close_query(self, query_id): 69 | return self.sa.close_query(query_id) 70 | 71 | def inserts_stream(self, stream_name, rows): 72 | return self.sa.inserts_stream(stream_name, rows) 73 | 74 | def create_stream(self, table_name, columns_type, topic, value_format="JSON"): 75 | return self.sa.create_stream( 76 | table_name=table_name, columns_type=columns_type, topic=topic, value_format=value_format 77 | ) 78 | 79 | def 
class SQLTypeNotImplementYetError(Exception):
    """Raised for a ``sql_type`` that SQLBuilder does not support."""

    def __init__(self, sql_type):
        self.msg = "This type {} has not be implement yet.".format(sql_type)
        # BUG FIX: forward the message to Exception so str(exc) and tracebacks
        # show it instead of an empty string
        super().__init__(self.msg)


class IllegalTableTypeError(Exception):
    """Raised for a table type other than ``table``/``stream``."""

    def __init__(self, table_type):
        self.msg = "This table type {} is illegal.".format(table_type)
        super().__init__(self.msg)


class IllegalValueFormatError(Exception):
    """Raised for a value format the builders do not accept."""

    def __init__(self, value_format):
        self.msg = "This value format {} is illegal.".format(value_format)
        super().__init__(self.msg)


class CreateError(Exception):
    """Wraps an error raised while creating a stream/table."""

    def __init__(self, e):
        self.msg = "{}".format(e)
        super().__init__(self.msg)


class SQLFormatNotImplementError(Exception):
    """Raised when a builder subclass supplies no SQL format string."""
    pass


class BuildNotImplmentError(Exception):
    """Raised when ``build`` is called on the abstract base builder."""
    pass


class FileTypeError(Exception):
    """Raised for uploaded files whose extension is not ``.ksql``."""

    def __init__(self, ext):
        self.msg = "This {} file extension is not valid".format(ext)
        super().__init__(self.msg)


class InvalidQueryError(Exception):
    """Raised for a query the server reports as invalid."""

    def __init__(self, query):
        self.msg = "The query:\n{}\n is invalid".format(query)
        super().__init__(self.msg)
class KSQLError(Exception):
    """Error returned by the KSQL REST API.

    Carries the server-provided message plus the optional ``error_code`` and
    ``stackTrace`` fields from the error payload.
    """

    def __init__(self, e, error_code=None, stackTrace=None):
        self.msg = "{}".format(e)
        self.error_code = error_code
        self.stackTrace = stackTrace
        # BUG FIX: forward the message to Exception so str(exc)/logging output
        # is the server message instead of an empty string
        super().__init__(self.msg)
def check_kafka_available(bootstrap_servers):
    """Return True when a TCP connection to ``host:port`` can be established.

    :param bootstrap_servers: a single ``"host:port"`` string
    :returns: True when the port accepts a connection, False on any failure
    """
    # MODERNIZATION: telnetlib was deprecated by PEP 594 and removed in
    # Python 3.13; a plain socket connect performs the same reachability probe.
    # Local import so the module-level telnetlib import can be dropped later.
    import socket

    host, port = bootstrap_servers.split(":")
    try:
        # close the probe socket explicitly — the old Telnet handle was leaked
        with socket.create_connection((host, int(port)), timeout=10):
            return True
    except Exception:
        return False
def process_row(row, column_names):
    """Convert one raw streaming result line into a dict keyed by column name.

    Strips the JSON-array framing characters the server wraps around each row,
    then zips the row's column values with the names from ``parse_columns``.
    Returns ``None`` for the terminating ``finalMessage`` record.
    """
    cleaned = row.replace(",\n", "").replace("]\n", "").rstrip("]")
    payload = json.loads(cleaned)
    if "finalMessage" in payload:
        return None

    return {
        column_names[position]["name"]: value
        for position, value in enumerate(payload["row"]["columns"])
    }
regex==2020.7.14 36 | requests==2.20.0 37 | responses==0.10.15 38 | six==1.11.0 39 | toml==0.10.1 40 | traitlets==4.3.3 41 | typed-ast==1.4.1 42 | urllib3==1.26.5 43 | vcrpy==4.0.2 44 | wcwidth==0.2.5 45 | wrapt==1.12.1 46 | yarl==1.4.2 47 | zipp==3.1.0 48 | hyper 49 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | requests 2 | six 3 | urllib3 4 | hyper 5 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | description-file = README.md 3 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: utf-8 -*- 3 | """ Setup module """ 4 | import os 5 | 6 | from setuptools import setup 7 | from distutils.version import LooseVersion 8 | import pip 9 | 10 | if LooseVersion(pip.__version__) >= "10.0.0": 11 | from pip._internal.req import parse_requirements 12 | else: 13 | from pip.req import parse_requirements 14 | 15 | def get_install_requirements(path): 16 | content = open(os.path.join(os.path.dirname(__file__), path)).read() 17 | return [ 18 | req 19 | for req in content.split("\n") 20 | if req != '' and not req.startswith('#') 21 | ] 22 | 23 | # Get version from __init__.py file 24 | VERSION = "0.10.2" 25 | 26 | here = os.path.dirname(__file__) 27 | 28 | # Get long description 29 | README = open(os.path.join(os.path.dirname(__file__), "README.rst")).read() 30 | 31 | setuptools_kwargs = { 32 | 'install_requires': [ 33 | 'requests', 34 | 'six', 35 | 'urllib3', 36 | 'hyper' 37 | ], 38 | 'zip_safe': False, 39 | } 40 | 41 | # allow setup.py to be run from any path 42 | os.chdir(os.path.normpath(os.path.join(os.path.abspath(__file__), 
# Distribution metadata; runtime dependencies are injected via
# ``setuptools_kwargs`` defined above, dev tooling via the "dev" extra.
setup(
    name="ksql",
    version=VERSION,  # release version (hard-coded constant above)
    description="A Python wrapper for the KSQL REST API",
    long_description=README,
    author="Bryan Yang",
    author_email="kenshin200528@gmail.com",
    url="https://github.com/bryanyang0528/ksql-python",
    license="MIT License",
    packages=[
        "ksql"
    ],
    include_package_data=True,
    platforms=['any'],
    extras_require={
        # ``pip install ksql[dev]`` pulls in the test tooling
        "dev": get_install_requirements("test-requirements.txt")
    },
    classifiers=[
        "Development Status :: 5 - Production/Stable",
        "Intended Audience :: Developers",
        "Natural Language :: English",
        "License :: OSI Approved :: MIT License",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Topic :: Software Development :: Libraries :: Python Modules"
    ],
    **setuptools_kwargs
)
"(registertime bigint, gender varchar, regionid varchar, userid varchar) " 15 | "WITH (kafka_topic='users', value_format='JSON');" 16 | ) 17 | create_table_without_key_avro = ( 18 | "CREATE TABLE users_original " 19 | "(registertime bigint, gender varchar, regionid varchar, userid varchar) " 20 | "WITH (kafka_topic='users', value_format='AVRO');" 21 | ) 22 | create_table_without_key_delimited = ( 23 | "CREATE TABLE users_original " 24 | "(registertime bigint, gender varchar, regionid varchar, userid varchar) " 25 | "WITH (kafka_topic='users', value_format='DELIMITED');" 26 | ) 27 | create_stream_without_key = ( 28 | "CREATE STREAM users_original " 29 | "(registertime bigint, gender varchar, regionid varchar, userid varchar) " 30 | "WITH (kafka_topic='users', value_format='JSON');" 31 | ) 32 | create_stream_as_with_condition = ( 33 | "CREATE STREAM pageviews_valid " 34 | "WITH (kafka_topic='pageviews_valid', value_format='DELIMITED', " 35 | "timestamp='logtime') AS " 36 | "SELECT rowtime as logtime, * FROM pageviews_original " 37 | "WHERE userid like 'User_%' AND pageid like 'Page_%'" 38 | ) 39 | create_stream_as_with_condition_select_star = ( 40 | "CREATE STREAM pageviews_valid " 41 | "WITH (kafka_topic='pageviews_valid', value_format='DELIMITED', " 42 | "timestamp='logtime') AS SELECT * FROM pageviews_original " 43 | "WHERE userid like 'User_%' AND pageid like 'Page_%'" 44 | ) 45 | create_stream_as_without_condition = ( 46 | "CREATE STREAM pageviews_valid " 47 | "WITH (kafka_topic='pageviews_valid', value_format='DELIMITED', " 48 | "timestamp='logtime') AS SELECT rowtime as logtime, * FROM pageviews_original" 49 | ) 50 | create_stream_as_without_condition_select_star = ( 51 | "CREATE STREAM pageviews_valid " 52 | "WITH (kafka_topic='pageviews_valid', value_format='DELIMITED', " 53 | "timestamp='logtime') AS SELECT * FROM pageviews_original" 54 | ) 55 | create_stream_as_with_condition_with_partitions = ( 56 | "CREATE STREAM pageviews_valid " 57 | "WITH 
(kafka_topic='pageviews_valid', value_format='DELIMITED', " 58 | "partitions=5, timestamp='logtime') AS " 59 | "SELECT rowtime as logtime, * FROM pageviews_original " 60 | "WHERE userid like 'User_%' AND pageid like 'Page_%'" 61 | ) 62 | create_stream_as_without_condition_partition_by = ( 63 | "CREATE STREAM pageviews_valid WITH " 64 | "(kafka_topic='pageviews_valid', value_format='DELIMITED', " 65 | "timestamp='logtime') AS " 66 | "SELECT rowtime as logtime, * " 67 | "FROM pageviews_original PARTITION BY logtime" 68 | ) 69 | create_stream_as_without_condition_avro = ( 70 | "CREATE STREAM pageviews_valid " 71 | "WITH (kafka_topic='pageviews_valid', value_format='AVRO', " 72 | "timestamp='logtime')" 73 | " AS SELECT rowtime as logtime, * FROM pageviews_original" 74 | ) 75 | 76 | def test_create_table_with_key(self): 77 | table_name = "users_original" 78 | columns_type = ["registertime bigint", "gender varchar", "regionid varchar", "userid varchar"] 79 | topic = "users" 80 | value_format = "JSON" 81 | key = "userid" 82 | 83 | build_sql_str = SQLBuilder.build( 84 | "create", 85 | table_type="table", 86 | table_name=table_name, 87 | columns_type=columns_type, 88 | topic=topic, 89 | value_format=value_format, 90 | key=key, 91 | ) 92 | 93 | self.assertEqual(build_sql_str.lower(), self.create_table_with_key.lower()) 94 | 95 | def test_create_table_without_key(self): 96 | table_name = "users_original" 97 | columns_type = ["registertime bigint", "gender varchar", "regionid varchar", "userid varchar"] 98 | topic = "users" 99 | value_format = "JSON" 100 | 101 | SQLBuilder.build( 102 | "create", 103 | table_type="table", 104 | table_name=table_name, 105 | columns_type=columns_type, 106 | topic=topic, 107 | value_format=value_format, 108 | ) 109 | 110 | def test_create_table_without_key_avro(self): 111 | table_name = "users_original" 112 | columns_type = ["registertime bigint", "gender varchar", "regionid varchar", "userid varchar"] 113 | topic = "users" 114 | value_format = 
"AVRO" 115 | 116 | build_sql_str = SQLBuilder.build( 117 | "create", 118 | table_type="table", 119 | table_name=table_name, 120 | columns_type=columns_type, 121 | topic=topic, 122 | value_format=value_format, 123 | ) 124 | 125 | self.assertEqual(build_sql_str.lower(), self.create_table_without_key_avro.lower()) 126 | 127 | def test_create_table_without_key_delimited(self): 128 | table_name = "users_original" 129 | columns_type = ["registertime bigint", "gender varchar", "regionid varchar", "userid varchar"] 130 | topic = "users" 131 | value_format = "DELIMITED" 132 | 133 | build_sql_str = SQLBuilder.build( 134 | "create", 135 | table_type="table", 136 | table_name=table_name, 137 | columns_type=columns_type, 138 | topic=topic, 139 | value_format=value_format, 140 | ) 141 | 142 | self.assertEqual(build_sql_str.lower(), self.create_table_without_key_delimited.lower()) 143 | 144 | def test_create_stream_without_key(self): 145 | table_name = "users_original" 146 | columns_type = ["registertime bigint", "gender varchar", "regionid varchar", "userid varchar"] 147 | topic = "users" 148 | value_format = "JSON" 149 | 150 | build_sql_str = SQLBuilder.build( 151 | "create", 152 | table_type="stream", 153 | table_name=table_name, 154 | columns_type=columns_type, 155 | topic=topic, 156 | value_format=value_format, 157 | ) 158 | 159 | self.assertEqual(build_sql_str.lower(), self.create_stream_without_key.lower()) 160 | 161 | def test_create_stream_as_without_condition(self): 162 | sql_type = "create_as" 163 | table_name = "pageviews_valid" 164 | src_table = "pageviews_original" 165 | kafka_topic = "pageviews_valid" 166 | value_format = "DELIMITED" 167 | select_columns = ["rowtime as logtime", "*"] 168 | 169 | built_sql_str = SQLBuilder.build( 170 | sql_type=sql_type, 171 | table_type="stream", 172 | table_name=table_name, 173 | src_table=src_table, 174 | kafka_topic=kafka_topic, 175 | select_columns=select_columns, 176 | timestamp="logtime", 177 | value_format=value_format, 178 
| ) 179 | 180 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_without_condition.lower()) 181 | 182 | def test_create_stream_as_without_condition_avro(self): 183 | sql_type = "create_as" 184 | table_name = "pageviews_valid" 185 | src_table = "pageviews_original" 186 | kafka_topic = "pageviews_valid" 187 | value_format = "AVRO" 188 | select_columns = ["rowtime as logtime", "*"] 189 | 190 | built_sql_str = SQLBuilder.build( 191 | sql_type=sql_type, 192 | table_type="stream", 193 | table_name=table_name, 194 | src_table=src_table, 195 | kafka_topic=kafka_topic, 196 | select_columns=select_columns, 197 | timestamp="logtime", 198 | value_format=value_format, 199 | ) 200 | 201 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_without_condition_avro.lower()) 202 | 203 | def test_create_stream_as_without_condition_select_star(self): 204 | sql_type = "create_as" 205 | table_name = "pageviews_valid" 206 | src_table = "pageviews_original" 207 | kafka_topic = "pageviews_valid" 208 | value_format = "DELIMITED" 209 | 210 | built_sql_str = SQLBuilder.build( 211 | sql_type=sql_type, 212 | table_type="stream", 213 | table_name=table_name, 214 | src_table=src_table, 215 | kafka_topic=kafka_topic, 216 | timestamp="logtime", 217 | value_format=value_format, 218 | ) 219 | 220 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_without_condition_select_star.lower()) 221 | 222 | def test_create_stream_as_without_condition_select_star_with_blank_list(self): 223 | sql_type = "create_as" 224 | table_name = "pageviews_valid" 225 | src_table = "pageviews_original" 226 | kafka_topic = "pageviews_valid" 227 | value_format = "DELIMITED" 228 | select_columns = [] 229 | 230 | built_sql_str = SQLBuilder.build( 231 | sql_type=sql_type, 232 | table_type="stream", 233 | table_name=table_name, 234 | src_table=src_table, 235 | kafka_topic=kafka_topic, 236 | timestamp="logtime", 237 | value_format=value_format, 238 | select_columns=select_columns, 239 | ) 240 | 
241 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_without_condition_select_star.lower()) 242 | 243 | def test_create_stream_as_without_condition_select_star_with_only_star(self): 244 | sql_type = "create_as" 245 | table_name = "pageviews_valid" 246 | src_table = "pageviews_original" 247 | kafka_topic = "pageviews_valid" 248 | value_format = "DELIMITED" 249 | select_columns = ["*"] 250 | 251 | built_sql_str = SQLBuilder.build( 252 | sql_type=sql_type, 253 | table_type="stream", 254 | table_name=table_name, 255 | src_table=src_table, 256 | kafka_topic=kafka_topic, 257 | timestamp="logtime", 258 | value_format=value_format, 259 | select_columns=select_columns, 260 | ) 261 | 262 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_without_condition_select_star.lower()) 263 | 264 | def test_create_stream_as_with_condition(self): 265 | sql_type = "create_as" 266 | table_name = "pageviews_valid" 267 | src_table = "pageviews_original" 268 | kafka_topic = "pageviews_valid" 269 | value_format = "DELIMITED" 270 | select_columns = ["rowtime as logtime", "*"] 271 | conditions = "userid like 'User_%' AND pageid like 'Page_%'" 272 | 273 | built_sql_str = SQLBuilder.build( 274 | sql_type=sql_type, 275 | table_type="stream", 276 | table_name=table_name, 277 | src_table=src_table, 278 | kafka_topic=kafka_topic, 279 | select_columns=select_columns, 280 | timestamp="logtime", 281 | value_format=value_format, 282 | conditions=conditions, 283 | ) 284 | 285 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_with_condition.lower()) 286 | 287 | def test_create_stream_as_with_condition_double_qoute(self): 288 | sql_type = "create_as" 289 | table_name = "pageviews_valid" 290 | src_table = "pageviews_original" 291 | kafka_topic = "pageviews_valid" 292 | value_format = "DELIMITED" 293 | select_columns = ["rowtime as logtime", "*"] 294 | conditions = 'userid like "User_%" AND pageid like "Page_%"' 295 | 296 | built_sql_str = SQLBuilder.build( 297 | 
sql_type=sql_type, 298 | table_type="stream", 299 | table_name=table_name, 300 | src_table=src_table, 301 | kafka_topic=kafka_topic, 302 | select_columns=select_columns, 303 | timestamp="logtime", 304 | value_format=value_format, 305 | conditions=conditions, 306 | ) 307 | 308 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_with_condition.lower()) 309 | 310 | def test_create_stream_as_with_condition_with_partitions(self): 311 | sql_type = "create_as" 312 | table_name = "pageviews_valid" 313 | src_table = "pageviews_original" 314 | kafka_topic = "pageviews_valid" 315 | value_format = "DELIMITED" 316 | select_columns = ["rowtime as logtime", "*"] 317 | conditions = "userid like 'User_%' AND pageid like 'Page_%'" 318 | paritions = 5 319 | 320 | built_sql_str = SQLBuilder.build( 321 | sql_type=sql_type, 322 | table_type="stream", 323 | table_name=table_name, 324 | src_table=src_table, 325 | kafka_topic=kafka_topic, 326 | select_columns=select_columns, 327 | timestamp="logtime", 328 | value_format=value_format, 329 | conditions=conditions, 330 | partitions=paritions, 331 | ) 332 | 333 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_with_condition_with_partitions.lower()) 334 | 335 | def test_create_stream_as_without_condition_partition_by(self): 336 | sql_type = "create_as" 337 | table_name = "pageviews_valid" 338 | src_table = "pageviews_original" 339 | kafka_topic = "pageviews_valid" 340 | value_format = "DELIMITED" 341 | select_columns = ["rowtime as logtime", "*"] 342 | partition_by = "logtime" 343 | 344 | built_sql_str = SQLBuilder.build( 345 | sql_type=sql_type, 346 | table_type="stream", 347 | table_name=table_name, 348 | src_table=src_table, 349 | kafka_topic=kafka_topic, 350 | select_columns=select_columns, 351 | timestamp="logtime", 352 | value_format=value_format, 353 | partition_by=partition_by, 354 | ) 355 | 356 | self.assertEqual(built_sql_str.lower(), self.create_stream_as_without_condition_partition_by.lower()) 357 | 358 | 
def test_sql_type_error(self): 359 | sql_type = "view" 360 | table_type = "stream" 361 | table_name = "users_original" 362 | columns_type = ["registertime bigint", "gender varchar", "regionid varchar", "userid varchar"] 363 | topic = "users" 364 | value_format = "JSON" 365 | 366 | with self.assertRaises(SQLTypeNotImplementYetError): 367 | SQLBuilder.build( 368 | sql_type=sql_type, 369 | table_type=table_type, 370 | table_name=table_name, 371 | columns_type=columns_type, 372 | topic=topic, 373 | value_format=value_format, 374 | ) 375 | 376 | def test_table_type_error(self): 377 | sql_type = "create" 378 | table_type = "qoo" 379 | table_name = "users_original" 380 | columns_type = ["registertime bigint", "gender varchar", "regionid varchar", "userid varchar"] 381 | topic = "users" 382 | value_format = "JSON" 383 | 384 | with self.assertRaises(IllegalTableTypeError): 385 | SQLBuilder.build( 386 | sql_type=sql_type, 387 | table_type=table_type, 388 | table_name=table_name, 389 | columns_type=columns_type, 390 | topic=topic, 391 | value_format=value_format, 392 | ) 393 | 394 | def test_value_format_error(self): 395 | sql_type = "create" 396 | table_type = "stream" 397 | table_name = "users_original" 398 | columns_type = ["registertime bigint", "gender varchar", "regionid varchar", "userid varchar"] 399 | topic = "users" 400 | value_format = "foo" 401 | 402 | with self.assertRaises(IllegalValueFormatError): 403 | SQLBuilder.build( 404 | sql_type=sql_type, 405 | table_type=table_type, 406 | table_name=table_name, 407 | columns_type=columns_type, 408 | topic=topic, 409 | value_format=value_format, 410 | ) 411 | -------------------------------------------------------------------------------- /tests/test_utils.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import vcr 3 | from confluent_kafka import Producer 4 | 5 | import ksql.utils as utils 6 | from ksql.client import KSQLAPI 7 | 8 | 9 | class 
TestKSQLUtils(unittest.TestCase): 10 | """Test case for the client methods.""" 11 | 12 | def setUp(self): 13 | self.url = "http://localhost:8088" 14 | self.api_client = KSQLAPI(url=self.url, check_version=False) 15 | self.test_prefix = "ksql_python_test" 16 | 17 | self.exist_topic = self.test_prefix + "_exist_topic" 18 | self.bootstrap_servers = "localhost:29092" 19 | if utils.check_kafka_available(self.bootstrap_servers): 20 | producer = Producer({"bootstrap.servers": self.bootstrap_servers}) 21 | producer.produce(self.exist_topic, "test_message") 22 | producer.flush() 23 | 24 | def tearDown(self): 25 | if utils.check_kafka_available(self.bootstrap_servers): 26 | utils.drop_all_streams(self.api_client, prefix=self.test_prefix) 27 | 28 | @vcr.use_cassette("tests/vcr_cassettes/utils_test_drop_stream.yml") 29 | def test_drop_stream(self): 30 | topic = self.exist_topic 31 | stream_name = self.test_prefix + "_test_drop_stream" 32 | utils.drop_stream(self.api_client, stream_name) 33 | ksql_string = "CREATE STREAM {} (viewtime bigint, userid varchar, pageid varchar) \ 34 | WITH (kafka_topic='{}', value_format='DELIMITED');".format( 35 | stream_name, topic 36 | ) 37 | self.api_client.ksql(ksql_string) 38 | self.assertTrue(utils.get_stream_info(self.api_client, stream_name)) 39 | utils.drop_stream(self.api_client, stream_name) 40 | self.assertFalse(utils.get_stream_info(self.api_client, stream_name)) 41 | 42 | @vcr.use_cassette("tests/vcr_cassettes/utils_test_drop_stream_create_as_stream.yml") 43 | def test_drop_stream_create_as_stream(self): 44 | topic = self.exist_topic 45 | stream_name = self.test_prefix + "_test_drop_stream" 46 | stream_name_as = stream_name + "_as" 47 | utils.drop_stream(self.api_client, stream_name) 48 | ksql_string = "CREATE STREAM {} (viewtime bigint, userid varchar, pageid varchar) \ 49 | WITH (kafka_topic='{}', value_format='DELIMITED');".format( 50 | stream_name, topic 51 | ) 52 | self.api_client.ksql(ksql_string) 53 | ksql_string = "CREATE 
STREAM {} as select * from {};".format(stream_name_as, stream_name) 54 | self.api_client.ksql(ksql_string) 55 | 56 | self.assertTrue(utils.get_stream_info(self.api_client, stream_name_as)) 57 | utils.drop_stream(self.api_client, stream_name_as) 58 | self.assertFalse(utils.get_stream_info(self.api_client, stream_name_as)) 59 | 60 | @vcr.use_cassette("tests/vcr_cassettes/utils_test_get_all_streams.yml") 61 | def test_get_all_streams(self): 62 | topic = self.exist_topic 63 | stream_name = self.test_prefix + "_test_get_all_streams" 64 | utils.drop_stream(self.api_client, stream_name) 65 | ksql_string = "CREATE STREAM {} (viewtime bigint, userid varchar, pageid varchar) \ 66 | WITH (kafka_topic='{}', value_format='DELIMITED');".format( 67 | stream_name, topic 68 | ) 69 | self.api_client.ksql(ksql_string) 70 | filtered_streams = utils.get_all_streams(self.api_client, prefix=self.test_prefix) 71 | self.assertEqual(filtered_streams, [stream_name.upper()]) 72 | 73 | @vcr.use_cassette("tests/vcr_cassettes/utils_test_get_stream_info.yml") 74 | def test_get_stream_info(self): 75 | topic = self.exist_topic 76 | stream_name = self.test_prefix + "_test_get_stream_info" 77 | utils.drop_stream(self.api_client, stream_name) 78 | ksql_string = "CREATE STREAM {} (viewtime bigint, userid varchar, pageid varchar) \ 79 | WITH (kafka_topic='{}', value_format='DELIMITED');".format( 80 | stream_name, topic 81 | ) 82 | self.api_client.ksql(ksql_string) 83 | stream_info = utils.get_stream_info(self.api_client, stream_name) 84 | # print(stream_info['topic']) 85 | self.assertEqual(stream_info["topic"], self.exist_topic) 86 | 87 | @vcr.use_cassette("tests/vcr_cassettes/utils_test_drop_all_streams.yml") 88 | def test_drop_all_streams(self): 89 | topic = self.exist_topic 90 | stream_name = self.test_prefix + "_test_drop_all_streams" 91 | utils.drop_stream(self.api_client, stream_name) 92 | ksql_string = "CREATE STREAM {} (viewtime bigint, userid varchar, pageid varchar) \ 93 | WITH 
(kafka_topic='{}', value_format='DELIMITED');".format( 94 | stream_name, topic 95 | ) 96 | self.api_client.ksql(ksql_string) 97 | utils.drop_all_streams(self.api_client, prefix=self.test_prefix) 98 | self.assertFalse(utils.get_stream_info(self.api_client, stream_name)) 99 | 100 | @vcr.use_cassette("tests/vcr_cassettes/utils_test_get_dependent_queries.yml") 101 | def test_get_dependent_queries(self): 102 | topic = self.exist_topic 103 | stream_name = self.test_prefix + "_test_get_dependent_queries" 104 | stream_name_as = stream_name + "_as" 105 | utils.drop_stream(self.api_client, stream_name) 106 | ksql_string = "CREATE STREAM {} (viewtime bigint, userid varchar, pageid varchar) \ 107 | WITH (kafka_topic='{}', value_format='DELIMITED');".format( 108 | stream_name, topic 109 | ) 110 | self.api_client.ksql(ksql_string) 111 | ksql_string = "CREATE STREAM {} as select * from {};".format(stream_name_as, stream_name) 112 | self.api_client.ksql(ksql_string) 113 | read_queries, write_queries = utils.get_dependent_queries(self.api_client, stream_name_as) 114 | self.assertEqual(read_queries, []) 115 | self.assertTrue(write_queries[0].startswith("CSAS_KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES_AS")) 116 | 117 | def test_parse_columns(self): 118 | header_str = """[{"header":{"queryId":"none","schema":"`ORDER_ID` INTEGER, `MY_STRUCT` STRUCT<`A` INTEGER, `B` STRING>, `MY_MAP` MAP, `MY_ARRAY` ARRAY, `TOTAL_AMOUNT` DOUBLE, `CUSTOMER_NAME` STRING"}},""" 119 | 120 | columns = utils.parse_columns(header_str) 121 | 122 | self.assertEqual(columns[0], {'name': 'ORDER_ID', 'type': 'INTEGER'}) 123 | self.assertEqual(columns[1], {'name': 'MY_STRUCT', 'type': 'STRUCT'}) 124 | self.assertEqual(columns[2], {'name': 'MY_MAP', 'type': 'MAP'}) 125 | self.assertEqual(columns[3], {'name': 'MY_ARRAY', 'type': 'ARRAY'}) 126 | self.assertEqual(columns[4], {'name': 'TOTAL_AMOUNT', 'type': 'DOUBLE'}) 127 | self.assertEqual(columns[5], {'name': 'CUSTOMER_NAME', 'type': 'STRING'}) 128 | 129 | def 
test_process_row(self): 130 | parsed_header = [{'name': 'ORDER_ID', 'type': 'INTEGER'}, {'name': 'MY_STRUCT', 'type': 'STRUCT'}, {'name': 'MY_MAP', 'type': 'MAP'}, {'name': 'MY_ARRAY', 'type': 'ARRAY'}, {'name': 'TOTAL_AMOUNT', 'type': 'DOUBLE'}, {'name': 'CUSTOMER_NAME', 'type': 'STRING'}] 131 | row_str = """{"row":{"columns":[3,{"A":1,"B":"bbb"},{"x":3,"y":4},[1,2,3],43.0,"Palo Alto"]}},\n""" 132 | 133 | row_obj = utils.process_row(row_str, parsed_header) 134 | 135 | self.assertEqual(row_obj["ORDER_ID"], 3) 136 | self.assertEqual(row_obj["MY_STRUCT"], {"A": 1, "B": "bbb"}) 137 | self.assertEqual(row_obj["MY_MAP"], {"x": 3, "y": 4}) 138 | self.assertEqual(row_obj["MY_ARRAY"], [1, 2, 3]) 139 | self.assertEqual(row_obj["TOTAL_AMOUNT"], 43) 140 | self.assertEqual(row_obj["CUSTOMER_NAME"], "Palo Alto") 141 | 142 | def test_process_query_result(self): 143 | def mock_generator(): 144 | results = [1,2,3,4,5,6] 145 | for a in results: 146 | yield a 147 | 148 | results = utils.process_query_result(mock_generator()) 149 | 150 | first_result = next(results) 151 | self.assertEqual(first_result, 1) 152 | 153 | def test_process_query_result_parse_rows(self): 154 | def mock_generator(): 155 | header_str = """[{"header":{"queryId":"none","schema":"`ORDER_ID` INTEGER, `MY_STRUCT` STRUCT<`A` INTEGER, `B` STRING>, `MY_MAP` MAP, `MY_ARRAY` ARRAY, `TOTAL_AMOUNT` DOUBLE, `CUSTOMER_NAME` STRING"}},""" 156 | row_str = """{"row":{"columns":[3,{"A":1,"B":"bbb"},{"x":3,"y":4},[1,2,3],43.0,"Palo Alto"]}},\n""" 157 | 158 | results = [header_str, row_str] 159 | for a in results: 160 | yield a 161 | 162 | rows = utils.process_query_result(mock_generator(), True) 163 | 164 | first_row = next(rows) 165 | self.assertEqual(first_row["ORDER_ID"], 3) 166 | self.assertEqual(first_row["MY_STRUCT"], {"A": 1, "B": "bbb"}) 167 | self.assertEqual(first_row["MY_MAP"], {"x": 3, "y": 4}) 168 | self.assertEqual(first_row["MY_ARRAY"], [1, 2, 3]) 169 | self.assertEqual(first_row["TOTAL_AMOUNT"], 43) 170 | 
self.assertEqual(first_row["CUSTOMER_NAME"], "Palo Alto") -------------------------------------------------------------------------------- /tests/unit-tests/test_api.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | import responses 3 | import urllib 4 | 5 | from ksql.api import BaseAPI 6 | 7 | 8 | class TestBaseApi(unittest.TestCase): 9 | @responses.activate 10 | def test_base_api_query(self): 11 | responses.add(responses.POST, "http://dummy.org/query", body="test", status=200, stream=True) 12 | base = BaseAPI("http://dummy.org") 13 | result = base.query("so") 14 | for entry in result: 15 | entry 16 | """ 17 | with self.assertRaises(urllib.error.HTTPError): 18 | for entry in result: 19 | entry 20 | """ 21 | -------------------------------------------------------------------------------- /tests/unit-tests/utils_tests.py: -------------------------------------------------------------------------------- 1 | import unittest 2 | 3 | import ksql.utils 4 | 5 | 6 | class TestKSQLUtils(unittest.TestCase): 7 | """Test case for the client methods.""" 8 | 9 | def test_process_header(self): 10 | header_str = '[{"header":{"queryId":"query_1643298761990","schema":"`COMPANY_UID` STRING KEY, `USER_UID` STRING KEY, `USER_STATUS_ID` BIGINT KEY, `BONUS_PCT` STRING"}},\n' 11 | actual_columns = ksql.utils.parse_columns(header_str) 12 | expected_columns = [ 13 | {"name": "COMPANY_UID", "type": "STRING"}, 14 | {"name": "USER_UID", "type": "STRING"}, 15 | {"name": "USER_STATUS_ID", "type": "BIGINT"}, 16 | {"name": "BONUS_PCT", "type": "STRING"}, 17 | ] 18 | self.assertEqual(actual_columns, expected_columns) 19 | 20 | def test_process_row_with_no_dangling_closing_bracket(self): 21 | columns = [ 22 | {"name": "COMPANY_UID", "type": "STRING"}, 23 | {"name": "USER_UID", "type": "STRING"}, 24 | {"name": "USER_STATUS_ID", "type": "BIGINT"}, 25 | {"name": "BONUS_PCT", "type": "STRING"}, 26 | ] 27 | row = 
'{"row":{"columns":["f08c77db7","fcafb7c23",11508,"1.10976000000000000000"]}},\n' 28 | 29 | actual = ksql.utils.process_row(row, columns) 30 | expected = { 31 | "BONUS_PCT": "1.10976000000000000000", 32 | "COMPANY_UID": "f08c77db7", 33 | "USER_UID": "fcafb7c23", 34 | "USER_STATUS_ID": 11508, 35 | } 36 | self.assertEqual(actual, expected) 37 | 38 | def test_process_row_with_dangling_closing_bracket(self): 39 | columns = [ 40 | {"name": "COMPANY_UID", "type": "STRING"}, 41 | {"name": "USER_UID", "type": "STRING"}, 42 | {"name": "USER_STATUS_ID", "type": "BIGINT"}, 43 | {"name": "BONUS_PCT", "type": "STRING"}, 44 | ] 45 | row = '{"row":{"columns":["f08c77db7","fdcacbca1",13120,"1.09760000000000000000"]}}]' 46 | 47 | actual = ksql.utils.process_row(row, columns) 48 | expected = { 49 | "BONUS_PCT": "1.09760000000000000000", 50 | "COMPANY_UID": "f08c77db7", 51 | "USER_UID": "fdcacbca1", 52 | "USER_STATUS_ID": 13120, 53 | } 54 | self.assertEqual(actual, expected) 55 | 56 | 57 | def test_process_query_results(self): 58 | results = ( 59 | r 60 | for r in [ 61 | '[{"header":{"queryId":"query_1643298761990","schema":"`COMPANY_UID` STRING KEY, `USER_UID` STRING KEY, `USER_STATUS_ID` BIGINT KEY, `BONUS_PCT` STRING"}},\n', 62 | '{"row":{"columns":["f08c77db7","fcafb7c23",11508,"1.10976000000000000000"]}},\n', 63 | '{"row":{"columns":["f08c77db7","fdcacbca1",13120,"1.09760000000000000000"]}}]', 64 | ] 65 | ) 66 | 67 | actual = list(ksql.utils.process_query_result(results, return_objects=True)) 68 | expected = [ 69 | { 70 | "BONUS_PCT": "1.10976000000000000000", 71 | "COMPANY_UID": "f08c77db7", 72 | "USER_STATUS_ID": 11508, 73 | "USER_UID": "fcafb7c23", 74 | }, 75 | { 76 | "BONUS_PCT": "1.09760000000000000000", 77 | "COMPANY_UID": "f08c77db7", 78 | "USER_STATUS_ID": 13120, 79 | "USER_UID": "fdcacbca1", 80 | }, 81 | ] 82 | self.assertEqual(actual, expected) 83 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/bad_requests.yml: 
-------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "noi;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Connection: 8 | - close 9 | Content-Length: 10 | - '16' 11 | Content-Type: 12 | - application/json 13 | Host: 14 | - localhost:8088 15 | User-Agent: 16 | - Python-urllib/3.6 17 | method: POST 18 | uri: http://localhost:8088/ksql 19 | response: 20 | body: 21 | string: '{"@type":"statement_error","error_code":40001,"message":"line 1:1: 22 | mismatched input ''noi'' expecting {, ''SELECT'', ''CREATE'', ''INSERT'', 23 | ''DESCRIBE'', ''PRINT'', ''EXPLAIN'', ''SHOW'', ''LIST'', ''TERMINATE'', ''DROP'', 24 | ''SET'', ''UNSET''}","statementText":"noi;","entities":[]}' 25 | headers: 26 | connection: 27 | - close 28 | content-length: 29 | - '261' 30 | content-type: 31 | - application/json 32 | status: 33 | code: 400 34 | message: Bad Request 35 | version: 1 36 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/get_ksql_server.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: null 4 | headers: 5 | Accept: 6 | - '*/*' 7 | Accept-Encoding: 8 | - gzip, deflate 9 | Authorization: 10 | - Basic Tm9uZTpOb25l 11 | Connection: 12 | - keep-alive 13 | User-Agent: 14 | - python-requests/2.18.4 15 | method: GET 16 | uri: http://localhost:8088/info 17 | response: 18 | body: 19 | string: '{"KsqlServerInfo":{"version":"0.10.1","kafkaClusterId":"DkvN9zaxSjO5NxYIkhjKCQ","ksqlServiceId":"default_"}}' 20 | headers: 21 | content-length: 22 | - '108' 23 | content-type: 24 | - application/json 25 | status: 26 | code: 200 27 | message: OK 28 | version: 1 29 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/get_properties.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 
| - request: 3 | body: '{"ksql": "show properties;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Connection: 8 | - close 9 | Content-Length: 10 | - '28' 11 | Content-Type: 12 | - application/json 13 | Host: 14 | - localhost:8088 15 | User-Agent: 16 | - Python-urllib/3.6 17 | method: POST 18 | uri: http://localhost:8088/ksql 19 | response: 20 | body: 21 | string: '[{"@type":"properties","statementText":"show properties;","properties":[{"name":"ksql.extension.dir","scope":"KSQL","value":"ext"},{"name":"ksql.streams.cache.max.bytes.buffering","scope":"KSQL","value":"10000000"},{"name":"ksql.security.extension.class","scope":"KSQL","value":null},{"name":"ksql.transient.prefix","scope":"KSQL","value":"transient_"},{"name":"ksql.persistence.wrap.single.values","scope":"KSQL","value":"true"},{"name":"ksql.authorization.cache.expiry.time.secs","scope":"KSQL","value":"30"},{"name":"ksql.schema.registry.url","scope":"KSQL","value":"http://schema-registry:8081"},{"name":"ksql.streams.default.deserialization.exception.handler","scope":"KSQL","value":"io.confluent.ksql.errors.LogMetricAndContinueExceptionHandler"},{"name":"ksql.output.topic.name.prefix","scope":"KSQL","value":""},{"name":"ksql.query.pull.enable.standby.reads","scope":"KSQL","value":"false"},{"name":"ksql.connect.url","scope":"KSQL","value":"http://localhost:8083"},{"name":"ksql.service.id","scope":"KSQL","value":"default_"},{"name":"ksql.internal.topic.min.insync.replicas","scope":"KSQL","value":"1"},{"name":"ksql.streams.shutdown.timeout.ms","scope":"KSQL","value":"300000"},{"name":"ksql.internal.topic.replicas","scope":"KSQL","value":"1"},{"name":"ksql.insert.into.values.enabled","scope":"KSQL","value":"true"},{"name":"ksql.query.pull.max.allowed.offset.lag","scope":"KSQL","value":"9223372036854775807"},{"name":"ksql.query.pull.max.qps","scope":"KSQL","value":"2147483647"},{"name":"ksql.streams.default.production.exception.handler","scope":"KSQL","value":"io.confluent.ksql.errors.ProductionExceptionHa
ndlerUtil$LogAndFailProductionExceptionHandler"},{"name":"ksql.access.validator.enable","scope":"KSQL","value":"auto"},{"name":"ksql.streams.bootstrap.servers","scope":"KSQL","value":"broker:9092"},{"name":"ksql.streams.commit.interval.ms","scope":"KSQL","value":"2000"},{"name":"ksql.metric.reporters","scope":"KSQL","value":""},{"name":"ksql.query.pull.metrics.enabled","scope":"KSQL","value":"false"},{"name":"ksql.metrics.extension","scope":"KSQL","value":null},{"name":"ksql.streams.topology.optimization","scope":"KSQL","value":"all"},{"name":"ksql.hidden.topics","scope":"KSQL","value":"_confluent.*,__confluent.*,_schemas,__consumer_offsets,__transaction_state,connect-configs,connect-offsets,connect-status,connect-statuses"},{"name":"ksql.streams.num.stream.threads","scope":"KSQL","value":"4"},{"name":"ksql.timestamp.throw.on.invalid","scope":"KSQL","value":"false"},{"name":"ksql.authorization.cache.max.entries","scope":"KSQL","value":"10000"},{"name":"ksql.metrics.tags.custom","scope":"KSQL","value":""},{"name":"ksql.pull.queries.enable","scope":"KSQL","value":"true"},{"name":"ksql.udfs.enabled","scope":"KSQL","value":"true"},{"name":"ksql.udf.enable.security.manager","scope":"KSQL","value":"true"},{"name":"ksql.connect.worker.config","scope":"KSQL","value":"/etc/ksqldb/connect.properties"},{"name":"ksql.streams.application.server","scope":"KSQL","value":"http://ksqldb-server:8088"},{"name":"ksql.sink.window.change.log.additional.retention","scope":"KSQL","value":"1000000"},{"name":"ksql.readonly.topics","scope":"KSQL","value":"_confluent.*,__confluent.*,_schemas,__consumer_offsets,__transaction_state,connect-configs,connect-offsets,connect-status,connect-statuses"},{"name":"ksql.udf.collect.metrics","scope":"KSQL","value":"false"},{"name":"ksql.persistent.prefix","scope":"KSQL","value":"query_"},{"name":"ksql.query.persistent.active.limit","scope":"KSQL","value":"2147483647"},{"name":"ksql.error.classifier.regex","scope":"KSQL","value":""},{"name":"config.storage.
topic","scope":"EMBEDDED 22 | CONNECT WORKER","value":"ksql-connect-configs"},{"name":"status.storage.topic","scope":"EMBEDDED 23 | CONNECT WORKER","value":"ksql-connect-statuses"},{"name":"group.id","scope":"EMBEDDED 24 | CONNECT WORKER","value":"ksql-connect-cluster"},{"name":"bootstrap.servers","scope":"EMBEDDED 25 | CONNECT WORKER","value":"broker:9092"},{"name":"plugin.path","scope":"EMBEDDED 26 | CONNECT WORKER","value":"/usr/share/kafka/plugins"},{"name":"config.storage.replication.factor","scope":"EMBEDDED 27 | CONNECT WORKER","value":"1"},{"name":"value.converter.schema.registry.url","scope":"EMBEDDED 28 | CONNECT WORKER","value":"http://schema-registry:8081"},{"name":"status.storage.replication.factor","scope":"EMBEDDED 29 | CONNECT WORKER","value":"1"},{"name":"value.converter.schemas.enable","scope":"EMBEDDED 30 | CONNECT WORKER","value":"false"},{"name":"offset.storage.replication.factor","scope":"EMBEDDED 31 | CONNECT WORKER","value":"1"},{"name":"offset.storage.topic","scope":"EMBEDDED 32 | CONNECT WORKER","value":"ksql-connect-offsets"},{"name":"value.converter","scope":"EMBEDDED 33 | CONNECT WORKER","value":"io.confluent.connect.avro.AvroConverter"},{"name":"key.converter","scope":"EMBEDDED 34 | CONNECT WORKER","value":"io.confluent.connect.avro.AvroConverter"},{"name":"key.converter.schema.registry.url","scope":"EMBEDDED 35 | CONNECT 
WORKER","value":"http://schema-registry:8081"}],"overwrittenProperties":["config.storage.topic","status.storage.topic","group.id","bootstrap.servers","plugin.path","config.storage.replication.factor","value.converter.schema.registry.url","status.storage.replication.factor","value.converter.schemas.enable","offset.storage.replication.factor","offset.storage.topic","value.converter","key.converter","key.converter.schema.registry.url"],"defaultProperties":["ksql.extension.dir","ksql.security.extension.class","ksql.transient.prefix","ksql.persistence.wrap.single.values","ksql.authorization.cache.expiry.time.secs","ksql.output.topic.name.prefix","ksql.query.pull.enable.standby.reads","ksql.connect.url","ksql.service.id","ksql.internal.topic.min.insync.replicas","ksql.internal.topic.replicas","ksql.insert.into.values.enabled","ksql.query.pull.max.allowed.offset.lag","ksql.query.pull.max.qps","ksql.access.validator.enable","ksql.metric.reporters","ksql.query.pull.metrics.enabled","ksql.metrics.extension","ksql.hidden.topics","ksql.timestamp.throw.on.invalid","ksql.authorization.cache.max.entries","ksql.metrics.tags.custom","ksql.pull.queries.enable","ksql.udfs.enabled","ksql.udf.enable.security.manager","ksql.sink.window.change.log.additional.retention","ksql.readonly.topics","ksql.udf.collect.metrics","ksql.persistent.prefix","ksql.query.persistent.active.limit","ksql.error.classifier.regex"],"warnings":[]}]' 36 | headers: 37 | connection: 38 | - close 39 | content-length: 40 | - '6354' 41 | content-type: 42 | - application/json 43 | status: 44 | code: 200 45 | message: OK 46 | version: 1 47 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/healthcheck.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: null 4 | headers: 5 | Accept: 6 | - '*/*' 7 | Accept-Encoding: 8 | - gzip, deflate 9 | Connection: 10 | - keep-alive 11 | User-Agent: 12 | - 
python-requests/2.18.4 13 | method: GET 14 | uri: http://localhost:8088/status 15 | response: 16 | body: 17 | string: '{"commandStatuses":{"terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_33/execute":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_173/execute":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITHOUT_CONDITIONS_31/execute":"SUCCESS","stream/`FOO_TABLE`/create":"SUCCESS","stream/KSQL_PROCESSING_LOG/drop":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH_237/execute":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND_0/execute":"SUCCESS","stream/`CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH`/create":"SUCCESS","stream/`TEST_TABLE`/create":"SUCCESS","stream/`CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND`/create":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITHOUT_CONDITIONS_247/execute":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_71/execute":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_69/execute":"SUCCESS","stream/CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH/drop":"SUCCESS","stream/PREBID_TRAFFIC_LOG_VALID_STREAM/drop":"SUCCESS","stream/CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND/drop":"SUCCESS","stream/`CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH`/create":"SUCCESS","stream/`PREBID_TRAFFIC_LOG_TOTAL_STREAM`/create":"SUCCESS","stream/`KSQL_PROCESSING_LOG`/create":"SUCCESS","stream/CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH/drop":"SUCCESS","stream/`CREATE_STREAM_AS_WITHOUT_CONDITIONS`/create":"SUCCESS","stream/CREATE_STREAM_AS_WITHOUT_CONDITIONS/drop":"SUCCESS","stream/`CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH`/create":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND_227/execute":"SUCCESS","stream/PAGEVIEWS_ORIGINAL/drop":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH_129/execute"
:"SUCCESS","stream/TEST_KSQL_CREATE_STREAM/drop":"SUCCESS","stream/TEST_TABLE/drop":"SUCCESS","stream/CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH/drop":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITHOUT_CONDITIONS_131/execute":"SUCCESS","stream/PREBID_TRAFFIC_LOG_TOTAL_STREAM/drop":"SUCCESS","stream/`PAGEVIEWS_ORIGINAL`/create":"SUCCESS","stream/KSQL_PYTHON_TESTTEST_KSQL_CREATE_STREAM/drop":"SUCCESS","stream/`KSQL_PYTHON_TESTTEST_KSQL_CREATE_STREAM`/create":"SUCCESS","stream/FOO_TABLE/drop":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_7/execute":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND_127/execute":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH_29/execute":"SUCCESS","terminate/CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_133/execute":"SUCCESS","stream/`TEST_KSQL_CREATE_STREAM`/create":"SUCCESS"}}' 18 | headers: 19 | content-length: 20 | - '2804' 21 | content-type: 22 | - application/json 23 | status: 24 | code: 200 25 | message: OK 26 | version: 1 27 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_create_stream.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "CREATE STREAM ksql_python_testtest_ksql_create_stream (viewtime 4 | bigint, userid varchar, pageid varchar) WITH (kafka_topic=''exist_topic'', 5 | value_format=''DELIMITED'');"}' 6 | headers: 7 | Accept: 8 | - application/json 9 | Connection: 10 | - close 11 | Content-Length: 12 | - '198' 13 | Content-Type: 14 | - application/json 15 | Host: 16 | - localhost:8088 17 | User-Agent: 18 | - Python-urllib/3.6 19 | method: POST 20 | uri: http://localhost:8088/ksql 21 | response: 22 | body: 23 | string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_testtest_ksql_create_stream 24 | (viewtime bigint, userid varchar, pageid varchar) 
WITH 25 | (kafka_topic=''exist_topic'', value_format=''DELIMITED'');","commandId":"stream/`KSQL_PYTHON_TESTTEST_KSQL_CREATE_STREAM`/create","commandStatus":{"status":"SUCCESS","message":"Stream 26 | created"},"commandSequenceNumber":264,"warnings":[]}]' 27 | headers: 28 | connection: 29 | - close 30 | content-length: 31 | - '408' 32 | content-type: 33 | - application/json 34 | status: 35 | code: 200 36 | message: OK 37 | version: 1 38 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_create_stream_as_with_conditions_with_startwith.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED pageviews_original;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Connection: 8 | - close 9 | Content-Length: 10 | - '49' 11 | Content-Type: 12 | - application/json 13 | Host: 14 | - localhost:8088 15 | User-Agent: 16 | - Python-urllib/3.6 17 | method: POST 18 | uri: http://localhost:8088/ksql 19 | response: 20 | body: 21 | string: '{"@type":"statement_error","error_code":40001,"message":"Could not 22 | find STREAM/TABLE ''PAGEVIEWS_ORIGINAL'' in the Metastore","statementText":"DESCRIBE 23 | EXTENDED pageviews_original;","entities":[]}' 24 | headers: 25 | connection: 26 | - close 27 | content-length: 28 | - '194' 29 | content-type: 30 | - application/json 31 | status: 32 | code: 400 33 | message: Bad Request 34 | - request: 35 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n pageviews_original;"}' 36 | headers: 37 | Accept: 38 | - application/json 39 | Connection: 40 | - close 41 | Content-Length: 42 | - '63' 43 | Content-Type: 44 | - application/json 45 | Host: 46 | - localhost:8088 47 | User-Agent: 48 | - Python-urllib/3.6 49 | method: POST 50 | uri: http://localhost:8088/ksql 51 | response: 52 | body: 53 | string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF EXISTS\n 
pageviews_original;","commandId":"stream/PAGEVIEWS_ORIGINAL/drop","commandStatus":{"status":"SUCCESS","message":"Source 54 | `PAGEVIEWS_ORIGINAL` does not exist."},"commandSequenceNumber":220,"warnings":[]}]' 55 | headers: 56 | connection: 57 | - close 58 | content-length: 59 | - '277' 60 | content-type: 61 | - application/json 62 | status: 63 | code: 200 64 | message: OK 65 | - request: 66 | body: '{"ksql": "DESCRIBE EXTENDED create_stream_as_with_conditions_with_startwith;"}' 67 | headers: 68 | Accept: 69 | - application/json 70 | Connection: 71 | - close 72 | Content-Length: 73 | - '78' 74 | Content-Type: 75 | - application/json 76 | Host: 77 | - localhost:8088 78 | User-Agent: 79 | - Python-urllib/3.6 80 | method: POST 81 | uri: http://localhost:8088/ksql 82 | response: 83 | body: 84 | string: '{"@type":"statement_error","error_code":40001,"message":"Could not 85 | find STREAM/TABLE ''CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH'' in the 86 | Metastore","statementText":"DESCRIBE EXTENDED create_stream_as_with_conditions_with_startwith;","entities":[]}' 87 | headers: 88 | connection: 89 | - close 90 | content-length: 91 | - '252' 92 | content-type: 93 | - application/json 94 | status: 95 | code: 400 96 | message: Bad Request 97 | - request: 98 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n create_stream_as_with_conditions_with_startwith;"}' 99 | headers: 100 | Accept: 101 | - application/json 102 | Connection: 103 | - close 104 | Content-Length: 105 | - '92' 106 | Content-Type: 107 | - application/json 108 | Host: 109 | - localhost:8088 110 | User-Agent: 111 | - Python-urllib/3.6 112 | method: POST 113 | uri: http://localhost:8088/ksql 114 | response: 115 | body: 116 | string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF EXISTS\n create_stream_as_with_conditions_with_startwith;","commandId":"stream/CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH/drop","commandStatus":{"status":"SUCCESS","message":"Source 117 | 
`CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH` does not exist."},"commandSequenceNumber":222,"warnings":[]}]' 118 | headers: 119 | connection: 120 | - close 121 | content-length: 122 | - '364' 123 | content-type: 124 | - application/json 125 | status: 126 | code: 200 127 | message: OK 128 | - request: 129 | body: '{"ksql": "CREATE stream pageviews_original (name string, age bigint, userid 130 | string, pageid bigint) WITH (kafka_topic=''exist_topic'', value_format=''DELIMITED'');"}' 131 | headers: 132 | Accept: 133 | - application/json 134 | Connection: 135 | - close 136 | Content-Length: 137 | - '160' 138 | Content-Type: 139 | - application/json 140 | Host: 141 | - localhost:8088 142 | User-Agent: 143 | - Python-urllib/3.6 144 | method: POST 145 | uri: http://localhost:8088/ksql 146 | response: 147 | body: 148 | string: '[{"@type":"currentStatus","statementText":"CREATE stream pageviews_original 149 | (name string, age bigint, userid string, pageid bigint) WITH (kafka_topic=''exist_topic'', 150 | value_format=''DELIMITED'');","commandId":"stream/`PAGEVIEWS_ORIGINAL`/create","commandStatus":{"status":"SUCCESS","message":"Stream 151 | created"},"commandSequenceNumber":224,"warnings":[]}]' 152 | headers: 153 | connection: 154 | - close 155 | content-length: 156 | - '349' 157 | content-type: 158 | - application/json 159 | status: 160 | code: 200 161 | message: OK 162 | - request: 163 | body: '{"ksql": "CREATE stream create_stream_as_with_conditions_with_startwith 164 | WITH (kafka_topic=''create_stream_as_with_conditions_with_startwith'', value_format=''DELIMITED'', 165 | timestamp=''logtime'') AS SELECT rowtime as logtime, * FROM pageviews_original 166 | where userid = ''foo_%'';"}' 167 | headers: 168 | Accept: 169 | - application/json 170 | Connection: 171 | - close 172 | Content-Length: 173 | - '269' 174 | Content-Type: 175 | - application/json 176 | Host: 177 | - localhost:8088 178 | User-Agent: 179 | - Python-urllib/3.6 180 | method: POST 181 | uri: 
http://localhost:8088/ksql 182 | response: 183 | body: 184 | string: '[{"@type":"currentStatus","statementText":"CREATE STREAM CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH 185 | WITH (KAFKA_TOPIC=''create_stream_as_with_conditions_with_startwith'', PARTITIONS=1, 186 | REPLICAS=1, TIMESTAMP=''logtime'', VALUE_FORMAT=''DELIMITED'') AS SELECT\n PAGEVIEWS_ORIGINAL.ROWTIME 187 | LOGTIME,\n *\nFROM PAGEVIEWS_ORIGINAL PAGEVIEWS_ORIGINAL\nWHERE (PAGEVIEWS_ORIGINAL.USERID 188 | = ''foo_%'')\nEMIT CHANGES;","commandId":"stream/`CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH`/create","commandStatus":{"status":"SUCCESS","message":"Created 189 | query with ID CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_173"},"commandSequenceNumber":226,"warnings":[]}]' 190 | headers: 191 | connection: 192 | - close 193 | content-length: 194 | - '655' 195 | content-type: 196 | - application/json 197 | status: 198 | code: 200 199 | message: OK 200 | version: 1 201 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_create_stream_as_with_conditions_with_startwith_with_and.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "CREATE stream pageviews_original (name string, age bigint, userid 4 | string, pageid bigint) WITH (kafka_topic=''exist_topic'', value_format=''DELIMITED'');"}' 5 | headers: 6 | Accept: 7 | - application/json 8 | Connection: 9 | - close 10 | Content-Length: 11 | - '160' 12 | Content-Type: 13 | - application/json 14 | Host: 15 | - localhost:8088 16 | User-Agent: 17 | - Python-urllib/3.6 18 | method: POST 19 | uri: http://localhost:8088/ksql 20 | response: 21 | body: 22 | string: '[{"@type":"currentStatus","statementText":"CREATE stream pageviews_original 23 | (name string, age bigint, userid string, pageid bigint) WITH (kafka_topic=''exist_topic'', 24 | 
value_format=''DELIMITED'');","commandId":"stream/`PAGEVIEWS_ORIGINAL`/create","commandStatus":{"status":"SUCCESS","message":"Stream 25 | created"},"commandSequenceNumber":234,"warnings":[]}]' 26 | headers: 27 | connection: 28 | - close 29 | content-length: 30 | - '349' 31 | content-type: 32 | - application/json 33 | status: 34 | code: 200 35 | message: OK 36 | - request: 37 | body: '{"ksql": "CREATE stream create_stream_as_with_conditions_with_startwith_with_and 38 | WITH (kafka_topic=''create_stream_as_with_conditions_with_startwith_with_and'', 39 | value_format=''DELIMITED'', timestamp=''logtime'') AS SELECT rowtime as logtime, 40 | * FROM pageviews_original where userid = ''foo_%'' and age > 10;"}' 41 | headers: 42 | Accept: 43 | - application/json 44 | Connection: 45 | - close 46 | Content-Length: 47 | - '300' 48 | Content-Type: 49 | - application/json 50 | Host: 51 | - localhost:8088 52 | User-Agent: 53 | - Python-urllib/3.6 54 | method: POST 55 | uri: http://localhost:8088/ksql 56 | response: 57 | body: 58 | string: '[{"@type":"currentStatus","statementText":"CREATE STREAM CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND 59 | WITH (KAFKA_TOPIC=''create_stream_as_with_conditions_with_startwith_with_and'', 60 | PARTITIONS=1, REPLICAS=1, TIMESTAMP=''logtime'', VALUE_FORMAT=''DELIMITED'') 61 | AS SELECT\n PAGEVIEWS_ORIGINAL.ROWTIME LOGTIME,\n *\nFROM PAGEVIEWS_ORIGINAL 62 | PAGEVIEWS_ORIGINAL\nWHERE ((PAGEVIEWS_ORIGINAL.USERID = ''foo_%'') AND (PAGEVIEWS_ORIGINAL.AGE 63 | > 10))\nEMIT CHANGES;","commandId":"stream/`CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND`/create","commandStatus":{"status":"SUCCESS","message":"Created 64 | query with ID CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND_227"},"commandSequenceNumber":236,"warnings":[]}]' 65 | headers: 66 | connection: 67 | - close 68 | content-length: 69 | - '727' 70 | content-type: 71 | - application/json 72 | status: 73 | code: 200 74 | message: OK 75 | version: 1 76 | 
-------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_create_stream_as_with_conditions_without_startwith.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "CREATE stream pageviews_original (name string, age bigint, userid 4 | string, pageid bigint) WITH (kafka_topic=''exist_topic'', value_format=''DELIMITED'');"}' 5 | headers: 6 | Accept: 7 | - application/json 8 | Connection: 9 | - close 10 | Content-Length: 11 | - '160' 12 | Content-Type: 13 | - application/json 14 | Host: 15 | - localhost:8088 16 | User-Agent: 17 | - Python-urllib/3.6 18 | method: POST 19 | uri: http://localhost:8088/ksql 20 | response: 21 | body: 22 | string: '[{"@type":"currentStatus","statementText":"CREATE stream pageviews_original 23 | (name string, age bigint, userid string, pageid bigint) WITH (kafka_topic=''exist_topic'', 24 | value_format=''DELIMITED'');","commandId":"stream/`PAGEVIEWS_ORIGINAL`/create","commandStatus":{"status":"SUCCESS","message":"Stream 25 | created"},"commandSequenceNumber":244,"warnings":[]}]' 26 | headers: 27 | connection: 28 | - close 29 | content-length: 30 | - '349' 31 | content-type: 32 | - application/json 33 | status: 34 | code: 200 35 | message: OK 36 | - request: 37 | body: '{"ksql": "CREATE stream create_stream_as_with_conditions_without_startwith 38 | WITH (kafka_topic=''create_stream_as_with_conditions_without_startwith'', value_format=''DELIMITED'', 39 | timestamp=''logtime'') AS SELECT rowtime as logtime, * FROM pageviews_original 40 | where userid = ''foo'';"}' 41 | headers: 42 | Accept: 43 | - application/json 44 | Connection: 45 | - close 46 | Content-Length: 47 | - '273' 48 | Content-Type: 49 | - application/json 50 | Host: 51 | - localhost:8088 52 | User-Agent: 53 | - Python-urllib/3.6 54 | method: POST 55 | uri: http://localhost:8088/ksql 56 | response: 57 | body: 58 | string: 
'[{"@type":"currentStatus","statementText":"CREATE STREAM CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH 59 | WITH (KAFKA_TOPIC=''create_stream_as_with_conditions_without_startwith'', 60 | PARTITIONS=1, REPLICAS=1, TIMESTAMP=''logtime'', VALUE_FORMAT=''DELIMITED'') 61 | AS SELECT\n PAGEVIEWS_ORIGINAL.ROWTIME LOGTIME,\n *\nFROM PAGEVIEWS_ORIGINAL 62 | PAGEVIEWS_ORIGINAL\nWHERE (PAGEVIEWS_ORIGINAL.USERID = ''foo'')\nEMIT CHANGES;","commandId":"stream/`CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH`/create","commandStatus":{"status":"SUCCESS","message":"Created 63 | query with ID CSAS_CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH_237"},"commandSequenceNumber":246,"warnings":[]}]' 64 | headers: 65 | connection: 66 | - close 67 | content-length: 68 | - '665' 69 | content-type: 70 | - application/json 71 | status: 72 | code: 200 73 | message: OK 74 | version: 1 75 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_create_stream_as_with_wrong_timestamp.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED prebid_traffic_log_total_stream;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Connection: 8 | - close 9 | Content-Length: 10 | - '62' 11 | Content-Type: 12 | - application/json 13 | Host: 14 | - localhost:8088 15 | User-Agent: 16 | - Python-urllib/3.6 17 | method: POST 18 | uri: http://localhost:8088/ksql 19 | response: 20 | body: 21 | string: '{"@type":"statement_error","error_code":40001,"message":"Could not 22 | find STREAM/TABLE ''PREBID_TRAFFIC_LOG_TOTAL_STREAM'' in the Metastore","statementText":"DESCRIBE 23 | EXTENDED prebid_traffic_log_total_stream;","entities":[]}' 24 | headers: 25 | connection: 26 | - close 27 | content-length: 28 | - '220' 29 | content-type: 30 | - application/json 31 | status: 32 | code: 400 33 | message: Bad Request 34 | - request: 35 | body: '{"ksql": "DROP\n 
STREAM IF EXISTS\n prebid_traffic_log_total_stream;"}' 36 | headers: 37 | Accept: 38 | - application/json 39 | Connection: 40 | - close 41 | Content-Length: 42 | - '76' 43 | Content-Type: 44 | - application/json 45 | Host: 46 | - localhost:8088 47 | User-Agent: 48 | - Python-urllib/3.6 49 | method: POST 50 | uri: http://localhost:8088/ksql 51 | response: 52 | body: 53 | string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF EXISTS\n prebid_traffic_log_total_stream;","commandId":"stream/PREBID_TRAFFIC_LOG_TOTAL_STREAM/drop","commandStatus":{"status":"SUCCESS","message":"Source 54 | `PREBID_TRAFFIC_LOG_TOTAL_STREAM` does not exist."},"commandSequenceNumber":268,"warnings":[]}]' 55 | headers: 56 | connection: 57 | - close 58 | content-length: 59 | - '316' 60 | content-type: 61 | - application/json 62 | status: 63 | code: 200 64 | message: OK 65 | - request: 66 | body: '{"ksql": "DESCRIBE EXTENDED prebid_traffic_log_valid_stream;"}' 67 | headers: 68 | Accept: 69 | - application/json 70 | Connection: 71 | - close 72 | Content-Length: 73 | - '62' 74 | Content-Type: 75 | - application/json 76 | Host: 77 | - localhost:8088 78 | User-Agent: 79 | - Python-urllib/3.6 80 | method: POST 81 | uri: http://localhost:8088/ksql 82 | response: 83 | body: 84 | string: '{"@type":"statement_error","error_code":40001,"message":"Could not 85 | find STREAM/TABLE ''PREBID_TRAFFIC_LOG_VALID_STREAM'' in the Metastore","statementText":"DESCRIBE 86 | EXTENDED prebid_traffic_log_valid_stream;","entities":[]}' 87 | headers: 88 | connection: 89 | - close 90 | content-length: 91 | - '220' 92 | content-type: 93 | - application/json 94 | status: 95 | code: 400 96 | message: Bad Request 97 | - request: 98 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n prebid_traffic_log_valid_stream;"}' 99 | headers: 100 | Accept: 101 | - application/json 102 | Connection: 103 | - close 104 | Content-Length: 105 | - '76' 106 | Content-Type: 107 | - application/json 108 | Host: 109 | - localhost:8088 110 | 
User-Agent: 111 | - Python-urllib/3.6 112 | method: POST 113 | uri: http://localhost:8088/ksql 114 | response: 115 | body: 116 | string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF EXISTS\n prebid_traffic_log_valid_stream;","commandId":"stream/PREBID_TRAFFIC_LOG_VALID_STREAM/drop","commandStatus":{"status":"SUCCESS","message":"Source 117 | `PREBID_TRAFFIC_LOG_VALID_STREAM` does not exist."},"commandSequenceNumber":270,"warnings":[]}]' 118 | headers: 119 | connection: 120 | - close 121 | content-length: 122 | - '316' 123 | content-type: 124 | - application/json 125 | status: 126 | code: 200 127 | message: OK 128 | - request: 129 | body: '{"ksql": "CREATE stream prebid_traffic_log_total_stream (name string, age 130 | bigint, userid string, pageid bigint) WITH (kafka_topic=''exist_topic'', value_format=''DELIMITED'');"}' 131 | headers: 132 | Accept: 133 | - application/json 134 | Connection: 135 | - close 136 | Content-Length: 137 | - '173' 138 | Content-Type: 139 | - application/json 140 | Host: 141 | - localhost:8088 142 | User-Agent: 143 | - Python-urllib/3.6 144 | method: POST 145 | uri: http://localhost:8088/ksql 146 | response: 147 | body: 148 | string: '[{"@type":"currentStatus","statementText":"CREATE stream prebid_traffic_log_total_stream 149 | (name string, age bigint, userid string, pageid bigint) WITH (kafka_topic=''exist_topic'', 150 | value_format=''DELIMITED'');","commandId":"stream/`PREBID_TRAFFIC_LOG_TOTAL_STREAM`/create","commandStatus":{"status":"SUCCESS","message":"Stream 151 | created"},"commandSequenceNumber":272,"warnings":[]}]' 152 | headers: 153 | connection: 154 | - close 155 | content-length: 156 | - '375' 157 | content-type: 158 | - application/json 159 | status: 160 | code: 200 161 | message: OK 162 | - request: 163 | body: '{"ksql": "CREATE stream prebid_traffic_log_valid_stream WITH (kafka_topic=''prebid_traffic_log_valid_topic'', 164 | value_format=''DELIMITED'', timestamp=''foo'') AS SELECT * FROM 
prebid_traffic_log_total_stream;"}' 165 | headers: 166 | Accept: 167 | - application/json 168 | Connection: 169 | - close 170 | Content-Length: 171 | - '202' 172 | Content-Type: 173 | - application/json 174 | Host: 175 | - localhost:8088 176 | User-Agent: 177 | - Python-urllib/3.6 178 | method: POST 179 | uri: http://localhost:8088/ksql 180 | response: 181 | body: 182 | string: '{"@type":"statement_error","error_code":40001,"message":"The TIMESTAMP 183 | column set in the WITH clause does not exist in the schema: ''FOO''","statementText":"CREATE 184 | STREAM PREBID_TRAFFIC_LOG_VALID_STREAM WITH (KAFKA_TOPIC=''prebid_traffic_log_valid_topic'', 185 | PARTITIONS=1, REPLICAS=1, TIMESTAMP=''foo'', VALUE_FORMAT=''DELIMITED'') AS 186 | SELECT *\nFROM PREBID_TRAFFIC_LOG_TOTAL_STREAM PREBID_TRAFFIC_LOG_TOTAL_STREAM\nEMIT 187 | CHANGES;","entities":[]}' 188 | headers: 189 | connection: 190 | - close 191 | content-length: 192 | - '434' 193 | content-type: 194 | - application/json 195 | status: 196 | code: 400 197 | message: Bad Request 198 | version: 1 199 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_create_stream_as_without_conditions.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "CREATE stream pageviews_original (name string, age bigint, userid 4 | string, pageid bigint) WITH (kafka_topic=''exist_topic'', value_format=''DELIMITED'');"}' 5 | headers: 6 | Accept: 7 | - application/json 8 | Connection: 9 | - close 10 | Content-Length: 11 | - '160' 12 | Content-Type: 13 | - application/json 14 | Host: 15 | - localhost:8088 16 | User-Agent: 17 | - Python-urllib/3.6 18 | method: POST 19 | uri: http://localhost:8088/ksql 20 | response: 21 | body: 22 | string: '[{"@type":"currentStatus","statementText":"CREATE stream pageviews_original 23 | (name string, age bigint, userid string, pageid bigint) WITH 
(kafka_topic=''exist_topic'', 24 | value_format=''DELIMITED'');","commandId":"stream/`PAGEVIEWS_ORIGINAL`/create","commandStatus":{"status":"SUCCESS","message":"Stream 25 | created"},"commandSequenceNumber":254,"warnings":[]}]' 26 | headers: 27 | connection: 28 | - close 29 | content-length: 30 | - '349' 31 | content-type: 32 | - application/json 33 | status: 34 | code: 200 35 | message: OK 36 | - request: 37 | body: '{"ksql": "CREATE stream create_stream_as_without_conditions WITH (kafka_topic=''create_stream_as_without_conditions'', 38 | value_format=''DELIMITED'', timestamp=''logtime'') AS SELECT rowtime as logtime, 39 | * FROM pageviews_original;"}' 40 | headers: 41 | Accept: 42 | - application/json 43 | Connection: 44 | - close 45 | Content-Length: 46 | - '222' 47 | Content-Type: 48 | - application/json 49 | Host: 50 | - localhost:8088 51 | User-Agent: 52 | - Python-urllib/3.6 53 | method: POST 54 | uri: http://localhost:8088/ksql 55 | response: 56 | body: 57 | string: '[{"@type":"currentStatus","statementText":"CREATE STREAM CREATE_STREAM_AS_WITHOUT_CONDITIONS 58 | WITH (KAFKA_TOPIC=''create_stream_as_without_conditions'', PARTITIONS=1, REPLICAS=1, 59 | TIMESTAMP=''logtime'', VALUE_FORMAT=''DELIMITED'') AS SELECT\n PAGEVIEWS_ORIGINAL.ROWTIME 60 | LOGTIME,\n *\nFROM PAGEVIEWS_ORIGINAL PAGEVIEWS_ORIGINAL\nEMIT CHANGES;","commandId":"stream/`CREATE_STREAM_AS_WITHOUT_CONDITIONS`/create","commandStatus":{"status":"SUCCESS","message":"Created 61 | query with ID CSAS_CREATE_STREAM_AS_WITHOUT_CONDITIONS_247"},"commandSequenceNumber":256,"warnings":[]}]' 62 | headers: 63 | connection: 64 | - close 65 | content-length: 66 | - '562' 67 | content-type: 68 | - application/json 69 | status: 70 | code: 200 71 | message: OK 72 | version: 1 73 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_create_stream_by_builder.yml: -------------------------------------------------------------------------------- 1 | 
interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED test_table;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Connection: 8 | - close 9 | Content-Length: 10 | - '41' 11 | Content-Type: 12 | - application/json 13 | Host: 14 | - localhost:8088 15 | User-Agent: 16 | - Python-urllib/3.6 17 | method: POST 18 | uri: http://localhost:8088/ksql 19 | response: 20 | body: 21 | string: '{"@type":"statement_error","error_code":40001,"message":"Could not 22 | find STREAM/TABLE ''TEST_TABLE'' in the Metastore","statementText":"DESCRIBE 23 | EXTENDED test_table;","entities":[]}' 24 | headers: 25 | connection: 26 | - close 27 | content-length: 28 | - '178' 29 | content-type: 30 | - application/json 31 | status: 32 | code: 400 33 | message: Bad Request 34 | - request: 35 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n test_table;"}' 36 | headers: 37 | Accept: 38 | - application/json 39 | Connection: 40 | - close 41 | Content-Length: 42 | - '55' 43 | Content-Type: 44 | - application/json 45 | Host: 46 | - localhost:8088 47 | User-Agent: 48 | - Python-urllib/3.6 49 | method: POST 50 | uri: http://localhost:8088/ksql 51 | response: 52 | body: 53 | string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF EXISTS\n test_table;","commandId":"stream/TEST_TABLE/drop","commandStatus":{"status":"SUCCESS","message":"Source 54 | `TEST_TABLE` does not exist."},"commandSequenceNumber":276,"warnings":[]}]' 55 | headers: 56 | connection: 57 | - close 58 | content-length: 59 | - '253' 60 | content-type: 61 | - application/json 62 | status: 63 | code: 200 64 | message: OK 65 | - request: 66 | body: '{"ksql": "CREATE stream test_table (viewtime bigint, userid varchar, pageid 67 | varchar) WITH (kafka_topic=''exist_topic'', value_format=''DELIMITED'');"}' 68 | headers: 69 | Accept: 70 | - application/json 71 | Connection: 72 | - close 73 | Content-Length: 74 | - '146' 75 | Content-Type: 76 | - application/json 77 | Host: 78 | - localhost:8088 79 | User-Agent: 80 | - 
Python-urllib/3.6 81 | method: POST 82 | uri: http://localhost:8088/ksql 83 | response: 84 | body: 85 | string: '[{"@type":"currentStatus","statementText":"CREATE stream test_table 86 | (viewtime bigint, userid varchar, pageid varchar) WITH (kafka_topic=''exist_topic'', 87 | value_format=''DELIMITED'');","commandId":"stream/`TEST_TABLE`/create","commandStatus":{"status":"SUCCESS","message":"Stream 88 | created"},"commandSequenceNumber":278,"warnings":[]}]' 89 | headers: 90 | connection: 91 | - close 92 | content-length: 93 | - '327' 94 | content-type: 95 | - application/json 96 | status: 97 | code: 200 98 | message: OK 99 | version: 1 100 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_create_stream_by_builder_api.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED test_table;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Connection: 8 | - close 9 | Content-Length: 10 | - '41' 11 | Content-Type: 12 | - application/json 13 | Host: 14 | - localhost:8088 15 | User-Agent: 16 | - Python-urllib/3.6 17 | method: POST 18 | uri: http://localhost:8088/ksql 19 | response: 20 | body: 21 | string: '{"@type":"statement_error","error_code":40001,"message":"Could not 22 | find STREAM/TABLE ''TEST_TABLE'' in the Metastore","statementText":"DESCRIBE 23 | EXTENDED test_table;","entities":[]}' 24 | headers: 25 | connection: 26 | - close 27 | content-length: 28 | - '178' 29 | content-type: 30 | - application/json 31 | status: 32 | code: 400 33 | message: Bad Request 34 | - request: 35 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n test_table;"}' 36 | headers: 37 | Accept: 38 | - application/json 39 | Connection: 40 | - close 41 | Content-Length: 42 | - '55' 43 | Content-Type: 44 | - application/json 45 | Host: 46 | - localhost:8088 47 | User-Agent: 48 | - Python-urllib/3.6 49 | method: POST 50 | uri: 
http://localhost:8088/ksql 51 | response: 52 | body: 53 | string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF EXISTS\n test_table;","commandId":"stream/TEST_TABLE/drop","commandStatus":{"status":"SUCCESS","message":"Source 54 | `TEST_TABLE` does not exist."},"commandSequenceNumber":282,"warnings":[]}]' 55 | headers: 56 | connection: 57 | - close 58 | content-length: 59 | - '253' 60 | content-type: 61 | - application/json 62 | status: 63 | code: 200 64 | message: OK 65 | - request: 66 | body: '{"ksql": "CREATE stream test_table (viewtime bigint, userid varchar, pageid 67 | varchar) WITH (kafka_topic=''exist_topic'', value_format=''DELIMITED'');"}' 68 | headers: 69 | Accept: 70 | - application/json 71 | Connection: 72 | - close 73 | Content-Length: 74 | - '146' 75 | Content-Type: 76 | - application/json 77 | Host: 78 | - localhost:8088 79 | User-Agent: 80 | - Python-urllib/3.6 81 | method: POST 82 | uri: http://localhost:8088/ksql 83 | response: 84 | body: 85 | string: '[{"@type":"currentStatus","statementText":"CREATE stream test_table 86 | (viewtime bigint, userid varchar, pageid varchar) WITH (kafka_topic=''exist_topic'', 87 | value_format=''DELIMITED'');","commandId":"stream/`TEST_TABLE`/create","commandStatus":{"status":"SUCCESS","message":"Stream 88 | created"},"commandSequenceNumber":284,"warnings":[]}]' 89 | headers: 90 | connection: 91 | - close 92 | content-length: 93 | - '327' 94 | content-type: 95 | - application/json 96 | status: 97 | code: 200 98 | message: OK 99 | version: 1 100 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_show_table.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "show tables;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Connection: 8 | - close 9 | Content-Length: 10 | - '24' 11 | Content-Type: 12 | - application/json 13 | Host: 14 | - localhost:8088 15 | 
User-Agent: 16 | - Python-urllib/3.6 17 | method: POST 18 | uri: http://localhost:8088/ksql 19 | response: 20 | body: 21 | string: '[{"@type":"tables","statementText":"show tables;","tables":[],"warnings":[]}]' 22 | headers: 23 | connection: 24 | - close 25 | content-length: 26 | - '77' 27 | content-type: 28 | - application/json 29 | status: 30 | code: 200 31 | message: OK 32 | version: 1 33 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_show_table_with_api_key.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "show tables;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Authorization: 8 | - Basic {} 9 | Connection: 10 | - close 11 | Content-Length: 12 | - '24' 13 | Content-Type: 14 | - application/json 15 | Host: 16 | - localhost:8088 17 | User-Agent: 18 | - Python-urllib/3.8 19 | method: POST 20 | uri: http://localhost:8088/ksql 21 | response: 22 | body: 23 | string: '[{"@type":"tables","statementText":"show tables;","tables":[],"warnings":[]}]' 24 | headers: 25 | connection: 26 | - close 27 | content-length: 28 | - '77' 29 | content-type: 30 | - application/json 31 | status: 32 | code: 200 33 | message: OK 34 | version: 1 35 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/ksql_topic_already_registered.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED foo_table;"}' 4 | headers: 5 | Accept: 6 | - application/json 7 | Connection: 8 | - close 9 | Content-Length: 10 | - '40' 11 | Content-Type: 12 | - application/json 13 | Host: 14 | - localhost:8088 15 | User-Agent: 16 | - Python-urllib/3.6 17 | method: POST 18 | uri: http://localhost:8088/ksql 19 | response: 20 | body: 21 | string: '{"@type":"statement_error","error_code":40001,"message":"Could not 
22 | find STREAM/TABLE ''FOO_TABLE'' in the Metastore","statementText":"DESCRIBE 23 | EXTENDED foo_table;","entities":[]}' 24 | headers: 25 | connection: 26 | - close 27 | content-length: 28 | - '176' 29 | content-type: 30 | - application/json 31 | status: 32 | code: 400 33 | message: Bad Request 34 | - request: 35 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n foo_table;"}' 36 | headers: 37 | Accept: 38 | - application/json 39 | Connection: 40 | - close 41 | Content-Length: 42 | - '54' 43 | Content-Type: 44 | - application/json 45 | Host: 46 | - localhost:8088 47 | User-Agent: 48 | - Python-urllib/3.6 49 | method: POST 50 | uri: http://localhost:8088/ksql 51 | response: 52 | body: 53 | string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF EXISTS\n foo_table;","commandId":"stream/FOO_TABLE/drop","commandStatus":{"status":"SUCCESS","message":"Source 54 | `FOO_TABLE` does not exist."},"commandSequenceNumber":292,"warnings":[]}]' 55 | headers: 56 | connection: 57 | - close 58 | content-length: 59 | - '250' 60 | content-type: 61 | - application/json 62 | status: 63 | code: 200 64 | message: OK 65 | - request: 66 | body: '{"ksql": "CREATE stream foo_table (name string, age bigint) WITH (kafka_topic=''exist_topic'', 67 | value_format=''DELIMITED'');"}' 68 | headers: 69 | Accept: 70 | - application/json 71 | Connection: 72 | - close 73 | Content-Length: 74 | - '121' 75 | Content-Type: 76 | - application/json 77 | Host: 78 | - localhost:8088 79 | User-Agent: 80 | - Python-urllib/3.6 81 | method: POST 82 | uri: http://localhost:8088/ksql 83 | response: 84 | body: 85 | string: '[{"@type":"currentStatus","statementText":"CREATE stream foo_table 86 | (name string, age bigint) WITH (kafka_topic=''exist_topic'', value_format=''DELIMITED'');","commandId":"stream/`FOO_TABLE`/create","commandStatus":{"status":"SUCCESS","message":"Stream 87 | created"},"commandSequenceNumber":294,"warnings":[]}]' 88 | headers: 89 | connection: 90 | - close 91 | content-length: 92 | - 
'301' 93 | content-type: 94 | - application/json 95 | status: 96 | code: 200 97 | message: OK 98 | - request: 99 | body: '{"ksql": "CREATE stream foo_table (name string, age bigint) WITH (kafka_topic=''exist_topic'', 100 | value_format=''DELIMITED'');"}' 101 | headers: 102 | Accept: 103 | - application/json 104 | Connection: 105 | - close 106 | Content-Length: 107 | - '121' 108 | Content-Type: 109 | - application/json 110 | Host: 111 | - localhost:8088 112 | User-Agent: 113 | - Python-urllib/3.6 114 | method: POST 115 | uri: http://localhost:8088/ksql 116 | response: 117 | body: 118 | string: '{"@type":"statement_error","error_code":40001,"message":"Cannot add 119 | stream ''FOO_TABLE'': A stream with the same name already exists","statementText":"CREATE 120 | stream foo_table (name string, age bigint) WITH (kafka_topic=''exist_topic'', 121 | value_format=''DELIMITED'');","entities":[]}' 122 | headers: 123 | connection: 124 | - close 125 | content-length: 126 | - '274' 127 | content-type: 128 | - application/json 129 | status: 130 | code: 400 131 | message: Bad Request 132 | version: 1 133 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/raise_create_error_no_topic.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "CREATE stream foo_table (name string, age bigint) WITH (kafka_topic=''this_topic_is_not_exist'', 4 | value_format=''DELIMITED'');"}' 5 | headers: 6 | Accept: 7 | - application/json 8 | Connection: 9 | - close 10 | Content-Length: 11 | - '133' 12 | Content-Type: 13 | - application/json 14 | Host: 15 | - localhost:8088 16 | User-Agent: 17 | - Python-urllib/3.6 18 | method: POST 19 | uri: http://localhost:8088/ksql 20 | response: 21 | body: 22 | string: '{"@type":"generic_error","error_code":40000,"message":"Topic ''this_topic_is_not_exist'' 23 | does not exist. 
If you want to create a new topic for the stream/table please 24 | re-run the statement providing the required ''PARTITIONS'' configuration in 25 | the WITH clause (and optionally ''REPLICAS''). For example: CREATE STREAM 26 | FOO_TABLE (NAME STRING, AGE BIGINT) WITH (KAFKA_TOPIC=''this_topic_is_not_exist'', 27 | PARTITIONS=2, REPLICAS=1, VALUE_FORMAT=''DELIMITED'');"}' 28 | headers: 29 | connection: 30 | - close 31 | content-length: 32 | - '446' 33 | content-type: 34 | - application/json 35 | status: 36 | code: 400 37 | message: Bad Request 38 | version: 1 39 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/utils_test_drop_all_streams.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_all_streams;"}' 4 | headers: 5 | Accept: [application/json] 6 | Accept-Encoding: ['gzip, deflate'] 7 | Connection: [keep-alive] 8 | Content-Length: ['69'] 9 | Content-Type: [application/json] 10 | User-Agent: [python-requests/2.19.1] 11 | method: POST 12 | uri: http://localhost:8088/ksql 13 | response: 14 | body: {string: '{"@type":"statement_error","error_code":40001,"message":"Could 15 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS'' in the Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 16 | 
Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","org.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","org.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.ServletCont
ainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)","org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118
)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 17 | EXTENDED ksql_python_test_test_drop_all_streams;","entities":[]}'} 18 | headers: 19 | Content-Type: [application/json] 20 | Date: ['Fri, 20 Jul 2018 20:10:17 GMT'] 21 | Server: [Jetty(9.4.10.v20180503)] 22 | status: {code: 400, message: Bad Request} 23 | - request: 24 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n ksql_python_test_test_drop_all_streams;"}' 25 | headers: 26 | Accept: [application/json] 27 | Accept-Encoding: ['gzip, deflate'] 28 | Connection: [keep-alive] 29 | Content-Length: ['83'] 30 | Content-Type: [application/json] 31 | User-Agent: [python-requests/2.19.1] 32 | method: POST 33 | uri: http://localhost:8088/ksql 34 | response: 35 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 36 | EXISTS\n ksql_python_test_test_drop_all_streams;","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS/drop","commandStatus":{"status":"SUCCESS","message":"Source 37 | KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS does not exist."}}]'} 38 | headers: 39 | Content-Type: [application/json] 40 | Date: ['Fri, 20 Jul 2018 20:10:17 GMT'] 41 | Server: [Jetty(9.4.10.v20180503)] 42 | status: {code: 200, message: OK} 43 | - request: 44 | body: '{"ksql": "CREATE STREAM ksql_python_test_test_drop_all_streams (viewtime 45 | bigint, userid varchar, pageid varchar) 
WITH (kafka_topic=''ksql_python_test_exist_topic'', 46 | value_format=''DELIMITED'');"}' 47 | headers: 48 | Accept: [application/json] 49 | Accept-Encoding: ['gzip, deflate'] 50 | Connection: [keep-alive] 51 | Content-Length: ['215'] 52 | Content-Type: [application/json] 53 | User-Agent: [python-requests/2.19.1] 54 | method: POST 55 | uri: http://localhost:8088/ksql 56 | response: 57 | body: {string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_test_test_drop_all_streams 58 | (viewtime bigint, userid varchar, pageid varchar) WITH 59 | (kafka_topic=''ksql_python_test_exist_topic'', value_format=''DELIMITED'');","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS/create","commandStatus":{"status":"SUCCESS","message":"Stream 60 | created"}}]'} 61 | headers: 62 | Content-Type: [application/json] 63 | Date: ['Fri, 20 Jul 2018 20:10:17 GMT'] 64 | Server: [Jetty(9.4.10.v20180503)] 65 | status: {code: 200, message: OK} 66 | - request: 67 | body: '{"ksql": "SHOW STREAMS;"}' 68 | headers: 69 | Accept: [application/json] 70 | Accept-Encoding: ['gzip, deflate'] 71 | Connection: [keep-alive] 72 | Content-Length: ['25'] 73 | Content-Type: [application/json] 74 | User-Agent: [python-requests/2.19.1] 75 | method: POST 76 | uri: http://localhost:8088/ksql 77 | response: 78 | body: {string: '[{"@type":"streams","statementText":"SHOW 
STREAMS;","streams":[{"type":"STREAM","name":"TEST_TABLE","topic":"exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH","topic":"create_stream_as_with_conditions_without_startwith","format":"DELIMITED"},{"type":"STREAM","name":"PREBID_TRAFFIC_LOG_TOTAL_STREAM","topic":"exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"PAGEVIEWS_ORIGINAL","topic":"exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"CREATE_STREAM_AS_WITHOUT_CONDITIONS","topic":"create_stream_as_without_conditions","format":"DELIMITED"},{"type":"STREAM","name":"KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS","topic":"ksql_python_test_exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"FOO_TABLE","topic":"exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND","topic":"create_stream_as_with_conditions_with_startwith_with_and","format":"DELIMITED"},{"type":"STREAM","name":"CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH","topic":"create_stream_as_with_conditions_with_startwith","format":"DELIMITED"}]}]'} 79 | headers: 80 | Content-Type: [application/json] 81 | Date: ['Fri, 20 Jul 2018 20:10:18 GMT'] 82 | Server: [Jetty(9.4.10.v20180503)] 83 | status: {code: 200, message: OK} 84 | - request: 85 | body: '{"ksql": "DESCRIBE EXTENDED KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS;"}' 86 | headers: 87 | Accept: [application/json] 88 | Accept-Encoding: ['gzip, deflate'] 89 | Connection: [keep-alive] 90 | Content-Length: ['69'] 91 | Content-Type: [application/json] 92 | User-Agent: [python-requests/2.19.1] 93 | method: POST 94 | uri: http://localhost:8088/ksql 95 | response: 96 | body: {string: '[{"@type":"sourceDescription","statementText":"DESCRIBE EXTENDED 97 | 
KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS;","sourceDescription":{"name":"KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS","readQueries":[],"writeQueries":[],"fields":[{"name":"ROWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"ROWKEY","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"VIEWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"USERID","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"PAGEID","schema":{"type":"STRING","fields":null,"memberSchema":null}}],"type":"STREAM","key":"","timestamp":"","statistics":"","errorStats":"","extended":true,"format":"DELIMITED","topic":"ksql_python_test_exist_topic","partitions":1,"replication":1}}]'} 98 | headers: 99 | Content-Type: [application/json] 100 | Date: ['Fri, 20 Jul 2018 20:10:18 GMT'] 101 | Server: [Jetty(9.4.10.v20180503)] 102 | status: {code: 200, message: OK} 103 | - request: 104 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS;"}' 105 | headers: 106 | Accept: [application/json] 107 | Accept-Encoding: ['gzip, deflate'] 108 | Connection: [keep-alive] 109 | Content-Length: ['83'] 110 | Content-Type: [application/json] 111 | User-Agent: [python-requests/2.19.1] 112 | method: POST 113 | uri: http://localhost:8088/ksql 114 | response: 115 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 116 | EXISTS\n KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS;","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS/drop","commandStatus":{"status":"SUCCESS","message":"Source 117 | KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS was dropped. 
"}}]'} 118 | headers: 119 | Content-Type: [application/json] 120 | Date: ['Fri, 20 Jul 2018 20:10:18 GMT'] 121 | Server: [Jetty(9.4.10.v20180503)] 122 | status: {code: 200, message: OK} 123 | - request: 124 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_all_streams;"}' 125 | headers: 126 | Accept: [application/json] 127 | Accept-Encoding: ['gzip, deflate'] 128 | Connection: [keep-alive] 129 | Content-Length: ['69'] 130 | Content-Type: [application/json] 131 | User-Agent: [python-requests/2.19.1] 132 | method: POST 133 | uri: http://localhost:8088/ksql 134 | response: 135 | body: {string: '{"@type":"statement_error","error_code":40001,"message":"Could 136 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_DROP_ALL_STREAMS'' in the Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 137 | 
Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","org.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","org.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.ServletCont
ainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)","org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118
)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 138 | EXTENDED ksql_python_test_test_drop_all_streams;","entities":[]}'} 139 | headers: 140 | Content-Type: [application/json] 141 | Date: ['Fri, 20 Jul 2018 20:10:18 GMT'] 142 | Server: [Jetty(9.4.10.v20180503)] 143 | status: {code: 400, message: Bad Request} 144 | version: 1 145 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/utils_test_drop_stream.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_stream;"}' 4 | headers: 5 | Accept: [application/json] 6 | Accept-Encoding: ['gzip, deflate'] 7 | Connection: [keep-alive] 8 | Content-Length: ['64'] 9 | Content-Type: [application/json] 10 | User-Agent: [python-requests/2.19.1] 11 | method: POST 12 | uri: http://localhost:8088/ksql 13 | response: 14 | body: {string: '{"@type":"statement_error","error_code":40001,"message":"Could 15 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_DROP_STREAM'' in the 
Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 16 | Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","org.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","or
g.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)","org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclips
e.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 17 | EXTENDED ksql_python_test_test_drop_stream;","entities":[]}'} 18 | headers: 19 | Content-Type: [application/json] 20 | Date: ['Fri, 20 Jul 2018 20:10:18 GMT'] 21 | Server: [Jetty(9.4.10.v20180503)] 22 | status: {code: 400, message: Bad Request} 23 | - request: 24 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n ksql_python_test_test_drop_stream;"}' 25 | headers: 26 | Accept: [application/json] 27 | Accept-Encoding: ['gzip, deflate'] 28 | Connection: [keep-alive] 29 | Content-Length: ['78'] 30 | Content-Type: [application/json] 31 | User-Agent: [python-requests/2.19.1] 32 | method: POST 33 | uri: http://localhost:8088/ksql 34 | response: 35 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 36 | EXISTS\n ksql_python_test_test_drop_stream;","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_STREAM/drop","commandStatus":{"status":"SUCCESS","message":"Source 37 | KSQL_PYTHON_TEST_TEST_DROP_STREAM does 
not exist."}}]'} 38 | headers: 39 | Content-Type: [application/json] 40 | Date: ['Fri, 20 Jul 2018 20:10:18 GMT'] 41 | Server: [Jetty(9.4.10.v20180503)] 42 | status: {code: 200, message: OK} 43 | - request: 44 | body: '{"ksql": "CREATE STREAM ksql_python_test_test_drop_stream (viewtime bigint, 45 | userid varchar, pageid varchar) WITH (kafka_topic=''ksql_python_test_exist_topic'', 46 | value_format=''DELIMITED'');"}' 47 | headers: 48 | Accept: [application/json] 49 | Accept-Encoding: ['gzip, deflate'] 50 | Connection: [keep-alive] 51 | Content-Length: ['209'] 52 | Content-Type: [application/json] 53 | User-Agent: [python-requests/2.19.1] 54 | method: POST 55 | uri: http://localhost:8088/ksql 56 | response: 57 | body: {string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_test_test_drop_stream 58 | (viewtime bigint, userid varchar, pageid varchar) WITH 59 | (kafka_topic=''ksql_python_test_exist_topic'', value_format=''DELIMITED'');","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_STREAM/create","commandStatus":{"status":"SUCCESS","message":"Stream 60 | created"}}]'} 61 | headers: 62 | Content-Type: [application/json] 63 | Date: ['Fri, 20 Jul 2018 20:10:19 GMT'] 64 | Server: [Jetty(9.4.10.v20180503)] 65 | status: {code: 200, message: OK} 66 | - request: 67 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_stream;"}' 68 | headers: 69 | Accept: [application/json] 70 | Accept-Encoding: ['gzip, deflate'] 71 | Connection: [keep-alive] 72 | Content-Length: ['64'] 73 | Content-Type: [application/json] 74 | User-Agent: [python-requests/2.19.1] 75 | method: POST 76 | uri: http://localhost:8088/ksql 77 | response: 78 | body: {string: '[{"@type":"sourceDescription","statementText":"DESCRIBE EXTENDED 79 | 
ksql_python_test_test_drop_stream;","sourceDescription":{"name":"KSQL_PYTHON_TEST_TEST_DROP_STREAM","readQueries":[],"writeQueries":[],"fields":[{"name":"ROWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"ROWKEY","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"VIEWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"USERID","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"PAGEID","schema":{"type":"STRING","fields":null,"memberSchema":null}}],"type":"STREAM","key":"","timestamp":"","statistics":"","errorStats":"","extended":true,"format":"DELIMITED","topic":"ksql_python_test_exist_topic","partitions":1,"replication":1}}]'} 80 | headers: 81 | Content-Type: [application/json] 82 | Date: ['Fri, 20 Jul 2018 20:10:19 GMT'] 83 | Server: [Jetty(9.4.10.v20180503)] 84 | status: {code: 200, message: OK} 85 | - request: 86 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_stream;"}' 87 | headers: 88 | Accept: [application/json] 89 | Accept-Encoding: ['gzip, deflate'] 90 | Connection: [keep-alive] 91 | Content-Length: ['64'] 92 | Content-Type: [application/json] 93 | User-Agent: [python-requests/2.19.1] 94 | method: POST 95 | uri: http://localhost:8088/ksql 96 | response: 97 | body: {string: '[{"@type":"sourceDescription","statementText":"DESCRIBE EXTENDED 98 | 
ksql_python_test_test_drop_stream;","sourceDescription":{"name":"KSQL_PYTHON_TEST_TEST_DROP_STREAM","readQueries":[],"writeQueries":[],"fields":[{"name":"ROWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"ROWKEY","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"VIEWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"USERID","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"PAGEID","schema":{"type":"STRING","fields":null,"memberSchema":null}}],"type":"STREAM","key":"","timestamp":"","statistics":"","errorStats":"","extended":true,"format":"DELIMITED","topic":"ksql_python_test_exist_topic","partitions":1,"replication":1}}]'} 99 | headers: 100 | Content-Type: [application/json] 101 | Date: ['Fri, 20 Jul 2018 20:10:19 GMT'] 102 | Server: [Jetty(9.4.10.v20180503)] 103 | status: {code: 200, message: OK} 104 | - request: 105 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n ksql_python_test_test_drop_stream;"}' 106 | headers: 107 | Accept: [application/json] 108 | Accept-Encoding: ['gzip, deflate'] 109 | Connection: [keep-alive] 110 | Content-Length: ['78'] 111 | Content-Type: [application/json] 112 | User-Agent: [python-requests/2.19.1] 113 | method: POST 114 | uri: http://localhost:8088/ksql 115 | response: 116 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 117 | EXISTS\n ksql_python_test_test_drop_stream;","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_STREAM/drop","commandStatus":{"status":"SUCCESS","message":"Source 118 | KSQL_PYTHON_TEST_TEST_DROP_STREAM was dropped. 
"}}]'} 119 | headers: 120 | Content-Type: [application/json] 121 | Date: ['Fri, 20 Jul 2018 20:10:19 GMT'] 122 | Server: [Jetty(9.4.10.v20180503)] 123 | status: {code: 200, message: OK} 124 | - request: 125 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_stream;"}' 126 | headers: 127 | Accept: [application/json] 128 | Accept-Encoding: ['gzip, deflate'] 129 | Connection: [keep-alive] 130 | Content-Length: ['64'] 131 | Content-Type: [application/json] 132 | User-Agent: [python-requests/2.19.1] 133 | method: POST 134 | uri: http://localhost:8088/ksql 135 | response: 136 | body: {string: '{"@type":"statement_error","error_code":40001,"message":"Could 137 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_DROP_STREAM'' in the Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 138 | 
Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","org.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","org.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.ServletCont
ainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)","org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118
)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 139 | EXTENDED ksql_python_test_test_drop_stream;","entities":[]}'} 140 | headers: 141 | Content-Type: [application/json] 142 | Date: ['Fri, 20 Jul 2018 20:10:19 GMT'] 143 | Server: [Jetty(9.4.10.v20180503)] 144 | status: {code: 400, message: Bad Request} 145 | version: 1 146 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/utils_test_drop_stream_create_as_stream.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_stream;"}' 4 | headers: 5 | Accept: [application/json] 6 | Accept-Encoding: ['gzip, deflate'] 7 | Connection: [keep-alive] 8 | Content-Length: ['64'] 9 | Content-Type: [application/json] 10 | User-Agent: [python-requests/2.19.1] 11 | method: POST 12 | uri: http://localhost:8088/ksql 13 | response: 14 | body: {string: '{"@type":"statement_error","error_code":40001,"message":"Could 15 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_DROP_STREAM'' in the 
Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 16 | Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","org.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","or
g.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)","org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclips
e.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 17 | EXTENDED ksql_python_test_test_drop_stream;","entities":[]}'} 18 | headers: 19 | Content-Type: [application/json] 20 | Date: ['Fri, 20 Jul 2018 20:10:21 GMT'] 21 | Server: [Jetty(9.4.10.v20180503)] 22 | status: {code: 400, message: Bad Request} 23 | - request: 24 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n ksql_python_test_test_drop_stream;"}' 25 | headers: 26 | Accept: [application/json] 27 | Accept-Encoding: ['gzip, deflate'] 28 | Connection: [keep-alive] 29 | Content-Length: ['78'] 30 | Content-Type: [application/json] 31 | User-Agent: [python-requests/2.19.1] 32 | method: POST 33 | uri: http://localhost:8088/ksql 34 | response: 35 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 36 | EXISTS\n ksql_python_test_test_drop_stream;","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_STREAM/drop","commandStatus":{"status":"SUCCESS","message":"Source 37 | KSQL_PYTHON_TEST_TEST_DROP_STREAM does 
not exist."}}]'} 38 | headers: 39 | Content-Type: [application/json] 40 | Date: ['Fri, 20 Jul 2018 20:10:21 GMT'] 41 | Server: [Jetty(9.4.10.v20180503)] 42 | status: {code: 200, message: OK} 43 | - request: 44 | body: '{"ksql": "CREATE STREAM ksql_python_test_test_drop_stream (viewtime bigint, 45 | userid varchar, pageid varchar) WITH (kafka_topic=''ksql_python_test_exist_topic'', 46 | value_format=''DELIMITED'');"}' 47 | headers: 48 | Accept: [application/json] 49 | Accept-Encoding: ['gzip, deflate'] 50 | Connection: [keep-alive] 51 | Content-Length: ['209'] 52 | Content-Type: [application/json] 53 | User-Agent: [python-requests/2.19.1] 54 | method: POST 55 | uri: http://localhost:8088/ksql 56 | response: 57 | body: {string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_test_test_drop_stream 58 | (viewtime bigint, userid varchar, pageid varchar) WITH 59 | (kafka_topic=''ksql_python_test_exist_topic'', value_format=''DELIMITED'');","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_STREAM/create","commandStatus":{"status":"SUCCESS","message":"Stream 60 | created"}}]'} 61 | headers: 62 | Content-Type: [application/json] 63 | Date: ['Fri, 20 Jul 2018 20:10:21 GMT'] 64 | Server: [Jetty(9.4.10.v20180503)] 65 | status: {code: 200, message: OK} 66 | - request: 67 | body: '{"ksql": "CREATE STREAM ksql_python_test_test_drop_stream_as as select 68 | * from ksql_python_test_test_drop_stream;"}' 69 | headers: 70 | Accept: [application/json] 71 | Accept-Encoding: ['gzip, deflate'] 72 | Connection: [keep-alive] 73 | Content-Length: ['114'] 74 | Content-Type: [application/json] 75 | User-Agent: [python-requests/2.19.1] 76 | method: POST 77 | uri: http://localhost:8088/ksql 78 | response: 79 | body: {string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_test_test_drop_stream_as 80 | as select * from 
ksql_python_test_test_drop_stream;","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS/create","commandStatus":{"status":"SUCCESS","message":"Stream 81 | created and running"}}]'} 82 | headers: 83 | Content-Type: [application/json] 84 | Date: ['Fri, 20 Jul 2018 20:10:21 GMT'] 85 | Server: [Jetty(9.4.10.v20180503)] 86 | status: {code: 200, message: OK} 87 | - request: 88 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_stream_as;"}' 89 | headers: 90 | Accept: [application/json] 91 | Accept-Encoding: ['gzip, deflate'] 92 | Connection: [keep-alive] 93 | Content-Length: ['67'] 94 | Content-Type: [application/json] 95 | User-Agent: [python-requests/2.19.1] 96 | method: POST 97 | uri: http://localhost:8088/ksql 98 | response: 99 | body: {string: '[{"@type":"sourceDescription","statementText":"DESCRIBE EXTENDED 100 | ksql_python_test_test_drop_stream_as;","sourceDescription":{"name":"KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS","readQueries":[],"writeQueries":[{"sinks":["KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS"],"id":"CSAS_KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS_4","queryString":"CREATE 101 | STREAM ksql_python_test_test_drop_stream_as as select * from ksql_python_test_test_drop_stream;"}],"fields":[{"name":"ROWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"ROWKEY","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"VIEWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"USERID","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"PAGEID","schema":{"type":"STRING","fields":null,"memberSchema":null}}],"type":"STREAM","key":"","timestamp":"","statistics":"","errorStats":"","extended":true,"format":"DELIMITED","topic":"KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS","partitions":4,"replication":1}}]'} 102 | headers: 103 | Content-Type: [application/json] 104 | Date: ['Fri, 20 Jul 2018 20:10:23 GMT'] 105 | Server: [Jetty(9.4.10.v20180503)] 106 | status: {code: 200, message: OK} 107 | - 
request: 108 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_stream_as;"}' 109 | headers: 110 | Accept: [application/json] 111 | Accept-Encoding: ['gzip, deflate'] 112 | Connection: [keep-alive] 113 | Content-Length: ['67'] 114 | Content-Type: [application/json] 115 | User-Agent: [python-requests/2.19.1] 116 | method: POST 117 | uri: http://localhost:8088/ksql 118 | response: 119 | body: {string: '[{"@type":"sourceDescription","statementText":"DESCRIBE EXTENDED 120 | ksql_python_test_test_drop_stream_as;","sourceDescription":{"name":"KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS","readQueries":[],"writeQueries":[{"sinks":["KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS"],"id":"CSAS_KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS_4","queryString":"CREATE 121 | STREAM ksql_python_test_test_drop_stream_as as select * from ksql_python_test_test_drop_stream;"}],"fields":[{"name":"ROWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"ROWKEY","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"VIEWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"USERID","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"PAGEID","schema":{"type":"STRING","fields":null,"memberSchema":null}}],"type":"STREAM","key":"","timestamp":"","statistics":"","errorStats":"","extended":true,"format":"DELIMITED","topic":"KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS","partitions":4,"replication":1}}]'} 122 | headers: 123 | Content-Type: [application/json] 124 | Date: ['Fri, 20 Jul 2018 20:10:24 GMT'] 125 | Server: [Jetty(9.4.10.v20180503)] 126 | status: {code: 200, message: OK} 127 | - request: 128 | body: '{"ksql": "TERMINATE CSAS_KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS_4;"}' 129 | headers: 130 | Accept: [application/json] 131 | Accept-Encoding: ['gzip, deflate'] 132 | Connection: [keep-alive] 133 | Content-Length: ['66'] 134 | Content-Type: [application/json] 135 | User-Agent: [python-requests/2.19.1] 136 | method: POST 137 | uri: 
http://localhost:8088/ksql 138 | response: 139 | body: {string: '[{"@type":"currentStatus","statementText":"TERMINATE CSAS_KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS_4;","commandId":"terminate/CSAS_KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS_4/execute","commandStatus":{"status":"QUEUED","message":"Statement 140 | written to command topic"}}]'} 141 | headers: 142 | Content-Type: [application/json] 143 | Date: ['Fri, 20 Jul 2018 20:10:24 GMT'] 144 | Server: [Jetty(9.4.10.v20180503)] 145 | status: {code: 200, message: OK} 146 | - request: 147 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n ksql_python_test_test_drop_stream_as;"}' 148 | headers: 149 | Accept: [application/json] 150 | Accept-Encoding: ['gzip, deflate'] 151 | Connection: [keep-alive] 152 | Content-Length: ['81'] 153 | Content-Type: [application/json] 154 | User-Agent: [python-requests/2.19.1] 155 | method: POST 156 | uri: http://localhost:8088/ksql 157 | response: 158 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 159 | EXISTS\n ksql_python_test_test_drop_stream_as;","commandId":"stream/KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS/drop","commandStatus":{"status":"SUCCESS","message":"Source 160 | KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS was dropped. 
"}}]'} 161 | headers: 162 | Content-Type: [application/json] 163 | Date: ['Fri, 20 Jul 2018 20:10:29 GMT'] 164 | Server: [Jetty(9.4.10.v20180503)] 165 | status: {code: 200, message: OK} 166 | - request: 167 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_drop_stream_as;"}' 168 | headers: 169 | Accept: [application/json] 170 | Accept-Encoding: ['gzip, deflate'] 171 | Connection: [keep-alive] 172 | Content-Length: ['67'] 173 | Content-Type: [application/json] 174 | User-Agent: [python-requests/2.19.1] 175 | method: POST 176 | uri: http://localhost:8088/ksql 177 | response: 178 | body: {string: '{"@type":"statement_error","error_code":40001,"message":"Could 179 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_DROP_STREAM_AS'' in the Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 180 | 
Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","org.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","org.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.ServletCont
ainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)","org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118
)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 181 | EXTENDED ksql_python_test_test_drop_stream_as;","entities":[]}'} 182 | headers: 183 | Content-Type: [application/json] 184 | Date: ['Fri, 20 Jul 2018 20:10:30 GMT'] 185 | Server: [Jetty(9.4.10.v20180503)] 186 | status: {code: 400, message: Bad Request} 187 | version: 1 188 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/utils_test_get_all_streams.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_get_all_streams;"}' 4 | headers: 5 | Accept: [application/json] 6 | Accept-Encoding: ['gzip, deflate'] 7 | Connection: [keep-alive] 8 | Content-Length: ['68'] 9 | Content-Type: [application/json] 10 | User-Agent: [python-requests/2.19.1] 11 | method: POST 12 | uri: http://localhost:8088/ksql 13 | response: 14 | body: {string: '{"@type":"statement_error","error_code":40001,"message":"Could 15 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_GET_ALL_STREAMS'' in the 
Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 16 | Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","org.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","or
g.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)","org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclips
e.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 17 | EXTENDED ksql_python_test_test_get_all_streams;","entities":[]}'} 18 | headers: 19 | Content-Type: [application/json] 20 | Date: ['Fri, 20 Jul 2018 20:10:31 GMT'] 21 | Server: [Jetty(9.4.10.v20180503)] 22 | status: {code: 400, message: Bad Request} 23 | - request: 24 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n ksql_python_test_test_get_all_streams;"}' 25 | headers: 26 | Accept: [application/json] 27 | Accept-Encoding: ['gzip, deflate'] 28 | Connection: [keep-alive] 29 | Content-Length: ['82'] 30 | Content-Type: [application/json] 31 | User-Agent: [python-requests/2.19.1] 32 | method: POST 33 | uri: http://localhost:8088/ksql 34 | response: 35 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 36 | EXISTS\n ksql_python_test_test_get_all_streams;","commandId":"stream/KSQL_PYTHON_TEST_TEST_GET_ALL_STREAMS/drop","commandStatus":{"status":"SUCCESS","message":"Source 37 | 
KSQL_PYTHON_TEST_TEST_GET_ALL_STREAMS does not exist."}}]'} 38 | headers: 39 | Content-Type: [application/json] 40 | Date: ['Fri, 20 Jul 2018 20:10:31 GMT'] 41 | Server: [Jetty(9.4.10.v20180503)] 42 | status: {code: 200, message: OK} 43 | - request: 44 | body: '{"ksql": "CREATE STREAM ksql_python_test_test_get_all_streams (viewtime 45 | bigint, userid varchar, pageid varchar) WITH (kafka_topic=''ksql_python_test_exist_topic'', 46 | value_format=''DELIMITED'');"}' 47 | headers: 48 | Accept: [application/json] 49 | Accept-Encoding: ['gzip, deflate'] 50 | Connection: [keep-alive] 51 | Content-Length: ['214'] 52 | Content-Type: [application/json] 53 | User-Agent: [python-requests/2.19.1] 54 | method: POST 55 | uri: http://localhost:8088/ksql 56 | response: 57 | body: {string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_test_test_get_all_streams 58 | (viewtime bigint, userid varchar, pageid varchar) WITH 59 | (kafka_topic=''ksql_python_test_exist_topic'', value_format=''DELIMITED'');","commandId":"stream/KSQL_PYTHON_TEST_TEST_GET_ALL_STREAMS/create","commandStatus":{"status":"SUCCESS","message":"Stream 60 | created"}}]'} 61 | headers: 62 | Content-Type: [application/json] 63 | Date: ['Fri, 20 Jul 2018 20:10:31 GMT'] 64 | Server: [Jetty(9.4.10.v20180503)] 65 | status: {code: 200, message: OK} 66 | - request: 67 | body: '{"ksql": "SHOW STREAMS;"}' 68 | headers: 69 | Accept: [application/json] 70 | Accept-Encoding: ['gzip, deflate'] 71 | Connection: [keep-alive] 72 | Content-Length: ['25'] 73 | Content-Type: [application/json] 74 | User-Agent: [python-requests/2.19.1] 75 | method: POST 76 | uri: http://localhost:8088/ksql 77 | response: 78 | body: {string: '[{"@type":"streams","statementText":"SHOW 
STREAMS;","streams":[{"type":"STREAM","name":"TEST_TABLE","topic":"exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"CREATE_STREAM_AS_WITH_CONDITIONS_WITHOUT_STARTWITH","topic":"create_stream_as_with_conditions_without_startwith","format":"DELIMITED"},{"type":"STREAM","name":"PREBID_TRAFFIC_LOG_TOTAL_STREAM","topic":"exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"KSQL_PYTHON_TEST_TEST_GET_ALL_STREAMS","topic":"ksql_python_test_exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"PAGEVIEWS_ORIGINAL","topic":"exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"CREATE_STREAM_AS_WITHOUT_CONDITIONS","topic":"create_stream_as_without_conditions","format":"DELIMITED"},{"type":"STREAM","name":"FOO_TABLE","topic":"exist_topic","format":"DELIMITED"},{"type":"STREAM","name":"CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH_WITH_AND","topic":"create_stream_as_with_conditions_with_startwith_with_and","format":"DELIMITED"},{"type":"STREAM","name":"CREATE_STREAM_AS_WITH_CONDITIONS_WITH_STARTWITH","topic":"create_stream_as_with_conditions_with_startwith","format":"DELIMITED"}]}]'} 79 | headers: 80 | Content-Type: [application/json] 81 | Date: ['Fri, 20 Jul 2018 20:10:32 GMT'] 82 | Server: [Jetty(9.4.10.v20180503)] 83 | status: {code: 200, message: OK} 84 | version: 1 85 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/utils_test_get_dependent_queries.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_get_dependent_queries;"}' 4 | headers: 5 | Accept: [application/json] 6 | Accept-Encoding: ['gzip, deflate'] 7 | Connection: [keep-alive] 8 | Content-Length: ['74'] 9 | Content-Type: [application/json] 10 | User-Agent: [python-requests/2.19.1] 11 | method: POST 12 | uri: http://localhost:8088/ksql 13 | response: 14 | body: {string: 
'{"@type":"statement_error","error_code":40001,"message":"Could 15 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES'' in the 16 | Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 17 | Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","or
g.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","org.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)",
"org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.java:118)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 18 | EXTENDED ksql_python_test_test_get_dependent_queries;","entities":[]}'} 19 | headers: 20 | Content-Type: [application/json] 21 | Date: ['Fri, 20 Jul 2018 20:10:33 GMT'] 22 | Server: [Jetty(9.4.10.v20180503)] 23 | status: {code: 400, message: Bad Request} 24 | - request: 25 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n ksql_python_test_test_get_dependent_queries;"}' 26 | headers: 27 | Accept: [application/json] 28 | Accept-Encoding: ['gzip, deflate'] 29 | Connection: [keep-alive] 30 | Content-Length: ['88'] 31 | Content-Type: [application/json] 32 | User-Agent: [python-requests/2.19.1] 33 | method: POST 34 | uri: http://localhost:8088/ksql 35 | response: 36 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 37 | EXISTS\n 
ksql_python_test_test_get_dependent_queries;","commandId":"stream/KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES/drop","commandStatus":{"status":"SUCCESS","message":"Source 38 | KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES does not exist."}}]'} 39 | headers: 40 | Content-Type: [application/json] 41 | Date: ['Fri, 20 Jul 2018 20:10:33 GMT'] 42 | Server: [Jetty(9.4.10.v20180503)] 43 | status: {code: 200, message: OK} 44 | - request: 45 | body: '{"ksql": "CREATE STREAM ksql_python_test_test_get_dependent_queries (viewtime 46 | bigint, userid varchar, pageid varchar) WITH (kafka_topic=''ksql_python_test_exist_topic'', 47 | value_format=''DELIMITED'');"}' 48 | headers: 49 | Accept: [application/json] 50 | Accept-Encoding: ['gzip, deflate'] 51 | Connection: [keep-alive] 52 | Content-Length: ['219'] 53 | Content-Type: [application/json] 54 | User-Agent: [python-requests/2.19.1] 55 | method: POST 56 | uri: http://localhost:8088/ksql 57 | response: 58 | body: {string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_test_test_get_dependent_queries 59 | (viewtime bigint, userid varchar, pageid varchar) WITH 60 | (kafka_topic=''ksql_python_test_exist_topic'', value_format=''DELIMITED'');","commandId":"stream/KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES/create","commandStatus":{"status":"SUCCESS","message":"Stream 61 | created"}}]'} 62 | headers: 63 | Content-Type: [application/json] 64 | Date: ['Fri, 20 Jul 2018 20:10:33 GMT'] 65 | Server: [Jetty(9.4.10.v20180503)] 66 | status: {code: 200, message: OK} 67 | - request: 68 | body: '{"ksql": "CREATE STREAM ksql_python_test_test_get_dependent_queries_as 69 | as select * from ksql_python_test_test_get_dependent_queries;"}' 70 | headers: 71 | Accept: [application/json] 72 | Accept-Encoding: ['gzip, deflate'] 73 | Connection: [keep-alive] 74 | Content-Length: ['134'] 75 | Content-Type: [application/json] 76 | User-Agent: [python-requests/2.19.1] 77 | method: POST 78 | uri: http://localhost:8088/ksql 79 | response: 80 
| body: {string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_test_test_get_dependent_queries_as 81 | as select * from ksql_python_test_test_get_dependent_queries;","commandId":"stream/KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES_AS/create","commandStatus":{"status":"SUCCESS","message":"Stream 82 | created and running"}}]'} 83 | headers: 84 | Content-Type: [application/json] 85 | Date: ['Fri, 20 Jul 2018 20:10:33 GMT'] 86 | Server: [Jetty(9.4.10.v20180503)] 87 | status: {code: 200, message: OK} 88 | - request: 89 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_get_dependent_queries_as;"}' 90 | headers: 91 | Accept: [application/json] 92 | Accept-Encoding: ['gzip, deflate'] 93 | Connection: [keep-alive] 94 | Content-Length: ['77'] 95 | Content-Type: [application/json] 96 | User-Agent: [python-requests/2.19.1] 97 | method: POST 98 | uri: http://localhost:8088/ksql 99 | response: 100 | body: {string: '[{"@type":"sourceDescription","statementText":"DESCRIBE EXTENDED 101 | ksql_python_test_test_get_dependent_queries_as;","sourceDescription":{"name":"KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES_AS","readQueries":[],"writeQueries":[{"sinks":["KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES_AS"],"id":"CSAS_KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES_AS_5","queryString":"CREATE 102 | STREAM ksql_python_test_test_get_dependent_queries_as as select * from 
ksql_python_test_test_get_dependent_queries;"}],"fields":[{"name":"ROWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"ROWKEY","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"VIEWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"USERID","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"PAGEID","schema":{"type":"STRING","fields":null,"memberSchema":null}}],"type":"STREAM","key":"","timestamp":"","statistics":"","errorStats":"","extended":true,"format":"DELIMITED","topic":"KSQL_PYTHON_TEST_TEST_GET_DEPENDENT_QUERIES_AS","partitions":4,"replication":1}}]'} 103 | headers: 104 | Content-Type: [application/json] 105 | Date: ['Fri, 20 Jul 2018 20:10:35 GMT'] 106 | Server: [Jetty(9.4.10.v20180503)] 107 | status: {code: 200, message: OK} 108 | version: 1 109 | -------------------------------------------------------------------------------- /tests/vcr_cassettes/utils_test_get_stream_info.yml: -------------------------------------------------------------------------------- 1 | interactions: 2 | - request: 3 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_get_stream_info;"}' 4 | headers: 5 | Accept: [application/json] 6 | Accept-Encoding: ['gzip, deflate'] 7 | Connection: [keep-alive] 8 | Content-Length: ['68'] 9 | Content-Type: [application/json] 10 | User-Agent: [python-requests/2.19.1] 11 | method: POST 12 | uri: http://localhost:8088/ksql 13 | response: 14 | body: {string: '{"@type":"statement_error","error_code":40001,"message":"Could 15 | not find STREAM/TABLE ''KSQL_PYTHON_TEST_TEST_GET_STREAM_INFO'' in the Metastore","stackTrace":["io.confluent.ksql.rest.server.resources.KsqlResource.describe(KsqlResource.java:457)","io.confluent.ksql.rest.server.resources.KsqlResource.validateStatement(KsqlResource.java:248)","io.confluent.ksql.rest.server.resources.KsqlResource.handleKsqlStatements(KsqlResource.java:190)","sun.reflect.GeneratedMethodAccessor6.invoke(Unknown 
16 | Source)","sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)","java.lang.reflect.Method.invoke(Method.java:498)","org.glassfish.jersey.server.model.internal.ResourceMethodInvocationHandlerFactory.lambda$static$0(ResourceMethodInvocationHandlerFactory.java:76)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher$1.run(AbstractJavaResourceMethodDispatcher.java:148)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.invoke(AbstractJavaResourceMethodDispatcher.java:191)","org.glassfish.jersey.server.model.internal.JavaResourceMethodDispatcherProvider$ResponseOutInvoker.doDispatch(JavaResourceMethodDispatcherProvider.java:200)","org.glassfish.jersey.server.model.internal.AbstractJavaResourceMethodDispatcher.dispatch(AbstractJavaResourceMethodDispatcher.java:103)","org.glassfish.jersey.server.model.ResourceMethodInvoker.invoke(ResourceMethodInvoker.java:493)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:415)","org.glassfish.jersey.server.model.ResourceMethodInvoker.apply(ResourceMethodInvoker.java:104)","org.glassfish.jersey.server.ServerRuntime$1.run(ServerRuntime.java:277)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:272)","org.glassfish.jersey.internal.Errors$1.call(Errors.java:268)","org.glassfish.jersey.internal.Errors.process(Errors.java:316)","org.glassfish.jersey.internal.Errors.process(Errors.java:298)","org.glassfish.jersey.internal.Errors.process(Errors.java:268)","org.glassfish.jersey.process.internal.RequestScope.runInScope(RequestScope.java:289)","org.glassfish.jersey.server.ServerRuntime.process(ServerRuntime.java:256)","org.glassfish.jersey.server.ApplicationHandler.handle(ApplicationHandler.java:703)","org.glassfish.jersey.servlet.WebComponent.serviceImpl(WebComponent.java:416)","org.glassfish.jersey.servlet.ServletContainer.serviceImpl(ServletContainer.java:409)","org.glassfish.jersey.servlet.Servle
tContainer.doFilter(ServletContainer.java:584)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:525)","org.glassfish.jersey.servlet.ServletContainer.doFilter(ServletContainer.java:462)","org.eclipse.jetty.servlet.ServletHandler$CachedChain.doFilter(ServletHandler.java:1642)","org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:533)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:1595)","org.eclipse.jetty.server.handler.ScopedHandler.nextHandle(ScopedHandler.java:255)","org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1253)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:203)","org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:473)","org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:1564)","org.eclipse.jetty.server.handler.ScopedHandler.nextScope(ScopedHandler.java:201)","org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1155)","org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:144)","org.eclipse.jetty.server.handler.HandlerCollection.handle(HandlerCollection.java:126)","org.eclipse.jetty.server.handler.StatisticsHandler.handle(StatisticsHandler.java:169)","org.eclipse.jetty.server.handler.ContextHandlerCollection.handle(ContextHandlerCollection.java:219)","org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:132)","org.eclipse.jetty.server.Server.handle(Server.java:531)","org.eclipse.jetty.server.HttpChannel.handle(HttpChannel.java:352)","org.eclipse.jetty.server.HttpConnection.onFillable(HttpConnection.java:260)","org.eclipse.jetty.io.AbstractConnection$ReadCallback.succeeded(AbstractConnection.java:281)","org.eclipse.jetty.io.FillInterest.fillable(FillInterest.java:102)","org.eclipse.jetty.io.ChannelEndPoint$2.run(ChannelEndPoint.jav
a:118)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.runTask(EatWhatYouKill.java:333)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.doProduce(EatWhatYouKill.java:310)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.tryProduce(EatWhatYouKill.java:168)","org.eclipse.jetty.util.thread.strategy.EatWhatYouKill.run(EatWhatYouKill.java:126)","org.eclipse.jetty.util.thread.ReservedThreadExecutor$ReservedThread.run(ReservedThreadExecutor.java:366)","org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:760)","org.eclipse.jetty.util.thread.QueuedThreadPool$2.run(QueuedThreadPool.java:678)","java.lang.Thread.run(Thread.java:748)"],"statementText":"DESCRIBE 17 | EXTENDED ksql_python_test_test_get_stream_info;","entities":[]}'} 18 | headers: 19 | Content-Type: [application/json] 20 | Date: ['Fri, 20 Jul 2018 20:10:44 GMT'] 21 | Server: [Jetty(9.4.10.v20180503)] 22 | status: {code: 400, message: Bad Request} 23 | - request: 24 | body: '{"ksql": "DROP\n STREAM IF EXISTS\n ksql_python_test_test_get_stream_info;"}' 25 | headers: 26 | Accept: [application/json] 27 | Accept-Encoding: ['gzip, deflate'] 28 | Connection: [keep-alive] 29 | Content-Length: ['82'] 30 | Content-Type: [application/json] 31 | User-Agent: [python-requests/2.19.1] 32 | method: POST 33 | uri: http://localhost:8088/ksql 34 | response: 35 | body: {string: '[{"@type":"currentStatus","statementText":"DROP\n STREAM IF 36 | EXISTS\n ksql_python_test_test_get_stream_info;","commandId":"stream/KSQL_PYTHON_TEST_TEST_GET_STREAM_INFO/drop","commandStatus":{"status":"SUCCESS","message":"Source 37 | KSQL_PYTHON_TEST_TEST_GET_STREAM_INFO does not exist."}}]'} 38 | headers: 39 | Content-Type: [application/json] 40 | Date: ['Fri, 20 Jul 2018 20:10:45 GMT'] 41 | Server: [Jetty(9.4.10.v20180503)] 42 | status: {code: 200, message: OK} 43 | - request: 44 | body: '{"ksql": "CREATE STREAM ksql_python_test_test_get_stream_info (viewtime 45 | bigint, userid varchar, pageid varchar) WITH 
(kafka_topic=''ksql_python_test_exist_topic'', 46 | value_format=''DELIMITED'');"}' 47 | headers: 48 | Accept: [application/json] 49 | Accept-Encoding: ['gzip, deflate'] 50 | Connection: [keep-alive] 51 | Content-Length: ['214'] 52 | Content-Type: [application/json] 53 | User-Agent: [python-requests/2.19.1] 54 | method: POST 55 | uri: http://localhost:8088/ksql 56 | response: 57 | body: {string: '[{"@type":"currentStatus","statementText":"CREATE STREAM ksql_python_test_test_get_stream_info 58 | (viewtime bigint, userid varchar, pageid varchar) WITH 59 | (kafka_topic=''ksql_python_test_exist_topic'', value_format=''DELIMITED'');","commandId":"stream/KSQL_PYTHON_TEST_TEST_GET_STREAM_INFO/create","commandStatus":{"status":"SUCCESS","message":"Stream 60 | created"}}]'} 61 | headers: 62 | Content-Type: [application/json] 63 | Date: ['Fri, 20 Jul 2018 20:10:45 GMT'] 64 | Server: [Jetty(9.4.10.v20180503)] 65 | status: {code: 200, message: OK} 66 | - request: 67 | body: '{"ksql": "DESCRIBE EXTENDED ksql_python_test_test_get_stream_info;"}' 68 | headers: 69 | Accept: [application/json] 70 | Accept-Encoding: ['gzip, deflate'] 71 | Connection: [keep-alive] 72 | Content-Length: ['68'] 73 | Content-Type: [application/json] 74 | User-Agent: [python-requests/2.19.1] 75 | method: POST 76 | uri: http://localhost:8088/ksql 77 | response: 78 | body: {string: '[{"@type":"sourceDescription","statementText":"DESCRIBE EXTENDED 79 | 
ksql_python_test_test_get_stream_info;","sourceDescription":{"name":"KSQL_PYTHON_TEST_TEST_GET_STREAM_INFO","readQueries":[],"writeQueries":[],"fields":[{"name":"ROWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"ROWKEY","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"VIEWTIME","schema":{"type":"BIGINT","fields":null,"memberSchema":null}},{"name":"USERID","schema":{"type":"STRING","fields":null,"memberSchema":null}},{"name":"PAGEID","schema":{"type":"STRING","fields":null,"memberSchema":null}}],"type":"STREAM","key":"","timestamp":"","statistics":"","errorStats":"","extended":true,"format":"DELIMITED","topic":"ksql_python_test_exist_topic","partitions":1,"replication":1}}]'} 80 | headers: 81 | Content-Type: [application/json] 82 | Date: ['Fri, 20 Jul 2018 20:10:45 GMT'] 83 | Server: [Jetty(9.4.10.v20180503)] 84 | status: {code: 200, message: OK} 85 | version: 1 86 | -------------------------------------------------------------------------------- /validate.sh: -------------------------------------------------------------------------------- 1 | #!/bin/sh 2 | set -ex 3 | 4 | black --line-length 120 --target-version py35 ksql tests 5 | mypy ksql 6 | flake8 setup.py ksql tests 7 | --------------------------------------------------------------------------------