├── .github
│   └── workflows
│       ├── build.yml
│       ├── maven.yml
│       └── trivy.yml
├── .gitignore
├── LICENSE
├── NOTICE
├── pom.xml
├── readme.md
└── src
    ├── main
    │   └── java
    │       └── com
    │           └── github
    │               └── cedelsb
    │                   └── kafka
    │                       └── connect
    │                           └── smt
    │                               ├── Record2JsonStringConverter.java
    │                               └── converter
    │                                   ├── AvroJsonSchemafulRecordConverter.java
    │                                   ├── FieldConverter.java
    │                                   ├── JsonRawStringRecordConverter.java
    │                                   ├── JsonSchemalessRecordConverter.java
    │                                   ├── RecordConverter.java
    │                                   ├── SinkConverter.java
    │                                   ├── SinkDocument.java
    │                                   ├── SinkFieldConverter.java
    │                                   └── types
    │                                       ├── json
    │                                       │   ├── JsonBinaryConverter.java
    │                                       │   ├── JsonDateTimeAsLongConverter.java
    │                                       │   ├── JsonDateTimeAsStringConverter.java
    │                                       │   └── JsonDecimalConverter.java
    │                                       └── sink
    │                                           └── bson
    │                                               ├── BooleanFieldConverter.java
    │                                               ├── BytesFieldConverter.java
    │                                               ├── Float32FieldConverter.java
    │                                               ├── Float64FieldConverter.java
    │                                               ├── Int16FieldConverter.java
    │                                               ├── Int32FieldConverter.java
    │                                               ├── Int64FieldConverter.java
    │                                               ├── Int8FieldConverter.java
    │                                               ├── StringFieldConverter.java
    │                                               └── logical
    │                                                   ├── DateFieldConverter.java
    │                                                   ├── DecimalFieldConverter.java
    │                                                   ├── TimeFieldConverter.java
    │                                                   └── TimestampFieldConverter.java
    └── test
        ├── java
        │   └── com
        │       └── github
        │           └── cedelsb
        │               ├── integration
        │               │   └── Record2JsonStringIT.java
        │               └── kafka
        │                   └── connect
        │                       └── smt
        │                           ├── Record2JsonStringConverterTest.java
        │                           └── converter
        │                               ├── RecordConverterTest.java
        │                               ├── SinkConverterTest.java
        │                               ├── SinkDocumentTest.java
        │                               └── SinkFieldConverterTest.java
        └── resources
            └── docker
                ├── compose.yaml
                ├── init.sql
                └── jdbcsinkconnector.json
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: Build
2 |
3 | on:
4 | # Trigger the workflow on push or pull request,
5 | # but only for the main branch
6 | push:
7 | branches:
8 | - main
9 | pull_request:
10 | branches:
11 | - main
12 | jobs:
13 | build:
14 | name: Build
15 | runs-on: ubuntu-latest
16 | steps:
17 | - name: Add hosts to /etc/hosts
18 | run: |
19 | sudo echo "127.0.0.1 kafkabroker" | sudo tee -a /etc/hosts
20 | sudo echo "127.0.0.1 schemaregistry" | sudo tee -a /etc/hosts
21 | - uses: actions/checkout@v2
22 | with:
23 | fetch-depth: 0 # Shallow clones should be disabled for a better relevancy of analysis
24 | - name: Set up JDK 11
25 | uses: actions/setup-java@v1
26 | with:
27 | java-version: 11
28 | - name: Cache SonarCloud packages
29 | uses: actions/cache@v1
30 | with:
31 | path: ~/.sonar/cache
32 | key: ${{ runner.os }}-sonar
33 | restore-keys: ${{ runner.os }}-sonar
34 | - name: Cache Maven packages
35 | uses: actions/cache@v1
36 | with:
37 | path: ~/.m2
38 | key: ${{ runner.os }}-m2-${{ hashFiles('**/pom.xml') }}
39 | restore-keys: ${{ runner.os }}-m2
40 | - name: Build and analyze
41 | env:
42 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any
43 | SONAR_TOKEN: ${{ secrets.SONAR_TOKEN }}
44 | run: mvn -B verify org.sonarsource.scanner.maven:sonar-maven-plugin:sonar
45 |
--------------------------------------------------------------------------------
/.github/workflows/maven.yml:
--------------------------------------------------------------------------------
1 | # This workflow will build a Java project with Maven
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/building-and-testing-java-with-maven
3 |
4 | name: Java CI with Maven
5 |
6 | on:
7 | workflow_dispatch:
8 | inputs:
9 | tags:
10 | description: 'Release Tag'
11 |
12 | jobs:
13 | build:
14 |
15 | runs-on: ubuntu-latest
16 |
17 | steps:
18 | - name: Add hosts to /etc/hosts
19 | run: |
20 | sudo echo "127.0.0.1 kafkabroker" | sudo tee -a /etc/hosts
21 | sudo echo "127.0.0.1 schemaregistry" | sudo tee -a /etc/hosts
22 | - uses: actions/checkout@v2
23 | - name: Set up JDK 1.8
24 | uses: actions/setup-java@v1
25 | with:
26 | java-version: 1.8
27 | - name: Build with Maven
28 | run: mvn -B package -DskipTests --file pom.xml
29 | - name: Set Release version env variable
30 | run: |
31 | echo "RELEASE_VERSION=${{ github.event.inputs.tags }}" >> $GITHUB_ENV
32 | - name: Upload binaries to release
33 | uses: svenstaro/upload-release-action@v2
34 | with:
35 | repo_token: ${{ secrets.GITHUB_TOKEN }}
36 | file: /home/runner/work/kafka-connect-transform-tojsonstring/kafka-connect-transform-tojsonstring/target/components/packages/an0r0c-kafka-connect-transform-record2jsonstring-${{ github.event.inputs.tags }}.zip
37 | asset_name: an0r0c-kafka-connect-transform-record2jsonstring-${{ github.event.inputs.tags }}.zip
38 | tag: ${{ github.event.inputs.tags }}
39 | overwrite: true
40 | body: "This is my release text"
41 |
--------------------------------------------------------------------------------
/.github/workflows/trivy.yml:
--------------------------------------------------------------------------------
1 |
2 | name: trivy
3 |
4 | on:
5 | push:
6 | branches: [ "main" ]
7 | pull_request:
8 | branches: [ "main" ]
9 | schedule:
10 | - cron: '24 23 * * 6'
11 | workflow_dispatch:
12 |
13 |
14 | permissions:
15 | contents: read
16 |
17 | jobs:
18 | build:
19 | permissions:
20 | contents: read # for actions/checkout to fetch code
21 | security-events: write # for github/codeql-action/upload-sarif to upload SARIF results
22 | actions: read # only required for a private repository by github/codeql-action/upload-sarif to get the Action run status
23 | name: Build
24 | runs-on: "ubuntu-latest"
25 | steps:
26 | - name: Checkout code
27 | uses: actions/checkout@v3
28 |
29 | - name: Run Trivy vulnerability scanner in repo mode
30 | uses: aquasecurity/trivy-action@master
31 | with:
32 | scan-type: 'fs'
33 | ignore-unfixed: true
34 | format: 'sarif'
35 | output: 'trivy-results.sarif'
36 | severity: 'CRITICAL'
37 |
38 | - name: Upload Trivy scan results to GitHub Security tab
39 | uses: github/codeql-action/upload-sarif@v2
40 | with:
41 | sarif_file: 'trivy-results.sarif'
42 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 |
2 | # Created by https://www.gitignore.io/api/java,intellij+all
3 |
4 | ### Intellij+all ###
5 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio and Webstorm
6 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
7 |
8 |
9 | # User-specific stuff:
10 | .idea/**/workspace.xml
11 | .idea/**/tasks.xml
12 | .idea/dictionaries
13 |
14 | # Sensitive or high-churn files:
15 | .idea/**/dataSources/
16 | .idea/**/dataSources.ids
17 | .idea/**/dataSources.xml
18 | .idea/**/dataSources.local.xml
19 | .idea/**/sqlDataSources.xml
20 | .idea/**/dynamic.xml
21 | .idea/**/uiDesigner.xml
22 |
23 | # Gradle:
24 | .idea/**/gradle.xml
25 | .idea/**/libraries
26 |
27 | # CMake
28 | cmake-build-debug/
29 |
30 | # Mongo Explorer plugin:
31 | .idea/**/mongoSettings.xml
32 |
33 | ## File-based project format:
34 | *.iws
35 |
36 | ## Plugin-specific files:
37 |
38 | # IntelliJ
39 | /out/
40 |
41 | # mpeltonen/sbt-idea plugin
42 | .idea_modules/
43 |
44 | # JIRA plugin
45 | atlassian-ide-plugin.xml
46 |
47 | # Cursive Clojure plugin
48 | .idea/replstate.xml
49 |
50 | # Ruby plugin and RubyMine
51 | /.rakeTasks
52 |
53 | # Crashlytics plugin (for Android Studio and IntelliJ)
54 | com_crashlytics_export_strings.xml
55 | crashlytics.properties
56 | crashlytics-build.properties
57 | fabric.properties
58 |
59 | ### Intellij+all Patch ###
60 | # Ignores the whole idea folder
61 | # See https://github.com/joeblau/gitignore.io/issues/186 and https://github.com/joeblau/gitignore.io/issues/360
62 |
63 | .idea/
64 |
65 | ### Java ###
66 | # Compiled class file
67 | *.class
68 |
69 | # Log file
70 | *.log
71 |
72 | # BlueJ files
73 | *.ctxt
74 |
75 | # Mobile Tools for Java (J2ME)
76 | .mtj.tmp/
77 |
78 | # Package Files #
79 | *.jar
80 | *.war
81 | *.ear
82 | *.zip
83 | *.tar.gz
84 | *.rar
85 |
86 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
87 | hs_err_pid*
88 |
89 |
90 | ### Maven ###
91 | target/
92 | pom.xml.tag
93 | pom.xml.releaseBackup
94 | pom.xml.versionsBackup
95 | pom.xml.next
96 | release.properties
97 | dependency-reduced-pom.xml
98 | buildNumber.properties
99 | .mvn/timing.properties
100 |
101 | # Avoid ignoring Maven wrapper jar file (.jar files are usually ignored)
102 | !/.mvn/wrapper/maven-wrapper.jar
103 |
104 |
105 |
106 | # End of https://www.gitignore.io/api/java,intellij+all
107 |
108 | # IntelliJ
109 | *.iml
110 |
111 | ### Gradle template
112 | .gradle
113 | /build/
114 |
115 | # Ignore Gradle GUI config
116 | gradle-app.setting
117 |
118 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored)
119 | !gradle-wrapper.jar
120 |
121 | # Cache of project
122 | .gradletasknamecache
123 |
124 | # # Work around https://youtrack.jetbrains.com/issue/IDEA-116898
125 | # gradle/wrapper/gradle-wrapper.properties
126 |
127 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 |
 2 | The Initial Developer of the code within src/main/java/com/github/cedelsb/kafka/connect/smt/converter and src/test/java/com/github/cedelsb/kafka/connect/smt/converter and related util classes, which were copied from https://github.com/hpgrahsl/kafka-connect-mongodb, is hpgrahsl (https://github.com/hpgrahsl).
 3 | That code is licensed under Apache 2.0 and was copied from https://github.com/hpgrahsl/kafka-connect-mongodb. As that repository is no longer maintained and had some security vulnerabilities in other parts of its code, the decision was made to copy over only the parts that are needed for this repository. All Rights Reserved.
4 |
5 |
6 |
7 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 |
17 |
18 |
21 | 4.0.0
22 |
23 | kafka-connect-transform-record2jsonstring
24 | kafka-connect-transform-record2jsonstring
25 | 1.4
26 | Kafka Connect single message transform (SMT) taking a record (with schema) and transforms it into a single JSON String
27 |
28 | https://github.com/an0r0c/kafka-connect-transform-tojsonstring
29 |
30 |
31 | Apache License 2.0
32 | https://github.com/an0r0c/kafka-connect-transform-tojsonstring/blob/main/LICENSE
33 | repo
34 |
35 |
36 |
37 |
38 | an0r0c
39 | Christian Edelsbrunner
40 | https://github.com/an0r0c
41 |
42 | Maintainer
43 |
44 |
45 |
46 |
47 | github
48 | https://github.com/an0r0c/kafka-connect-transform-tojsonstring/issues
49 |
50 |
51 | UTF-8
52 | 1.8
53 | 3.3.1
54 | 7.2.1
55 | record2json
56 | an0r0c_kafka-connect-transform-tojsonstring
57 | an0r0c
58 | https://sonarcloud.io
59 | 5.5.2
60 | 5.5.2
61 | 1.5.2
62 | 3.2.4
63 | 2.0.0.0
64 | 1.11.1
65 |
66 |
67 |
68 |
69 |
70 |
71 | org.testcontainers
72 | testcontainers
73 | 1.17.5
74 |
75 |
76 |
77 |
78 |
79 |
80 | org.apache.kafka
81 | connect-api
82 | ${kafkaconnect.version}
83 | provided
84 |
85 |
86 | org.apache.kafka
87 | connect-transforms
88 | ${kafkaconnect.version}
89 |
90 |
91 | org.junit.jupiter
92 | junit-jupiter-engine
93 | ${junit.jupiter.version}
94 | test
95 |
96 |
97 | org.junit.jupiter
98 | junit-jupiter-params
99 | ${junit.jupiter.version}
100 | test
101 |
102 |
103 | org.junit.vintage
104 | junit-vintage-engine
105 | ${junit.vintage.version}
106 | test
107 |
108 |
109 | org.junit.platform
110 | junit-platform-runner
111 | ${junit.platform.version}
112 | test
113 |
114 |
115 | org.junit.platform
116 | junit-platform-console
117 | ${junit.platform.version}
118 | test
119 |
120 |
121 | org.hamcrest
122 | hamcrest-junit
123 | ${hamcrest.version}
124 | test
125 |
126 |
127 | org.mockito
128 | mockito-core
129 | ${mockito.version}
130 | test
131 |
132 |
133 | com.github.javadev
134 | underscore
135 | 1.81
136 |
137 |
138 | org.mongodb
139 | bson
140 | 4.8.0
141 |
142 |
143 | ch.qos.logback
144 | logback-classic
145 | 1.4.14
146 |
147 |
148 | org.testcontainers
149 | testcontainers
150 | 1.19.3
151 | test
152 |
153 |
154 | org.testcontainers
155 | kafka
156 | 1.19.3
157 | test
158 |
159 |
160 | com.esotericsoftware.yamlbeans
161 | yamlbeans
162 | 1.15
163 | test
164 |
165 |
166 | com.squareup.okhttp3
167 | okhttp
168 | 4.10.0
169 | test
170 |
171 |
172 | org.apache.avro
173 | avro
174 | ${avro.version}
175 | test
176 |
177 |
178 | io.confluent
179 | kafka-avro-serializer
180 | 7.2.2
181 | test
182 |
183 |
184 | com.fasterxml.jackson.core
185 | jackson-annotations
186 | 2.13.4
187 | test
188 |
189 |
190 |
191 | org.postgresql
192 | postgresql
193 | 42.5.0
194 | test
195 |
196 |
197 | org.slf4j
198 | slf4j-simple
199 | 2.0.9
200 |
201 |
202 |
203 |
204 |
205 |
206 |
207 | org.apache.maven.plugins
208 | maven-surefire-plugin
209 |
210 |
211 |
212 |
213 |
214 | org.jacoco
215 | jacoco-maven-plugin
216 | 0.8.6
217 |
218 |
219 |
220 | prepare-agent
221 |
222 |
223 |
224 | report
225 | prepare-package
226 |
227 | report
228 |
229 |
230 |
231 | jacoco-check
232 |
233 | check
234 |
235 |
236 |
237 |
238 |
239 |
240 | 0.8
241 |
242 |
243 |
244 |
245 |
246 |
247 |
248 |
249 |
250 | maven-release-plugin
251 | 2.5.3
252 |
253 |
254 | org.apache.maven.plugins
255 | maven-jar-plugin
256 | 3.2.0
257 |
258 |
259 |
260 | true
261 | true
262 |
263 |
264 |
265 |
266 |
267 | org.apache.maven.plugins
268 | maven-compiler-plugin
269 | 3.8.1
270 | true
271 |
272 | 1.8
273 | 1.8
274 |
275 |
276 |
277 | org.apache.maven.plugins
278 | maven-assembly-plugin
279 | 3.2.0
280 |
281 |
282 | jar-with-dependencies
283 |
284 | ${basedir}/target/kafka-tojsonstring-smt/
285 |
286 |
287 |
288 | make-assembly
289 | package
290 |
291 | single
292 |
293 |
294 |
295 |
296 |
297 | org.apache.maven.plugins
298 | maven-failsafe-plugin
299 | 2.22.2
300 |
301 | false
302 |
303 | **/*IT.java
304 |
305 |
306 |
307 |
308 | integration-test
309 |
310 | integration-test
311 | verify
312 |
313 |
314 |
315 |
316 |
317 | io.confluent
318 | kafka-connect-maven-plugin
319 | 0.12.0
320 |
321 |
322 |
323 | kafka-connect
324 |
325 |
326 | Record to JSON String Transformation
327 | false
328 | https://github.com/an0r0c/kafka-connect-transform-tojsonstring
329 | https://github.com/an0r0c/kafka-connect-transform-tojsonstring
330 |
331 | transform
332 |
333 | an0r0c
334 | Christian Edelsbrunner
335 | user
336 | https://github.com/an0r0c
337 |
338 | Transform
339 |
340 | https://github.com/an0r0c/kafka-connect-transform-tojsonstring/issues
341 |
342 | Support provided through open source community, bugs can be filed as github issues.
343 |
344 |
345 |
346 |
347 |
348 |
349 |
350 |
351 |
352 |
353 | confluent
354 | https://packages.confluent.io/maven/
355 |
356 |
357 |
358 |
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | [](https://sonarcloud.io/dashboard?id=an0r0c_kafka-connect-transform-tojsonstring)
2 | [](https://sonarcloud.io/dashboard?id=an0r0c_kafka-connect-transform-tojsonstring)
3 | [](https://sonarcloud.io/dashboard?id=an0r0c_kafka-connect-transform-tojsonstring)
4 | [](https://sonarcloud.io/dashboard?id=an0r0c_kafka-connect-transform-tojsonstring)
5 | [](https://sonarcloud.io/dashboard?id=an0r0c_kafka-connect-transform-tojsonstring)
6 | [](https://sonarcloud.io/dashboard?id=an0r0c_kafka-connect-transform-tojsonstring)
7 | [](https://sonarcloud.io/dashboard?id=an0r0c_kafka-connect-transform-tojsonstring)
8 |
9 | # kafka-connect-transform-tojsonstring - A simple Record to JSON String SMT
10 | This is a very simple Kafka Connect SMT which takes the entire key or value record and transforms it to a new record which contains exactly one field with a JSON representation of the origin record.
11 |
12 | Blog Post describing how we ended up developing this SMT can be found [here](https://medium.com/bearingpoint-technology-advisory/handle-arrays-and-nested-arrays-in-kafka-jdbc-sink-connector-41929ea46301?source=friends_link&sk=b7028711b4945c820f647df950cdd949)
13 |
14 | ## Use Cases
 15 | The reason why this SMT was built is the known limitation of the JDBC Sink Connector in handling nested arrays. If you have a schema which contains arrays you cannot really use the JDBC Sink Connector, because this connector only supports primitive data types.
 16 | But sometimes you also need some of those arrays in the RDBMS. If your RDBMS is able to handle JSON Strings, this SMT might be the saviour: you can use it to transform the whole record into a single JSON String which can be mapped by the JDBC Sink Connector.
17 | Afterwards you can use the tools offered by the RDBMS to parse and process the JSON String.
18 |
 19 | There are surely other use cases out there where this SMT might be helpful, too.
20 |
21 | ## Restrictions
 22 | This SMT was built to transform Records **with a Schema** into a new Record with a Schema that contains only one field.
23 | So this SMT does not work for Schemaless records.
24 |
 25 | It was also only tested with Avro Schemas backed by Confluent Schema Registry (but it will most likely work for other schema variants too, because the deserializer has already converted the record into a Connect Record before the SMT runs, so it shouldn't be schema specific).
26 |
27 | ## Configuration
28 | ```json5
29 | {
30 | ////
31 | "transforms": "tojson",
32 | "transforms.tojson.type": "com.github.cedelsb.kafka.connect.smt.Record2JsonStringConverter$Value",
33 | "transforms.tojson.json.string.field.name" : "myawesomejsonstring", // Optional
34 | "transforms.tojson.post.processing.to.xml" : false, // Optional
35 | "transforms.tojson.json.writer.handle.logical.types" : true, // Optional
36 | "transforms.tojson.json.writer.datetime.logical.types.as" : "STRING", // Optional
37 | "transforms.tojson.json.writer.datetime.pattern" : "", // Optional
38 | "transforms.tojson.json.writer.datetime.zoneid" : "UTC" // Optional
39 | ////
40 | }
41 | ```
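 42 | 
 43 | For unit tests or quick experiments the SMT can also be configured straight from Java. The following is a minimal, illustrative sketch (only the class and option names are the ones from this repo; everything else is made up) - note that Kafka Connect strips the `transforms.tojson.` prefix before the options reach the transform's `configure` method:
 44 | 
 45 | ```java
 46 | import java.util.HashMap;
 47 | import java.util.Map;
 48 | 
 49 | import org.apache.kafka.connect.sink.SinkRecord;
 50 | 
 51 | import com.github.cedelsb.kafka.connect.smt.Record2JsonStringConverter;
 52 | 
 53 | public class TojsonSmtConfigSketch {
 54 |     public static void main(String[] args) {
 55 |         // Same options as in the JSON snippet above, without the "transforms.tojson." prefix
 56 |         Map<String, Object> props = new HashMap<>();
 57 |         props.put("json.string.field.name", "myawesomejsonstring");
 58 |         props.put("json.writer.output.mode", "RELAXED");
 59 | 
 60 |         // $Value targets the record value; Record2JsonStringConverter$Key would target the key
 61 |         Record2JsonStringConverter<SinkRecord> smt = new Record2JsonStringConverter.Value<>();
 62 |         smt.configure(props);
 63 |         smt.close();
 64 |     }
 65 | }
 66 | ```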
42 |
43 | ### Parameters
44 |
45 |
 46 | | Name | Description | Type | Default | Valid Values | Importance |
 47 | |------|-------------|------|---------|--------------|------------|
 48 | | json.string.field.name | Output schema field name of the field that contains the JSON String | string | jsonstring | non-empty string | high |
 49 | | json.writer.output.mode | Output mode of the BSON JSON Writer | string | RELAXED | RELAXED, EXTENDED, STRICT or SHELL | high |
 50 | | post.processing.to.xml | Post-process the JSON to XML. Some older RDBMS like Oracle 11 are not the best at handling JSON - for such scenarios this option can be used to transform the generated JSON into a schemaless XML String | boolean | false | true/false | high |
 51 | | json.writer.handle.logical.types | In BSON serialization, logical types (dates, times, timestamps, decimal, bytes) are embedded inside a $ field. Setting this configuration to true will remove the embedding and add the value to the parent field. | boolean | false | true/false | high |
 52 | | json.writer.datetime.logical.types.as | Write the logical type field (of time, date or timestamp) either as a STRING or a LONG (epoch) value; only applicable if json.writer.handle.logical.types=true | string | LONG | LONG/STRING | high |
 53 | | json.writer.datetime.pattern | The pattern (either a predefined constant or pattern letters) used to format the date/time or timestamp as a string; only applicable if json.writer.datetime.logical.types.as=STRING | string | ISO_INSTANT | ISO_DATE, ISO_DATE_TIME, ISO_INSTANT, ISO_TIME, ISO_LOCAL_DATE, ISO_LOCAL_DATE_TIME, ISO_LOCAL_TIME, RFC_1123_DATE_TIME, ISO_ZONED_DATE_TIME, ISO_OFFSET_DATE, ISO_OFFSET_DATE_TIME, ISO_OFFSET_TIME, BASIC_ISO_DATE, ISO_ORDINAL_DATE, ISO_WEEK_DATE, "pattern" | high |
 54 | | json.writer.datetime.zoneid | The ZoneId used to format the date/time or timestamp as a string; only applicable if json.writer.datetime.logical.types.as=STRING | string | UTC | a valid ZoneId string, such as Europe/Zurich, CET or UTC | high |
79 |
80 |
81 |
82 | ## Example
83 |
84 | ##### Input
85 |
86 | * Schema (avro syntax)
87 |
88 | ```json5
89 | {
90 | "type": "record",
91 | "name": "MyEntity",
92 | "fields": [{
93 | "name": "id",
94 | "type": "string"
95 | },
96 | {
97 | "name": "name",
98 | "type": "string"
99 | },
100 | {
101 | "name": "subElements",
102 | "type": {
103 | "type": "array",
104 | "items": {
105 | "type": "record",
106 | "name": "element",
107 | "fields": [{
108 | "name": "id",
109 | "type": "string",
110 |
111 | }]
112 | }
113 | }
114 | }]
115 | }
116 | ```
117 |
118 | * Value
119 |
120 | ```
121 | -id:myobject
122 | -name:awesomename
123 | -subElements:
124 | -id:element1
125 | -id:element2
126 | ```
127 | ##### Output
128 | * Schema
129 | ```json5
130 | {
131 | "type": "record",
132 | "name": "jsonStringSchema",
133 | "fields": [{
134 | "name": "jsonstring",
135 | "type": "string"
136 | }]
137 | }
138 | ```
139 | * Value (of the schema field "jsonstring")
140 | ```json5
 141 | {
 142 | "id": "myobject",
 143 | "name": "awesomename",
 144 | "subElements": [{"id": "element1"},
 145 |                 {"id": "element2"}]
 146 | }
148 | ```
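 149 | 
 150 | To make the example concrete, here is a rough, untested sketch of how the input above could be built as a Connect Struct and pushed through the SMT programmatically (the topic name, partition and offset are made up; only the schema, values and SMT classes come from this repo):
 151 | 
 152 | ```java
 153 | import java.util.Arrays;
 154 | import java.util.Collections;
 155 | 
 156 | import org.apache.kafka.connect.data.Schema;
 157 | import org.apache.kafka.connect.data.SchemaBuilder;
 158 | import org.apache.kafka.connect.data.Struct;
 159 | import org.apache.kafka.connect.sink.SinkRecord;
 160 | 
 161 | import com.github.cedelsb.kafka.connect.smt.Record2JsonStringConverter;
 162 | 
 163 | public class Record2JsonStringExampleSketch {
 164 |     public static void main(String[] args) {
 165 |         // Connect schemas mirroring the Avro example above
 166 |         Schema elementSchema = SchemaBuilder.struct().name("element")
 167 |                 .field("id", Schema.STRING_SCHEMA)
 168 |                 .build();
 169 |         Schema entitySchema = SchemaBuilder.struct().name("MyEntity")
 170 |                 .field("id", Schema.STRING_SCHEMA)
 171 |                 .field("name", Schema.STRING_SCHEMA)
 172 |                 .field("subElements", SchemaBuilder.array(elementSchema).build())
 173 |                 .build();
 174 | 
 175 |         Struct value = new Struct(entitySchema)
 176 |                 .put("id", "myobject")
 177 |                 .put("name", "awesomename")
 178 |                 .put("subElements", Arrays.asList(
 179 |                         new Struct(elementSchema).put("id", "element1"),
 180 |                         new Struct(elementSchema).put("id", "element2")));
 181 | 
 182 |         // Configure the $Value variant with the default output field name
 183 |         Record2JsonStringConverter<SinkRecord> smt = new Record2JsonStringConverter.Value<>();
 184 |         smt.configure(Collections.singletonMap("json.string.field.name", "jsonstring"));
 185 | 
 186 |         SinkRecord record = new SinkRecord("mytopic", 0, null, null, entitySchema, value, 0L);
 187 |         SinkRecord transformed = smt.apply(record);
 188 | 
 189 |         // The new value schema has a single string field holding the JSON document
 190 |         Struct out = (Struct) transformed.value();
 191 |         System.out.println(out.getString("jsonstring"));
 192 | 
 193 |         smt.close();
 194 |     }
 195 | }
 196 | ```
 197 | 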
149 | ## Build, installation / deployment
150 | You can build this project from sources via Maven.
151 |
 152 | Or download a pre-built release from [Releases](https://github.com/an0r0c/kafka-connect-transform-tojsonstring/releases)
153 |
154 | ## Thanks and Acknowledgement
 155 | The basic structure of how to build an SMT was taken from [kafka-connect-insert-uuid](https://github.com/cjmatta/kafka-connect-insert-uuid)
156 |
 157 | The logic for transforming a Connect Record into a JSON Document is built on the awesome converter implemented in [kafka-connect-mongodb](https://github.com/hpgrahsl/kafka-connect-mongodb), which saved me a lot of time and nerves :)
158 |
159 | ## License Information
160 |
 161 | This project is licensed under the [Apache License Version 2.0](https://www.apache.org/licenses/LICENSE-2.0)
162 |
163 | ```
164 | Copyright (c) 2021. Christian Edelsbrunner (christian.edelsbrunner@gmail.com)
165 |
166 | Licensed under the Apache License, Version 2.0 (the "License");
167 | you may not use this file except in compliance with the License.
168 | You may obtain a copy of the License at
169 |
170 | http://www.apache.org/licenses/LICENSE-2.0
171 |
172 | Unless required by applicable law or agreed to in writing, software
173 | distributed under the License is distributed on an "AS IS" BASIS,
174 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
175 | See the License for the specific language governing permissions and
176 | limitations under the License.
177 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/Record2JsonStringConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright © 2021 Christian Edelsbrunner (christian.edelsbrunner@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.AvroJsonSchemafulRecordConverter;
20 | import com.github.cedelsb.kafka.connect.smt.converter.JsonSchemalessRecordConverter;
21 | import com.github.cedelsb.kafka.connect.smt.converter.types.json.JsonBinaryConverter;
22 | import com.github.cedelsb.kafka.connect.smt.converter.types.json.JsonDateTimeAsLongConverter;
23 | import com.github.cedelsb.kafka.connect.smt.converter.types.json.JsonDateTimeAsStringConverter;
24 | import com.github.cedelsb.kafka.connect.smt.converter.types.json.JsonDecimalConverter;
25 | import com.github.underscore.U;
26 | import org.apache.kafka.common.config.ConfigDef;
27 | import org.apache.kafka.connect.connector.ConnectRecord;
28 | import org.apache.kafka.connect.data.Schema;
29 | import org.apache.kafka.connect.data.SchemaBuilder;
30 | import org.apache.kafka.connect.data.Struct;
31 | import org.apache.kafka.connect.transforms.Transformation;
32 | import org.apache.kafka.connect.transforms.util.SimpleConfig;
33 | import org.bson.BsonDocument;
34 | import org.bson.json.Converter;
35 | import org.bson.json.JsonMode;
36 | import org.bson.json.JsonWriterSettings;
37 | import org.slf4j.Logger;
38 | import org.slf4j.LoggerFactory;
39 |
40 | import java.util.Map;
41 |
42 | import static org.apache.kafka.connect.transforms.util.Requirements.requireStruct;
43 |
 44 | public abstract class Record2JsonStringConverter<R extends ConnectRecord<R>> implements Transformation<R> {
45 |
46 | private static Logger logger = LoggerFactory.getLogger(Record2JsonStringConverter.class);
47 |
48 | public static final String OVERVIEW_DOC =
49 | "Converts a record value with a schema into a new schema containing a single JSON string field";
50 |
51 | private static final class ConfigName {
52 | public static final String JSON_STRING_FIELD_NAME = "json.string.field.name";
53 | public static final String JSON_WRITER_OUTPUT_MODE = "json.writer.output.mode";
54 | public static final String POST_PROCESSING_TO_XML = "post.processing.to.xml";
55 | public static final String JSON_WRITER_HANDLE_LOGICAL_TYPES = "json.writer.handle.logical.types";
56 | public static final String JSON_WRITER_DATETIME_LOGICAL_TYPES_AS = "json.writer.datetime.logical.types.as";
57 | public static final String JSON_WRITER_DATETIME_PATTERN = "json.writer.datetime.pattern";
58 | public static final String JSON_WRITER_DATETIME_ZONE_ID = "json.writer.datetime.zoneid";
59 | }
60 |
61 | public static final ConfigDef CONFIG_DEF = new ConfigDef()
62 | .define(ConfigName.JSON_STRING_FIELD_NAME, ConfigDef.Type.STRING, "jsonstring", ConfigDef.Importance.HIGH,
63 | "Field name for output JSON String field")
64 | .define(ConfigName.JSON_WRITER_OUTPUT_MODE, ConfigDef.Type.STRING, "RELAXED", ConfigDef.Importance.MEDIUM,
65 | "Output mode of JSON Writer (RELAXED,EXTENDED,SHELL or STRICT)")
66 | .define(ConfigName.POST_PROCESSING_TO_XML, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
67 | "Some old RBDMS like Oracle 11 are not the best in handling JSON - for such scenarios this option can be used to transform the generated JSON into a schemaless XML String")
68 | .define(ConfigName.JSON_WRITER_HANDLE_LOGICAL_TYPES, ConfigDef.Type.BOOLEAN, false, ConfigDef.Importance.LOW,
69 | "In BSON serialization, logical types (dates, times, timestamps, decimal, bytes) are embedded inside a $ field. Setting this configuration to true will remove the embeddings and add the value to the parent field.")
70 | .define(ConfigName.JSON_WRITER_DATETIME_LOGICAL_TYPES_AS, ConfigDef.Type.STRING, "LONG", ConfigDef.Importance.LOW,
71 | "Write the logical type field (of time, date or timestamp) either as a STRING or a LONG (epoc) value, only applicable if json.writer.handle.logical.types=true")
72 | .define(ConfigName.JSON_WRITER_DATETIME_PATTERN, ConfigDef.Type.STRING, null, ConfigDef.Importance.LOW,
73 | "The pattern (either a predefined constant or pattern letters) to use to format the date/time or timestamp as string, only applicable if json.writer.datetime.logical.types.as=STRING")
74 | .define(ConfigName.JSON_WRITER_DATETIME_ZONE_ID, ConfigDef.Type.STRING, "UTC", ConfigDef.Importance.LOW,
75 | "The zone id to use to format the date/time or timestamp as string, only applicable if json.writer.datetime.logical.types.as=STRING"
76 | );
77 |
78 | private static final String PURPOSE = "Converting record with Schema into a simple JSON String";
79 |
80 | private String jsonStringFieldName;
81 | private Schema jsonStringOutputSchema;
82 | private boolean transformToXML;
83 | private String writeDatetimeLogicalTypesAs;
84 | private String writeDatetimeWithPattern;
85 | private String writeDatetimeWithZoneId;
86 |
87 | private boolean handleLogicalTypes;
88 |
89 | AvroJsonSchemafulRecordConverter converter;
90 | JsonSchemalessRecordConverter converterWithoutSchema;
91 | JsonWriterSettings jsonWriterSettings;
92 |
93 | @Override
 94 |     public void configure(Map<String, ?> props) {
95 | final SimpleConfig config = new SimpleConfig(CONFIG_DEF, props);
96 | jsonStringFieldName = config.getString(ConfigName.JSON_STRING_FIELD_NAME);
97 | jsonStringOutputSchema = makeJsonStringOutputSchema();
98 | handleLogicalTypes = config.getBoolean(ConfigName.JSON_WRITER_HANDLE_LOGICAL_TYPES);
99 | writeDatetimeLogicalTypesAs = config.getString(ConfigName.JSON_WRITER_DATETIME_LOGICAL_TYPES_AS);
100 | writeDatetimeWithPattern = config.getString(ConfigName.JSON_WRITER_DATETIME_PATTERN);
101 | writeDatetimeWithZoneId = config.getString(ConfigName.JSON_WRITER_DATETIME_ZONE_ID);
102 |
103 | if (handleLogicalTypes) {
104 | Converter dateTimeConverter = null;
105 | if (writeDatetimeLogicalTypesAs.equalsIgnoreCase("STRING")) {
106 | dateTimeConverter = new JsonDateTimeAsStringConverter(writeDatetimeWithPattern, writeDatetimeWithZoneId);
107 | } else if (writeDatetimeLogicalTypesAs.equalsIgnoreCase("LONG")) {
108 | dateTimeConverter = new JsonDateTimeAsLongConverter();
109 | } else {
110 | throw new IllegalArgumentException("Wrong value for configuration setting: " + ConfigName.JSON_WRITER_DATETIME_LOGICAL_TYPES_AS + "=" + writeDatetimeLogicalTypesAs);
111 | }
112 |
113 | jsonWriterSettings = JsonWriterSettings
114 | .builder()
115 | .outputMode(toJsonMode(config.getString(ConfigName.JSON_WRITER_OUTPUT_MODE)))
116 | .dateTimeConverter(dateTimeConverter)
117 | .decimal128Converter(new JsonDecimalConverter())
118 | .binaryConverter(new JsonBinaryConverter())
119 | .build();
120 | } else {
121 | jsonWriterSettings = JsonWriterSettings
122 | .builder()
123 | .outputMode(toJsonMode(config.getString(ConfigName.JSON_WRITER_OUTPUT_MODE)))
124 | .build();
125 | }
126 |
127 | converter = new AvroJsonSchemafulRecordConverter();
128 | converterWithoutSchema = new JsonSchemalessRecordConverter();
129 |
130 | transformToXML = config.getBoolean(ConfigName.POST_PROCESSING_TO_XML);
131 | }
132 |
133 | @Override
134 | public R apply(R record) {
135 |
136 | if (isTombstoneRecord(record))
137 | return record;
138 |
139 | Schema schema = operatingSchema(record);
140 | Object value = operatingValue(record);
141 |
142 | BsonDocument bsonDoc;
143 | if (schema != null) {
144 | bsonDoc = convertWithSchema(schema, value);
145 | } else {
146 | bsonDoc = convertWithoutSchema(value);
147 | }
148 |
149 | final Struct jsonStringOutputStruct = new Struct(jsonStringOutputSchema);
150 | String outputDocument = bsonDoc.toJson(jsonWriterSettings);
151 |
152 | if (transformToXML) {
153 | outputDocument = U.jsonToXml(outputDocument);
154 | }
155 |
156 | jsonStringOutputStruct.put(jsonStringFieldName, outputDocument);
157 |
158 | return newRecord(record, jsonStringOutputSchema, jsonStringOutputStruct);
159 | }
160 |
161 | private BsonDocument convertWithSchema(Schema schema, Object value) {
162 | final Struct struct = requireStruct(value, PURPOSE);
163 | return converter.convert(schema, struct);
164 | }
165 |
166 | private BsonDocument convertWithoutSchema(Object value) {
167 | return converterWithoutSchema.convert(makeDefaultSchema(), value);
168 | }
169 |
170 | private Schema makeDefaultSchema() {
171 | return SchemaBuilder
172 | .struct()
173 | .name("defaultSchema")
174 | .version(1)
175 | .field("value", Schema.STRING_SCHEMA)
176 | .build();
177 | }
178 |
179 | @Override
180 | public ConfigDef config() {
181 | return CONFIG_DEF;
182 | }
183 |
184 |
185 | @Override
186 | public void close() {
187 | converter = null;
188 | }
189 |
190 | private Schema makeJsonStringOutputSchema() {
191 | return SchemaBuilder
192 | .struct()
193 | .name("jsonStringSchema")
194 | .version(1)
195 | .field(jsonStringFieldName, Schema.STRING_SCHEMA)
196 | .build();
197 | }
198 |
199 | private JsonMode toJsonMode(String jsonMode) {
200 | switch (jsonMode) {
201 | case "SHELL":
202 | return JsonMode.SHELL;
203 | case "EXTENDED":
204 | return JsonMode.EXTENDED;
205 | case "STRICT":
206 | return JsonMode.STRICT;
207 | default:
208 | return JsonMode.RELAXED;
209 | }
210 | }
211 |
212 | protected abstract Schema operatingSchema(R record);
213 |
214 | protected abstract Object operatingValue(R record);
215 |
216 | protected abstract R newRecord(R record, Schema updatedSchema, Object updatedValue);
217 |
218 | protected abstract boolean isTombstoneRecord(R record);
219 |
 220 |     public static class Key<R extends ConnectRecord<R>> extends Record2JsonStringConverter<R> {
221 |
222 | @Override
223 | protected Schema operatingSchema(R record) {
224 |
225 | return record.keySchema();
226 | }
227 |
228 | @Override
229 | protected Object operatingValue(R record) {
230 |
231 | return record.key();
232 | }
233 |
234 | @Override
235 | protected R newRecord(R record, Schema updatedSchema, Object updatedValue) {
236 | return record.newRecord(record.topic(), record.kafkaPartition(), updatedSchema, updatedValue, record.valueSchema(), record.value(), record.timestamp());
237 | }
238 |
239 | @Override
240 | protected boolean isTombstoneRecord(R record) {
241 | return record.key() == null;
242 | }
243 |
244 | }
245 |
 246 |     public static class Value<R extends ConnectRecord<R>> extends Record2JsonStringConverter<R> {
247 |
248 | @Override
249 | protected Schema operatingSchema(R record) {
250 | return record.valueSchema();
251 | }
252 |
253 | @Override
254 | protected Object operatingValue(R record) {
255 | return record.value();
256 | }
257 |
258 | @Override
259 | protected R newRecord(R record, Schema updatedSchema, Object updatedValue) {
260 | return record.newRecord(record.topic(), record.kafkaPartition(), record.keySchema(), record.key(), updatedSchema, updatedValue, record.timestamp());
261 | }
262 |
263 | @Override
264 | protected boolean isTombstoneRecord(R record) {
265 | return record.value() == null;
266 | }
267 |
268 | }
269 | }
270 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/AvroJsonSchemafulRecordConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.*;
20 | import com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical.*;
21 | import org.apache.kafka.connect.data.Date;
22 | import org.apache.kafka.connect.data.*;
23 | import org.apache.kafka.connect.errors.ConnectException;
24 | import org.apache.kafka.connect.errors.DataException;
25 | import org.bson.BsonArray;
26 | import org.bson.BsonDocument;
27 | import org.bson.BsonNull;
28 | import org.bson.BsonValue;
29 | import org.slf4j.Logger;
30 | import org.slf4j.LoggerFactory;
31 |
32 | import java.util.*;
33 |
34 | //looks like Avro and JSON + Schema is convertible by means of
35 | //a unified conversion approach since they are using the
 36 | //same Struct/Type information ...
37 | public class AvroJsonSchemafulRecordConverter implements RecordConverter {
38 |
 39 |     public static final Set<String> LOGICAL_TYPE_NAMES = new HashSet<>(
40 | Arrays.asList(Date.LOGICAL_NAME, Decimal.LOGICAL_NAME,
41 | Time.LOGICAL_NAME, Timestamp.LOGICAL_NAME)
42 | );
43 |
 44 |     private final Map<Schema.Type, SinkFieldConverter> converters = new HashMap<>();
 45 |     private final Map<String, SinkFieldConverter> logicalConverters = new HashMap<>();
46 |
47 | private static Logger logger = LoggerFactory.getLogger(AvroJsonSchemafulRecordConverter.class);
48 |
49 | public AvroJsonSchemafulRecordConverter() {
50 |
51 | //standard types
52 | registerSinkFieldConverter(new BooleanFieldConverter());
53 | registerSinkFieldConverter(new Int8FieldConverter());
54 | registerSinkFieldConverter(new Int16FieldConverter());
55 | registerSinkFieldConverter(new Int32FieldConverter());
56 | registerSinkFieldConverter(new Int64FieldConverter());
57 | registerSinkFieldConverter(new Float32FieldConverter());
58 | registerSinkFieldConverter(new Float64FieldConverter());
59 | registerSinkFieldConverter(new StringFieldConverter());
60 | registerSinkFieldConverter(new BytesFieldConverter());
61 |
62 | //logical types
63 | registerSinkFieldLogicalConverter(new DateFieldConverter());
64 | registerSinkFieldLogicalConverter(new TimeFieldConverter());
65 | registerSinkFieldLogicalConverter(new TimestampFieldConverter());
66 | registerSinkFieldLogicalConverter(new DecimalFieldConverter());
67 | }
68 |
69 | @Override
70 | public BsonDocument convert(Schema schema, Object value) {
71 |
72 | if(schema == null || value == null) {
73 | throw new DataException("error: schema and/or value was null for AVRO conversion");
74 | }
75 |
76 | return toBsonDoc(schema, value);
77 |
78 | }
79 |
80 | private void registerSinkFieldConverter(SinkFieldConverter converter) {
81 | converters.put(converter.getSchema().type(), converter);
82 | }
83 |
84 | private void registerSinkFieldLogicalConverter(SinkFieldConverter converter) {
85 | logicalConverters.put(converter.getSchema().name(), converter);
86 | }
87 |
88 | private BsonDocument toBsonDoc(Schema schema, Object value) {
89 | BsonDocument doc = new BsonDocument();
90 | schema.fields().forEach(f -> processField(doc, (Struct)value, f));
91 | return doc;
92 | }
93 |
94 | private void processField(BsonDocument doc, Struct struct, Field field) {
95 |
96 | logger.trace("processing field '{}'",field.name());
97 |
98 | if(isSupportedLogicalType(field.schema())) {
99 | doc.put(field.name(), getConverter(field.schema()).toBson(struct.get(field),field.schema()));
100 | return;
101 | }
102 |
103 | try {
104 | switch(field.schema().type()) {
105 | case BOOLEAN:
106 | case FLOAT32:
107 | case FLOAT64:
108 | case INT8:
109 | case INT16:
110 | case INT32:
111 | case INT64:
112 | case STRING:
113 | case BYTES:
114 | handlePrimitiveField(doc, struct.get(field), field);
115 | break;
116 | case STRUCT:
117 | handleStructField(doc, (Struct)struct.get(field), field);
118 | break;
119 | case ARRAY:
120 | doc.put(field.name(),handleArrayField((List)struct.get(field), field));
121 | break;
122 | case MAP:
123 | handleMapField(doc, (Map)struct.get(field), field);
124 | break;
125 | default:
126 | logger.error("Invalid schema. unexpected / unsupported schema type '"
127 | + field.schema().type() + "' for field '"
128 | + field.name() + "' value='" + struct + "'");
129 | throw new DataException("unexpected / unsupported schema type " + field.schema().type());
130 | }
131 | } catch (Exception exc) {
132 | logger.error("Error while processing field. schema type '"
133 | + field.schema().type() + "' for field '"
134 | + field.name() + "' value='" + struct + "'");
135 | throw new DataException("error while processing field " + field.name(), exc);
136 | }
137 |
138 | }
139 |
140 | private void handleMapField(BsonDocument doc, Map m, Field field) {
141 | logger.trace("handling complex type 'map'");
142 | if(m==null) {
143 | logger.trace("no field in struct -> adding null");
144 | doc.put(field.name(), BsonNull.VALUE);
145 | return;
146 | }
147 | BsonDocument bd = new BsonDocument();
148 | for(Object entry : m.keySet()) {
149 | String key = (String)entry;
150 | Schema.Type valueSchemaType = field.schema().valueSchema().type();
151 | if(valueSchemaType.isPrimitive()) {
152 | bd.put(key, getConverter(field.schema().valueSchema()).toBson(m.get(key),field.schema()));
153 | } else if (valueSchemaType.equals(Schema.Type.ARRAY)) {
154 | final Field elementField = new Field(key, 0, field.schema().valueSchema());
155 | final List list = (List)m.get(key);
156 | logger.trace("adding array values to {} of type valueSchema={} value='{}'",
157 | elementField.name(), elementField.schema().valueSchema(), list);
158 | bd.put(key, handleArrayField(list, elementField));
159 | } else {
160 | bd.put(key, toBsonDoc(field.schema().valueSchema(), m.get(key)));
161 | }
162 | }
163 | doc.put(field.name(), bd);
164 | }
165 |
166 | private BsonValue handleArrayField(List list, Field field) {
167 | logger.trace("handling complex type 'array' of types '{}'",
168 | field.schema().valueSchema().type());
169 | if(list==null) {
170 | logger.trace("no array -> adding null");
171 | return BsonNull.VALUE;
172 | }
173 | BsonArray array = new BsonArray();
174 | Schema.Type st = field.schema().valueSchema().type();
175 | for(Object element : list) {
176 | if(st.isPrimitive()) {
177 | array.add(getConverter(field.schema().valueSchema()).toBson(element,field.schema()));
178 | } else if(st == Schema.Type.ARRAY) {
179 | Field elementField = new Field("first", 0, field.schema().valueSchema());
180 | array.add(handleArrayField((List)element,elementField));
181 | } else {
182 | array.add(toBsonDoc(field.schema().valueSchema(), element));
183 | }
184 | }
185 | return array;
186 | }
187 |
188 | private void handleStructField(BsonDocument doc, Struct struct, Field field) {
189 | logger.trace("handling complex type 'struct'");
190 | if(struct!=null) {
191 | logger.trace(struct.toString());
192 | doc.put(field.name(), toBsonDoc(field.schema(), struct));
193 | } else {
194 | logger.trace("no field in struct -> adding null");
195 | doc.put(field.name(), BsonNull.VALUE);
196 | }
197 | }
198 |
199 | private void handlePrimitiveField(BsonDocument doc, Object value, Field field) {
200 | logger.trace("handling primitive type '{}' name='{}'",field.schema().type(),field.name());
201 | doc.put(field.name(), getConverter(field.schema()).toBson(value,field.schema()));
202 | }
203 |
204 | private boolean isSupportedLogicalType(Schema schema) {
205 |
206 | if(schema.name() == null) {
207 | return false;
208 | }
209 |
210 | return LOGICAL_TYPE_NAMES.contains(schema.name());
211 |
212 | }
213 |
214 | private SinkFieldConverter getConverter(Schema schema) {
215 |
216 | SinkFieldConverter converter;
217 |
218 | if(isSupportedLogicalType(schema)) {
219 | converter = logicalConverters.get(schema.name());
220 | } else {
221 | converter = converters.get(schema.type());
222 | }
223 |
224 | if (converter == null) {
225 | throw new ConnectException("error no registered converter found for " + schema.type().getName());
226 | }
227 |
228 | return converter;
229 | }
230 | }
231 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/FieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.apache.kafka.connect.data.Schema;
20 |
21 | public abstract class FieldConverter {
22 |
23 | private final Schema schema;
24 |
25 | public FieldConverter(Schema schema) {
26 | this.schema = schema;
27 | }
28 |
29 | public Schema getSchema() { return schema; }
30 |
31 | }
32 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/JsonRawStringRecordConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.apache.kafka.connect.data.Schema;
20 | import org.apache.kafka.connect.errors.DataException;
21 | import org.bson.BsonDocument;
22 |
23 | public class JsonRawStringRecordConverter implements RecordConverter {
24 |
25 | @Override
26 | public BsonDocument convert(Schema schema, Object value) {
27 |
28 | if(value == null) {
29 | throw new DataException("error: value was null for JSON conversion");
30 | }
31 |
32 | return BsonDocument.parse((String)value);
33 |
34 | }
35 | }
36 |
37 |
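Editor's usage sketch: the raw converter simply parses the record's String payload as JSON, so a value like the one below yields a BsonDocument directly, while a null value raises the DataException above.

    BsonDocument doc = new JsonRawStringRecordConverter()
            .convert(Schema.OPTIONAL_STRING_SCHEMA, "{\"id\": 42, \"name\": \"kafka\"}");
    // doc.getInt32("id").getValue() == 42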
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/JsonSchemalessRecordConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.apache.kafka.connect.data.Schema;
20 | import org.apache.kafka.connect.errors.DataException;
21 | import org.bson.BsonDocument;
22 | import org.bson.Document;
23 | import org.bson.codecs.BsonValueCodecProvider;
24 | import org.bson.codecs.DocumentCodecProvider;
25 | import org.bson.codecs.MapCodecProvider;
26 | import org.bson.codecs.ValueCodecProvider;
27 | import org.bson.codecs.configuration.CodecRegistries;
28 | import org.bson.codecs.configuration.CodecRegistry;
29 |
30 | import java.util.Map;
31 |
32 | public class JsonSchemalessRecordConverter implements RecordConverter {
33 |
34 | private final CodecRegistry codecRegistry =
35 | CodecRegistries.fromProviders(
36 | new DocumentCodecProvider(),
37 | new BsonValueCodecProvider(),
38 | new ValueCodecProvider(),
39 | new MapCodecProvider()
40 | );
41 |
42 | @Override
43 | public BsonDocument convert(Schema schema, Object value) {
44 |
45 | if(value == null) {
46 | throw new DataException("error: value was null for JSON conversion");
47 | }
48 |
49 | return new Document((Map)value).toBsonDocument(null, codecRegistry);
50 |
51 | }
52 | }
53 |
54 |
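Editor's usage sketch: schemaless records reach this converter as java.util.Map values, which are wrapped in a Document and rendered through the codec registry above.

    Map<String, Object> value = new LinkedHashMap<>();
    value.put("id", 42);
    value.put("name", "kafka");
    BsonDocument doc = new JsonSchemalessRecordConverter().convert(null, value);
    // doc.toJson() -> {"id": 42, "name": "kafka"}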
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/RecordConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.apache.kafka.connect.data.Schema;
20 | import org.bson.BsonDocument;
21 |
22 | public interface RecordConverter {
23 |
24 | BsonDocument convert(Schema schema, Object value);
25 |
26 | }
27 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/SinkConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.apache.kafka.connect.data.Schema;
20 | import org.apache.kafka.connect.data.Struct;
21 | import org.apache.kafka.connect.errors.DataException;
22 | import org.apache.kafka.connect.sink.SinkRecord;
23 | import org.bson.BsonDocument;
24 | import org.slf4j.Logger;
25 | import org.slf4j.LoggerFactory;
26 |
27 | import java.util.Map;
28 |
29 | public class SinkConverter {
30 |
31 | private static Logger logger = LoggerFactory.getLogger(SinkConverter.class);
32 |
33 | private RecordConverter schemafulConverter = new AvroJsonSchemafulRecordConverter();
34 | private RecordConverter schemalessConverter = new JsonSchemalessRecordConverter();
35 | private RecordConverter rawConverter = new JsonRawStringRecordConverter();
36 |
37 | public SinkDocument convert(SinkRecord record) {
38 |
39 | logger.debug(record.toString());
40 |
41 | BsonDocument keyDoc = null;
42 | if(record.key() != null) {
43 | keyDoc = getRecordConverter(record.key(),record.keySchema())
44 | .convert(record.keySchema(), record.key());
45 | }
46 |
47 | BsonDocument valueDoc = null;
48 | if(record.value() != null) {
49 | valueDoc = getRecordConverter(record.value(),record.valueSchema())
50 | .convert(record.valueSchema(), record.value());
51 | }
52 |
53 | return new SinkDocument(keyDoc, valueDoc);
54 |
55 | }
56 |
57 | private RecordConverter getRecordConverter(Object data, Schema schema) {
58 |
59 | //AVRO or JSON with schema
60 | if(schema != null && data instanceof Struct) {
61 | logger.debug("using schemaful converter");
62 | return schemafulConverter;
63 | }
64 |
65 | //structured JSON without schema
66 | if(data instanceof Map) {
67 | logger.debug("using schemaless converter");
68 | return schemalessConverter;
69 | }
70 |
71 | //raw JSON string
72 | if(data instanceof String) {
73 | logger.debug("using raw converter");
74 | return rawConverter;
75 | }
76 |
77 | throw new DataException("error: no converter present due to unexpected object type "
78 | + data.getClass().getName());
79 | }
80 |
81 | }
82 |
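Editor's sketch of the dispatch in getRecordConverter(): the Java type of the key/value picks the converter.

    SinkConverter sinkConverter = new SinkConverter();
    SinkRecord record = new SinkRecord("topic", 0, null, null,
            Schema.STRING_SCHEMA, "{\"id\": 1}", 0);   // String value -> rawConverter
    SinkDocument doc = sinkConverter.convert(record);
    // doc.getValueDoc().get().getInt32("id").getValue() == 1
    // a Struct value takes the schemaful path, a Map the schemaless one,
    // any other type ends in the DataException above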
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/SinkDocument.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.bson.BsonDocument;
20 |
21 | import java.util.Optional;
22 |
23 | public class SinkDocument {
24 |
25 | private final Optional<BsonDocument> keyDoc;
26 | private final Optional<BsonDocument> valueDoc;
27 |
28 | public SinkDocument(BsonDocument keyDoc, BsonDocument valueDoc) {
29 | this.keyDoc = Optional.ofNullable(keyDoc);
30 | this.valueDoc = Optional.ofNullable(valueDoc);
31 | }
32 |
33 | public Optional<BsonDocument> getKeyDoc() {
34 | return keyDoc;
35 | }
36 |
37 | public Optional<BsonDocument> getValueDoc() {
38 | return valueDoc;
39 | }
40 |
41 | public SinkDocument clone() {
42 | BsonDocument kd = keyDoc.isPresent() ? keyDoc.get().clone() : null;
43 | BsonDocument vd = valueDoc.isPresent() ? valueDoc.get().clone() : null;
44 | return new SinkDocument(kd,vd);
45 | }
46 |
47 | }
48 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/SinkFieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.apache.kafka.connect.data.Schema;
20 | import org.apache.kafka.connect.errors.DataException;
21 | import org.bson.BsonNull;
22 | import org.bson.BsonValue;
23 | import org.slf4j.Logger;
24 | import org.slf4j.LoggerFactory;
25 |
26 | public abstract class SinkFieldConverter extends FieldConverter {
27 |
28 | private static Logger logger = LoggerFactory.getLogger(SinkFieldConverter.class);
29 |
30 | public SinkFieldConverter(Schema schema) {
31 | super(schema);
32 | }
33 |
34 | public abstract BsonValue toBson(Object data);
35 |
36 | public BsonValue toBson(Object data, Schema fieldSchema) {
37 | if(!fieldSchema.isOptional()) {
38 |
39 | if(data == null)
40 | throw new DataException("error: schema not optional but data was null");
41 |
42 | logger.trace("field not optional and data is '{}'",data.toString());
43 | return toBson(data);
44 | }
45 |
46 | if(data != null) {
47 | logger.trace("field optional and data is '{}'",data.toString());
48 | return toBson(data);
49 | }
50 |
51 | if(fieldSchema.defaultValue() != null) {
52 | logger.trace("field optional and no data but default value is '{}'",fieldSchema.defaultValue().toString());
53 | return toBson(fieldSchema.defaultValue());
54 | }
55 |
56 | logger.trace("field optional, no data and no default value thus '{}'", BsonNull.VALUE);
57 | return BsonNull.VALUE;
58 | }
59 |
60 | }
61 |
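Editor's sketch of the null/optional/default resolution in toBson(Object, Schema), using the StringFieldConverter defined further below:

    SinkFieldConverter conv = new StringFieldConverter();
    Schema withDefault = SchemaBuilder.string().optional().defaultValue("n/a").build();
    conv.toBson("hi", Schema.STRING_SCHEMA);           // -> BsonString("hi")
    conv.toBson(null, withDefault);                    // -> BsonString("n/a"), the schema default
    conv.toBson(null, Schema.OPTIONAL_STRING_SCHEMA);  // -> BsonNull.VALUE
    conv.toBson(null, Schema.STRING_SCHEMA);           // -> DataException: schema not optional but data was null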
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/json/JsonBinaryConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2022. Guido Schmutz (schmutz68@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.json;
18 |
19 | import org.bson.BsonBinary;
20 | import org.bson.json.Converter;
21 | import org.bson.json.StrictJsonWriter;
22 | import org.slf4j.Logger;
23 | import org.slf4j.LoggerFactory;
24 |
25 | import java.util.Base64;
26 |
27 | public class JsonBinaryConverter implements Converter<BsonBinary> {
28 | private static final Logger LOGGER = LoggerFactory.getLogger(JsonBinaryConverter.class);
29 | @Override
30 | public void convert(BsonBinary value, StrictJsonWriter writer) {
31 | try {
32 | byte[] b = value.getData();
33 | String base64 = Base64.getEncoder().encodeToString(b);
34 | writer.writeStartObject();
35 | writer.writeString("base64", base64);
36 | writer.writeEndObject();
37 | } catch (Exception e) {
38 | LOGGER.error(String.format("Failed to convert value %s to JSON binary", value.toString()), e);
39 | }
40 | }
41 | }
42 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/json/JsonDateTimeAsLongConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2022. Guido Schmutz (schmutz68@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.json;
18 |
19 | import org.bson.json.Converter;
20 | import org.bson.json.StrictJsonWriter;
21 | import org.slf4j.Logger;
22 | import org.slf4j.LoggerFactory;
23 |
24 | public class JsonDateTimeAsLongConverter implements Converter<Long> {
25 | private static final Logger LOGGER = LoggerFactory.getLogger(JsonDateTimeAsLongConverter.class);
26 |
27 | @Override
28 | public void convert(Long value, StrictJsonWriter writer) {
29 | try {
30 | writer.writeNumber(Long.toString(value));
31 | } catch (Exception e) {
32 | LOGGER.error(String.format("Failed to convert value %d to JSON date", value), e);
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/json/JsonDateTimeAsStringConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2022. Guido Schmutz (schmutz68@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.json;
18 |
19 | import org.bson.json.Converter;
20 | import org.bson.json.StrictJsonWriter;
21 | import org.slf4j.Logger;
22 | import org.slf4j.LoggerFactory;
23 |
24 | import java.time.Instant;
25 | import java.time.ZoneId;
26 | import java.time.format.DateTimeFormatter;
27 | import java.util.Date;
28 |
29 | public class JsonDateTimeAsStringConverter implements Converter<Long> {
30 |
31 | private static final Logger LOGGER = LoggerFactory.getLogger(JsonDateTimeAsStringConverter.class);
32 |
33 | private DateTimeFormatter dateTimeFormatter = null;
34 |
35 | private DateTimeFormatter toDateTimeFormat(String dateTimeFormat)
36 | {
37 | switch(dateTimeFormat.toUpperCase())
38 | {
39 | case "ISO_DATE":
40 | return DateTimeFormatter.ISO_DATE;
41 | case "ISO_DATE_TIME":
42 | return DateTimeFormatter.ISO_DATE_TIME;
43 | case "ISO_INSTANT":
44 | return DateTimeFormatter.ISO_INSTANT;
45 | case "ISO_TIME":
46 | return DateTimeFormatter.ISO_TIME;
47 | case "ISO_LOCAL_DATE":
48 | return DateTimeFormatter.ISO_LOCAL_DATE;
49 | case "ISO_LOCAL_DATE_TIME":
50 | return DateTimeFormatter.ISO_LOCAL_DATE_TIME;
51 | case "ISO_LOCAL_TIME":
52 | return DateTimeFormatter.ISO_LOCAL_TIME;
53 | case "RFC_1123_DATE_TIME":
54 | return DateTimeFormatter.RFC_1123_DATE_TIME;
55 | case "ISO_ZONED_DATE_TIME":
56 | return DateTimeFormatter.ISO_ZONED_DATE_TIME;
57 | case "ISO_OFFSET_DATE":
58 | return DateTimeFormatter.ISO_OFFSET_DATE;
59 | case "ISO_OFFSET_DATE_TIME":
60 | return DateTimeFormatter.ISO_OFFSET_DATE_TIME;
61 | case "ISO_OFFSET_TIME":
62 | return DateTimeFormatter.ISO_OFFSET_TIME;
63 | case "BASIC_ISO_DATE":
64 | return DateTimeFormatter.BASIC_ISO_DATE;
65 | case "ISO_ORDINAL_DATE":
66 | return DateTimeFormatter.ISO_ORDINAL_DATE;
67 | case "ISO_WEEK_DATE":
68 | return DateTimeFormatter.ISO_WEEK_DATE;
69 | default:
70 | return DateTimeFormatter.ofPattern(dateTimeFormat);
71 | }
72 | }
73 |
74 | public JsonDateTimeAsStringConverter(String dateTimeFormat, String zoneId) {
75 | if (dateTimeFormat == null || dateTimeFormat.isEmpty()) {
76 | dateTimeFormatter = DateTimeFormatter.ISO_INSTANT.withZone(ZoneId.of(zoneId));
77 | } else {
78 | dateTimeFormatter = toDateTimeFormat(dateTimeFormat).withZone(ZoneId.of(zoneId));
79 | }
80 | }
81 |
82 | @Override
83 | public void convert(Long value, StrictJsonWriter writer) {
84 | try {
85 | Instant instant = new Date(value).toInstant();
86 | String s = dateTimeFormatter.format(instant);
87 | writer.writeString(s);
88 | } catch (Exception e) {
89 | LOGGER.error(String.format("Failed to convert value %d to JSON date", value), e);
90 | }
91 | }
92 | }
93 |
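Editor's sketch: the writer hands this converter the BSON date as epoch millis; with no explicit format it falls back to ISO_INSTANT in the configured zone. The value below matches the timestamp used in the unit test further down.

    DateTimeFormatter f = DateTimeFormatter.ISO_INSTANT.withZone(ZoneId.of("UTC"));
    f.format(new Date(1670068800000L).toInstant());   // -> "2022-12-03T12:00:00Z"
    // new JsonDateTimeAsStringConverter(null, "UTC") writes the same string via convert()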
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/json/JsonDecimalConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2022. Guido Schmutz (schmutz68@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.json;
18 |
19 | import org.bson.json.Converter;
20 | import org.bson.json.StrictJsonWriter;
21 | import org.bson.types.Decimal128;
22 | import org.slf4j.Logger;
23 | import org.slf4j.LoggerFactory;
24 |
25 | public class JsonDecimalConverter implements Converter<Decimal128> {
26 | private static final Logger LOGGER = LoggerFactory.getLogger(JsonDecimalConverter.class);
27 | @Override
28 | public void convert(Decimal128 value, StrictJsonWriter writer) {
29 | try {
30 | writer.writeNumber(value.toString());
31 | } catch (Exception e) {
32 | LOGGER.error(String.format("Failed to convert value %s to JSON decimal", String.valueOf(value)), e);
33 | }
34 | }
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/BooleanFieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.bson.BsonBoolean;
22 | import org.bson.BsonValue;
23 |
24 | public class BooleanFieldConverter extends SinkFieldConverter {
25 |
26 | public BooleanFieldConverter() {
27 | super(Schema.BOOLEAN_SCHEMA);
28 | }
29 |
30 | public BsonValue toBson(Object data) {
31 | return new BsonBoolean((Boolean) data);
32 | }
33 |
34 | }
35 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/BytesFieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.apache.kafka.connect.errors.DataException;
22 | import org.bson.BsonBinary;
23 | import org.bson.BsonValue;
24 |
25 | import java.nio.ByteBuffer;
26 |
27 | public class BytesFieldConverter extends SinkFieldConverter {
28 |
29 | public BytesFieldConverter() {
30 | super(Schema.BYTES_SCHEMA);
31 | }
32 |
33 | @Override
34 | public BsonValue toBson(Object data) {
35 |
36 | //obviously SinkRecords may contain different types
37 | //to represent byte arrays
38 | if(data instanceof ByteBuffer)
39 | return new BsonBinary(((ByteBuffer) data).array());
40 |
41 | if(data instanceof byte[])
42 | return new BsonBinary((byte[])data);
43 |
44 | throw new DataException("error: bytes field conversion failed due to"
45 | + " unexpected object type "+ data.getClass().getName());
46 |
47 | }
48 |
49 | }
50 |
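Editor's sketch: both accepted byte representations end up as the same BsonBinary payload.

    byte[] raw = {75, 97, 102, 107, 97};
    BytesFieldConverter conv = new BytesFieldConverter();
    conv.toBson(raw);                   // -> BsonBinary over the byte[]
    conv.toBson(ByteBuffer.wrap(raw));  // -> BsonBinary over the buffer's backing array
    conv.toBson("not bytes");           // -> DataException (unexpected object type)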
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/Float32FieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.bson.BsonDouble;
22 | import org.bson.BsonValue;
23 |
24 | public class Float32FieldConverter extends SinkFieldConverter {
25 |
26 | public Float32FieldConverter() {
27 | super(Schema.FLOAT32_SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonDouble((Float) data);
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/Float64FieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.bson.BsonDouble;
22 | import org.bson.BsonValue;
23 |
24 | public class Float64FieldConverter extends SinkFieldConverter {
25 |
26 | public Float64FieldConverter() {
27 | super(Schema.FLOAT64_SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonDouble((Double) data);
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/Int16FieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.bson.BsonInt32;
22 | import org.bson.BsonValue;
23 |
24 | public class Int16FieldConverter extends SinkFieldConverter {
25 |
26 | public Int16FieldConverter() {
27 | super(Schema.INT16_SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonInt32(((Short) data).intValue());
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/Int32FieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.bson.BsonInt32;
22 | import org.bson.BsonValue;
23 |
24 | public class Int32FieldConverter extends SinkFieldConverter {
25 |
26 | public Int32FieldConverter() {
27 | super(Schema.INT32_SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonInt32((Integer) data);
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/Int64FieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.bson.BsonInt64;
22 | import org.bson.BsonValue;
23 |
24 | public class Int64FieldConverter extends SinkFieldConverter {
25 |
26 | public Int64FieldConverter() {
27 | super(Schema.INT64_SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonInt64((Long) data);
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/Int8FieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.bson.BsonInt32;
22 | import org.bson.BsonValue;
23 |
24 | public class Int8FieldConverter extends SinkFieldConverter {
25 |
26 | public Int8FieldConverter() {
27 | super(Schema.INT8_SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonInt32(((Byte) data).intValue());
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/StringFieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Schema;
21 | import org.bson.BsonString;
22 | import org.bson.BsonValue;
23 |
24 | public class StringFieldConverter extends SinkFieldConverter {
25 |
26 | public StringFieldConverter() {
27 | super(Schema.STRING_SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonString((String) data);
33 | }
34 |
35 | }
36 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/logical/DateFieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Date;
21 | import org.bson.BsonDateTime;
22 | import org.bson.BsonValue;
23 |
24 | public class DateFieldConverter extends SinkFieldConverter {
25 |
26 | public DateFieldConverter() {
27 | super(Date.SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonDateTime(((java.util.Date)data).getTime());
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/logical/DecimalFieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Decimal;
21 | import org.apache.kafka.connect.errors.DataException;
22 | import org.bson.BsonDecimal128;
23 | import org.bson.BsonDouble;
24 | import org.bson.BsonValue;
25 | import org.bson.types.Decimal128;
26 |
27 | import java.math.BigDecimal;
28 |
29 | public class DecimalFieldConverter extends SinkFieldConverter {
30 |
31 | public enum Format {
32 | DECIMAL128, //needs MongoDB v3.4+
33 | LEGACYDOUBLE //results in double approximation
34 | }
35 |
36 | private Format format;
37 |
38 | public DecimalFieldConverter() {
39 | super(Decimal.schema(0));
40 | this.format = Format.DECIMAL128;
41 | }
42 |
43 | public DecimalFieldConverter(Format format) {
44 | super(Decimal.schema(0));
45 | this.format = format;
46 | }
47 |
48 | @Override
49 | public BsonValue toBson(Object data) {
50 |
51 | if(data instanceof BigDecimal) {
52 | if(format.equals(Format.DECIMAL128))
53 | return new BsonDecimal128(new Decimal128((BigDecimal)data));
54 |
55 | if(format.equals(Format.LEGACYDOUBLE))
56 | return new BsonDouble(((BigDecimal)data).doubleValue());
57 | }
58 |
59 | throw new DataException("error: decimal conversion not possible when data is"
60 | + " of type "+data.getClass().getName() + " and format is "+format);
61 |
62 | }
63 | }
64 |
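Editor's sketch of the two output formats (the BigDecimal matches the value used in the tests below):

    BigDecimal d = new BigDecimal("12345.6789");
    new DecimalFieldConverter().toBson(d);             // -> BsonDecimal128("12345.6789")
    new DecimalFieldConverter(DecimalFieldConverter.Format.LEGACYDOUBLE)
            .toBson(d);                                // -> BsonDouble(12345.6789)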
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/logical/TimeFieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Time;
21 | import org.bson.BsonDateTime;
22 | import org.bson.BsonValue;
23 |
24 | public class TimeFieldConverter extends SinkFieldConverter {
25 |
26 | public TimeFieldConverter() {
27 | super(Time.SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonDateTime(((java.util.Date)data).getTime());
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/main/java/com/github/cedelsb/kafka/connect/smt/converter/types/sink/bson/logical/TimestampFieldConverter.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.SinkFieldConverter;
20 | import org.apache.kafka.connect.data.Timestamp;
21 | import org.bson.BsonDateTime;
22 | import org.bson.BsonValue;
23 |
24 | public class TimestampFieldConverter extends SinkFieldConverter {
25 |
26 | public TimestampFieldConverter() {
27 | super(Timestamp.SCHEMA);
28 | }
29 |
30 | @Override
31 | public BsonValue toBson(Object data) {
32 | return new BsonDateTime(((java.util.Date)data).getTime());
33 | }
34 | }
35 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cedelsb/integration/Record2JsonStringIT.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright © 2021 Christian Edelsbrunner (christian.edelsbrunner@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.integration;
18 |
19 | import com.esotericsoftware.yamlbeans.YamlReader;
20 | import okhttp3.*;
21 | import org.apache.avro.Schema;
22 | import org.apache.avro.generic.GenericRecord;
23 | import org.apache.avro.generic.GenericRecordBuilder;
24 | import org.apache.kafka.clients.producer.KafkaProducer;
25 | import org.apache.kafka.clients.producer.ProducerRecord;
26 | import org.apache.kafka.clients.producer.RecordMetadata;
27 | import org.junit.ClassRule;
28 | import org.junit.jupiter.api.BeforeAll;
29 | import org.junit.jupiter.api.Test;
30 | import org.junit.platform.runner.JUnitPlatform;
31 | import org.junit.runner.RunWith;
32 | import org.testcontainers.containers.DockerComposeContainer;
33 | import org.testcontainers.containers.wait.strategy.Wait;
34 |
35 | import java.io.File;
36 | import java.io.FileReader;
37 | import java.io.IOException;
38 | import java.nio.file.Files;
39 | import java.nio.file.Paths;
40 | import java.sql.DriverManager;
41 | import java.sql.ResultSet;
42 | import java.sql.SQLException;
43 | import java.sql.Statement;
44 | import java.time.Duration;
45 | import java.util.List;
46 | import java.util.Map;
47 | import java.util.Properties;
48 | import java.util.concurrent.ExecutionException;
49 |
50 | import static org.junit.jupiter.api.Assertions.*;
51 |
52 | @RunWith(JUnitPlatform.class)
53 | public class Record2JsonStringIT {
54 |
55 | public static final String DOCKER_COMPOSE_FILE = "src/test/resources/docker/compose.yaml";
56 | public static final String SINK_CONNECTOR_CONFIG = "src/test/resources/docker/jdbcsinkconnector.json";
57 | public static final String DEFAULT_COMPOSE_SERVICE_SUFFIX = "_1";
58 |
59 |
60 | public static final String KAFKA_BROKER;
61 | public static final int KAFKA_BROKER_PORT;
62 |
63 | public static final String KAFKA_CONNECT;
64 | public static final int KAFKA_CONNECT_PORT;
65 |
66 | public static final String SCHEMA_REGISTRY;
67 | public static final int SCHEMA_REGISTRY_PORT;
68 |
69 | public static final String POSTGRES;
70 | public static final int POSTGRES_PORT;
71 | static {
72 | try {
73 | Map composeFile = (Map)new YamlReader(new FileReader(DOCKER_COMPOSE_FILE)).read();
74 |
75 | KAFKA_BROKER = extractHostnameFromDockerCompose(composeFile,"kafkabroker");
76 | KAFKA_BROKER_PORT = extractHostPortFromDockerCompose(composeFile,"kafkabroker");
77 |
78 | KAFKA_CONNECT = extractHostnameFromDockerCompose(composeFile,"kafkaconnect");
79 | KAFKA_CONNECT_PORT = extractHostPortFromDockerCompose(composeFile,"kafkaconnect");
80 |
81 | SCHEMA_REGISTRY = extractHostnameFromDockerCompose(composeFile,"schemaregistry");
82 | SCHEMA_REGISTRY_PORT = extractHostPortFromDockerCompose(composeFile,"schemaregistry");
83 |
84 | POSTGRES = extractHostnameFromDockerCompose(composeFile,"postgres");
85 | POSTGRES_PORT = extractHostPortFromDockerCompose(composeFile,"postgres");
86 | } catch(Exception exc) {
87 | throw new RuntimeException("error: parsing the docker-compose YAML",exc);
88 | }
89 | }
90 |
91 | @ClassRule
92 | public static DockerComposeContainer CONTAINER_ENV =
93 | new DockerComposeContainer(new File(DOCKER_COMPOSE_FILE))
94 | .withOptions("--compatibility")
95 | .withLocalCompose(true)
96 | .withExposedService(KAFKA_BROKER+DEFAULT_COMPOSE_SERVICE_SUFFIX,KAFKA_BROKER_PORT)
97 | .withExposedService(KAFKA_CONNECT+DEFAULT_COMPOSE_SERVICE_SUFFIX,KAFKA_CONNECT_PORT,
98 | Wait.forListeningPort().withStartupTimeout(Duration.ofSeconds(120)))
99 | .withExposedService(SCHEMA_REGISTRY +DEFAULT_COMPOSE_SERVICE_SUFFIX, SCHEMA_REGISTRY_PORT)
100 | .withExposedService(POSTGRES+DEFAULT_COMPOSE_SERVICE_SUFFIX,POSTGRES_PORT)
101 |
102 | ;
103 |
104 | @BeforeAll
105 | public static void setup() throws IOException, InterruptedException {
106 | CONTAINER_ENV.start();
107 |
108 | String config = new String(Files.readAllBytes(Paths.get(SINK_CONNECTOR_CONFIG)));
109 |
110 | // Wait another 5 seconds to be sure Kafka Connect is up and ready
111 | Thread.sleep(5000);
112 |
113 | registerJDBCSinkConnector(config);
114 |
115 | }
116 |
117 | @Test
118 | public void sendRecordAndTransformItToJSONString() throws ExecutionException, InterruptedException {
119 |
120 | // Build Test Avro Message
121 | Schema.Parser parser = new Schema.Parser();
122 | Schema schema = parser.parse("{\n" +
123 | " \"type\": \"record\",\n" +
124 | " \"namespace\": \"com.github.cedelsb.integration\",\n" +
125 | " \"name\": \"TestRecord\",\n" +
126 | " \"fields\": [\n" +
127 | " { \"name\": \"field1\", \"type\": \"string\" },\n" +
128 | " { \"name\": \"field2\", \"type\": \"string\" },\n" +
129 | " { \"name\": \"field3\", \"type\": \"int\"},\n" +
130 | " { \"name\": \"field4\", \"type\": \"float\" },\n" +
131 | " { \"name\": \"field5\", \"type\": \"float\" },\n" +
132 | " { \"name\": \"field6\", \"type\": \"boolean\", \"default\": true}\n" +
133 | " ]\n" +
134 | "}");
135 |
136 | GenericRecordBuilder avroBuilder = new GenericRecordBuilder(schema);
137 |
138 | avroBuilder.set(schema.getField("field1"), "TestValue");
139 | avroBuilder.set(schema.getField("field2"), "TestValue2");
140 | avroBuilder.set(schema.getField("field3"), 1337);
141 | avroBuilder.set(schema.getField("field4"), 1.5f);
142 | avroBuilder.set(schema.getField("field5"), 3.14f);
143 |
144 | avroBuilder.build();
145 |
146 | // Send it to Kafka
147 | Properties props = new Properties();
148 | props.put("bootstrap.servers","localhost:" + KAFKA_BROKER_PORT);
149 | props.put("key.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
150 | props.put("value.serializer", "io.confluent.kafka.serializers.KafkaAvroSerializer");
151 | props.put("schema.registry.url","http://"+SCHEMA_REGISTRY+":"+ SCHEMA_REGISTRY_PORT);
152 |
153 | KafkaProducer producer = new KafkaProducer<>(props);
154 |
155 | ProducerRecord record = new ProducerRecord<>("smttest", avroBuilder.build());
156 |
157 | producer.send(record, (RecordMetadata r, Exception exc) -> {
158 | assertNull(exc, () -> "unexpected error while sending"
159 | + " | exc: " + exc.getMessage()
160 | );
161 | }).get();
162 |
163 | // Give the connector time to do its work
164 | Thread.sleep(1000);
165 |
166 | java.sql.Connection conn = null;
167 | try {
168 | conn = DriverManager.getConnection("jdbc:postgresql://localhost:5432/postgres?currentSchema=smttest", "postgres", "postgres");
169 |
170 | Statement st = conn.createStatement();
171 | ResultSet rs = st.executeQuery("SELECT * FROM smttest.smttest");
172 |
173 | assertTrue(rs.next());
174 |
175 | assertEquals("{\"field1\": \"TestValue\", \"field2\": \"TestValue2\", \"field3\": 1337, \"field4\": 1.5, \"field5\": 3.140000104904175, \"field6\": true}",rs.getString(1));
176 |
177 | rs.close();
178 | st.close();
179 |
180 | conn.close();
181 | } catch (SQLException e) {
182 | assertNull(e, () -> "unexpected error while sending"
183 | + " | exc: " + e.getMessage()
184 | );
185 | }
186 |
187 | }
188 |
189 | private static void registerJDBCSinkConnector(String configuration) throws IOException {
190 | RequestBody body = RequestBody.create(
191 | MediaType.parse("application/json"), configuration
192 | );
193 |
194 | Request request = new Request.Builder()
195 | .url("http://localhost:"+KAFKA_CONNECT_PORT+"/connectors")
196 | .post(body)
197 | .build();
198 |
199 | Response response = new OkHttpClient().newCall(request).execute();
200 | assert(response.code() == 201);
201 | response.close();
202 | }
203 |
204 | private static void deferExecutionToWaitForDataPropagation(Duration delay, String message) {
205 | System.out.println(message);
206 | try {
207 | System.out.println("sleeping for "+delay.toMillis()+" millis");
208 | Thread.sleep(delay.toMillis());
209 | } catch (InterruptedException e) {}
210 | }
211 |
212 | private static String extractHostnameFromDockerCompose(Map compose,String serviceName) {
213 | return (String)((Map)((Map)compose.get("services")).get(serviceName)).get("hostname");
214 | }
215 |
216 | private static int extractHostPortFromDockerCompose(Map compose,String serviceName) {
217 | return Integer.parseInt(((String)((List)((Map)((Map)compose.get("services"))
218 | .get(serviceName)).get("ports")).get(0)).split(":")[1]);
219 | }
220 |
221 | }
222 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cedelsb/kafka/connect/smt/Record2JsonStringConverterTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright © 2021 Christian Edelsbrunner (christian.edelsbrunner@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt;
18 |
19 | import org.apache.kafka.connect.data.*;
20 | import org.apache.kafka.connect.data.Date;
21 | import org.apache.kafka.connect.sink.SinkRecord;
22 | import org.junit.Before;
23 | import org.junit.Test;
24 |
25 | import java.math.BigDecimal;
26 | import java.time.LocalDate;
27 | import java.time.LocalTime;
28 | import java.time.ZoneOffset;
29 | import java.time.ZonedDateTime;
30 | import java.util.*;
31 |
32 | import static org.junit.Assert.assertEquals;
33 | import static org.junit.Assert.fail;
34 |
35 | public class Record2JsonStringConverterTest {
36 |
37 | private Record2JsonStringConverter valueSmt = new Record2JsonStringConverter.Value<>();
38 | private Record2JsonStringConverter keySmt = new Record2JsonStringConverter.Key<>();
39 |
40 | private Schema nestedSchema;
41 | private Schema simpleStructSchema;
42 | private Struct simpleStruct;
43 |
44 | @Before
45 | public void createSchemasAndStructs()
46 | {
47 |
48 | nestedSchema = SchemaBuilder
49 | .struct()
50 | .name("nestedElement")
51 | .version(1)
52 | .field("entry", Schema.STRING_SCHEMA)
53 | .build();
54 |
55 | simpleStructSchema = SchemaBuilder
56 | .struct()
57 | .name("testSchema")
58 | .version(1)
59 | .field("simpleString", Schema.STRING_SCHEMA)
60 | .field("simpleBoolean", Schema.BOOLEAN_SCHEMA)
61 | .field("simpleFLOAT32", Schema.FLOAT32_SCHEMA)
62 | .field("simpleFLOAT64", Schema.FLOAT64_SCHEMA)
63 | .field("simpleInt8", Schema.INT8_SCHEMA)
64 | .field("simpleInt16", Schema.INT16_SCHEMA)
65 | .field("simpleInt32", Schema.INT32_SCHEMA)
66 | .field("simpleInt64", Schema.INT64_SCHEMA)
67 | .field("simpleBytes", Schema.BYTES_SCHEMA)
68 | .field("optionalBoolean", Schema.OPTIONAL_BOOLEAN_SCHEMA)
69 | .field("optionalString", Schema.OPTIONAL_STRING_SCHEMA)
70 | .field("optionalFloat", Schema.OPTIONAL_FLOAT32_SCHEMA)
71 | .field("optionalInt", Schema.OPTIONAL_INT64_SCHEMA)
72 | .field("optionalBytes", Schema.OPTIONAL_BYTES_SCHEMA)
73 | .field("nestedArray", SchemaBuilder.array(nestedSchema))
74 | .field("simpleDate", Date.SCHEMA)
75 | .field("simpleTime", Time.SCHEMA)
76 | .field("simpleTimestamp", Timestamp.SCHEMA)
77 | .field("simpleDecimal", Decimal.schema(0))
78 | .build();
79 |
80 | simpleStruct = new Struct(simpleStructSchema);
81 |
82 | simpleStruct.put("simpleString", "TestString");
83 | simpleStruct.put("simpleBoolean", true);
84 | simpleStruct.put("simpleFLOAT32", 1.0f);
85 | simpleStruct.put("simpleFLOAT64", 2.0d);
86 | simpleStruct.put("simpleInt8", (byte) 8);
87 | simpleStruct.put("simpleInt16", (short) 2);
88 | simpleStruct.put("simpleInt32", 3);
89 | simpleStruct.put("simpleInt64", 4L);
90 | simpleStruct.put("simpleBytes", new byte[]{75, 97, 102, 107, 97, 32, 114, 111, 99, 107, 115, 33});
91 |
92 | final Struct simpleNestedStruct1 = new Struct(nestedSchema);
93 | simpleNestedStruct1.put("entry", "testEntry");
94 | final Struct simpleNestedStruct2 = new Struct(nestedSchema);
95 | simpleNestedStruct2.put("entry", "testEntry2");
96 |
97 | final List nestedStructArray = Arrays.asList(simpleNestedStruct1, simpleNestedStruct2);
98 |
99 | simpleStruct.put("nestedArray", nestedStructArray);
100 |
101 | simpleStruct.put("simpleDate",java.util.Date.from(ZonedDateTime.of(LocalDate.of(2022,12,3), LocalTime.MIDNIGHT, ZoneOffset.UTC).toInstant()));
102 | simpleStruct.put("simpleTime",java.util.Date.from(ZonedDateTime.of(LocalDate.of(2022,12,3), LocalTime.MIDNIGHT, ZoneOffset.UTC).toInstant()));
103 | simpleStruct.put("simpleTimestamp",java.util.Date.from(ZonedDateTime.of(LocalDate.of(2022,12,3), LocalTime.NOON, ZoneOffset.UTC).toInstant()));
104 | simpleStruct.put("simpleDecimal", new BigDecimal("12345.6789"));
105 | }
106 |
107 | @Test
108 | public void transformRecordValue2JsonStringTest() {
109 | final Map props = new HashMap<>();
110 |
111 | props.put("json.string.field.name", "myawesomejsonstringfield");
112 |
113 | valueSmt.configure(props);
114 |
115 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
116 | final SinkRecord transformedRecord = valueSmt.apply(record);
117 |
118 | assertEquals(1, transformedRecord.valueSchema().fields().size());
119 | assertEquals(Schema.STRING_SCHEMA,transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
120 |
121 | Struct value = (Struct) transformedRecord.value();
122 | String jsonString = (String) value.get("myawesomejsonstringfield");
123 |
124 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": 4, \"simpleBytes\": {\"$binary\": {\"base64\": \"S2Fma2Egcm9ja3Mh\", \"subType\": \"00\"}}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": {\"$date\": \"2022-12-03T00:00:00Z\"}, \"simpleTime\": {\"$date\": \"2022-12-03T00:00:00Z\"}, \"simpleTimestamp\": {\"$date\": \"2022-12-03T12:00:00Z\"}, \"simpleDecimal\": {\"$numberDecimal\": \"12345.6789\"}}",jsonString);
125 | }
126 |
127 | @Test
128 | public void transformRecordValue2JsonStringEXTENDEDModeTest() {
129 | final Map props = new HashMap<>();
130 |
131 | props.put("json.string.field.name", "myawesomejsonstringfield");
132 | props.put("json.writer.output.mode", "EXTENDED");
133 |
134 | valueSmt.configure(props);
135 |
136 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
137 | final SinkRecord transformedRecord = valueSmt.apply(record);
138 |
139 | assertEquals(1,transformedRecord.valueSchema().fields().size());
140 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
141 |
142 | Struct value = (Struct) transformedRecord.value();
143 | String jsonString = (String) value.get("myawesomejsonstringfield");
144 |
145 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": {\"$numberDouble\": \"1.0\"}, \"simpleFLOAT64\": {\"$numberDouble\": \"2.0\"}, \"simpleInt8\": {\"$numberInt\": \"8\"}, \"simpleInt16\": {\"$numberInt\": \"2\"}, \"simpleInt32\": {\"$numberInt\": \"3\"}, \"simpleInt64\": {\"$numberLong\": \"4\"}, \"simpleBytes\": {\"$binary\": {\"base64\": \"S2Fma2Egcm9ja3Mh\", \"subType\": \"00\"}}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": {\"$date\": {\"$numberLong\": \"1670025600000\"}}, \"simpleTime\": {\"$date\": {\"$numberLong\": \"1670025600000\"}}, \"simpleTimestamp\": {\"$date\": {\"$numberLong\": \"1670068800000\"}}, \"simpleDecimal\": {\"$numberDecimal\": \"12345.6789\"}}",jsonString);
146 | }
147 |
148 | @Test
149 | public void transformRecordValue2JsonStringSTRICTModeTest() {
150 | final Map<String, Object> props = new HashMap<>();
151 |
152 | props.put("json.string.field.name", "myawesomejsonstringfield");
153 | props.put("json.writer.output.mode", "STRICT");
154 |
155 | valueSmt.configure(props);
156 |
157 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
158 | final SinkRecord transformedRecord = valueSmt.apply(record);
159 |
160 | assertEquals(1,transformedRecord.valueSchema().fields().size());
161 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
162 |
163 | Struct value = (Struct) transformedRecord.value();
164 | String jsonString = (String) value.get("myawesomejsonstringfield");
165 |
166 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": {\"$numberLong\": \"4\"}, \"simpleBytes\": {\"$binary\": \"S2Fma2Egcm9ja3Mh\", \"$type\": \"00\"}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": {\"$date\": 1670025600000}, \"simpleTime\": {\"$date\": 1670025600000}, \"simpleTimestamp\": {\"$date\": 1670068800000}, \"simpleDecimal\": {\"$numberDecimal\": \"12345.6789\"}}",jsonString);
167 | }
168 |
169 | @Test
170 | public void transformRecordValue2JsonStringWithoutSchemaTest() {
171 | // value for json format without schema
172 | HashMap<String, Object> simpleValueWithoutSchema = new LinkedHashMap<>();
173 | simpleValueWithoutSchema.put("simpleString", "TestString");
174 | simpleValueWithoutSchema.put("simpleBoolean", true);
175 | simpleValueWithoutSchema.put("simpleFLOAT32", 1.0f);
176 | simpleValueWithoutSchema.put("simpleFLOAT64", 2.0d);
177 | simpleValueWithoutSchema.put("simpleInt8", (byte) 8);
178 | simpleValueWithoutSchema.put("simpleInt16", (short) 2);
179 | simpleValueWithoutSchema.put("simpleInt32", 3);
180 | simpleValueWithoutSchema.put("simpleInt64", 4L);
181 | simpleValueWithoutSchema.put("simpleBytes", new byte[]{75, 97, 102, 107, 97, 32, 114, 111, 99, 107, 115, 33});
182 |
183 | LinkedHashMap<String, Object> simpleNestedObject1 = new LinkedHashMap<>();
184 | simpleNestedObject1.put("entry", "testEntry");
185 | LinkedHashMap<String, Object> simpleNestedObject2 = new LinkedHashMap<>();
186 | simpleNestedObject2.put("entry", "testEntry2");
187 |
188 | simpleValueWithoutSchema.put("nestedArray1", simpleNestedObject1);
189 | simpleValueWithoutSchema.put("nestedArray2", simpleNestedObject2);
190 |
191 | simpleValueWithoutSchema.put("simpleDate",java.util.Date.from(ZonedDateTime.of(LocalDate.of(2022,12,3), LocalTime.MIDNIGHT, ZoneOffset.UTC).toInstant()));
192 | simpleValueWithoutSchema.put("simpleTime",java.util.Date.from(ZonedDateTime.of(LocalDate.of(2022,12,3), LocalTime.MIDNIGHT, ZoneOffset.UTC).toInstant()));
193 | simpleValueWithoutSchema.put("simpleTimestamp",java.util.Date.from(ZonedDateTime.of(LocalDate.of(2022,12,3), LocalTime.NOON, ZoneOffset.UTC).toInstant()));
194 | simpleValueWithoutSchema.put("simpleDecimal", new BigDecimal("12345.6789"));
195 |
196 |
197 | final Map<String, Object> props = new HashMap<>();
198 | props.put("json.string.field.name", "myawesomejsonstringfield");
199 |
200 | valueSmt.configure(props);
201 | final SinkRecord record = new SinkRecord(null, 0, null, null, null, simpleValueWithoutSchema, 0);
202 | final SinkRecord transformedRecord = valueSmt.apply(record);
203 |
204 | assertEquals(1, transformedRecord.valueSchema().fields().size());
205 | assertEquals(Schema.STRING_SCHEMA,transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
206 |
207 | Struct value = (Struct) transformedRecord.value();
208 | String jsonString = (String) value.get("myawesomejsonstringfield");
209 |
210 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": 4, \"simpleBytes\": {\"$binary\": {\"base64\": \"S2Fma2Egcm9ja3Mh\", \"subType\": \"00\"}}, \"nestedArray1\": {\"entry\": \"testEntry\"}, \"nestedArray2\": {\"entry\": \"testEntry2\"}, \"simpleDate\": {\"$date\": \"2022-12-03T00:00:00Z\"}, \"simpleTime\": {\"$date\": \"2022-12-03T00:00:00Z\"}, \"simpleTimestamp\": {\"$date\": \"2022-12-03T12:00:00Z\"}, \"simpleDecimal\": {\"$numberDecimal\": \"12345.6789\"}}", jsonString);
211 | }
212 |
213 | @Test
214 | public void transformRecordValue2JsonStringLogicalTypesDatetimeAsStringTest() {
215 | final Map<String, Object> props = new HashMap<>();
216 |
217 | props.put("json.string.field.name", "myawesomejsonstringfield");
218 | props.put("json.writer.handle.logical.types", true);
219 | props.put("json.writer.datetime.logical.types.as", "STRING");
220 |
221 | valueSmt.configure(props);
222 |
223 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
224 | final SinkRecord transformedRecord = valueSmt.apply(record);
225 |
226 | assertEquals(1,transformedRecord.valueSchema().fields().size());
227 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
228 |
229 | Struct value = (Struct) transformedRecord.value();
230 | String jsonString = (String) value.get("myawesomejsonstringfield");
231 |
232 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": 4, \"simpleBytes\": {\"base64\": \"S2Fma2Egcm9ja3Mh\"}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": \"2022-12-03T00:00:00Z\", \"simpleTime\": \"2022-12-03T00:00:00Z\", \"simpleTimestamp\": \"2022-12-03T12:00:00Z\", \"simpleDecimal\": 12345.6789}",jsonString);
233 | }
234 |
235 | @Test
236 | public void transformRecordValue2JsonStringLogicalTypesDatetimeAsStringWithZoneIdTest() {
237 | final Map<String, Object> props = new HashMap<>();
238 |
239 | props.put("json.string.field.name", "myawesomejsonstringfield");
240 | props.put("json.writer.handle.logical.types", true);
241 | props.put("json.writer.datetime.logical.types.as", "STRING");
242 | props.put("json.writer.datetime.zoneid", "Europe/Zurich");
243 |
244 | valueSmt.configure(props);
245 |
246 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
247 | final SinkRecord transformedRecord = valueSmt.apply(record);
248 |
249 | assertEquals(1,transformedRecord.valueSchema().fields().size());
250 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
251 |
252 | Struct value = (Struct) transformedRecord.value();
253 | String jsonString = (String) value.get("myawesomejsonstringfield");
254 |
255 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": 4, \"simpleBytes\": {\"base64\": \"S2Fma2Egcm9ja3Mh\"}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": \"2022-12-03T00:00:00Z\", \"simpleTime\": \"2022-12-03T00:00:00Z\", \"simpleTimestamp\": \"2022-12-03T12:00:00Z\", \"simpleDecimal\": 12345.6789}",jsonString);
256 | }
257 |
258 | @Test
259 | public void transformRecordValue2JsonStringLogicalTypesDatetimeAsStringWithDefinedPatternAndZoneIdTest() {
260 | final Map<String, Object> props = new HashMap<>();
261 |
262 | props.put("json.string.field.name", "myawesomejsonstringfield");
263 | props.put("json.writer.handle.logical.types", true);
264 | props.put("json.writer.datetime.logical.types.as", "STRING");
265 | props.put("json.writer.datetime.pattern", "ISO_DATE_TIME");
266 | props.put("json.writer.datetime.zoneid", "CET");
267 |
268 | valueSmt.configure(props);
269 |
270 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
271 | final SinkRecord transformedRecord = valueSmt.apply(record);
272 |
273 | assertEquals(1,transformedRecord.valueSchema().fields().size());
274 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
275 |
276 | Struct value = (Struct) transformedRecord.value();
277 | String jsonString = (String) value.get("myawesomejsonstringfield");
278 |
279 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": 4, \"simpleBytes\": {\"base64\": \"S2Fma2Egcm9ja3Mh\"}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": \"2022-12-03T01:00:00+01:00[CET]\", \"simpleTime\": \"2022-12-03T01:00:00+01:00[CET]\", \"simpleTimestamp\": \"2022-12-03T13:00:00+01:00[CET]\", \"simpleDecimal\": 12345.6789}",jsonString);
280 | }
281 |
282 | @Test
283 | public void transformRecordValue2JsonStringLogicalTypesDatetimeAsStringWithPatternAndZoneIdTest() {
284 | final Map<String, Object> props = new HashMap<>();
285 |
286 | props.put("json.string.field.name", "myawesomejsonstringfield");
287 | props.put("json.writer.handle.logical.types", true);
288 | props.put("json.writer.datetime.logical.types.as", "STRING");
289 | props.put("json.writer.datetime.pattern", "dd.MM.yyyy HH:mm z");
290 | props.put("json.writer.datetime.zoneid", "CET");
291 |
292 | Locale.setDefault(new Locale("en", "GB")); // Force Locale to be English so that the timezone display name is properly returned
293 |
294 | valueSmt.configure(props);
295 |
296 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
297 | final SinkRecord transformedRecord = valueSmt.apply(record);
298 |
299 | assertEquals(1,transformedRecord.valueSchema().fields().size());
300 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
301 |
302 | Struct value = (Struct) transformedRecord.value();
303 | String jsonString = (String) value.get("myawesomejsonstringfield");
304 |
305 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": 4, \"simpleBytes\": {\"base64\": \"S2Fma2Egcm9ja3Mh\"}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": \"03.12.2022 01:00 CET\", \"simpleTime\": \"03.12.2022 01:00 CET\", \"simpleTimestamp\": \"03.12.2022 13:00 CET\", \"simpleDecimal\": 12345.6789}",jsonString);
306 | }
307 | @Test
308 | public void transformRecordValue2JsonStringLogicalTypesDatetimeAsLongTest() {
309 | final Map<String, Object> props = new HashMap<>();
310 |
311 | props.put("json.string.field.name", "myawesomejsonstringfield");
312 | props.put("json.writer.handle.logical.types", true);
313 | props.put("json.writer.datetime.logical.types.as", "LONG");
314 |
315 | valueSmt.configure(props);
316 |
317 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
318 | final SinkRecord transformedRecord = valueSmt.apply(record);
319 |
320 | assertEquals(1,transformedRecord.valueSchema().fields().size());
321 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
322 |
323 | Struct value = (Struct) transformedRecord.value();
324 | String jsonString = (String) value.get("myawesomejsonstringfield");
325 |
326 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": 4, \"simpleBytes\": {\"base64\": \"S2Fma2Egcm9ja3Mh\"}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": 1670025600000, \"simpleTime\": 1670025600000, \"simpleTimestamp\": 1670068800000, \"simpleDecimal\": 12345.6789}",jsonString);
327 | }
328 |
329 | @Test
330 | public void transformRecordValue2JsonStringLogicalTypesDatetimeAsWRONGVALUETest() {
331 | final Map<String, Object> props = new HashMap<>();
332 |
333 | props.put("json.string.field.name", "myawesomejsonstringfield");
334 | props.put("json.writer.handle.logical.types", true);
335 | props.put("json.writer.datetime.logical.types.as", "wrong value");
336 |
337 | try {
338 | valueSmt.configure(props);
339 | fail("Expected an IllegalArgumentException");
340 | } catch (IllegalArgumentException ex) {
341 | assertEquals("Wrong value for configuration setting: json.writer.datetime.logical.types.as=wrong value", ex.getMessage());
342 | }
343 | }
344 |
345 | @Test
346 | public void transformRecordKey2JsonStringTest() {
347 | final Map<String, Object> props = new HashMap<>();
348 |
349 | props.put("json.string.field.name", "myawesomejsonstringfield");
350 |
351 | keySmt.configure(props);
352 |
353 | final Schema simpleStructSchema = SchemaBuilder
354 | .struct()
355 | .name("testSchema")
356 | .version(1)
357 | .field("simpleString", Schema.STRING_SCHEMA)
358 | .field("simpleBoolean", Schema.BOOLEAN_SCHEMA)
359 | .build();
360 |
361 | final Struct simpleStruct = new Struct(simpleStructSchema);
362 |
363 | simpleStruct.put("simpleString", "TestString");
364 | simpleStruct.put("simpleBoolean", true);
365 |
366 | final SinkRecord record = new SinkRecord(null, 0, simpleStructSchema, simpleStruct, null, "value", 0);
367 | final SinkRecord transformedRecord = keySmt.apply(record);
368 |
369 | assertEquals(1,transformedRecord.keySchema().fields().size());
370 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.keySchema().field("myawesomejsonstringfield").schema());
371 |
372 | Struct key = (Struct) transformedRecord.key();
373 | String jsonString = (String) key.get("myawesomejsonstringfield");
374 |
375 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true}",jsonString);
376 | }
377 |
378 | @Test
379 | public void handleTombstoneRecord() {
380 | final Map<String, Object> props = new HashMap<>();
381 |
382 | props.put("json.string.field.name", "myawesomejsonstringfield");
383 |
384 | valueSmt.configure(props);
385 |
386 | final Schema nestedSchema = SchemaBuilder
387 | .struct()
388 | .name("nestedElement")
389 | .version(1)
390 | .field("entry", Schema.STRING_SCHEMA)
391 | .build();
392 |
393 | final Schema simpleStructSchema = SchemaBuilder
394 | .struct()
395 | .name("testSchema")
396 | .version(1)
397 | .field("simpleString", Schema.STRING_SCHEMA)
398 | .field("simpleBoolean", Schema.BOOLEAN_SCHEMA)
399 | .field("simpleFLOAT32", Schema.FLOAT32_SCHEMA)
400 | .field("simpleFLOAT64", Schema.FLOAT64_SCHEMA)
401 | .field("simpleInt8", Schema.INT8_SCHEMA)
402 | .field("simpleInt16", Schema.INT16_SCHEMA)
403 | .field("simpleInt32", Schema.INT32_SCHEMA)
404 | .field("simpleInt64", Schema.INT64_SCHEMA)
405 | .field("optionalBoolean", Schema.OPTIONAL_BOOLEAN_SCHEMA)
406 | .field("optionalString", Schema.OPTIONAL_STRING_SCHEMA)
407 | .field("optionalFloat", Schema.OPTIONAL_FLOAT32_SCHEMA)
408 | .field("optionalInt", Schema.OPTIONAL_INT64_SCHEMA)
409 | .field("nestedArray", SchemaBuilder.array(nestedSchema))
410 | .build();
411 |
412 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, null, 0);
413 | final SinkRecord transformedRecord = valueSmt.apply(record);
414 |
415 | assertEquals(13,transformedRecord.valueSchema().fields().size());
416 |
417 | assertEquals(null, transformedRecord.value());
418 |
419 | }
420 |
421 | @Test
422 | public void transformRecordValue2JsonStringXMLTest() {
423 | final Map<String, Object> props = new HashMap<>();
424 |
425 | props.put("json.string.field.name", "myawesomejsonstringfield");
426 | props.put("post.processing.to.xml", true);
427 |
428 | valueSmt.configure(props);
429 |
430 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
431 | final SinkRecord transformedRecord = valueSmt.apply(record);
432 |
433 | assertEquals(1, transformedRecord.valueSchema().fields().size());
434 | assertEquals(Schema.STRING_SCHEMA, transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
435 |
436 | Struct value = (Struct) transformedRecord.value();
437 | String jsonString = (String) value.get("myawesomejsonstringfield");
438 |
439 | assertEquals("\n" +
440 | "\n" +
441 | " TestString \n" +
442 | " true \n" +
443 | " 1.0 \n" +
444 | " 2.0 \n" +
445 | " 8 \n" +
446 | " 2 \n" +
447 | " 3 \n" +
448 | " 4 \n" +
449 | " \n" +
450 | " <__EQ__binary>\n" +
451 | " S2Fma2Egcm9ja3Mh \n" +
452 | " 00 \n" +
453 | " \n" +
454 | " \n" +
455 | " \n" +
456 | " \n" +
457 | " \n" +
458 | " \n" +
459 | " \n" +
460 | " \n" +
461 | " testEntry \n" +
462 | " \n" +
463 | " \n" +
464 | " testEntry2 \n" +
465 | " \n" +
466 | " \n" +
467 | " <__EQ__date>2022-12-03T00:00:00Z\n" +
468 | " \n" +
469 | " \n" +
470 | " <__EQ__date>2022-12-03T00:00:00Z\n" +
471 | " \n" +
472 | " \n" +
473 | " <__EQ__date>2022-12-03T12:00:00Z\n" +
474 | " \n" +
475 | " \n" +
476 | " <__EQ__numberDecimal>12345.6789\n" +
477 | " \n" +
478 | " ",jsonString);
479 | }
480 |
481 | @Test
482 | public void transformRecordValue2JsonStringLogicalTypesTest() {
483 | final Map<String, Object> props = new HashMap<>();
484 |
485 | props.put("json.string.field.name", "myawesomejsonstringfield");
486 |
487 | valueSmt.configure(props);
488 |
489 | final SinkRecord record = new SinkRecord(null, 0, null, "test", simpleStructSchema, simpleStruct, 0);
490 | final SinkRecord transformedRecord = valueSmt.apply(record);
491 |
492 | assertEquals(1, transformedRecord.valueSchema().fields().size());
493 | assertEquals(Schema.STRING_SCHEMA,transformedRecord.valueSchema().field("myawesomejsonstringfield").schema());
494 |
495 | Struct value = (Struct) transformedRecord.value();
496 | String jsonString = (String) value.get("myawesomejsonstringfield");
497 |
498 | assertEquals("{\"simpleString\": \"TestString\", \"simpleBoolean\": true, \"simpleFLOAT32\": 1.0, \"simpleFLOAT64\": 2.0, \"simpleInt8\": 8, \"simpleInt16\": 2, \"simpleInt32\": 3, \"simpleInt64\": 4, \"simpleBytes\": {\"$binary\": {\"base64\": \"S2Fma2Egcm9ja3Mh\", \"subType\": \"00\"}}, \"optionalBoolean\": null, \"optionalString\": null, \"optionalFloat\": null, \"optionalInt\": null, \"optionalBytes\": null, \"nestedArray\": [{\"entry\": \"testEntry\"}, {\"entry\": \"testEntry2\"}], \"simpleDate\": {\"$date\": \"2022-12-03T00:00:00Z\"}, \"simpleTime\": {\"$date\": \"2022-12-03T00:00:00Z\"}, \"simpleTimestamp\": {\"$date\": \"2022-12-03T12:00:00Z\"}, \"simpleDecimal\": {\"$numberDecimal\": \"12345.6789\"}}",jsonString);
499 | }
500 | }
501 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cedelsb/kafka/connect/smt/converter/RecordConverterTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.apache.kafka.connect.data.*;
20 | import org.apache.kafka.connect.errors.DataException;
21 | import org.bson.*;
22 | import org.bson.types.Decimal128;
23 | import org.junit.jupiter.api.BeforeAll;
24 | import org.junit.jupiter.api.DisplayName;
25 | import org.junit.jupiter.api.Test;
26 | import org.junit.platform.runner.JUnitPlatform;
27 | import org.junit.runner.RunWith;
28 |
29 | import java.math.BigDecimal;
30 | import java.time.LocalDate;
31 | import java.time.LocalTime;
32 | import java.time.ZoneOffset;
33 | import java.time.ZonedDateTime;
34 | import java.util.Arrays;
35 | import java.util.HashMap;
36 | import java.util.LinkedHashMap;
37 | import java.util.List;
38 | import java.util.Map;
39 |
40 | import static org.junit.jupiter.api.Assertions.*;
41 |
42 | @RunWith(JUnitPlatform.class)
43 | public class RecordConverterTest {
44 |
45 | public static String JSON_STRING_1;
46 | public static Schema OBJ_SCHEMA_1;
47 | public static Struct OBJ_STRUCT_1;
48 | public static Map<String, Object> OBJ_MAP_1;
49 | public static BsonDocument EXPECTED_BSON_DOC_OBJ_STRUCT_1;
50 | public static BsonDocument EXPECTED_BSON_DOC_OBJ_MAP_1;
51 | public static BsonDocument EXPECTED_BSON_DOC_RAW_1;
52 |
53 | @BeforeAll
54 | public static void initializeTestData() {
55 |
56 | JSON_STRING_1 =
57 | "{\"_id\":\"1234567890\"," +
58 | "\"myString\":\"some foo bla text\"," +
59 | "\"myInt\":42," +
60 | "\"myBoolean\":true," +
61 | "\"mySubDoc1\":{\"myString\":\"hello json\"}," +
62 | "\"myArray1\":[\"str_1\",\"str_2\",\"...\",\"str_N\"]," +
63 | "\"myArray2\":[{\"k\":\"a\",\"v\":1},{\"k\":\"b\",\"v\":2},{\"k\":\"c\",\"v\":3}]," +
64 | "\"myArray3\":[[[1],[],[2,3],[4,5,6]]]," +
65 | "\"mySubDoc2\":{\"k1\":9,\"k2\":8,\"k3\":7}," +
66 | "\"myMapOfStrings\":{\"k1\": [ \"v1-a\", \"v1-b\" ],\"k2\": [ \"v2-a\" ],\"k3\":[ \"v3-a\", \"v3-b\", \"v3-c\" ]}," +
67 | "\"myMapOfInts\":{\"k1\": [ 11, 12 ],\"k2\": [ 21 ],\"k3\":[ 31, 32, 33 ]}," +
68 | "\"myBytes\":\"S2Fma2Egcm9ja3Mh\"," +
69 | "\"myDate\": 1489705200000," +
70 | "\"myTimestamp\": 1489705200000," +
71 | "\"myTime\": 946724400000, " +
72 | "\"myDecimal\": 12345.6789 }";
73 |
74 | OBJ_SCHEMA_1 = SchemaBuilder.struct()
75 | .field("_id", Schema.STRING_SCHEMA)
76 | .field("myString", Schema.STRING_SCHEMA)
77 | .field("myInt",Schema.INT32_SCHEMA)
78 | .field("myBoolean", Schema.BOOLEAN_SCHEMA)
79 | .field("mySubDoc1", SchemaBuilder.struct()
80 | .field("myString",Schema.STRING_SCHEMA)
81 | .build()
82 | )
83 | .field("myArray1", SchemaBuilder.array(Schema.STRING_SCHEMA).build())
84 | .field("myArray2",SchemaBuilder.array(SchemaBuilder.struct()
85 | .field("k",Schema.STRING_SCHEMA)
86 | .field("v",Schema.INT32_SCHEMA)
87 | .build())
88 | )
89 | .field("myArray3", SchemaBuilder.array(SchemaBuilder.array(SchemaBuilder.array(Schema.INT32_SCHEMA))))
90 | .field("mySubDoc2", SchemaBuilder.map(Schema.STRING_SCHEMA, Schema.INT32_SCHEMA).build())
91 | .field( "myMapOfStrings", SchemaBuilder.map(Schema.STRING_SCHEMA, SchemaBuilder.array(Schema.STRING_SCHEMA).build()).build())
92 | .field( "myMapOfInts", SchemaBuilder.map(Schema.STRING_SCHEMA, SchemaBuilder.array(Schema.INT32_SCHEMA).build()).build())
93 | .field("myBytes", Schema.BYTES_SCHEMA)
94 | .field("myDate", Date.SCHEMA)
95 | .field("myTimestamp", Timestamp.SCHEMA)
96 | .field("myTime", Time.SCHEMA)
97 | .field("myDecimal", Decimal.schema(0))
98 | .build();
99 |
100 | OBJ_STRUCT_1 = new Struct(OBJ_SCHEMA_1)
101 | .put("_id","1234567890")
102 | .put("myString","some foo bla text")
103 | .put("myInt",42)
104 | .put("myBoolean",true)
105 | .put("mySubDoc1",new Struct(OBJ_SCHEMA_1.field("mySubDoc1").schema())
106 | .put("myString","hello json")
107 | )
108 | .put("myArray1",Arrays.asList("str_1","str_2","...","str_N"))
109 | .put("myArray2", Arrays.asList(
110 | new Struct(OBJ_SCHEMA_1.field("myArray2").schema().valueSchema())
111 | .put("k","a").put("v",1),
112 | new Struct(OBJ_SCHEMA_1.field("myArray2").schema().valueSchema())
113 | .put("k","b").put("v",2),
114 | new Struct(OBJ_SCHEMA_1.field("myArray2").schema().valueSchema())
115 | .put("k","c").put("v",3)
116 | )
117 | )
118 | .put("myArray3", Arrays.asList(
119 | Arrays.asList(Arrays.asList(1),Arrays.asList(),Arrays.asList(2,3),Arrays.asList(4,5,6))
120 | ))
121 | .put("mySubDoc2",new HashMap(){{ put("k1",9); put("k2",8); put("k3",7);}})
122 | .put("myMapOfStrings", new HashMap>(){{
123 | put("k1", Arrays.asList("v1-a", "v1-b"));
124 | put("k2", Arrays.asList("v2-a"));
125 | put("k3", Arrays.asList("v3-a", "v3-b", "v3-c"));
126 | }})
127 | .put("myMapOfInts", new HashMap>(){{
128 | put("k1", Arrays.asList(11, 12));
129 | put("k2", Arrays.asList(21));
130 | put("k3", Arrays.asList(31, 32, 33));
131 | }})
132 | .put("myBytes", new byte[]{75, 97, 102, 107, 97, 32, 114, 111, 99, 107, 115, 33})
133 | .put("myDate", java.util.Date.from(ZonedDateTime.of(
134 | LocalDate.of(2017,3,17), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()
135 | ).toInstant())
136 | )
137 | .put("myTimestamp", java.util.Date.from(ZonedDateTime.of(
138 | LocalDate.of(2017,3,17), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()
139 | ).toInstant())
140 | )
141 | .put("myTime", java.util.Date.from(ZonedDateTime.of(
142 | LocalDate.of(2000,1,1), LocalTime.NOON, ZoneOffset.systemDefault()
143 | ).toInstant())
144 | )
145 | .put("myDecimal", new BigDecimal("12345.6789"));
146 |
147 | OBJ_MAP_1 = new LinkedHashMap<>();
148 | OBJ_MAP_1.put("_id","1234567890");
149 | OBJ_MAP_1.put("myString","some foo bla text");
150 | OBJ_MAP_1.put("myInt",42);
151 | OBJ_MAP_1.put("myBoolean",true);
152 | OBJ_MAP_1.put("mySubDoc1",new HashMap(){{put("myString","hello json");}});
153 | OBJ_MAP_1.put("myArray1",Arrays.asList("str_1","str_2","...","str_N"));
154 | OBJ_MAP_1.put("myArray2", Arrays.asList(
155 | new HashMap<String, Object>(){{put("k","a");put("v",1);}},
156 | new HashMap<String, Object>(){{put("k","b");put("v",2);}},
157 | new HashMap<String, Object>(){{put("k","c");put("v",3);}}
158 | )
159 | );
160 | OBJ_MAP_1.put("myArray3",Arrays.asList(
161 | Arrays.asList(Arrays.asList(1), Arrays.asList(), Arrays.asList(2,3), Arrays.asList(4,5,6))
162 | )
163 | );
164 | OBJ_MAP_1.put("mySubDoc2",new HashMap(){{ put("k1",9); put("k2",8); put("k3",7);}});
165 | OBJ_MAP_1.put("myMapOfStrings",new HashMap>(){{
166 | put("k1",Arrays.asList("v1-a", "v1-b"));
167 | put("k2",Arrays.asList("v2-a"));
168 | put("k3",Arrays.asList("v3-a", "v3-b", "v3-c"));}});
169 | OBJ_MAP_1.put("myMapOfInts",new HashMap>(){{
170 | put("k1",Arrays.asList(11, 12));
171 | put("k2",Arrays.asList(21));
172 | put("k3",Arrays.asList(31, 32, 33));}});
173 | OBJ_MAP_1.put("myBytes", new byte[]{75, 97, 102, 107, 97, 32, 114, 111, 99, 107, 115, 33});
174 | OBJ_MAP_1.put("myDate", java.util.Date.from(ZonedDateTime.of(
175 | LocalDate.of(2017,3,17), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()
176 | ).toInstant())
177 | );
178 | OBJ_MAP_1.put("myTimestamp", java.util.Date.from(ZonedDateTime.of(
179 | LocalDate.of(2017,3,17), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()
180 | ).toInstant())
181 | );
182 | OBJ_MAP_1.put("myTime", java.util.Date.from(ZonedDateTime.of(
183 | LocalDate.of(2000,1,1), LocalTime.NOON, ZoneOffset.systemDefault()
184 | ).toInstant())
185 | );
186 | //NOTE: as of now the BSON codec package seems to be missing a BigDecimalCodec
187 | // thus I'm cheating a little by using a Decimal128 here...
188 | OBJ_MAP_1.put("myDecimal", Decimal128.parse("12345.6789"));
189 |
190 | BsonDocument commonMapAndStructFields = new BsonDocument()
191 | .append("_id", new BsonString("1234567890"))
192 | .append("myString", new BsonString("some foo bla text"))
193 | .append("myInt", new BsonInt32(42))
194 | .append("myBoolean", new BsonBoolean(true))
195 | .append("mySubDoc1", new BsonDocument("myString", new BsonString("hello json")))
196 | .append("myArray1", new BsonArray(Arrays.asList(
197 | new BsonString("str_1"),
198 | new BsonString("str_2"),
199 | new BsonString("..."),
200 | new BsonString("str_N")))
201 | )
202 | .append("myArray2", new BsonArray(Arrays.asList(
203 | new BsonDocument("k", new BsonString("a")).append("v", new BsonInt32(1)),
204 | new BsonDocument("k", new BsonString("b")).append("v", new BsonInt32(2)),
205 | new BsonDocument("k", new BsonString("c")).append("v", new BsonInt32(3))))
206 | )
207 | .append("myArray3", new BsonArray(Arrays.asList(
208 | new BsonArray(Arrays.asList(
209 | new BsonArray(Arrays.asList(new BsonInt32(1))),
210 | new BsonArray(),
211 | new BsonArray(Arrays.asList(new BsonInt32(2),new BsonInt32(3))),
212 | new BsonArray(Arrays.asList(new BsonInt32(4),new BsonInt32(5),new BsonInt32(6)))
213 | ))))
214 | )
215 | .append("mySubDoc2", new BsonDocument("k1", new BsonInt32(9))
216 | .append("k2", new BsonInt32(8))
217 | .append("k3", new BsonInt32(7))
218 | )
219 | .append("myMapOfStrings", new BsonDocument("k1", new BsonInt32(9))
220 | .append("k1", new BsonArray(Arrays.asList(
221 | new BsonString("v1-a"),
222 | new BsonString("v1-b"))))
223 | .append("k2", new BsonArray(Arrays.asList(
224 | new BsonString("v2-a"))))
225 | .append("k3", new BsonArray(Arrays.asList(
226 | new BsonString("v3-a"),
227 | new BsonString("v3-b"),
228 | new BsonString("v3-c")))))
229 | .append("myMapOfInts", new BsonDocument("k1", new BsonInt32(9))
230 | .append("k1", new BsonArray(Arrays.asList(
231 | new BsonInt32(11),
232 | new BsonInt32(12))))
233 | .append("k2", new BsonArray(Arrays.asList(
234 | new BsonInt32(21))))
235 | .append("k3", new BsonArray(Arrays.asList(
236 | new BsonInt32(31),
237 | new BsonInt32(32),
238 | new BsonInt32(33)))))
239 | .append("myBytes", new BsonBinary(new byte[]{75, 97, 102, 107, 97, 32, 114, 111, 99, 107, 115, 33}))
240 | .append("myDate", new BsonDateTime(
241 | java.util.Date.from(ZonedDateTime.of(
242 | LocalDate.of(2017,3,17), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()
243 | ).toInstant()).getTime()
244 | ))
245 | .append("myTimestamp", new BsonDateTime(
246 | java.util.Date.from(ZonedDateTime.of(
247 | LocalDate.of(2017,3,17), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()
248 | ).toInstant()).getTime()
249 | ))
250 | .append("myTime", new BsonDateTime(
251 | java.util.Date.from(ZonedDateTime.of(
252 | LocalDate.of(2000,1,1), LocalTime.NOON, ZoneOffset.systemDefault()
253 | ).toInstant()).getTime()
254 | ))
255 | .append("myDecimal", new BsonDecimal128(new Decimal128(new BigDecimal("12345.6789"))));
256 |
257 | EXPECTED_BSON_DOC_OBJ_STRUCT_1 = commonMapAndStructFields.clone();
258 | EXPECTED_BSON_DOC_OBJ_MAP_1 = commonMapAndStructFields.clone();
259 |
260 | EXPECTED_BSON_DOC_RAW_1 = commonMapAndStructFields.clone();
261 | EXPECTED_BSON_DOC_RAW_1.replace("myBytes",new BsonString("S2Fma2Egcm9ja3Mh"));
262 | EXPECTED_BSON_DOC_RAW_1.replace("myDate",new BsonInt64(1489705200000L));
263 | EXPECTED_BSON_DOC_RAW_1.replace("myTimestamp",new BsonInt64(1489705200000L));
264 | EXPECTED_BSON_DOC_RAW_1.replace("myTime",new BsonInt64(946724400000L));
265 | EXPECTED_BSON_DOC_RAW_1.replace("myDecimal", new BsonDouble(12345.6789));
266 |
267 | }
268 |
269 | @Test
270 | @DisplayName("test raw json conversion")
271 | public void testJsonRawStringConversion() {
272 | RecordConverter converter = new JsonRawStringRecordConverter();
273 | assertAll("",
274 | () -> assertEquals(EXPECTED_BSON_DOC_RAW_1, converter.convert(null, JSON_STRING_1)),
275 | () -> assertThrows(DataException.class, () -> converter.convert(null,null))
276 | );
277 | }
278 |
279 | @Test
280 | @DisplayName("test avro or (json + schema) conversion (which is handled the same)")
281 | public void testAvroOrJsonWithSchemaConversion() {
282 | RecordConverter converter = new AvroJsonSchemafulRecordConverter();
283 | assertAll("",
284 | () -> assertEquals(EXPECTED_BSON_DOC_OBJ_STRUCT_1, converter.convert(OBJ_SCHEMA_1, OBJ_STRUCT_1)),
285 | () -> assertThrows(DataException.class, () -> converter.convert(OBJ_SCHEMA_1,null)),
286 | () -> assertThrows(DataException.class, () -> converter.convert(null, OBJ_STRUCT_1)),
287 | () -> assertThrows(DataException.class, () -> converter.convert(null,null))
288 | );
289 | }
290 | /*
291 | @Test
292 | @DisplayName("test json object conversion")
293 | public void testJsonObjectConversion() {
294 | RecordConverter converter = new JsonSchemalessRecordConverter();
295 | assertAll("",
296 | () -> assertEquals(EXPECTED_BSON_DOC_OBJ_MAP_1, converter.convert(null, OBJ_MAP_1)),
297 | () -> assertThrows(DataException.class, () -> converter.convert(null,null))
298 | );
299 | }
300 | */
301 | }
302 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cedelsb/kafka/connect/smt/converter/SinkConverterTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.apache.kafka.connect.data.Schema;
20 | import org.apache.kafka.connect.data.SchemaBuilder;
21 | import org.apache.kafka.connect.data.Struct;
22 | import org.apache.kafka.connect.errors.DataException;
23 | import org.apache.kafka.connect.sink.SinkRecord;
24 | import org.bson.BsonDocument;
25 | import org.bson.BsonString;
26 | import org.junit.jupiter.api.*;
27 | import org.junit.platform.runner.JUnitPlatform;
28 | import org.junit.runner.RunWith;
29 |
30 | import java.util.*;
31 |
32 | import static org.junit.jupiter.api.Assertions.*;
33 | import static org.junit.jupiter.api.DynamicTest.dynamicTest;
34 |
35 | @RunWith(JUnitPlatform.class)
36 | public class SinkConverterTest {
37 |
38 | public static String JSON_STRING_1;
39 | public static Schema OBJ_SCHEMA_1;
40 | public static Struct OBJ_STRUCT_1;
41 | public static Map<String, Object> OBJ_MAP_1;
42 | public static BsonDocument EXPECTED_BSON_DOC;
43 |
44 | private static Map<Object, Schema> combinations;
45 | private SinkConverter sinkConverter = new SinkConverter();
46 |
47 |
48 | @BeforeAll
49 | public static void initializeTestData() {
50 |
51 | JSON_STRING_1 = "{\"myField\":\"some text\"}";
52 |
53 | OBJ_SCHEMA_1 = SchemaBuilder.struct()
54 | .field("myField", Schema.STRING_SCHEMA);
55 |
56 | OBJ_STRUCT_1 = new Struct(OBJ_SCHEMA_1)
57 | .put("myField", "some text");
58 |
59 | OBJ_MAP_1 = new LinkedHashMap<>();
60 | OBJ_MAP_1.put("myField", "some text");
61 |
62 | EXPECTED_BSON_DOC = new BsonDocument("myField", new BsonString("some text"));
63 |
64 | combinations = new HashMap<>();
65 | combinations.put(JSON_STRING_1, null);
66 | combinations.put(OBJ_STRUCT_1, OBJ_SCHEMA_1);
67 | combinations.put(OBJ_MAP_1, null);
68 | }
69 |
70 | @TestFactory
71 | @DisplayName("test different combinations for sink record conversions")
72 | public List<DynamicTest> testDifferentOptionsForSinkRecordConversion() {
73 |
74 | List<DynamicTest> tests = new ArrayList<>();
75 |
76 | for (Map.Entry<Object, Schema> entry : combinations.entrySet()) {
77 |
78 | tests.add(dynamicTest("key only SinkRecord conversion for type " + entry.getKey().getClass().getName()
79 | + " with data -> " + entry.getKey(), () -> {
80 | SinkDocument converted = sinkConverter.convert(
81 | new SinkRecord(
82 | "topic", 1, entry.getValue(), entry.getKey(), null, null, 0L
83 | )
84 | );
85 | assertAll("checks on conversion results",
86 | () -> assertNotNull(converted),
87 | () -> assertEquals(EXPECTED_BSON_DOC, converted.getKeyDoc().get()),
88 | () -> assertEquals(Optional.empty(), converted.getValueDoc())
89 | );
90 | }));
91 |
92 | tests.add(dynamicTest("value only SinkRecord conversion for type " + entry.getKey().getClass().getName()
93 | + " with data -> " + entry.getKey(), () -> {
94 | SinkDocument converted = sinkConverter.convert(
95 | new SinkRecord(
96 | "topic", 1, null, null, entry.getValue(), entry.getKey(), 0L
97 | )
98 | );
99 | assertAll("checks on conversion results",
100 | () -> assertNotNull(converted),
101 | () -> assertEquals(Optional.empty(), converted.getKeyDoc()),
102 | () -> assertEquals(EXPECTED_BSON_DOC, converted.getValueDoc().get())
103 | );
104 | }));
105 |
106 | tests.add(dynamicTest("key + value SinkRecord conversion for type " + entry.getKey().getClass().getName()
107 | + " with data -> " + entry.getKey(), () -> {
108 | SinkDocument converted = sinkConverter.convert(
109 | new SinkRecord(
110 | "topic", 1, entry.getValue(), entry.getKey(), entry.getValue(), entry.getKey(), 0L
111 | )
112 | );
113 | assertAll("checks on conversion results",
114 | () -> assertNotNull(converted),
115 | () -> assertEquals(EXPECTED_BSON_DOC, converted.getKeyDoc().get()),
116 | () -> assertEquals(EXPECTED_BSON_DOC, converted.getValueDoc().get())
117 | );
118 | }));
119 |
120 | }
121 |
122 | return tests;
123 |
124 | }
125 |
126 | @Test
127 | @DisplayName("test empty sink record conversion")
128 | public void testEmptySinkRecordConversion() {
129 |
130 | SinkDocument converted = sinkConverter.convert(
131 | new SinkRecord(
132 | "topic", 1, null, null, null, null, 0L
133 | )
134 | );
135 |
136 | assertAll("checks on conversion result",
137 | () -> assertNotNull(converted),
138 | () -> assertEquals(Optional.empty(), converted.getKeyDoc()),
139 | () -> assertEquals(Optional.empty(), converted.getValueDoc())
140 | );
141 |
142 | }
143 |
144 | @Test
145 | @DisplayName("test invalid sink record conversion")
146 | public void testInvalidSinkRecordConversion() {
147 |
148 | assertAll("checks on conversion result",
149 | () -> assertThrows(DataException.class, () -> sinkConverter.convert(
150 | new SinkRecord(
151 | "topic", 1, null, new Object(), null, null, 0L
152 | )
153 | )),
154 | () -> assertThrows(DataException.class, () -> sinkConverter.convert(
155 | new SinkRecord(
156 | "topic", 1, null, null, null, new Object(), 0L
157 | )
158 | )),
159 | () -> assertThrows(DataException.class, () -> sinkConverter.convert(
160 | new SinkRecord(
161 | "topic", 1, null, new Object(), null, new Object(), 0L
162 | )
163 | ))
164 | );
165 |
166 | }
167 |
168 | }
169 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cedelsb/kafka/connect/smt/converter/SinkDocumentTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import org.bson.*;
20 | import org.bson.types.ObjectId;
21 | import org.junit.jupiter.api.BeforeAll;
22 | import org.junit.jupiter.api.DisplayName;
23 | import org.junit.jupiter.api.Test;
24 | import org.junit.platform.runner.JUnitPlatform;
25 | import org.junit.runner.RunWith;
26 |
27 | import java.util.Arrays;
28 |
29 | import static org.junit.jupiter.api.Assertions.*;
30 |
31 | @RunWith(JUnitPlatform.class)
32 | public class SinkDocumentTest {
33 |
34 | private static BsonDocument flatStructKey;
35 | private static BsonDocument flatStructValue;
36 |
37 | private static BsonDocument nestedStructKey;
38 | private static BsonDocument nestedStructValue;
39 |
40 | @BeforeAll
41 | public static void initBsonDocs() {
42 |
43 | flatStructKey = new BsonDocument();
44 | flatStructKey.put("_id", new BsonObjectId(ObjectId.get()));
45 | flatStructKey.put("myBoolean",new BsonBoolean(true));
46 | flatStructKey.put("myInt",new BsonInt32(42));
47 | flatStructKey.put("myBytes",new BsonBinary(new byte[] {65,66,67}));
48 | BsonArray ba1 = new BsonArray();
49 | ba1.addAll(Arrays.asList(new BsonInt32(1),new BsonInt32(2),new BsonInt32(3)));
50 | flatStructKey.put("myArray", ba1);
51 |
52 | flatStructValue = new BsonDocument();
53 | flatStructValue.put("myLong",new BsonInt64(42L));
54 | flatStructValue.put("myDouble",new BsonDouble(23.23d));
55 | flatStructValue.put("myString",new BsonString("BSON"));
56 | flatStructValue.put("myBytes",new BsonBinary(new byte[] {120,121,122}));
57 | BsonArray ba2 = new BsonArray();
58 | ba2.addAll(Arrays.asList(new BsonInt32(9),new BsonInt32(8),new BsonInt32(7)));
59 | flatStructValue.put("myArray", ba2);
60 |
61 | nestedStructKey = new BsonDocument();
62 | nestedStructKey.put("_id", new BsonDocument("myString", new BsonString("doc")));
63 | nestedStructKey.put("mySubDoc", new BsonDocument("mySubSubDoc",
64 | new BsonDocument("myInt",new BsonInt32(23))));
65 |
66 | nestedStructValue = new BsonDocument();
67 | nestedStructValue.put("mySubDocA", new BsonDocument("myBoolean", new BsonBoolean(false)));
68 | nestedStructValue.put("mySubDocB", new BsonDocument("mySubSubDocC",
69 | new BsonDocument("myString",new BsonString("some text..."))));
70 |
71 | }
72 |
73 | @Test
74 | @DisplayName("test SinkDocument clone with missing key / value")
75 | public void testCloneNoKeyValue() {
76 |
77 | SinkDocument orig = new SinkDocument(null,null);
78 |
79 | assertAll("orig key/value docs NOT present",
80 | () -> assertFalse(orig.getKeyDoc().isPresent()),
81 | () -> assertFalse(orig.getValueDoc().isPresent())
82 | );
83 |
84 | SinkDocument clone = orig.clone();
85 |
86 | assertAll("clone key/value docs NOT present",
87 | () -> assertFalse(clone.getKeyDoc().isPresent()),
88 | () -> assertFalse(clone.getValueDoc().isPresent())
89 | );
90 |
91 | }
92 |
93 | @Test
94 | @DisplayName("test SinkDocument clone of flat key / value")
95 | public void testCloneFlatKeyValue() {
96 |
97 | SinkDocument orig = new SinkDocument(flatStructKey, flatStructValue);
98 |
99 | checkClonedAssertions(orig);
100 |
101 | }
102 |
103 | @Test
104 | @DisplayName("test SinkDocument clone of nested key / value")
105 | public void testCloneNestedKeyValue() {
106 |
107 | SinkDocument orig = new SinkDocument(nestedStructKey, nestedStructValue);
108 |
109 | checkClonedAssertions(orig);
110 |
111 | }
112 |
113 | private void checkClonedAssertions(SinkDocument orig) {
114 |
115 | assertAll("orig key/value docs present",
116 | () -> assertTrue(orig.getKeyDoc().isPresent()),
117 | () -> assertTrue(orig.getValueDoc().isPresent())
118 | );
119 |
120 | SinkDocument clone = orig.clone();
121 |
122 | assertAll("clone key/value docs present",
123 | () -> assertTrue(clone.getKeyDoc().isPresent()),
124 | () -> assertTrue(clone.getValueDoc().isPresent())
125 | );
126 |
127 | assertAll("check equality of key/value BSON document structure of clone vs. orig",
128 | () -> assertTrue(clone.getKeyDoc().get().equals(orig.getKeyDoc().get())),
129 | () -> assertTrue(clone.getValueDoc().get().equals(orig.getValueDoc().get()))
130 | );
131 | }
132 |
133 | }
134 |
--------------------------------------------------------------------------------
/src/test/java/com/github/cedelsb/kafka/connect/smt/converter/SinkFieldConverterTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright (c) 2017. Hans-Peter Grahsl (grahslhp@gmail.com)
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package com.github.cedelsb.kafka.connect.smt.converter;
18 |
19 | import com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.*;
20 | import com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical.DateFieldConverter;
21 | import com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical.DecimalFieldConverter;
22 | import com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical.TimeFieldConverter;
23 | import com.github.cedelsb.kafka.connect.smt.converter.types.sink.bson.logical.TimestampFieldConverter;
24 | import org.apache.kafka.connect.data.*;
25 | import org.apache.kafka.connect.errors.DataException;
26 | import org.bson.*;
27 | import org.junit.jupiter.api.DisplayName;
28 | import org.junit.jupiter.api.DynamicTest;
29 | import org.junit.jupiter.api.Test;
30 | import org.junit.jupiter.api.TestFactory;
31 | import org.junit.platform.runner.JUnitPlatform;
32 | import org.junit.runner.RunWith;
33 |
34 | import java.math.BigDecimal;
35 | import java.nio.ByteBuffer;
36 | import java.time.LocalDate;
37 | import java.time.LocalTime;
38 | import java.time.ZoneOffset;
39 | import java.time.ZonedDateTime;
40 | import java.util.ArrayList;
41 | import java.util.Arrays;
42 | import java.util.List;
43 |
44 | import static org.junit.jupiter.api.Assertions.*;
45 | import static org.junit.jupiter.api.DynamicTest.dynamicTest;
46 |
47 | @RunWith(JUnitPlatform.class)
48 | public class SinkFieldConverterTest {
49 |
50 | @TestFactory
51 | @DisplayName("tests for boolean field conversions")
52 | public List<DynamicTest> testBooleanFieldConverter() {
53 |
54 | SinkFieldConverter converter = new BooleanFieldConverter();
55 |
56 | List<DynamicTest> tests = new ArrayList<>();
57 | new ArrayList<>(Arrays.asList(true,false)).forEach(el -> {
58 | tests.add(dynamicTest("conversion with "
59 | + converter.getClass().getSimpleName() + " for "+el,
60 | () -> assertEquals(el, ((BsonBoolean)converter.toBson(el)).getValue())
61 | ));
62 | });
63 |
64 | tests.add(dynamicTest("optional type conversion checks", () -> {
65 | Schema valueOptionalDefault = SchemaBuilder.bool().optional().defaultValue(true);
66 | assertAll("",
67 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.BOOLEAN_SCHEMA)),
68 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_BOOLEAN_SCHEMA)),
69 | () -> assertEquals(valueOptionalDefault.defaultValue(),
70 | converter.toBson(null, valueOptionalDefault).asBoolean().getValue())
71 | );
72 | }));
73 |
74 | return tests;
75 |
76 | }
77 |
78 | @TestFactory
79 | @DisplayName("tests for int8 field conversions")
80 | public List<DynamicTest> testInt8FieldConverter() {
81 |
82 | SinkFieldConverter converter = new Int8FieldConverter();
83 |
84 | List<DynamicTest> tests = new ArrayList<>();
85 | new ArrayList<>(Arrays.asList(Byte.MIN_VALUE,(byte)0,Byte.MAX_VALUE)).forEach(
86 | el -> tests.add(dynamicTest("conversion with "
87 | + converter.getClass().getSimpleName() + " for "+el,
88 | () -> assertEquals((int)el, ((BsonInt32)converter.toBson(el)).getValue())
89 | ))
90 | );
91 |
92 | tests.add(dynamicTest("optional type conversions", () -> {
93 | Schema valueOptionalDefault = SchemaBuilder.int8().optional().defaultValue((byte)0);
94 | assertAll("checks",
95 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.INT8_SCHEMA)),
96 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_INT8_SCHEMA)),
97 | () -> assertEquals(((Byte)valueOptionalDefault.defaultValue()).intValue(),
98 | ((BsonInt32)converter.toBson(null, valueOptionalDefault)).getValue())
99 | );
100 | }));
101 |
102 | return tests;
103 |
104 | }
105 |
106 | @TestFactory
107 | @DisplayName("tests for int16 field conversions")
108 | public List<DynamicTest> testInt16FieldConverter() {
109 |
110 | SinkFieldConverter converter = new Int16FieldConverter();
111 |
112 | List<DynamicTest> tests = new ArrayList<>();
113 | new ArrayList<>(Arrays.asList(Short.MIN_VALUE,(short)0,Short.MAX_VALUE)).forEach(
114 | el -> tests.add(dynamicTest("conversion with "
115 | + converter.getClass().getSimpleName() + " for "+el,
116 | () -> assertEquals((short)el, ((BsonInt32)converter.toBson(el)).getValue())
117 | ))
118 | );
119 |
120 | tests.add(dynamicTest("optional type conversions", () -> {
121 | Schema valueOptionalDefault = SchemaBuilder.int16().optional().defaultValue((short)0);
122 | assertAll("checks",
123 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.INT16_SCHEMA)),
124 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_INT16_SCHEMA)),
125 | () -> assertEquals(((short)valueOptionalDefault.defaultValue()),
126 | ((BsonInt32)converter.toBson(null, valueOptionalDefault)).getValue())
127 | );
128 | }));
129 |
130 | return tests;
131 |
132 | }
133 |
134 | @TestFactory
135 | @DisplayName("tests for int32 field conversions")
136 | public List<DynamicTest> testInt32FieldConverter() {
137 |
138 | SinkFieldConverter converter = new Int32FieldConverter();
139 |
140 | List<DynamicTest> tests = new ArrayList<>();
141 | new ArrayList<>(Arrays.asList(Integer.MIN_VALUE,0,Integer.MAX_VALUE)).forEach(
142 | el -> tests.add(dynamicTest("conversion with "
143 | + converter.getClass().getSimpleName() + " for "+el,
144 | () -> assertEquals((int)el, ((BsonInt32)converter.toBson(el)).getValue())
145 | ))
146 | );
147 |
148 | tests.add(dynamicTest("optional type conversions", () -> {
149 | Schema valueOptionalDefault = SchemaBuilder.int32().optional().defaultValue(0);
150 | assertAll("checks",
151 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.INT32_SCHEMA)),
152 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_INT32_SCHEMA)),
153 | () -> assertEquals(valueOptionalDefault.defaultValue(),
154 | ((BsonInt32)converter.toBson(null, valueOptionalDefault)).getValue())
155 | );
156 | }));
157 |
158 | return tests;
159 |
160 | }
161 |
162 | @TestFactory
163 | @DisplayName("tests for int64 field conversions")
164 | public List<DynamicTest> testInt64FieldConverter() {
165 |
166 | SinkFieldConverter converter = new Int64FieldConverter();
167 |
168 | List<DynamicTest> tests = new ArrayList<>();
169 | new ArrayList<>(Arrays.asList(Long.MIN_VALUE,0L,Long.MAX_VALUE)).forEach(
170 | el -> tests.add(dynamicTest("conversion with "
171 | + converter.getClass().getSimpleName() + " for "+el,
172 | () -> assertEquals((long)el, ((BsonInt64)converter.toBson(el)).getValue())
173 | ))
174 | );
175 |
176 | tests.add(dynamicTest("optional type conversions", () -> {
177 | Schema valueOptionalDefault = SchemaBuilder.int64().optional().defaultValue(0L);
178 | assertAll("checks",
179 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.INT64_SCHEMA)),
180 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_INT64_SCHEMA)),
181 | () -> assertEquals((long)valueOptionalDefault.defaultValue(),
182 | ((BsonInt64)converter.toBson(null, valueOptionalDefault)).getValue())
183 | );
184 | }));
185 |
186 | return tests;
187 |
188 | }
189 |
190 | @TestFactory
191 | @DisplayName("tests for float32 field conversions")
192 | public List<DynamicTest> testFloat32FieldConverter() {
193 |
194 | SinkFieldConverter converter = new Float32FieldConverter();
195 |
196 | List<DynamicTest> tests = new ArrayList<>();
197 | new ArrayList<>(Arrays.asList(Float.MIN_VALUE,0f,Float.MAX_VALUE)).forEach(
198 | el -> tests.add(dynamicTest("conversion with "
199 | + converter.getClass().getSimpleName() + " for "+el,
200 | () -> assertEquals((float)el, ((BsonDouble)converter.toBson(el)).getValue())
201 | ))
202 | );
203 |
204 | tests.add(dynamicTest("optional type conversions", () -> {
205 | Schema valueOptionalDefault = SchemaBuilder.float32().optional().defaultValue(0.0f);
206 | assertAll("checks",
207 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.FLOAT32_SCHEMA)),
208 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_FLOAT32_SCHEMA)),
209 | () -> assertEquals(((Float)valueOptionalDefault.defaultValue()).doubleValue(),
210 | ((BsonDouble)converter.toBson(null, valueOptionalDefault)).getValue())
211 | );
212 | }));
213 |
214 | return tests;
215 |
216 | }
217 |
218 | @TestFactory
219 | @DisplayName("tests for float64 field conversions")
220 | public List<DynamicTest> testFloat64FieldConverter() {
221 |
222 | SinkFieldConverter converter = new Float64FieldConverter();
223 |
224 | List<DynamicTest> tests = new ArrayList<>();
225 | new ArrayList<>(Arrays.asList(Double.MIN_VALUE,0d,Double.MAX_VALUE)).forEach(
226 | el -> tests.add(dynamicTest("conversion with "
227 | + converter.getClass().getSimpleName() + " for "+el,
228 | () -> assertEquals((double)el, ((BsonDouble)converter.toBson(el)).getValue())
229 | ))
230 | );
231 |
232 | tests.add(dynamicTest("optional type conversions", () -> {
233 | Schema valueOptionalDefault = SchemaBuilder.float64().optional().defaultValue(0.0d);
234 | assertAll("checks",
235 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.FLOAT64_SCHEMA)),
236 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_FLOAT64_SCHEMA)),
237 | () -> assertEquals(valueOptionalDefault.defaultValue(),
238 | ((BsonDouble)converter.toBson(null, valueOptionalDefault)).getValue())
239 | );
240 | }));
241 |
242 | return tests;
243 |
244 | }
245 |
246 | @TestFactory
247 | @DisplayName("tests for string field conversions")
248 | public List<DynamicTest> testStringFieldConverter() {
249 |
250 | SinkFieldConverter converter = new StringFieldConverter();
251 |
252 | List<DynamicTest> tests = new ArrayList<>();
253 | new ArrayList<>(Arrays.asList("fooFOO","","blahBLAH")).forEach(
254 | el -> tests.add(dynamicTest("conversion with "
255 | + converter.getClass().getSimpleName() + " for "+el,
256 | () -> assertEquals(el, ((BsonString)converter.toBson(el)).getValue())
257 | ))
258 | );
259 |
260 | tests.add(dynamicTest("optional type conversions", () -> {
261 | Schema valueOptionalDefault = SchemaBuilder.string().optional().defaultValue("");
262 | assertAll("checks",
263 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.STRING_SCHEMA)),
264 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_STRING_SCHEMA)),
265 | () -> assertEquals(valueOptionalDefault.defaultValue(),
266 | ((BsonString)converter.toBson(null, valueOptionalDefault)).getValue())
267 | );
268 | }));
269 |
270 | return tests;
271 |
272 | }
273 |
274 | @TestFactory
275 | @DisplayName("tests for bytes field conversions based on byte[]")
276 | public List<DynamicTest> testBytesFieldConverterByteArray() {
277 |
278 | SinkFieldConverter converter = new BytesFieldConverter();
279 |
280 | List<DynamicTest> tests = new ArrayList<>();
281 | new ArrayList<>(Arrays.asList(new byte[]{-128,-127,0},new byte[]{},new byte[]{0,126,127})).forEach(
282 | el -> tests.add(dynamicTest("conversion with "
283 | + converter.getClass().getSimpleName() + " for "+Arrays.toString(el),
284 | () -> assertEquals(el, ((BsonBinary)converter.toBson(el)).getData())
285 | ))
286 | );
287 |
288 | tests.add(dynamicTest("optional type conversions", () -> {
289 | Schema valueOptionalDefault = SchemaBuilder.bytes().optional().defaultValue(new byte[]{});
290 | assertAll("checks",
291 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.BYTES_SCHEMA)),
292 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_BYTES_SCHEMA)),
293 | () -> assertEquals(valueOptionalDefault.defaultValue(),
294 | ((BsonBinary)converter.toBson(null, valueOptionalDefault)).getData())
295 | );
296 | }));
297 |
298 | return tests;
299 |
300 | }
301 |
302 | @TestFactory
303 | @DisplayName("tests for bytes field conversions based on ByteBuffer")
304 | public List<DynamicTest> testBytesFieldConverterByteBuffer() {
305 |
306 | SinkFieldConverter converter = new BytesFieldConverter();
307 |
308 | List<DynamicTest> tests = new ArrayList<>();
309 | new ArrayList<>(Arrays.asList(ByteBuffer.wrap(new byte[]{-128,-127,0}),
310 | ByteBuffer.wrap(new byte[]{}),
311 | ByteBuffer.wrap(new byte[]{0,126,127}))).forEach(
312 | el -> tests.add(dynamicTest("conversion with "
313 | + converter.getClass().getSimpleName() + " for "+el.toString()
314 | +" -> "+Arrays.toString(el.array()),
315 | () -> assertEquals(el.array(), ((BsonBinary)converter.toBson(el)).getData())
316 | ))
317 | );
318 |
319 | tests.add(dynamicTest("optional type conversions", () -> {
320 | Schema valueOptionalDefault = SchemaBuilder.bytes().optional().defaultValue(ByteBuffer.wrap(new byte[]{}));
321 | assertAll("checks",
322 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Schema.BYTES_SCHEMA)),
323 | () -> assertEquals(new BsonNull(), converter.toBson(null, Schema.OPTIONAL_BYTES_SCHEMA)),
324 | () -> assertEquals(((ByteBuffer)valueOptionalDefault.defaultValue()).array(),
325 | ((BsonBinary)converter.toBson(null, valueOptionalDefault)).getData())
326 | );
327 | }));
328 |
329 | return tests;
330 |
331 | }
332 |
333 | @Test
334 | @DisplayName("tests for bytes field conversions with invalid type")
335 | public void testBytesFieldConverterInvalidType() {
336 | assertThrows(DataException.class, () -> new BytesFieldConverter().toBson(new Object()));
337 | }
338 |
339 | @TestFactory
340 | @DisplayName("tests for logical type date field conversions")
341 | public List<DynamicTest> testDateFieldConverter() {
342 |
343 | SinkFieldConverter converter = new DateFieldConverter();
344 |
345 | List<DynamicTest> tests = new ArrayList<>();
346 | new ArrayList<>(Arrays.asList(
347 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1970,1,1), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant()),
348 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1983,7,31), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant()),
349 | java.util.Date.from(ZonedDateTime.of(LocalDate.now(), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant())
350 | )).forEach(
351 | el -> tests.add(dynamicTest("conversion with "
352 | + converter.getClass().getSimpleName() + " for "+el,
353 | () -> assertEquals(el.toInstant().getEpochSecond()*1000,
354 | ((BsonDateTime)converter.toBson(el)).getValue())
355 | ))
356 | );
357 |
358 | tests.add(dynamicTest("optional type conversions", () -> {
359 | Schema valueOptionalDefault = Date.builder().optional().defaultValue(
360 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1970,1,1), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant())
361 | );
362 | assertAll("checks",
363 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Date.SCHEMA)),
364 | () -> assertEquals(new BsonNull(), converter.toBson(null, Date.builder().optional())),
365 | () -> assertEquals(((java.util.Date)valueOptionalDefault.defaultValue()).toInstant().getEpochSecond()*1000,
366 | ((BsonDateTime)converter.toBson(null, valueOptionalDefault)).getValue())
367 | );
368 | }));
369 |
370 | return tests;
371 |
372 | }
373 |
374 | @TestFactory
375 | @DisplayName("tests for logical type time field conversions")
376 | public List<DynamicTest> testTimeFieldConverter() {
377 |
378 | SinkFieldConverter converter = new TimeFieldConverter();
379 |
380 | List<DynamicTest> tests = new ArrayList<>();
381 | new ArrayList<>(Arrays.asList(
382 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1970,1,1), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant()),
383 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1970,1,1), LocalTime.NOON, ZoneOffset.systemDefault()).toInstant())
384 | )).forEach(
385 | el -> tests.add(dynamicTest("conversion with "
386 | + converter.getClass().getSimpleName() + " for "+el,
387 | () -> assertEquals(el.toInstant().getEpochSecond()*1000,
388 | ((BsonDateTime)converter.toBson(el)).getValue())
389 | ))
390 | );
391 |
392 | tests.add(dynamicTest("optional type conversions", () -> {
393 | Schema valueOptionalDefault = Time.builder().optional().defaultValue(
394 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1970,1,1), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant())
395 | );
396 | assertAll("checks",
397 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Time.SCHEMA)),
398 | () -> assertEquals(new BsonNull(), converter.toBson(null, Time.builder().optional())),
399 | () -> assertEquals(((java.util.Date)valueOptionalDefault.defaultValue()).toInstant().getEpochSecond()*1000,
400 | ((BsonDateTime)converter.toBson(null, valueOptionalDefault)).getValue())
401 | );
402 | }));
403 |
404 | return tests;
405 |
406 | }
407 |
408 | @TestFactory
409 | @DisplayName("tests for logical type timestamp field conversions")
410 | public List<DynamicTest> testTimestampFieldConverter() {
411 |
412 | SinkFieldConverter converter = new TimestampFieldConverter();
413 |
414 | List<DynamicTest> tests = new ArrayList<>();
415 | new ArrayList<>(Arrays.asList(
416 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1970,1,1), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant()),
417 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1983,7,31), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant()),
418 | java.util.Date.from(ZonedDateTime.of(LocalDate.now(), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant())
419 | )).forEach(
420 | el -> tests.add(dynamicTest("conversion with "
421 | + converter.getClass().getSimpleName() + " for "+el,
422 | () -> assertEquals(el.toInstant().getEpochSecond()*1000,
423 | ((BsonDateTime)converter.toBson(el)).getValue())
424 | ))
425 | );
426 |
427 | tests.add(dynamicTest("optional type conversions", () -> {
428 | Schema valueOptionalDefault = Timestamp.builder().optional().defaultValue(
429 | java.util.Date.from(ZonedDateTime.of(LocalDate.of(1970,1,1), LocalTime.MIDNIGHT, ZoneOffset.systemDefault()).toInstant())
430 | );
431 | assertAll("checks",
432 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Timestamp.SCHEMA)),
433 | () -> assertEquals(new BsonNull(), converter.toBson(null, Timestamp.builder().optional())),
434 | () -> assertEquals(((java.util.Date)valueOptionalDefault.defaultValue()).toInstant().getEpochSecond()*1000,
435 | ((BsonDateTime)converter.toBson(null, valueOptionalDefault)).getValue())
436 | );
437 | }));
438 |
439 | return tests;
440 |
441 | }
442 |
443 | @TestFactory
444 | @DisplayName("tests for logical type decimal field conversions (new)")
445 | public List<DynamicTest> testDecimalFieldConverterNew() {
446 |
447 | SinkFieldConverter converter = new DecimalFieldConverter();
448 |
449 | List<DynamicTest> tests = new ArrayList<>();
450 | new ArrayList<>(Arrays.asList(
451 | new BigDecimal("-1234567890.09876543210"),
452 | BigDecimal.ZERO,
453 | new BigDecimal("+1234567890.09876543210")
454 | )).forEach(
455 | el -> tests.add(dynamicTest("conversion with "
456 | + converter.getClass().getSimpleName() + " for "+el,
457 | () -> assertEquals(el,
458 | ((BsonDecimal128)converter.toBson(el)).getValue().bigDecimalValue())
459 | ))
460 | );
461 |
462 | tests.add(dynamicTest("optional type conversions", () -> {
463 | Schema valueOptionalDefault = Decimal.builder(0).optional().defaultValue(BigDecimal.ZERO);
464 | assertAll("checks",
465 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Decimal.schema(0))),
466 | () -> assertEquals(new BsonNull(), converter.toBson(null, Decimal.builder(0).optional())),
467 | () -> assertEquals(valueOptionalDefault.defaultValue(),
468 | ((BsonDecimal128)converter.toBson(null,valueOptionalDefault)).getValue().bigDecimalValue())
469 | );
470 | }));
471 |
472 | return tests;
473 |
474 | }
475 |
476 | @TestFactory
477 | @DisplayName("tests for logical type decimal field conversions (legacy)")
478 | public List<DynamicTest> testDecimalFieldConverterLegacy() {
479 |
480 | SinkFieldConverter converter =
481 | new DecimalFieldConverter(DecimalFieldConverter.Format.LEGACYDOUBLE);
482 |
483 | List<DynamicTest> tests = new ArrayList<>();
484 | new ArrayList<>(Arrays.asList(
485 | new BigDecimal("-1234567890.09876543210"),
486 | BigDecimal.ZERO,
487 | new BigDecimal("+1234567890.09876543210")
488 | )).forEach(
489 | el -> tests.add(dynamicTest("conversion with "
490 | + converter.getClass().getSimpleName() + " for "+el,
491 | () -> assertEquals(el.doubleValue(),
492 | ((BsonDouble)converter.toBson(el)).getValue())
493 | ))
494 | );
495 |
496 | tests.add(dynamicTest("optional type conversions", () -> {
497 | Schema valueOptionalDefault = Decimal.builder(0).optional().defaultValue(BigDecimal.ZERO);
498 | assertAll("checks",
499 | () -> assertThrows(DataException.class, () -> converter.toBson(null, Decimal.schema(0))),
500 | () -> assertEquals(new BsonNull(), converter.toBson(null, Decimal.builder(0).optional())),
501 | () -> assertEquals(((BigDecimal)valueOptionalDefault.defaultValue()).doubleValue(),
502 | ((BsonDouble)converter.toBson(null,valueOptionalDefault)).getValue())
503 | );
504 | }));
505 |
506 | return tests;
507 |
508 | }
509 |
510 | @Test
511 | @DisplayName("tests for logical type decimal field conversions (invalid)")
512 | public void testDecimalFieldConverterInvalidType() {
513 | assertThrows(DataException.class, () -> new DecimalFieldConverter().toBson(new Object()));
514 | }
515 |
516 | }
517 |
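Note: the "optional type conversions" blocks above all assert the same null-handling contract for SinkFieldConverter.toBson(value, schema): a required schema with a null value throws DataException, an optional schema yields BsonNull, and an optional schema with a default yields the converted default. A minimal sketch of that contract, assuming only what the assertions imply (the class name SketchFieldConverter is hypothetical and not part of this repository):

// Hedged sketch of the null-handling contract these tests assert; not the project's
// actual base-class implementation.
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.errors.DataException;
import org.bson.BsonNull;
import org.bson.BsonValue;

public abstract class SketchFieldConverter {

    // subclasses convert a non-null value of their Connect type
    public abstract BsonValue toBson(Object data);

    public BsonValue toBson(Object data, Schema fieldSchema) {
        if (data != null) {
            return toBson(data);                       // normal conversion path
        }
        if (!fieldSchema.isOptional()) {
            throw new DataException("schema is required but value was null");
        }
        if (fieldSchema.defaultValue() != null) {
            return toBson(fieldSchema.defaultValue()); // optional with default
        }
        return new BsonNull();                         // optional without default
    }
}

Concrete converters such as StringFieldConverter or BytesFieldConverter would then only need to implement the single-argument toBson for their type, which is exactly what the per-type dynamic tests above exercise.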
--------------------------------------------------------------------------------
/src/test/resources/docker/compose.yaml:
--------------------------------------------------------------------------------
1 | ---
2 | version: '2'
3 | services:
4 | zookeeper:
5 | image: confluentinc/cp-zookeeper:7.2.2
6 | hostname: zookeeper
7 | ports:
8 | - "2181:2181"
9 | environment:
10 | ZOOKEEPER_CLIENT_PORT: 2181
11 | ZOOKEEPER_TICK_TIME: 2000
12 |
13 | kafkabroker:
14 | image: confluentinc/cp-kafka:7.2.2
15 | hostname: kafkabroker
16 | depends_on:
17 | - zookeeper
18 | ports:
19 | - "9092:9092"
20 | environment:
21 | KAFKA_BROKER_ID: 1
22 | KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
23 | KAFKA_AUTO_CREATE_TOPICS_ENABLE: "true"
24 | KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://kafkabroker:9092'
25 | KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
26 | KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
27 |
28 | schemaregistry:
29 | image: confluentinc/cp-schema-registry:7.2.2
30 | hostname: schemaregistry
31 | depends_on:
32 | - zookeeper
33 | - kafkabroker
34 | ports:
35 | - "8081:8081"
36 | environment:
37 | SCHEMA_REGISTRY_HOST_NAME: schemaregistry
38 | SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'kafkabroker:9092'
39 | SCHEMA_REGISTRY_LISTENERS: http://0.0.0.0:8081
40 |
41 | kafkaconnect:
42 | image: confluentinc/cp-kafka-connect:7.2.2
43 | hostname: kafkaconnect
44 | depends_on:
45 | - zookeeper
46 | - kafkabroker
47 | - schemaregistry
48 | ports:
49 | - "8083:8083"
50 | volumes:
51 | - ../../../../target/kafka-tojsonstring-smt:/usr/share/confluent-hub-components/kafka-tojsonstring-smt
52 | environment:
53 | CONNECT_BOOTSTRAP_SERVERS: 'kafkabroker:9092'
54 | CONNECT_REST_ADVERTISED_HOST_NAME: kafkaconnect
55 | CONNECT_REST_PORT: 8083
56 | CONNECT_GROUP_ID: compose-connect-group
57 | CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
58 | CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
59 | CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
60 | CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
61 | CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
62 | CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
63 | CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
64 | CONNECT_KEY_CONVERTER: io.confluent.connect.avro.AvroConverter
65 | CONNECT_KEY_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schemaregistry:8081'
66 | CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
67 | CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: 'http://schemaregistry:8081'
68 | CONNECT_INTERNAL_KEY_CONVERTER: org.apache.kafka.connect.json.JsonConverter
69 | CONNECT_INTERNAL_VALUE_CONVERTER: org.apache.kafka.connect.json.JsonConverter
70 | CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
71 | CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
72 |
73 | command:
74 | - bash
75 | - -c
76 | - |
77 | echo "Installing Connector"
78 | confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:10.5.4
79 | #
80 | echo "Launching Kafka Connect worker"
81 | /etc/confluent/docker/run &
82 | #
83 | sleep infinity
84 |
85 | postgres:
86 | image: postgres
87 | hostname: postgres
88 | environment:
89 | POSTGRES_USER: postgres
90 | POSTGRES_PASSWORD: postgres
91 | PGDATA: /data/postgres
92 | volumes:
93 | - ./init.sql:/docker-entrypoint-initdb.d/init.sql
94 | ports:
95 | - "5432:5432"
96 |
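The kafkaconnect service above mounts the built SMT from target/kafka-tojsonstring-smt and exposes the Connect REST API on port 8083. As a hedged illustration only (not necessarily how Record2JsonStringIT drives this setup), a test running on the host could register the JDBC sink against this stack with Java 11's HttpClient:

// Hedged sketch: POST the connector definition to the Connect worker started by this
// compose file. Assumes the stack is up and port 8083 is reachable from the host.
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.file.Files;
import java.nio.file.Path;

public class RegisterConnectorSketch {
    public static void main(String[] args) throws Exception {
        // connector definition shipped with the test resources (see jdbcsinkconnector.json below)
        String config = Files.readString(Path.of("src/test/resources/docker/jdbcsinkconnector.json"));
        HttpRequest request = HttpRequest.newBuilder()
                .uri(URI.create("http://localhost:8083/connectors"))
                .header("Content-Type", "application/json")
                .POST(HttpRequest.BodyPublishers.ofString(config))
                .build();
        HttpResponse<String> response = HttpClient.newHttpClient()
                .send(request, HttpResponse.BodyHandlers.ofString());
        System.out.println(response.statusCode() + " " + response.body());
    }
}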
--------------------------------------------------------------------------------
/src/test/resources/docker/init.sql:
--------------------------------------------------------------------------------
1 | CREATE SCHEMA smttest
2 | CREATE TABLE smttest(
3 | jsonstring TEXT
4 | );
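For completeness, a hedged sketch of reading back what the JDBC sink writes into this table from the host, using the port mapping and credentials from compose.yaml (requires the PostgreSQL JDBC driver on the classpath; ReadBackSketch is illustrative and not part of the test suite):

// Hedged sketch: query the jsonstring column of the table created by init.sql /
// auto-created by the sink, connecting through the mapped localhost:5432 port.
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

public class ReadBackSketch {
    public static void main(String[] args) throws Exception {
        String url = "jdbc:postgresql://localhost:5432/postgres?currentSchema=smttest";
        try (Connection conn = DriverManager.getConnection(url, "postgres", "postgres");
             Statement stmt = conn.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT jsonstring FROM smttest")) {
            while (rs.next()) {
                System.out.println(rs.getString("jsonstring"));
            }
        }
    }
}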
--------------------------------------------------------------------------------
/src/test/resources/docker/jdbcsinkconnector.json:
--------------------------------------------------------------------------------
1 | {
2 | "name": "jdbc-sink",
3 | "config": {
4 | "connector.class": "io.confluent.connect.jdbc.JdbcSinkConnector",
5 | "tasks.max": "1",
6 | "topics": "smttest",
7 | "connection.host": "postgres",
8 | "connection.port": "5432",
9 | "connection.user": "postgres",
10 | "connection.password": "postgres",
11 | "connection.url": "jdbc:postgresql://postgres:5432/postgres?currentSchema=smttest",
12 | "auto.create": "true",
13 | "name": "jdbc-sink",
14 | "auto.create": true,
15 | "transforms": "tojson",
16 | "transforms.tojson.json.string.field.name" : "jsonstring",
17 | "transforms.tojson.type": "com.github.cedelsb.kafka.connect.smt.Record2JsonStringConverter$Value"
18 | }
19 | }
--------------------------------------------------------------------------------
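A hedged sketch of what the "tojson" transform configured above does to a sink record, assuming the usual Connect SMT pattern for Record2JsonStringConverter$Value (the nested class's generic signature and the exact shape of the transformed value are assumptions, not taken from the source):

// Hedged sketch: configure and apply the SMT the way the connector config wires it up.
import java.util.Collections;
import org.apache.kafka.connect.data.Schema;
import org.apache.kafka.connect.data.SchemaBuilder;
import org.apache.kafka.connect.data.Struct;
import org.apache.kafka.connect.sink.SinkRecord;
import com.github.cedelsb.kafka.connect.smt.Record2JsonStringConverter;

public class SmtUsageSketch {
    public static void main(String[] args) {
        Record2JsonStringConverter.Value<SinkRecord> smt = new Record2JsonStringConverter.Value<>();
        smt.configure(Collections.singletonMap("json.string.field.name", "jsonstring"));

        Schema schema = SchemaBuilder.struct().name("smttest")
                .field("id", Schema.INT32_SCHEMA)
                .field("text", Schema.STRING_SCHEMA)
                .build();
        Struct value = new Struct(schema).put("id", 1).put("text", "hello");
        SinkRecord record = new SinkRecord("smttest", 0, null, null, schema, value, 0L);

        // Expected outcome per the config: the structured value is replaced by a value
        // carrying a single "jsonstring" field with the record serialized as a JSON string.
        SinkRecord transformed = smt.apply(record);
        System.out.println(transformed.value());
    }
}

With auto.create enabled, the JDBC sink then writes that serialized value into the jsonstring column of the target table.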