├── .gitignore ├── LICENSE ├── README.md ├── build.gradle ├── gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── settings.gradle ├── src ├── main │ ├── java │ │ └── com │ │ │ └── tngtech │ │ │ └── flink │ │ │ └── connector │ │ │ └── email │ │ │ ├── common │ │ │ ├── ConnectorOptions.java │ │ │ ├── EmailConfigOptions.java │ │ │ ├── MessageUtil.java │ │ │ ├── Protocol.java │ │ │ ├── SessionProperties.java │ │ │ └── SubRowData.java │ │ │ ├── imap │ │ │ ├── Heartbeat.java │ │ │ ├── ImapCatalog.java │ │ │ ├── ImapCatalogFactory.java │ │ │ ├── ImapCatalogOptions.java │ │ │ ├── ImapConfigOptions.java │ │ │ ├── ImapSource.java │ │ │ ├── ImapSourceException.java │ │ │ ├── ImapSourceFactory.java │ │ │ ├── ImapSourceOptions.java │ │ │ ├── ImapTableSource.java │ │ │ └── ReadableMetadata.java │ │ │ └── smtp │ │ │ ├── SmtpConfigOptions.java │ │ │ ├── SmtpSink.java │ │ │ ├── SmtpSinkException.java │ │ │ ├── SmtpSinkFactory.java │ │ │ ├── SmtpSinkOptions.java │ │ │ ├── SmtpTableSink.java │ │ │ └── WritableMetadata.java │ └── resources │ │ └── META-INF │ │ └── services │ │ └── org.apache.flink.table.factories.Factory └── test │ └── java │ └── com │ └── tngtech │ └── flink │ └── connector │ └── email │ ├── architecture │ └── ArchitectureTest.java │ ├── common │ ├── MessageUtilTest.java │ └── SubRowDataTest.java │ ├── imap │ ├── ImapCatalogTest.java │ └── ImapSourceTest.java │ ├── smtp │ └── SmtpSinkTest.java │ └── testing │ └── TestBase.java └── testing ├── .gitignore ├── README.md ├── build_and_run.sh ├── docker-compose.yaml ├── images └── client │ ├── Dockerfile │ ├── conf │ └── init.sql │ └── sql-client │ └── sql-client.sh ├── run.sh └── send_mail.sh /.gitignore: -------------------------------------------------------------------------------- 1 | ### Gradle ### 2 | .gradle 3 | build/ 4 | 5 | # Ignore Gradle GUI config 6 | gradle-app.setting 7 | 8 | # Avoid ignoring Gradle wrapper jar file (.jar files are usually ignored) 9 | 
!gradle-wrapper.jar 10 | 11 | # Cache of project 12 | .gradletasknamecache 13 | 14 | ### Gradle Patch ### 15 | **/build/ 16 | 17 | .idea 18 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 
34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. 
Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. 
You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 
122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. 
In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. 
We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Introduction 2 | 3 | Connect directly to your email server using Flink. This project provides an IMAP source connector, 4 | an IMAP catalog, and a SMTP sink connector. These allow you to read (and send) emails directly from 5 | Flink. 6 | 7 | The connectors are written for Flink's Table API and SQL. They are not meant to be used with the 8 | DataStream API. 9 | 10 | > :warning: This project is not considered production-ready. 11 | > It is currently intended to be more of a playground project. 12 | 13 | # Getting Started 14 | 15 | We currently do not publish this connector as a package. If you want to try it out, clone this 16 | repository and take a look at [testing/README.md](testing/README.md). There you will find a 17 | self-contained docker-compose setup along with a dockerized local mail server. 
18 | 19 | # Connectors 20 | 21 | ## IMAP 22 | 23 | Exposes a specific folder on an IMAP server as a table source: 24 | 25 | 26 | ```sql 27 | CREATE TABLE inbox ( 28 | uid STRING NOT NULL METADATA, 29 | subject STRING METADATA, 30 | content STRING 31 | ) WITH ( 32 | 'connector' = 'imap', 33 | 'host' = '…', 34 | 'user' = '…', 35 | 'password' = '…' 36 | ); 37 | ``` 38 | 39 | 40 | Most message information are exposed through metadata. The only information exposed through physical 41 | columns is the message content itself, which is deserialized using a given format. By default, 42 | the `raw` format is used, meaning that a single physical column of type `STRING` can be declared to 43 | contain the content. 44 | 45 | ### Configuration 46 | 47 | Property | Type | Required | Default | Description 48 | -------------------|----------|----------|-------------|------------ 49 | host | String | Yes | | 50 | user | String | Yes | | 51 | password | String | Yes | | 52 | port | Integer | | (automatic) | Port of the IMAP server. If omitted, the default IMAP port is used. 53 | ssl | Boolean | Yes | false | Whether to connect using SSL. 54 | folder | String | Yes | INBOX | Name of the IMAP folder to use. 55 | format | String | Yes | raw | Format with which to decode the message content. 56 | mode | Enum | Yes | all | Set to "new" to only collect new messages arriving (unbounded), "all" to also fetch existing messages (unbounded), or "current" to only fetch existing emails and finish (bounded). 57 | offset | Long | | | If set, existing messages are only read starting from this specified UID. This requires "mode" to be "all" or "current". 58 | batch-size | Integer | | 50 | Defines how many existing messages are queried at a time. This requires "mode" to be "all". 59 | connection.timeout | Duration | | 1min | Timeout when connecting to the server before giving up. 
60 | heartbeat.interval | Duration | | 15min | How often to send a heartbeat request to the IMAP server to keep the IDLE connection alive. 61 | interval | Duration | | 1s | If the IMAP server does not support the IDLE protocol, the connector falls back to polling. This defines the interval with which to do so. 62 | 63 | ### Metadata 64 | 65 | Key | Type 66 | ------------|--------------------------------------------- 67 | uid | `BIGINT NOT NULL` 68 | subject | `STRING` 69 | sent | `TIMESTAMP WITH LOCAL TIMEZONE(3) NOT NULL` 70 | received | `TIMESTAMP WITH LOCAL TIMEZONE(3) NOT NULL` 71 | from | `ARRAY` 72 | fromFirst | `STRING` 73 | to | `ARRAY` 74 | toFirst | `STRING` 75 | cc | `ARRAY` 76 | bcc | `ARRAY` 77 | recipients | `ARRAY` 78 | replyTo | `ARRAY` 79 | contentType | `STRING` 80 | sizeInBytes | `INT NOT NULL` 81 | seen | `BOOLEAN` 82 | draft | `BOOLEAN` 83 | answered | `BOOLEAN` 84 | headers | `ARRAY>` 85 | 86 | ## SMTP 87 | 88 | Exposes an SMTP server as a table sink, effectively allowing to send emails by writing into the 89 | sink. 90 | 91 | > :warning: This is an early-stage prototype and currently only supports sending plain text emails. 92 | 93 | 94 | ```sql 95 | CREATE TABLE outbox ( 96 | subject STRING NOT NULL METADATA, 97 | `from` ARRAY METADATA, 98 | `to` ARRAY METADATA, 99 | content STRING 100 | ) WITH ( 101 | 'connector' = 'smtp', 102 | 'host' = '…', 103 | 'user' = '…', 104 | 'password' = '…' 105 | ); 106 | ``` 107 | 108 | 109 | Most message information can be written through metadata. The only information writable through 110 | physical columns is the message content itself, which is serialized using a given format. By 111 | default, the `raw` format is used, meaning that a single physical column of type `STRING` can be 112 | declared to write the content to. 
113 | 114 | ### Configuration 115 | 116 | Property | Type | Required | Default | Description 117 | -------------------|----------|----------|-------------|------------ 118 | host | String | Yes | | 119 | user | String | | | 120 | password | String | | | 121 | port | Integer | | (automatic) | Port of the SMTP server. If omitted, the default SMTP port is used. 122 | ssl | Boolean | Yes | false | Whether to connect using SSL. 123 | format | String | Yes | raw | Format with which to encode the message content. 124 | 125 | ### Metadata 126 | 127 | Key | Type 128 | ------------|--------------------------------------------- 129 | subject | `STRING` 130 | from | `ARRAY` 131 | to | `ARRAY` 132 | cc | `ARRAY` 133 | bcc | `ARRAY` 134 | replyTo | `ARRAY` 135 | 136 | # Catalogs 137 | 138 | ## IMAP 139 | 140 | Lists all folders on the IMAP server and exposes them as table sources using the `imap` source 141 | connector above. 142 | 143 | 144 | ```sql 145 | CREATE CATALOG mail WITH ( 146 | 'type' = 'imap', 147 | 'host' = '…', 148 | 'user' = '…', 149 | 'password' = '…' 150 | ); 151 | ``` 152 | 153 | 154 | The default (and only) database in an IMAP catalog is called `folders`. 155 | 156 | Write operations on the catalog are generally not supported as there is no backing persistence 157 | layer, but rather the catalog acts directly on the IMAP server. The catalog is useful for quick 158 | discovery, and the tables can then be stored in a persistent catalog instead. 159 | 160 | You can use dynamic table hints to pass any custom options to the source tables in the catalog: 161 | 162 | 163 | ```sql 164 | SELECT * FROM mail.folders.INBOX /*+ OPTIONS ('mode' = 'new') */; 165 | ``` 166 | 167 | 168 | # License 169 | 170 | See `LICENSE`. 
171 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | plugins { 2 | id "java" 3 | id "com.diffplug.spotless" version "5.14.2" 4 | } 5 | 6 | group "com.tngtech" 7 | version "0.1-SNAPSHOT" 8 | 9 | repositories { 10 | mavenCentral() 11 | } 12 | 13 | java { 14 | sourceCompatibility = JavaVersion.VERSION_1_8 15 | targetCompatibility = JavaVersion.VERSION_1_8 16 | } 17 | 18 | dependencies { 19 | testImplementation "org.junit.jupiter:junit-jupiter-api:5.7.2" 20 | testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:5.7.2" 21 | } 22 | 23 | spotless { 24 | java { 25 | importOrder() 26 | removeUnusedImports() 27 | 28 | prettier([ 29 | "prettier" : "2.0.5", 30 | "prettier-plugin-java": "0.8.0" 31 | ]).config([ 32 | "parser" : "java", 33 | "tabWidth": 4 34 | ]) 35 | } 36 | } 37 | 38 | configurations { 39 | extraLibs 40 | } 41 | 42 | jar { 43 | enabled = true 44 | duplicatesStrategy = DuplicatesStrategy.INCLUDE 45 | 46 | from { 47 | configurations.extraLibs.collect { it.isDirectory() ? 
it : zipTree(it) } 48 | } 49 | } 50 | 51 | task copyToDocker(type: Copy) { 52 | duplicatesStrategy = DuplicatesStrategy.INCLUDE 53 | from jar 54 | into "${rootDir}/testing/images/client/jars" 55 | } 56 | 57 | jar.finalizedBy copyToDocker 58 | 59 | clean.doFirst { 60 | delete "${rootDir}/testing/images/client/jars" 61 | } 62 | 63 | ext { 64 | flink = "1.13.1" 65 | lombok = "1.18.20" 66 | greenmail = "2.0.0-alpha-1" 67 | } 68 | 69 | dependencies { 70 | compileOnly "org.apache.flink:flink-table-common:${flink}" 71 | compileOnly "org.apache.flink:flink-table-api-java-bridge_2.12:${flink}" 72 | 73 | extraLibs "com.squareup.okhttp3:okhttp:4.9.1" 74 | extraLibs "com.sun.mail:jakarta.mail:2.0.1" 75 | 76 | // Lombok 77 | compileOnly "org.projectlombok:lombok:${lombok}" 78 | annotationProcessor "org.projectlombok:lombok:${lombok}" 79 | testCompileOnly "org.projectlombok:lombok:${lombok}" 80 | testAnnotationProcessor "org.projectlombok:lombok:${lombok}" 81 | 82 | // Testing 83 | 84 | testImplementation "org.apache.flink:flink-table-planner-blink_2.12:${flink}" 85 | testImplementation "org.apache.flink:flink-streaming-scala_2.12:${flink}" 86 | testImplementation "org.apache.flink:flink-test-utils_2.12:${flink}" 87 | testImplementation "com.icegreen:greenmail-junit4:${greenmail}" 88 | testImplementation "org.awaitility:awaitility-groovy:4.1.0" 89 | testImplementation "com.tngtech.junit.dataprovider:junit4-dataprovider:2.8" 90 | testImplementation 'org.assertj:assertj-core:3.20.2' 91 | testImplementation 'com.tngtech.archunit:archunit-junit4:0.20.1' 92 | 93 | // 94 | 95 | configurations.compile.extendsFrom(configurations.extraLibs) 96 | } 97 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/TNG/flink-connector-email/658727c90a8286979a6a8b3005a0ca07b513e785/gradle/wrapper/gradle-wrapper.jar 
-------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | distributionBase=GRADLE_USER_HOME 2 | distributionPath=wrapper/dists 3 | distributionUrl=https\://services.gradle.org/distributions/gradle-6.8-bin.zip 4 | zipStoreBase=GRADLE_USER_HOME 5 | zipStorePath=wrapper/dists 6 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # https://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 
29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | 86 | # Determine the Java command to use to start the JVM. 87 | if [ -n "$JAVA_HOME" ] ; then 88 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 89 | # IBM's JDK on AIX uses strange locations for the executables 90 | JAVACMD="$JAVA_HOME/jre/sh/java" 91 | else 92 | JAVACMD="$JAVA_HOME/bin/java" 93 | fi 94 | if [ ! -x "$JAVACMD" ] ; then 95 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 96 | 97 | Please set the JAVA_HOME variable in your environment to match the 98 | location of your Java installation." 99 | fi 100 | else 101 | JAVACMD="java" 102 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 103 | 104 | Please set the JAVA_HOME variable in your environment to match the 105 | location of your Java installation." 
106 | fi 107 | 108 | # Increase the maximum file descriptors if we can. 109 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 110 | MAX_FD_LIMIT=`ulimit -H -n` 111 | if [ $? -eq 0 ] ; then 112 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 113 | MAX_FD="$MAX_FD_LIMIT" 114 | fi 115 | ulimit -n $MAX_FD 116 | if [ $? -ne 0 ] ; then 117 | warn "Could not set maximum file descriptor limit: $MAX_FD" 118 | fi 119 | else 120 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 121 | fi 122 | fi 123 | 124 | # For Darwin, add options to specify how the application appears in the dock 125 | if $darwin; then 126 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 127 | fi 128 | 129 | # For Cygwin or MSYS, switch paths to Windows format before running java 130 | if [ "$cygwin" = "true" -o "$msys" = "true" ] ; then 131 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 132 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 133 | 134 | JAVACMD=`cygpath --unix "$JAVACMD"` 135 | 136 | # We build the pattern for arguments to be converted via cygpath 137 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 138 | SEP="" 139 | for dir in $ROOTDIRSRAW ; do 140 | ROOTDIRS="$ROOTDIRS$SEP$dir" 141 | SEP="|" 142 | done 143 | OURCYGPATTERN="(^($ROOTDIRS))" 144 | # Add a user-defined pattern to the cygpath arguments 145 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 146 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 147 | fi 148 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 149 | i=0 150 | for arg in "$@" ; do 151 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 152 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 153 | 154 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 155 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 156 | else 157 | eval `echo args$i`="\"$arg\"" 158 | fi 159 | i=`expr $i 
+ 1` 160 | done 161 | case $i in 162 | 0) set -- ;; 163 | 1) set -- "$args0" ;; 164 | 2) set -- "$args0" "$args1" ;; 165 | 3) set -- "$args0" "$args1" "$args2" ;; 166 | 4) set -- "$args0" "$args1" "$args2" "$args3" ;; 167 | 5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 168 | 6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 169 | 7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 170 | 8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 171 | 9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 172 | esac 173 | fi 174 | 175 | # Escape application args 176 | save () { 177 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 178 | echo " " 179 | } 180 | APP_ARGS=`save "$@"` 181 | 182 | # Collect all arguments for the java command, following the shell quoting and substitution rules 183 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 184 | 185 | exec "$JAVACMD" "$@" 186 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem https://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Resolve any "." and ".." in APP_HOME to make it shorter. 33 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi 34 | 35 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 36 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m" 37 | 38 | @rem Find java.exe 39 | if defined JAVA_HOME goto findJavaFromJavaHome 40 | 41 | set JAVA_EXE=java.exe 42 | %JAVA_EXE% -version >NUL 2>&1 43 | if "%ERRORLEVEL%" == "0" goto execute 44 | 45 | echo. 46 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 47 | echo. 48 | echo Please set the JAVA_HOME variable in your environment to match the 49 | echo location of your Java installation. 50 | 51 | goto fail 52 | 53 | :findJavaFromJavaHome 54 | set JAVA_HOME=%JAVA_HOME:"=% 55 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 56 | 57 | if exist "%JAVA_EXE%" goto execute 58 | 59 | echo. 60 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 61 | echo. 62 | echo Please set the JAVA_HOME variable in your environment to match the 63 | echo location of your Java installation. 
64 | 65 | goto fail 66 | 67 | :execute 68 | @rem Setup the command line 69 | 70 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 71 | 72 | 73 | @rem Execute Gradle 74 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %* 75 | 76 | :end 77 | @rem End local scope for the variables with windows NT shell 78 | if "%ERRORLEVEL%"=="0" goto mainEnd 79 | 80 | :fail 81 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 82 | rem the _cmd.exe /c_ return code! 83 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 84 | exit /b 1 85 | 86 | :mainEnd 87 | if "%OS%"=="Windows_NT" endlocal 88 | 89 | :omega 90 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | rootProject.name = 'flink-connector-email' 2 | 3 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/common/ConnectorOptions.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.common; 2 | 3 | import lombok.Data; 4 | import lombok.experimental.SuperBuilder; 5 | import org.apache.flink.annotation.Internal; 6 | 7 | import javax.annotation.Nullable; 8 | import java.io.Serializable; 9 | 10 | @Internal 11 | @Data 12 | @SuperBuilder(toBuilder = true) 13 | public class ConnectorOptions implements Serializable { 14 | private static final long serialVersionUID = 1L; 15 | 16 | private final String host; 17 | private final @Nullable Long port; 18 | private final @Nullable String user; 19 | private final @Nullable String password; 20 | private final Protocol protocol; 21 | 22 | public boolean usesAuthentication() { 23 | return password != null; 24 | } 25 | 26 | } 27 | 
-------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/common/EmailConfigOptions.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.common; 2 | 3 | import lombok.experimental.UtilityClass; 4 | import org.apache.flink.annotation.Internal; 5 | import org.apache.flink.configuration.ConfigOption; 6 | import org.apache.flink.configuration.ConfigOptions; 7 | 8 | @UtilityClass 9 | @Internal 10 | public class EmailConfigOptions { 11 | 12 | public static final ConfigOption HOST = ConfigOptions.key("host") 13 | .stringType() 14 | .noDefaultValue(); 15 | 16 | public static final ConfigOption PORT = ConfigOptions.key("port") 17 | .longType() 18 | .noDefaultValue(); 19 | 20 | public static final ConfigOption USER = ConfigOptions.key("user") 21 | .stringType() 22 | .noDefaultValue(); 23 | 24 | public static final ConfigOption PASSWORD = ConfigOptions.key("password") 25 | .stringType() 26 | .noDefaultValue(); 27 | 28 | public static final ConfigOption SSL = ConfigOptions.key("ssl") 29 | .booleanType() 30 | .defaultValue(false); 31 | 32 | public static final ConfigOption FORMAT = ConfigOptions.key("format") 33 | .stringType() 34 | .defaultValue("raw"); 35 | 36 | } 37 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/common/MessageUtil.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.common; 2 | 3 | import com.tngtech.flink.connector.email.smtp.SmtpSinkException; 4 | import jakarta.mail.Address; 5 | import jakarta.mail.Header; 6 | import jakarta.mail.internet.AddressException; 7 | import jakarta.mail.internet.InternetAddress; 8 | import lombok.experimental.UtilityClass; 9 | import org.apache.flink.annotation.Internal; 10 | import org.apache.flink.table.data.*; 11 | 
12 | import javax.annotation.Nullable; 13 | import java.util.Arrays; 14 | import java.util.Collections; 15 | import java.util.Enumeration; 16 | import java.util.stream.IntStream; 17 | 18 | @UtilityClass 19 | @Internal 20 | public class MessageUtil { 21 | 22 | public static @Nullable ArrayData encodeAddresses(@Nullable Address[] items) { 23 | if (items == null) { 24 | return null; 25 | } 26 | 27 | final StringData[] mappedItems = Arrays.stream(items) 28 | .map(MessageUtil::encodeAddress) 29 | .map(StringData::fromString) 30 | .toArray(StringData[]::new); 31 | 32 | return new GenericArrayData(mappedItems); 33 | } 34 | 35 | public static @Nullable String encodeFirstAddress(@Nullable Address[] items) { 36 | if (items == null) { 37 | return null; 38 | } 39 | 40 | return items.length > 0 ? encodeAddress(items[0]) : null; 41 | } 42 | 43 | public static @Nullable String encodeAddress(@Nullable Address address) { 44 | return address != null ? address.toString() : null; 45 | } 46 | 47 | public static @Nullable Address[] decodeAddresses(@Nullable ArrayData addressesData) { 48 | if (addressesData == null) { 49 | return null; 50 | } 51 | 52 | return IntStream.range(0, addressesData.size()) 53 | .mapToObj(addressesData::getString) 54 | .map(StringData::toString) 55 | .map(MessageUtil::decodeAddress) 56 | .toArray(Address[]::new); 57 | } 58 | 59 | public static @Nullable Address decodeAddress(@Nullable String address) { 60 | if (address == null) { 61 | return null; 62 | } 63 | 64 | try { 65 | return new InternetAddress(address, false); 66 | } catch (AddressException e) { 67 | throw SmtpSinkException.propagate(e); 68 | } 69 | } 70 | 71 | // --------------------------------------------------------------------------------------------- 72 | 73 | public static ArrayData encodeHeaders(Enumeration
headers) { 74 | final RowData[] headerRows = Collections.list(headers).stream() 75 | .map(MessageUtil::encodeHeader) 76 | .toArray(RowData[]::new); 77 | 78 | return new GenericArrayData(headerRows); 79 | } 80 | 81 | public static RowData encodeHeader(Header header) { 82 | return GenericRowData.of( 83 | StringData.fromString(header.getName()), 84 | StringData.fromString(header.getValue()) 85 | ); 86 | } 87 | 88 | } 89 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/common/Protocol.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.common; 2 | 3 | import lombok.Getter; 4 | import lombok.RequiredArgsConstructor; 5 | import org.apache.flink.annotation.Internal; 6 | 7 | @Internal 8 | @RequiredArgsConstructor 9 | public enum Protocol { 10 | IMAP("imap", false), 11 | IMAPS("imaps", true), 12 | SMTP("smtp", false), 13 | SMTPS("smtps", true); 14 | 15 | @Getter 16 | private final String name; 17 | 18 | @Getter 19 | private final boolean ssl; 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/common/SessionProperties.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.common; 2 | 3 | import lombok.Getter; 4 | import org.apache.flink.annotation.Internal; 5 | 6 | import java.util.Properties; 7 | 8 | @Internal 9 | public class SessionProperties { 10 | 11 | private final Protocol protocol; 12 | 13 | @Getter 14 | private final Properties properties = new Properties(); 15 | 16 | public SessionProperties(ConnectorOptions options) { 17 | this.protocol = options.getProtocol(); 18 | 19 | addProperty("mail.store.protocol", protocol.getName()); 20 | addProtocolProperty("auth", String.valueOf(options.usesAuthentication())); 21 | addProtocolProperty("host", 
options.getHost()); 22 | 23 | if (options.getPort() != null) { 24 | addProtocolProperty("port", String.valueOf(options.getPort())); 25 | } 26 | 27 | if (options.getProtocol().isSsl()) { 28 | addProtocolProperty("ssl.enable", "true"); 29 | addProtocolProperty("starttls.enable", "true"); 30 | } 31 | } 32 | 33 | public void addProperty(String key, String value) { 34 | properties.put(key, value); 35 | } 36 | 37 | public void addProtocolProperty(String key, String value) { 38 | properties.put(String.format("mail.%s.%s", protocol.getName(), key), value); 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/common/SubRowData.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.common; 2 | 3 | import org.apache.flink.annotation.Internal; 4 | import org.apache.flink.table.data.*; 5 | import org.apache.flink.types.RowKind; 6 | 7 | @Internal 8 | public class SubRowData implements RowData { 9 | 10 | private final RowData rowData; 11 | private final int startIndex; 12 | private final int endIndex; 13 | 14 | public SubRowData(RowData rowData, int startIndex, int endIndex) { 15 | this.rowData = rowData; 16 | this.startIndex = startIndex; 17 | this.endIndex = endIndex; 18 | 19 | if (startIndex < 0 || startIndex >= rowData.getArity()) { 20 | throw new IllegalArgumentException("startIndex must be within bounds."); 21 | } 22 | 23 | if (endIndex < 0 || endIndex > rowData.getArity() || endIndex < startIndex) { 24 | throw new IllegalArgumentException("endIndex must be within bounds."); 25 | } 26 | } 27 | 28 | private int convertPos(int pos) { 29 | assert pos + startIndex < endIndex; 30 | return pos + startIndex; 31 | } 32 | 33 | @Override 34 | public int getArity() { 35 | return endIndex - startIndex; 36 | } 37 | 38 | @Override 39 | public RowKind getRowKind() { 40 | return rowData.getRowKind(); 41 | } 42 | 43 | 
@Override 44 | public void setRowKind(RowKind kind) { 45 | rowData.setRowKind(kind); 46 | } 47 | 48 | @Override 49 | public boolean isNullAt(int pos) { 50 | return rowData.isNullAt(convertPos(pos)); 51 | } 52 | 53 | @Override 54 | public boolean getBoolean(int pos) { 55 | return rowData.getBoolean(convertPos(pos)); 56 | } 57 | 58 | @Override 59 | public byte getByte(int pos) { 60 | return rowData.getByte(convertPos(pos)); 61 | } 62 | 63 | @Override 64 | public short getShort(int pos) { 65 | return rowData.getShort(convertPos(pos)); 66 | } 67 | 68 | @Override 69 | public int getInt(int pos) { 70 | return rowData.getInt(convertPos(pos)); 71 | } 72 | 73 | @Override 74 | public long getLong(int pos) { 75 | return rowData.getLong(convertPos(pos)); 76 | } 77 | 78 | @Override 79 | public float getFloat(int pos) { 80 | return rowData.getFloat(convertPos(pos)); 81 | } 82 | 83 | @Override 84 | public double getDouble(int pos) { 85 | return rowData.getDouble(convertPos(pos)); 86 | } 87 | 88 | @Override 89 | public StringData getString(int pos) { 90 | return rowData.getString(convertPos(pos)); 91 | } 92 | 93 | @Override 94 | public DecimalData getDecimal(int pos, int precision, int scale) { 95 | return rowData.getDecimal(convertPos(pos), precision, scale); 96 | } 97 | 98 | @Override 99 | public TimestampData getTimestamp(int pos, int precision) { 100 | return rowData.getTimestamp(convertPos(pos), precision); 101 | } 102 | 103 | @Override 104 | public RawValueData getRawValue(int pos) { 105 | return rowData.getRawValue(convertPos(pos)); 106 | } 107 | 108 | @Override 109 | public byte[] getBinary(int pos) { 110 | return rowData.getBinary(convertPos(pos)); 111 | } 112 | 113 | @Override 114 | public ArrayData getArray(int pos) { 115 | return rowData.getArray(convertPos(pos)); 116 | } 117 | 118 | @Override 119 | public MapData getMap(int pos) { 120 | return rowData.getMap(convertPos(pos)); 121 | } 122 | 123 | @Override 124 | public RowData getRow(int pos, int numFields) { 125 | 
return rowData.getRow(convertPos(pos), numFields); 126 | } 127 | } 128 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/Heartbeat.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import com.sun.mail.imap.IMAPFolder; 4 | import jakarta.mail.MessagingException; 5 | import org.apache.flink.annotation.Internal; 6 | 7 | import java.time.Duration; 8 | 9 | /** 10 | * The IMAP IDLE protocol doesn't actually idle forever, servers might eventually stop sending 11 | * notifications if they deem the client timed out. This generally happens after ~30 minutes, though 12 | * some clients are known to go down to as much as ~10 minutes. We therefore need to send a periodic 13 | * "hearbeat" in the shape of a noop command. 14 | */ 15 | @Internal 16 | class Heartbeat extends Thread { 17 | private final IMAPFolder folder; 18 | private final Duration heartbeatInterval; 19 | 20 | public Heartbeat(IMAPFolder folder, Duration heartbeatInterval) { 21 | super("IMAP Idle Heartbeat"); 22 | 23 | this.folder = folder; 24 | this.heartbeatInterval = heartbeatInterval; 25 | } 26 | 27 | @Override 28 | public void run() { 29 | while (!Thread.interrupted()) { 30 | try { 31 | Thread.sleep(heartbeatInterval.toMillis()); 32 | 33 | folder.doCommand( 34 | protocol -> { 35 | protocol.simpleCommand("NOOP", null); 36 | return null; 37 | }); 38 | } catch (InterruptedException | MessagingException ignored) { 39 | // We want this thread to just stop 40 | } 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ImapCatalog.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import 
com.tngtech.flink.connector.email.common.SessionProperties; 4 | import jakarta.mail.Folder; 5 | import jakarta.mail.MessagingException; 6 | import jakarta.mail.Session; 7 | import jakarta.mail.Store; 8 | import lombok.RequiredArgsConstructor; 9 | import org.apache.flink.annotation.PublicEvolving; 10 | import org.apache.flink.table.api.Schema; 11 | import org.apache.flink.table.catalog.*; 12 | import org.apache.flink.table.catalog.exceptions.*; 13 | import org.apache.flink.table.catalog.stats.CatalogColumnStatistics; 14 | import org.apache.flink.table.catalog.stats.CatalogTableStatistics; 15 | import org.apache.flink.table.expressions.Expression; 16 | import org.apache.flink.table.factories.Factory; 17 | 18 | import java.util.*; 19 | import java.util.stream.Collectors; 20 | 21 | import static com.tngtech.flink.connector.email.imap.ImapConfigOptions.FOLDER; 22 | 23 | @PublicEvolving 24 | @RequiredArgsConstructor 25 | public class ImapCatalog implements Catalog { 26 | 27 | private static final String DEFAULT_DATABASE = "folders"; 28 | private static final EnumSet DEFAULT_METADATA = EnumSet.of( 29 | ReadableMetadata.UID, 30 | ReadableMetadata.SUBJECT, 31 | ReadableMetadata.SENT, 32 | ReadableMetadata.RECEIVED, 33 | ReadableMetadata.FROM, 34 | ReadableMetadata.TO, 35 | ReadableMetadata.CC, 36 | ReadableMetadata.BCC 37 | ); 38 | 39 | private final String name; 40 | private final ImapCatalogOptions options; 41 | 42 | private Store store; 43 | 44 | @Override 45 | public void open() throws CatalogException { 46 | reconnect(); 47 | } 48 | 49 | @Override 50 | public void close() throws CatalogException { 51 | if (store != null) { 52 | try { 53 | store.close(); 54 | } catch (MessagingException e) { 55 | throw new CatalogException(e.getMessage(), e); 56 | } 57 | } 58 | } 59 | 60 | @Override 61 | public Optional getFactory() { 62 | return Optional.of(new ImapSourceFactory()); 63 | } 64 | 65 | // 
--------------------------------------------------------------------------------------------- 66 | 67 | private void reconnect() throws CatalogException { 68 | if (store != null && store.isConnected()) { 69 | return; 70 | } 71 | 72 | if (store == null) { 73 | final SessionProperties sessionProperties = new SessionProperties(options); 74 | final Session session = Session.getInstance(sessionProperties.getProperties()); 75 | 76 | try { 77 | store = session.getStore(); 78 | } catch (MessagingException e) { 79 | throw new CatalogException(e.getMessage(), e); 80 | } 81 | } 82 | 83 | try { 84 | if (options.usesAuthentication()) { 85 | store.connect(options.getUser(), options.getPassword()); 86 | } else { 87 | store.connect("", ""); 88 | } 89 | } catch (MessagingException e) { 90 | throw new CatalogException(e.getMessage(), e); 91 | } 92 | } 93 | 94 | // --------------------------------------------------------------------------------------------- 95 | 96 | @Override 97 | public String getDefaultDatabase() throws CatalogException { 98 | return DEFAULT_DATABASE; 99 | } 100 | 101 | @Override 102 | public List listDatabases() throws CatalogException { 103 | return Collections.singletonList(DEFAULT_DATABASE); 104 | } 105 | 106 | @Override 107 | public CatalogDatabase getDatabase(String databaseName) 108 | throws DatabaseNotExistException, CatalogException { 109 | if (!databaseExists(databaseName)) { 110 | throw new DatabaseNotExistException(name, databaseName); 111 | } 112 | 113 | return new CatalogDatabaseImpl(new HashMap<>(), "Lists all folders as source tables"); 114 | } 115 | 116 | @Override 117 | public boolean databaseExists(String databaseName) throws CatalogException { 118 | return getDefaultDatabase().equals(databaseName); 119 | } 120 | 121 | @Override 122 | public void createDatabase( 123 | String databaseName, 124 | CatalogDatabase database, 125 | boolean ignoreIfExists 126 | ) throws DatabaseAlreadyExistException, CatalogException { 127 | if 
(databaseExists(databaseName)) { 128 | if (ignoreIfExists) { 129 | return; 130 | } 131 | 132 | throw new DatabaseAlreadyExistException(name, databaseName); 133 | } 134 | 135 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 136 | } 137 | 138 | @Override 139 | public void dropDatabase( 140 | String databaseName, 141 | boolean ignoreIfNotExists, 142 | boolean cascade 143 | ) throws DatabaseNotExistException, CatalogException { 144 | if (!databaseExists(databaseName)) { 145 | if (ignoreIfNotExists) { 146 | return; 147 | } 148 | 149 | throw new DatabaseNotExistException(name, databaseName); 150 | } 151 | 152 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 153 | } 154 | 155 | @Override 156 | public void alterDatabase( 157 | String databaseName, 158 | CatalogDatabase newDatabase, 159 | boolean ignoreIfNotExists 160 | ) throws DatabaseNotExistException, CatalogException { 161 | if (!databaseExists(databaseName)) { 162 | if (ignoreIfNotExists) { 163 | return; 164 | } 165 | 166 | throw new DatabaseNotExistException(name, databaseName); 167 | } 168 | 169 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 170 | } 171 | 172 | @Override 173 | public List listTables( 174 | String databaseName 175 | ) throws DatabaseNotExistException, CatalogException { 176 | if (!databaseExists(databaseName)) { 177 | throw new DatabaseNotExistException(name, databaseName); 178 | } 179 | 180 | reconnect(); 181 | try { 182 | return Arrays.stream(store.getDefaultFolder().list("*")) 183 | .filter( 184 | folder -> { 185 | try { 186 | return (folder.getType() & Folder.HOLDS_MESSAGES) != 0; 187 | } catch (MessagingException e) { 188 | throw new CatalogException(e.getMessage(), e); 189 | } 190 | }) 191 | .map(Folder::getFullName) 192 | .collect(Collectors.toList()); 193 | } catch (MessagingException e) { 194 | throw new CatalogException(e.getMessage(), e); 195 | } 196 | } 197 | 198 | @Override 199 | public List 
listViews( 200 | String databaseName 201 | ) throws CatalogException { 202 | return Collections.emptyList(); 203 | } 204 | 205 | @Override 206 | public CatalogBaseTable getTable( 207 | ObjectPath tablePath 208 | ) throws TableNotExistException, CatalogException { 209 | if (!tableExists(tablePath)) { 210 | throw new TableNotExistException(name, tablePath); 211 | } 212 | 213 | Schema.Builder schemaBuilder = Schema.newBuilder(); 214 | DEFAULT_METADATA.forEach( 215 | metadata -> schemaBuilder.columnByMetadata(metadata.getKey(), metadata.getType())); 216 | 217 | final Map sourceOptions = new HashMap<>(options.toOptions()); 218 | sourceOptions.put(FOLDER.key(), tablePath.getObjectName()); 219 | 220 | return CatalogTable.of(schemaBuilder.build(), null, Collections.emptyList(), sourceOptions); 221 | } 222 | 223 | @Override 224 | public boolean tableExists(ObjectPath tablePath) throws CatalogException { 225 | reconnect(); 226 | try { 227 | return store.getFolder(tablePath.getObjectName()).exists(); 228 | } catch (MessagingException e) { 229 | return false; 230 | } 231 | } 232 | 233 | @Override 234 | public void dropTable( 235 | ObjectPath tablePath, 236 | boolean ignoreIfNotExists 237 | ) throws TableNotExistException, CatalogException { 238 | if (!tableExists(tablePath)) { 239 | if (ignoreIfNotExists) { 240 | return; 241 | } 242 | 243 | throw new TableNotExistException(name, tablePath); 244 | } 245 | 246 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 247 | } 248 | 249 | @Override 250 | public void renameTable( 251 | ObjectPath tablePath, 252 | String newTableName, 253 | boolean ignoreIfNotExists 254 | ) throws TableNotExistException, CatalogException { 255 | if (!tableExists(tablePath)) { 256 | if (ignoreIfNotExists) { 257 | return; 258 | } 259 | 260 | throw new TableNotExistException(name, tablePath); 261 | } 262 | 263 | final ObjectPath newTablePath = new ObjectPath(tablePath.getDatabaseName(), newTableName); 264 | if 
(tableExists(newTablePath)) { 265 | throw new TableNotExistException(name, newTablePath); 266 | } 267 | 268 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 269 | } 270 | 271 | @Override 272 | public void createTable( 273 | ObjectPath tablePath, 274 | CatalogBaseTable table, 275 | boolean ignoreIfExists 276 | ) throws TableAlreadyExistException, DatabaseNotExistException, CatalogException { 277 | if (!databaseExists(tablePath.getDatabaseName())) { 278 | throw new DatabaseNotExistException(name, tablePath.getDatabaseName()); 279 | } 280 | 281 | if (tableExists(tablePath)) { 282 | if (ignoreIfExists) { 283 | return; 284 | } 285 | 286 | throw new TableAlreadyExistException(name, tablePath); 287 | } 288 | 289 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 290 | } 291 | 292 | @Override 293 | public void alterTable( 294 | ObjectPath tablePath, 295 | CatalogBaseTable newTable, 296 | boolean ignoreIfNotExists 297 | ) throws TableNotExistException, CatalogException { 298 | if (!tableExists(tablePath)) { 299 | if (ignoreIfNotExists) { 300 | return; 301 | } 302 | 303 | throw new TableNotExistException(name, tablePath); 304 | } 305 | 306 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 307 | } 308 | 309 | @Override 310 | public List listPartitions( 311 | ObjectPath tablePath 312 | ) throws CatalogException { 313 | return Collections.emptyList(); 314 | } 315 | 316 | @Override 317 | public List listPartitions( 318 | ObjectPath tablePath, 319 | CatalogPartitionSpec partitionSpec 320 | ) throws CatalogException { 321 | return Collections.emptyList(); 322 | } 323 | 324 | @Override 325 | public List listPartitionsByFilter( 326 | ObjectPath tablePath, 327 | List filters 328 | ) throws CatalogException { 329 | return Collections.emptyList(); 330 | } 331 | 332 | @Override 333 | public CatalogPartition getPartition(ObjectPath tablePath, CatalogPartitionSpec partitionSpec) 334 | throws 
PartitionNotExistException, CatalogException { 335 | 336 | throw new PartitionNotExistException(name, tablePath, partitionSpec); 337 | } 338 | 339 | @Override 340 | public boolean partitionExists( 341 | ObjectPath tablePath, 342 | CatalogPartitionSpec partitionSpec 343 | ) throws CatalogException { 344 | return false; 345 | } 346 | 347 | @Override 348 | public void createPartition( 349 | ObjectPath tablePath, 350 | CatalogPartitionSpec partitionSpec, 351 | CatalogPartition partition, 352 | boolean ignoreIfExists 353 | ) throws PartitionAlreadyExistsException, CatalogException { 354 | if (partitionExists(tablePath, partitionSpec)) { 355 | if (ignoreIfExists) { 356 | return; 357 | } 358 | 359 | throw new PartitionAlreadyExistsException(name, tablePath, partitionSpec); 360 | } 361 | 362 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 363 | } 364 | 365 | @Override 366 | public void dropPartition( 367 | ObjectPath tablePath, 368 | CatalogPartitionSpec partitionSpec, 369 | boolean ignoreIfNotExists 370 | ) throws PartitionNotExistException, CatalogException { 371 | if (!partitionExists(tablePath, partitionSpec)) { 372 | if (ignoreIfNotExists) { 373 | return; 374 | } 375 | 376 | throw new PartitionNotExistException(name, tablePath, partitionSpec); 377 | } 378 | 379 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 380 | } 381 | 382 | @Override 383 | public void alterPartition( 384 | ObjectPath tablePath, 385 | CatalogPartitionSpec partitionSpec, 386 | CatalogPartition newPartition, 387 | boolean ignoreIfNotExists 388 | ) throws PartitionNotExistException, CatalogException { 389 | if (!partitionExists(tablePath, partitionSpec)) { 390 | if (ignoreIfNotExists) { 391 | return; 392 | } 393 | 394 | throw new PartitionNotExistException(name, tablePath, partitionSpec); 395 | } 396 | 397 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 398 | } 399 | 400 | @Override 401 | public List 
listFunctions( 402 | String dbName 403 | ) throws DatabaseNotExistException, CatalogException { 404 | return Collections.emptyList(); 405 | } 406 | 407 | @Override 408 | public CatalogFunction getFunction( 409 | ObjectPath functionPath 410 | ) throws FunctionNotExistException, CatalogException { 411 | throw new FunctionNotExistException(name, functionPath); 412 | } 413 | 414 | @Override 415 | public boolean functionExists(ObjectPath functionPath) throws CatalogException { 416 | return false; 417 | } 418 | 419 | @Override 420 | public void createFunction( 421 | ObjectPath functionPath, 422 | CatalogFunction function, 423 | boolean ignoreIfExists 424 | ) throws FunctionAlreadyExistException, DatabaseNotExistException, CatalogException { 425 | if (functionExists(functionPath)) { 426 | if (ignoreIfExists) { 427 | return; 428 | } 429 | 430 | throw new FunctionAlreadyExistException(name, functionPath); 431 | } 432 | 433 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 434 | } 435 | 436 | @Override 437 | public void alterFunction( 438 | ObjectPath functionPath, 439 | CatalogFunction newFunction, 440 | boolean ignoreIfNotExists 441 | ) throws FunctionNotExistException, CatalogException { 442 | if (!functionExists(functionPath)) { 443 | if (ignoreIfNotExists) { 444 | return; 445 | } 446 | 447 | throw new FunctionNotExistException(name, functionPath); 448 | } 449 | 450 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 451 | } 452 | 453 | @Override 454 | public void dropFunction( 455 | ObjectPath functionPath, 456 | boolean ignoreIfNotExists 457 | ) throws FunctionNotExistException, CatalogException { 458 | if (!functionExists(functionPath)) { 459 | if (ignoreIfNotExists) { 460 | return; 461 | } 462 | 463 | throw new FunctionNotExistException(name, functionPath); 464 | } 465 | 466 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 467 | } 468 | 469 | @Override 470 | public 
CatalogTableStatistics getTableStatistics( 471 | ObjectPath tablePath 472 | ) throws TableNotExistException, CatalogException { 473 | if (!tableExists(tablePath)) { 474 | throw new TableNotExistException(name, tablePath); 475 | } 476 | 477 | return CatalogTableStatistics.UNKNOWN; 478 | } 479 | 480 | @Override 481 | public CatalogColumnStatistics getTableColumnStatistics( 482 | ObjectPath tablePath 483 | ) throws TableNotExistException, CatalogException { 484 | if (!tableExists(tablePath)) { 485 | throw new TableNotExistException(name, tablePath); 486 | } 487 | 488 | return CatalogColumnStatistics.UNKNOWN; 489 | } 490 | 491 | @Override 492 | public CatalogTableStatistics getPartitionStatistics( 493 | ObjectPath tablePath, 494 | CatalogPartitionSpec partitionSpec 495 | ) throws PartitionNotExistException, CatalogException { 496 | if (!partitionExists(tablePath, partitionSpec)) { 497 | throw new PartitionNotExistException(name, tablePath, partitionSpec); 498 | } 499 | 500 | return CatalogTableStatistics.UNKNOWN; 501 | } 502 | 503 | @Override 504 | public CatalogColumnStatistics getPartitionColumnStatistics( 505 | ObjectPath tablePath, 506 | CatalogPartitionSpec partitionSpec 507 | ) throws PartitionNotExistException, CatalogException { 508 | if (!partitionExists(tablePath, partitionSpec)) { 509 | throw new PartitionNotExistException(name, tablePath, partitionSpec); 510 | } 511 | 512 | return CatalogColumnStatistics.UNKNOWN; 513 | } 514 | 515 | @Override 516 | public void alterTableStatistics( 517 | ObjectPath tablePath, 518 | CatalogTableStatistics tableStatistics, 519 | boolean ignoreIfNotExists 520 | ) throws TableNotExistException, CatalogException { 521 | if (!tableExists(tablePath)) { 522 | if (ignoreIfNotExists) { 523 | return; 524 | } 525 | 526 | throw new TableNotExistException(name, tablePath); 527 | } 528 | 529 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 530 | } 531 | 532 | @Override 533 | public void 
alterTableColumnStatistics( 534 | ObjectPath tablePath, 535 | CatalogColumnStatistics columnStatistics, 536 | boolean ignoreIfNotExists 537 | ) throws TableNotExistException, CatalogException, TablePartitionedException { 538 | if (!tableExists(tablePath)) { 539 | if (ignoreIfNotExists) { 540 | return; 541 | } 542 | 543 | throw new TableNotExistException(name, tablePath); 544 | } 545 | 546 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 547 | } 548 | 549 | @Override 550 | public void alterPartitionStatistics( 551 | ObjectPath tablePath, 552 | CatalogPartitionSpec partitionSpec, 553 | CatalogTableStatistics partitionStatistics, 554 | boolean ignoreIfNotExists 555 | ) throws PartitionNotExistException, CatalogException { 556 | if (!partitionExists(tablePath, partitionSpec)) { 557 | if (ignoreIfNotExists) { 558 | return; 559 | } 560 | 561 | throw new PartitionNotExistException(name, tablePath, partitionSpec); 562 | } 563 | 564 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 565 | } 566 | 567 | @Override 568 | public void alterPartitionColumnStatistics( 569 | ObjectPath tablePath, 570 | CatalogPartitionSpec partitionSpec, 571 | CatalogColumnStatistics columnStatistics, 572 | boolean ignoreIfNotExists 573 | ) throws PartitionNotExistException, CatalogException { 574 | if (!partitionExists(tablePath, partitionSpec)) { 575 | if (ignoreIfNotExists) { 576 | return; 577 | } 578 | 579 | throw new PartitionNotExistException(name, tablePath, partitionSpec); 580 | } 581 | 582 | throw new UnsupportedOperationException("Not supported by the IMAP catalog."); 583 | } 584 | } 585 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ImapCatalogFactory.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import 
org.apache.flink.annotation.Internal; 4 | import org.apache.flink.configuration.ConfigOption; 5 | import org.apache.flink.configuration.ReadableConfig; 6 | import org.apache.flink.table.catalog.Catalog; 7 | import org.apache.flink.table.factories.CatalogFactory; 8 | import org.apache.flink.table.factories.FactoryUtil; 9 | 10 | import java.util.HashSet; 11 | import java.util.Set; 12 | 13 | import static com.tngtech.flink.connector.email.imap.ImapConfigOptions.*; 14 | 15 | @Internal 16 | public class ImapCatalogFactory implements CatalogFactory { 17 | 18 | public static final String IDENTIFIER = "imap"; 19 | 20 | @Override 21 | public String factoryIdentifier() { 22 | return IDENTIFIER; 23 | } 24 | 25 | @Override 26 | public Set> requiredOptions() { 27 | final Set> options = new HashSet<>(); 28 | options.add(HOST); 29 | return options; 30 | } 31 | 32 | @Override 33 | public Set> optionalOptions() { 34 | final Set> options = new HashSet<>(); 35 | options.add(USER); 36 | options.add(PASSWORD); 37 | options.add(PORT); 38 | options.add(SSL); 39 | return options; 40 | } 41 | 42 | @Override 43 | public Catalog createCatalog(Context context) { 44 | final FactoryUtil.CatalogFactoryHelper factoryHelper = 45 | FactoryUtil.createCatalogFactoryHelper(this, context); 46 | factoryHelper.validate(); 47 | 48 | ReadableConfig options = factoryHelper.getOptions(); 49 | return new ImapCatalog(context.getName(), ImapCatalogOptions.fromOptions(options)); 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ImapCatalogOptions.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import com.tngtech.flink.connector.email.common.ConnectorOptions; 4 | import com.tngtech.flink.connector.email.common.Protocol; 5 | import lombok.Data; 6 | import lombok.EqualsAndHashCode; 7 | import 
lombok.experimental.SuperBuilder; 8 | import org.apache.flink.annotation.PublicEvolving; 9 | import org.apache.flink.configuration.ReadableConfig; 10 | 11 | import java.io.Serializable; 12 | import java.util.HashMap; 13 | import java.util.Map; 14 | 15 | import static com.tngtech.flink.connector.email.imap.ImapConfigOptions.*; 16 | 17 | @PublicEvolving 18 | @Data 19 | @SuperBuilder(toBuilder = true) 20 | @EqualsAndHashCode(callSuper = true) 21 | public class ImapCatalogOptions extends ConnectorOptions implements Serializable { 22 | private static final long serialVersionUID = 1L; 23 | 24 | // --------------------------------------------------------------------------------------------- 25 | 26 | public static ImapCatalogOptions fromOptions(ReadableConfig options) { 27 | return ImapCatalogOptions.builder() 28 | .host(options.get(HOST)) 29 | .port(options.get(PORT)) 30 | .user(options.get(USER)) 31 | .password(options.get(PASSWORD)) 32 | .protocol(options.get(SSL) ? Protocol.IMAPS : Protocol.IMAP) 33 | .build(); 34 | } 35 | 36 | public Map toOptions() { 37 | final Map options = new HashMap<>(); 38 | options.put(HOST.key(), getHost()); 39 | if (getPort() != null) { 40 | options.put(PORT.key(), String.valueOf(getPort())); 41 | } 42 | if (getUser() != null) { 43 | options.put(USER.key(), getUser()); 44 | } 45 | if (getPassword() != null) { 46 | options.put(PASSWORD.key(), getPassword()); 47 | } 48 | options.put(SSL.key(), String.valueOf(getProtocol() == Protocol.IMAPS)); 49 | 50 | return options; 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ImapConfigOptions.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import com.tngtech.flink.connector.email.common.EmailConfigOptions; 4 | import lombok.Getter; 5 | import lombok.RequiredArgsConstructor; 6 | import 
/**
 * {@link ConfigOption}s understood by the IMAP source.
 *
 * <p>Connection-level options are aliased from {@code EmailConfigOptions} so users of the IMAP
 * connector only need to consult this one class.
 */
@UtilityClass
@PublicEvolving
public class ImapConfigOptions {

    // Connection options shared with the SMTP connector.
    public static final ConfigOption<String> HOST = EmailConfigOptions.HOST;

    public static final ConfigOption<Integer> PORT = EmailConfigOptions.PORT;

    public static final ConfigOption<String> USER = EmailConfigOptions.USER;

    public static final ConfigOption<String> PASSWORD = EmailConfigOptions.PASSWORD;

    public static final ConfigOption<Boolean> SSL = EmailConfigOptions.SSL;

    public static final ConfigOption<String> FORMAT = EmailConfigOptions.FORMAT;

    // IMAP folder to read messages from.
    public static final ConfigOption<String> FOLDER = ConfigOptions.key("folder")
        .stringType()
        .defaultValue("INBOX");

    // ---------------------------------------------------------------------------------------------

    public static final ConfigOption<StartupMode> MODE = ConfigOptions.key("mode")
        .enumType(StartupMode.class)
        .defaultValue(StartupMode.ALL)
        .withDescription("Defines which emails to read from the specified folder.");

    // Batch size for fetching the existing backlog; only relevant for ALL mode.
    public static final ConfigOption<Integer> BATCH_SIZE = ConfigOptions.key("batch-size")
        .intType()
        .defaultValue(50)
        .withDescription(Description.builder()
            .text("Number of messages to fetch per batch.")
            .linebreak()
            .text(String.format("Only used if '%s' = '%s'.", MODE.key(), StartupMode.ALL))
            .build());

    // Starting UID for the backlog fetch; only relevant for ALL mode.
    public static final ConfigOption<Long> OFFSET = ConfigOptions.key("offset")
        .longType()
        .noDefaultValue()
        .withDescription(Description.builder()
            .text("If specified, this is the UID from which on messages will be fetched.")
            .linebreak()
            .text(String.format("Only used if '%s' = '%s'.", MODE.key(), StartupMode.ALL))
            .build());

    // ---------------------------------------------------------------------------------------------

    public static final ConfigOption<Duration> CONNECTION_TIMEOUT =
        ConfigOptions.key("connection.timeout")
            .durationType()
            .defaultValue(Duration.ofMinutes(1L));

    // Polling fallback interval for servers without IDLE support.
    public static final ConfigOption<Duration> INTERVAL = ConfigOptions.key("interval")
        .durationType()
        .defaultValue(Duration.ofSeconds(1L))
        .withDescription(Description.builder()
            .text("Time between polling attempts.")
            .linebreak()
            .text(
                "This interval is only used if the server doesn't support IDLE. In this case, the connector falls back to polling.")
            .build());

    public static final ConfigOption<Duration> HEARTBEAT_INTERVAL =
        ConfigOptions.key("heartbeat.interval")
            .durationType()
            .defaultValue(Duration.ofMinutes(15L))
            .withDescription(
                "Frequency with which to send a heartbeat signal to maintain the IDLE connection.");

    // ---------------------------------------------------------------------------------------------

    /**
     * Which messages the source reads: the existing backlog, newly arriving messages, or both.
     */
    @RequiredArgsConstructor
    public enum StartupMode {
        ALL("all"),
        NEW("new"),
        CURRENT("current");

        // String form of the mode as it appears in DDL options.
        @Getter
        private final String value;

        /** Returns true if this mode is any of the given modes. */
        public boolean isOneOf(StartupMode... modes) {
            for (StartupMode mode : modes) {
                if (this == mode) {
                    return true;
                }
            }

            return false;
        }
    }

}
    private final ImapSourceOptions options;
    // Metadata columns requested by the query, in the order they appear in the produced row.
    private final List<ReadableMetadata> metadataKeys;

    // Connection state, created in open() and released in close(); not serializable, hence transient.
    private transient Store store;
    private transient IMAPFolder folder;
    private transient Heartbeat heartbeat;
    private transient FetchProfile fetchProfile;
    // UIDNEXT of the folder when it was opened; pre-existing messages have UIDs strictly below this.
    private transient long nextUIDOnOpen;

    private transient volatile boolean running;
    // Flipped to false on the first failed IDLE call; the source then falls back to polling.
    private volatile boolean supportsIdle = true;

    /**
     * Connects to the IMAP server and, if physical columns are read, opens the content
     * deserializer.
     */
    @Override
    public void open(Configuration parameters) throws Exception {
        fetchProfile = getFetchProfile();
        connect();

        if (contentDeserializer != null) {
            contentDeserializer.open(new DeserializationSchema.InitializationContext() {
                @Override
                public MetricGroup getMetricGroup() {
                    return getRuntimeContext().getMetricGroup();
                }

                @Override
                public UserCodeClassLoader getUserCodeClassLoader() {
                    // NOTE(review): assumes the context class loader implements
                    // UserCodeClassLoader, which holds inside the Flink runtime — confirm for
                    // embedded/test environments.
                    return (UserCodeClassLoader) Thread.currentThread().getContextClassLoader();
                }
            });
        }
    }

    /** Closes the folder and store; shutdown errors are deliberately swallowed. */
    @Override
    public void close() {
        try {
            if (folder != null) {
                // 'false' means deleted messages are not expunged on close.
                folder.close(false);
            }

            if (store != null) {
                store.close();
            }
        } catch (MessagingException ignored) {
        }
    }

    /**
     * Emits messages according to the configured {@link StartupMode}: the existing backlog
     * (CURRENT/ALL), newly arriving messages (NEW/ALL), or both.
     */
    @Override
    public void run(SourceContext<RowData> ctx) {
        running = true;

        // Register the listener before fetching the backlog so that messages arriving
        // concurrently are not lost.
        final boolean readNewMessages = options.getMode().isOneOf(StartupMode.NEW, StartupMode.ALL);
        if (readNewMessages) {
            folder.addMessageCountListener(new MessageCountAdapter() {
                @Override
                public void messagesAdded(MessageCountEvent event) {
                    collectMessages(ctx, event.getMessages());
                }
            });
        }

        // Backlog = everything strictly below the UID that was "next" when the folder was opened.
        if (options.getMode().isOneOf(StartupMode.CURRENT, StartupMode.ALL)) {
            fetchExistingMessages(ctx, nextUIDOnOpen - 1);
        }

        if (readNewMessages) {
            // Blocks until cancel(); keeps the IDLE (or polling) connection alive.
            enterWaitLoop();
        } else {
            running = false;
        }
    }

    /** Stops the wait loop and the IDLE heartbeat thread. */
    @Override
    public void cancel() {
        running = false;
        stopIdleHeartbeat();
    }
--------------------------------------------------------------------------------------------- 113 | 114 | private void connect() throws ImapSourceException { 115 | final Session session = Session.getInstance(getImapProperties(options)); 116 | 117 | try { 118 | store = session.getStore(); 119 | } catch (NoSuchProviderException e) { 120 | throw ImapSourceException.propagate(e); 121 | } 122 | 123 | try { 124 | if (options.usesAuthentication()) { 125 | store.connect(options.getUser(), options.getPassword()); 126 | } else { 127 | store.connect("", ""); 128 | } 129 | } catch (MessagingException e) { 130 | throw ImapSourceException.propagate(e); 131 | } 132 | 133 | try { 134 | final Folder genericFolder = store.getFolder(options.getFolder()); 135 | folder = (IMAPFolder) genericFolder; 136 | } catch (MessagingException e) { 137 | throw ImapSourceException.propagate(e); 138 | } catch (ClassCastException e) { 139 | throw new ImapSourceException( 140 | "Folder " + folder.getName() + " is not an " + IMAPFolder.class.getSimpleName(), e); 141 | } 142 | 143 | openFolder(); 144 | 145 | final boolean folderExists; 146 | try { 147 | folderExists = folder.exists(); 148 | } catch (MessagingException e) { 149 | throw ImapSourceException.propagate(e); 150 | } 151 | 152 | if (!folderExists) { 153 | throw new ImapSourceException("Folder " + folder.getName() + " does not exist."); 154 | } 155 | 156 | try { 157 | nextUIDOnOpen = folder.getUIDNext(); 158 | if (nextUIDOnOpen == -1) { 159 | throw new ImapSourceException("The highest UID could not be determined."); 160 | } 161 | } catch (MessagingException e) { 162 | throw new ImapSourceException("Error while determining the highest UID", e); 163 | } 164 | } 165 | 166 | private void openFolder() { 167 | try { 168 | if (!folder.isOpen()) { 169 | folder.open(Folder.READ_ONLY); 170 | } 171 | } catch (MessagingException e) { 172 | throw ImapSourceException.propagate(e); 173 | } 174 | } 175 | 176 | private FetchProfile getFetchProfile() { 177 | 
final FetchProfile fetchProfile = new FetchProfile(); 178 | fetchProfile.add(FetchProfile.Item.ENVELOPE); 179 | fetchProfile.add(UIDFolder.FetchProfileItem.UID); 180 | 181 | if (contentDeserializer != null || metadataKeys.contains(ReadableMetadata.CONTENT_TYPE)) { 182 | fetchProfile.add(FetchProfile.Item.CONTENT_INFO); 183 | } 184 | 185 | if (metadataKeys.contains(ReadableMetadata.SIZE)) { 186 | fetchProfile.add(FetchProfile.Item.SIZE); 187 | } 188 | 189 | if (metadataKeys.contains(ReadableMetadata.SEEN) 190 | || metadataKeys.contains(ReadableMetadata.DRAFT) 191 | || metadataKeys.contains(ReadableMetadata.ANSWERED)) { 192 | fetchProfile.add(FetchProfile.Item.FLAGS); 193 | } 194 | 195 | return fetchProfile; 196 | } 197 | 198 | private void stopIdleHeartbeat() { 199 | if (heartbeat != null && heartbeat.isAlive()) { 200 | heartbeat.interrupt(); 201 | } 202 | } 203 | 204 | // --------------------------------------------------------------------------------------------- 205 | 206 | private void enterWaitLoop() { 207 | heartbeat = new Heartbeat(folder, options.getHeartbeatInterval()); 208 | heartbeat.setName("IMAP Heartbeat"); 209 | heartbeat.setDaemon(true); 210 | heartbeat.start(); 211 | 212 | long nextReadTimeMs = System.currentTimeMillis(); 213 | while (running) { 214 | if (supportsIdle) { 215 | try { 216 | folder.idle(); 217 | } catch (MessagingException ignored) { 218 | supportsIdle = false; 219 | stopIdleHeartbeat(); 220 | } catch (IllegalStateException e) { 221 | openFolder(); 222 | } 223 | } else { 224 | try { 225 | // Trigger some IMAP request to force the server to send a notification 226 | folder.getMessageCount(); 227 | } catch (MessagingException e) { 228 | throw ImapSourceException.propagate(e); 229 | } 230 | 231 | nextReadTimeMs += options.getInterval().toMillis(); 232 | try { 233 | Thread.sleep(Math.max(0, nextReadTimeMs - System.currentTimeMillis())); 234 | } catch (InterruptedException e) { 235 | throw new ImapSourceException("Error while sleeping", e); 
236 | } 237 | } 238 | } 239 | } 240 | 241 | private void fetchExistingMessages(SourceContext ctx, long endUID) { 242 | final Thread fetchThread = new Thread(() -> { 243 | long batchStartUID = options.getOffset() == null ? 1 : options.getOffset(); 244 | while (running) { 245 | final long batchEndUID = 246 | Math.min(batchStartUID + options.getBatchSize() - 1, endUID); 247 | try { 248 | collectMessages(ctx, folder.getMessagesByUID(batchStartUID, batchEndUID)); 249 | } catch (MessagingException e) { 250 | throw new ImapSourceException(String.format( 251 | "Error while fetching messages (batchStartUID = %d, batchEndUid = %d", 252 | batchStartUID, batchEndUID), e); 253 | } 254 | 255 | batchStartUID = batchEndUID + 1; 256 | if (batchStartUID > endUID) { 257 | break; 258 | } 259 | } 260 | }); 261 | 262 | fetchThread.setName(String.format("IMAP Fetcher (endUID = %d)", endUID)); 263 | fetchThread.setDaemon(true); 264 | fetchThread.start(); 265 | } 266 | 267 | private void collectMessages(SourceContext ctx, Message[] messages) { 268 | try { 269 | folder.fetch(messages, fetchProfile); 270 | } catch (MessagingException e) { 271 | throw ImapSourceException.propagate(e); 272 | } 273 | 274 | synchronized (ctx.getCheckpointLock()) { 275 | for (Message message : messages) { 276 | if (message == null) { 277 | continue; 278 | } 279 | 280 | try { 281 | collectMessage(ctx, message); 282 | } catch (Exception e) { 283 | throw ImapSourceException.propagate(e); 284 | } 285 | } 286 | } 287 | 288 | ctx.markAsTemporarilyIdle(); 289 | } 290 | 291 | private void collectMessage(SourceContext ctx, Message message) throws Exception { 292 | final Context converterContext = Context.of(folder, message); 293 | final GenericRowData metadataRow = new GenericRowData(metadataKeys.size()); 294 | 295 | for (int i = 0; i < metadataKeys.size(); i++) { 296 | final ReadableMetadata metadata = metadataKeys.get(i); 297 | final ReadableMetadata.Converter converter = metadata.getConverter(); 298 | final Object obj 
= toInternalType(metadata, converter.convert(converterContext)); 299 | metadataRow.setField(i, obj); 300 | } 301 | 302 | final RowData outputRow; 303 | if (contentDeserializer != null) { 304 | final byte[] content = IOUtils.toByteArray(message.getInputStream()); 305 | final RowData deserializedRow = contentDeserializer.deserialize(content); 306 | 307 | final RowData physicalRow = deserializedRow == null 308 | ? GenericRowData.of(RowKind.INSERT, null) 309 | : deserializedRow; 310 | 311 | outputRow = new JoinedRowData(physicalRow, metadataRow); 312 | } else { 313 | outputRow = metadataRow; 314 | } 315 | 316 | ctx.collect(outputRow); 317 | } 318 | 319 | private @Nullable Object toInternalType(ReadableMetadata metadata, @Nullable Object value) { 320 | if (value == null) { 321 | return null; 322 | } 323 | 324 | switch (metadata) { 325 | case SUBJECT: 326 | case CONTENT_TYPE: 327 | case FROM_FIRST: 328 | case TO_FIRST: 329 | return StringData.fromString((String) value); 330 | case SENT: 331 | case RECEIVED: 332 | return TimestampData.fromInstant((Instant) value); 333 | default: 334 | return value; 335 | } 336 | } 337 | 338 | // --------------------------------------------------------------------------------------------- 339 | 340 | private static Properties getImapProperties(ImapSourceOptions options) { 341 | final SessionProperties sessionProperties = new SessionProperties(options); 342 | 343 | sessionProperties.addProtocolProperty("connectiontimeout", 344 | String.valueOf(options.getConnectionTimeout().toMillis())); 345 | sessionProperties.addProtocolProperty("partialfetch", "false"); 346 | sessionProperties.addProtocolProperty("peek", "true"); 347 | 348 | return sessionProperties.getProperties(); 349 | } 350 | } 351 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ImapSourceException.java: -------------------------------------------------------------------------------- 1 | package 
com.tngtech.flink.connector.email.imap; 2 | 3 | import org.apache.flink.annotation.PublicEvolving; 4 | 5 | @PublicEvolving 6 | public final class ImapSourceException extends RuntimeException { 7 | 8 | public ImapSourceException(String s) { 9 | super(s); 10 | } 11 | 12 | public ImapSourceException(String s, Throwable throwable) { 13 | super(s, throwable); 14 | } 15 | 16 | public static ImapSourceException propagate(Exception e) { 17 | throw new ImapSourceException(e.getMessage(), e); 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ImapSourceFactory.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import org.apache.flink.annotation.Internal; 4 | import org.apache.flink.api.common.serialization.DeserializationSchema; 5 | import org.apache.flink.configuration.ConfigOption; 6 | import org.apache.flink.table.connector.format.DecodingFormat; 7 | import org.apache.flink.table.connector.source.DynamicTableSource; 8 | import org.apache.flink.table.data.RowData; 9 | import org.apache.flink.table.factories.DeserializationFormatFactory; 10 | import org.apache.flink.table.factories.DynamicTableSourceFactory; 11 | import org.apache.flink.table.factories.FactoryUtil; 12 | import org.apache.flink.table.types.DataType; 13 | 14 | import java.util.HashSet; 15 | import java.util.Set; 16 | 17 | import static com.tngtech.flink.connector.email.imap.ImapConfigOptions.*; 18 | 19 | @Internal 20 | public class ImapSourceFactory implements DynamicTableSourceFactory { 21 | private static final String IDENTIFIER = "imap"; 22 | 23 | @Override 24 | public String factoryIdentifier() { 25 | return IDENTIFIER; 26 | } 27 | 28 | @Override 29 | public Set> requiredOptions() { 30 | final Set> options = new HashSet<>(); 31 | options.add(HOST); 32 | options.add(USER); 33 | options.add(PASSWORD); 34 | 
options.add(SSL); 35 | options.add(FOLDER); 36 | options.add(MODE); 37 | return options; 38 | } 39 | 40 | @Override 41 | public Set> optionalOptions() { 42 | final Set> options = new HashSet<>(); 43 | options.add(FORMAT); 44 | 45 | options.add(PORT); 46 | options.add(CONNECTION_TIMEOUT); 47 | options.add(INTERVAL); 48 | options.add(HEARTBEAT_INTERVAL); 49 | options.add(BATCH_SIZE); 50 | options.add(OFFSET); 51 | return options; 52 | } 53 | 54 | @Override 55 | public DynamicTableSource createDynamicTableSource(Context context) { 56 | final FactoryUtil.TableFactoryHelper 57 | factoryHelper = FactoryUtil.createTableFactoryHelper(this, context); 58 | 59 | final DecodingFormat> decodingFormat = factoryHelper 60 | .discoverDecodingFormat(DeserializationFormatFactory.class, FORMAT); 61 | 62 | factoryHelper.validate(); 63 | 64 | final DataType rowType = 65 | context.getCatalogTable().getResolvedSchema().toPhysicalRowDataType(); 66 | 67 | final ImapSourceOptions options = ImapSourceOptions.fromOptions(factoryHelper.getOptions()); 68 | 69 | return new ImapTableSource(rowType, decodingFormat, options); 70 | } 71 | 72 | } 73 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ImapSourceOptions.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import com.tngtech.flink.connector.email.common.ConnectorOptions; 4 | import com.tngtech.flink.connector.email.common.Protocol; 5 | import lombok.Data; 6 | import lombok.EqualsAndHashCode; 7 | import lombok.experimental.SuperBuilder; 8 | import org.apache.flink.annotation.PublicEvolving; 9 | import org.apache.flink.configuration.ReadableConfig; 10 | 11 | import javax.annotation.Nullable; 12 | import java.io.Serializable; 13 | import java.time.Duration; 14 | 15 | import static com.tngtech.flink.connector.email.imap.ImapConfigOptions.*; 16 | 17 | @PublicEvolving 
18 | @Data 19 | @SuperBuilder(toBuilder = true) 20 | @EqualsAndHashCode(callSuper = true) 21 | public class ImapSourceOptions extends ConnectorOptions implements Serializable { 22 | private static final long serialVersionUID = 1L; 23 | 24 | private final String folder; 25 | 26 | private final StartupMode mode; 27 | private final int batchSize; 28 | private final @Nullable Long offset; 29 | 30 | private final Duration connectionTimeout; 31 | private final Duration interval; 32 | private final Duration heartbeatInterval; 33 | 34 | // --------------------------------------------------------------------------------------------- 35 | 36 | public static ImapSourceOptions fromOptions(ReadableConfig options) { 37 | return ImapSourceOptions.builder() 38 | .host(options.get(HOST)) 39 | .port(options.get(PORT)) 40 | .user(options.get(USER)) 41 | .password(options.get(PASSWORD)) 42 | .protocol(options.get(SSL) ? Protocol.IMAPS : Protocol.IMAP) 43 | .folder(options.get(FOLDER)) 44 | .mode(options.get(MODE)) 45 | .batchSize(options.get(BATCH_SIZE)) 46 | .offset(options.get(OFFSET)) 47 | .connectionTimeout(options.get(CONNECTION_TIMEOUT)) 48 | .interval(options.get(INTERVAL)) 49 | .heartbeatInterval(options.get(HEARTBEAT_INTERVAL)) 50 | .build(); 51 | } 52 | 53 | } 54 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ImapTableSource.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import com.tngtech.flink.connector.email.imap.ImapConfigOptions.StartupMode; 4 | import org.apache.flink.annotation.Internal; 5 | import org.apache.flink.api.common.serialization.DeserializationSchema; 6 | import org.apache.flink.table.connector.ChangelogMode; 7 | import org.apache.flink.table.connector.format.DecodingFormat; 8 | import org.apache.flink.table.connector.source.DynamicTableSource; 9 | import 
org.apache.flink.table.connector.source.ScanTableSource; 10 | import org.apache.flink.table.connector.source.SourceFunctionProvider; 11 | import org.apache.flink.table.connector.source.abilities.SupportsProjectionPushDown; 12 | import org.apache.flink.table.connector.source.abilities.SupportsReadingMetadata; 13 | import org.apache.flink.table.data.RowData; 14 | import org.apache.flink.table.types.DataType; 15 | import org.apache.flink.table.types.utils.DataTypeUtils; 16 | 17 | import java.util.ArrayList; 18 | import java.util.Arrays; 19 | import java.util.List; 20 | import java.util.Map; 21 | import java.util.stream.Collectors; 22 | 23 | import static org.apache.flink.util.Preconditions.checkNotNull; 24 | 25 | @Internal 26 | class ImapTableSource implements ScanTableSource, SupportsReadingMetadata, 27 | SupportsProjectionPushDown { 28 | 29 | private DataType rowType; 30 | private final DecodingFormat> decodingFormat; 31 | private final ImapSourceOptions options; 32 | private List metadataKeys = new ArrayList<>(); 33 | 34 | public ImapTableSource(DataType rowType, 35 | DecodingFormat> decodingFormat, 36 | ImapSourceOptions options) { 37 | 38 | this.rowType = checkNotNull(rowType); 39 | this.decodingFormat = decodingFormat; 40 | this.options = options; 41 | } 42 | 43 | @Override 44 | public ChangelogMode getChangelogMode() { 45 | return ChangelogMode.insertOnly(); 46 | } 47 | 48 | @Override 49 | public ScanRuntimeProvider getScanRuntimeProvider(ScanContext context) { 50 | // If the produced row type is empty, no physical columns have been declared, and we don't 51 | // need to deserialize anything. 52 | final boolean readsContent = !rowType.getChildren().isEmpty(); 53 | 54 | final DeserializationSchema deserializer = readsContent 55 | ? 
decodingFormat.createRuntimeDecoder(context, rowType) 56 | : null; 57 | 58 | final ImapSource source = new ImapSource(deserializer, options, metadataKeys); 59 | final boolean bounded = options.getMode() == StartupMode.CURRENT; 60 | return SourceFunctionProvider.of(source, bounded); 61 | } 62 | 63 | @Override 64 | public DynamicTableSource copy() { 65 | final ImapTableSource source = new ImapTableSource(rowType, decodingFormat, options); 66 | source.metadataKeys = new ArrayList<>(metadataKeys); 67 | return source; 68 | } 69 | 70 | @Override 71 | public String asSummaryString() { 72 | return getClass().getSimpleName(); 73 | } 74 | 75 | // --------------------------------------------------------------------------------------------- 76 | // Abilities 77 | // --------------------------------------------------------------------------------------------- 78 | 79 | 80 | @Override 81 | public boolean supportsNestedProjection() { 82 | return false; 83 | } 84 | 85 | @Override 86 | public void applyProjection(int[][] projectedFields) { 87 | rowType = DataTypeUtils.projectRow(rowType, projectedFields); 88 | } 89 | 90 | @Override 91 | public Map listReadableMetadata() { 92 | return Arrays.stream(ReadableMetadata.values()) 93 | .collect(Collectors.toMap(ReadableMetadata::getKey, ReadableMetadata::getType)); 94 | } 95 | 96 | @Override 97 | public void applyReadableMetadata(List metadataKeys, DataType producedDataType) { 98 | this.metadataKeys = metadataKeys.stream() 99 | .map(ReadableMetadata::ofKey) 100 | .collect(Collectors.toList()); 101 | } 102 | } 103 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/imap/ReadableMetadata.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import com.sun.mail.imap.IMAPFolder; 4 | import jakarta.mail.Flags; 5 | import jakarta.mail.Message; 6 | import 
jakarta.mail.Message.RecipientType; 7 | import lombok.Data; 8 | import lombok.Getter; 9 | import lombok.RequiredArgsConstructor; 10 | import org.apache.flink.annotation.PublicEvolving; 11 | import org.apache.flink.table.api.DataTypes; 12 | import org.apache.flink.table.types.DataType; 13 | 14 | import java.io.Serializable; 15 | 16 | import static com.tngtech.flink.connector.email.common.MessageUtil.*; 17 | 18 | @PublicEvolving 19 | @RequiredArgsConstructor 20 | public enum ReadableMetadata { 21 | UID( 22 | "uid", 23 | DataTypes.BIGINT().notNull(), 24 | new Converter() { 25 | private static final long serialVersionUID = 1L; 26 | 27 | @Override 28 | public Object convert(Context context) throws Exception { 29 | return context.getFolder().getUID(context.getMessage()); 30 | } 31 | }), 32 | 33 | SUBJECT( 34 | "subject", 35 | DataTypes.STRING().nullable(), 36 | new Converter() { 37 | private static final long serialVersionUID = 1L; 38 | 39 | @Override 40 | public Object convert(Context context) throws Exception { 41 | return context.getMessage().getSubject(); 42 | } 43 | }), 44 | 45 | SENT( 46 | "sent", 47 | DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).notNull(), 48 | new Converter() { 49 | private static final long serialVersionUID = 1L; 50 | 51 | @Override 52 | public Object convert(Context context) throws Exception { 53 | return context.getMessage().getSentDate().toInstant(); 54 | } 55 | }), 56 | 57 | RECEIVED( 58 | "received", 59 | DataTypes.TIMESTAMP_WITH_LOCAL_TIME_ZONE(3).notNull(), 60 | new Converter() { 61 | private static final long serialVersionUID = 1L; 62 | 63 | @Override 64 | public Object convert(Context context) throws Exception { 65 | return context.getMessage().getReceivedDate().toInstant(); 66 | } 67 | }), 68 | 69 | CONTENT_TYPE( 70 | "contentType", 71 | DataTypes.STRING().nullable(), 72 | new Converter() { 73 | private static final long serialVersionUID = 1L; 74 | 75 | @Override 76 | public Object convert(Context context) throws Exception { 77 | return 
context.getMessage().getContentType(); 78 | } 79 | } 80 | ), 81 | 82 | SIZE( 83 | "sizeInBytes", 84 | DataTypes.INT().notNull(), 85 | new Converter() { 86 | private static final long serialVersionUID = 1L; 87 | 88 | @Override 89 | public Object convert(Context context) throws Exception { 90 | return context.getMessage().getSize(); 91 | } 92 | } 93 | ), 94 | 95 | SEEN( 96 | "seen", 97 | DataTypes.BOOLEAN().notNull(), 98 | new Converter() { 99 | private static final long serialVersionUID = 1L; 100 | 101 | @Override 102 | public Object convert(Context context) throws Exception { 103 | return context.getMessage().getFlags().contains(Flags.Flag.SEEN); 104 | } 105 | } 106 | ), 107 | 108 | DRAFT( 109 | "draft", 110 | DataTypes.BOOLEAN().notNull(), 111 | new Converter() { 112 | private static final long serialVersionUID = 1L; 113 | 114 | @Override 115 | public Object convert(Context context) throws Exception { 116 | return context.getMessage().getFlags().contains(Flags.Flag.DRAFT); 117 | } 118 | } 119 | ), 120 | 121 | ANSWERED( 122 | "answered", 123 | DataTypes.BOOLEAN().notNull(), 124 | new Converter() { 125 | private static final long serialVersionUID = 1L; 126 | 127 | @Override 128 | public Object convert(Context context) throws Exception { 129 | return context.getMessage().getFlags().contains(Flags.Flag.ANSWERED); 130 | } 131 | } 132 | ), 133 | 134 | FROM( 135 | "from", 136 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 137 | new Converter() { 138 | private static final long serialVersionUID = 1L; 139 | 140 | @Override 141 | public Object convert(Context context) throws Exception { 142 | return encodeAddresses(context.getMessage().getFrom()); 143 | } 144 | }), 145 | 146 | FROM_FIRST( 147 | "fromFirst", 148 | DataTypes.STRING().nullable(), 149 | new Converter() { 150 | private static final long serialVersionUID = 1L; 151 | 152 | @Override 153 | public Object convert(Context context) throws Exception { 154 | return encodeFirstAddress(context.getMessage().getFrom()); 
155 | } 156 | }), 157 | 158 | TO( 159 | "to", 160 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 161 | new Converter() { 162 | private static final long serialVersionUID = 1L; 163 | 164 | @Override 165 | public Object convert(Context context) throws Exception { 166 | return encodeAddresses(context.getMessage().getRecipients(RecipientType.TO)); 167 | } 168 | }), 169 | 170 | TO_FIRST( 171 | "toFirst", 172 | DataTypes.STRING().nullable(), 173 | new Converter() { 174 | private static final long serialVersionUID = 1L; 175 | 176 | @Override 177 | public Object convert(Context context) throws Exception { 178 | return encodeFirstAddress(context.getMessage().getRecipients(RecipientType.TO)); 179 | } 180 | }), 181 | 182 | CC( 183 | "cc", 184 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 185 | new Converter() { 186 | private static final long serialVersionUID = 1L; 187 | 188 | @Override 189 | public Object convert(Context context) throws Exception { 190 | return encodeAddresses(context.getMessage().getRecipients(RecipientType.CC)); 191 | } 192 | }), 193 | 194 | BCC( 195 | "bcc", 196 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 197 | new Converter() { 198 | private static final long serialVersionUID = 1L; 199 | 200 | @Override 201 | public Object convert(Context context) throws Exception { 202 | return encodeAddresses(context.getMessage().getRecipients(RecipientType.BCC)); 203 | } 204 | }), 205 | 206 | RECIPIENTS( 207 | "recipients", 208 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 209 | new Converter() { 210 | private static final long serialVersionUID = 1L; 211 | 212 | @Override 213 | public Object convert(Context context) throws Exception { 214 | return encodeAddresses(context.getMessage().getAllRecipients()); 215 | } 216 | }), 217 | 218 | REPLY_TO( 219 | "replyTo", 220 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 221 | new Converter() { 222 | private static final long serialVersionUID = 1L; 223 | 224 | @Override 225 | public Object convert(Context 
context) throws Exception { 226 | return encodeAddresses(context.getMessage().getReplyTo()); 227 | } 228 | }), 229 | 230 | HEADERS( 231 | "headers", 232 | DataTypes.ARRAY(DataTypes.ROW( 233 | DataTypes.FIELD("key", DataTypes.STRING()), 234 | DataTypes.FIELD("value", DataTypes.STRING()) 235 | )).nullable(), 236 | new Converter() { 237 | private static final long serialVersionUID = 1L; 238 | 239 | @Override 240 | public Object convert(Context context) throws Exception { 241 | return encodeHeaders(context.getMessage().getAllHeaders()); 242 | } 243 | }); 244 | 245 | @Getter 246 | private final String key; 247 | 248 | @Getter 249 | private final DataType type; 250 | 251 | @Getter 252 | private final Converter converter; 253 | 254 | public static ReadableMetadata ofKey(String key) { 255 | for (ReadableMetadata candidate : values()) { 256 | if (key.equals(candidate.getKey())) { 257 | return candidate; 258 | } 259 | } 260 | 261 | throw new IllegalArgumentException(String.format("Metadata key '%s' not found.", key)); 262 | } 263 | 264 | // --------------------------------------------------------------------------------------------- 265 | 266 | 267 | public interface Context { 268 | IMAPFolder getFolder(); 269 | 270 | Message getMessage(); 271 | 272 | static Context of(IMAPFolder folder, Message message) { 273 | return new DefaultContext(folder, message); 274 | } 275 | 276 | @Data 277 | class DefaultContext implements Context { 278 | private final IMAPFolder folder; 279 | private final Message message; 280 | } 281 | } 282 | 283 | 284 | @FunctionalInterface 285 | interface Converter extends Serializable { 286 | Object convert(Context context) throws Exception; 287 | } 288 | } 289 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/smtp/SmtpConfigOptions.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.smtp; 2 | 3 | 
import com.tngtech.flink.connector.email.common.EmailConfigOptions; 4 | import lombok.experimental.UtilityClass; 5 | import org.apache.flink.annotation.PublicEvolving; 6 | import org.apache.flink.configuration.ConfigOption; 7 | 8 | @UtilityClass 9 | @PublicEvolving 10 | public class SmtpConfigOptions { 11 | 12 | public static final ConfigOption HOST = EmailConfigOptions.HOST; 13 | 14 | public static final ConfigOption PORT = EmailConfigOptions.PORT; 15 | 16 | public static final ConfigOption USER = EmailConfigOptions.USER; 17 | 18 | public static final ConfigOption PASSWORD = EmailConfigOptions.PASSWORD; 19 | 20 | public static final ConfigOption SSL = EmailConfigOptions.SSL; 21 | 22 | public static final ConfigOption FORMAT = EmailConfigOptions.FORMAT; 23 | 24 | } 25 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/smtp/SmtpSink.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.smtp; 2 | 3 | import com.tngtech.flink.connector.email.common.SessionProperties; 4 | import com.tngtech.flink.connector.email.common.SubRowData; 5 | import jakarta.activation.DataHandler; 6 | import jakarta.activation.DataSource; 7 | import jakarta.mail.Session; 8 | import jakarta.mail.Transport; 9 | import jakarta.mail.internet.MimeBodyPart; 10 | import jakarta.mail.internet.MimeMessage; 11 | import jakarta.mail.internet.MimeMultipart; 12 | import jakarta.mail.util.ByteArrayDataSource; 13 | import lombok.RequiredArgsConstructor; 14 | import org.apache.flink.annotation.PublicEvolving; 15 | import org.apache.flink.api.common.serialization.SerializationSchema; 16 | import org.apache.flink.configuration.Configuration; 17 | import org.apache.flink.metrics.MetricGroup; 18 | import org.apache.flink.streaming.api.functions.sink.RichSinkFunction; 19 | import org.apache.flink.table.data.RowData; 20 | import 
org.apache.flink.table.types.DataType; 21 | import org.apache.flink.util.UserCodeClassLoader; 22 | 23 | import javax.annotation.Nullable; 24 | import java.util.List; 25 | import java.util.Properties; 26 | 27 | @PublicEvolving 28 | @RequiredArgsConstructor 29 | public class SmtpSink extends RichSinkFunction { 30 | 31 | private final DataType physicalRowType; 32 | private final @Nullable SerializationSchema contentSerializer; 33 | private final SmtpSinkOptions options; 34 | private final List metadataKeys; 35 | 36 | private transient Session session; 37 | private transient Transport transport; 38 | 39 | @Override 40 | public void open(Configuration parameters) throws Exception { 41 | connect(); 42 | 43 | if (contentSerializer != null) { 44 | contentSerializer.open(new SerializationSchema.InitializationContext() { 45 | @Override 46 | public MetricGroup getMetricGroup() { 47 | return getRuntimeContext().getMetricGroup(); 48 | } 49 | 50 | @Override 51 | public UserCodeClassLoader getUserCodeClassLoader() { 52 | return (UserCodeClassLoader) Thread.currentThread().getContextClassLoader(); 53 | } 54 | }); 55 | } 56 | } 57 | 58 | @Override 59 | public void close() throws Exception { 60 | if (transport != null) { 61 | transport.close(); 62 | } 63 | } 64 | 65 | @Override 66 | public void invoke(RowData row, Context context) throws Exception { 67 | final MimeMessage message = new MimeMessage(session); 68 | 69 | int currentPosition = 0; 70 | if (contentSerializer != null) { 71 | final RowData contentRow = new SubRowData(row, 0, physicalRowType.getChildren().size()); 72 | final byte[] content = contentSerializer.serialize(contentRow); 73 | final DataSource dataSource = new ByteArrayDataSource(content, "text/plain"); 74 | 75 | final MimeBodyPart bodyPart = new MimeBodyPart(); 76 | bodyPart.setDataHandler(new DataHandler(dataSource)); 77 | 78 | final MimeMultipart multipart = new MimeMultipart(); 79 | multipart.addBodyPart(bodyPart); 80 | 81 | message.setContent(multipart); 82 | 
83 | currentPosition++; 84 | } else { 85 | message.setText(""); 86 | } 87 | 88 | for (WritableMetadata metadata : metadataKeys) { 89 | metadata.getConverter().convert(row, currentPosition, message); 90 | currentPosition++; 91 | } 92 | 93 | message.saveChanges(); 94 | transport.sendMessage(message, message.getAllRecipients()); 95 | } 96 | 97 | // --------------------------------------------------------------------------------------------- 98 | 99 | private void connect() throws Exception { 100 | session = Session.getInstance(getSmtpProperties(options)); 101 | transport = session.getTransport(options.getProtocol().getName()); 102 | if (options.usesAuthentication()) { 103 | transport.connect(options.getUser(), options.getPassword()); 104 | } else { 105 | transport.connect(); 106 | } 107 | } 108 | 109 | private static Properties getSmtpProperties(SmtpSinkOptions options) { 110 | return new SessionProperties(options).getProperties(); 111 | } 112 | } 113 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/smtp/SmtpSinkException.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.smtp; 2 | 3 | import org.apache.flink.annotation.PublicEvolving; 4 | 5 | @PublicEvolving 6 | public final class SmtpSinkException extends RuntimeException { 7 | 8 | public SmtpSinkException(String s) { 9 | super(s); 10 | } 11 | 12 | public SmtpSinkException(String s, Throwable throwable) { 13 | super(s, throwable); 14 | } 15 | 16 | public static SmtpSinkException propagate(Exception e) { 17 | throw new SmtpSinkException(e.getMessage(), e); 18 | } 19 | 20 | } 21 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/smtp/SmtpSinkFactory.java: -------------------------------------------------------------------------------- 1 | package 
/**
 * {@link DynamicTableSinkFactory} for the SMTP sink, registered under the identifier
 * {@code smtp} (see {@code META-INF/services}).
 */
@Internal
public class SmtpSinkFactory implements DynamicTableSinkFactory {

    public static final String IDENTIFIER = "smtp";

    @Override
    public String factoryIdentifier() {
        return IDENTIFIER;
    }

    /** Only the server host is strictly required; all other options have defaults. */
    @Override
    public Set<ConfigOption<?>> requiredOptions() {
        final Set<ConfigOption<?>> options = new HashSet<>();
        options.add(HOST);

        return options;
    }

    @Override
    public Set<ConfigOption<?>> optionalOptions() {
        final Set<ConfigOption<?>> options = new HashSet<>();
        options.add(FORMAT);

        options.add(USER);
        options.add(PASSWORD);
        options.add(PORT);
        options.add(SSL);
        return options;
    }

    @Override
    public DynamicTableSink createDynamicTableSink(Context context) {
        final FactoryUtil.TableFactoryHelper
            factoryHelper = FactoryUtil.createTableFactoryHelper(this, context);

        // Resolve the serializer declared via the 'format' option (e.g. 'raw', 'json').
        final EncodingFormat<SerializationSchema<RowData>> encodingFormat =
            factoryHelper.discoverEncodingFormat(SerializationFormatFactory.class, FORMAT);

        // Fails on unconsumed or invalid options.
        factoryHelper.validate();

        // Physical (non-metadata) columns only; metadata is applied via SupportsWritingMetadata.
        final DataType rowType =
            context.getCatalogTable().getResolvedSchema().toPhysicalRowDataType();

        final SmtpSinkOptions options = SmtpSinkOptions.fromOptions(factoryHelper.getOptions());

        return new SmtpTableSink(rowType, encodingFormat, options);
    }
}
SmtpSinkOptions.fromOptions(factoryHelper.getOptions()); 63 | 64 | return new SmtpTableSink(rowType, encodingFormat, options); 65 | } 66 | } 67 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/smtp/SmtpSinkOptions.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.smtp; 2 | 3 | import com.tngtech.flink.connector.email.common.ConnectorOptions; 4 | import com.tngtech.flink.connector.email.common.Protocol; 5 | import com.tngtech.flink.connector.email.imap.ImapConfigOptions; 6 | import lombok.Data; 7 | import lombok.EqualsAndHashCode; 8 | import lombok.experimental.SuperBuilder; 9 | import org.apache.flink.annotation.PublicEvolving; 10 | import org.apache.flink.configuration.ReadableConfig; 11 | 12 | import java.io.Serializable; 13 | 14 | import static com.tngtech.flink.connector.email.imap.ImapConfigOptions.SSL; 15 | import static com.tngtech.flink.connector.email.smtp.SmtpConfigOptions.*; 16 | 17 | @PublicEvolving 18 | @Data 19 | @SuperBuilder(toBuilder = true) 20 | @EqualsAndHashCode(callSuper = true) 21 | public class SmtpSinkOptions extends ConnectorOptions implements Serializable { 22 | private static final long serialVersionUID = 1L; 23 | 24 | // --------------------------------------------------------------------------------------------- 25 | 26 | public static SmtpSinkOptions fromOptions(ReadableConfig options) { 27 | return SmtpSinkOptions.builder() 28 | .host(options.get(ImapConfigOptions.HOST)) 29 | .port(options.get(PORT)) 30 | .user(options.get(USER)) 31 | .password(options.get(PASSWORD)) 32 | .protocol(options.get(SSL) ? 
Protocol.SMTPS : Protocol.SMTP) 33 | .build(); 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/smtp/SmtpTableSink.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.smtp; 2 | 3 | import lombok.RequiredArgsConstructor; 4 | import org.apache.flink.annotation.Internal; 5 | import org.apache.flink.api.common.serialization.SerializationSchema; 6 | import org.apache.flink.table.connector.ChangelogMode; 7 | import org.apache.flink.table.connector.format.EncodingFormat; 8 | import org.apache.flink.table.connector.sink.DynamicTableSink; 9 | import org.apache.flink.table.connector.sink.SinkFunctionProvider; 10 | import org.apache.flink.table.connector.sink.abilities.SupportsWritingMetadata; 11 | import org.apache.flink.table.data.RowData; 12 | import org.apache.flink.table.types.DataType; 13 | 14 | import java.util.ArrayList; 15 | import java.util.Arrays; 16 | import java.util.List; 17 | import java.util.Map; 18 | import java.util.stream.Collectors; 19 | 20 | @Internal 21 | @RequiredArgsConstructor 22 | class SmtpTableSink implements DynamicTableSink, SupportsWritingMetadata { 23 | 24 | private final DataType rowType; 25 | private final EncodingFormat> encodingFormat; 26 | private final SmtpSinkOptions options; 27 | private final List metadataKeys = new ArrayList<>(); 28 | 29 | @Override 30 | public ChangelogMode getChangelogMode(ChangelogMode requestedMode) { 31 | return ChangelogMode.insertOnly(); 32 | } 33 | 34 | @Override 35 | public SinkRuntimeProvider getSinkRuntimeProvider(Context context) { 36 | // If the produced row type is empty, no physical columns have been declared and we don't 37 | // need to serialize anything. 38 | final boolean writesContent = !rowType.getChildren().isEmpty(); 39 | 40 | final SerializationSchema serializer = writesContent 41 | ? 
encodingFormat.createRuntimeEncoder(context, rowType) 42 | : null; 43 | 44 | final SmtpSink sink = new SmtpSink(rowType, serializer, options, metadataKeys); 45 | return SinkFunctionProvider.of(sink); 46 | } 47 | 48 | @Override 49 | public DynamicTableSink copy() { 50 | final SmtpTableSink sink = new SmtpTableSink(rowType, encodingFormat, options); 51 | sink.metadataKeys.addAll(metadataKeys); 52 | return sink; 53 | } 54 | 55 | @Override 56 | public String asSummaryString() { 57 | return getClass().getSimpleName(); 58 | } 59 | 60 | // --------------------------------------------------------------------------------------------- 61 | // Abilities 62 | // --------------------------------------------------------------------------------------------- 63 | 64 | @Override 65 | public Map listWritableMetadata() { 66 | return Arrays.stream(WritableMetadata.values()) 67 | .collect(Collectors.toMap(WritableMetadata::getKey, WritableMetadata::getType)); 68 | } 69 | 70 | @Override 71 | public void applyWritableMetadata(List metadataKeys, DataType consumedDataType) { 72 | metadataKeys.stream() 73 | .map(WritableMetadata::ofKey) 74 | .forEach(this.metadataKeys::add); 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/main/java/com/tngtech/flink/connector/email/smtp/WritableMetadata.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.smtp; 2 | 3 | import jakarta.mail.Address; 4 | import jakarta.mail.Message.RecipientType; 5 | import jakarta.mail.internet.MimeMessage; 6 | import lombok.Getter; 7 | import lombok.RequiredArgsConstructor; 8 | import org.apache.flink.annotation.PublicEvolving; 9 | import org.apache.flink.table.api.DataTypes; 10 | import org.apache.flink.table.data.RowData; 11 | import org.apache.flink.table.types.DataType; 12 | 13 | import java.io.Serializable; 14 | 15 | import static 
com.tngtech.flink.connector.email.common.MessageUtil.decodeAddresses; 16 | 17 | @PublicEvolving 18 | @RequiredArgsConstructor 19 | public enum WritableMetadata { 20 | SUBJECT( 21 | "subject", 22 | DataTypes.STRING().nullable(), 23 | new Converter() { 24 | private static final long serialVersionUID = 1L; 25 | 26 | @Override 27 | public void convert(RowData rowData, int pos, MimeMessage message) throws Exception { 28 | final String subject = rowData.getString(pos).toString(); 29 | message.setSubject(subject); 30 | } 31 | } 32 | ), 33 | 34 | FROM( 35 | "from", 36 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 37 | new Converter() { 38 | private static final long serialVersionUID = 1L; 39 | 40 | @Override 41 | public void convert(RowData rowData, int pos, MimeMessage message) throws Exception { 42 | final Address[] addresses = decodeAddresses(rowData.getArray(pos)); 43 | message.addFrom(addresses); 44 | } 45 | } 46 | ), 47 | 48 | TO( 49 | "to", 50 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 51 | new Converter() { 52 | private static final long serialVersionUID = 1L; 53 | 54 | @Override 55 | public void convert(RowData rowData, int pos, MimeMessage message) throws Exception { 56 | final Address[] addresses = decodeAddresses(rowData.getArray(pos)); 57 | message.addRecipients(RecipientType.TO, addresses); 58 | } 59 | } 60 | ), 61 | 62 | CC( 63 | "cc", 64 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 65 | new Converter() { 66 | private static final long serialVersionUID = 1L; 67 | 68 | @Override 69 | public void convert(RowData rowData, int pos, MimeMessage message) throws Exception { 70 | final Address[] addresses = decodeAddresses(rowData.getArray(pos)); 71 | message.addRecipients(RecipientType.CC, addresses); 72 | } 73 | } 74 | ), 75 | 76 | BCC( 77 | "bcc", 78 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 79 | new Converter() { 80 | private static final long serialVersionUID = 1L; 81 | 82 | @Override 83 | public void convert(RowData rowData, int pos, 
MimeMessage message) throws Exception { 84 | final Address[] addresses = decodeAddresses(rowData.getArray(pos)); 85 | message.addRecipients(RecipientType.BCC, addresses); 86 | } 87 | } 88 | ), 89 | 90 | REPLY_TO( 91 | "replyTo", 92 | DataTypes.ARRAY(DataTypes.STRING()).nullable(), 93 | new Converter() { 94 | private static final long serialVersionUID = 1L; 95 | 96 | @Override 97 | public void convert(RowData rowData, int pos, MimeMessage message) throws Exception { 98 | final Address[] addresses = decodeAddresses(rowData.getArray(pos)); 99 | message.setReplyTo(addresses); 100 | } 101 | } 102 | ); 103 | 104 | @Getter 105 | private final String key; 106 | 107 | @Getter 108 | private final DataType type; 109 | 110 | @Getter 111 | private final Converter converter; 112 | 113 | public static WritableMetadata ofKey(String key) { 114 | for (WritableMetadata candidate : values()) { 115 | if (key.equals(candidate.getKey())) { 116 | return candidate; 117 | } 118 | } 119 | 120 | throw new IllegalArgumentException(String.format("Metadata key '%s' not found.", key)); 121 | } 122 | 123 | // --------------------------------------------------------------------------------------------- 124 | 125 | 126 | @FunctionalInterface 127 | interface Converter extends Serializable { 128 | void convert(RowData rowData, int pos, MimeMessage message) throws Exception; 129 | } 130 | } 131 | -------------------------------------------------------------------------------- /src/main/resources/META-INF/services/org.apache.flink.table.factories.Factory: -------------------------------------------------------------------------------- 1 | com.tngtech.flink.connector.email.imap.ImapSourceFactory 2 | com.tngtech.flink.connector.email.imap.ImapCatalogFactory 3 | com.tngtech.flink.connector.email.smtp.SmtpSinkFactory 4 | -------------------------------------------------------------------------------- /src/test/java/com/tngtech/flink/connector/email/architecture/ArchitectureTest.java: 
-------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.architecture; 2 | 3 | import com.tngtech.archunit.junit.AnalyzeClasses; 4 | import com.tngtech.archunit.junit.ArchTest; 5 | import com.tngtech.archunit.junit.ArchUnitRunner; 6 | import com.tngtech.archunit.lang.ArchRule; 7 | import org.junit.runner.RunWith; 8 | 9 | import static com.tngtech.archunit.lang.syntax.ArchRuleDefinition.noClasses; 10 | 11 | @RunWith(ArchUnitRunner.class) 12 | @AnalyzeClasses(packages = "com.tngtech.flink.connector.email") 13 | public class ArchitectureTest { 14 | 15 | @ArchTest 16 | private final ArchRule imapDoesNotDependonSmtp = noClasses() 17 | .that().resideInAPackage("..imap..") 18 | .should().dependOnClassesThat().resideInAPackage("..smtp.."); 19 | 20 | @ArchTest 21 | private final ArchRule smtpDoesNotDependOnImap = noClasses() 22 | .that().resideInAPackage("..imap..") 23 | .should().dependOnClassesThat().resideInAPackage("..smtp.."); 24 | } 25 | -------------------------------------------------------------------------------- /src/test/java/com/tngtech/flink/connector/email/common/MessageUtilTest.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.common; 2 | 3 | import com.tngtech.java.junit.dataprovider.DataProvider; 4 | import com.tngtech.java.junit.dataprovider.DataProviderRunner; 5 | import jakarta.mail.internet.AddressException; 6 | import jakarta.mail.internet.InternetAddress; 7 | import org.apache.flink.table.data.ArrayData; 8 | import org.junit.Test; 9 | import org.junit.runner.RunWith; 10 | 11 | import java.util.Arrays; 12 | 13 | import static com.tngtech.flink.connector.email.common.MessageUtil.encodeAddresses; 14 | import static com.tngtech.flink.connector.email.common.MessageUtil.encodeFirstAddress; 15 | import static org.assertj.core.api.Assertions.assertThat; 16 | import static 
org.assertj.core.api.Assertions.fail; 17 | 18 | @RunWith(DataProviderRunner.class) 19 | public class MessageUtilTest { 20 | 21 | @Test 22 | @DataProvider(value = { 23 | "a@one.ext | a@one.ext", 24 | "a@one.ext | a@one.ext | b@two.ext", 25 | "null" 26 | }, splitBy = "\\|") 27 | public void testConvertFirstAddress(String expected, InternetAddress... addresses) { 28 | assertThat(encodeFirstAddress(addresses)).isEqualTo(expected); 29 | } 30 | 31 | @Test 32 | @DataProvider(value = { 33 | "a@one.ext", 34 | "a@one.ext | b@two.ext", 35 | "Jon Doe | Jane Doe " 36 | }, splitBy = "\\|") 37 | public void testConvertAddresses(String... addresses) { 38 | final InternetAddress[] internetAddresses = Arrays.stream(addresses) 39 | .map(MessageUtilTest::toAddress) 40 | .toArray(InternetAddress[]::new); 41 | 42 | final ArrayData convertedAddresses = encodeAddresses(internetAddresses); 43 | assertThat(convertedAddresses) 44 | .isNotNull() 45 | .extracting(ArrayData::size) 46 | .isEqualTo(addresses.length); 47 | for (int i = 0; i < addresses.length; i++) { 48 | assertThat(convertedAddresses.getString(i).toString()).isEqualTo(addresses[i]); 49 | } 50 | } 51 | 52 | private static InternetAddress toAddress(String address) { 53 | try { 54 | return new InternetAddress(address); 55 | } catch (AddressException e) { 56 | return fail("'%s' is not a valid address.", address); 57 | } 58 | } 59 | 60 | } 61 | -------------------------------------------------------------------------------- /src/test/java/com/tngtech/flink/connector/email/common/SubRowDataTest.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.common; 2 | 3 | import org.apache.flink.table.data.GenericRowData; 4 | import org.apache.flink.table.data.RowData; 5 | import org.apache.flink.table.data.StringData; 6 | import org.junit.Test; 7 | 8 | import static org.assertj.core.api.Assertions.assertThat; 9 | 10 | public class SubRowDataTest { 11 | @Test 12 | 
public void test() { 13 | final RowData row = GenericRowData.of( 14 | StringData.fromString("a"), 15 | StringData.fromString("b"), 16 | StringData.fromString("c"), 17 | StringData.fromString("d"), 18 | StringData.fromString("e")); 19 | 20 | final RowData subRow1 = new SubRowData(row, 0, 2); 21 | assertThat(subRow1.getArity()).isEqualTo(2); 22 | assertThat(subRow1.getString(0).toString()).isEqualTo("a"); 23 | assertThat(subRow1.getString(1).toString()).isEqualTo("b"); 24 | 25 | final RowData subRow2 = new SubRowData(row, 3, 4); 26 | assertThat(subRow2.getArity()).isEqualTo(1); 27 | assertThat(subRow2.getString(0).toString()).isEqualTo("d"); 28 | 29 | final RowData subRow3 = new SubRowData(row, 4, 4); 30 | assertThat(subRow3.getArity()).isEqualTo(0); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/test/java/com/tngtech/flink/connector/email/imap/ImapCatalogTest.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import com.tngtech.flink.connector.email.testing.TestBase; 4 | import org.apache.flink.table.catalog.Catalog; 5 | import org.junit.Test; 6 | 7 | import static org.assertj.core.api.Assertions.assertThat; 8 | 9 | public class ImapCatalogTest extends TestBase { 10 | 11 | @Test 12 | public void testListTables() throws Exception { 13 | final Catalog catalog = createImapCatalog("local"); 14 | assertThat(catalog.listTables(catalog.getDefaultDatabase())).containsExactly("INBOX"); 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /src/test/java/com/tngtech/flink/connector/email/imap/ImapSourceTest.java: -------------------------------------------------------------------------------- 1 | package com.tngtech.flink.connector.email.imap; 2 | 3 | import com.icegreen.greenmail.configuration.GreenMailConfiguration; 4 | import com.tngtech.flink.connector.email.testing.TestBase; 
5 | import org.apache.flink.api.java.tuple.Tuple2; 6 | import org.apache.flink.table.api.DataTypes; 7 | import org.apache.flink.table.catalog.Column; 8 | import org.apache.flink.table.catalog.ResolvedSchema; 9 | import org.apache.flink.types.Row; 10 | import org.junit.Test; 11 | 12 | import java.util.HashMap; 13 | import java.util.List; 14 | import java.util.Map; 15 | 16 | import static com.icegreen.greenmail.util.GreenMailUtil.sendTextEmail; 17 | import static org.assertj.core.api.Assertions.assertThat; 18 | 19 | public class ImapSourceTest extends TestBase { 20 | 21 | private static final Tuple2 LOGIN = Tuple2.of("ingo@tngtech.test", "123"); 22 | 23 | @Override 24 | public GreenMailConfiguration getGreenmailConfiguration() { 25 | return GreenMailConfiguration.aConfig().withUser(LOGIN.f0, LOGIN.f1); 26 | } 27 | 28 | @Test 29 | public void simpleSelect() throws Exception { 30 | final ResolvedSchema schema = ResolvedSchema.of( 31 | metadataColumn(ReadableMetadata.SUBJECT), 32 | metadataColumn(ReadableMetadata.FROM_FIRST), 33 | metadataColumn(ReadableMetadata.TO_FIRST), 34 | Column.physical("content", DataTypes.STRING()) 35 | ); 36 | 37 | sendTextEmail("ingo@tngtech.test", "sender@tngtech.test", "Subject", "Message Content", 38 | SMTP); 39 | 40 | final List rows = collect(createImapSource("T", schema, getLogin()).execute(), 1); 41 | 42 | assertThat(rows).hasSize(1); 43 | assertThat(rows.get(0).getField(0)).isEqualTo("Subject"); 44 | assertThat(rows.get(0).getField(1)).isEqualTo("sender@tngtech.test"); 45 | assertThat(rows.get(0).getField(2)).isEqualTo("ingo@tngtech.test"); 46 | assertThat(rows.get(0).getField(3)).isEqualTo("Message Content"); 47 | } 48 | 49 | // --------------------------------------------------------------------------------------------- 50 | 51 | private static Map getLogin() { 52 | final Map options = new HashMap<>(); 53 | options.put("user", LOGIN.f0); 54 | options.put("password", LOGIN.f1); 55 | return options; 56 | } 57 | } 58 | 
/**
 * End-to-end test for the SMTP sink against an embedded GreenMail server.
 */
public class SmtpSinkTest extends TestBase {

    @Test
    public void simpleInsert() throws Exception {
        // One physical content column plus subject/from/to metadata columns.
        final ResolvedSchema schema = ResolvedSchema.of(
            metadataColumn(WritableMetadata.SUBJECT),
            metadataColumn(WritableMetadata.FROM),
            metadataColumn(WritableMetadata.TO),
            Column.physical("content", DataTypes.STRING())
        );

        createSmtpSink("T", schema);

        // Insert a single row; await() blocks until the insert job has finished sending.
        tEnv.fromValues(schema.toSinkRowDataType(),
            row(
                "Subject",
                new String[] {"sender@tngtech.test"},
                new String[] {"ingo@tngtech.test"},
                "Message Content"
            )
        ).executeInsert("T").await();

        final MimeMessage[] sentMessages = greenMailRule.getReceivedMessages();
        assertThat(sentMessages).hasSize(1);
        assertThat(sentMessages[0].getSubject()).isEqualTo("Subject");
        // The body is wrapped in a multipart, so compare by containment rather than equality.
        assertThat(GreenMailUtil.getBody(sentMessages[0])).contains("Message Content");
    }
}
/**
 * Base class for integration tests. Starts an embedded GreenMail mail server (IMAP, IMAPS, SMTP)
 * per test method and a shared Flink mini cluster per class, and offers helpers to create
 * catalogs, sources, and sinks wired against the embedded server.
 */
public class TestBase {

    // One Flink mini cluster shared by all tests in the class.
    @ClassRule
    public static MiniClusterWithClientResource flinkCluster = getCluster();

    // Use 4xxx instead of the default 3xxx to avoid interference with a running standalone image
    public static final ServerSetup IMAP = new ServerSetup(4143, null, "imap");
    public static final ServerSetup IMAPS = new ServerSetup(4993, null, "imaps");
    public static final ServerSetup SMTP = new ServerSetup(4025, null, "smtp");

    // Default timeout for collecting rows from a streaming result.
    private static final Duration DEFAULT_DURATION = Duration.ofSeconds(10);

    // Fresh GreenMail server per test method; configuration is overridable via
    // getGreenmailConfiguration().
    @Rule
    public final GreenMailRule greenMailRule =
        new GreenMailRule(new ServerSetup[] {IMAP, IMAPS, SMTP})
            .withConfiguration(getGreenmailConfiguration());

    public Session session;
    public StreamExecutionEnvironment executionEnv;
    public StreamTableEnvironment tEnv;

    @Before
    public void before() throws Exception {
        // Ensure earlier tests cannot leak messages into this one.
        greenMailRule.purgeEmailFromAllMailboxes();
        session = greenMailRule.getImap().createSession();

        executionEnv = StreamExecutionEnvironment.getExecutionEnvironment();
        // Parallelism 1 keeps row order deterministic for assertions.
        executionEnv.setParallelism(1);

        tEnv = StreamTableEnvironment.create(executionEnv);
    }

    private static MiniClusterWithClientResource getCluster() {
        return new MiniClusterWithClientResource(
            new MiniClusterResourceConfiguration.Builder()
                .setNumberSlotsPerTaskManager(1)
                .setNumberTaskManagers(1)
                .build());
    }

    /**
     * Can be overridden by sub classes.
     */
    public GreenMailConfiguration getGreenmailConfiguration() {
        return GreenMailConfiguration.aConfig().withDisabledAuthentication();
    }

    /**
     * Collects at most {@code expectedRows} rows.
     *
     * <p>If the given number of rows have been collected, it stops collecting, which means
     * unintended further rows will not be collected. If fewer than expected rows are returned,
     * it fails the test.
     */
    public List<Row> collect(TableResult tableResult, int expectedRows) throws Exception {
        return collect(tableResult, expectedRows, DEFAULT_DURATION);
    }

    /**
     * Collects at most {@code expectedRows} rows, with a timeout of {@code maxTime}.
     *
     * <p>If the given number of rows have been collected, it stops collecting, which means
     * unintended further rows will not be collected. If fewer than expected rows are returned,
     * it fails the test.
     */
    public List<Row> collect(TableResult tableResult, int expectedRows, Duration maxTime)
        throws Exception {

        final List<Row> rows = new ArrayList<>();
        try (final CloseableIterator<Row> it = tableResult.collect()) {
            final Callable<Boolean> predicate = () -> rows.size() >= expectedRows;

            // Drain the (potentially unbounded) iterator on a separate thread so the timeout
            // can be enforced from this one.
            final CollectorThread collectorThread = new CollectorThread(it, rows, predicate);
            collectorThread.start();

            try {
                await().atMost(maxTime).until(predicate);
            } finally {
                // Stop the collector even if the timeout expired.
                collectorThread.interrupt();
            }
        }

        if (rows.size() < expectedRows) {
            Assert.fail(String.format("Expected %d rows, but only got %d before the timeout.",
                expectedRows, rows.size()));
        }

        return rows;
    }

    // ---------------------------------------------------------------------------------------------

    public Catalog createImapCatalog(String name) throws Exception {
        return createImapCatalog(name, Collections.emptyMap());
    }

    /** Registers an IMAP catalog pointing at the embedded server and returns it. */
    public Catalog createImapCatalog(String name, Map<String, String> options) throws Exception {
        Map<String, String> allOptions = new HashMap<>();
        allOptions.put("type", "imap");
        allOptions.put("host", "localhost");
        allOptions.put("port", String.valueOf(IMAP.getPort()));
        // Caller-provided options win over the defaults above.
        allOptions.putAll(options);

        tEnv.executeSql(String.format("CREATE CATALOG %s WITH (%s)",
            escapeIdentifier(name),
            allOptions.entrySet().stream()
                .map(entry -> String.format("'%s' = '%s'",
                    escapeSingleQuotes(entry.getKey()),
                    escapeSingleQuotes(entry.getValue())))
                .collect(Collectors.joining(","))))
            .await();

        return tEnv.getCatalog(name).orElseThrow(() -> new IllegalStateException(
            String.format("Expected catalog '%s' to exist.", name)));
    }

    public Table createImapSource(String name, ResolvedSchema schema) throws Exception {
        return createImapSource(name, schema, Collections.emptyMap());
    }

    /** Creates a temporary IMAP-backed table and returns it as a {@link Table}. */
    public Table createImapSource(String name, ResolvedSchema schema, Map<String, String> options)
        throws Exception {
        Map<String, String> allOptions = new HashMap<>();
        allOptions.put("connector", "imap");
        allOptions.put("host", "localhost");
        allOptions.put("port", String.valueOf(IMAP.getPort()));
        // Caller-provided options win over the defaults above.
        allOptions.putAll(options);

        createTable(name, schema, allOptions);
        return tEnv.from(name);
    }

    public void createSmtpSink(String name, ResolvedSchema schema) throws Exception {
        createSmtpSink(name, schema, Collections.emptyMap());
    }

    /** Creates a temporary SMTP-backed sink table. */
    public void createSmtpSink(String name, ResolvedSchema schema, Map<String, String> options)
        throws Exception {
        Map<String, String> allOptions = new HashMap<>();
        allOptions.put("connector", "smtp");
        allOptions.put("host", "localhost");
        allOptions.put("port", String.valueOf(SMTP.getPort()));
        // Caller-provided options win over the defaults above.
        allOptions.putAll(options);

        createTable(name, schema, allOptions);
    }

    public Column metadataColumn(ReadableMetadata metadata) {
        return Column.metadata(metadata.getKey(), metadata.getType(), metadata.getKey(), false);
    }

    public Column metadataColumn(WritableMetadata metadata) {
        return Column.metadata(metadata.getKey(), metadata.getType(), metadata.getKey(), false);
    }

    // Builds and runs the CREATE TEMPORARY TABLE DDL for the given schema and options.
    private void createTable(String name, ResolvedSchema schema, Map<String, String> options)
        throws Exception {
        tEnv.executeSql(String.format("CREATE TEMPORARY TABLE %s %s WITH (%s)",
            escapeIdentifier(name),
            schema,
            options.entrySet().stream()
                .map(entry -> String.format("'%s' = '%s'",
                    escapeSingleQuotes(entry.getKey()),
                    escapeSingleQuotes(entry.getValue())))
                .collect(Collectors.joining(","))))
            .await();
    }

    // ---------------------------------------------------------------------------------------------


    /**
     * Drains a result iterator into {@code rows} until the break condition holds or the thread
     * is interrupted.
     */
    @RequiredArgsConstructor
    private static class CollectorThread extends Thread {
        private final CloseableIterator<Row> it;
        private final List<Row> rows;
        private final Callable<Boolean> breakCondition;

        @Override
        public void run() {
            try {
                while (it.hasNext()) {
                    rows.add(it.next());

                    if (breakCondition.call() || Thread.interrupted()) {
                        break;
                    }
                }
            } catch (Exception ignored) {
                // Deliberately swallowed: interruption while blocked in hasNext() is expected;
                // missing rows are surfaced by the timeout check in collect() instead.
            }
        }
    }
}
23 | 24 | ## Connecting 25 | 26 | Use the following configuration options to connect using either the IMAP source connector or SMTP 27 | sink connector: 28 | 29 | * `'host' = 'greenmail'` 30 | * `'port' = '3143'` (IMAP) 31 | * `'port' = '3025'` (SMTP) 32 | * `user` can be one of jon, jane, alice, or bob. 33 | * `password` is the same as the username. 34 | 35 | Example: 36 | 37 | 38 | ```sql 39 | CREATE TABLE inbox ( 40 | uid STRING NOT NULL METADATA, 41 | subject STRING METADATA, 42 | content STRING 43 | ) WITH ( 44 | 'connector' = 'imap', 45 | 'host' = 'greenmail', 46 | 'user' = 'jon', 47 | 'password' = 'jon' 48 | ); 49 | ``` 50 | 51 | 52 | Two IMAP catalogs called "jon" and "jane" have also already been created for the respective users, 53 | so you can also use the catalog directly: 54 | 55 | 56 | ```sql 57 | SELECT * FROM jon.folders.INBOX; 58 | ``` 59 | 60 | 61 | ## Sending Mails 62 | 63 | `send_mail.sh` is a small convenience script which you can use to send messages on the local 64 | Greenmail server. 65 | 66 | ``` 67 | # Send message from jane@ to jon@ 68 | ./send_mail.sh -f jane -t jon -s "Subject" -m "Message" 69 | ``` 70 | -------------------------------------------------------------------------------- /testing/build_and_run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | set -eux 3 | 4 | (cd ..
&& ./gradlew jar) || exit 1 5 | docker-compose up -d --build || exit 1 6 | exec ./run.sh 7 | -------------------------------------------------------------------------------- /testing/docker-compose.yaml: -------------------------------------------------------------------------------- 1 | version: "3" 2 | services: 3 | sql: 4 | image: flink-connector-email/client 5 | build: ./images/client 6 | command: "tail -f /dev/null" 7 | depends_on: 8 | - jobmanager 9 | - greenmail 10 | links: 11 | - greenmail:greenmail 12 | environment: 13 | FLINK_JOBMANAGER_HOST: "jobmanager" 14 | jobmanager: 15 | image: flink:1.13.2-scala_2.12 16 | hostname: jobmanager 17 | expose: 18 | - "6123" 19 | ports: 20 | - "8081:8081" 21 | command: jobmanager 22 | environment: 23 | JOB_MANAGER_RPC_ADDRESS: "jobmanager" 24 | taskmanager: 25 | image: flink:1.13.2-scala_2.12 26 | expose: 27 | - "6121" 28 | - "6122" 29 | depends_on: 30 | - jobmanager 31 | command: taskmanager 32 | links: 33 | - jobmanager:jobmanager 34 | environment: 35 | JOB_MANAGER_RPC_ADDRESS: "jobmanager" 36 | greenmail: 37 | image: greenmail/standalone:1.6.0 38 | ports: 39 | - "3025:3025" # SMTP 40 | - "3143:3143" # IMAP 41 | environment: 42 | GREENMAIL_OPTS: > 43 | -Dgreenmail.setup.test.all 44 | -Dgreenmail.hostname=0.0.0.0 45 | -Dgreenmail.users=jon:jon@tngtech.test,jane:jane@tngtech.test,alice:alice@tngtech.test,bob:bob@tngtech.test 46 | -------------------------------------------------------------------------------- /testing/images/client/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM flink:1.13.2-scala_2.12 2 | ARG FLINK_VERSION=1.13.2 3 | 4 | COPY sql-client/ /opt/sql-client 5 | 6 | COPY jars/* /opt/sql-client/lib/ 7 | COPY conf/* /opt/flink/conf/ 8 | 9 | WORKDIR /opt/sql-client 10 | ENV FLINK_PROPERTIES "jobmanager.rpc.address: jobmanager" 11 | ENV SQL_CLIENT_HOME /opt/sql-client 12 | -------------------------------------------------------------------------------- 
/testing/images/client/conf/init.sql: -------------------------------------------------------------------------------- 1 | SET 'sql-client.verbose' = 'true'; 2 | SET 'table.dynamic-table-options.enabled' = 'true'; 3 | 4 | CREATE CATALOG jon WITH ( 5 | 'type' = 'imap', 6 | 'host' = 'greenmail', 7 | 'port' = '3143', 8 | 'user' = 'jon', 9 | 'password' = 'jon' 10 | ); 11 | 12 | CREATE CATALOG jane WITH ( 13 | 'type' = 'imap', 14 | 'host' = 'greenmail', 15 | 'port' = '3143', 16 | 'user' = 'jane', 17 | 'password' = 'jane' 18 | ); 19 | -------------------------------------------------------------------------------- /testing/images/client/sql-client/sql-client.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | ${FLINK_HOME}/bin/sql-client.sh embedded \ 4 | -d ${FLINK_HOME}/conf/sql-client-conf.yaml \ 5 | -i ${FLINK_HOME}/conf/init.sql \ 6 | -l ${SQL_CLIENT_HOME}/lib 7 | -------------------------------------------------------------------------------- /testing/run.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | exec docker-compose exec sql ./sql-client.sh 3 | -------------------------------------------------------------------------------- /testing/send_mail.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | send_from="jane" 4 | send_to="jon" 5 | subject="Example Message" 6 | message="This is an example message" 7 | 8 | while getopts f:t:s:m: option 9 | do 10 | case "${option}" in 11 | f) send_from="${OPTARG}" ;; 12 | t) send_to="${OPTARG}" ;; 13 | s) subject="${OPTARG}" ;; 14 | m) message="${OPTARG}" ;; 15 | esac 16 | done 17 | 18 | password="${send_from}" 19 | echo "${message}" | mailx \ 20 | -Sv15-compat \ 21 | -Smta="smtp://${send_from}:${password}@localhost:3025" \ 22 | -s"${subject}" \ 23 | "${send_to}@tngtech.test" 24 | 
--------------------------------------------------------------------------------