├── .github ├── ISSUE_TEMPLATE │ ├── NEW-ISSUE.yml │ └── release_notes.md ├── dependabot.yml └── workflows │ ├── codeql-analysis.yml │ ├── maven-publish.yml │ └── verify.yml ├── .gitignore ├── 404.html ├── CHANGELOG.md ├── CITATION.cff ├── CODE_OF_CONDUCT.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE ├── README.md ├── README_zh.md ├── SECURITY.md ├── build-docker.sh ├── build-module.sh ├── core ├── package.xml ├── pom.xml └── src │ └── main │ ├── bin │ ├── addax.bat │ ├── addax.py │ ├── addax.sh │ ├── dxprof.py │ ├── encrypt_password.sh │ ├── install_plugins.sh │ └── perftrace.py │ ├── conf │ ├── core.json │ └── logback.xml │ ├── java │ └── com │ │ └── wgzhao │ │ └── addax │ │ └── core │ │ ├── AbstractContainer.java │ │ ├── Engine.java │ │ ├── base │ │ ├── BaseObject.java │ │ ├── Constant.java │ │ ├── HBaseConstant.java │ │ ├── HBaseKey.java │ │ └── Key.java │ │ ├── compress │ │ ├── ExpandLzopInputStream.java │ │ ├── ZipCycleInputStream.java │ │ └── ZipCycleOutputStream.java │ │ ├── constant │ │ ├── PluginType.java │ │ └── Type.java │ │ ├── container │ │ └── util │ │ │ ├── HookInvoker.java │ │ │ └── JobAssignUtil.java │ │ ├── element │ │ ├── BoolColumn.java │ │ ├── BytesColumn.java │ │ ├── Column.java │ │ ├── ColumnCast.java │ │ ├── ColumnEntry.java │ │ ├── DateColumn.java │ │ ├── DoubleColumn.java │ │ ├── LongColumn.java │ │ ├── Record.java │ │ ├── StringColumn.java │ │ └── TimestampColumn.java │ │ ├── exception │ │ └── AddaxException.java │ │ ├── job │ │ ├── JobContainer.java │ │ └── scheduler │ │ │ ├── AbstractScheduler.java │ │ │ └── processinner │ │ │ ├── ProcessInnerScheduler.java │ │ │ └── StandAloneScheduler.java │ │ ├── meta │ │ └── State.java │ │ ├── plugin │ │ ├── AbstractJobPlugin.java │ │ ├── AbstractPlugin.java │ │ ├── AbstractTaskPlugin.java │ │ ├── JobPluginCollector.java │ │ ├── PluginCollector.java │ │ ├── Pluginable.java │ │ ├── RecordReceiver.java │ │ ├── RecordSender.java │ │ └── TaskPluginCollector.java │ │ ├── spi │ │ ├── ErrorCode.java │ │ ├── Reader.java │ │ └── Writer.java │ │ ├── statistics │ │ ├── PerfRecord.java │ │ ├── PerfTrace.java │ │ ├── VMInfo.java │ │ ├── communication │ │ │ ├── Communication.java │ │ │ ├── CommunicationTool.java │ │ │ └── LocalTGCommunicationManager.java │ │ ├── container │ │ │ ├── collector │ │ │ │ ├── AbstractCollector.java │ │ │ │ └── ProcessInnerCollector.java │ │ │ ├── communicator │ │ │ │ ├── AbstractContainerCommunicator.java │ │ │ │ ├── job │ │ │ │ │ └── StandAloneJobContainerCommunicator.java │ │ │ │ └── taskgroup │ │ │ │ │ ├── AbstractTGContainerCommunicator.java │ │ │ │ │ └── StandaloneTGContainerCommunicator.java │ │ │ └── report │ │ │ │ ├── AbstractReporter.java │ │ │ │ └── ProcessInnerReporter.java │ │ └── plugin │ │ │ ├── DefaultJobPluginCollector.java │ │ │ └── task │ │ │ ├── AbstractTaskPluginCollector.java │ │ │ ├── StdoutPluginCollector.java │ │ │ └── util │ │ │ └── DirtyRecord.java │ │ ├── taskgroup │ │ ├── TaskGroupContainer.java │ │ ├── TaskMonitor.java │ │ └── runner │ │ │ ├── AbstractRunner.java │ │ │ ├── ReaderRunner.java │ │ │ ├── TaskGroupContainerRunner.java │ │ │ └── WriterRunner.java │ │ ├── transport │ │ ├── channel │ │ │ ├── Channel.java │ │ │ └── memory │ │ │ │ └── MemoryChannel.java │ │ ├── exchanger │ │ │ ├── BufferedRecordExchanger.java │ │ │ ├── BufferedRecordTransformerExchanger.java │ │ │ ├── RecordExchanger.java │ │ │ └── TransformerExchanger.java │ │ ├── record │ │ │ ├── DefaultRecord.java │ │ │ └── TerminateRecord.java │ │ └── transformer │ │ │ ├── ComplexTransformer.java │ │ │ ├── 
ComplexTransformerProxy.java │ │ │ ├── FilterTransformer.java │ │ │ ├── GroovyTransformer.java │ │ │ ├── GroovyTransformerStaticUtil.java │ │ │ ├── MapTransformer.java │ │ │ ├── PadTransformer.java │ │ │ ├── ReplaceTransformer.java │ │ │ ├── SubstrTransformer.java │ │ │ ├── Transformer.java │ │ │ ├── TransformerExecution.java │ │ │ ├── TransformerExecutionParas.java │ │ │ ├── TransformerInfo.java │ │ │ └── TransformerRegistry.java │ │ └── util │ │ ├── ClassSize.java │ │ ├── ClassUtil.java │ │ ├── ConfigParser.java │ │ ├── Configuration.java │ │ ├── ConfigurationValidate.java │ │ ├── EncryptUtil.java │ │ ├── ErrorRecordChecker.java │ │ ├── HostUtils.java │ │ ├── ListUtil.java │ │ ├── MathUtil.java │ │ ├── OverFlowUtil.java │ │ ├── RangeSplitUtil.java │ │ ├── RetryUtil.java │ │ ├── ShellUtil.java │ │ ├── StrUtil.java │ │ ├── TransformerUtil.java │ │ └── container │ │ ├── ClassLoaderSwapper.java │ │ ├── CoreConstant.java │ │ ├── JarLoader.java │ │ └── LoadUtil.java │ ├── job │ ├── clickhouse2stream.json │ ├── dbf2hdfs.json │ ├── dbf2orc.json │ ├── dbf2stream.json │ ├── influxdb2pg.json │ ├── influxdb2stream.json │ ├── job.json │ ├── oracle2dbf.json │ ├── oracle2hdfs.json │ ├── oracle2hive.json │ ├── oracle2oracle.json │ ├── oracle2stream.json │ ├── oracle2txt.json │ ├── redis2stream.json │ ├── stream2clickhouse.json │ ├── stream2databend.json │ ├── stream2dbf.json │ ├── stream2es.json │ ├── stream2gp.json │ ├── stream2hdfs_orc.json │ ├── stream2hdfs_parquet.json │ ├── stream2hdfs_text.json │ ├── stream2hive.json │ ├── stream2kudu.json │ ├── stream2oracle.json │ ├── stream2postgresql.json │ ├── stream2sqlite.json │ ├── stream2txt.json │ └── transformer_demo.json │ └── resources │ └── project.properties ├── difference.md ├── docs ├── assets │ ├── excel_reader_demo.zip │ ├── jobs │ │ ├── accessreader.json │ │ ├── accesswriter.json │ │ ├── cassandrareader.json │ │ ├── cassandrawriter.json │ │ ├── clickhousereader.json │ │ ├── clickhousewriter.json │ │ ├── databend2stream.json │ │ ├── databendwriter.json │ │ ├── datareader.json │ │ ├── dbfreader.json │ │ ├── dbfwriter.json │ │ ├── doriswriter.json │ │ ├── esreader.json │ │ ├── eswriter.json │ │ ├── excelreader.json │ │ ├── excelwriter.json │ │ ├── ftpreader.json │ │ ├── ftpwriter.json │ │ ├── gpwriter.json │ │ ├── groovydemo.json │ │ ├── hanareader.json │ │ ├── hanawriter.json │ │ ├── hbase11xreader_normal.json │ │ ├── hbase11xreader_version.json │ │ ├── hbase11xsqlwriter.json │ │ ├── hbase11xwriter.json │ │ ├── hbase20xreader_normal.json │ │ ├── hbase20xreader_version.json │ │ ├── hbase20xsqlwriter.json │ │ ├── hdfsreader.json │ │ ├── hdfswriter.json │ │ ├── hivereader.json │ │ ├── httpreader.json │ │ ├── icebergwriter.json │ │ ├── influx2stream.json │ │ ├── influxdbreader.json │ │ ├── influxdbwriter.json │ │ ├── jsonreader.json │ │ ├── kafka2stream.json │ │ ├── kudureader.json │ │ ├── kuduwriter.json │ │ ├── mongoreader.json │ │ ├── mongowriter.json │ │ ├── mysqlreader.json │ │ ├── mysqlwriter.json │ │ ├── oraclereader.json │ │ ├── oraclewriter.json │ │ ├── paimonwriter.json │ │ ├── pgreader.json │ │ ├── pgwriter.json │ │ ├── quickstart.json │ │ ├── rdbmsreader.json │ │ ├── rdbmswriter.json │ │ ├── redisreader.json │ │ ├── rediswriter.json │ │ ├── s3reader.json │ │ ├── s3writer.json │ │ ├── sqlitereader.json │ │ ├── sqlitewriter.json │ │ ├── sqlserverreader.json │ │ ├── sqlserverwriter.json │ │ ├── starrockswriter.json │ │ ├── stream2influx2.json │ │ ├── stream2kafka.json │ │ ├── streamreader.json │ │ ├── sybasereader.json │ │ ├── 
sybasewriter.json │ │ ├── tdenginereader.json │ │ ├── tdenginewriter.json │ │ ├── txtreader.json │ │ ├── txtwriter.json │ │ └── udfdemo.json │ ├── output │ │ ├── clickhousereader.txt │ │ ├── datareader.txt │ │ ├── doriswriter.txt │ │ ├── esreader.txt │ │ ├── excelreader.txt │ │ ├── excelwriter.txt │ │ ├── groovydemo.txt │ │ ├── httpreader.txt │ │ ├── job.txt │ │ ├── oracle_geom_reader.txt │ │ ├── quickstart.txt │ │ ├── stream2kafka.txt │ │ ├── sybasereader.txt │ │ ├── tdenginereader.txt │ │ └── tdenginewriter.txt │ └── sql │ │ ├── clickhouse.sql │ │ ├── es.json │ │ ├── gp.sql │ │ ├── hive.sql │ │ ├── http.json │ │ ├── kudu.sql │ │ ├── mysql.sql │ │ ├── oracle_geom.sql │ │ └── postgresql.sql ├── debug.md ├── encrypt_password.md ├── images │ ├── addax-flowchart.drawio │ ├── addax-flowchart.png │ ├── addax_why_new.png │ ├── debug-1.png │ ├── debug-2.png │ ├── debug-3.png │ ├── debug-4.png │ ├── debug-5.png │ ├── debug-6.png │ ├── debug-7.png │ ├── favicon.ico │ ├── logo.png │ ├── logo.svg │ ├── logos │ │ ├── access.svg │ │ ├── cassandra.svg │ │ ├── clickhouse.svg │ │ ├── databend.svg │ │ ├── db2.svg │ │ ├── dbase.svg │ │ ├── doris.svg │ │ ├── elasticsearch.svg │ │ ├── excel.svg │ │ ├── greenplum.jpeg │ │ ├── greenplum.svg │ │ ├── hbase.svg │ │ ├── hive.svg │ │ ├── iceberg.svg │ │ ├── influxdata.svg │ │ ├── kafka.svg │ │ ├── kudu.svg │ │ ├── minio.svg │ │ ├── mongodb.svg │ │ ├── mysql.svg │ │ ├── oracle.svg │ │ ├── paimon.svg │ │ ├── phoenix.svg │ │ ├── postgresql.svg │ │ ├── presto.svg │ │ ├── redis.svg │ │ ├── s3.svg │ │ ├── sap.svg │ │ ├── sqlite.svg │ │ ├── sqlserver.svg │ │ ├── starrocks.svg │ │ ├── sybase.svg │ │ ├── tdengine.svg │ │ └── trino.svg │ └── supported_databases.png ├── index.md ├── plugin_development.md ├── quickstart.md ├── reader │ ├── accessreader.md │ ├── cassandrareader.md │ ├── clickhousereader.md │ ├── databendreader.md │ ├── datareader.md │ ├── dbfreader.md │ ├── elasticsearchreader.md │ ├── excelreader.md │ ├── ftpreader.md │ ├── hanareader.md │ ├── hbase11xreader.md │ ├── hbase11xsqlreader.md │ ├── hbase20xreader.md │ ├── hbase20xsqlreader.md │ ├── hdfsreader.md │ ├── hivereader.md │ ├── httpreader.md │ ├── influxdb2reader.md │ ├── influxdbreader.md │ ├── jsonfilereader.md │ ├── kafkareader.md │ ├── kudureader.md │ ├── mongodbreader.md │ ├── mysqlreader.md │ ├── oraclereader.md │ ├── postgresqlreader.md │ ├── rdbmsreader.md │ ├── redisreader.md │ ├── s3reader.md │ ├── sqlitereader.md │ ├── sqlserverreader.md │ ├── streamreader.md │ ├── sybasereader.md │ ├── tdenginereader.md │ └── txtfilereader.md ├── setupJob.md ├── statsreport.md ├── transformer.md └── writer │ ├── accesswriter.md │ ├── cassandrawriter.md │ ├── clickhousewriter.md │ ├── databendwriter.md │ ├── dbfwriter.md │ ├── doriswriter.md │ ├── elasticsearchwriter.md │ ├── excelwriter.md │ ├── ftpwriter.md │ ├── greenplumwriter.md │ ├── hanawriter.md │ ├── hbase11xsqlwriter.md │ ├── hbase11xwriter.md │ ├── hbase20xsqlwriter.md │ ├── hdfswriter.md │ ├── icebergwriter.md │ ├── influxdb2writer.md │ ├── influxdbwriter.md │ ├── kafkawriter.md │ ├── kuduwriter.md │ ├── mongodbwriter.md │ ├── mysqlwriter.md │ ├── oraclewriter.md │ ├── paimonwriter.md │ ├── postgresqlwriter.md │ ├── rdbmswriter.md │ ├── rediswriter.md │ ├── s3writer.md │ ├── sqlitewriter.md │ ├── sqlserverwriter.md │ ├── starrockswriter.md │ ├── streamwriter.md │ ├── sybasewriter.md │ ├── tdenginewriter.md │ └── txtfilewriter.md ├── install.sh ├── lib ├── addax-rdbms │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ └── java │ │ └── com │ │ 
└── wgzhao │ │ └── addax │ │ └── rdbms │ │ ├── reader │ │ ├── CommonRdbmsReader.java │ │ └── util │ │ │ ├── GetPrimaryKeyUtil.java │ │ │ ├── HintUtil.java │ │ │ ├── MinMaxPackage.java │ │ │ ├── OriginalConfPretreatmentUtil.java │ │ │ ├── PreCheckTask.java │ │ │ ├── ReaderSplitUtil.java │ │ │ └── SingleTableSplitUtil.java │ │ ├── util │ │ ├── ConnectionFactory.java │ │ ├── DBUtil.java │ │ ├── DataBaseType.java │ │ ├── RdbmsException.java │ │ └── TableExpandUtil.java │ │ └── writer │ │ ├── CommonRdbmsWriter.java │ │ └── util │ │ ├── OriginalConfPretreatmentUtil.java │ │ └── WriterUtil.java └── addax-storage │ ├── package.xml │ ├── pom.xml │ └── src │ └── main │ └── java │ └── com │ └── wgzhao │ └── addax │ └── storage │ ├── reader │ └── StorageReaderUtil.java │ ├── util │ └── FileHelper.java │ └── writer │ └── StorageWriterUtil.java ├── logo-slogan.svg ├── mkdocs.yml ├── overrides └── home.html ├── package.xml ├── plugin ├── reader │ ├── accessreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── accessreader │ │ │ │ └── AccessReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── cassandrareader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── cassandrareader │ │ │ │ ├── CassandraReader.java │ │ │ │ ├── CassandraReaderHelper.java │ │ │ │ └── MyKey.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── clickhousereader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── clickhousereader │ │ │ │ └── ClickHouseReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── databendreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── databendreader │ │ │ │ └── DatabendReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── datareader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── datareader │ │ │ │ ├── DataKey.java │ │ │ │ ├── DataReader.java │ │ │ │ ├── Rule.java │ │ │ │ └── util │ │ │ │ ├── AddressUtil.java │ │ │ │ ├── BankUtil.java │ │ │ │ ├── CommonUtil.java │ │ │ │ ├── CompanyUtil.java │ │ │ │ ├── EmailUtil.java │ │ │ │ ├── GeoUtil.java │ │ │ │ ├── IdCardUtil.java │ │ │ │ ├── JobUtil.java │ │ │ │ ├── PersonUtil.java │ │ │ │ ├── PhoneUtil.java │ │ │ │ └── StockUtil.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── dbfreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── dbfreader │ │ │ │ └── DbfReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── elasticsearchreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── elasticsearchreader │ │ │ │ ├── DefaultMemberAccess.java │ │ 
│ │ ├── ESClient.java │ │ │ │ ├── ESKey.java │ │ │ │ ├── EsReader.java │ │ │ │ └── gson │ │ │ │ └── MapTypeAdapter.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── excelreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── excelreader │ │ │ │ ├── ExcelHelper.java │ │ │ │ └── ExcelReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── ftpreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── ftpreader │ │ │ │ ├── FtpConstant.java │ │ │ │ ├── FtpHelper.java │ │ │ │ ├── FtpKey.java │ │ │ │ ├── FtpReader.java │ │ │ │ ├── SftpHelper.java │ │ │ │ └── StandardFtpHelper.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── hanareader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ ├── com │ │ │ │ └── wgzhao │ │ │ │ │ └── addax │ │ │ │ │ └── plugin │ │ │ │ │ └── reader │ │ │ │ │ └── hanareader │ │ │ │ │ └── HANAReader.java │ │ │ └── resources │ │ │ │ ├── plugin.json │ │ │ │ └── plugin_job_template.json │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── hbase11xreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── hbase11xreader │ │ │ │ ├── ColumnType.java │ │ │ │ ├── Hbase11xHelper.java │ │ │ │ ├── Hbase11xReader.java │ │ │ │ ├── HbaseAbstractTask.java │ │ │ │ ├── HbaseColumnCell.java │ │ │ │ ├── ModeType.java │ │ │ │ ├── MultiVersionDynamicColumnTask.java │ │ │ │ ├── MultiVersionFixedColumnTask.java │ │ │ │ ├── MultiVersionTask.java │ │ │ │ └── NormalTask.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── hbase11xsqlreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── hbase11xsqlreader │ │ │ │ ├── HbaseSQLHelper.java │ │ │ │ └── HbaseSQLReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── hbase20xreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── hbase20xreader │ │ │ │ ├── ColumnType.java │ │ │ │ ├── Hbase20xHelper.java │ │ │ │ ├── Hbase20xReader.java │ │ │ │ ├── HbaseAbstractTask.java │ │ │ │ ├── HbaseColumnCell.java │ │ │ │ ├── ModeType.java │ │ │ │ ├── MultiVersionDynamicColumnTask.java │ │ │ │ ├── MultiVersionFixedColumnTask.java │ │ │ │ ├── MultiVersionTask.java │ │ │ │ └── NormalTask.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── hbase20xsqlreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── hbase20xsqlreader │ │ │ │ ├── HBase20SQLReaderHelper.java │ │ │ │ ├── HBase20xSQLReader.java │ │ │ │ └── HBase20xSQLReaderTask.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── hdfsreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── 
wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── hdfsreader │ │ │ │ ├── DFSUtil.java │ │ │ │ ├── FileTypeUtils.java │ │ │ │ ├── HdfsConstant.java │ │ │ │ ├── HdfsReader.java │ │ │ │ ├── JavaType.java │ │ │ │ ├── MyOrcReader.java │ │ │ │ └── MyParquetReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── hivereader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── hivereader │ │ │ │ └── HiveReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── httpreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── httpreader │ │ │ │ ├── HttpKey.java │ │ │ │ └── HttpReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── influxdb2reader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── influxdb2reader │ │ │ │ ├── InfluxDB2Key.java │ │ │ │ └── InfluxDB2Reader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── influxdbreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── influxdbreader │ │ │ │ ├── InfluxDBKey.java │ │ │ │ ├── InfluxDBReader.java │ │ │ │ └── InfluxDBReaderTask.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── jsonfilereader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── jsonfilereader │ │ │ │ └── JsonReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── kafkareader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── kafkareader │ │ │ │ ├── KafkaKey.java │ │ │ │ └── KafkaReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── kudureader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── kudureader │ │ │ │ ├── KuduKey.java │ │ │ │ └── KuduReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── mongodbreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── mongodbreader │ │ │ │ ├── KeyConstant.java │ │ │ │ ├── MongoDBReader.java │ │ │ │ └── util │ │ │ │ ├── CollectionSplitUtil.java │ │ │ │ └── MongoUtil.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── mysqlreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── mysqlreader │ │ │ │ └── MysqlReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── oraclereader │ │ ├── package.xml │ │ ├── 
pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── oraclereader │ │ │ │ └── OracleReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── postgresqlreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── postgresqlreader │ │ │ │ └── PostgresqlReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── rdbmsreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── rdbmsreader │ │ │ │ ├── RdbmsReader.java │ │ │ │ └── SubCommonRdbmsReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── redisreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── redisreader │ │ │ │ ├── RedisKey.java │ │ │ │ ├── RedisReader.java │ │ │ │ ├── impl │ │ │ │ ├── DefaultSentinel.java │ │ │ │ ├── Sentinel.java │ │ │ │ ├── SentinelListener.java │ │ │ │ └── SentinelReplicator.java │ │ │ │ └── util │ │ │ │ └── Reflections.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── s3reader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── s3reader │ │ │ │ ├── S3Key.java │ │ │ │ ├── S3Reader.java │ │ │ │ └── S3Util.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── sqlitereader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── sqlitereader │ │ │ │ └── SqliteReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── sqlserverreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── sqlserverreader │ │ │ │ └── SqlServerReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── streamreader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── streamreader │ │ │ │ ├── StreamConstant.java │ │ │ │ └── StreamReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── sybasereader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── sybasereader │ │ │ │ └── SybaseReader.java │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ ├── tdenginereader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ │ └── main │ │ │ ├── java │ │ │ └── com │ │ │ │ └── wgzhao │ │ │ │ └── addax │ │ │ │ └── plugin │ │ │ │ └── reader │ │ │ │ └── tdenginereader │ │ │ │ ├── TDKey.java │ │ │ │ └── TDengineReader.java │ │ │ ├── libs │ │ │ └── libtaos.so.2.0.16.0 │ │ │ └── resources │ │ │ ├── plugin.json │ │ │ └── plugin_job_template.json │ └── 
txtfilereader │ │ ├── package.xml │ │ ├── pom.xml │ │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── reader │ │ │ └── txtfilereader │ │ │ └── TxtFileReader.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json └── writer │ ├── accesswriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── accesswriter │ │ │ └── AccessWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── cassandrawriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── cassandrawriter │ │ │ ├── CassandraKey.java │ │ │ ├── CassandraWriter.java │ │ │ └── CassandraWriterHelper.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── clickhousewriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── clickhousewriter │ │ │ └── ClickHouseWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── databendwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── databendwriter │ │ │ ├── DatabendWriter.java │ │ │ └── util │ │ │ └── DatabendWriterUtil.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── dbfwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── dbfwriter │ │ │ └── DbfWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── doriswriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── doriswriter │ │ │ ├── DelimiterParser.java │ │ │ ├── DorisKey.java │ │ │ ├── DorisStreamLoadObserver.java │ │ │ ├── DorisUtil.java │ │ │ ├── DorisWriter.java │ │ │ ├── DorisWriterException.java │ │ │ ├── DorisWriterManager.java │ │ │ ├── WriterTuple.java │ │ │ └── codec │ │ │ ├── DorisBaseCodec.java │ │ │ ├── DorisCodec.java │ │ │ ├── DorisCodecFactory.java │ │ │ ├── DorisCsvCodec.java │ │ │ └── DorisJsonCodec.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── elasticsearchwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── elasticsearchwriter │ │ │ ├── ESClient.java │ │ │ ├── ESColumn.java │ │ │ ├── ESFieldType.java │ │ │ ├── ESKey.java │ │ │ └── ESWriter.java │ │ └── resources │ │ ├── plug_job_template.json │ │ └── plugin.json │ ├── excelwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── excelwriter │ │ │ └── ExcelWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── ftpwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── ftpwriter │ │ │ ├── FtpKey.java │ │ │ ├── FtpWriter.java │ │ │ └── util │ │ │ ├── IFtpHelper.java │ │ │ ├── SftpHelperImpl.java │ │ │ └── StandardFtpHelperImpl.java 
│ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── greenplumwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── greenplumwriter │ │ │ ├── CopyWriterTask.java │ │ │ ├── GPConstant.java │ │ │ └── GreenplumWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── hanawriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── hanawriter │ │ │ └── HANAWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── hbase11xsqlwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── hbase11xsqlwriter │ │ │ ├── HbaseSQLHelper.java │ │ │ ├── HbaseSQLWriter.java │ │ │ ├── HbaseSQLWriterConfig.java │ │ │ ├── HbaseSQLWriterTask.java │ │ │ ├── NullModeType.java │ │ │ └── ThinClientPTable.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── hbase11xwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── hbase11xwriter │ │ │ ├── ColumnType.java │ │ │ ├── Hbase11xHelper.java │ │ │ ├── Hbase11xWriter.java │ │ │ ├── HbaseAbstractTask.java │ │ │ ├── ModeType.java │ │ │ ├── MultiVersionTask.java │ │ │ ├── NormalTask.java │ │ │ └── NullModeType.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── hbase20xsqlwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── hbase20xsqlwriter │ │ │ ├── HBase20xSQLHelper.java │ │ │ ├── HBase20xSQLWriter.java │ │ │ ├── HBase20xSQLWriterTask.java │ │ │ └── NullModeType.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── hdfswriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── hdfswriter │ │ │ ├── HdfsHelper.java │ │ │ ├── HdfsWriter.java │ │ │ ├── IHDFSWriter.java │ │ │ ├── OrcWriter.java │ │ │ ├── ParquetWriter.java │ │ │ ├── SupportHiveDataType.java │ │ │ └── TextWriter.java │ │ └── resources │ │ ├── addax_logo.png │ │ ├── parquet_schema.asvo │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── icebergwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── icebergwriter │ │ │ ├── IcebergHelper.java │ │ │ └── IcebergWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── influxdb2writer │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── influxdb2writer │ │ │ ├── InfluxDB2Key.java │ │ │ └── InfluxDB2Writer.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── influxdbwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── influxdbwriter │ │ │ ├── InfluxDBKey.java │ │ │ ├── InfluxDBWriter.java │ │ │ └── InfluxDBWriterTask.java │ │ └── resources │ │ ├── plugin.json │ │ └── 
plugin_job_template.json │ ├── kafkawriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── kafkawriter │ │ │ ├── KafkaKey.java │ │ │ └── KafkaWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── kuduwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── kuduwriter │ │ │ ├── KuduHelper.java │ │ │ ├── KuduKey.java │ │ │ ├── KuduWriter.java │ │ │ └── KuduWriterTask.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── mongodbwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── mongodbwriter │ │ │ ├── KeyConstant.java │ │ │ ├── MongoDBWriter.java │ │ │ └── util │ │ │ └── MongoUtil.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── mysqlwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── mysqlwriter │ │ │ └── MysqlWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── oraclewriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── oraclewriter │ │ │ └── OracleWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── paimonwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── paimonwriter │ │ │ ├── PaimonHelper.java │ │ │ └── PaimonWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── postgresqlwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── postgresqlwriter │ │ │ └── PostgresqlWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── rdbmswriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── rdbmswriter │ │ │ └── RdbmsWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── rediswriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── rediswriter │ │ │ └── RedisWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── s3writer │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── s3writer │ │ │ ├── S3Key.java │ │ │ ├── S3Util.java │ │ │ ├── S3Writer.java │ │ │ └── writer │ │ │ ├── IFormatWriter.java │ │ │ ├── OrcWriter.java │ │ │ ├── ParquetWriter.java │ │ │ ├── SupportHiveDataType.java │ │ │ └── TextWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── sqlitewriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── sqlitewriter │ │ │ └── SqliteWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── 
plugin_job_template.json │ ├── sqlserverwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── sqlserverwriter │ │ │ └── SqlServerWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── starrockswriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── starrockswriter │ │ │ ├── StarRocksWriter.java │ │ │ ├── StarRocksWriterOptions.java │ │ │ ├── manager │ │ │ ├── StarRocksFlushTuple.java │ │ │ ├── StarRocksStreamLoadFailedException.java │ │ │ ├── StarRocksStreamLoadVisitor.java │ │ │ └── StarRocksWriterManager.java │ │ │ ├── row │ │ │ ├── StarRocksBaseSerializer.java │ │ │ ├── StarRocksCsvSerializer.java │ │ │ ├── StarRocksDelimiterParser.java │ │ │ ├── StarRocksISerializer.java │ │ │ ├── StarRocksJsonSerializer.java │ │ │ └── StarRocksSerializerFactory.java │ │ │ └── util │ │ │ └── StarRocksWriterUtil.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── streamwriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── streamwriter │ │ │ ├── StreamKey.java │ │ │ └── StreamWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── sybasewriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── sybasewriter │ │ │ └── SybaseWriter.java │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ ├── tdenginewriter │ ├── package.xml │ ├── pom.xml │ └── src │ │ └── main │ │ ├── java │ │ └── com │ │ │ └── wgzhao │ │ │ └── addax │ │ │ └── plugin │ │ │ └── writer │ │ │ └── tdenginewriter │ │ │ ├── ColumnMeta.java │ │ │ ├── DataHandler.java │ │ │ ├── DefaultDataHandler.java │ │ │ ├── OpentsdbDataHandler.java │ │ │ ├── SchemaManager.java │ │ │ ├── TDKey.java │ │ │ ├── TDengineWriter.java │ │ │ ├── TableMeta.java │ │ │ ├── TableType.java │ │ │ └── TimestampPrecision.java │ │ ├── libs │ │ └── libtaos.so.2.0.16.0 │ │ └── resources │ │ ├── plugin.json │ │ └── plugin_job_template.json │ └── txtfilewriter │ ├── package.xml │ ├── pom.xml │ └── src │ └── main │ ├── java │ └── com │ │ └── wgzhao │ │ └── addax │ │ └── plugin │ │ └── writer │ │ └── txtfilewriter │ │ └── TxtFileWriter.java │ └── resources │ ├── plugin.json │ └── plugin_job_template.json ├── pom.xml ├── shrink_package.sh └── support_data_sources.md /.github/ISSUE_TEMPLATE/release_notes.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Release notes 3 | about: The release notes template 4 | title: 'Release notes for ' 5 | labels: release 6 | assignees: wgzhao 7 | --- 8 | 9 | ## Changelog 10 | 11 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
# Please see the documentation for all configuration options:
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

version: 2
updates:
  - package-ecosystem: "maven" # See documentation for possible values
    directory: "/" # Location of package manifests
    schedule:
      interval: "daily"
    ignore:
      - dependency-name: "ojdbc8"
        versions: ["21.x"]
  # Maintain dependencies for GitHub Actions
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
--------------------------------------------------------------------------------
/.github/workflows/verify.yml:
--------------------------------------------------------------------------------
# This is a basic workflow to help you get started with Actions

name: CI

# Controls when the workflow will run
on:
  # Triggers the workflow on pull request events, but only for the master branch
  pull_request:
    branches: [ master ]

  # Allows you to run this workflow manually from the Actions tab
  workflow_dispatch:

# A workflow run is made up of one or more jobs that can run sequentially or in parallel
jobs:
  verify:
    runs-on: ubuntu-latest

    steps:
      - uses: actions/checkout@v4
      - name: Set up Maven
        uses: stCarolas/setup-maven@v5
        with:
          maven-version: 3.8.8
      - name: Set up JDK 17
        uses: actions/setup-java@v4.7.1
        with:
          java-version: '17'
          distribution: 'adopt'
      - run: |
          export MAVEN_OPTS="-DskipTests -Dmaven.javadoc.skip=true -Dmaven.source.skip=true -Dgpg.skip=true"
          mvn -B -V -T 1 verify
          mvn -B -V -T 1 enforcer:enforce
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
.DS_Store
.AppleDouble
.LSOverride
Icon
._*
.fseventsd
*.class
*.log
*.ctxt
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar
hs_err_pid*
.idea/
target/
pom.xml.tag
pom.xml.releaseBackup
pom.xml.versionsBackup
pom.xml.next
release.properties
dependency-reduced-pom.xml
*.iml
__pycache__/
*.py[cod]
*$py.class
.vscode
.classpath
.settings
.project
*.secret
site
--------------------------------------------------------------------------------
/404.html:
--------------------------------------------------------------------------------

--------------------------------------------------------------------------------
/CITATION.cff:
--------------------------------------------------------------------------------
cff-version: 1.2.0
message: "If you use this software, please cite it as below."
authors:
  - given-names: "wgzhao"
    email: wgzhao@gmail.com
title: "Addax"
version: 4.1.5
identifiers:
  - type: url
    value: 'https://github.com/wgzhao/Addax'
repository-code: 'https://github.com/wgzhao/Addax'
url: 'https://wgzhao.github.io/Addax'
license: Apache-2.0
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
# Security Policy

## Supported Versions

The current release is the supported version. Security fixes are released together
with all other fixes in each new release.

## Reporting a Vulnerability

Please report security vulnerabilities to **wgzhao@gmail.com**.
--------------------------------------------------------------------------------
/core/src/main/bin/encrypt_password.sh:
--------------------------------------------------------------------------------
#!/bin/bash
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

# encrypt the given password

SCRIPT_PATH="$(
  cd -- "$(dirname "$0")" >/dev/null 2>&1
  pwd -P
)"

ADDAX_HOME=$(dirname $SCRIPT_PATH)
if [ -z "${ADDAX_HOME}" ]; then
  exit 2
fi

if [ $# -ne 1 ]; then
  echo "Usage: $0 <password>"
  exit 1
fi

cd ${ADDAX_HOME}
commjar=$(ls lib/addax-common-*.jar lib/slf4j-*.jar lib/logback*.jar | tr '\t' ':')
for jar in ${commjar[@]}
do
  classpath=${classpath}:$jar
done
java -cp $classpath com.wgzhao.addax.common.util.EncryptUtil $1
#java -cp ${ADDAX_HOME}/lib/addax-common-*.jar:${ADDAX_HOME}/lib/slf4j-api-*.jar com.wgzhao.addax.core.util.EncryptUtil $1
--------------------------------------------------------------------------------
/core/src/main/conf/core.json:
--------------------------------------------------------------------------------
{
  "entry": {
    "jvm": "-Xms32M -Xmx1G",
    "environment": {}
  },
  "common": {
    "column": {
      "datetimeFormat": "yyyy-MM-dd HH:mm:ss",
      "timeFormat": "HH:mm:ss",
      "dateFormat": "yyyy-MM-dd",
      "extraFormats": [
        "yyyyMMdd"
      ],
      "timeZone": "PRC",
      "encoding": "utf-8"
    }
  },
  "core": {
    "server": {
      "address": "",
      "timeout": 5
    },
    "transport": {
      "channel": {
        "speed": {
          "byte": -1,
          "record": -1
        },
        "flowControlInterval": 20,
        "capacity": 512,
        "byteCapacity": 67108864
      }
    },
    "container": {
      "job": {
        "reportInterval": 10000,
        "sleepInterval": 3000
      },
      "taskGroup": {
        "channel": 5
      }
    }
  }
}
--------------------------------------------------------------------------------
/core/src/main/java/com/wgzhao/addax/core/constant/Type.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.wgzhao.addax.core.constant;

/**
 * Internal data types of Addax
 */
public enum Type
{
    STRING, LONG, BOOLEAN, DOUBLE, DATE,
}
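The Type enum above names the internal types a value can carry while it moves through the engine; the concrete carriers are the Column subclasses under core/element in the tree (StringColumn, LongColumn, and so on). As a rough sketch of how a plugin might pick a carrier per internal type (the mapping and the String-accepting constructors are assumptions, not the framework's own factory):

```java
package com.wgzhao.addax.example;

// Hypothetical helper: maps the internal Type enum to the Column
// implementations listed under core/src/main/java/.../core/element.
// The constructors used here are assumed; illustrative only.
import com.wgzhao.addax.core.constant.Type;
import com.wgzhao.addax.core.element.BoolColumn;
import com.wgzhao.addax.core.element.Column;
import com.wgzhao.addax.core.element.DateColumn;
import com.wgzhao.addax.core.element.DoubleColumn;
import com.wgzhao.addax.core.element.LongColumn;
import com.wgzhao.addax.core.element.StringColumn;

public final class ColumnFactory
{
    private ColumnFactory() {}

    // Wrap a raw string value in the Column subtype matching the declared type.
    public static Column fromString(Type type, String raw)
    {
        switch (type) {
            case STRING:
                return new StringColumn(raw);
            case LONG:
                return new LongColumn(raw);
            case DOUBLE:
                return new DoubleColumn(raw);
            case BOOLEAN:
                return new BoolColumn(raw);
            case DATE:
                // DateColumn is sketched here as taking epoch milliseconds
                return new DateColumn(Long.parseLong(raw));
            default:
                throw new IllegalArgumentException("unsupported type: " + type);
        }
    }
}
```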
--------------------------------------------------------------------------------
/core/src/main/java/com/wgzhao/addax/core/plugin/JobPluginCollector.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.wgzhao.addax.core.plugin;

import java.util.List;
import java.util.Map;

/**
 * Created by jingxing on 14-9-9.
 */
public interface JobPluginCollector
        extends PluginCollector
{

    // gather all messages reported by the tasks, keyed by message type
    Map<String, List<String>> getMessage();

    // gather the messages reported by the tasks under the given key
    List<String> getMessage(String key);
}
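The tree lists DefaultJobPluginCollector as the framework's implementation of this interface. As a sketch of what satisfying the contract involves (not the actual class; the collect method below is a hypothetical reporting hook), an in-memory collector could look like:

```java
package com.wgzhao.addax.example;

// Illustrative in-memory implementation of JobPluginCollector; the real
// framework class is DefaultJobPluginCollector (see the tree above).
import com.wgzhao.addax.core.plugin.JobPluginCollector;

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

public class InMemoryJobPluginCollector implements JobPluginCollector
{
    // Task threads report messages concurrently, keyed by message type.
    private final Map<String, List<String>> messages = new ConcurrentHashMap<>();

    // Hypothetical hook a task-side collector would call to report a message.
    public void collect(String key, String value)
    {
        messages.computeIfAbsent(key, k -> Collections.synchronizedList(new ArrayList<>()))
                .add(value);
    }

    @Override
    public Map<String, List<String>> getMessage()
    {
        return messages;
    }

    @Override
    public List<String> getMessage(String key)
    {
        return messages.get(key);
    }
}
```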
--------------------------------------------------------------------------------
/core/src/main/java/com/wgzhao/addax/core/plugin/PluginCollector.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.wgzhao.addax.core.plugin;

public interface PluginCollector
{

}
--------------------------------------------------------------------------------
/core/src/main/java/com/wgzhao/addax/core/plugin/RecordSender.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * http://www.apache.org/licenses/LICENSE-2.0
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.wgzhao.addax.core.plugin;

import com.wgzhao.addax.core.element.Record;

public interface RecordSender
{

    Record createRecord();

    void sendToWriter(Record record);

    void flush();

    void terminate();

    void shutdown();
}
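A reader task pushes rows through this interface. A minimal sketch of the usual loop follows, using the Column classes named in the tree; Record.addColumn and the column constructors are assumptions taken from the package layout, not confirmed signatures:

```java
package com.wgzhao.addax.example;

// Sketch of a reader-side loop driven by RecordSender; illustrative only.
import com.wgzhao.addax.core.element.LongColumn;
import com.wgzhao.addax.core.element.Record;
import com.wgzhao.addax.core.element.StringColumn;
import com.wgzhao.addax.core.plugin.RecordSender;

public class ExampleReaderTask
{
    public void startRead(RecordSender recordSender)
    {
        for (long i = 0; i < 3; i++) {
            Record record = recordSender.createRecord(); // allocate an empty record
            record.addColumn(new LongColumn(i));         // assumed Record.addColumn(Column)
            record.addColumn(new StringColumn("row-" + i));
            recordSender.sendToWriter(record);           // hand the record to the channel
        }
        recordSender.flush();                            // drain any buffered records
    }
}
```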
--------------------------------------------------------------------------------
/core/src/main/java/com/wgzhao/addax/core/statistics/container/report/AbstractReporter.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.wgzhao.addax.core.statistics.container.report;

import com.wgzhao.addax.core.statistics.communication.Communication;

public abstract class AbstractReporter
{

    public abstract void reportJobCommunication(Communication communication);

    public abstract void reportTGCommunication(Integer taskGroupId, Communication communication);
}
--------------------------------------------------------------------------------
/core/src/main/java/com/wgzhao/addax/core/transport/transformer/GroovyTransformerStaticUtil.java:
--------------------------------------------------------------------------------
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */

package com.wgzhao.addax.core.transport.transformer;

public class GroovyTransformerStaticUtil
{

}
--------------------------------------------------------------------------------
/core/src/main/job/clickhouse2stream.json:
--------------------------------------------------------------------------------
{
  "job": {
    "setting": {
      "speed": {
        "channel": 3
      },
      "errorLimit": {
        "record": 0,
        "percentage": 0.02
      }
    },
    "content": {
      "reader": {
        "name": "clickhousereader",
        "parameter": {
          "username": "default",
          "password": "",
          "column": [
            "*"
          ],
          "connection": {
            "jdbcUrl": "jdbc:clickhouse://127.0.0.1:8123/tpch",
            "querySql": [
              "select * from orders limit 10"
            ]
          }
        }
      },
      "writer": {
        "name": "streamwriter",
        "parameter": {
          "print": true
        }
      }
    }
  }
}
--------------------------------------------------------------------------------
/core/src/main/job/dbf2stream.json:
--------------------------------------------------------------------------------
{
  "job": {
    "setting": {
      "speed": {
        "channel": 2
      }
    },
    "content": {
      "reader": {
        "name": "dbfreader",
        "parameter": {
          "column": [
            {
              "index": 0,
              "type": "string"
            },
            {
              "index": 1,
              "type": "long"
            },
            {
              "index": 2,
              "type": "string"
            },
            {
              "index": 3,
              "type": "boolean"
            },
            {
              "index": 4,
              "type": "string"
            },
            {
              "value": "201908",
              "type": "string"
            },
            {
              "value": "dbf",
              "type": "string"
            }
          ],
          "path": [
            "/tmp/out"
          ],
          "compress": "gzip",
          "encoding": "UTF-8"
        }
      },
      "writer": {
        "name": "streamwriter",
        "parameter": {
          "print": "true"
        }
      }
    }
  }
}
--------------------------------------------------------------------------------
/core/src/main/job/influxdb2pg.json:
--------------------------------------------------------------------------------
{
  "job": {
    "content": {
      "reader": {
        "name": "influxdbreader",
        "parameter": {
          "column": [
            "*"
          ],
          "connection": {
            "endpoint": "http://localhost:8086",
            "database": "NOAA_water_database",
            "table": "h2o_feet"
          },
          "username": "influx",
          "password": "influx123"
        }
      },
      "writer": {
        "name": "postgresqlwriter",
        "parameter": {
          "username": "wgzhao",
          "password": "wgzhao",
          "column": [
            "*"
          ],
          "connection": {
            "table": [
              "influx_tbl"
            ],
            "jdbcUrl": "jdbc:postgresql://localhost:5432/wgzhao"
          }
        }
      }
    },
    "setting": {
      "speed": {
        "bytes": -1,
        "channel": 1
      }
    }
  }
}
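The JSON files under core/src/main/job, such as the three above, are ready-to-run sample jobs; they are normally launched through bin/addax.sh. As a rough sketch of driving the same engine from Java, with the caveat that Engine's argument handling is an assumption here rather than something this listing confirms:

```java
package com.wgzhao.addax.example;

// Hypothetical programmatic launch of a sample job. The entry class
// com.wgzhao.addax.core.Engine appears in the tree, but its CLI contract
// is assumed; bin/addax.sh is the supported way to run a job.
import com.wgzhao.addax.core.Engine;

public class RunSampleJob
{
    public static void main(String[] args) throws Throwable
    {
        Engine.entry(new String[] {"-job", "core/src/main/job/job.json"});
    }
}
```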
--------------------------------------------------------------------------------
/core/src/main/job/influxdb2stream.json:
--------------------------------------------------------------------------------
{
  "job": {
    "content": {
      "reader": {
        "name": "influxdbreader",
        "parameter": {
          "column": [
            "*"
          ],
          "connection": {
            "endpoint": "http://localhost:8086",
            "database": "NOAA_water_database",
            "table": "h2o_feet"
          },
          "username": "influx",
          "password": "influx123"
        }
      },
      "writer": {
        "name": "streamwriter",
        "parameter": {
          "print": "true"
        }
      }
    },
    "setting": {
      "speed": {
        "bytes": -1,
        "channel": 1
      }
    }
  }
}
--------------------------------------------------------------------------------
/core/src/main/job/job.json:
--------------------------------------------------------------------------------
{
  "job": {
    "setting": {
      "speed": {
        "byte": -1,
        "channel": 1
      },
      "errorLimit": {
        "record": 0,
        "percentage": 0.02
      }
    },
    "content": {
      "reader": {
        "name": "streamreader",
        "parameter": {
          "column": [
            {
              "value": "addax",
              "type": "string"
            },
            {
              "value": 19890604,
              "type": "long"
            },
            {
              "value": "1989-06-04 00:00:00",
              "type": "date"
            },
            {
              "value": true,
              "type": "bool"
            },
            {
              "value": "test",
              "type": "bytes"
            }
          ],
          "sliceRecordCount": 10
        }
      },
      "writer": {
        "name": "streamwriter",
        "parameter": {
          "print": true,
          "column": [
            "col1"
          ],
          "encoding": "UTF-8"
        }
      }
    }
  }
}
--------------------------------------------------------------------------------
/core/src/main/job/oracle2oracle.json:
--------------------------------------------------------------------------------
{
  "job": {
    "content": [
      {
        "writer": {
          "name": "oraclewriter",
          "parameter": {
            "username": "oracle",
            "password": "password",
            "column": [
              "*"
            ],
            "connection": {
              "table": [
                "${ddb}.${dtable}"
              ],
              "jdbcUrl": "jdbc:oracle:thin:@127.0.0.1/stage"
            },
            "preSql": [
              "truncate table @table"
            ]
          }
        },
        "reader": {
          "name": "oraclereader",
          "parameter": {
            "column": [
              "*"
            ],
            "username": "oracle",
            "password": "password",
            "connection": [
              {
                "table": [
                  "${sdb}.${stable}"
                ],
                "jdbcUrl": [
                  "jdbc:oracle:thin:@127.0.0.1/stage"
                ]
              }
            ]
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "record": -1,
        "byte": -1,
        "channel": 1
      }
    }
  }
}
"setting": { 33 | "speed": { 34 | "bytes": -1, 35 | "channel": 1 36 | } 37 | } 38 | } 39 | } -------------------------------------------------------------------------------- /core/src/main/job/redis2stream.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": [ 4 | { 5 | "reader": { 6 | "name": "redisreader", 7 | "parameter": { 8 | "connection": [ 9 | { 10 | "uri": "tcp://10.60.172.162:6379", 11 | "auth": "redis@123" 12 | } 13 | ], 14 | "include":[], 15 | "exclude":[], 16 | "db":[0] 17 | } 18 | }, 19 | "writer": { 20 | "name": "streamwriter", 21 | "parameter": { 22 | "print":"true" 23 | } 24 | } 25 | } 26 | ], 27 | "setting": { 28 | "speed": { 29 | "channel": 1 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /core/src/main/job/stream2txt.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": -1, 6 | "channel": 1 7 | } 8 | }, 9 | "content": [ 10 | { 11 | "reader": { 12 | "name": "streamreader", 13 | "parameter": { 14 | "column": [ 15 | { 16 | "value": "addax", 17 | "type": "string" 18 | }, 19 | { 20 | "value": 19890604, 21 | "type": "long" 22 | }, 23 | { 24 | "value": "1989-06-04 00:00:00", 25 | "type": "date" 26 | }, 27 | { 28 | "value": true, 29 | "type": "bool" 30 | }, 31 | { 32 | "value": "test", 33 | "type": "bytes" 34 | } 35 | ], 36 | "sliceRecordCount": 1000 37 | } 38 | }, 39 | "writer": { 40 | "name": "txtfilewriter", 41 | "parameter": { 42 | "path": "/tmp/out", 43 | "fileName": "test", 44 | "writeMode": "truncate", 45 | "encoding": "UTF-8" 46 | } 47 | } 48 | } 49 | ] 50 | } 51 | } -------------------------------------------------------------------------------- /core/src/main/resources/project.properties: -------------------------------------------------------------------------------- 1 | version=${project.version} 2 | artifactId=${project.artifactId} -------------------------------------------------------------------------------- /docs/assets/excel_reader_demo.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/assets/excel_reader_demo.zip -------------------------------------------------------------------------------- /docs/assets/jobs/accessreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": -1, 6 | "channel": 1 7 | } 8 | }, 9 | "content": 10 | { 11 | "reader": { 12 | "name": "accessreader", 13 | "parameter": { 14 | "username": "root", 15 | "password": "", 16 | "column": [ 17 | "*" 18 | ], 19 | "connection": { 20 | "table": [ 21 | "tbl_Users" 22 | ], 23 | "jdbcUrl": "jdbc:ucanaccess:///Users/wgzhao/Downloads/AccessThemeDemo.mdb" 24 | }, 25 | "where": "" 26 | } 27 | }, 28 | "writer": { 29 | "name": "streamwriter", 30 | "parameter": { 31 | "encoding": "utf-8", 32 | "print": true 33 | } 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /docs/assets/jobs/cassandrareader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "cassandrareader", 12 | "parameter": { 13 | "host": "localhost", 14 | 
"port": 9042, 15 | "useSSL": false, 16 | "keyspace": "test", 17 | "table": "addax_src", 18 | "column": [ 19 | "textCol", 20 | "blobCol", 21 | "writetime(blobCol)", 22 | "boolCol", 23 | "smallintCol", 24 | "tinyintCol", 25 | "intCol", 26 | "bigintCol", 27 | "varintCol", 28 | "floatCol", 29 | "doubleCol", 30 | "decimalCol", 31 | "dateCol", 32 | "timeCol", 33 | "timeStampCol", 34 | "uuidCol", 35 | "inetCol", 36 | "durationCol", 37 | "listCol", 38 | "mapCol", 39 | "setCol", 40 | "tupleCol", 41 | "udtCol" 42 | ] 43 | } 44 | }, 45 | "writer": { 46 | "name": "streamwriter", 47 | "parameter": { 48 | "print": true 49 | } 50 | } 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /docs/assets/jobs/clickhousereader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | }, 8 | "errorLimit": { 9 | "record": 0, 10 | "percentage": 0.02 11 | } 12 | }, 13 | "content": { 14 | "reader": { 15 | "name": "clickhousereader", 16 | "parameter": { 17 | "username": "root", 18 | "password": "root", 19 | "column": [ 20 | "*" 21 | ], 22 | "connection": { 23 | "table": [ 24 | "ck_addax" 25 | ], 26 | "jdbcUrl": "jdbc:clickhouse://127.0.0.1:8123/default" 27 | } 28 | } 29 | }, 30 | "writer": { 31 | "name": "streamwriter", 32 | "parameter": { 33 | "print": true 34 | } 35 | } 36 | } 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /docs/assets/jobs/clickhousewriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1 6 | } 7 | }, 8 | "content": { 9 | "writer": { 10 | "name": "clickhousewriter", 11 | "parameter": { 12 | "username": "default", 13 | "column": [ 14 | "*" 15 | ], 16 | "connection": { 17 | "table": [ 18 | "ck_addax_writer" 19 | ], 20 | "jdbcUrl": "jdbc:clickhouse://127.0.0.1:8123/default" 21 | }, 22 | "preSql": [ 23 | "alter table @table delete where 1=1" 24 | ] 25 | } 26 | }, 27 | "reader": { 28 | "name": "clickhousereader", 29 | "parameter": { 30 | "username": "default", 31 | "column": [ 32 | "*" 33 | ], 34 | "connection": { 35 | "jdbcUrl": "jdbc:clickhouse://127.0.0.1:8123/", 36 | "table": [ 37 | "ck_addax" 38 | ] 39 | } 40 | } 41 | } 42 | } 43 | } 44 | } 45 | -------------------------------------------------------------------------------- /docs/assets/jobs/databend2stream.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": [ 4 | { 5 | "reader": { 6 | "name": "databendreader", 7 | "parameter": { 8 | "column": [ 9 | "*" 10 | ], 11 | "connection": { 12 | "jdbcUrl": "jdbc:databend://127.0.0.1:8000/default", 13 | "table": [ 14 | "addax_reader" 15 | ] 16 | }, 17 | "username": "databend", 18 | "password": "databend" 19 | } 20 | }, 21 | "writer": { 22 | "name": "streamwriter", 23 | "parameter": { 24 | "print": "true" 25 | } 26 | } 27 | } 28 | ], 29 | "setting": { 30 | "speed": { 31 | "bytes": -1, 32 | "channel": 1 33 | } 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /docs/assets/jobs/databendwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 2 6 | } 7 | }, 8 | "content": { 9 | "writer": { 10 | "name": "databendwriter", 11 | "parameter": { 12 | "preSql": [ 13 | 
"truncate table @table" 14 | ], 15 | "postSql": [], 16 | "connection": { 17 | "jdbcUrl": "jdbc:databend://localhost:8000/addax", 18 | "table": [ 19 | "table1" 20 | ] 21 | }, 22 | "username": "u1", 23 | "password": "123", 24 | "column": [ 25 | "*" 26 | ] 27 | } 28 | }, 29 | "reader": { 30 | "name": "streamreader", 31 | "parameter": { 32 | "column": [ 33 | { 34 | "random": "1,500", 35 | "type": "long" 36 | }, 37 | { 38 | "random": "1,127", 39 | "type": "long" 40 | }, 41 | { 42 | "value": "this is a text", 43 | "type": "string" 44 | }, 45 | { 46 | "random": "5,200", 47 | "type": "long" 48 | } 49 | ], 50 | "sliceRecordCount": 100 51 | } 52 | } 53 | } 54 | } 55 | } 56 | -------------------------------------------------------------------------------- /docs/assets/jobs/dbfreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 2, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "dbfreader", 12 | "parameter": { 13 | "column": [ 14 | { 15 | "index": 0, 16 | "type": "string" 17 | }, 18 | { 19 | "index": 1, 20 | "type": "long" 21 | }, 22 | { 23 | "index": 2, 24 | "type": "string" 25 | }, 26 | { 27 | "index": 3, 28 | "type": "boolean" 29 | }, 30 | { 31 | "index": 4, 32 | "type": "string" 33 | }, 34 | { 35 | "value": "dbf", 36 | "type": "string" 37 | } 38 | ], 39 | "path": [ 40 | "/tmp/out" 41 | ], 42 | "encoding": "GBK" 43 | } 44 | }, 45 | "writer": { 46 | "name": "streamwriter", 47 | "parameter": { 48 | "print": "true" 49 | } 50 | } 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /docs/assets/jobs/excelreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 2, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "excelreader", 12 | "parameter": { 13 | "path": [ 14 | "/tmp/in" 15 | ], 16 | "header": true, 17 | "skipRows": 0 18 | } 19 | }, 20 | "writer": { 21 | "parameter": { 22 | "print": true 23 | }, 24 | "name": "streamwriter" 25 | } 26 | } 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /docs/assets/jobs/excelwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": -1, 6 | "channel": 1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "streamreader", 12 | "parameter": { 13 | "column": [ 14 | { 15 | "value": "DataX", 16 | "type": "string" 17 | }, 18 | { 19 | "value": 19890604, 20 | "type": "long" 21 | }, 22 | { 23 | "value": "1989-06-04 11:22:33", 24 | "type": "date" 25 | }, 26 | { 27 | "value": true, 28 | "type": "bool" 29 | }, 30 | { 31 | "value": "test", 32 | "type": "bytes" 33 | } 34 | ], 35 | "sliceRecordCount": 1000 36 | } 37 | }, 38 | "writer": { 39 | "name": "excelwriter", 40 | "parameter": { 41 | "path": "/tmp/out", 42 | "fileName": "test", 43 | "header": [ 44 | "str", 45 | "长度", 46 | "日期", 47 | "是否为真", 48 | "字节类型" 49 | ] 50 | } 51 | } 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /docs/assets/jobs/ftpwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 2, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": {}, 11 | "writer": { 12 | "name": 
"ftpwriter", 13 | "parameter": { 14 | "protocol": "sftp", 15 | "host": "***", 16 | "port": 22, 17 | "username": "xxx", 18 | "password": "xxx", 19 | "timeout": "60000", 20 | "connectPattern": "PASV", 21 | "path": "/tmp/data/", 22 | "fileName": "test", 23 | "writeMode": "truncate|append|nonConflict", 24 | "fieldDelimiter": ",", 25 | "encoding": "UTF-8", 26 | "nullFormat": "null", 27 | "dateFormat": "yyyy-MM-dd", 28 | "fileFormat": "csv", 29 | "useKey": false, 30 | "keyPath": "", 31 | "keyPass": "", 32 | "header": [] 33 | } 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /docs/assets/jobs/gpwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "postgresqlreader", 12 | "parameter": { 13 | "username": "wgzhao", 14 | "password": "wgzhao", 15 | "column": [ 16 | "*" 17 | ], 18 | "connection": { 19 | "table": [ 20 | "addax_tbl" 21 | ], 22 | "jdbcUrl": "jdbc:postgresql://localhost:5432/wgzhao" 23 | } 24 | } 25 | }, 26 | "writer": { 27 | "name": "greenplumwriter", 28 | "parameter": { 29 | "username": "wgzhao", 30 | "password": "wgzhao", 31 | "column": [ 32 | "*" 33 | ], 34 | "preSql": [ 35 | "truncate table @table" 36 | ], 37 | "connection": { 38 | "jdbcUrl": "jdbc:postgresql://localhost:5432/wgzhao", 39 | "table": [ 40 | "gp_test" 41 | ] 42 | } 43 | } 44 | } 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /docs/assets/jobs/hanareader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": [ 4 | { 5 | "reader": { 6 | "name": "hanareader", 7 | "parameter": { 8 | "column": [ 9 | "*" 10 | ], 11 | "connection": { 12 | "jdbcUrl": "jdbc:sap://wgzhao-pc:39017/system", 13 | "table": [ 14 | "addax_tbl" 15 | ] 16 | }, 17 | "username": "system", 18 | "password": "HXEHana1" 19 | } 20 | }, 21 | "writer": { 22 | "name": "streamwriter", 23 | "parameter": { 24 | "print": true 25 | } 26 | } 27 | } 28 | ], 29 | "setting": { 30 | "speed": { 31 | "bytes": -1, 32 | "channel": 1 33 | } 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /docs/assets/jobs/hbase20xsqlwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": { 4 | "reader": { 5 | "name": "txtfilereader", 6 | "parameter": { 7 | "path": "/tmp/normal.txt", 8 | "charset": "UTF-8", 9 | "column": [ 10 | { 11 | "index": 0, 12 | "type": "String" 13 | }, 14 | { 15 | "index": 1, 16 | "type": "string" 17 | }, 18 | { 19 | "index": 2, 20 | "type": "string" 21 | }, 22 | { 23 | "index": 3, 24 | "type": "string" 25 | } 26 | ], 27 | "fieldDelimiter": "," 28 | } 29 | }, 30 | "writer": { 31 | "name": "hbase20xsqlwriter", 32 | "parameter": { 33 | "batchSize": "100", 34 | "column": [ 35 | "UID", 36 | "TS", 37 | "EVENTID", 38 | "CONTENT" 39 | ], 40 | "queryServerAddress": "http://127.0.0.1:8765", 41 | "nullMode": "skip", 42 | "table": "TEST_TBL" 43 | } 44 | } 45 | }, 46 | "setting": { 47 | "speed": { 48 | "channel": 5, 49 | "bytes": -1 50 | } 51 | } 52 | } 53 | } 54 | -------------------------------------------------------------------------------- /docs/assets/jobs/hdfsreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | 
"setting": { 4 | "speed": { 5 | "channel": 3, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "hdfsreader", 12 | "parameter": { 13 | "path": "/user/hive/warehouse/mytable01/*", 14 | "defaultFS": "hdfs://xxx:port", 15 | "column": [ 16 | { 17 | "index": 0, 18 | "type": "long" 19 | }, 20 | { 21 | "index": 1, 22 | "type": "boolean" 23 | }, 24 | { 25 | "type": "string", 26 | "value": "hello" 27 | }, 28 | { 29 | "index": 2, 30 | "type": "double" 31 | } 32 | ], 33 | "fileType": "orc", 34 | "encoding": "UTF-8", 35 | "fieldDelimiter": "," 36 | } 37 | }, 38 | "writer": { 39 | "name": "streamwriter", 40 | "parameter": { 41 | "print": true 42 | } 43 | } 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /docs/assets/jobs/hivereader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": -1, 6 | "channel": 1 7 | }, 8 | "errorLimit": { 9 | "record": 0, 10 | "percentage": 0 11 | } 12 | }, 13 | "content": { 14 | "reader": { 15 | "name": "hivereader", 16 | "parameter": { 17 | "column": [ 18 | "*" 19 | ], 20 | "username": "hive", 21 | "password": "", 22 | "connection": { 23 | "jdbcUrl": "jdbc:hive2://localhost:10000/default;principal=hive/_HOST@EXAMPLE.COM", 24 | "table": [ 25 | "hive_reader" 26 | ] 27 | }, 28 | "where": "logdate='20211013'", 29 | "haveKerberos": true, 30 | "kerberosKeytabFilePath": "/etc/security/keytabs/hive.headless.keytab", 31 | "kerberosPrincipal": "hive@EXAMPLE.COM" 32 | } 33 | }, 34 | "writer": { 35 | "name": "streamwriter", 36 | "parameter": { 37 | "print": true 38 | } 39 | } 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /docs/assets/jobs/httpreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "httpreader", 12 | "parameter": { 13 | "connection": [ 14 | { 15 | "url": "http://127.0.0.1:9090/mock/17/LDJSC/ASSET", 16 | "proxy": { 17 | "host": "http://127.0.0.1:3128", 18 | "auth": "user:pass" 19 | } 20 | } 21 | ], 22 | "reqParams": { 23 | "CURR_DATE": "2021-01-18", 24 | "DEPT": "9700" 25 | }, 26 | "resultKey": "result", 27 | "method": "GET", 28 | "column": [ 29 | "CURR_DATE", 30 | "DEPT", 31 | "TOTAL_MANAGED_MARKET_VALUE", 32 | "TOTAL_MANAGED_MARKET_VALUE_GROWTH" 33 | ], 34 | "username": "user", 35 | "password": "passw0rd", 36 | "headers": { 37 | "X-Powered-by": "Addax" 38 | } 39 | } 40 | }, 41 | "writer": { 42 | "name": "streamwriter", 43 | "parameter": { 44 | "print": "true" 45 | } 46 | } 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /docs/assets/jobs/influx2stream.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": { 4 | "reader": { 5 | "name": "influxdb2reader", 6 | "parameter": { 7 | "column": [ 8 | "location", 9 | "height", 10 | "wet" 11 | ], 12 | "connection": { 13 | "endpoint": "http://localhost:8086", 14 | "bucket": "test", 15 | "table": [ 16 | "temperature" 17 | ], 18 | "org": "com.wgzhao" 19 | }, 20 | "token": "YOUR_SECURE_TOKEN", 21 | "range": [ 22 | "-1h", 23 | "-5m" 24 | ], 25 | "limit": 10 26 | } 27 | }, 28 | "writer": { 29 | "name": "streamwriter", 30 | "parameter": { 31 | "print": "true" 32 | } 33 | } 34 | }, 35 | "setting": 
{ 36 | "speed": { 37 | "bytes": -1, 38 | "channel": 1 39 | } 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /docs/assets/jobs/influxdbreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": { 4 | "reader": { 5 | "name": "influxdbreader", 6 | "parameter": { 7 | "column": [ 8 | "*" 9 | ], 10 | "where": "1=1", 11 | "connection": { 12 | "endpoint": "http://localhost:8086", 13 | "database": "NOAA_water_database", 14 | "table": "h2o_feet" 15 | }, 16 | "connTimeout": 15, 17 | "readTimeout": 20, 18 | "writeTimeout": 20, 19 | "username": "influx", 20 | "password": "influx123" 21 | } 22 | }, 23 | "writer": { 24 | "name": "streamwriter", 25 | "parameter": { 26 | "print": "true" 27 | } 28 | } 29 | }, 30 | "setting": { 31 | "speed": { 32 | "bytes": -1, 33 | "channel": 1 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /docs/assets/jobs/jsonreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "writer": { 11 | "name": "streamwriter", 12 | "parameter": { 13 | "print": "true" 14 | } 15 | }, 16 | "reader": { 17 | "name": "jsonfilereader", 18 | "parameter": { 19 | "path": [ 20 | "/tmp/test*.json" 21 | ], 22 | "column": [ 23 | { 24 | "index": "$.id", 25 | "type": "long" 26 | }, 27 | { 28 | "index": "$.name", 29 | "type": "string" 30 | }, 31 | { 32 | "index": "$.age", 33 | "type": "long" 34 | }, 35 | { 36 | "index": "$.score.math", 37 | "type": "double" 38 | }, 39 | { 40 | "index": "$.score.english", 41 | "type": "double" 42 | }, 43 | { 44 | "index": "$.pubdate", 45 | "type": "date" 46 | }, 47 | { 48 | "type": "string", 49 | "value": "constant string" 50 | } 51 | ] 52 | } 53 | } 54 | } 55 | } 56 | } 57 | -------------------------------------------------------------------------------- /docs/assets/jobs/kafka2stream.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1 6 | } 7 | }, 8 | "content": [ 9 | { 10 | "writer": { 11 | "name": "streamwriter", 12 | "parameter": { 13 | "print": true 14 | } 15 | }, 16 | "reader": { 17 | "name": "kafkareader", 18 | "parameter": { 19 | "brokerList": "wgzhao-laptop:9092", 20 | "topic": "test-1", 21 | "column": [ 22 | "col1", 23 | "col3", 24 | "col0", 25 | "col9" 26 | ], 27 | "missingKeyValue": "\\N", 28 | "properties": { 29 | "auto.offset.reset": "earliest" 30 | } 31 | } 32 | } 33 | } 34 | ] 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /docs/assets/jobs/kudureader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 3 6 | }, 7 | "errorLimit": { 8 | "record": 0, 9 | "percentage": 0.02 10 | } 11 | }, 12 | "content": { 13 | "reader": { 14 | "name": "kudureader", 15 | "parameter": { 16 | "masterAddress": "localhost:7051,localhost:7151,localhost:7251", 17 | "table": "users", 18 | "splitPk": "user_id", 19 | "lowerBound": 1, 20 | "upperBound": 100, 21 | "readTimeout": 5, 22 | "scanTimeout": 10 23 | } 24 | }, 25 | "writer": { 26 | "name": "streamwriter", 27 | "parameter": { 28 | "print": true 29 | } 30 | } 31 | } 32 | } 33 | } 34 | 
-------------------------------------------------------------------------------- /docs/assets/jobs/kuduwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "streamreader", 12 | "parameter": { 13 | "column": [ 14 | { 15 | "random": "1,1000", 16 | "type": "long" 17 | }, 18 | { 19 | "random": "1,10", 20 | "type": "string" 21 | }, 22 | { 23 | "random": "1000,50000", 24 | "type": "double" 25 | } 26 | ], 27 | "sliceRecordCount": 1000 28 | } 29 | }, 30 | "writer": { 31 | "name": "kuduwriter", 32 | "parameter": { 33 | "masterAddress": "127.0.0.1:7051,127.0.0.1:7151,127.0.0.1:7251", 34 | "timeout": 60, 35 | "table": "users", 36 | "writeMode": "upsert", 37 | "column": [ "user_id", "user_name", "salary"], 38 | "batchSize": 1024, 39 | "bufferSize": 2048, 40 | "skipFail": false, 41 | "encoding": "UTF-8" 42 | } 43 | } 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /docs/assets/jobs/mongoreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "mongodbreader", 12 | "parameter": { 13 | "connection": { 14 | "address": [ 15 | "127.0.0.1:27017" 16 | ], 17 | "database": "tag_per_data", 18 | "collection": "tag_data", 19 | "authDb": "admin" 20 | }, 21 | "username": "", 22 | "password": "", 23 | "column": [ 24 | "unique_id", 25 | "sid", 26 | "user_id", 27 | "auction_id", 28 | "content_type", 29 | "pool_type", 30 | "frontcat_id", 31 | "catagoryid", 32 | "gmt_create", 33 | "taglist", 34 | "property", 35 | "scorea", 36 | "scoreb", 37 | "scorec" 38 | ] 39 | } 40 | }, 41 | "writer": { 42 | "name": "streamwriter", 43 | "parameter": { 44 | "print": "true" 45 | } 46 | } 47 | } 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /docs/assets/jobs/mysqlreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 3, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "mysqlreader", 12 | "parameter": { 13 | "username": "root", 14 | "password": "root", 15 | "column": [ 16 | "*" 17 | ], 18 | "connection": { 19 | "table": [ 20 | "addax_reader" 21 | ], 22 | "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/test", 23 | "driver": "com.mysql.jdbc.Driver" 24 | } 25 | } 26 | }, 27 | "writer": { 28 | "name": "streamwriter", 29 | "parameter": { 30 | "print": true 31 | } 32 | } 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /docs/assets/jobs/oraclereader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": 1048576, 6 | "channel": 1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "oraclereader", 12 | "parameter": { 13 | "username": "root", 14 | "password": "root", 15 | "column": [ 16 | "id", 17 | "name" 18 | ], 19 | "splitPk": "db_id", 20 | "connection": { 21 | "table": [ 22 | "table" 23 | ], 24 | "jdbcUrl": "jdbc:oracle:thin:@:PORT:" 25 | } 26 | } 27 | }, 28 | "writer": { 29 | "name": "streamwriter", 30 | "parameter": { 31 | "print": true 32 | } 33 | } 34 | } 35 | } 36 | } 37 | 
-------------------------------------------------------------------------------- /docs/assets/jobs/paimonwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 3 6 | }, 7 | "errorLimit": { 8 | "record": 0, 9 | "percentage": 0 10 | } 11 | }, 12 | "content": [ 13 | { 14 | "reader": { 15 | "name": "rdbmsreader", 16 | "parameter": { 17 | "username": "root", 18 | "password": "root", 19 | "column": [ 20 | "*" 21 | ], 22 | "connection": [ 23 | { 24 | "querySql": [ 25 | "select 1+0 id ,'test1' as name" 26 | ], 27 | "jdbcUrl": ["jdbc:mysql://localhost:3306/ruoyi_vue_camunda?allowPublicKeyRetrieval=true"] 28 | } 29 | ], 30 | "fetchSize": 1024 31 | } 32 | }, 33 | "writer": { 34 | "name": "paimonwriter", 35 | "parameter": { 36 | "dbName": "test", 37 | "tableName": "test2", 38 | "writeMode": "truncate", 39 | "paimonConfig": { 40 | "warehouse": "file:///g:/paimon", 41 | "metastore": "filesystem" 42 | } 43 | } 44 | } 45 | } 46 | ] 47 | } 48 | }
-------------------------------------------------------------------------------- /docs/assets/jobs/pgreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": -1, 6 | "channel": 1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "postgresqlreader", 12 | "parameter": { 13 | "username": "pgtest", 14 | "password": "pgtest", 15 | "column": [ 16 | "*" 17 | ], 18 | "connection": { 19 | "table": [ 20 | "addax_tbl" 21 | ], 22 | "jdbcUrl": "jdbc:postgresql://127.0.0.1:5432/pgtest" 23 | } 24 | } 25 | }, 26 | "writer": { 27 | "name": "streamwriter", 28 | "parameter": { 29 | "print": true 30 | } 31 | } 32 | } 33 | } 34 | } 35 |
-------------------------------------------------------------------------------- /docs/assets/jobs/pgwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": -1, 6 | "channel": 1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "postgresqlreader", 12 | "parameter": { 13 | "username": "pgtest", 14 | "password": "pgtest", 15 | "column": [ 16 | "*" 17 | ], 18 | "connection": { 19 | "table": [ 20 | "addax_tbl" 21 | ], 22 | "jdbcUrl": "jdbc:postgresql://localhost:5432/pgtest" 23 | } 24 | } 25 | }, 26 | "writer": { 27 | "name": "postgresqlwriter", 28 | "parameter": { 29 | "column": [ 30 | "*" 31 | ], 32 | "preSql": [ 33 | "truncate table @table" 34 | ], 35 | "connection": { 36 | "jdbcUrl": "jdbc:postgresql://127.0.0.1:5432/pgtest", 37 | "table": [ 38 | "addax_tbl1" 39 | ] 40 | }, 41 | "username": "pgtest", 42 | "password": "pgtest", 43 | "writeMode": "insert" 44 | } 45 | } 46 | } 47 | } 48 | } 49 |
-------------------------------------------------------------------------------- /docs/assets/jobs/quickstart.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": { 4 | "reader": { 5 | "name": "streamreader", 6 | "parameter": { 7 | "column": [ 8 | { 9 | "value": "addax", 10 | "type": "string" 11 | }, 12 | { 13 | "value": 19890604, 14 | "type": "long" 15 | }, 16 | { 17 | "value": "1989-06-04 00:00:00", 18 | "type": "date" 19 | }, 20 | { 21 | "value": true, 22 | "type": "bool" 23 | } 24 | ], 25 | "sliceRecordCount": 10 26 | } 27 | }, 28 | "writer": { 29 | "name": "streamwriter", 30 | "parameter": { 31 | "print": true 32 | } 33 | } 34 | } 35 | } 36 | } 37 |
-------------------------------------------------------------------------------- /docs/assets/jobs/rdbmsreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": 1048576, 6 | "channel": 1 7 | }, 8 | "errorLimit": { 9 | "record": 0, 10 | "percentage": 0.02 11 | } 12 | }, 13 | "content": { 14 | "reader": { 15 | "name": "rdbmsreader", 16 | "parameter": { 17 | "username": "hive", 18 | "password": "", 19 | "column": [ 20 | "*" 21 | ], 22 | "driver": "io.prestosql.jdbc.PrestoDriver", 23 | "connection": { 24 | "table": [ 25 | "default.table" 26 | ], 27 | "jdbcUrl": "jdbc:presto://127.0.0.1:8080/hive" 28 | }, 29 | "fetchSize": 1024, 30 | "where": "1 = 1" 31 | } 32 | }, 33 | "writer": { 34 | "name": "streamwriter", 35 | "parameter": { 36 | "print": true 37 | } 38 | } 39 | } 40 | } 41 | } 42 | -------------------------------------------------------------------------------- /docs/assets/jobs/redisreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": { 4 | "reader": { 5 | "name": "redisreader", 6 | "parameter": { 7 | "connection": { 8 | "uri": ["tcp://127.0.0.1:6379", "file:///data/dump.rdb", "http://localhost/dump.rdb"], 9 | "auth": "password" 10 | }, 11 | "include": [ 12 | "^user" 13 | ], 14 | "exclude": [ 15 | "^password" 16 | ], 17 | "db": [ 18 | 0, 19 | 1 20 | ] 21 | } 22 | }, 23 | "writer": { 24 | "name": "rediswriter", 25 | "parameter": { 26 | "connection": { 27 | "uri": "tcp://127.0.0.1:6379", 28 | "auth": "123456" 29 | }, 30 | "timeout": 60000 31 | } 32 | } 33 | }, 34 | "setting": { 35 | "speed": { 36 | "channel": 1 37 | } 38 | } 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /docs/assets/jobs/rediswriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": { 4 | "reader": { 5 | "name": "redisreader", 6 | "parameter": { 7 | "connection": [ 8 | { 9 | "uri": "tcp://127.0.0.1:7003" 10 | } 11 | ] 12 | } 13 | }, 14 | "writer": { 15 | "name": "rediswriter", 16 | "parameter": { 17 | "connection": { 18 | "uri": "tcp://127.0.0.1:6379", 19 | "auth": "123456" 20 | }, 21 | "redisCluster": false, 22 | "flushDB": false 23 | } 24 | } 25 | }, 26 | "setting": { 27 | "speed": { 28 | "channel": 1, 29 | "bytes": -1 30 | } 31 | } 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /docs/assets/jobs/s3reader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | }, 8 | "errorLimit": { 9 | "record": 0, 10 | "percentage": 0.02 11 | } 12 | }, 13 | "content": { 14 | "reader": { 15 | "name": "s3reader", 16 | "parameter": { 17 | "endpoint": "https://s3.amazonaws.com", 18 | "accessId": "xxxxxxxxxxxx", 19 | "accessKey": "xxxxxxxxxxxxxxxxxxxxxxx", 20 | "bucket": "test", 21 | "object": [ 22 | "1.csv", 23 | "aa.csv", 24 | "upload_*.csv", 25 | "bb_??.csv" 26 | ], 27 | "column": [ 28 | "*" 29 | ], 30 | "region": "ap-northeast-1", 31 | "fileFormat": "csv", 32 | "fieldDelimiter": "," 33 | } 34 | }, 35 | "writer": { 36 | "name": "streamwriter", 37 | "parameter": { 38 | "print": true 39 | } 40 | } 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /docs/assets/jobs/sqlitereader.json: 
-------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 3 6 | }, 7 | "errorLimit": { 8 | "record": 0, 9 | "percentage": 0.02 10 | } 11 | }, 12 | "content": { 13 | "reader": { 14 | "name": "sqlitereader", 15 | "parameter": { 16 | "username": "fakeuser", 17 | "password": "", 18 | "column": [ 19 | "*" 20 | ], 21 | "connection": { 22 | "jdbcUrl": "jdbc:sqlite:/tmp/test.sqlite3", 23 | "table": [ 24 | "test" 25 | ] 26 | } 27 | } 28 | }, 29 | "writer": { 30 | "name": "streamwriter", 31 | "parameter": { 32 | "print": true 33 | } 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /docs/assets/jobs/sqlserverreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "byte": -1, 6 | "channel": 1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "sqlserverreader", 12 | "parameter": { 13 | "username": "root", 14 | "password": "root", 15 | "column": [ 16 | "*" 17 | ], 18 | "splitPk": "db_id", 19 | "connection": { 20 | "table": [ 21 | "table" 22 | ], 23 | "jdbcUrl": "jdbc:sqlserver://localhost:3433;DatabaseName=dbname" 24 | } 25 | } 26 | }, 27 | "writer": { 28 | "name": "streamwriter", 29 | "parameter": { 30 | "print": true, 31 | "encoding": "UTF-8" 32 | } 33 | } 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /docs/assets/jobs/sqlserverwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 1, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": {}, 11 | "writer": { 12 | "name": "sqlserverwriter", 13 | "parameter": { 14 | "username": "root", 15 | "password": "root", 16 | "column": [ 17 | "db_id", 18 | "db_type", 19 | "db_ip", 20 | "db_port", 21 | "db_role", 22 | "db_name", 23 | "db_username", 24 | "db_password", 25 | "db_modify_time", 26 | "db_modify_user", 27 | "db_description", 28 | "db_tddl_info" 29 | ], 30 | "connection": { 31 | "table": [ 32 | "db_info_for_writer" 33 | ], 34 | "jdbcUrl": "jdbc:sqlserver://[HOST_NAME]:PORT;DatabaseName=[DATABASE_NAME]" 35 | }, 36 | "preSql": [ 37 | "delete from @table where db_id = -1;" 38 | ], 39 | "postSql": [ 40 | "update @table set db_modify_time = now() where db_id = 1;" 41 | ] 42 | } 43 | } 44 | } 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /docs/assets/jobs/streamreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "reader": { 3 | "name": "streamreader", 4 | "parameter": { 5 | "column": [ 6 | { 7 | "value": "unique_id", 8 | "type": "string" 9 | }, 10 | { 11 | "value": "1989-06-04 08:12:13", 12 | "type": "date", 13 | "dateFormat": "yyyy-MM-dd HH:mm:ss" 14 | }, 15 | { 16 | "value": 1984, 17 | "type": "long" 18 | }, 19 | { 20 | "value": 1989.64, 21 | "type": "double" 22 | }, 23 | { 24 | "value": true, 25 | "type": "bool" 26 | }, 27 | { 28 | "value": "a long text", 29 | "type": "bytes" 30 | } 31 | ], 32 | "sliceRecordCount": 10 33 | } 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /docs/assets/jobs/sybasereader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "content": [ 4 | { 5 | "reader": { 6 | "name": "sybasereader", 7 | 
"parameter": { 8 | "column": [ 9 | "*" 10 | ], 11 | "connection": { 12 | "jdbcUrl": "jdbc:sybase:Tds:127.0.0.1:5000/master", 13 | "table": [ 14 | "dbo.ijdbc_function_escapes" 15 | ] 16 | }, 17 | "username": "sa", 18 | "password": "password" 19 | } 20 | }, 21 | "writer": { 22 | "name": "streamwriter", 23 | "parameter": { 24 | "print": "true" 25 | } 26 | } 27 | } 28 | ], 29 | "setting": { 30 | "speed": { 31 | "bytes": -1, 32 | "channel": 1 33 | } 34 | } 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /docs/assets/jobs/tdenginereader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 3 6 | }, 7 | "errorLimit": { 8 | "record": 0, 9 | "percentage": 0.02 10 | } 11 | }, 12 | "content": { 13 | "reader": { 14 | "name": "tdenginereader", 15 | "parameter": { 16 | "username": "root", 17 | "password": "taosdata", 18 | "beginDateTime": "2017-07-14 10:40:00", 19 | "endDateTime": "2017-08-14 10:40:00", 20 | "splitInterval": "1d", 21 | "connection": { 22 | "jdbcUrl": "jdbc:TAOS://127.0.0.1:6030/test", 23 | "querySql": [ 24 | "select * from test.meters where ts <'2017-07-14 10:40:02' and loc='beijing' limit 10" 25 | ] 26 | } 27 | } 28 | }, 29 | "writer": { 30 | "name": "streamwriter", 31 | "parameter": { 32 | "print": true 33 | } 34 | } 35 | } 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /docs/assets/jobs/txtreader.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 2, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "txtfilereader", 12 | "parameter": { 13 | "path": [ 14 | "/tmp/data" 15 | ], 16 | "encoding": "UTF-8", 17 | "column": [ 18 | { 19 | "index": 0, 20 | "type": "long" 21 | }, 22 | { 23 | "index": 1, 24 | "type": "boolean" 25 | }, 26 | { 27 | "index": 2, 28 | "type": "double" 29 | }, 30 | { 31 | "index": 3, 32 | "type": "string" 33 | }, 34 | { 35 | "index": 4, 36 | "type": "date", 37 | "format": "yyyy.MM.dd" 38 | } 39 | ], 40 | "fieldDelimiter": "," 41 | } 42 | }, 43 | "writer": { 44 | "name": "txtfilewriter", 45 | "parameter": { 46 | "path": "/tmp/result", 47 | "fileName": "txt_", 48 | "writeMode": "truncate", 49 | "format": "yyyy-MM-dd" 50 | } 51 | } 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /docs/assets/jobs/txtwriter.json: -------------------------------------------------------------------------------- 1 | { 2 | "job": { 3 | "setting": { 4 | "speed": { 5 | "channel": 2, 6 | "bytes": -1 7 | } 8 | }, 9 | "content": { 10 | "reader": { 11 | "name": "txtfilereader", 12 | "parameter": { 13 | "path": [ 14 | "/tmp/data" 15 | ], 16 | "encoding": "UTF-8", 17 | "column": [ 18 | { 19 | "index": 0, 20 | "type": "long" 21 | }, 22 | { 23 | "index": 1, 24 | "type": "boolean" 25 | }, 26 | { 27 | "index": 2, 28 | "type": "double" 29 | }, 30 | { 31 | "index": 3, 32 | "type": "string" 33 | }, 34 | { 35 | "index": 4, 36 | "type": "date", 37 | "format": "yyyy.MM.dd" 38 | } 39 | ], 40 | "fieldDelimiter": "," 41 | } 42 | }, 43 | "writer": { 44 | "name": "txtfilewriter", 45 | "parameter": { 46 | "path": "/tmp/result", 47 | "fileName": "luohw", 48 | "writeMode": "truncate", 49 | "dateFormat": "yyyy-MM-dd" 50 | } 51 | } 52 | } 53 | } 54 | } 55 | -------------------------------------------------------------------------------- 
/docs/assets/sql/clickhouse.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE ck_addax ( 2 | c_int8 Int8, 3 | c_int16 Int16, 4 | c_int32 Int32, 5 | c_int64 Int64, 6 | c_uint8 UInt8, 7 | c_uint16 UInt16, 8 | c_uint32 UInt32, 9 | c_uint64 UInt64, 10 | c_float32 Float32, 11 | c_float64 Float64, 12 | c_decimal Decimal(38,10), 13 | c_string String, 14 | c_fixstr FixedString(36), 15 | c_uuid UUID, 16 | c_date Date, 17 | c_datetime DateTime('Asia/Chongqing'), 18 | c_datetime64 DateTime64(3, 'Asia/Chongqing'), 19 | c_enum Enum('hello' = 1, 'world'=2) 20 | ) ENGINE = MergeTree() ORDER BY (c_int8, c_int16) SETTINGS index_granularity = 8192; 21 | 22 | insert into ck_addax values( 23 | 127, 24 | -32768, 25 | 2147483647, 26 | -9223372036854775808, 27 | 255, 28 | 65535, 29 | 4294967295, 30 | 18446744073709551615, 31 | 0.9999998, 32 | 0.999999999999998, 33 | 1234567891234567891234567891.1234567891, 34 | 'Hello String', 35 | '2c:16:db:a3:3a:4f', 36 | '5F042A36-5B0C-4F71-ADFD-4DF4FCA1B863', 37 | '2021-01-01', 38 | '2021-01-01 11:22:33', 39 | '2021-01-01 10:33:23.123', 40 | 'hello' 41 | ); -------------------------------------------------------------------------------- /docs/assets/sql/es.json: -------------------------------------------------------------------------------- 1 | { 2 | "took": 14, 3 | "timed_out": false, 4 | "_shards": { 5 | "total": 1, 6 | "successful": 1, 7 | "skipped": 0, 8 | "failed": 0 9 | }, 10 | "hits": { 11 | "total": 2, 12 | "max_score": 1, 13 | "hits": [ 14 | { 15 | "_index": "test-1", 16 | "_type": "default", 17 | "_id": "38", 18 | "_score": 1, 19 | "_source": { 20 | "col_date": "2017-05-25T11:22:33.000+08:00", 21 | "col_integer": 19890604, 22 | "col_keyword": "hello world", 23 | "col_ip": "1.1.1.1", 24 | "col_text": "long text", 25 | "col_double": 19890604, 26 | "col_long": 19890604, 27 | "col_geo_point": "41.12,-71.34" 28 | } 29 | }, 30 | { 31 | "_index": "test-1", 32 | "_type": "default", 33 | "_id": "103", 34 | "_score": 1, 35 | "_source": { 36 | "col_date": "2017-05-25T11:22:33.000+08:00", 37 | "col_integer": 19890604, 38 | "col_keyword": "hello world", 39 | "col_ip": "1.1.1.1", 40 | "col_text": "long text", 41 | "col_double": 19890604, 42 | "col_long": 19890604, 43 | "col_geo_point": "41.12,-71.34" 44 | } 45 | } 46 | ] 47 | } 48 | } -------------------------------------------------------------------------------- /docs/assets/sql/gp.sql: -------------------------------------------------------------------------------- 1 | create table if not exists addax_tbl 2 | ( 3 | c_bigint 4 | bigint, 5 | c_bit 6 | bit(3), 7 | c_bool boolean, 8 | c_byte bytea, 9 | c_char char(10), 10 | c_varchar varchar(20), 11 | c_date date, 12 | c_double float8, 13 | c_int integer, 14 | c_json json, 15 | c_number decimal(8, 3), 16 | c_real real, 17 | c_small smallint, 18 | c_text text, 19 | c_ts timestamp, 20 | c_uuid uuid, 21 | c_xml xml, 22 | c_money money, 23 | c_inet inet, 24 | c_cidr cidr, 25 | c_macaddr macaddr 26 | ); 27 | insert into addax_tbl 28 | values (999988887777, 29 | B'101', 30 | TRUE, 31 | '\xDEADBEEF', 32 | 'hello', 33 | 'hello, world', 34 | '2021-01-04', 35 | 999888.9972, 36 | 9876542, 37 | '{"bar": "baz", "balance": 7.77, "active": false}'::json, 38 | 12345.123, 39 | 123.123, 40 | 126, 41 | 'this is a long text ', 42 | '2020-01-04 12:13:14', 43 | 'A0EEBC99-9C0B-4EF8-BB6D-6BB9BD380A11'::uuid, 44 | 'bar'::xml, 45 | '52093.89'::money, 46 | '192.168.1.1'::inet, 47 | '192.168.1/24'::cidr, 48 | '08002b:010203'::macaddr); 
-------------------------------------------------------------------------------- /docs/assets/sql/hive.sql: -------------------------------------------------------------------------------- 1 | create table default.hive_reader 2 | ( 3 | col1 int, 4 | col2 string, 5 | col3 timestamp 6 | ) 7 | stored as orc; 8 | 9 | 10 | insert into hive_reader values(1, 'hello', current_timestamp()), (2, 'world', current_timestamp());
-------------------------------------------------------------------------------- /docs/assets/sql/kudu.sql: -------------------------------------------------------------------------------- 1 | CREATE TABLE kudu.default.users ( 2 | user_id int WITH (primary_key = true), 3 | user_name varchar with (nullable=true), 4 | age int with (nullable=true), 5 | salary double with (nullable=true), 6 | longtitue decimal(18,6) with (nullable=true), 7 | latitude decimal(18,6) with (nullable=true), 8 | p decimal(21,20) with (nullable=true), 9 | mtime timestamp with (nullable=true) 10 | ) WITH ( 11 | partition_by_hash_columns = ARRAY['user_id'], 12 | partition_by_hash_buckets = 2 13 | ); 14 | 15 | insert into kudu.default.users 16 | values 17 | (1, cast('wgzhao' as varchar), 18, cast(18888.88 as double), 18 | cast(123.282424 as decimal(18,6)), cast(23.123456 as decimal(18,6)), 19 | cast(1.12345678912345678912 as decimal(21,20)), 20 | timestamp '2021-01-10 14:40:41'), 21 | (2, cast('anglina' as varchar), 16, cast(23456.12 as double), 22 | cast(33.192123 as decimal(18,6)), cast(56.654321 as decimal(18,6)), 23 | cast(1.12345678912345678912 as decimal(21,20)), 24 | timestamp '2021-01-10 03:40:41'); 25 | -- ONLY insert primary key value 26 | insert into kudu.default.users(user_id) values (3);
-------------------------------------------------------------------------------- /docs/assets/sql/oracle_geom.sql: -------------------------------------------------------------------------------- 1 | -- create demo table 2 | CREATE TABLE spatial_data(id int, name varchar2(50), geo SDO_GEOMETRY); 3 | -- insert some records 4 | INSERT INTO HR.spatial_data (id, name, geo) VALUES ( 5 | 1, 6 | 'Point 1', 7 | MDSYS.SDO_GEOMETRY( 8 | 2001, -- two-dimensional point type 9 | NULL, 10 | MDSYS.SDO_POINT_TYPE(1, 1, NULL), -- X and Y coordinates are both 1 11 | NULL, 12 | NULL 13 | ) 14 | ); 15 | 16 | INSERT INTO HR.spatial_data (id, name, geo) VALUES ( 17 | 2, 18 | 'Line 1', 19 | MDSYS.SDO_GEOMETRY( 20 | 2002, -- two-dimensional line type 21 | NULL, 22 | NULL, 23 | MDSYS.SDO_ELEM_INFO_ARRAY(1,2,1), -- a line segment made up of two points 24 | MDSYS.SDO_ORDINATE_ARRAY(1,1, 2,2) -- the segment starts at (1,1) and ends at (2,2) 25 | ) 26 | ); 27 | 28 | INSERT INTO HR.spatial_data (id, name, geo) VALUES ( 29 | 3, 30 | 'Polygon 1', 31 | MDSYS.SDO_GEOMETRY( 32 | 2003, -- two-dimensional polygon type 33 | NULL, 34 | NULL, 35 | MDSYS.SDO_ELEM_INFO_ARRAY(1,1003,1), -- a triangle made up of three points 36 | MDSYS.SDO_ORDINATE_ARRAY(1,1, 2,2, 3,1) -- the triangle's vertices are (1,1), (2,2) and (3,1) 37 | ) 38 | ); 39 |
-------------------------------------------------------------------------------- /docs/assets/sql/postgresql.sql: -------------------------------------------------------------------------------- 1 | create table if not exists addax_tbl 2 | ( 3 | c_bigint bigint, 4 | c_bit bit(3), 5 | c_bool boolean, 6 | c_byte bytea, 7 | c_char char(10), 8 | c_varchar varchar(20), 9 | c_date date, 10 | c_double float8, 11 | c_int integer, 12 | c_json json, 13 | c_number decimal(8, 3), 14 | c_real real, 15 | c_small smallint, 16 | c_text text, 17 | c_ts timestamp, 18 | c_uuid uuid, 19 | c_xml xml, 20 | c_money money, 21 | c_inet inet, 22 | c_cidr cidr, 23 | c_macaddr macaddr 24 | ); 25 | 26 | insert into addax_tbl 27 | values (999988887777, 28 | b'101', 29 | TRUE, 30 | '\xDEADBEEF', 31 | 'hello', 32 | 'hello, world', 33 | '2021-01-04', 34 | 999888.9972, 35 | 9876542, 36 | '{"bar": "baz", "balance": 7.77, "active": false}'::json, 37 | 12345.123, 38 | 123.123, 39 | 126, 40 | 'this is a long text ', 41 | '2020-01-04 12:13:14', 42 | 'A0EEBC99-9C0B-4EF8-BB6D-6BB9BD380A11'::uuid, 43 | 'bar'::xml, 44 | '52093.89'::money, 45 | '192.168.1.1'::inet, 46 | '192.168.1/24'::cidr, 47 | '08002b:010203'::macaddr);
-------------------------------------------------------------------------------- /docs/encrypt_password.md: -------------------------------------------------------------------------------- 1 | # Encrypting passwords in the configuration file 2 | 3 | By default, passwords in the configuration file are stored in plain text, which poses a certain security risk. Starting with version `4.0.9`, we added the ability to encrypt the passwords in configuration files, and we provide a `shell` script, `encrypt_password.sh`, to help you encrypt them. 4 | 5 | Suppose your original password is `123456` and you want the configuration file to carry an encrypted password. First run the following command: 6 | 7 | ```shell 8 | $ bin/encrypt_password.sh 123456 9 | The encrypt string is : '${enc:tFd05jnm1mSq+PEK9t/Rgg==}', you can paste it to json file. 10 | ``` 11 | 12 | In the output above, `tFd05jnm1mSq+PEK9t/Rgg==` is the ciphertext of `123456`; the `${enc:` prefix is what tells `addax` that this is an encrypted string. 13 | 14 | Replace the password `123456` in your configuration file with the string `${enc:tFd05jnm1mSq+PEK9t/Rgg==}` and you are done.
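A minimal sketch of the result, assuming a `mysqlreader` job; the reader name, credentials, and connection details below are placeholders, and only the `${enc:...}` value comes from the example above:

```json
{
  "reader": {
    "name": "mysqlreader",
    "parameter": {
      "username": "root",
      "password": "${enc:tFd05jnm1mSq+PEK9t/Rgg==}",
      "column": ["*"],
      "connection": {
        "table": ["addax_reader"],
        "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/test"
      }
    }
  }
}
```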
-------------------------------------------------------------------------------- /docs/images/addax-flowchart.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/addax-flowchart.png
-------------------------------------------------------------------------------- /docs/images/addax_why_new.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/addax_why_new.png
-------------------------------------------------------------------------------- /docs/images/debug-1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/debug-1.png
-------------------------------------------------------------------------------- /docs/images/debug-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/debug-2.png
-------------------------------------------------------------------------------- /docs/images/debug-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/debug-3.png
-------------------------------------------------------------------------------- /docs/images/debug-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/debug-4.png
-------------------------------------------------------------------------------- /docs/images/debug-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/debug-5.png
-------------------------------------------------------------------------------- /docs/images/debug-6.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/debug-6.png
-------------------------------------------------------------------------------- /docs/images/debug-7.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/debug-7.png
-------------------------------------------------------------------------------- /docs/images/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/favicon.ico
-------------------------------------------------------------------------------- /docs/images/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/logo.png
-------------------------------------------------------------------------------- /docs/images/logos/greenplum.jpeg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/logos/greenplum.jpeg
-------------------------------------------------------------------------------- /docs/images/logos/sap.svg: -------------------------------------------------------------------------------- 1 | [SVG markup not captured in this listing]
-------------------------------------------------------------------------------- /docs/images/supported_databases.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/docs/images/supported_databases.png
-------------------------------------------------------------------------------- /docs/quickstart.md: -------------------------------------------------------------------------------- 1 | # Quick start 2 | 3 | ## Install Addax 4 | 5 | ### Use the Docker image 6 | 7 | You can use the Docker image directly by running the following command 8 | 9 | ```shell 10 | docker run -it --rm quay.io/wgzhao/addax:latest /opt/addax/bin/addax.sh /opt/addax/job/job.json 11 | ``` 12 | 13 | ### One-step installation 14 | 15 | If you do not want to compile, you can run the following command for a one-step installation (currently only Linux and macOS are supported) 16 | 17 | ```shell 18 | /bin/bash -c "$(curl -fsSL https://raw.githubusercontent.com/wgzhao/Addax/master/install.sh)" 19 | ``` 20 | 21 | On macOS it installs into `/usr/local/addax` by default; on Linux it installs into `/opt/addax` 22 | 23 | ### Compile and install from source 24 | 25 | You can also choose to compile and install from source; the basic steps are as follows: 26 | 27 | ```shell 28 | git clone https://github.com/wgzhao/addax.git 29 | cd addax 30 | mvn clean package 31 | mvn package assembly:single 32 | cd target/addax/addax- 33 | ``` 34 | 35 | ## Your first collection job 36 | 37 | To collect data with `Addax`, you only need to write a job file in JSON format. The following simple configuration reads a fixed set of records from memory and prints them out; it is saved in `job/test.json` 38 | 39 | === "job/test.json" 40 | 41 | ```json 42 | --8<-- "jobs/quickstart.json" 43 | ``` 44 | 45 | Save the above file as `job/test.json` 46 | 47 | Then run the following command: 48 | 49 | ```shell 50 | bin/addax.sh job/test.json 51 | ``` 52 | 53 | If nothing goes wrong, you should see output similar to this 54 | 55 | ```shell 56 | --8<-- "output/quickstart.txt" 57 | ``` 58 |
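The `--8<--` directive above is a MkDocs snippet include resolved at documentation build time. For readers of this flattened listing, the included `jobs/quickstart.json` corresponds to `docs/assets/jobs/quickstart.json` shown earlier in this repository, reproduced here so the walkthrough can be read standalone:

```json
{
  "job": {
    "content": {
      "reader": {
        "name": "streamreader",
        "parameter": {
          "column": [
            { "value": "addax", "type": "string" },
            { "value": 19890604, "type": "long" },
            { "value": "1989-06-04 00:00:00", "type": "date" },
            { "value": true, "type": "bool" }
          ],
          "sliceRecordCount": 10
        }
      },
      "writer": {
        "name": "streamwriter",
        "parameter": { "print": true }
      }
    }
  }
}
```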
-------------------------------------------------------------------------------- /docs/reader/accessreader.md: -------------------------------------------------------------------------------- 1 | # Access Reader 2 | 3 | AccessReader implements reading data from an [Access](https://en.wikipedia.org/wiki/Microsoft_Access) database. It is built on top of the [Addax RDBMS Reader](../rdbmsreader). 4 | 5 | ## Example 6 | 7 | Download the test file [AccessThemeDemo.zip](http://www.databasedev.co.uk/downloads/AccessThemeDemo.zip) and unpack it to get `AccessThemeDemo.mdb`, which contains a `tbl_Users` table. We will sync the data of this table to the terminal. 8 | 9 | The following job configuration reads the table and writes it to the terminal: 10 | 11 | === "job/access2stream.json" 12 | 13 | ```json 14 | --8<-- "jobs/accessreader.json" 15 | ``` 16 | 17 | Save the above configuration as `job/access2stream.json` 18 | 19 | ### Run the collection command 20 | 21 | Run the following command to start collecting data 22 | 23 | ```shell 24 | bin/addax.sh job/access2stream.json 25 | ``` 26 | 27 | ## Parameters 28 | 29 | AccessReader is based on the [RDBMS Reader](../rdbmsreader), so all RDBMS Reader configuration options apply. 30 |
-------------------------------------------------------------------------------- /docs/reader/databendreader.md: -------------------------------------------------------------------------------- 1 | # Databend Reader 2 | 3 | The DatabendReader plugin implements reading data from [Databend](https://databend.rs) 4 | 5 | Note that Databend also speaks a MySQL-client-compatible protocol, so you can read Databend data directly with the [MySQL Reader](../mysqlreader). 6 | 7 | ## Example 8 | 9 | We can start a Databend database as follows 10 | 11 | ```shell 12 | docker run -tid --rm -p 8000:8000 \ 13 | -e QUERY_DEFAULT_USER=databend \ 14 | -e QUERY_DEFAULT_PASSWORD=databend \ 15 | datafuselabs/databend 16 | ``` 17 | 18 | Then create the table to be read (the job below reads `addax_reader`, so the table is created under that name): 19 | 20 | ```sql 21 | create table addax_reader 22 | ( 23 | id int, 24 | name varchar(255), 25 | salary float, 26 | created_at datetime, 27 | updated_at datetime 28 | ); 29 | ``` 30 | 31 | and populate it with the necessary data 32 | 33 | The following job configuration reads the table and writes it to the terminal: 34 | 35 | === "job/databend2stream.json" 36 | 37 | ```json 38 | --8<-- "jobs/databend2stream.json" 39 | ``` 40 | 41 | Save the above configuration as `job/databend2stream.json` 42 | 43 | ### Run the collection command 44 | 45 | Run the following command to start collecting data 46 | 47 | ```shell 48 | bin/addax.sh job/databend2stream.json 49 | ``` 50 | 51 | ## Parameters 52 | 53 | This plugin is based on the [RDBMS Reader](../rdbmsreader), so all RDBMS Reader parameters apply. 54 | 55 | ## Limitations 56 | 57 | None at present
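As a sketch of the MySQL-protocol route mentioned at the top of this page: a `mysqlreader` job pointed at Databend might look like the fragment below. The MySQL-compatible port (`3307` here) is an assumption taken from a default Databend deployment, and the credentials and table are placeholders; verify them against your own setup:

```json
{
  "reader": {
    "name": "mysqlreader",
    "parameter": {
      "username": "databend",
      "password": "databend",
      "column": ["*"],
      "connection": {
        "table": ["addax_reader"],
        "jdbcUrl": "jdbc:mysql://127.0.0.1:3307/default"
      }
    }
  }
}
```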
-------------------------------------------------------------------------------- /docs/reader/hanareader.md: -------------------------------------------------------------------------------- 1 | # HANA Reader 2 | 3 | The HANA Reader plugin implements reading data from SAP HANA 4 | 5 | ## Example 6 | 7 | The following job configuration reads a table and writes it to the terminal: 8 | 9 | === "job/hanareader.json" 10 | 11 | ```json 12 | --8<-- "jobs/hanareader.json" 13 | ``` 14 | 15 | Save the above configuration as `job/hana2stream.json` 16 | 17 | ### Run the collection command 18 | 19 | Run the following command to start collecting data 20 | 21 | ```shell 22 | bin/addax.sh job/hana2stream.json 23 | ``` 24 | 25 | ## Parameters 26 | 27 | This plugin is based on the [RDBMS Reader](../rdbmsreader), so all RDBMS Reader parameters apply. 28 |
-------------------------------------------------------------------------------- /docs/reader/mysqlreader.md: -------------------------------------------------------------------------------- 1 | # MySQL Reader 2 | 3 | The MysqlReader plugin implements reading data from MySQL 4 | 5 | ## Example 6 | 7 | Create the following table in the `test` database of MySQL and insert one record 8 | 9 | ```sql 10 | --8<-- "sql/mysql.sql" 11 | ``` 12 | 13 | The following job configuration reads the table and writes it to the terminal: 14 | 15 | === "job/mysql2stream.json" 16 | 17 | ```json 18 | --8<-- "jobs/mysqlreader.json" 19 | ``` 20 | 21 | Save the above configuration as `job/mysql2stream.json` 22 | 23 | ### Run the collection command 24 | 25 | Run the following command to start collecting data 26 | 27 | ```shell 28 | bin/addax.sh job/mysql2stream.json 29 | ``` 30 | 31 | ## Parameters 32 | 33 | This plugin is based on the [RDBMS Reader](../rdbmsreader), so all RDBMS Reader configuration options apply. 34 | 35 | ### driver 36 | 37 | Addax currently ships with a MySQL JDBC driver of version 8.0 or later, whose class name is `com.mysql.cj.jdbc.Driver` rather than `com.mysql.jdbc.Driver`. If the MySQL server you need to read from is older than `5.6` and you must use the `Connector/J 5.1` driver, take the following steps (a configuration sketch follows these steps): 38 | 39 | **Replace the driver bundled with the plugin** 40 | 41 | `rm -f plugin/reader/mysqlreader/libs/mysql-connector-java-*.jar` 42 | 43 | **Copy the old driver into the plugin directory** 44 | 45 | `cp mysql-connector-java-5.1.48.jar plugin/reader/mysqlreader/libs/` 46 | 47 | **Specify the driver class name** 48 | 49 | In your json file, configure `"driver": "com.mysql.jdbc.Driver"`
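The sketch below shows where the `driver` key sits. Apart from the `driver` entry, it mirrors the bundled `docs/assets/jobs/mysqlreader.json` sample elsewhere in this repository; the host, credentials, and table are placeholders:

```json
{
  "reader": {
    "name": "mysqlreader",
    "parameter": {
      "username": "root",
      "password": "root",
      "column": ["*"],
      "connection": {
        "table": ["addax_reader"],
        "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/test",
        "driver": "com.mysql.jdbc.Driver"
      }
    }
  }
}
```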
-------------------------------------------------------------------------------- /docs/reader/oraclereader.md: -------------------------------------------------------------------------------- 1 | # Oracle Reader 2 | 3 | The Oracle Reader plugin reads data from Oracle. 4 | 5 | ## Configuration example 6 | 7 | Configure a job that extracts data from an Oracle database to the local terminal: 8 | 9 | === "job/oracle2stream.json" 10 | 11 | ```json 12 | --8<-- "jobs/oraclereader.json" 13 | ``` 14 | 15 | ## Parameters 16 | 17 | This plugin is implemented on top of [RDBMS Reader](../rdbmsreader), so all RDBMS Reader configuration items apply. 18 | 19 | 20 | ## Support for the GEOMETRY type 21 | 22 | Starting with Addax `4.0.13`, the Oracle GEOMETRY type is supported experimentally; the plugin converts values of this type into a JSON array string. 23 | 24 | Suppose you have the following table and data: 25 | 26 | ```sql 27 | --8<-- "assets/sql/oracle_geom.sql" 28 | ``` 29 | 30 | Reading this table produces output similar to the following: 31 | 32 | ``` 33 | --8<-- "assets/output/oracle_geom_reader.txt" 34 | ``` 35 | 36 | Note: support for this data type is still experimental; the author's understanding of it is limited and it has not been fully tested, so do not use it in production directly. 37 | -------------------------------------------------------------------------------- /docs/reader/postgresqlreader.md: -------------------------------------------------------------------------------- 1 | # PostgreSQL Reader 2 | 3 | The PostgreSQL Reader plugin reads data from [PostgreSQL](https://postgresql.org). 4 | 5 | ## Example 6 | 7 | Assume the table DDL and insert statements are as follows: 8 | 9 | ```sql 10 | --8<-- "sql/postgresql.sql" 11 | ``` 12 | 13 | Configure a job that extracts data from a PostgreSQL database to the local terminal: 14 | 15 | === "job/postgres2stream.json" 16 | 17 | ```json 18 | --8<-- "jobs/pgreader.json" 19 | ``` 20 | 21 | Save the above configuration file as `job/postgres2stream.json` 22 | 23 | ### Run the collection job 24 | 25 | Execute the following command to start collecting data 26 | 27 | ```shell 28 | bin/addax.sh job/postgres2stream.json 29 | ``` 30 | 31 | ## Parameters 32 | 33 | This plugin is implemented on top of [RDBMS Reader](../rdbmsreader), so all RDBMS Reader configuration items apply. -------------------------------------------------------------------------------- /docs/reader/redisreader.md: -------------------------------------------------------------------------------- 1 | # Redis Reader 2 | 3 | The Redis Reader plugin reads [Redis RDB](https://redis.io) data. 4 | 5 | ## Configuration example 6 | 7 | ```json 8 | --8<-- "jobs/redisreader.json" 9 | ``` 10 | 11 | ## Parameters 12 | 13 | | Option | Required | Default | Description | 14 | | :------ | :------: | ------- | ----------- | 15 | | uri | yes | none | Redis connection URI; multiple local or remote RDB files are supported. For a cluster, list the addresses of all master nodes | 16 | | db | no | none | Index of the db to read; if unset, all dbs are read | 17 | | include | no | none | Keys to include; regular expressions are supported | 18 | | exclude | no | none | Keys to exclude; regular expressions are supported | 19 | 20 | ## Constraints 21 | 22 | 1. Reading directly from any Redis server that does not support the `sync` command is not supported; in that case, read from a backed-up RDB file instead. 23 | 2. For a native Redis Cluster, list the TCP addresses of all master nodes; the `redisreader` plugin automatically dumps the RDB files of all nodes. 24 | 3. Only the `String` data type is parsed; other composite types (`Sets`, `List`, etc.) are ignored. 25 | -------------------------------------------------------------------------------- /docs/reader/sqlitereader.md: -------------------------------------------------------------------------------- 1 | # SQLite Reader 2 | 3 | The SQLite Reader plugin reads SQLite files under a specified directory; it is built on [RDBMS Reader](../rdbmsreader). 4 | 5 | ## Example 6 | 7 | Create a sample file: 8 | 9 | ```shell 10 | $ sqlite3 /tmp/test.sqlite3 11 | SQLite version 3.7.17 2013-05-20 00:56:22 12 | Enter ".help" for instructions 13 | Enter SQL statements terminated with a ";" 14 | sqlite> create table test(id int, name varchar(10), salary double); 15 | sqlite> insert into test values(1,'foo', 12.13),(2,'bar',202.22); 16 | sqlite> .q 17 | ``` 18 | 19 | The following job configuration reads this table and prints it to the terminal: 20 | 21 | === "job/sqlite2stream.json" 22 | 23 | ```json 24 | --8<-- "jobs/sqlitereader.json" 25 | ``` 26 | 27 | Save the above configuration file as `job/sqlite2stream.json` 28 | 29 | ### Run the collection job 30 | 31 | Execute the following command to start collecting data 32 | 33 | ```shell 34 | bin/addax.sh job/sqlite2stream.json 35 | ``` 36 | 37 | ## Parameters 38 | 39 | This plugin is implemented on top of [RDBMS Reader](../rdbmsreader), so all RDBMS Reader configuration items apply. 40 | -------------------------------------------------------------------------------- /docs/reader/sqlserverreader.md: -------------------------------------------------------------------------------- 1 | # SQLServer Reader 2 | 3 | The SqlServerReader plugin reads data from SQLServer. 4 | 5 | ## Configuration example 6 | 7 | Configure a job that extracts data from a SQLServer database to the local terminal: 8 | 9 | === "job/sqlserver2stream.json" 10 | 11 | ```json 12 | --8<-- "jobs/sqlserverreader.json" 13 | ``` 14 | 15 | ## Parameters 16 | 17 | This plugin is implemented on top of [RDBMS Reader](../rdbmsreader), so all RDBMS Reader configuration items apply. 18 | 19 | -------------------------------------------------------------------------------- /docs/reader/sybasereader.md: -------------------------------------------------------------------------------- 1 | # Sybase Reader 2 | 3 | The SybaseReader plugin reads data from [Sybase][1]. 4 | 5 | ## Example 6 | 7 | We can start a Sybase database with a Docker container: 8 | 9 | ```shell 10 | docker run -tid --rm -h dksybase --name sybase -p 5000:5000 ifnazar/sybase_15_7 bash /sybase/start 11 | ``` 12 |
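The original document does not show the table being read, so here is a hypothetical one you could create first (the names and types are illustrative, mirroring the Sybase Writer example):

```sql
-- Illustrative table for the read example; adjust names and types to your data.
create table addax_reader
(
    id         int,
    name       varchar(255),
    salary     float,
    created_at datetime,
    updated_at datetime
);
```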
13 | The following job configuration reads this table and prints it to the terminal: 14 | 15 | === "job/sybasereader.json" 16 | 17 | ```json 18 | --8<-- "jobs/sybasereader.json" 19 | ``` 20 | 21 | Save the above configuration file as `job/sybase2stream.json` 22 | 23 | ### Run the collection job 24 | 25 | Execute the following command to start collecting data 26 | 27 | ```shell 28 | bin/addax.sh job/sybase2stream.json 29 | ``` 30 | 31 | ## Parameters 32 | 33 | This plugin is implemented on top of [RDBMS Reader](../rdbmsreader), so all RDBMS Reader configuration items apply. 34 | 35 | [1]: https://en.wikipedia.org/wiki/Sybase 36 | -------------------------------------------------------------------------------- /docs/writer/accesswriter.md: -------------------------------------------------------------------------------- 1 | # Access Writer 2 | 3 | The Access Writer plugin writes data into [Access](https://en.wikipedia.org/wiki/Microsoft_Access) tables. 4 | 5 | ## Example 6 | 7 | Assume the DDL of the Access table to be written is: 8 | 9 | ```sql 10 | create table tbl_test(name varchar(20), file_size int, file_date date, file_open boolean, memo blob); 11 | ``` 12 | 13 | Here we use data generated in memory and imported into Access. 14 | 15 | === "job/stream2access.json" 16 | 17 | ```json 18 | --8<-- "jobs/accesswriter.json" 19 | ``` 20 | 21 | Save the above configuration file as `job/stream2access.json` 22 | 23 | ### Run the collection job 24 | 25 | Execute the following command to start collecting data 26 | 27 | ```shell 28 | bin/addax.sh job/stream2access.json 29 | ``` 30 | 31 | ## Parameters 32 | 33 | This plugin is implemented on top of [RDBMS Writer](../rdbmswriter), so all RDBMS Writer configuration items apply. 34 | 35 | ## Changelog 36 | 37 | 1. Since version `5.0.1`, when the target Access database file does not exist, it is created automatically with the database format set to `Access 2016`. -------------------------------------------------------------------------------- /docs/writer/clickhousewriter.md: -------------------------------------------------------------------------------- 1 | # ClickHouse Writer 2 | 3 | The ClickHouse Writer plugin writes data to [ClickHouse](https://clickhouse.tech). 4 | 5 | ## Example 6 | 7 | In the following example we read the contents of one ClickHouse table and write them into another table with the same structure, to exercise the data types supported by the plugin. 8 | 9 | ### Table structure and data 10 | 11 | Assume the structure and data of the source table are as follows: 12 | 13 | ```sql 14 | --8<-- "sql/clickhouse.sql" 15 | ``` 16 | 17 | The target table uses the same structure as the source table; its DDL is: 18 | 19 | ```sql 20 | create table ck_addax_writer as ck_addax; 21 | ``` 22 | 23 | ## Configuration 24 | 25 | The configuration file is as follows: 26 | 27 | === "job/clickhouse2clickhouse.json" 28 | 29 | ```json 30 | --8<-- "jobs/clickhousewriter.json" 31 | ``` 32 | 33 | Save the above configuration file as `job/clickhouse2clickhouse.json` 34 | 35 | ### Run the collection job 36 | 37 | Execute the following command to start collecting data 38 | 39 | ```shell 40 | bin/addax.sh job/clickhouse2clickhouse.json 41 | ``` 42 | 43 | ## Parameters 44 | 45 | This plugin is implemented on top of [RDBMS Writer](../rdbmswriter), so all RDBMS Writer configuration items apply. -------------------------------------------------------------------------------- /docs/writer/excelwriter.md: -------------------------------------------------------------------------------- 1 | # Excel Writer 2 | 3 | Excel Writer writes data into an Excel file. 4 | 5 | ## Configuration example 6 | 7 | We assume data is read from memory and written to an Excel file: 8 | 9 | ```json 10 | --8<-- "jobs/excelwriter.json" 11 | ``` 12 | 13 | Save the above as `job/stream2excel.json` 14 | 15 | Run the following command: 16 | 17 | ```shell 18 | bin/addax.sh job/stream2excel.json 19 | ``` 20 | 21 | You should get output similar to the following: 22 | 23 | <details>
<summary>Click to expand</summary> 25 | 26 | ```shell 27 | --8<-- "output/excelwriter.txt" 28 | ``` 29 | </details>
30 | 31 | ## Parameters 32 | 33 | | Option | Required | Type | Default | Description | 34 | | :------- | -------- | ------ | ------- | ----------- | 35 | | path | yes | string | none | Directory where the file is saved; if it does not exist, the plugin tries to create it | 36 | | fileName | yes | string | none | Name of the Excel file to generate; see details below | 37 | | header | no | list | none | Excel header row | 38 | 39 | ### fileName 40 | 41 | If the configured `fileName` has no extension, `.xlsx` is appended automatically; 42 | if the extension is `.xls`, an error is raised, because currently only the newer workbook format, i.e. files with the `.xlsx` extension, is generated. 43 | 44 | ### header 45 | 46 | If `header` is not specified, the generated Excel file has no header row, only data. 47 | Note that the plugin does not check whether the number of header cells matches the number of data columns; they are not required to be equal. 48 | 49 | ## Limitations 50 | 51 | 1. Currently only a single Excel file is generated, with no check on whether the row or column count exceeds Excel's limits 52 | 2. If a file with the same name exists in the target directory, it is currently overwritten; handling of the target directory will be addressed uniformly later 53 | 3. Date values are written with the fixed cell style `yyyy-MM-dd HH:mm:ss`, which cannot be customized 54 | 4. Writing binary data is not supported 55 | -------------------------------------------------------------------------------- /docs/writer/greenplumwriter.md: -------------------------------------------------------------------------------- 1 | # Greenplum Writer 2 | 3 | The Greenplum Writer plugin writes data into a [Greenplum](https://greenplum.org) database using the `COPY FROM` syntax. 4 | 5 | ## Example 6 | 7 | The following configuration reads data from a given Greenplum table and inserts it into another table with the same structure, to exercise the data types supported by the plugin. 8 | 9 | ```sql 10 | --8<-- "sql/gp.sql" 11 | ``` 12 | 13 | The statement creating the target table is: 14 | 15 | ```sql 16 | create table gp_test like addax_tbl; 17 | ``` 18 | 19 | ### Job configuration 20 | 21 | The configuration file is as follows: 22 | 23 | === "job/pg2gp.json" 24 | 25 | ```json 26 | --8<-- "jobs/gpwriter.json" 27 | ``` 28 | 29 | Save the above configuration file as `job/pg2gp.json` 30 | 31 | ### Run the collection job 32 | 33 | Execute the following command to start collecting data 34 | 35 | ```shell 36 | bin/addax.sh job/pg2gp.json 37 | ``` 38 | 39 | ## Parameters 40 | 41 | This plugin is implemented on top of [RDBMS Writer](../rdbmswriter), so all RDBMS Writer configuration items apply.
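For context, `COPY ... FROM STDIN` bulk-streams rows into the table instead of issuing one `INSERT` per record, which is what makes this writer fast. A rough sketch of the kind of statement involved (the table name and delimiter are illustrative; the plugin's exact invocation may differ):

```sql
-- Bulk-load delimiter-separated rows streamed over the connection (illustrative).
COPY gp_test FROM STDIN WITH DELIMITER ',';
```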
-------------------------------------------------------------------------------- /docs/writer/hanawriter.md: -------------------------------------------------------------------------------- 1 | # HANA Writer 2 | 3 | The HANA Writer plugin writes data into [SAP HANA](https://www.sap.com/products/hana.html) tables. 4 | 5 | ## Example 6 | 7 | Assume the DDL of the HANA table to be written is: 8 | 9 | ```sql 10 | create table system.addax_tbl 11 | ( 12 | col1 varchar(200) , 13 | col2 int(4), 14 | col3 date, 15 | col4 boolean, 16 | col5 clob 17 | ); 18 | ``` 19 | 20 | Here we use data generated in memory and imported into HANA. 21 | 22 | === "job/hanawriter.json" 23 | 24 | ```json 25 | --8<-- "jobs/hanawriter.json" 26 | ``` 27 | 28 | Save the above configuration file as `job/hana2stream.json` 29 | 30 | ### Run the collection job 31 | 32 | Execute the following command to start collecting data 33 | 34 | ```shell 35 | bin/addax.sh job/hana2stream.json 36 | ``` 37 | 38 | ## Parameters 39 | 40 | This plugin is implemented on top of [RDBMS Writer](../rdbmswriter), so all RDBMS Writer configuration items apply. 41 | 42 | 43 | -------------------------------------------------------------------------------- /docs/writer/oraclewriter.md: -------------------------------------------------------------------------------- 1 | # Oracle Writer 2 | 3 | The Oracle Writer plugin writes data into Oracle tables. 4 | 5 | ## Configuration example 6 | 7 | Here we use data generated in memory and imported into Oracle. 8 | 9 | === "job/stream2oracle.json" 10 | 11 | ```json 12 | --8<-- "jobs/oraclewriter.json" 13 | ``` 14 | 15 | ## Parameters 16 | 17 | This plugin is implemented on top of [RDBMS Writer](../rdbmswriter), so all RDBMS Writer configuration items apply; it additionally provides some OracleWriter-specific options. 18 | 19 | | Option | Required | Default | Description | 20 | | :-------- | :------: | ------- | ----------- | 21 | | writeMode | no | insert | Write mode; supports insert and update, see below | 22 | 23 | 24 | ### writeMode 25 | 26 | By default, data is written into the Oracle table with the `insert into` syntax. If you want update-if-the-primary-key-exists, insert-otherwise semantics, i.e. Oracle's `merge into` syntax, use the `update` mode. Assuming the table's primary key is `id`, `writeMode` is configured as follows: 27 | 28 | ```json 29 | "writeMode": "update(id)" 30 | ``` 31 | 32 | For a composite unique index, configure it like this: 33 | 34 | ```json 35 | "writeMode": "update(col1, col2)" 36 | ``` 37 | 38 | 39 | 40 | -------------------------------------------------------------------------------- /docs/writer/rediswriter.md: -------------------------------------------------------------------------------- 1 | # Redis Writer 2 | 3 | Redis Writer restores data produced by the Redis dump command and writes it into a target Redis. Redis Cluster, proxy, and standalone deployments are supported. 4 | 5 | ## Configuration example 6 | 7 | ```json 8 | --8<-- "jobs/rediswriter.json" 9 | ``` 10 | 11 | ## Parameters 12 | 13 | | Option | Required | Data type | Default | Description | 14 | | :----------- | :------: | --------- | ------- | ----------- | 15 | | uri | yes | string | none | Redis connection URI | 16 | | redisCluster | no | boolean | false | Whether the target is a Redis Cluster; ignore this option for proxy or standalone deployments | 17 | | flushDB | no | boolean | false | Whether to flush the target Redis before migration | 18 | | batchSize | no | string | 1000 | Number of records per batch; if keys are very large or very small, tune accordingly | 19 | | timeout | no | string | 60000 | Maximum timeout per operation, in milliseconds (ms) | 20 | -------------------------------------------------------------------------------- /docs/writer/sqlitewriter.md: -------------------------------------------------------------------------------- 1 | # SQLite Writer 2 | 3 | The SQLite Writer plugin writes data into a [SQLite](https://sqlite.org/index.html) database. 4 | 5 | ## Example 6 | 7 | Assume the table to be written is: 8 | 9 | ```sql 10 | create table addax_tbl 11 | ( 12 | col1 varchar(20) , 13 | col2 int(4), 14 | col3 datetime, 15 | col4 boolean, 16 | col5 binary 17 | ); 18 | ``` 19 | 20 | Here we use data generated in memory and written to SQLite. 21 | 22 | === "job/stream2sqlite.json" 23 | 24 | ```json 25 | --8<-- "jobs/sqlitewriter.json" 26 | ``` 27 | 28 | Save the above configuration file as `job/stream2sqlite.json` 29 | 30 | ### Run the collection job 31 | 32 | Execute the following command to start collecting data 33 | 34 | ```shell 35 | bin/addax.sh job/stream2sqlite.json 36 | ``` 37 | 38 | ## Parameters 39 | 40 | This plugin is implemented on top of [RDBMS Writer](../rdbmswriter), so all RDBMS Writer configuration items apply. Because SQLite connections need no credentials, the `username` and `password` options required by other database writer plugins are unnecessary here. 41 | 42 | ### writeMode 43 | 44 | - `insert` uses the `insert into` statement 45 | - `replace` uses the `replace into` statement 46 | - `update` uses the `ON DUPLICATE KEY UPDATE` statement 47 | 48 | ## Type conversion 49 | 50 | | Addax internal type | SQLite data type | 51 | | ------------------- | ---------------- | 52 | | Long | integer | 53 | | Double | real | 54 | | String | varchar | 55 | | Date | datetime | 56 | | Boolean | bool | 57 | | Bytes | blob, binary | 58 | -------------------------------------------------------------------------------- /docs/writer/sqlserverwriter.md: -------------------------------------------------------------------------------- 1 | # SQLServer Writer 2 | 3 | The SQLServer Writer plugin writes data into [SQL Server](https://www.microsoft.com/en-us/sql-server/sql-server-downloads) tables. 4 | 5 | ## Configuration example 6 | 7 | Here we use data generated in memory and imported into SQL Server. 8 | 9 | ```json 10 | --8<-- "jobs/sqlserverwriter.json" 11 | ``` 12 | 13 | ## Parameters 14 | 15 | This plugin is implemented on top of [RDBMS Writer](../rdbmswriter), so all RDBMS Writer configuration items apply. 16 | 17 | ### writeMode 18 | 19 | By default, data is written into the SQL Server table with the `insert into` syntax. If you want update-if-the-primary-key-exists, insert-otherwise semantics, i.e. SQL Server's `MERGE INTO` syntax, use the `update` mode. Assuming the table's primary key is `id` 20 | , `writeMode` is configured as follows: 21 | 22 | ```json 23 | { 24 | "writeMode": "update(id)" 25 | } 26 | ``` 27 | 28 | For a composite unique index, configure it like this: 29 | 30 | ```json 31 | { 32 | "writeMode": "update(col1, col2)" 33 | } 34 | ``` 35 | -------------------------------------------------------------------------------- /docs/writer/streamwriter.md: -------------------------------------------------------------------------------- 1 | # Stream Writer 2 | 3 | Stream Writer is a plugin that writes data to memory. It is typically used to print the fetched data to the terminal, to debug how a reader plugin processes data. 4 | 5 |
A typical Stream Writer configuration looks like this: 6 | 7 | ```json 8 | { 9 | "name": "streamwriter", 10 | "parameter": { 11 | "encoding": "UTF-8", 12 | "print": true, 13 | "nullFormat": "NULL" 14 | } 15 | } 16 | ``` 17 | 18 | The configuration above prints the fetched data directly to the terminal. The `nullFormat` option specifies how null values are rendered in the terminal; the default is the string `NULL`. If you do not want nulls printed, set it to `""`. 19 | 20 | The plugin can also write data to a file, configured as follows: 21 | 22 | ```json 23 | { 24 | "name": "streamwriter", 25 | "parameter": { 26 | "encoding": "UTF-8", 27 | "path": "/tmp/out", 28 | "fileName": "out.txt", 29 | "fieldDelimiter": ",", 30 | "recordNumBeforeSleep": "100", 31 | "sleepTime": "5" 32 | } 33 | } 34 | ``` 35 | 36 | In the configuration above: 37 | 38 | - `fieldDelimiter` is the field separator, tab (`\t`) by default 39 | - `recordNumBeforeSleep` is the number of records to fetch before sleeping; the default 0 disables the feature 40 | - `sleepTime` is how long to sleep, in seconds; the default 0 disables the feature. 41 | 42 | The configuration above writes the data to the file `/tmp/out/out.txt`, sleeping 5 seconds after every 100 records fetched. 43 | -------------------------------------------------------------------------------- /docs/writer/sybasewriter.md: -------------------------------------------------------------------------------- 1 | # Sybase Writer 2 | 3 | The Sybase Writer plugin writes data into [Sybase](https://en.wikipedia.org/wiki/Sybase) tables. 4 | 5 | ## Configuration example 6 | 7 | We can start a Sybase database with a Docker container: 8 | 9 | ```shell 10 | docker run -tid --rm -h dksybase --name sybase -p 5000:5000 ifnazar/sybase_15_7 bash /sybase/start 11 | ``` 12 | 13 | Then create the following table: 14 | 15 | ```sql 16 | create table addax_writer 17 | ( 18 | id int, 19 | name varchar(255), 20 | salary float(2), 21 | created_at datetime, 22 | updated_at datetime 23 | ); 24 | ``` 25 | 26 | Then use the following job configuration file: 27 | 28 | ```json 29 | --8<-- "jobs/sybasewriter.json" 30 | ``` 31 | 32 | ## Parameters 33 | 34 | This plugin is implemented on top of [RDBMS Writer](../rdbmswriter), so all RDBMS Writer configuration items apply. -------------------------------------------------------------------------------- /lib/addax-rdbms/package.xml: -------------------------------------------------------------------------------- 1 | <assembly> 5 | <id>release</id> 6 | <formats> 7 | <format>dir</format> 8 | </formats> 9 | <includeBaseDirectory>false</includeBaseDirectory> 10 | <fileSets> 11 | <fileSet> 12 | <directory>target/</directory> 13 | <includes> 14 | <include>${project.artifactId}-${project.version}.jar</include> 15 | </includes> 16 | <outputDirectory>lib</outputDirectory> 17 | </fileSet> 18 | </fileSets> 19 | <dependencySets> 20 | <dependencySet> 21 | <useProjectArtifact>false</useProjectArtifact> 22 | <outputDirectory>lib</outputDirectory> 23 | <scope>runtime</scope> 24 | <excludes> 25 | <exclude>com.wgzhao.addax:addax-core</exclude> 26 | </excludes> 27 | </dependencySet> 28 | </dependencySets> 29 | </assembly> -------------------------------------------------------------------------------- /lib/addax-rdbms/src/main/java/com/wgzhao/addax/rdbms/util/ConnectionFactory.java: -------------------------------------------------------------------------------- 1 | /* 2 | * 3 | * * Licensed to the Apache Software Foundation (ASF) under one 4 | * * or more contributor license agreements. See the NOTICE file 5 | * * distributed with this work for additional information 6 | * * regarding copyright ownership. The ASF licenses this file 7 | * * to you under the Apache License, Version 2.0 (the 8 | * * "License"); you may not use this file except in compliance 9 | * * with the License. You may obtain a copy of the License at 10 | * * 11 | * * http://www.apache.org/licenses/LICENSE-2.0 12 | * * 13 | * * Unless required by applicable law or agreed to in writing, 14 | * * software distributed under the License is distributed on an 15 | * * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 16 | * * KIND, either express or implied. See the License for the 17 | * * specific language governing permissions and limitations 18 | * * under the License.
19 | * 20 | */ 21 | 22 | package com.wgzhao.addax.rdbms.util; 23 | 24 | import java.sql.Connection; 25 | 26 | public interface ConnectionFactory 27 | { 28 | 29 | Connection getConnection(); 30 | 31 | Connection getConnectionWithoutRetry(); 32 | 33 | String getConnectionInfo(); 34 | } 35 | -------------------------------------------------------------------------------- /lib/addax-storage/package.xml: -------------------------------------------------------------------------------- 1 | <assembly> 5 | <id>release</id> 6 | <formats> 7 | <format>dir</format> 8 | </formats> 9 | <includeBaseDirectory>false</includeBaseDirectory> 10 | <fileSets> 11 | <fileSet> 12 | <directory>target/</directory> 13 | <includes> 14 | <include>${project.artifactId}-${project.version}.jar</include> 15 | </includes> 16 | <outputDirectory>lib</outputDirectory> 17 | </fileSet> 18 | </fileSets> 19 | <dependencySets> 20 | <dependencySet> 21 | <useProjectArtifact>false</useProjectArtifact> 22 | <outputDirectory>lib</outputDirectory> 23 | <scope>runtime</scope> 24 | <excludes> 25 | <exclude>com.wgzhao.addax:addax-core</exclude> 26 | </excludes> 27 | </dependencySet> 28 | </dependencySets> 29 | </assembly> -------------------------------------------------------------------------------- /overrides/home.html: -------------------------------------------------------------------------------- 1 | {% extends "base.html" %} 2 | 3 | {% block scripts %} 4 | 5 | {{ super() }} 6 | 7 | {% endblock %} -------------------------------------------------------------------------------- /plugin/reader/accessreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "accessreader", 3 | "class": "com.wgzhao.addax.plugin.reader.accessreader.AccessReader", 4 | "description": "access reader", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/accessreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "accessreader", 3 | "parameter": { 4 | "username": "root", 5 | "password": "root", 6 | "column": [ 7 | "*" 8 | ], 9 | "connection": 10 | { 11 | "table": [ 12 | "addax_reader" 13 | ], 14 | "jdbcUrl": "jdbc:ucanaccess://" 15 | }, 16 | "where": "" 17 | } 18 | } -------------------------------------------------------------------------------- /plugin/reader/cassandrareader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cassandrareader", 3 | "class": "com.wgzhao.addax.plugin.reader.cassandrareader.CassandraReader", 4 | "description": "useScene: prod. mechanism: execute select cql, retrieve data from the ResultSet. warn: The more you know about the database, the less problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/reader/cassandrareader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cassandrareader", 3 | "parameter": { 4 | "username": "", 5 | "password": "", 6 | "host": "", 7 | "port": "", 8 | "useSSL": false, 9 | "keyspace": "", 10 | "table": "", 11 | "column": [ 12 | "c1", 13 | "c2", 14 | "c3" 15 | ] 16 | } 17 | } -------------------------------------------------------------------------------- /plugin/reader/clickhousereader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "clickhousereader", 3 | "class": "com.wgzhao.addax.plugin.reader.clickhousereader.ClickHouseReader", 4 | "description": "useScene: prod. mechanism: Jdbc connection using the database, execute select sql, retrieve data from the ResultSet.
warn: The more you know about the database, the less problems you encounter.", 5 | "developer": "alibaba" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/reader/clickhousereader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "clickhousereader", 3 | "parameter": { 4 | "username": "default", 5 | "password": "", 6 | "column": [ 7 | "*" 8 | ], 9 | "connection": { 10 | "jdbcUrl": "jdbc:clickhouse://127.0.0.1:8123/default", 11 | "table": [ 12 | "yourtable" 13 | ] 14 | }, 15 | "where": "" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /plugin/reader/databendreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "databendreader", 3 | "class": "com.wgzhao.addax.plugin.reader.databendreader.DatabendReader", 4 | "description": "Read from Databend server", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/databendreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "databend", 3 | "parameter": { 4 | "username": "root", 5 | "password": "root", 6 | "column": [ 7 | "*" 8 | ], 9 | "connection": { 10 | "jdbcUrl": "jdbc:databend://127.0.0.1:8000/test", 11 | "table": [ 12 | "addax_reader" 13 | ] 14 | }, 15 | "where": "" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /plugin/reader/datareader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "datareader", 3 | "class": "com.wgzhao.addax.plugin.reader.datareader.DataReader", 4 | "description": { 5 | "useScene": "generate fake data for populating database", 6 | "mechanism": "", 7 | "warn": "" 8 | }, 9 | "developer": "wgzhao" 10 | } -------------------------------------------------------------------------------- /plugin/reader/dbfreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dbfreader", 3 | "class": "com.wgzhao.addax.plugin.reader.dbfreader.DbfReader", 4 | "description": "reader from dbf file", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/dbfreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dbfreader", 3 | "parameter": { 4 | "column": [ 5 | { 6 | "index": 0, 7 | "type": "string" 8 | }, 9 | { 10 | "index": 1, 11 | "type": "long" 12 | }, 13 | { 14 | "index": 2, 15 | "type": "string" 16 | }, 17 | { 18 | "index": 3, 19 | "type": "boolean" 20 | }, 21 | { 22 | "index": 4, 23 | "type": "string" 24 | }, 25 | { 26 | "value": "dbf", 27 | "type": "string" 28 | } 29 | ], 30 | "path": [ 31 | "/tmp/out" 32 | ], 33 | "encoding": "GBK" 34 | } 35 | } -------------------------------------------------------------------------------- /plugin/reader/elasticsearchreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "elasticsearchreader", 3 | "class": "com.wgzhao.addax.plugin.reader.elasticsearchreader.EsReader", 4 | "description": "Retrieve data 
from ElasticSearch", 5 | "developer": "kesc" 6 | } -------------------------------------------------------------------------------- /plugin/reader/elasticsearchreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "elasticsearchreader", 3 | "parameter": { 4 | "endpoint": "http://127.0.0.1:9200", 5 | "accessId": "", 6 | "accesskey": "", 7 | "index": "test-1", 8 | "type": "default", 9 | "searchType": "dfs_query_then_fetch", 10 | "headers": {}, 11 | "scroll": "3m", 12 | "search": [ 13 | { 14 | "query": { 15 | "match": { 16 | "col_ip": "1.1.1.1" 17 | } 18 | }, 19 | "aggregations": { 20 | "top_10_states": { 21 | "terms": { 22 | "field": "col_date", 23 | "size": 10 24 | } 25 | } 26 | } 27 | } 28 | ], 29 | "column": [ 30 | "col_ip", 31 | "col_double", 32 | "col_long", 33 | "col_integer", 34 | "col_keyword", 35 | "col_text", 36 | "col_geo_point", 37 | "col_date" 38 | ] 39 | } 40 | } -------------------------------------------------------------------------------- /plugin/reader/excelreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "excelreader", 3 | "class": "com.wgzhao.addax.plugin.reader.excelreader.ExcelReader", 4 | "description": "Retrieve data from Microsoft Excel Application", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/excelreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "excelreader", 3 | "parameter": { 4 | "path": [], 5 | "header": false, 6 | "skipRows": 0 7 | } 8 | } -------------------------------------------------------------------------------- /plugin/reader/ftpreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ftpreader", 3 | "class": "com.wgzhao.addax.plugin.reader.ftpreader.FtpReader", 4 | "description": "useScene: test. mechanism: use addax framework to transport data from txt file. 
warn: The more you know about the data, the less problems you encounter.", 5 | "developer": "wgzhao" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/reader/ftpreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ftpreader", 3 | "parameter": { 4 | "protocol": "sftp", 5 | "host": "127.0.0.1", 6 | "port": 22, 7 | "username": "xx", 8 | "password": "xxx", 9 | "path": [ 10 | "/var/ftp/*.txt", "/var/tmp/*.txt", "/public/ftp" 11 | ], 12 | "column": [ 13 | { 14 | "index": 0, 15 | "type": "long" 16 | }, 17 | { 18 | "index": 1, 19 | "type": "boolean" 20 | }, 21 | { 22 | "index": 2, 23 | "type": "double" 24 | }, 25 | { 26 | "index": 3, 27 | "type": "string" 28 | }, 29 | { 30 | "index": 4, 31 | "type": "date", 32 | "format": "yyyy.MM.dd" 33 | } 34 | ], 35 | "encoding": "UTF-8", 36 | "fieldDelimiter": "," 37 | } 38 | } -------------------------------------------------------------------------------- /plugin/reader/hanareader/src/main/java/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rdbmsreader", 3 | "class": "com.wgzhao.addax.plugin.reader.rdbmsreader.RdbmsReader", 4 | "description": "general RDBMS reader plugin", 5 | "developer": "alibaba", 6 | "drivers": [] 7 | } 8 | -------------------------------------------------------------------------------- /plugin/reader/hanareader/src/main/java/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rdbmsreader", 3 | "parameter": { 4 | "username": "", 5 | "password": "", 6 | "column": [], 7 | "connection": [ 8 | { 9 | "jdbcUrl": [], 10 | "table": [], 11 | "driver": "" 12 | } 13 | ], 14 | "where": "1=1", 15 | "autoPk": false, 16 | "fetchSize": 2048, 17 | "splitPk": "" 18 | } 19 | } -------------------------------------------------------------------------------- /plugin/reader/hanareader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hanareader", 3 | "class": "com.wgzhao.addax.plugin.reader.hanareader.HANAReader", 4 | "description": "SAP HANA reader plugin", 5 | "developer": "wgzhao", 6 | "drivers": [] 7 | } 8 | -------------------------------------------------------------------------------- /plugin/reader/hanareader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hanareader", 3 | "parameter": { 4 | "username": "system", 5 | "password": "", 6 | "column": [ 7 | "*" 8 | ], 9 | "connection": { 10 | "jdbcUrl": "jdbc:sap://localhost:30015/?currentschema=SYSTEM", 11 | "table": [ 12 | "addax_table" 13 | ] 14 | }, 15 | "where": "1=1", 16 | "autoPk": false, 17 | "fetchSize": 2048, 18 | "splitPk": "" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /plugin/reader/hbase11xreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase11xreader", 3 | "class": "com.wgzhao.addax.plugin.reader.hbase11xreader.Hbase11xReader", 4 | "description": "useScene: prod. 
mechanism: Scan to read data.", 5 | "developer": "alibaba" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/reader/hbase11xreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase11xreader", 3 | "parameter": { 4 | "hbaseConfig": { 5 | "hbase.zookeeper.quorum": "node01,node02,node03", 6 | "zookeeper.znode.parent": "/hbase-secure" 7 | }, 8 | "haveKerberos": "true", 9 | "kerberosPrincipal": "hbase@EXAMPLE.COM", 10 | "kerberosKeytabFilePath": "/etc/security/keytabs/hbase.headless.keytab", 11 | "table": "users", 12 | "encoding": "utf-8", 13 | "mode": "normal", 14 | "column": [ 15 | { 16 | "name": "rowkey", 17 | "type": "string" 18 | }, 19 | { 20 | "name": "info: age", 21 | "type": "string" 22 | }, 23 | { 24 | "name": "info: birthday", 25 | "type": "date", 26 | "format": "yyyy-MM-dd" 27 | }, 28 | { 29 | "name": "info: company", 30 | "type": "string" 31 | }, 32 | { 33 | "name": "address: country", 34 | "type": "string" 35 | }, 36 | { 37 | "name": "address: province", 38 | "type": "string" 39 | }, 40 | { 41 | "name": "address: city", 42 | "type": "string" 43 | } 44 | ], 45 | "range": { 46 | "startRowkey": "", 47 | "endRowkey": "", 48 | "isBinaryRowkey": true 49 | } 50 | } 51 | } -------------------------------------------------------------------------------- /plugin/reader/hbase11xsqlreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase11xsqlreader", 3 | "class": "com.wgzhao.addax.plugin.reader.hbase11xsqlreader.HbaseSQLReader", 4 | "description": "useScene: prod. mechanism: Scan to read data.", 5 | "developer": "liwei.li, bug reported to : liwei.li@alibaba-inc.com" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/reader/hbase11xsqlreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase11sqlreader", 3 | "parameter": { 4 | "hbaseConfig": { 5 | "hbase.zookeeper.quorum": "node1,node2,node3", 6 | "zookeeper.znode.parent": "/hbase-unsecure" 7 | }, 8 | "haveKerberos": "true", 9 | "kerberosPrincipal": "hbase@EXAMPLE.COM", 10 | "kerberosKeytabFilePath": "/etc/security/keytabs/hbase.headless.keytab", 11 | "table": "TABLE1", 12 | "column": [ 13 | "ID", 14 | "COL1" 15 | ] 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /plugin/reader/hbase20xreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase20xreader", 3 | "class": "com.wgzhao.addax.plugin.reader.hbase20xreader.Hbase20xReader", 4 | "description": "useScene: prod. 
mechanism: Scan to read data.", 5 | "developer": "alibaba" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/reader/hbase20xreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase20xreader", 3 | "parameter": { 4 | "hbaseConfig": { 5 | "hbase.zookeeper.quorum": "node1,node2,node3", 6 | "zookeeper.znode.parent": "/hbase-unsecure" 7 | }, 8 | "table": "", 9 | "encoding": "", 10 | "mode": "", 11 | "column": [], 12 | "range": { 13 | "startRowkey": "", 14 | "endRowkey": "", 15 | "isBinaryRowkey": true 16 | } 17 | } 18 | } -------------------------------------------------------------------------------- /plugin/reader/hbase20xsqlreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase20xsqlreader", 3 | "class": "com.wgzhao.addax.plugin.reader.hbase20xsqlreader.HBase20xSQLReader", 4 | "description": "useScene: prod. mechanism: read data from phoenix through queryserver.", 5 | "developer": "bake" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/reader/hbase20xsqlreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase20xsqlreader", 3 | "parameter": { 4 | "queryserverAddress": "", 5 | "serialization": "PROTOBUF", 6 | "schema": "", 7 | "table": "TABLE1", 8 | "column": [ 9 | "ID", 10 | "NAME" 11 | ], 12 | "splitKey": "rowkey", 13 | "splitPoint": [], 14 | "where": "" 15 | } 16 | } 17 | -------------------------------------------------------------------------------- /plugin/reader/hdfsreader/src/main/java/com/wgzhao/addax/plugin/reader/hdfsreader/JavaType.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.wgzhao.addax.plugin.reader.hdfsreader; 21 | 22 | public enum JavaType { 23 | TINYINT, 24 | SMALLINT, 25 | INT, 26 | INTEGER, 27 | BIGINT, 28 | FLOAT, 29 | DOUBLE, 30 | TIMESTAMP, 31 | DATE, 32 | DECIMAL, 33 | STRING, 34 | VARCHAR, 35 | CHAR, 36 | LONG, 37 | BOOLEAN, 38 | BINARY, 39 | ARRAY, 40 | MAP 41 | } 42 | -------------------------------------------------------------------------------- /plugin/reader/hdfsreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hdfsreader", 3 | "class": "com.wgzhao.addax.plugin.reader.hdfsreader.HdfsReader", 4 | "description": "useScene: test.
mechanism: use addax framework to transport data from hdfs. warn: The more you know about the data, the less problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/reader/hdfsreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hdfsreader", 3 | "parameter": { 4 | "path": "/user/hive/warehouse/mytable01/", 5 | "defaultFS": "hdfs://cluster", 6 | "column": [ 7 | { 8 | "index": 0, 9 | "type": "long" 10 | }, 11 | { 12 | "index": 1, 13 | "type": "boolean" 14 | }, 15 | { 16 | "type": "string", 17 | "value": "hello" 18 | }, 19 | { 20 | "index": 2, 21 | "type": "double" 22 | } 23 | ], 24 | "fileType": "orc", 25 | "encoding": "UTF-8", 26 | "fieldDelimiter": "," 27 | }, 28 | "haveKerberos": "true", 29 | "kerberosPrincipal": "hive@COMPANY.COM", 30 | "kerberosKeytabFilePath": "/etc/security/keytabs/hive.headless.keytab", 31 | "hadoopConfig": { 32 | "dfs.nameservices": "cluster", 33 | "dfs.ha.namenodes.cluster": "nn1,nn2", 34 | "dfs.namenode.rpc-address.cluster.nn1": "node1:8020", 35 | "dfs.namenode.rpc-address.cluster.nn2": "node2:8020", 36 | "dfs.client.failover.proxy.provider.cluster": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider" 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /plugin/reader/hivereader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hivereader", 3 | "class": "com.wgzhao.addax.plugin.reader.hivereader.HiveReader", 4 | "description": "Efficiently retrieve data from Hive databases using JDBC, enabling seamless integration and data manipulation.", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/hivereader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hivereader", 3 | "parameter": { 4 | "column": [ 5 | "*" 6 | ], 7 | "username": "hive", 8 | "password": "", 9 | "connection": { 10 | "jdbcUrl": "jdbc:hive2://localhost:10000/default;principal=hive/_HOST@COMPANY.COM", 11 | "table": [ 12 | "hive_reader" 13 | ] 14 | }, 15 | "where": "logdate='20211013'", 16 | "haveKerberos": true, 17 | "kerberosKeytabFilePath": "/etc/security/keytabs/hive.headless.keytab", 18 | "kerberosPrincipal": "hive@COMPANY.COM" 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /plugin/reader/httpreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "httpreader", 3 | "class": "com.wgzhao.addax.plugin.reader.httpreader.HttpReader", 4 | "description": "Retrieve data from RESTful API", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/httpreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "httpreader", 3 | "parameter": { 4 | "connection": 5 | { 6 | "url": "http://127.0.0.1:9090/mock/17/LDJSC/ASSET", 7 | "proxy": { 8 | "host": "http://127.0.0.1:3128", 9 | "auth": "user:pass" 10 | } 11 | } 12 | , 13 | "reqParams": { 14 | "CURR_DATE":"2021-01-18", 15 | "DEPT":"9700" 16 | }, 17 | "isPage": true, 18
| "pageParams": { 19 | "pageSize": {"key":"pageSize","value":10}, 20 | "pageIndex":{"key": "pageNo", "value": 1} 21 | }, 22 | "resultKey":"result", 23 | "method": "GET", 24 | "column": ["CURR_DATE","DEPT","TOTAL_MANAGED_MARKET_VALUE","TOTAL_MANAGED_MARKET_VALUE_GROWTH"], 25 | "username": "user", 26 | "password": "passw0rd", 27 | "headers": { 28 | "X-Powered-by": "Addax" 29 | } 30 | } 31 | } -------------------------------------------------------------------------------- /plugin/reader/influxdb2reader/src/main/java/com/wgzhao/addax/plugin/reader/influxdb2reader/InfluxDB2Key.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.reader.influxdb2reader; 2 | 3 | import com.wgzhao.addax.core.base.Key; 4 | 5 | public final class InfluxDB2Key 6 | extends Key 7 | { 8 | public static final String ENDPOINT = "endpoint"; 9 | public static final String BUCKET = "bucket"; 10 | public static final String ORG = "org"; 11 | public static final String TOKEN = "token"; 12 | public static final String RANGE = "range"; 13 | public static final String LIMIT = "limit"; 14 | } 15 | -------------------------------------------------------------------------------- /plugin/reader/influxdb2reader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "influxdb2reader", 3 | "class": "com.wgzhao.addax.plugin.reader.influxdb2reader.InfluxDB2Reader", 4 | "description": "read from InfluxDB table, support InfluxDB 1.8 , 2.0 or later", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/influxdb2reader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "influxdb2reader", 3 | "parameter": { 4 | "column": [ 5 | "location", "height", "wet" 6 | ], 7 | "connection": [ 8 | { 9 | "endpoint": "http://localhost:8086", 10 | "bucket":"test", 11 | "table": ["temperature"], 12 | "org": "com.wgzhao" 13 | } 14 | ], 15 | "token":"YOUR_SECURE_TOKEN", 16 | "range": ["-1h", "-5m"], 17 | "limit": 10 18 | } 19 | } -------------------------------------------------------------------------------- /plugin/reader/influxdbreader/src/main/java/com/wgzhao/addax/plugin/reader/influxdbreader/InfluxDBKey.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.wgzhao.addax.plugin.reader.influxdbreader; 21 | 22 | import com.wgzhao.addax.core.base.Key; 23 | 24 | public final class InfluxDBKey 25 | extends Key 26 | { 27 | public static final String ENDPOINT = "endpoint"; 28 | public static final String CONNECT_TIMEOUT_SECONDS = "connTimeout"; 29 | public static final String SOCKET_TIMEOUT_SECONDS = "socketTimeout"; 30 | } 31 | -------------------------------------------------------------------------------- /plugin/reader/influxdbreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "influxdbreader", 3 | "class": "com.wgzhao.addax.plugin.reader.influxdbreader.InfluxDBReader", 4 | "description": "read from InfluxDB table", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/influxdbreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "influxdbreader", 3 | "parameter": { 4 | "column": [ 5 | "*" 6 | ], 7 | "where": "1=1", 8 | "connection": 9 | { 10 | "endpoint": "http://localhost:8086", 11 | "database": "NOAA_water_database", 12 | "table": "h2o_feet" 13 | }, 14 | "connTimeout": 15, 15 | "readTimeout": 20, 16 | "writeTimeout": 20, 17 | "username": "influx", 18 | "password": "influx123" 19 | } 20 | } -------------------------------------------------------------------------------- /plugin/reader/jsonfilereader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "jsonfilereader", 3 | "class": "com.wgzhao.addax.plugin.reader.jsonfilereader.JsonReader", 4 | "description": "read json file, support JSON and JSON Lines format", 5 | "developer": "szunicom, wgzhao" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/reader/jsonfilereader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "jsonfilereader", 3 | "parameter": { 4 | "path": [ 5 | "/tmp/test*.json" 6 | ], 7 | "singleLine": true, 8 | "column": [ 9 | { 10 | "index": "$.id", 11 | "type": "long" 12 | }, 13 | { 14 | "index": "$.name", 15 | "type": "string" 16 | }, 17 | { 18 | "index": "$.age", 19 | "type": "long" 20 | }, 21 | { 22 | "index": "$.score.math", 23 | "type": "double" 24 | }, 25 | { 26 | "index": "$.score.english", 27 | "type": "double" 28 | }, 29 | { 30 | "index": "$.pubdate", 31 | "type": "date" 32 | }, 33 | { 34 | "type": "string", 35 | "value": "constant string" 36 | } 37 | ] 38 | } 39 | } -------------------------------------------------------------------------------- /plugin/reader/kafkareader/src/main/java/com/wgzhao/addax/plugin/reader/kafkareader/KafkaKey.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed under the Apache License, Version 2.0 (the "License"); 3 | * you may not use this file except in compliance with the License. 4 | * You may obtain a copy of the License at 5 | * 6 | * http://www.apache.org/licenses/LICENSE-2.0 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 13 | */ 14 | 15 | package com.wgzhao.addax.plugin.reader.kafkareader; 16 | 17 | import com.wgzhao.addax.core.base.Key; 18 | 19 | public class KafkaKey 20 | extends Key 21 | { 22 | public final static String BROKER_LIST = "brokerList"; 23 | public final static String TOPIC = "topic"; 24 | public final static String PROPERTIES = "properties"; 25 | public final static String MISSING_KEY_VALUE = "missingKeyValue"; 26 | public static final String MAX_MESSAGE_NUMBER = "maxMessageNumber"; 27 | } 28 | -------------------------------------------------------------------------------- /plugin/reader/kafkareader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafkareader", 3 | "class": "com.wgzhao.addax.plugin.reader.kafkareader.KafkaReader", 4 | "description": "read specified kafka topic", 5 | "developer": "wgzhao" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/reader/kafkareader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafkareader", 3 | "parameter": { 4 | "column": [ 5 | "*" 6 | ], 7 | "brokerList": "localhost:9092,localhost:9093,localhost:9094", 8 | "topic": "test", 9 | "missingKeyValue": "\\N", 10 | "properties": { 11 | "auto.offset.reset": "earliest" 12 | } 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /plugin/reader/kudureader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kudureader", 3 | "class": "com.wgzhao.addax.plugin.reader.kudureader.KuduReader", 4 | "description": "useScene: prod. mechanism: via kuduclient connect kudu reader data concurrent.", 5 | "developer": "wgzhao" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/reader/kudureader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kudureader", 3 | "parameter": { 4 | "masterAddress": "127.0.0.1:7051", 5 | "table": "test", 6 | "splitPk": "", 7 | "lowerBound": "min", 8 | "upperBound": "max", 9 | "ReadTimeoutMs": 30000, 10 | "scanRequestTimeout": 30000, 11 | "columns": [ 12 | "col1", 13 | "col2", 14 | "col3" 15 | ] 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /plugin/reader/mongodbreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mongodbreader", 3 | "class": "com.wgzhao.addax.plugin.reader.mongodbreader.MongoDBReader", 4 | "description": "useScene: prod. 
mechanism: via mongoclient connect mongodb reader data concurrent.", 5 | "developer": "alibaba" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/reader/mongodbreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mongodbreader", 3 | "parameter": { 4 | "username": "", 5 | "password": "", 6 | "connection": [ 7 | { 8 | "address": ["127.0.0.1:32768"], 9 | "database": "tag_per_data", 10 | "authDb": "admin", 11 | "collection": "tag_data" 12 | } 13 | ], 14 | "column": [ 15 | "unique_id", 16 | "sid", 17 | "user_id", 18 | "auction_id", 19 | "content_type", 20 | "pool_type", 21 | "frontcat_id", 22 | "catagoryid", 23 | "gmt_create", 24 | "taglist", 25 | "property", 26 | "scorea", 27 | "scoreb", 28 | "scorec" 29 | ], 30 | "query": "" 31 | } 32 | } -------------------------------------------------------------------------------- /plugin/reader/mysqlreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mysqlreader", 3 | "class": "com.wgzhao.addax.plugin.reader.mysqlreader.MysqlReader", 4 | "description": "useScene: prod. mechanism: Jdbc connection using the database, execute select sql, retrieve data from the ResultSet. warn: The more you know about the database, the less problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/reader/mysqlreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mysqlreader", 3 | "parameter": { 4 | "username": "root", 5 | "password": "root", 6 | "autoPk": "false", 7 | "splitPk": "", 8 | "column": [ 9 | "*" 10 | ], 11 | "connection": { 12 | "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/test", 13 | "table": [ 14 | "addax_reader" 15 | ], 16 | "driver": "com.mysql.jdbc.Driver", 17 | "fetchSize": 2048 18 | }, 19 | "where": "" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /plugin/reader/oraclereader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oraclereader", 3 | "class": "com.wgzhao.addax.plugin.reader.oraclereader.OracleReader", 4 | "description": "useScene: prod. mechanism: Jdbc connection using the database, execute select sql, retrieve data from the ResultSet. 
warn: The more you know about the database, the less problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/reader/oraclereader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oraclereader", 3 | "parameter": { 4 | "username": "root", 5 | "password": "root", 6 | "column": [ 7 | "id", 8 | "name" 9 | ], 10 | "splitPk": "db_id", 11 | "autoPk": false, 12 | "connection": { 13 | "jdbcUrl": "jdbc:oracle:thin:@:PORT:", 14 | "table": [ 15 | "table" 16 | ] 17 | }, 18 | "where": "", 19 | "session": [ 20 | "alter session set NLS_DATE_FORMAT='yyyy-mm-dd hh24:mi:ss'", 21 | "alter session set NLS_TIMESTAMP_FORMAT='yyyy-mm-dd hh24:mi:ss'", 22 | "alter session set NLS_TIMESTAMP_TZ_FORMAT='yyyy-mm-dd hh24:mi:ss'", 23 | "alter session set TIME_ZONE='Asia/Chongqing'" 24 | ], 25 | "fetchSize": 2048 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /plugin/reader/postgresqlreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "postgresqlreader", 3 | "class": "com.wgzhao.addax.plugin.reader.postgresqlreader.PostgresqlReader", 4 | "description": "useScene: prod. mechanism: Jdbc connection using the database, execute select sql, retrieve data from the ResultSet. warn: The more you know about the database, the less problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/reader/postgresqlreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "postgresqlreader", 3 | "parameter": { 4 | "username": "pgtest", 5 | "password": "pgtest", 6 | "autoPk": false, 7 | "splitPk": "", 8 | "column": [ 9 | "*" 10 | ], 11 | "connection": { 12 | "jdbcUrl": "jdbc:postgresql://127.0.0.1:5432/pgtest", 13 | "table": [ 14 | "addax_tbl" 15 | ] 16 | }, 17 | "where": "", 18 | "fetchSize": 2048 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /plugin/reader/rdbmsreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rdbmsreader", 3 | "class": "com.wgzhao.addax.plugin.reader.rdbmsreader.RdbmsReader", 4 | "description": "general RDBMS reader plugin", 5 | "developer": "alibaba", 6 | "drivers": [] 7 | } 8 | -------------------------------------------------------------------------------- /plugin/reader/rdbmsreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rdbmsreader", 3 | "parameter": { 4 | "username": "", 5 | "password": "", 6 | "column": [ 7 | "*" 8 | ], 9 | "connection": { 10 | "jdbcUrl": "jdbc:://:/", 11 | "table": [ 12 | "addax_table" 13 | ], 14 | "driver": "" 15 | }, 16 | "where": "1=1", 17 | "autoPk": false, 18 | "fetchSize": 2048, 19 | "splitPk": "" 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /plugin/reader/redisreader/src/main/java/com/wgzhao/addax/plugin/reader/redisreader/impl/SentinelListener.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under
one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, software 13 | * distributed under the License is distributed on an "AS IS" BASIS, 14 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | * See the License for the specific language governing permissions and 16 | * limitations under the License. 17 | */ 18 | 19 | package com.wgzhao.addax.plugin.reader.redisreader.impl; 20 | 21 | import redis.clients.jedis.HostAndPort; 22 | 23 | public interface SentinelListener 24 | { 25 | 26 | void onClose(Sentinel sentinel); 27 | 28 | void onSwitch(Sentinel sentinel, HostAndPort host); 29 | } -------------------------------------------------------------------------------- /plugin/reader/redisreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "redisreader", 3 | "class": "com.wgzhao.addax.plugin.reader.redisreader.RedisReader", 4 | "description": "Use redis-replicator connect to redis, execute dump command", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/reader/redisreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "redisreader", 3 | "parameter": { 4 | "connection": 5 | { 6 | "uri": ["tcp://127.0.0.1:6379", "file:///data/dump.rdb", "http://localhost/dump.rdb"], 7 | "auth": "", 8 | "mode": "sentinel|master/slave|standalone|cluster", 9 | "masterName": "mymaster" 10 | }, 11 | "db": [ 12 | 0, 13 | 1 14 | ], 15 | "include": [ 16 | "^user" 17 | ], 18 | "exclude": [] 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /plugin/reader/s3reader/src/main/java/com/wgzhao/addax/plugin/reader/s3reader/S3Key.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.reader.s3reader; 2 | 3 | import com.wgzhao.addax.core.base.Key; 4 | 5 | public class S3Key extends Key 6 | { 7 | public static final String REGION = "region"; 8 | 9 | public static final String ENDPOINT = "endpoint"; 10 | 11 | public static final String ACCESS_ID = "accessId"; 12 | 13 | public static final String ACCESS_KEY = "accessKey"; 14 | 15 | public static final String BUCKET = "bucket"; 16 | 17 | public static final String OBJECT = "object"; 18 | 19 | public static final String PATH_STYLE_ACCESS_ENABLED = "pathStyleAccessEnabled"; 20 | } 21 | -------------------------------------------------------------------------------- /plugin/reader/s3reader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "s3reader", 3 | "class": "com.wgzhao.addax.plugin.reader.s3reader.S3Reader", 4 | "description": { 5 | "useScene": "retrieve object from Amazon S3 service", 6 | "mechanism": "", 7 | "warn": "" 8 | }, 9 | "developer": "wgzhao" 10 | } 11 | -------------------------------------------------------------------------------- 
/plugin/reader/s3reader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "s3reader", 3 | "parameter": { 4 | "endpoint": "https://s3.amazonaws.com", 5 | "accessId": "xxxxxxxxxxxx", 6 | "accessKey": "xxxxxxxxxxxxxxxxxxxxxxx", 7 | "bucket": "test", 8 | "object": [ 9 | "1.csv", 10 | "aa.csv" 11 | ], 12 | "column": [ 13 | "*" 14 | ], 15 | "region": "ap-northeast-1", 16 | "fileFormat": "csv", 17 | "fieldDelimiter": "," 18 | } 19 | } 20 | -------------------------------------------------------------------------------- /plugin/reader/sqlitereader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlitereader", 3 | "class": "com.wgzhao.addax.plugin.reader.sqlitereader.SqliteReader", 4 | "description": "read sqlite file via jdbc protocol", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/sqlitereader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlitereader", 3 | "parameter": { 4 | "username": "", 5 | "password": "", 6 | "column": [ 7 | "*" 8 | ], 9 | "connection": { 10 | "jdbcUrl": "jdbc:sqlite:/tmp/test.sqlite3", 11 | "table": [ 12 | "test" 13 | ] 14 | }, 15 | "where": "" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /plugin/reader/sqlserverreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlserverreader", 3 | "class": "com.wgzhao.addax.plugin.reader.sqlserverreader.SqlServerReader", 4 | "description": "useScene: test. mechanism: use addax framework to transport data from SQL Server. warn: The more you know about the data, the fewer problems you encounter.", 5 | "developer": "alibaba" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/reader/sqlserverreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlserverreader", 3 | "parameter": { 4 | "username": "", 5 | "password": "", 6 | "connection": { 7 | "jdbcUrl": "jdbc:sqlserver://localhost:3433;DatabaseName=dbname", 8 | "table": [ 9 | "table" 10 | ] 11 | }, 12 | "where": "1=1", 13 | "autoPk": false, 14 | "fetchSize": 2048, 15 | "splitPk": "" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /plugin/reader/streamreader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "streamreader", 3 | "class": "com.wgzhao.addax.plugin.reader.streamreader.StreamReader", 4 | "description": { 5 | "useScene": "only for developer testing.", 6 | "mechanism": "use addax framework to transport data from stream.", 7 | "warn": "Never use it in your real job."
8 | }, 9 | "developer": "alibaba" 10 | } -------------------------------------------------------------------------------- /plugin/reader/streamreader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "streamreader", 3 | "parameter": { 4 | "column": [ 5 | { 6 | "value": "unique_id", 7 | "type": "string" 8 | }, 9 | { 10 | "value": "1989-06-04 08:12:13", 11 | "type": "date", 12 | "dateFormat": "yyyy-MM-dd HH:mm:ss" 13 | }, 14 | { 15 | "value": 1984, 16 | "type": "long" 17 | }, 18 | { 19 | "value": 1989.64, 20 | "type": "double" 21 | }, 22 | { 23 | "value": true, 24 | "type": "bool" 25 | }, 26 | { 27 | "value": "a long text", 28 | "type": "bytes" 29 | } 30 | ], 31 | "sliceRecordCount": 10 32 | } 33 | } -------------------------------------------------------------------------------- /plugin/reader/sybasereader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sybasereader", 3 | "class": "com.wgzhao.addax.plugin.reader.sybasereader.SybaseReader", 4 | "description": "Sybase Reader plugin", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/sybasereader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sybasereader", 3 | "parameter": { 4 | "username": "root", 5 | "password": "root", 6 | "column": [ 7 | "*" 8 | ], 9 | "connection": { 10 | "jdbcUrl": "jdbc:sybase://127.0.0.1:3306/test", 11 | "table": [ 12 | "addax_reader" 13 | ] 14 | }, 15 | "where": "" 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /plugin/reader/tdenginereader/src/main/java/com/wgzhao/addax/plugin/reader/tdenginereader/TDKey.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed under the Apache License, Version 2.0 (the "License"); 3 | * you may not use this file except in compliance with the License. 4 | * You may obtain a copy of the License at 5 | * 6 | * http://www.apache.org/licenses/LICENSE-2.0 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 
13 | */ 14 | 15 | package com.wgzhao.addax.plugin.reader.tdenginereader; 16 | 17 | import com.wgzhao.addax.core.base.Key; 18 | 19 | public class TDKey 20 | extends Key 21 | { 22 | 23 | public static final String BEGIN_DATETIME = "beginDateTime"; 24 | public static final String END_DATETIME = "endDateTime"; 25 | public static final String SPLIT_INTERVAL = "splitInterval"; 26 | } 27 | -------------------------------------------------------------------------------- /plugin/reader/tdenginereader/src/main/libs/libtaos.so.2.0.16.0: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/plugin/reader/tdenginereader/src/main/libs/libtaos.so.2.0.16.0 -------------------------------------------------------------------------------- /plugin/reader/tdenginereader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tdenginereader", 3 | "class": "com.wgzhao.addax.plugin.reader.tdenginereader.TDengineReader", 4 | "description": "Retrieve data from TDengine", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/reader/tdenginereader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "tdenginereader", 3 | "parameter": { 4 | "username": "root", 5 | "password": "taosdata", 6 | "connection": { 7 | "jdbcUrl": "jdbc:TAOS://127.0.0.1:6030/test", 8 | "querySql": [ 9 | "select * from test.meters where ts <'2017-07-14 10:40:02' and loc='beijing' limit 10" 10 | ] 11 | }, 12 | "where": "1=1", 13 | "autoPk": false, 14 | "fetchSize": 2048, 15 | "splitPk": "" 16 | } 17 | } -------------------------------------------------------------------------------- /plugin/reader/txtfilereader/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "txtfilereader", 3 | "class": "com.wgzhao.addax.plugin.reader.txtfilereader.TxtFileReader", 4 | "description": "useScene: test. mechanism: use addax framework to transport data from txt file. 
warn: The more you know about the data, the fewer problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/reader/txtfilereader/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "txtfilereader", 3 | "parameter": { 4 | "path": [ 5 | "/tmp/data" 6 | ], 7 | "encoding": "UTF-8", 8 | "column": [ 9 | { 10 | "index": 0, 11 | "type": "long" 12 | }, 13 | { 14 | "index": 1, 15 | "type": "boolean" 16 | }, 17 | { 18 | "index": 2, 19 | "type": "double" 20 | }, 21 | { 22 | "index": 3, 23 | "type": "string" 24 | }, 25 | { 26 | "index": 4, 27 | "type": "date", 28 | "format": "yyyy.MM.dd" 29 | } 30 | ], 31 | "fieldDelimiter": "," 32 | } 33 | } -------------------------------------------------------------------------------- /plugin/writer/accesswriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "accesswriter", 3 | "class": "com.wgzhao.addax.plugin.writer.accesswriter.AccessWriter", 4 | "description": "write to Access database", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/accesswriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "accesswriter", 3 | "parameter": { 4 | "writeMode": "insert", 5 | "username": "root", 6 | "password": "", 7 | "column": [ 8 | "*" 9 | ], 10 | "preSql": [ 11 | "delete from @table" 12 | ], 13 | "postSql": [], 14 | "connection": [ 15 | { 16 | "jdbcUrl": "jdbc:ucanaccess://", 17 | "table": [ 18 | "addax_tbl" 19 | ] 20 | } 21 | ] 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /plugin/writer/cassandrawriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cassandrawriter", 3 | "class": "com.wgzhao.addax.plugin.writer.cassandrawriter.CassandraWriter", 4 | "description": "useScene: prod.
mechanism: use the addax driver to execute insert sql.", 5 | "developer": "alibaba" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/writer/cassandrawriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cassandrawriter", 3 | "parameter": { 4 | "host": "localhost", 5 | "port": 9042, 6 | "useSSL": false, 7 | "keyspace": "stresscql", 8 | "table": "dst", 9 | "batchSize": 10, 10 | "column": [ 11 | "name", 12 | "choice", 13 | "date", 14 | "address", 15 | "dbl", 16 | "lval", 17 | "fval", 18 | "ival", 19 | "uid", 20 | "value", 21 | "listval" 22 | ] 23 | } 24 | } -------------------------------------------------------------------------------- /plugin/writer/clickhousewriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "clickhousewriter", 3 | "class": "com.wgzhao.addax.plugin.writer.clickhousewriter.ClickHouseWriter", 4 | "description": "ClickHouse writer plugin, supports 20.3.5 and later", 5 | "developer": "jiye.tjy" 6 | } -------------------------------------------------------------------------------- /plugin/writer/clickhousewriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "clickhousewriter", 3 | "parameter": { 4 | "username": "username", 5 | "password": "password", 6 | "column": [ 7 | "*" 8 | ], 9 | "connection": [ 10 | { 11 | "jdbcUrl": "jdbc:clickhouse://<HOST>:<PORT>[/<DATABASE>]", 12 | "table": [ 13 | "table1" 14 | ] 15 | } 16 | ], 17 | "preSql": [], 18 | "postSql": [], 19 | "batchSize": 65536, 20 | "writeMode": "insert" 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /plugin/writer/databendwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "databendwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.databendwriter.DatabendWriter", 4 | "description": "write data into Databend", 5 | "developer": "databend" 6 | } -------------------------------------------------------------------------------- /plugin/writer/databendwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "databendwriter", 3 | "parameter": { 4 | "username": "u1", 5 | "password": "123", 6 | "column": [ 7 | "*" 8 | ], 9 | "preSql": [], 10 | "postSql": [], 11 | "connection": [ 12 | { 13 | "jdbcUrl": "jdbc:databend://localhost:8000/addax", 14 | "table": [ 15 | "table1" 16 | ] 17 | } 18 | ] 19 | } 20 | } 21 | -------------------------------------------------------------------------------- /plugin/writer/dbfwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dbfwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.dbfwriter.DbfWriter", 4 | "description": "write a dbf file", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/dbfwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "dbfwriter", 3 | "parameter": { 4 | "column": [ 5 | { 6 | "name": "col1", 7 | "type": "char", 8 | "length": 100 9 | }, 10 | { 11 | "name": "col2", 12 | "type":
"numeric", 13 | "length": 18, 14 | "scale": 0 15 | }, 16 | { 17 | "name": "col3", 18 | "type": "date" 19 | }, 20 | { 21 | "name": "col4", 22 | "type": "logical" 23 | }, 24 | { 25 | "name": "col5", 26 | "type": "char", 27 | "length": 100 28 | } 29 | ], 30 | "fileName": "test.dbf", 31 | "path": "/tmp/out", 32 | "writeMode": "truncate", 33 | "encoding": "GBK" 34 | } 35 | } -------------------------------------------------------------------------------- /plugin/writer/doriswriter/src/main/java/com/wgzhao/addax/plugin/writer/doriswriter/codec/DorisCodec.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | 21 | package com.wgzhao.addax.plugin.writer.doriswriter.codec; 22 | 23 | import com.wgzhao.addax.core.element.Record; 24 | 25 | import java.io.Serializable; 26 | 27 | public interface DorisCodec 28 | extends Serializable 29 | { 30 | 31 | String codec(Record row); 32 | } 33 | -------------------------------------------------------------------------------- /plugin/writer/doriswriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "doriswriter", 3 | "class": "com.wgzhao.addax.plugin.writer.doriswriter.DorisWriter", 4 | "description": "writer data into DorisDB", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/doriswriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "doriswriter", 3 | "parameter": { 4 | "username": "test", 5 | "password": "123456", 6 | "column": [ 7 | "col1", 8 | "col2" 9 | ], 10 | "batchSize": 102400, 11 | "loadUrl": [ 12 | "fe:fe_port" 13 | ], 14 | "connection": [ 15 | { 16 | "jdbcUrl": "jdbc:mysql://fe:fe_port", 17 | "database": "example_db", 18 | "table": [ 19 | "table1" 20 | ] 21 | } 22 | ], 23 | "loadProps": { 24 | "workers": "2" 25 | } 26 | } 27 | } 28 | -------------------------------------------------------------------------------- /plugin/writer/elasticsearchwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "elasticsearchwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.elasticsearchwriter.ESWriter", 4 | "description": "适用于: 生产环境. 
mechanism: TODO", 5 | "developer": "alibaba", 6 | "parameter": { 7 | "endpoint": "", 8 | "accessId": "", 9 | "accessKey": "", 10 | "index": "", 11 | "type": "", 12 | "cleanup": true, 13 | "discovery": false, 14 | "batchSize": 1000, 15 | "trySize": 30, 16 | "timeout": 600000, 17 | "compression": true, 18 | "multiThread": true, 19 | "ignoreWriteError": false, 20 | "ignoreParseError": true, 21 | "alias": "", 22 | "aliasMode": "", 23 | "settings": { 24 | "index": { 25 | "number_of_shards": 1, 26 | "number_of_replicas": 0 27 | } 28 | }, 29 | "splitter": ",", 30 | "column": [], 31 | "dynamic": false 32 | } 33 | } -------------------------------------------------------------------------------- /plugin/writer/excelwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "excelwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.excelwriter.ExcelWriter", 4 | "description": "Write data to Microsoft Excel file", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/excelwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "excelwriter", 3 | "parameter": { 4 | "path": "/tmp/out", 5 | "fileName": "test.xlsx", 6 | "header": [ 7 | "name", 8 | "age" 9 | ] 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /plugin/writer/ftpwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ftpwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.ftpwriter.FtpWriter", 4 | "description": "useScene: test. mechanism: use addax framework to transport data from ftp txt file.
warn: The more you know about the data, the fewer problems you encounter.", 5 | "developer": "alibaba" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/writer/ftpwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "writer": { 3 | "name": "ftpwriter", 4 | "parameter": { 5 | "protocol": "ftp", 6 | "host": "***", 7 | "port": 21, 8 | "username": "xxx", 9 | "password": "xxx", 10 | "timeout": "60000", 11 | "connectPattern": "PASV", 12 | "path": "/tmp/data/", 13 | "fileName": "test", 14 | "writeMode": "truncate|append|nonConflict", 15 | "fieldDelimiter": ",", 16 | "encoding": "UTF-8", 17 | "nullFormat": "null", 18 | "dateFormat": "yyyy-MM-dd", 19 | "fileFormat": "csv", 20 | "useKey": false, 21 | "keyPath": "", 22 | "keyPass": "", 23 | "header": [] 24 | } 25 | } 26 | } -------------------------------------------------------------------------------- /plugin/writer/greenplumwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "greenplumwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.greenplumwriter.GreenplumWriter", 4 | "description": "Write data to Greenplum Database", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/greenplumwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "greenplumwriter", 3 | "parameter": { 4 | "username": "", 5 | "password": "", 6 | "column": [ 7 | "*" 8 | ], 9 | "batchSize": "1024", 10 | "preSql": [ 11 | "truncate table @table" 12 | ], 13 | "postSql": [], 14 | "connection": [ 15 | { 16 | "jdbcUrl": "jdbc:postgresql://localhost:5432/wgzhao", 17 | "table": [ 18 | "gp_test" 19 | ] 20 | } 21 | ] 22 | } 23 | } 24 | -------------------------------------------------------------------------------- /plugin/writer/hanawriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hanawriter", 3 | "class": "com.wgzhao.addax.plugin.writer.hanawriter.HANAWriter", 4 | "description": "Writer to SAP HANA database", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/hanawriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hanawriter", 3 | "parameter": { 4 | "writeMode": "insert", 5 | "username": "root", 6 | "password": "", 7 | "column": [ 8 | "*" 9 | ], 10 | "preSql": [ 11 | "delete from @table" 12 | ], 13 | "connection": [ 14 | { 15 | "jdbcUrl": "jdbc:sap://127.0.0.1:37019/test", 16 | "table": [ 17 | "addax_tbl" 18 | ] 19 | } 20 | ] 21 | } 22 | } -------------------------------------------------------------------------------- /plugin/writer/hbase11xsqlwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase11xsqlwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.hbase11xsqlwriter.HbaseSQLWriter", 4 | "description": "useScene: prod.
mechanism: use hbase sql UPSERT to put data; index tables will be updated too.", 5 | "developer": "alibaba" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/writer/hbase11xsqlwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase11xsqlwriter", 3 | "parameter": { 4 | "column": [], 5 | "haveKerberos": false, 6 | "kerberosPrincipal": "", 7 | "kerberosKeytabFilePath": "", 8 | "batchSize": "100", 9 | "nullMode": "skip", 10 | "hbaseConfig": { 11 | "hbase.zookeeper.quorum": "", 12 | "zookeeper.znode.parent": "" 13 | } 14 | } 15 | } -------------------------------------------------------------------------------- /plugin/writer/hbase11xwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase11xwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.hbase11xwriter.Hbase11xWriter", 4 | "description": "useScene: prod. mechanism: use hbase java api to put data.", 5 | "developer": "alibaba" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/writer/hbase11xwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase11xwriter", 3 | "parameter": { 4 | "hbaseConfig": { 5 | "hbase.rootdir": "", 6 | "hbase.cluster.distributed": "", 7 | "hbase.zookeeper.quorum": "" 8 | }, 9 | "table": "", 10 | "mode": "", 11 | "rowkeyColumn": [ 12 | ], 13 | "column": [ 14 | ], 15 | "versionColumn": { 16 | "index": "", 17 | "value": "" 18 | }, 19 | "encoding": "" 20 | } 21 | } -------------------------------------------------------------------------------- /plugin/writer/hbase20xsqlwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase20xsqlwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.hbase20xsqlwriter.HBase20xSQLWriter", 4 | "description": "useScene: prod. mechanism: use hbase sql UPSERT to put data; index tables will be updated too.", 5 | "developer": "bake" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/writer/hbase20xsqlwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hbase20xsqlwriter", 3 | "parameter": { 4 | "queryServerAddress": "jdbc:phoenix[:thin]:", 5 | "table": "", 6 | "serialization": "PROTOBUF", 7 | "haveKerberos": false, 8 | "kerberosPrincipal": "", 9 | "kerberosKeytabFilePath": "", 10 | "column": [ 11 | ], 12 | "batchSize": "100", 13 | "nullMode": "skip", 14 | "schema": "" 15 | } 16 | } -------------------------------------------------------------------------------- /plugin/writer/hdfswriter/src/main/java/com/wgzhao/addax/plugin/writer/hdfswriter/SupportHiveDataType.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License.
You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 18 | */ 19 | 20 | package com.wgzhao.addax.plugin.writer.hdfswriter; 21 | 22 | public enum SupportHiveDataType 23 | { 24 | TINYINT, 25 | SMALLINT, 26 | INT, 27 | INTEGER, 28 | BIGINT, 29 | FLOAT, 30 | DOUBLE, 31 | TIMESTAMP, 32 | DATE, 33 | DECIMAL, 34 | STRING, 35 | VARCHAR, 36 | CHAR, 37 | LONG, 38 | BOOLEAN, 39 | BINARY, 40 | MAP, 41 | ARRAY 42 | } 43 | -------------------------------------------------------------------------------- /plugin/writer/hdfswriter/src/main/resources/addax_logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/plugin/writer/hdfswriter/src/main/resources/addax_logo.png -------------------------------------------------------------------------------- /plugin/writer/hdfswriter/src/main/resources/parquet_schema.asvo: -------------------------------------------------------------------------------- 1 | { 2 | "type": "record", 3 | "name": "Test", 4 | "aliases": ["addax","parquet"], 5 | "fields" : [ 6 | {"name": "col1", "type": ["null", "long"]}, 7 | {"name": "col2", "type": ["null", "int"]}, 8 | {"name": "col3", "type": ["null", "string"]}, 9 | {"name": "col4", "type": ["null", {"type": "fixed", "logicalType": "decimal", "name": "col4", "precision": 38, "scale": 10, "size": 16}]}, 10 | {"name": "col5", "type": ["null", {"type": "int", "logicalType":"date"}]}, 11 | {"name": "col6", "type": ["null", {"type": "long", "logicalType": "timestamp"}]}, 12 | {"name": "col7", "type": ["null", {"type": "bytes", "logicalType": "binary"}]} 13 | ] 14 | } 15 | 16 | -------------------------------------------------------------------------------- /plugin/writer/hdfswriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "hdfswriter", 3 | "class": "com.wgzhao.addax.plugin.writer.hdfswriter.HdfsWriter", 4 | "description": "useScene: prod. 
mechanism: connect to HDFS via FileSystem and write data concurrently.", 5 | "developer": "alibaba" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/writer/icebergwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "icebergwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.icebergwriter.IcebergWriter", 4 | "description": "write data to iceberg", 5 | "developer": "awol2005ex" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/writer/icebergwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "icebergwriter", 3 | "parameter": { 4 | "tableName": "test.test1", 5 | "catalogType" : "hadoop", 6 | "writeMode": "truncate", 7 | "warehouse": "s3a://pvc-91d1e2cd-4d25-45c9-8613-6c4f7bf0a4cc/iceberg", 8 | "hadoopConfig": { 9 | "fs.s3a.endpoint":"http://localhost:9000", 10 | "fs.s3a.access.key":"gy0dX5lALP176g6c9fYf", 11 | "fs.s3a.secret.key":"ReuUrCzzu5wKWAegtswoHIWV389BYl9AB1ZQbiKr", 12 | "fs.s3a.connection.ssl.enabled":"false", 13 | "fs.s3a.path.style.access":"true", 14 | "fs.s3a.impl":"org.apache.hadoop.fs.s3a.S3AFileSystem" 15 | } 16 | } 17 | } 18 | -------------------------------------------------------------------------------- /plugin/writer/influxdb2writer/src/main/java/com/wgzhao/addax/plugin/writer/influxdb2writer/InfluxDB2Key.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.influxdb2writer; 2 | 3 | import com.wgzhao.addax.core.base.Key; 4 | 5 | public final class InfluxDB2Key 6 | extends Key 7 | { 8 | public static final String BUCKET = "bucket"; 9 | public static final String ORG = "org"; 10 | public static final String TOKEN = "token"; 11 | public static final String TAG = "tag"; 12 | public static final String INTERVAL = "interval"; 13 | } 14 | -------------------------------------------------------------------------------- /plugin/writer/influxdb2writer/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "influxdb2writer", 3 | "class": "com.wgzhao.addax.plugin.writer.influxdb2writer.InfluxDB2Writer", 4 | "description": "write data to InfluxDB table, supports InfluxDB 1.8, 2.0 or later", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/influxdb2writer/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "influxdb2writer", 3 | "parameter": { 4 | "connection": [ 5 | { 6 | "endpoint": "http://localhost:8086", 7 | "org": "com.wgzhao", 8 | "bucket": "test", 9 | "table": "addax_tbl" 10 | } 11 | ], 12 | "token": "YOUR_SECURE_TOKEN", 13 | "tag": [ 14 | { 15 | "location": "east" 16 | }, 17 | { 18 | "lat": 23.123445 19 | } 20 | ], 21 | "interval": "ms", 22 | "column": [ 23 | "c_long", 24 | "c_string", 25 | "c_double" 26 | ], 27 | "batchSize": 1024 28 | } 29 | } -------------------------------------------------------------------------------- /plugin/writer/influxdbwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "influxdbwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.influxdbwriter.InfluxDBWriter", 4 |
"description": "writer data into InfluxDB table", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/influxdbwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "influxdbwriter", 3 | "parameter": { 4 | "connection": [ 5 | { 6 | "endpoint": "http://localhost:8086", 7 | "database": "addax", 8 | "table": "addax_tbl" 9 | } 10 | ], 11 | "connTimeout": 15, 12 | "readTimeout": 20, 13 | "writeTimeout": 20, 14 | "username": "influx", 15 | "password": "influx123", 16 | "column": [ 17 | { 18 | "name": "time", 19 | "type": "timestamp" 20 | }, 21 | { 22 | "name": "user_id", 23 | "type": "int" 24 | }, 25 | { 26 | "name": "user_name", 27 | "type": "string" 28 | }, 29 | { 30 | "name": "salary", 31 | "type": "double" 32 | } 33 | ], 34 | "preSql": [ 35 | "delete from addax_tbl" 36 | ], 37 | "batchSize": 1024, 38 | "retentionPolicy": { 39 | "name": "one_day_only", 40 | "duration": "1d", 41 | "replication": 1 42 | } 43 | } 44 | } -------------------------------------------------------------------------------- /plugin/writer/kafkawriter/src/main/java/com/wgzhao/addax/plugin/writer/kafkawriter/KafkaKey.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed under the Apache License, Version 2.0 (the "License"); 3 | * you may not use this file except in compliance with the License. 4 | * You may obtain a copy of the License at 5 | * 6 | * http://www.apache.org/licenses/LICENSE-2.0 7 | * 8 | * Unless required by applicable law or agreed to in writing, software 9 | * distributed under the License is distributed on an "AS IS" BASIS, 10 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 11 | * See the License for the specific language governing permissions and 12 | * limitations under the License. 
13 | */ 14 | 15 | package com.wgzhao.addax.plugin.writer.kafkawriter; 16 | 17 | import com.wgzhao.addax.core.base.Key; 18 | 19 | public class KafkaKey 20 | extends Key 21 | { 22 | public final static String BROKER_LIST = "brokerList"; 23 | public final static String TOPIC = "topic"; 24 | public final static String PROPERTIES = "properties"; 25 | } 26 | -------------------------------------------------------------------------------- /plugin/writer/kafkawriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafkawriter", 3 | "class": "com.wgzhao.addax.plugin.writer.kafkawriter.KafkaWriter", 4 | "description": "Use it to import data from any relational database into Apache Kafka topics.", 5 | "developer": "wgzhao" 6 | } -------------------------------------------------------------------------------- /plugin/writer/kafkawriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafkawriter", 3 | "parameter": { 4 | "column": [ 5 | "id", 6 | "name" 7 | ], 8 | "batchSize": 1024, 9 | "brokerList": "localhost:9092,localhost:9093,localhost:9094", 10 | "topic": "test", 11 | "partitions": 0, 12 | "properties": {} 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /plugin/writer/kuduwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kuduwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.kuduwriter.KuduWriter", 4 | "description": "useScene: prod. mechanism: use kudu java api to put data.", 5 | "developer": "wgzhao" 6 | } 7 | 8 | -------------------------------------------------------------------------------- /plugin/writer/kuduwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kuduwriter", 3 | "parameter": { 4 | "masterAddress": "127.0.0.1:7051,127.0.0.1:7151,127.0.0.1:7251", 5 | "timeout": 60, 6 | "table": "users", 7 | "writeMode": "upsert", 8 | "column": [ 9 | "user_id", 10 | "user_name", 11 | "salary" 12 | ], 13 | "batchSize": 1024, 14 | "bufferSize": 2048, 15 | "skipFail": false, 16 | "encoding": "UTF-8" 17 | } 18 | } -------------------------------------------------------------------------------- /plugin/writer/mongodbwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mongodbwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.mongodbwriter.MongoDBWriter", 4 | "description": "useScene: prod.
mechanism: connect to mongodb via mongoclient and write data concurrently.", 5 | "developer": "alibaba" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/writer/mongodbwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mongodbwriter", 3 | "parameter": { 4 | "username": "", 5 | "password": "", 6 | "column": [ 7 | { 8 | "name": "unique_id", 9 | "type": "string" 10 | }, 11 | { 12 | "name": "frontcat_id", 13 | "type": "Array", 14 | "splitter": " " 15 | }, 16 | { 17 | "name": "property", 18 | "type": "string" 19 | }, 20 | { 21 | "name": "score", 22 | "type": "int" 23 | } 24 | ], 25 | "upsertInfo": { 26 | "isUpsert": "true", 27 | "upsertKey": "unique_id" 28 | }, 29 | "connection": [ 30 | { 31 | "address": [ 32 | "127.0.0.1:32768" 33 | ], 34 | "database": "tag_per_data", 35 | "collection": "tag_data", 36 | "authDb": "admin" 37 | } 38 | ] 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /plugin/writer/mysqlwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mysqlwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.mysqlwriter.MysqlWriter", 4 | "description": "useScene: prod. mechanism: Jdbc connection to the database, execute insert sql. warn: The more you know about the database, the fewer problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/writer/mysqlwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "mysqlwriter", 3 | "parameter": { 4 | "writeMode": "insert", 5 | "username": "root", 6 | "password": "", 7 | "column": [ 8 | "*" 9 | ], 10 | "session": [ 11 | "set session sql_mode='ANSI'" 12 | ], 13 | "preSql": [ 14 | "delete from @table" 15 | ], 16 | "connection": [ 17 | { 18 | "jdbcUrl": "jdbc:mysql://127.0.0.1:3306/test?useSSL=false&serverTimezone=GMT%2B8", 19 | "table": [ 20 | "addax_tbl" 21 | ], 22 | "driver": "com.mysql.jdbc.Driver" 23 | } 24 | ] 25 | } 26 | } -------------------------------------------------------------------------------- /plugin/writer/oraclewriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oraclewriter", 3 | "class": "com.wgzhao.addax.plugin.writer.oraclewriter.OracleWriter", 4 | "description": "useScene: prod. mechanism: Jdbc connection to the database, execute insert sql.
warn: The more you know about the database, the fewer problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/writer/oraclewriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "oraclewriter", 3 | "parameter": { 4 | "username": "root", 5 | "password": "root", 6 | "column": [ 7 | "id", 8 | "name" 9 | ], 10 | "preSql": [ 11 | "delete from @table" 12 | ], 13 | "connection": [ 14 | { 15 | "jdbcUrl": "jdbc:oracle:thin:@[HOST_NAME]:PORT:[DATABASE_NAME]", 16 | "table": [ 17 | "test" 18 | ] 19 | } 20 | ] 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /plugin/writer/paimonwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "paimonwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.paimonwriter.PaimonWriter", 4 | "description": "write data to paimon", 5 | "developer": "awol2005ex" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/writer/paimonwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "paimonwriter", 3 | "parameter": { 4 | "dbName": "test", 5 | "tableName": "test", 6 | "writeMode": "truncate", 7 | "paimonConfig": { 8 | "warehouse": "file:///tmp/paimon", 9 | "metastore": "filesystem" 10 | } 11 | } 12 | } 13 | -------------------------------------------------------------------------------- /plugin/writer/postgresqlwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "postgresqlwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.postgresqlwriter.PostgresqlWriter", 4 | "description": "useScene: prod. mechanism: Jdbc connection to the database, execute insert sql.
warn: The more you know about the database, the fewer problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/writer/postgresqlwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "postgresqlwriter", 3 | "parameter": { 4 | "username": "user", 5 | "password": "passw0rd", 6 | "writeMode": "insert", 7 | "column": [ 8 | "*" 9 | ], 10 | "preSql": [ 11 | "truncate table @table" 12 | ], 13 | "postSql": [], 14 | "connection": [ 15 | { 16 | "jdbcUrl": "jdbc:postgresql://127.0.0.1:5432/test", 17 | "table": [ 18 | "addax_tbl1" 19 | ] 20 | } 21 | ], 22 | "batchSize": 1024 23 | } 24 | } 25 | -------------------------------------------------------------------------------- /plugin/writer/rdbmswriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rdbmswriter", 3 | "class": "com.wgzhao.addax.plugin.writer.rdbmswriter.RdbmsWriter", 4 | "description": "general RDBMS writer plugin", 5 | "developer": "alibaba", 6 | "drivers": [] 7 | } 8 | -------------------------------------------------------------------------------- /plugin/writer/rdbmswriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rdbmswriter", 3 | "parameter": { 4 | "username": "username", 5 | "password": "password", 6 | "writeMode": "insert", 7 | "column": [ 8 | "*" 9 | ], 10 | "session": [], 11 | "preSql": [], 12 | "connection": [ 13 | { 14 | "jdbcUrl": "jdbc:<DBTYPE>://<HOST>:<PORT>/<DATABASE>", 15 | "table": [ 16 | "addax_table" 17 | ] 18 | } 19 | ] 20 | } 21 | } 22 | -------------------------------------------------------------------------------- /plugin/writer/rediswriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rediswriter", 3 | "class": "com.wgzhao.addax.plugin.writer.rediswriter.RedisWriter", 4 | "description": "Use jedis connect to redis, execute restore command", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/writer/rediswriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "rediswriter", 3 | "parameter": { 4 | "connection": [ 5 | { 6 | "uri": "tcp://127.0.0.1:7001", 7 | "auth": "" 8 | } 9 | ], 10 | "batchSize": 1000, 11 | "timeout": 100000, 12 | "redisCluster": false, 13 | "flushDB": true 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /plugin/writer/s3writer/src/main/java/com/wgzhao/addax/plugin/writer/s3writer/S3Key.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.s3writer; 2 | 3 | import com.wgzhao.addax.core.base.Key; 4 | 5 | public class S3Key extends Key 6 | { 7 | public static final String REGION = "region"; 8 | 9 | public static final String ENDPOINT = "endpoint"; 10 | 11 | public static final String ACCESS_ID = "accessId"; 12 | 13 | public static final String ACCESS_KEY = "accessKey"; 14 | 15 | public static final String BUCKET = "bucket"; 16 | 17 | public static final String OBJECT = "object"; 18 | 19 | // unit: MB 20 | public static final String MAX_FILE_SIZE = "maxFileSize"; 21 | 22 | public static
final String DEFAULT_SUFFIX = "defaultSuffix"; 23 | 24 | public static final String PATH_STYLE_ACCESS_ENABLED = "pathStyleAccessEnabled"; 25 | 26 | public static final String FILE_TYPE = "fileType"; 27 | 28 | public static final String SSL_ENABLED = "sslEnabled"; 29 | 30 | 31 | } 32 | -------------------------------------------------------------------------------- /plugin/writer/s3writer/src/main/java/com/wgzhao/addax/plugin/writer/s3writer/writer/IFormatWriter.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.s3writer.writer; 2 | 3 | import com.wgzhao.addax.core.plugin.RecordReceiver; 4 | import com.wgzhao.addax.core.plugin.TaskPluginCollector; 5 | import com.wgzhao.addax.core.util.Configuration; 6 | 7 | public interface IFormatWriter 8 | { 9 | void init(Configuration config); 10 | 11 | void write(RecordReceiver lineReceiver, Configuration config, 12 | TaskPluginCollector taskPluginCollector); 13 | } 14 | -------------------------------------------------------------------------------- /plugin/writer/s3writer/src/main/java/com/wgzhao/addax/plugin/writer/s3writer/writer/SupportHiveDataType.java: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one 3 | * or more contributor license agreements. See the NOTICE file 4 | * distributed with this work for additional information 5 | * regarding copyright ownership. The ASF licenses this file 6 | * to you under the Apache License, Version 2.0 (the 7 | * "License"); you may not use this file except in compliance 8 | * with the License. You may obtain a copy of the License at 9 | * 10 | * http://www.apache.org/licenses/LICENSE-2.0 11 | * 12 | * Unless required by applicable law or agreed to in writing, 13 | * software distributed under the License is distributed on an 14 | * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY 15 | * KIND, either express or implied. See the License for the 16 | * specific language governing permissions and limitations 17 | * under the License. 
18 | */ 19 | 20 | package com.wgzhao.addax.plugin.writer.s3writer.writer; 21 | 22 | public enum SupportHiveDataType { 23 | TINYINT, 24 | SMALLINT, 25 | INT, 26 | INTEGER, 27 | BIGINT, 28 | FLOAT, 29 | DOUBLE, 30 | TIMESTAMP, 31 | DATE, 32 | DECIMAL, 33 | STRING, 34 | VARCHAR, 35 | CHAR, 36 | LONG, 37 | BOOLEAN, 38 | BINARY 39 | } 40 | -------------------------------------------------------------------------------- /plugin/writer/s3writer/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "s3writer", 3 | "class": "com.wgzhao.addax.plugin.writer.s3writer.S3Writer", 4 | "description": "write data to Amazon AWS S3 service", 5 | "developer": "wgzhao" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/writer/s3writer/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "s3writer", 3 | "parameter": { 4 | "endpoint": "https://s3.amazonaws.com", 5 | "accessId": "xxxxxxxxxxxx", 6 | "accessKey": "xxxxxxxxxxxxxxxxxxxxxxx", 7 | "bucket": "test", 8 | "object": "upload.csv", 9 | "region": "ap-northeast-1", 10 | "encoding": "", 11 | "fieldDelimiter": ",", 12 | "writeMode": "truncate" 13 | } 14 | } 15 | -------------------------------------------------------------------------------- /plugin/writer/sqlitewriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlitewriter", 3 | "class": "com.wgzhao.addax.plugin.writer.sqlitewriter.SqliteWriter", 4 | "description": "write a sqlite db file", 5 | "developer": "wgzhao" 6 | } 7 | -------------------------------------------------------------------------------- /plugin/writer/sqlitewriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlitewriter", 3 | "parameter": { 4 | "writeMode": "insert", 5 | "column": [ 6 | "*" 7 | ], 8 | "preSql": [ 9 | "delete from @table" 10 | ], 11 | "connection": [ 12 | { 13 | "jdbcUrl": "jdbc:sqlite:/tmp/writer.sqlite3", 14 | "table": [ 15 | "addax_tbl" 16 | ] 17 | } 18 | ] 19 | } 20 | } -------------------------------------------------------------------------------- /plugin/writer/sqlserverwriter/src/main/resources/plugin.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlserverwriter", 3 | "class": "com.wgzhao.addax.plugin.writer.sqlserverwriter.SqlServerWriter", 4 | "description": "useScene: prod. mechanism: Jdbc connection to the database, execute insert sql.
warn: The more you know about the database, the fewer problems you encounter.", 5 | "developer": "alibaba" 6 | } -------------------------------------------------------------------------------- /plugin/writer/sqlserverwriter/src/main/resources/plugin_job_template.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "sqlserverwriter", 3 | "parameter": { 4 | "username": "root", 5 | "password": "root", 6 | "column": [ 7 | "db_id", 8 | "db_type", 9 | "db_ip", 10 | "db_port", 11 | "db_role", 12 | "db_name", 13 | "db_username", 14 | "db_password", 15 | "db_modify_time", 16 | "db_modify_user", 17 | "db_description", 18 | "db_tddl_info" 19 | ], 20 | "preSql": [ 21 | "delete from @table where db_id = -1;" 22 | ], 23 | "postSql": [ 24 | "update @table set db_modify_time = now() where db_id = 1;" 25 | ], 26 | "connection": [ 27 | { 28 | "table": [ 29 | "db_info_for_writer" 30 | ], 31 | "jdbcUrl": "jdbc:sqlserver://[HOST_NAME]:PORT;DatabaseName=[DATABASE_NAME]" 32 | } 33 | ] 34 | } 35 | } 36 | -------------------------------------------------------------------------------- /plugin/writer/starrockswriter/package.xml: -------------------------------------------------------------------------------- 1 | <assembly> 2 | <id>release</id> 3 | <formats> 4 | <format>dir</format> 5 | </formats> 6 | <includeBaseDirectory>false</includeBaseDirectory> 7 | <fileSets> 8 | <fileSet> 9 | <directory>src/main/resources</directory> 10 | <includes> 11 | <include>*.json</include> 12 | </includes> 13 | <outputDirectory>plugin/writer/${project.artifactId}</outputDirectory> 14 | </fileSet> 15 | <fileSet> 16 | <directory>target/</directory> 17 | <includes> 18 | <include>${project.artifactId}-${project.version}.jar</include> 19 | </includes> 20 | <outputDirectory>plugin/writer/${project.artifactId}</outputDirectory> 21 | </fileSet> 22 | </fileSets> 23 | <dependencySets> 24 | <dependencySet> 25 | <useProjectArtifact>false</useProjectArtifact> 26 | <outputDirectory>plugin/writer/${project.artifactId}/libs</outputDirectory> 27 | <scope>runtime</scope> 28 | <excludes> 29 | <exclude>com.wgzhao.addax:*</exclude> 30 | </excludes> 31 | </dependencySet> 32 | </dependencySets> 33 | </assembly> -------------------------------------------------------------------------------- /plugin/writer/starrockswriter/src/main/java/com/wgzhao/addax/plugin/writer/starrockswriter/manager/StarRocksFlushTuple.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.starrockswriter.manager; 2 | 3 | import java.util.List; 4 | 5 | public class StarRocksFlushTuple 6 | { 7 | 8 | private String label; 9 | private final Long bytes; 10 | private final List<byte[]> rows; 11 | 12 | public StarRocksFlushTuple(String label, Long bytes, List<byte[]> rows) 13 | { 14 | this.label = label; 15 | this.bytes = bytes; 16 | this.rows = rows; 17 | } 18 | 19 | public String getLabel() {return label;} 20 | 21 | public void setLabel(String label) {this.label = label;} 22 | 23 | public Long getBytes() {return bytes;} 24 | 25 | public List<byte[]> getRows() {return rows;} 26 | } 27 | -------------------------------------------------------------------------------- /plugin/writer/starrockswriter/src/main/java/com/wgzhao/addax/plugin/writer/starrockswriter/manager/StarRocksStreamLoadFailedException.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.starrockswriter.manager; 2 | 3 | import java.io.IOException; 4 | import java.util.Map; 5 | 6 | public class StarRocksStreamLoadFailedException 7 | extends IOException 8 | { 9 | private static final long serialVersionUID = 1L; 10 | 11 | private final Map<String, Object> response; 12 | private boolean reCreateLabel; 13 | 14 | public StarRocksStreamLoadFailedException(String message, Map<String, Object> response) 15 | { 16 | super(message); 17 | this.response = response; 18 | } 19 | 20 | public StarRocksStreamLoadFailedException(String message, Map<String, Object> response, boolean reCreateLabel) 21 | { 22 | super(message); 23 | this.response = response; 24 |
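// when reCreateLabel is true, the caller is expected to generate a new stream-load label and retry (exposed via needReCreateLabel())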
this.reCreateLabel = reCreateLabel; 25 | } 26 | 27 | public Map<String, Object> getFailedResponse() 28 | { 29 | return response; 30 | } 31 | 32 | public boolean needReCreateLabel() 33 | { 34 | return reCreateLabel; 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /plugin/writer/starrockswriter/src/main/java/com/wgzhao/addax/plugin/writer/starrockswriter/row/StarRocksBaseSerializer.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.starrockswriter.row; 2 | 3 | import com.wgzhao.addax.core.element.Column; 4 | 5 | public class StarRocksBaseSerializer 6 | { 7 | 8 | protected String fieldConvertion(Column col) 9 | { 10 | if (null == col.getRawData() || Column.Type.NULL == col.getType()) { 11 | return null; 12 | } 13 | if (Column.Type.BOOL == col.getType()) { 14 | return String.valueOf(col.asLong()); 15 | } 16 | if (Column.Type.BYTES == col.getType()) { 17 | byte[] bts = (byte[]) col.getRawData(); 18 | long value = 0; 19 | for (int i = 0; i < bts.length; i++) { 20 | value += (bts[bts.length - i - 1] & 0xffL) << (8 * i); // interpret the byte array as a big-endian unsigned integer 21 | } 22 | return String.valueOf(value); 23 | } 24 | return col.asString(); 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /plugin/writer/starrockswriter/src/main/java/com/wgzhao/addax/plugin/writer/starrockswriter/row/StarRocksCsvSerializer.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.starrockswriter.row; 2 | 3 | import com.wgzhao.addax.core.element.Record; 4 | 5 | public class StarRocksCsvSerializer 6 | extends StarRocksBaseSerializer 7 | implements StarRocksISerializer 8 | { 9 | 10 | private static final long serialVersionUID = 1L; 11 | 12 | private final String columnSeparator; 13 | 14 | public StarRocksCsvSerializer(String sp) 15 | { 16 | this.columnSeparator = StarRocksDelimiterParser.parse(sp, "\t"); 17 | } 18 | 19 | @Override 20 | public String serialize(Record row) 21 | { 22 | StringBuilder sb = new StringBuilder(); 23 | for (int i = 0; i < row.getColumnNumber(); i++) { 24 | String value = fieldConvertion(row.getColumn(i)); 25 | sb.append(null == value ?
"\\N" : value); 26 | if (i < row.getColumnNumber() - 1) { 27 | sb.append(columnSeparator); 28 | } 29 | } 30 | return sb.toString(); 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /plugin/writer/starrockswriter/src/main/java/com/wgzhao/addax/plugin/writer/starrockswriter/row/StarRocksISerializer.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.starrockswriter.row; 2 | 3 | import com.wgzhao.addax.core.element.Record; 4 | 5 | import java.io.Serializable; 6 | 7 | public interface StarRocksISerializer 8 | extends Serializable 9 | { 10 | 11 | String serialize(Record row); 12 | } 13 | -------------------------------------------------------------------------------- /plugin/writer/starrockswriter/src/main/java/com/wgzhao/addax/plugin/writer/starrockswriter/row/StarRocksJsonSerializer.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.starrockswriter.row; 2 | 3 | import com.alibaba.fastjson2.JSON; 4 | import com.wgzhao.addax.core.element.Record; 5 | 6 | import java.util.HashMap; 7 | import java.util.List; 8 | import java.util.Map; 9 | 10 | public class StarRocksJsonSerializer 11 | extends StarRocksBaseSerializer 12 | implements StarRocksISerializer 13 | { 14 | 15 | private static final long serialVersionUID = 1L; 16 | 17 | private final List fieldNames; 18 | 19 | public StarRocksJsonSerializer(List fieldNames) 20 | { 21 | this.fieldNames = fieldNames; 22 | } 23 | 24 | @Override 25 | public String serialize(Record row) 26 | { 27 | if (null == fieldNames) { 28 | return ""; 29 | } 30 | Map rowMap = new HashMap<>(fieldNames.size()); 31 | int idx = 0; 32 | for (String fieldName : fieldNames) { 33 | rowMap.put(fieldName, fieldConvertion(row.getColumn(idx))); 34 | idx++; 35 | } 36 | return JSON.toJSONString(rowMap); 37 | } 38 | } 39 | -------------------------------------------------------------------------------- /plugin/writer/starrockswriter/src/main/java/com/wgzhao/addax/plugin/writer/starrockswriter/row/StarRocksSerializerFactory.java: -------------------------------------------------------------------------------- 1 | package com.wgzhao.addax.plugin.writer.starrockswriter.row; 2 | 3 | import com.wgzhao.addax.plugin.writer.starrockswriter.StarRocksWriterOptions; 4 | 5 | import java.util.Map; 6 | 7 | public class StarRocksSerializerFactory 8 | { 9 | 10 | private StarRocksSerializerFactory() {} 11 | 12 | public static StarRocksISerializer createSerializer(StarRocksWriterOptions writerOptions) 13 | { 14 | if (StarRocksWriterOptions.StreamLoadFormat.CSV.equals(writerOptions.getStreamLoadFormat())) { 15 | Map props = writerOptions.getLoadProps(); 16 | return new StarRocksCsvSerializer(null == props || !props.containsKey("column_separator") ? 
--------------------------------------------------------------------------------
/plugin/writer/starrockswriter/src/main/java/com/wgzhao/addax/plugin/writer/starrockswriter/row/StarRocksSerializerFactory.java:
--------------------------------------------------------------------------------
package com.wgzhao.addax.plugin.writer.starrockswriter.row;

import com.wgzhao.addax.plugin.writer.starrockswriter.StarRocksWriterOptions;

import java.util.Map;

public class StarRocksSerializerFactory
{

    private StarRocksSerializerFactory() {}

    public static StarRocksISerializer createSerializer(StarRocksWriterOptions writerOptions)
    {
        if (StarRocksWriterOptions.StreamLoadFormat.CSV.equals(writerOptions.getStreamLoadFormat())) {
            Map<String, Object> props = writerOptions.getLoadProps();
            return new StarRocksCsvSerializer(null == props || !props.containsKey("column_separator")
                    ? null : String.valueOf(props.get("column_separator")));
        }
        if (StarRocksWriterOptions.StreamLoadFormat.JSON.equals(writerOptions.getStreamLoadFormat())) {
            return new StarRocksJsonSerializer(writerOptions.getColumns());
        }
        throw new RuntimeException("Failed to create row serializer, unsupported `format` from stream load properties.");
    }
}

--------------------------------------------------------------------------------
/plugin/writer/starrockswriter/src/main/resources/plugin.json:
--------------------------------------------------------------------------------
{
  "name": "starrockswriter",
  "class": "com.wgzhao.addax.plugin.writer.starrockswriter.StarRocksWriter",
  "description": "useScene: prod. mechanism: StarRocksStreamLoad. warn: The more you know about the database, the fewer problems you encounter.",
  "developer": "starrocks"
}

--------------------------------------------------------------------------------
/plugin/writer/starrockswriter/src/main/resources/plugin_job_template.json:
--------------------------------------------------------------------------------
{
  "name": "starrockswriter",
  "parameter": {
    "username": "test",
    "password": "123456",
    "database": "example_db",
    "table": "table1",
    "column": [
      "siteid",
      "citycode",
      "username",
      "pv"
    ],
    "jdbcUrl": "jdbc:mysql://172.28.17.100:9030/",
    "loadUrl": [
      "172.28.17.100:8030",
      "172.28.17.100:8030"
    ],
    "loadProps": {
      "column_separator": "\\x01",
      "row_delimiter": "\\x02"
    }
  }
}
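The template above sets column_separator to the escape sequence \x01; StarRocksDelimiterParser (not included in this dump) presumably decodes such sequences and falls back to its second argument when nothing is configured, which is why the factory may pass null. A standalone sketch of that decode-with-fallback pattern (hypothetical; the real parser may differ):

public class DelimiterDemo
{
    // decode \xNN escape sequences, falling back to a default when unset
    static String parseDelimiter(String sp, String def)
    {
        if (sp == null || sp.isEmpty()) {
            return def;
        }
        StringBuilder out = new StringBuilder();
        for (int i = 0; i < sp.length(); i++) {
            if (sp.charAt(i) == '\\' && i + 3 < sp.length() && sp.charAt(i + 1) == 'x') {
                out.append((char) Integer.parseInt(sp.substring(i + 2, i + 4), 16));
                i += 3;
            }
            else {
                out.append(sp.charAt(i));
            }
        }
        return out.toString();
    }

    public static void main(String[] args)
    {
        System.out.println((int) parseDelimiter("\\x01", "\t").charAt(0)); // 1
        System.out.println(parseDelimiter(null, "\t").equals("\t"));       // true
    }
}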
--------------------------------------------------------------------------------
/plugin/writer/streamwriter/pom.xml:
--------------------------------------------------------------------------------
<?xml version="1.0" encoding="UTF-8"?>
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <modelVersion>4.0.0</modelVersion>

    <parent>
        <groupId>com.wgzhao.addax</groupId>
        <artifactId>addax-all</artifactId>
        <version>5.1.3-SNAPSHOT</version>
        <relativePath>../../../pom.xml</relativePath>
    </parent>

    <artifactId>streamwriter</artifactId>
    <name>stream-writer</name>
    <description>stream(console) writer plugin for Addax</description>
    <packaging>jar</packaging>

    <dependencies>
        <dependency>
            <groupId>com.wgzhao.addax</groupId>
            <artifactId>addax-core</artifactId>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
            </plugin>
        </plugins>
    </build>
</project>

--------------------------------------------------------------------------------
/plugin/writer/streamwriter/src/main/resources/plugin.json:
--------------------------------------------------------------------------------
{
  "name": "streamwriter",
  "class": "com.wgzhao.addax.plugin.writer.streamwriter.StreamWriter",
  "description": {
    "useScene": "only for developer test.",
    "mechanism": "use addax framework to transport data to stream.",
    "warn": "Never use it in your real job."
  },
  "developer": "alibaba"
}

--------------------------------------------------------------------------------
/plugin/writer/streamwriter/src/main/resources/plugin_job_template.json:
--------------------------------------------------------------------------------
{
  "name": "streamwriter",
  "parameter": {
    "encoding": "utf-8",
    "print": true
  }
}

--------------------------------------------------------------------------------
/plugin/writer/sybasewriter/src/main/resources/plugin.json:
--------------------------------------------------------------------------------
{
  "name": "sybasewriter",
  "class": "com.wgzhao.addax.plugin.writer.sybasewriter.SybaseWriter",
  "description": "write to Sybase Database Server",
  "developer": "wgzhao"
}

--------------------------------------------------------------------------------
/plugin/writer/sybasewriter/src/main/resources/plugin_job_template.json:
--------------------------------------------------------------------------------
{
  "name": "sybasewriter",
  "parameter": {
    "username": "sa",
    "password": "password",
    "column": [
      "*"
    ],
    "connection": [
      {
        "jdbcUrl": [
          "jdbc:sybase:Tds:127.0.0.1:5000/test"
        ],
        "table": [
          "addax_writer"
        ]
      }
    ],
    "where": ""
  }
}

--------------------------------------------------------------------------------
/plugin/writer/tdenginewriter/src/main/java/com/wgzhao/addax/plugin/writer/tdenginewriter/DataHandler.java:
--------------------------------------------------------------------------------
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.wgzhao.addax.plugin.writer.tdenginewriter;

import com.wgzhao.addax.core.plugin.RecordReceiver;
import com.wgzhao.addax.core.plugin.TaskPluginCollector;

public interface DataHandler
{
    int handle(RecordReceiver lineReceiver, TaskPluginCollector collector);
}
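DataHandler above is the seam the TDengine writer task delegates to: pull records off the RecordReceiver, write them out, and return a count. A minimal sketch of an implementation (hypothetical; it assumes getFromReader() returns null at end of stream, as in the Addax core API):

package com.wgzhao.addax.plugin.writer.tdenginewriter;

import com.wgzhao.addax.core.plugin.RecordReceiver;
import com.wgzhao.addax.core.plugin.TaskPluginCollector;

// hypothetical handler that simply drains the receiver and counts records
public class CountingDataHandler
        implements DataHandler
{
    @Override
    public int handle(RecordReceiver lineReceiver, TaskPluginCollector collector)
    {
        int count = 0;
        while (lineReceiver.getFromReader() != null) {
            count++;
        }
        return count;
    }
}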
--------------------------------------------------------------------------------
/plugin/writer/tdenginewriter/src/main/java/com/wgzhao/addax/plugin/writer/tdenginewriter/TDKey.java:
--------------------------------------------------------------------------------
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.wgzhao.addax.plugin.writer.tdenginewriter;

import com.wgzhao.addax.core.base.Key;

public class TDKey
        extends Key
{
    public static final String IGNORE_TAGS_UNMATCHED = "ignoreTagsUnmatched";
}

--------------------------------------------------------------------------------
/plugin/writer/tdenginewriter/src/main/java/com/wgzhao/addax/plugin/writer/tdenginewriter/TableMeta.java:
--------------------------------------------------------------------------------
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.wgzhao.addax.plugin.writer.tdenginewriter;

public class TableMeta
{
    TableType tableType;
    String tbname;      // table name
    int columns;        // number of data columns
    int tags;           // number of tag columns
    int tables;         // number of subtables under a super table
    String stable_name; // owning super table, if any

    @Override
    public String toString()
    {
        return "TableMeta{" +
                "tableType=" + tableType +
                ", tbname='" + tbname + '\'' +
                ", columns=" + columns +
                ", tags=" + tags +
                ", tables=" + tables +
                ", stable_name='" + stable_name + '\'' +
                '}';
    }
}

--------------------------------------------------------------------------------
/plugin/writer/tdenginewriter/src/main/java/com/wgzhao/addax/plugin/writer/tdenginewriter/TableType.java:
--------------------------------------------------------------------------------
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.wgzhao.addax.plugin.writer.tdenginewriter;

public enum TableType
{
    // TDengine table kinds: super table, subtable (child of a super table), normal table
    SUP_TABLE, SUB_TABLE, NML_TABLE
}
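The TableType values distinguish TDengine super tables (SUP_TABLE), subtables bound to a super table (SUB_TABLE), and ordinary tables (NML_TABLE). A minimal same-package sketch (hypothetical test code; the meaning of the counters is assumed) showing how TableMeta might describe a super table:

package com.wgzhao.addax.plugin.writer.tdenginewriter;

public class TableMetaDemo
{
    public static void main(String[] args)
    {
        TableMeta meta = new TableMeta();
        meta.tableType = TableType.SUP_TABLE;
        meta.tbname = "meters"; // hypothetical super table name
        meta.columns = 4;       // data columns
        meta.tags = 2;          // tag columns
        meta.tables = 10;       // subtables under this super table (assumed meaning)
        System.out.println(meta);
    }
}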
--------------------------------------------------------------------------------
/plugin/writer/tdenginewriter/src/main/java/com/wgzhao/addax/plugin/writer/tdenginewriter/TimestampPrecision.java:
--------------------------------------------------------------------------------
/*
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.wgzhao.addax.plugin.writer.tdenginewriter;

public enum TimestampPrecision
{
    // TDengine timestamp precisions: milliseconds, microseconds, nanoseconds
    MILLISEC, MICROSEC, NANOSEC
}

--------------------------------------------------------------------------------
/plugin/writer/tdenginewriter/src/main/libs/libtaos.so.2.0.16.0:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/wgzhao/Addax/13bc07a72b56ba54f9d9bbacf898b9e144245b48/plugin/writer/tdenginewriter/src/main/libs/libtaos.so.2.0.16.0

--------------------------------------------------------------------------------
/plugin/writer/tdenginewriter/src/main/resources/plugin.json:
--------------------------------------------------------------------------------
{
  "name": "tdenginewriter",
  "class": "com.wgzhao.addax.plugin.writer.tdenginewriter.TDengineWriter",
  "description": "Write data into TDengine database",
  "developer": "wgzhao"
}

--------------------------------------------------------------------------------
/plugin/writer/tdenginewriter/src/main/resources/plugin_job_template.json:
--------------------------------------------------------------------------------
{
  "name": "tdenginewriter",
  "parameter": {
    "username": "root",
    "password": "taosdata",
    "column": [
      "ts",
      "name",
      "file_size",
      "file_date",
      "flag_open",
      "memo"
    ],
    "connection": [
      {
        "jdbcUrl": "jdbc:TAOS://127.0.0.1:6030/test",
        "table": [
          "addax_test"
        ]
      }
    ]
  }
}

--------------------------------------------------------------------------------
/plugin/writer/txtfilewriter/src/main/resources/plugin.json:
--------------------------------------------------------------------------------
{
  "name": "txtfilewriter",
  "class": "com.wgzhao.addax.plugin.writer.txtfilewriter.TxtFileWriter",
  "description": "useScene: test. mechanism: use addax framework to transport data to txt file. warn: The more you know about the data, the fewer problems you encounter.",
  "developer": "alibaba"
}

--------------------------------------------------------------------------------
/plugin/writer/txtfilewriter/src/main/resources/plugin_job_template.json:
--------------------------------------------------------------------------------
{
  "name": "txtfilewriter",
  "parameter": {
    "path": "/tmp/result",
    "fileName": "test",
    "writeMode": "truncate",
    "dateFormat": "yyyy-MM-dd"
  }
}

--------------------------------------------------------------------------------