├── .gitattributes
├── .github
│   ├── CODEOWNERS
│   ├── dependabot.yml
│   └── workflows
│       ├── codeql-analysis.yml
│       ├── create_release.yml
│       ├── main_push_and_pull_request_workflow.yml
│       └── release_pr_workflow.yml
├── .gitignore
├── CODE_OF_CONDUCT.md
├── CONTRIBUTING.md
├── LICENSE
├── README.md
├── SECURITY.md
├── build.gradle
├── checkstyle
│   ├── checkstyle.xml
│   ├── import-control.xml
│   ├── java.header
│   └── suppressions.xml
├── gradle.properties
├── gradle
│   └── wrapper
│       ├── gradle-wrapper.jar
│       └── gradle-wrapper.properties
├── gradlew
├── gradlew.bat
├── licenses
│   └── LICENSE-aws.txt
├── notices
│   └── NOTICE-aws.txt
├── settings.gradle
└── src
    ├── integration-test
    │   ├── java
    │   │   └── io
    │   │       └── aiven
    │   │           └── kafka
    │   │               └── connect
    │   │                   ├── AvroIntegrationTest.java
    │   │                   ├── AvroParquetIntegrationTest.java
    │   │                   ├── ConnectRunner.java
    │   │                   ├── IntegrationBase.java
    │   │                   ├── IntegrationTest.java
    │   │                   ├── ParquetIntegrationTest.java
    │   │                   ├── ParquetUtils.java
    │   │                   └── s3
    │   │                       └── SchemaRegistryContainer.java
    │   └── resources
    │       └── logback-test.xml
    ├── main
    │   ├── java
    │   │   └── io
    │   │       └── aiven
    │   │           └── kafka
    │   │               └── connect
    │   │                   └── s3
    │   │                       ├── AivenKafkaConnectS3SinkConnector.java
    │   │                       ├── OldFullKeyFormatters.java
    │   │                       ├── S3OutputStream.java
    │   │                       ├── S3SinkTask.java
    │   │                       ├── Version.java
    │   │                       └── config
    │   │                           ├── AwsAccessSecret.java
    │   │                           ├── AwsCredentialProviderFactory.java
    │   │                           ├── AwsStsEndpointConfig.java
    │   │                           ├── AwsStsRole.java
    │   │                           ├── S3SinkConfig.java
    │   │                           └── S3SinkConfigDef.java
    │   └── resources
    │       └── s3-connector-for-apache-kafka-version.properties
    └── test
        ├── java
        │   └── io
        │       └── aiven
        │           └── kafka
        │               └── connect
        │                   └── s3
        │                       ├── AwsCredentialProviderFactoryTest.java
        │                       ├── S3OutputStreamTest.java
        │                       ├── S3SinkTaskTest.java
        │                       ├── config
        │                       │   ├── S3SinkConfigTest.java
        │                       │   └── S3SinkCredentialsConfigTest.java
        │                       └── testutils
        │                           ├── BucketAccessor.java
        │                           ├── IndexesToString.java
        │                           ├── KeyValueGenerator.java
        │                           └── KeyValueMessage.java
        └── resources
            ├── blns.txt
            └── logback-test.xml
/.gitattributes:
--------------------------------------------------------------------------------
1 | #
2 | # https://help.github.com/articles/dealing-with-line-endings/
3 | #
4 | # These are explicitly Windows files and should use crlf
5 | *.bat text eol=crlf
6 |
7 |
--------------------------------------------------------------------------------
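The single rule above forces CRLF line endings for Windows batch files
regardless of each developer's core.autocrlf setting. A quick local sanity
check (not part of the repository) that the attribute resolves as expected:

    $ git check-attr text eol -- gradlew.bat
    gradlew.bat: text: set
    gradlew.bat: eol: crlf
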
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @aiven-open/team-helpful-husky @aiven-open/aiven-open-source
2 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: "gradle"
4 | directory: "/"
5 | schedule:
6 | interval: "monthly"
7 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | # For most projects, this workflow file will not need changing; you simply need
2 | # to commit it to your repository.
3 | #
4 | # You may wish to alter this file to override the set of languages analyzed,
5 | # or to provide custom queries or build logic.
6 | #
7 | # ******** NOTE ********
8 | # We have attempted to detect the languages in your repository. Please check
9 | # the `language` matrix defined below to confirm you have the correct set of
10 | # supported CodeQL languages.
11 | #
12 | name: "CodeQL"
13 |
14 | on:
15 | push:
16 | branches: [main]
17 | pull_request:
18 | # The branches below must be a subset of the branches above
19 | branches: [main]
20 | schedule:
21 | - cron: "42 20 * * 6"
22 |
23 | permissions:
24 | actions: read
25 | contents: read
26 | security-events: write
27 |
28 | jobs:
29 | analyze:
30 | name: Analyze
31 | runs-on: ubuntu-latest
32 |
33 | strategy:
34 | fail-fast: false
35 | matrix:
36 | language: ["java"]
37 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
38 | # Learn more:
39 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
40 |
41 | steps:
42 | - name: Checkout repository
43 | uses: actions/checkout@v2
44 |
45 | # Initializes the CodeQL tools for scanning.
46 | - name: Initialize CodeQL
47 | uses: github/codeql-action/init@v2
48 | with:
49 | languages: ${{ matrix.language }}
50 | # If you wish to specify custom queries, you can do so here or in a config file.
51 | # By default, queries listed here will override any specified in a config file.
52 | # Prefix the list here with "+" to use these queries and those in the config file.
53 | # queries: ./path/to/local/query, your-org/your-repo/queries@main
54 |
55 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
56 | # If this step fails, then you should remove it and run the build manually (see below)
57 | #- name: Autobuild
58 | # uses: github/codeql-action/autobuild@v1
59 |
60 | # ℹ️ Command-line programs to run using the OS shell.
61 | # 📚 https://git.io/JvXDl
62 |
63 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
64 | # and modify them (or add more) to build your code if your project
65 | # uses a compiled language
66 |
67 | #- run: |
68 | # make bootstrap
69 | # make release
70 | - name: Set up JDK 11
71 | uses: actions/setup-java@v1
72 | with:
73 | java-version: 11
74 |
75 | - name: Build with Gradle
76 | run: ./gradlew build
77 |
78 | - name: Perform CodeQL Analysis
79 | uses: github/codeql-action/analyze@v2
80 |
--------------------------------------------------------------------------------
/.github/workflows/create_release.yml:
--------------------------------------------------------------------------------
1 | name: Create release
2 |
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | commit_hash:
7 | description: "Hash of 'Release version x.y.z' commit"
8 | required: true
9 |
10 | permissions:
11 | contents: write
12 | pull-requests: write
13 | issues: write
14 |
15 | jobs:
16 | build:
17 | name: Create Release
18 | runs-on: ubuntu-latest
19 | steps:
20 | - name: Setup Java SDK
21 | uses: actions/setup-java@v1.4.3
22 | with:
23 | java-version: 11
24 |
25 | - name: Checkout code
26 | uses: actions/checkout@v2
27 | with:
28 | ref: ${{ github.event.inputs.commit_hash }}
29 |
30 | - name: Check commit title and extract version
31 | run: |
32 | export commit_title=$(git log --pretty=format:%s -1 ${{ github.event.inputs.commit_hash }})
33 | echo "Commit title: $commit_title"
34 | if [[ $commit_title =~ ^Release\ version\ [0-9]*\.[0-9]*\.[0-9]*$ ]]; then
35 | echo "Valid commit title"
36 | else
37 | echo "Invalid commit title"
38 | exit 1
39 | fi
40 | export version=$(echo ${commit_title} | sed s/^Release\ version\ //g)
41 | echo "Will use version ${version}"
42 | echo "version=${version}" >> $GITHUB_ENV
43 |
44 | - name: Build
45 | run: |
46 | ./gradlew distTar distZip
47 |
48 | export tar_file=$(ls ./build/distributions/ | grep tar)
49 | export zip_file=$(ls ./build/distributions/ | grep zip)
50 | echo tar_file=${tar_file} >> $GITHUB_ENV
51 | echo zip_file=${zip_file} >> $GITHUB_ENV
52 |
53 | echo tar_path=`realpath ./build/distributions/${tar_file}` >> $GITHUB_ENV
54 | echo zip_path=`realpath ./build/distributions/${zip_file}` >> $GITHUB_ENV
55 |
56 | - name: Create tag
57 | run: |
58 | git config --local user.name "GitHub Action"
59 | git config --local user.email "action@github.com"
60 | git tag -a "v${{ env.version }}" -m "Release version ${{ env.version }}"
61 | git push origin "v${{ env.version }}"
62 |
63 | - name: Create release draft
64 | id: create_release
65 | uses: actions/create-release@v1
66 | env:
67 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
68 | with:
69 | tag_name: "v${{ env.version }}"
70 | release_name: "v${{ env.version }}"
71 | commitish: ${{ github.event.inputs.commit_hash }}
72 | body: |
73 | *Fill in*
74 | draft: true
75 | prerelease: false
76 |
77 | - name: Upload tar
78 | uses: actions/upload-release-asset@v1
79 | env:
80 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
81 | with:
82 | upload_url: ${{ steps.create_release.outputs.upload_url }}
83 | asset_path: ${{ env.tar_path }}
84 | asset_name: ${{ env.tar_file }}
85 | asset_content_type: application/tar
86 |
87 | - name: Upload zip
88 | uses: actions/upload-release-asset@v1
89 | env:
90 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
91 | with:
92 | upload_url: ${{ steps.create_release.outputs.upload_url }}
93 | asset_path: ${{ env.zip_path }}
94 | asset_name: ${{ env.zip_file }}
95 | asset_content_type: application/zip
96 |
--------------------------------------------------------------------------------
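The "Check commit title and extract version" step accepts only commits titled
exactly "Release version x.y.z". A local equivalent of that check, useful
before dispatching the workflow (requires bash; <commit_hash> is a placeholder):

    commit_title=$(git log --pretty=format:%s -1 <commit_hash>)
    if [[ $commit_title =~ ^Release\ version\ [0-9]*\.[0-9]*\.[0-9]*$ ]]; then
        # strip the "Release version " prefix, leaving e.g. "2.16.0"
        echo "Will use version ${commit_title#Release version }"
    else
        echo "Invalid commit title: ${commit_title}" >&2
    fi
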
/.github/workflows/main_push_and_pull_request_workflow.yml:
--------------------------------------------------------------------------------
1 | # The workflow to check main after push or pull request.
2 | name: main and pull request checks
3 | on:
4 | push:
5 | branches: [main]
6 | pull_request:
7 | branches: [main]
8 | jobs:
9 | build:
10 | strategy:
11 | matrix:
12 | java-version: [11, 17]
13 | runs-on: [ubuntu-latest]
14 | name: Build on ${{ matrix.runs-on }} with jdk ${{ matrix.java-version }}
15 | runs-on: ${{ matrix.runs-on }}
16 | steps:
17 | - name: Checkout code
18 | uses: actions/checkout@v3.5.2
19 | - name: Set up JDK ${{ matrix.java-version }}
20 | uses: actions/setup-java@v3.11.0
21 | with:
22 | java-version: ${{ matrix.java-version }}
23 | distribution: "temurin"
24 | cache: gradle
25 | - name: Build with Gradle
26 | run: ./gradlew build integrationTest
27 |
--------------------------------------------------------------------------------
/.github/workflows/release_pr_workflow.yml:
--------------------------------------------------------------------------------
1 | # The workflow to create PRs with release commits.
2 | name: Create release PR
3 | on:
4 | workflow_dispatch:
5 | inputs:
6 | release_version:
7 | description: "Release version '0.1.2' (without 'v')"
8 | required: true
9 | snapshot_version:
10 | description: "Snapshot version '0.2.0-SNAPSHOT' (without 'v')"
11 | required: true
12 |
13 | permissions:
14 | contents: write
15 | pull-requests: write
16 | issues: write
17 |
18 | jobs:
19 | create_release_pr:
20 | name: Create release PR (job)
21 | runs-on: ubuntu-latest
22 | steps:
23 | - name: Check versions
24 | run: |
25 | echo "Checking release version..."
26 | if echo ${{ github.event.inputs.release_version }} | grep --invert-match '^[0-9]\+\.[0-9]\+\.[0-9]\+$' > /dev/null; then
27 | echo "Release version is invalid"
28 | exit 1
29 | fi
30 |
31 | echo "Checking snapshot version..."
32 | if echo ${{ github.event.inputs.snapshot_version }} | grep --invert-match '^[0-9]\+\.[0-9]\+\.[0-9]\+-SNAPSHOT$' > /dev/null; then
33 | echo "Snapshot version is invalid"
34 | exit 1
35 | fi
36 |
37 | - name: Checkout main
38 | uses: actions/checkout@v2
39 | with:
40 | ref: main
41 | fetch-depth: 0
42 |
43 | - name: Create release commits
44 | run: |
45 | git config --local user.name "GitHub Action"
46 | git config --local user.email "action@github.com"
47 | sed -i -e "s/^version=.\+$/version=${{ github.event.inputs.release_version }}/g" gradle.properties
48 | git add gradle.properties
49 | git commit -m "Release version ${{ github.event.inputs.release_version }}"
50 | sed -i -e "s/^version=.\+$/version=${{ github.event.inputs.snapshot_version }}/g" gradle.properties
51 | git add gradle.properties
52 | git commit -m "Bump version to ${{ github.event.inputs.snapshot_version }}"
53 |
54 | - name: Create Pull Request
55 | uses: peter-evans/create-pull-request@v3
56 | with:
57 | branch: release-${{ github.event.inputs.release_version }}
58 | delete-branch: true
59 | draft: true
60 | title: Release version ${{ github.event.inputs.release_version }}
61 | body: |
62 | Proposed changelog:
63 | - *fill in*
64 |
--------------------------------------------------------------------------------
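The two sed edits above rewrite the version= line of gradle.properties twice,
producing one commit that pins the release version and a second that restores
a snapshot version. A dry run of the same substitution (sed's -i flag is
omitted so nothing is modified; the version values are examples only):

    sed -e "s/^version=.\+$/version=2.16.0/g" gradle.properties
    sed -e "s/^version=.\+$/version=2.17.0-SNAPSHOT/g" gradle.properties
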
/.gitignore:
--------------------------------------------------------------------------------
1 | target
2 | docs/_build
3 | .idea
4 | *.iml
5 | *.DS_Store
6 | .gradle
7 | .gradletasknamecache
8 | build/
9 | rpm/
10 | rpmbuild/
11 | *.sh
12 |
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, religion, or sexual identity
10 | and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the
26 | overall community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or
31 | advances of any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email
35 | address, without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official e-mail address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | opensource@aiven.io.
64 | All complaints will be reviewed and investigated promptly and fairly.
65 |
66 | All community leaders are obligated to respect the privacy and security of the
67 | reporter of any incident.
68 |
69 | ## Enforcement Guidelines
70 |
71 | Community leaders will follow these Community Impact Guidelines in determining
72 | the consequences for any action they deem in violation of this Code of Conduct:
73 |
74 | ### 1. Correction
75 |
76 | **Community Impact**: Use of inappropriate language or other behavior deemed
77 | unprofessional or unwelcome in the community.
78 |
79 | **Consequence**: A private, written warning from community leaders, providing
80 | clarity around the nature of the violation and an explanation of why the
81 | behavior was inappropriate. A public apology may be requested.
82 |
83 | ### 2. Warning
84 |
85 | **Community Impact**: A violation through a single incident or series
86 | of actions.
87 |
88 | **Consequence**: A warning with consequences for continued behavior. No
89 | interaction with the people involved, including unsolicited interaction with
90 | those enforcing the Code of Conduct, for a specified period of time. This
91 | includes avoiding interactions in community spaces as well as external channels
92 | like social media. Violating these terms may lead to a temporary or
93 | permanent ban.
94 |
95 | ### 3. Temporary Ban
96 |
97 | **Community Impact**: A serious violation of community standards, including
98 | sustained inappropriate behavior.
99 |
100 | **Consequence**: A temporary ban from any sort of interaction or public
101 | communication with the community for a specified period of time. No public or
102 | private interaction with the people involved, including unsolicited interaction
103 | with those enforcing the Code of Conduct, is allowed during this period.
104 | Violating these terms may lead to a permanent ban.
105 |
106 | ### 4. Permanent Ban
107 |
108 | **Community Impact**: Demonstrating a pattern of violation of community
109 | standards, including sustained inappropriate behavior, harassment of an
110 | individual, or aggression toward or disparagement of classes of individuals.
111 |
112 | **Consequence**: A permanent ban from any sort of public interaction within
113 | the community.
114 |
115 | ## Attribution
116 |
117 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
118 | version 2.0, available at
119 | https://www.contributor-covenant.org/version/2/0/code_of_conduct.html.
120 |
121 | Community Impact Guidelines were inspired by [Mozilla's code of conduct
122 | enforcement ladder](https://github.com/mozilla/diversity).
123 |
124 | [homepage]: https://www.contributor-covenant.org
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | https://www.contributor-covenant.org/faq. Translations are available at
128 | https://www.contributor-covenant.org/translations.
129 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing Guidelines
2 |
3 | Thank you for your interest in contributing to our project. Whether it's a bug report, new feature, correction, or additional
4 | documentation, we greatly value feedback and contributions from our community.
5 |
6 | Please read through this document before submitting any issues or pull requests to ensure we have all the necessary
7 | information to effectively respond to your bug report or contribution.
8 |
9 |
10 | ## Reporting Bugs/Feature Requests
11 |
12 | We welcome you to use the GitHub issue tracker to report bugs or suggest features.
13 |
14 | When filing an issue, please check existing open, or recently closed, issues to make sure somebody else hasn't already
15 | reported the issue. Please try to include as much information as you can. Details like these are incredibly useful:
16 |
17 | * A reproducible test case or series of steps
18 | * The version of our code being used
19 | * Any modifications you've made relevant to the bug
20 | * Anything unusual about your environment or deployment
21 |
22 |
23 | ## Contributing via Pull Requests
24 | Contributions via pull requests are much appreciated. Before sending us a pull request, please ensure that:
25 |
26 | 1. You are working against the latest source on the *main* branch.
27 | 2. You check existing open, and recently merged, pull requests to make sure someone else hasn't addressed the problem already.
28 | 3. You open an issue to discuss any significant work - we would hate for your time to be wasted.
29 |
30 | To send us a pull request, please:
31 |
32 | 1. Fork the repository.
33 | 2. Modify the source; please focus on the specific change you are contributing. If you also reformat all the code, it will be hard for us to focus on your change.
34 | 3. Ensure local tests pass.
35 | 4. Commit to your fork using clear commit messages.
36 | 5. Send us a pull request, answering any default questions in the pull request interface.
37 | 6. Pay attention to any automated CI failures reported in the pull request, and stay involved in the conversation.
38 | 7. Before merging, clean up the commit history for the PR. Each commit should be self-contained with an informative message, since each commit will be added to the history for this project.
39 |
40 | GitHub provides additional documentation on [forking a repository](https://help.github.com/articles/fork-a-repo/) and
41 | [creating a pull request](https://help.github.com/articles/creating-a-pull-request/).
42 |
43 | ## Developer Certificate of Origin
44 |
45 | S3 connector for Apache Kafka is an open source product released under the Apache 2.0 license (see either [the Apache site](https://www.apache.org/licenses/LICENSE-2.0) or the [LICENSE file](LICENSE)). The Apache 2.0 license allows you to freely use, modify, distribute, and sell your own products that include Apache 2.0 licensed software.
46 |
47 | We respect intellectual property rights of others and we want to make sure all incoming contributions are correctly attributed and licensed. A Developer Certificate of Origin (DCO) is a lightweight mechanism to do that.
48 |
49 | So we require that, by making a contribution, every contributor certifies that:
50 | ```
51 | The contribution was created in whole or in part by me and I have the right to submit it under the open source license
52 | indicated in the file
53 | ```
54 |
55 | ## Finding contributions to work on
56 | Looking at the existing issues is a great way to find something to contribute to. As our projects use the default GitHub issue labels (enhancement/bug/duplicate/help wanted/invalid/question/wontfix), any 'help wanted' issues are a great place to start.
57 |
58 |
59 | ## Code of Conduct
60 | This project has adopted the [Contributor Covenant Code of Conduct](CODE_OF_CONDUCT.md).
61 | For more information see the [Code of Conduct FAQ](https://www.contributor-covenant.org/faq/).
62 |
63 |
64 | ## Security issue notifications
65 | If you discover a potential security issue in this project, we ask that you report it according to the [Security Policy](SECURITY.md). Please do **not** create a public GitHub issue.
66 |
67 | ## Licensing
68 |
69 | See the [LICENSE](LICENSE) file for our project's licensing. We will ask you to confirm the licensing of your contribution.
70 |
--------------------------------------------------------------------------------
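The Developer Certificate of Origin section above asks each contributor to
certify the origin of their contribution. One common way to record such a
certification (an assumption here, not something this document mandates) is
git's sign-off flag, which appends a Signed-off-by trailer to the commit:

    git commit -s -m "Example commit message"
    # appends: Signed-off-by: Your Name <you@example.com>
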
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | We release patches for security vulnerabilities. Which versions are eligible
6 | receiving such patches depend on the CVSS v3.0 Rating:
7 |
8 | | CVSS v3.0 | Supported Versions |
9 | | --------- | ----------------------------------------- |
10 | | 4.0-10.0 | Most recent release |
11 |
12 | ## Reporting a Vulnerability
13 |
14 | Please report (suspected) security vulnerabilities to our **[bug bounty
15 | program](https://bugcrowd.com/aiven-mbb-og)**. You will receive a response from
16 | us within 2 working days. If the issue is confirmed, we will release a patch as
17 | soon as possible depending on impact and complexity.
18 |
19 | ## Qualifying Vulnerabilities
20 |
21 | Any reproducible vulnerability that has a severe effect on the security or
22 | privacy of our users is likely to be in scope for the program.
23 |
24 | We generally **aren't** interested in the following issues:
25 | * Social engineering (e.g. phishing, vishing, smishing) attacks
26 | * Brute force, DoS, text injection
27 | * Missing best practices such as HTTP security headers (CSP, X-XSS, etc.),
28 | email (SPF/DKIM/DMARC records), SSL/TLS configuration.
29 | * Software version disclosure / Banner identification issues / Descriptive
30 | error messages or headers (e.g. stack traces, application or server errors).
31 | * Clickjacking on pages with no sensitive actions
32 | * Theoretical vulnerabilities where you can't demonstrate a significant
33 | security impact with a proof of concept.
34 |
--------------------------------------------------------------------------------
/build.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Aiven Oy
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | plugins {
18 | // https://docs.gradle.org/current/userguide/java_library_plugin.html
19 | id "java-library"
20 |
21 | // https://docs.gradle.org/current/userguide/checkstyle_plugin.html
22 | id "checkstyle"
23 |
24 | // https://docs.gradle.org/current/userguide/idea_plugin.html
25 | id "idea"
26 |
27 | // https://docs.gradle.org/current/userguide/jacoco_plugin.html
28 | id "jacoco"
29 |
30 | // https://docs.gradle.org/current/userguide/distribution_plugin.html
31 | id "distribution"
32 |
33 | // https://docs.gradle.org/current/userguide/publishing_maven.html
34 | id "maven-publish"
35 |
36 | // https://docs.gradle.org/current/userguide/signing_plugin.html
37 | id "signing"
38 | }
39 |
40 | group = "io.aiven"
41 |
42 | java {
43 | sourceCompatibility = JavaVersion.VERSION_11
44 | targetCompatibility = JavaVersion.VERSION_11
45 |
46 | withJavadocJar()
47 | withSourcesJar()
48 | }
49 |
50 | wrapper {
51 | distributionType = 'ALL'
52 | doLast {
53 | def sha256Sum = new String(new URL("${distributionUrl}.sha256").bytes)
54 | propertiesFile << "distributionSha256Sum=${sha256Sum}\n"
55 | println "Added checksum to wrapper properties"
56 | }
57 | }
58 |
59 | compileJava {
60 | options.compilerArgs = ["-Xlint:all", "-Werror"]
61 | }
62 |
63 | jacoco {
64 | toolVersion = "0.8.7"
65 | }
66 |
67 | repositories {
68 | mavenCentral()
69 | // For kafka-avro-serializer and kafka-connect-avro-converter
70 | maven {
71 | url "https://packages.confluent.io/maven"
72 | }
73 | }
74 |
75 | ext {
76 | kafkaVersion = "1.1.0"
77 | // Compatible with Kafka version:
78 | // https://docs.confluent.io/current/installation/versions-interoperability.html
79 | confluentPlatformVersion = "4.1.4"
80 | // Align with version used by commons
81 | avroConverterVersion = "7.2.2"
82 | aivenConnectCommonsVersion = "0.12.0"
83 |
84 | amazonS3Version = "1.12.729"
85 | amazonSTSVersion = "1.12.729"
86 | slf4jVersion = "1.7.36"
87 | junitVersion = "5.10.2"
88 | testcontainersVersion = "1.19.8"
89 | localstackVersion = "0.2.23"
90 | wireMockVersion = "2.35.0"
91 | mockitoVersion = "5.12.0"
92 | }
93 |
94 | sourceSets {
95 | integrationTest {
96 | java {
97 | srcDirs = ['src/integration-test/java']
98 | }
99 | resources {
100 | srcDirs = ['src/integration-test/resources']
101 | }
102 |
103 | compileClasspath += sourceSets.main.output + configurations.testRuntimeClasspath
104 | runtimeClasspath += output + compileClasspath
105 | }
106 | }
107 |
108 | idea {
109 | module {
110 | testSourceDirs += project.sourceSets.integrationTest.java.srcDirs
111 | testSourceDirs += project.sourceSets.integrationTest.resources.srcDirs
112 | }
113 | }
114 |
115 | configurations {
116 | integrationTestImplementation.extendsFrom testImplementation
117 | integrationTestRuntime.extendsFrom testRuntimeClasspath
118 | }
119 |
120 | dependencies {
121 | compileOnly "org.apache.kafka:connect-api:$kafkaVersion"
122 | compileOnly "org.apache.kafka:connect-runtime:$kafkaVersion"
123 |
124 | implementation "org.slf4j:slf4j-api:$slf4jVersion"
125 | implementation "com.amazonaws:aws-java-sdk-s3:$amazonS3Version"
126 | implementation "com.amazonaws:aws-java-sdk-sts:$amazonSTSVersion"
127 | implementation "io.aiven:commons-for-apache-kafka-connect:$aivenConnectCommonsVersion"
128 |
129 | testImplementation "org.xerial.snappy:snappy-java:1.1.10.5"
130 | testImplementation "com.github.luben:zstd-jni:1.5.6-3"
131 |
132 | testImplementation "org.apache.kafka:connect-api:$kafkaVersion"
133 | testImplementation "org.apache.kafka:connect-runtime:$kafkaVersion"
134 | testImplementation "org.apache.kafka:connect-json:$kafkaVersion"
135 | testImplementation "org.slf4j:slf4j-simple:$slf4jVersion"
136 |
137 | testImplementation "org.junit.jupiter:junit-jupiter:$junitVersion"
138 | testImplementation 'org.assertj:assertj-core:3.26.3'
139 |
140 | testImplementation "io.findify:s3mock_2.11:0.2.6"
141 |
142 | testImplementation "org.mockito:mockito-core:$mockitoVersion"
143 |
144 | testRuntimeOnly "org.junit.jupiter:junit-jupiter-engine:$junitVersion"
145 | testImplementation "org.mockito:mockito-junit-jupiter:$mockitoVersion"
146 |
147 | testRuntimeOnly "ch.qos.logback:logback-classic:1.5.6"
148 |
149 | integrationTestImplementation "cloud.localstack:localstack-utils:$localstackVersion"
150 | integrationTestImplementation "org.testcontainers:junit-jupiter:$testcontainersVersion"
151 | integrationTestImplementation "org.testcontainers:kafka:$testcontainersVersion" // this is not Kafka version
152 | integrationTestImplementation "org.testcontainers:localstack:$testcontainersVersion"
153 | integrationTestImplementation "com.github.tomakehurst:wiremock-jre8:$wireMockVersion"
154 |
155 | // TODO: add avro-converter to ConnectRunner via plugin.path instead of on worker classpath
156 | integrationTestImplementation("io.confluent:kafka-connect-avro-converter:$avroConverterVersion") {
157 | exclude group: "org.apache.kafka", module: "kafka-clients"
158 | }
159 |
160 | integrationTestImplementation "org.apache.avro:avro:1.11.3"
161 |
162 | testImplementation ("org.apache.parquet:parquet-tools:1.11.2") {
163 | exclude group: "org.slf4j", module: "slf4j-api"
164 | }
165 | testImplementation("org.apache.hadoop:hadoop-mapreduce-client-core:3.4.0") {
166 | exclude group: "org.apache.hadoop", module: "hadoop-yarn-client"
167 | exclude group: "org.apache.hadoop.thirdparty", module: "hadoop-shaded-protobuf_3_7"
168 | exclude group: "com.google.guava", module: "guava"
169 | exclude group: "commons-cli", module: "commons-cli"
170 | exclude group: "org.apache.commons", module: "commons-math3"
171 | exclude group: "org.apache.httpcomponents", module: "httpclient"
172 | exclude group: "commons-codec", module: "commons-codec"
173 | exclude group: "commons-io", module: "commons-io"
174 | exclude group: "commons-net", module: "commons-net"
175 | exclude group: "org.eclipse.jetty"
176 | exclude group: "org.eclipse.jetty.websocket"
177 | exclude group: "javax.servlet"
178 | exclude group: "javax.servlet.jsp"
179 | exclude group: "javax.activation"
180 | exclude group: "com.sun.jersey"
181 | exclude group: "log4j"
182 | exclude group: "org.apache.commons", module: "commons-text"
183 | exclude group: "org.slf4j", module: "slf4j-api"
184 | exclude group: "org.apache.hadoop", module: "hadoop-auth"
185 | exclude group: "org.apache.hadoop", module: "hadoop-yarn-api"
186 | exclude group: "com.google.re2j"
187 | exclude group: "com.google.protobuf"
188 | exclude group: "com.google.code.gson"
189 | exclude group: "com.jcraft"
190 | exclude group: "org.apache.curator"
191 | exclude group: "org.apache.zookeeper"
192 | exclude group: "org.apache.htrace"
193 | exclude group: "com.google.code.findbugs"
194 | exclude group: "org.apache.kerby"
195 | exclude group: "com.fasterxml.jackson.core"
196 | exclude group: "com.fasterxml.woodstox", module: "woodstox-core:5.0.3"
197 | exclude group: "org.apache.avro", module: "avro"
198 | exclude group: "org.apache.hadoop", module: "hadoop-yarn-common"
199 | exclude group: "com.google.inject.extensions", module: "guice-servlet"
200 | exclude group: "io.netty", module: "netty"
201 | }
202 |
203 | // Make test utils from 'test' available in 'integration-test'
204 | integrationTestImplementation sourceSets.test.output
205 | integrationTestImplementation "org.awaitility:awaitility:4.2.1"
206 | }
207 |
208 | checkstyle {
209 | toolVersion "8.29"
210 | getConfigDirectory().set(rootProject.file("checkstyle/"))
211 | }
212 |
213 | task integrationTest(type: Test) {
214 | description = 'Runs the integration tests.'
215 | group = 'verification'
216 | testClassesDirs = sourceSets.integrationTest.output.classesDirs
217 | classpath = sourceSets.integrationTest.runtimeClasspath
218 |
219 | dependsOn distTar
220 | shouldRunAfter test
221 |
222 | useJUnitPlatform()
223 |
224 | // Run always.
225 | outputs.upToDateWhen { false }
226 | // Pass the distribution file path to the tests.
227 | systemProperty("integration-test.distribution.file.path", distTar.archiveFile.get().asFile.path)
228 | }
229 |
230 | test {
231 | useJUnitPlatform()
232 | }
233 |
234 | distributions {
235 | main {
236 | contents {
237 | from jar
238 | from configurations.runtimeClasspath
239 |
240 | into("/") {
241 | from projectDir
242 | include "version.txt", "README*", "LICENSE*", "NOTICE*", "licenses/"
243 | include "config/"
244 | }
245 | }
246 | }
247 | }
248 |
249 | jar {
250 | manifest {
251 | attributes(
252 | 'Version': "${project.version}"
253 | )
254 | }
255 | }
256 |
257 | javadoc {
258 | options.addBooleanOption('html5', true)
259 | // disable missing javadoc lint and show only warning and error messages
260 | options.addStringOption('Xdoclint:all,-missing', '-quiet')
261 | }
262 |
263 |
264 | processResources {
265 | filesMatching('s3-connector-for-apache-kafka-version.properties') {
266 | expand(version: version)
267 | }
268 | }
269 |
270 | publishing {
271 | publications {
272 | maven(MavenPublication) {
273 | groupId = getGroup()
274 | artifactId = "s3-connector-for-apache-kafka"
275 | version = getVersion()
276 |
277 | from components.java
278 |
279 | pom {
280 | name = "Aiven's S3 Sink Connector for Apache Kafka"
281 | description = "Aiven's S3 Sink Connector for Apache Kafka"
282 | url = "https://github.com/aiven-open/s3-connector-for-apache-kafka"
283 | organization {
284 | name = "Aiven Oy"
285 | url = "https://aiven.io"
286 | }
287 |
288 | licenses {
289 | license {
290 | name = "Apache 2.0"
291 | url = "http://www.apache.org/licenses/LICENSE-2.0"
292 | distribution = "repo"
293 | }
294 | }
295 |
296 | developers {
297 | developer {
298 | id = 'aiven'
299 | name = 'Aiven Opensource'
300 | email = 'opensource@aiven.io'
301 | }
302 | }
303 |
304 | scm {
305 | connection = 'scm:git:git://github.com:aiven/s3-connector-for-apache-kafka.git'
306 | developerConnection = 'scm:git:ssh://github.com:aiven/s3-connector-for-apache-kafka.git'
307 | url = 'https://github.com/aiven-open/s3-connector-for-apache-kafka'
308 | }
309 | }
310 | }
311 | }
312 |
313 | repositories {
314 | maven {
315 | name="sonatype"
316 |
317 | def releasesRepoUrl = "https://oss.sonatype.org/service/local/staging/deploy/maven2"
318 | def snapshotsRepoUrl = "https://oss.sonatype.org/content/repositories/snapshots"
319 | url = version.endsWith('SNAPSHOT') ? snapshotsRepoUrl : releasesRepoUrl
320 |
321 | credentials(PasswordCredentials)
322 | }
323 | }
324 | }
325 |
326 | signing {
327 | sign publishing.publications.maven
328 | useGpgCmd()
329 | // Some issue in the plugin:
330 | // GPG outputs already armored signatures. The plugin also does armoring for `asc` files.
331 | // This results in double armored signatures, i.e. garbage.
332 | // Override the signature type provider to use unarmored output for `asc` files, which works well with GPG.
333 | signatureTypes = new AbstractSignatureTypeProvider() {
334 | {
335 | BinarySignatureType binary = new BinarySignatureType() {
336 | @Override
337 | String getExtension() {
338 | return "asc";
339 | }
340 | }
341 | register(binary);
342 | setDefaultType(binary.getExtension());
343 | }
344 | }
345 | }
346 |
--------------------------------------------------------------------------------
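The custom integrationTest task above depends on distTar and passes the
packaged archive to the tests through the
integration-test.distribution.file.path system property, so integration tests
always run against the built distribution. Typical local invocations (plain
Gradle usage; nothing beyond the build file above is assumed):

    ./gradlew test                     # unit tests only
    ./gradlew build integrationTest    # what CI runs
    ls build/distributions/            # tar and zip produced by distTar/distZip
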
/checkstyle/checkstyle.xml:
--------------------------------------------------------------------------------
[XML content not preserved in this dump]
--------------------------------------------------------------------------------
/checkstyle/import-control.xml:
--------------------------------------------------------------------------------
[XML content not preserved in this dump]
--------------------------------------------------------------------------------
/checkstyle/java.header:
--------------------------------------------------------------------------------
1 | /\*
2 | \* Copyright 202[0-9] Aiven Oy
3 | \*
4 | \* Licensed under the Apache License, Version 2.0 \(the "License"\);
5 | \* you may not use this file except in compliance with the License.
6 | \* You may obtain a copy of the License at
7 | \*
8 | \* http://www.apache.org/licenses/LICENSE-2.0
9 | \*
10 | \* Unless required by applicable law or agreed to in writing, software
11 | \* distributed under the License is distributed on an "AS IS" BASIS,
12 | \* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | \* See the License for the specific language governing permissions and
14 | \* limitations under the License.
15 | \*/
16 |
--------------------------------------------------------------------------------
/checkstyle/suppressions.xml:
--------------------------------------------------------------------------------
[XML content not preserved in this dump]
--------------------------------------------------------------------------------
/gradle.properties:
--------------------------------------------------------------------------------
1 | version=2.16.0-SNAPSHOT
2 |
3 | sonatypeUsername=
4 | sonatypePassword=
5 |
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.jar:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Aiven-Open/s3-connector-for-apache-kafka/25a48b3c598849917de86753efa36fa3502ec501/gradle/wrapper/gradle-wrapper.jar
--------------------------------------------------------------------------------
/gradle/wrapper/gradle-wrapper.properties:
--------------------------------------------------------------------------------
1 | distributionBase=GRADLE_USER_HOME
2 | distributionPath=wrapper/dists
3 | distributionSha256Sum=f2b9ed0faf8472cbe469255ae6c86eddb77076c75191741b4a462f33128dd419
4 | distributionUrl=https\://services.gradle.org/distributions/gradle-8.4-all.zip
5 | networkTimeout=10000
6 | zipStoreBase=GRADLE_USER_HOME
7 | zipStorePath=wrapper/dists
8 |
--------------------------------------------------------------------------------
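distributionSha256Sum makes the wrapper verify the downloaded Gradle
distribution before unpacking it; the wrapper task in build.gradle appends
this value from Gradle's published checksum file. To re-check the pinned
value by hand (a sketch assuming curl is available):

    curl -sL https://services.gradle.org/distributions/gradle-8.4-all.zip.sha256
    # should print the distributionSha256Sum value above
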
/gradlew:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | #
4 | # Copyright © 2015-2021 the original authors.
5 | #
6 | # Licensed under the Apache License, Version 2.0 (the "License");
7 | # you may not use this file except in compliance with the License.
8 | # You may obtain a copy of the License at
9 | #
10 | # https://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | ##############################################################################
20 | #
21 | # Gradle start up script for POSIX generated by Gradle.
22 | #
23 | # Important for running:
24 | #
25 | # (1) You need a POSIX-compliant shell to run this script. If your /bin/sh is
26 | # noncompliant, but you have some other compliant shell such as ksh or
27 | # bash, then to run this script, type that shell name before the whole
28 | # command line, like:
29 | #
30 | # ksh Gradle
31 | #
32 | # Busybox and similar reduced shells will NOT work, because this script
33 | # requires all of these POSIX shell features:
34 | # * functions;
35 | # * expansions «$var», «${var}», «${var:-default}», «${var+SET}»,
36 | # «${var#prefix}», «${var%suffix}», and «$( cmd )»;
37 | # * compound commands having a testable exit status, especially «case»;
38 | # * various built-in commands including «command», «set», and «ulimit».
39 | #
40 | # Important for patching:
41 | #
42 | # (2) This script targets any POSIX shell, so it avoids extensions provided
43 | # by Bash, Ksh, etc; in particular arrays are avoided.
44 | #
45 | # The "traditional" practice of packing multiple parameters into a
46 | # space-separated string is a well documented source of bugs and security
47 | # problems, so this is (mostly) avoided, by progressively accumulating
48 | # options in "$@", and eventually passing that to Java.
49 | #
50 | # Where the inherited environment variables (DEFAULT_JVM_OPTS, JAVA_OPTS,
51 | # and GRADLE_OPTS) rely on word-splitting, this is performed explicitly;
52 | # see the in-line comments for details.
53 | #
54 | # There are tweaks for specific operating systems such as AIX, CygWin,
55 | # Darwin, MinGW, and NonStop.
56 | #
57 | # (3) This script is generated from the Groovy template
58 | # https://github.com/gradle/gradle/blob/HEAD/subprojects/plugins/src/main/resources/org/gradle/api/internal/plugins/unixStartScript.txt
59 | # within the Gradle project.
60 | #
61 | # You can find Gradle at https://github.com/gradle/gradle/.
62 | #
63 | ##############################################################################
64 |
65 | # Attempt to set APP_HOME
66 |
67 | # Resolve links: $0 may be a link
68 | app_path=$0
69 |
70 | # Need this for daisy-chained symlinks.
71 | while
72 | APP_HOME=${app_path%"${app_path##*/}"} # leaves a trailing /; empty if no leading path
73 | [ -h "$app_path" ]
74 | do
75 | ls=$( ls -ld "$app_path" )
76 | link=${ls#*' -> '}
77 | case $link in #(
78 | /*) app_path=$link ;; #(
79 | *) app_path=$APP_HOME$link ;;
80 | esac
81 | done
82 |
83 | # This is normally unused
84 | # shellcheck disable=SC2034
85 | APP_BASE_NAME=${0##*/}
86 | APP_HOME=$( cd "${APP_HOME:-./}" && pwd -P ) || exit
87 |
88 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
89 | DEFAULT_JVM_OPTS='"-Xmx64m" "-Xms64m"'
90 |
91 | # Use the maximum available, or set MAX_FD != -1 to use that value.
92 | MAX_FD=maximum
93 |
94 | warn () {
95 | echo "$*"
96 | } >&2
97 |
98 | die () {
99 | echo
100 | echo "$*"
101 | echo
102 | exit 1
103 | } >&2
104 |
105 | # OS specific support (must be 'true' or 'false').
106 | cygwin=false
107 | msys=false
108 | darwin=false
109 | nonstop=false
110 | case "$( uname )" in #(
111 | CYGWIN* ) cygwin=true ;; #(
112 | Darwin* ) darwin=true ;; #(
113 | MSYS* | MINGW* ) msys=true ;; #(
114 | NONSTOP* ) nonstop=true ;;
115 | esac
116 |
117 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar
118 |
119 |
120 | # Determine the Java command to use to start the JVM.
121 | if [ -n "$JAVA_HOME" ] ; then
122 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then
123 | # IBM's JDK on AIX uses strange locations for the executables
124 | JAVACMD=$JAVA_HOME/jre/sh/java
125 | else
126 | JAVACMD=$JAVA_HOME/bin/java
127 | fi
128 | if [ ! -x "$JAVACMD" ] ; then
129 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME
130 |
131 | Please set the JAVA_HOME variable in your environment to match the
132 | location of your Java installation."
133 | fi
134 | else
135 | JAVACMD=java
136 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
137 |
138 | Please set the JAVA_HOME variable in your environment to match the
139 | location of your Java installation."
140 | fi
141 |
142 | # Increase the maximum file descriptors if we can.
143 | if ! "$cygwin" && ! "$darwin" && ! "$nonstop" ; then
144 | case $MAX_FD in #(
145 | max*)
146 | # In POSIX sh, ulimit -H is undefined. That's why the result is checked to see if it worked.
147 | # shellcheck disable=SC3045
148 | MAX_FD=$( ulimit -H -n ) ||
149 | warn "Could not query maximum file descriptor limit"
150 | esac
151 | case $MAX_FD in #(
152 | '' | soft) :;; #(
153 | *)
154 | # In POSIX sh, ulimit -n is undefined. That's why the result is checked to see if it worked.
155 | # shellcheck disable=SC3045
156 | ulimit -n "$MAX_FD" ||
157 | warn "Could not set maximum file descriptor limit to $MAX_FD"
158 | esac
159 | fi
160 |
161 | # Collect all arguments for the java command, stacking in reverse order:
162 | # * args from the command line
163 | # * the main class name
164 | # * -classpath
165 | # * -D...appname settings
166 | # * --module-path (only if needed)
167 | # * DEFAULT_JVM_OPTS, JAVA_OPTS, and GRADLE_OPTS environment variables.
168 |
169 | # For Cygwin or MSYS, switch paths to Windows format before running java
170 | if "$cygwin" || "$msys" ; then
171 | APP_HOME=$( cygpath --path --mixed "$APP_HOME" )
172 | CLASSPATH=$( cygpath --path --mixed "$CLASSPATH" )
173 |
174 | JAVACMD=$( cygpath --unix "$JAVACMD" )
175 |
176 | # Now convert the arguments - kludge to limit ourselves to /bin/sh
177 | for arg do
178 | if
179 | case $arg in #(
180 | -*) false ;; # don't mess with options #(
181 | /?*) t=${arg#/} t=/${t%%/*} # looks like a POSIX filepath
182 | [ -e "$t" ] ;; #(
183 | *) false ;;
184 | esac
185 | then
186 | arg=$( cygpath --path --ignore --mixed "$arg" )
187 | fi
188 | # Roll the args list around exactly as many times as the number of
189 | # args, so each arg winds up back in the position where it started, but
190 | # possibly modified.
191 | #
192 | # NB: a `for` loop captures its iteration list before it begins, so
193 | # changing the positional parameters here affects neither the number of
194 | # iterations, nor the values presented in `arg`.
195 | shift # remove old arg
196 | set -- "$@" "$arg" # push replacement arg
197 | done
198 | fi
199 |
200 | # Collect all arguments for the java command;
201 | # * $DEFAULT_JVM_OPTS, $JAVA_OPTS, and $GRADLE_OPTS can contain fragments of
202 | # shell script including quotes and variable substitutions, so put them in
203 | # double quotes to make sure that they get re-expanded; and
204 | # * put everything else in single quotes, so that it's not re-expanded.
205 |
206 | set -- \
207 | "-Dorg.gradle.appname=$APP_BASE_NAME" \
208 | -classpath "$CLASSPATH" \
209 | org.gradle.wrapper.GradleWrapperMain \
210 | "$@"
211 |
212 | # Stop when "xargs" is not available.
213 | if ! command -v xargs >/dev/null 2>&1
214 | then
215 | die "xargs is not available"
216 | fi
217 |
218 | # Use "xargs" to parse quoted args.
219 | #
220 | # With -n1 it outputs one arg per line, with the quotes and backslashes removed.
221 | #
222 | # In Bash we could simply go:
223 | #
224 | # readarray ARGS < <( xargs -n1 <<<"$var" ) &&
225 | # set -- "${ARGS[@]}" "$@"
226 | #
227 | # but POSIX shell has neither arrays nor command substitution, so instead we
228 | # post-process each arg (as a line of input to sed) to backslash-escape any
229 | # character that might be a shell metacharacter, then use eval to reverse
230 | # that process (while maintaining the separation between arguments), and wrap
231 | # the whole thing up as a single "set" statement.
232 | #
233 | # This will of course break if any of these variables contains a newline or
234 | # an unmatched quote.
235 | #
236 |
237 | eval "set -- $(
238 | printf '%s\n' "$DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS" |
239 | xargs -n1 |
240 | sed ' s~[^-[:alnum:]+,./:=@_]~\\&~g; ' |
241 | tr '\n' ' '
242 | )" '"$@"'
243 |
244 | exec "$JAVACMD" "$@"
245 |
--------------------------------------------------------------------------------
/gradlew.bat:
--------------------------------------------------------------------------------
1 | @rem
2 | @rem Copyright 2015 the original author or authors.
3 | @rem
4 | @rem Licensed under the Apache License, Version 2.0 (the "License");
5 | @rem you may not use this file except in compliance with the License.
6 | @rem You may obtain a copy of the License at
7 | @rem
8 | @rem https://www.apache.org/licenses/LICENSE-2.0
9 | @rem
10 | @rem Unless required by applicable law or agreed to in writing, software
11 | @rem distributed under the License is distributed on an "AS IS" BASIS,
12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | @rem See the License for the specific language governing permissions and
14 | @rem limitations under the License.
15 | @rem
16 |
17 | @if "%DEBUG%"=="" @echo off
18 | @rem ##########################################################################
19 | @rem
20 | @rem Gradle startup script for Windows
21 | @rem
22 | @rem ##########################################################################
23 |
24 | @rem Set local scope for the variables with windows NT shell
25 | if "%OS%"=="Windows_NT" setlocal
26 |
27 | set DIRNAME=%~dp0
28 | if "%DIRNAME%"=="" set DIRNAME=.
29 | @rem This is normally unused
30 | set APP_BASE_NAME=%~n0
31 | set APP_HOME=%DIRNAME%
32 |
33 | @rem Resolve any "." and ".." in APP_HOME to make it shorter.
34 | for %%i in ("%APP_HOME%") do set APP_HOME=%%~fi
35 |
36 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
37 | set DEFAULT_JVM_OPTS="-Xmx64m" "-Xms64m"
38 |
39 | @rem Find java.exe
40 | if defined JAVA_HOME goto findJavaFromJavaHome
41 |
42 | set JAVA_EXE=java.exe
43 | %JAVA_EXE% -version >NUL 2>&1
44 | if %ERRORLEVEL% equ 0 goto execute
45 |
46 | echo.
47 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
48 | echo.
49 | echo Please set the JAVA_HOME variable in your environment to match the
50 | echo location of your Java installation.
51 |
52 | goto fail
53 |
54 | :findJavaFromJavaHome
55 | set JAVA_HOME=%JAVA_HOME:"=%
56 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe
57 |
58 | if exist "%JAVA_EXE%" goto execute
59 |
60 | echo.
61 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
62 | echo.
63 | echo Please set the JAVA_HOME variable in your environment to match the
64 | echo location of your Java installation.
65 |
66 | goto fail
67 |
68 | :execute
69 | @rem Setup the command line
70 |
71 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
72 |
73 |
74 | @rem Execute Gradle
75 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %*
76 |
77 | :end
78 | @rem End local scope for the variables with windows NT shell
79 | if %ERRORLEVEL% equ 0 goto mainEnd
80 |
81 | :fail
82 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
83 | rem the _cmd.exe /c_ return code!
84 | set EXIT_CODE=%ERRORLEVEL%
85 | if %EXIT_CODE% equ 0 set EXIT_CODE=1
86 | if not ""=="%GRADLE_EXIT_CONSOLE%" exit %EXIT_CODE%
87 | exit /b %EXIT_CODE%
88 |
89 | :mainEnd
90 | if "%OS%"=="Windows_NT" endlocal
91 |
92 | :omega
93 |
--------------------------------------------------------------------------------
/licenses/LICENSE-aws.txt:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 |
4 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
5 |
6 | 1. Definitions.
7 |
8 | "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.
9 |
10 | "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.
11 |
12 | "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.
13 |
14 | "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.
15 |
16 | "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.
17 |
18 | "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
19 |
20 | "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below).
21 |
22 | "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof.
23 |
24 | "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution."
25 |
26 | "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work.
27 |
28 | 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form.
29 |
30 | 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed.
31 |
32 | 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions:
33 |
34 | 1. You must give any other recipients of the Work or Derivative Works a copy of this License; and
35 | 2. You must cause any modified files to carry prominent notices stating that You changed the files; and
36 | 3. You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and
37 | 4. If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License.
38 |
39 | You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License.
40 |
41 | 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions.
42 |
43 | 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file.
44 |
45 | 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License.
46 |
47 | 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages.
48 |
49 | 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability.
50 |
51 | END OF TERMS AND CONDITIONS
52 |
53 | Note: Other license terms may apply to certain, identified software files contained within or distributed with the accompanying software if such terms are included in the directory containing the accompanying software. Such other license terms will then apply in lieu of the terms of the software license above.
54 |
55 | JSON processing code subject to the JSON License from JSON.org:
56 |
57 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
58 |
59 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
60 |
61 | The Software shall be used for Good, not Evil.
62 |
63 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/notices/NOTICE-aws.txt:
--------------------------------------------------------------------------------
1 | AWS IoT Device SDK for Java
2 | Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
3 |
4 | This product includes software developed by
5 | Amazon Technologies, Inc (http://www.amazon.com/).
6 |
7 | **********************
8 | THIRD PARTY COMPONENTS
9 | **********************
10 | This software includes third party software subject to the following copyrights:
11 | - PKCS#1 and PKCS#8 PEM encoded private key parsing and utility functions from oauth.googlecode.com - Copyright 1998-2010 AOL Inc.
12 |
13 | The licenses for these third party components are included in LICENSE.txt
--------------------------------------------------------------------------------
/settings.gradle:
--------------------------------------------------------------------------------
1 | /*
2 | * This file was generated by the Gradle 'init' task.
3 | *
4 | * The settings file is used to specify which projects to include in your build.
5 | *
6 | * Detailed information about configuring a multi-project build in Gradle can be found
7 | * in the user manual at https://docs.gradle.org/6.1.1/userguide/multi_project_builds.html
8 | */
9 |
10 | rootProject.name = 's3-connector-for-apache-kafka'
11 |
--------------------------------------------------------------------------------
/src/integration-test/java/io/aiven/kafka/connect/AvroIntegrationTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Aiven Oy
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.aiven.kafka.connect;
18 |
19 | import java.io.File;
20 | import java.io.IOException;
21 | import java.time.ZonedDateTime;
22 | import java.time.format.DateTimeFormatter;
23 | import java.util.ArrayList;
24 | import java.util.Arrays;
25 | import java.util.HashMap;
26 | import java.util.List;
27 | import java.util.Map;
28 | import java.util.concurrent.ExecutionException;
29 | import java.util.concurrent.Future;
30 | import java.util.stream.Stream;
31 |
32 | import org.apache.kafka.clients.admin.AdminClient;
33 | import org.apache.kafka.clients.producer.KafkaProducer;
34 | import org.apache.kafka.clients.producer.ProducerConfig;
35 | import org.apache.kafka.clients.producer.ProducerRecord;
36 | import org.apache.kafka.clients.producer.RecordMetadata;
37 |
38 | import io.aiven.kafka.connect.common.config.CompressionType;
39 | import io.aiven.kafka.connect.s3.AivenKafkaConnectS3SinkConnector;
40 | import io.aiven.kafka.connect.s3.SchemaRegistryContainer;
41 | import io.aiven.kafka.connect.s3.testutils.BucketAccessor;
42 |
43 | import com.amazonaws.services.s3.AmazonS3;
44 | import org.apache.avro.Schema;
45 | import org.apache.avro.file.DataFileReader;
46 | import org.apache.avro.file.SeekableByteArrayInput;
47 | import org.apache.avro.file.SeekableInput;
48 | import org.apache.avro.generic.GenericData;
49 | import org.apache.avro.generic.GenericDatumReader;
50 | import org.apache.avro.generic.GenericRecord;
51 | import org.apache.avro.util.Utf8;
52 | import org.junit.jupiter.api.AfterEach;
53 | import org.junit.jupiter.api.BeforeAll;
54 | import org.junit.jupiter.api.BeforeEach;
55 | import org.junit.jupiter.api.Test;
56 | import org.junit.jupiter.api.TestInfo;
57 | import org.junit.jupiter.params.ParameterizedTest;
58 | import org.junit.jupiter.params.provider.Arguments;
59 | import org.junit.jupiter.params.provider.MethodSource;
60 | import org.testcontainers.containers.KafkaContainer;
61 | import org.testcontainers.containers.localstack.LocalStackContainer;
62 | import org.testcontainers.junit.jupiter.Container;
63 | import org.testcontainers.junit.jupiter.Testcontainers;
64 |
65 | import static org.assertj.core.api.Assertions.assertThat;
66 |
67 | @Testcontainers
68 | public class AvroIntegrationTest implements IntegrationBase {
69 | private static final String S3_ACCESS_KEY_ID = "test-key-id0";
70 | private static final String S3_SECRET_ACCESS_KEY = "test_secret_key0";
71 | private static final String TEST_BUCKET_NAME = "test-bucket0";
72 |
73 | private static final String CONNECTOR_NAME = "aiven-s3-sink-connector";
74 | private static final String COMMON_PREFIX = "s3-connector-for-apache-kafka-test-";
75 | private static final int OFFSET_FLUSH_INTERVAL_MS = 5000;
76 |
77 | private static String s3Endpoint;
78 | private static String s3Prefix;
79 | private static BucketAccessor testBucketAccessor;
80 | private static File pluginDir;
81 |
82 | @Container
83 | public static final LocalStackContainer LOCALSTACK = IntegrationBase.createS3Container();
84 | @Container
85 | private static final KafkaContainer KAFKA = IntegrationBase.createKafkaContainer();
86 | @Container
87 | private static final SchemaRegistryContainer SCHEMA_REGISTRY = new SchemaRegistryContainer(KAFKA);
88 | private AdminClient adminClient;
89 | private KafkaProducer<String, GenericRecord> producer;
90 | private ConnectRunner connectRunner;
91 |
92 | private final Schema avroInputDataSchema = new Schema.Parser().parse(
93 | "{\"type\":\"record\",\"name\":\"input_data\",\"fields\":[{\"name\":\"name\",\"type\":\"string\"}]}");
94 |
95 | @BeforeAll
96 | static void setUpAll() throws IOException, InterruptedException {
97 | s3Prefix = COMMON_PREFIX
98 | + ZonedDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + "/";
99 |
100 | final AmazonS3 s3 = IntegrationBase.createS3Client(LOCALSTACK);
101 | s3Endpoint = LOCALSTACK.getEndpoint().toString();
102 | testBucketAccessor = new BucketAccessor(s3, TEST_BUCKET_NAME);
103 | testBucketAccessor.createBucket();
104 |
105 | pluginDir = IntegrationBase.getPluginDir();
106 | IntegrationBase.extractConnectorPlugin(pluginDir);
107 |
108 | IntegrationBase.waitForRunningContainer(KAFKA);
109 | }
110 |
111 |
112 | @BeforeEach
113 | void setUp(final TestInfo testInfo) throws ExecutionException, InterruptedException {
114 | adminClient = newAdminClient(KAFKA);
115 | producer = newProducer();
116 |
117 | final var topicName = IntegrationBase.topicName(testInfo);
118 | IntegrationBase.createTopics(adminClient, List.of(topicName));
119 |
120 | connectRunner = newConnectRunner(KAFKA, pluginDir, OFFSET_FLUSH_INTERVAL_MS);
121 | connectRunner.start();
122 | }
123 |
124 | @AfterEach
125 | final void tearDown() {
126 | connectRunner.stop();
127 | adminClient.close();
128 | producer.close();
129 |
130 | connectRunner.awaitStop();
131 | }
132 |
133 | private static Stream<Arguments> compressionAndCodecTestParameters() {
134 | return Stream.of(Arguments.of("bzip2", "none"), Arguments.of("deflate", "none"), Arguments.of("null", "none"),
135 | Arguments.of("snappy", "gzip"), // single test for codec and compression when both set.
136 | Arguments.of("zstandard", "none"));
137 | }
138 |
139 | @ParameterizedTest
140 | @MethodSource("compressionAndCodecTestParameters")
141 | void avroOutput(final String avroCodec, final String compression, final TestInfo testInfo)
142 | throws ExecutionException, InterruptedException, IOException {
143 | final var topicName = IntegrationBase.topicName(testInfo);
144 | final Map<String, String> connectorConfig = awsSpecificConfig(basicConnectorConfig(CONNECTOR_NAME), topicName);
145 | connectorConfig.put("file.compression.type", compression);
146 | connectorConfig.put("format.output.fields", "key,value");
147 | connectorConfig.put("format.output.type", "avro");
148 | connectorConfig.put("avro.codec", avroCodec);
149 | connectRunner.createConnector(connectorConfig);
150 |
151 | final int recordCountPerPartition = 10;
152 | produceRecords(recordCountPerPartition, topicName);
153 |
154 | waitForConnectToFinishProcessing();
155 |
156 | final List<String> expectedBlobs = Arrays.asList(
157 | getAvroBlobName(topicName, 0, 0, compression),
158 | getAvroBlobName(topicName, 1, 0, compression),
159 | getAvroBlobName(topicName, 2, 0, compression),
160 | getAvroBlobName(topicName, 3, 0, compression));
161 |
162 | for (final String blobName : expectedBlobs) {
163 | assertThat(testBucketAccessor.doesObjectExist(blobName)).isTrue();
164 | }
165 |
166 | final Map<String, List<GenericRecord>> blobContents = new HashMap<>();
167 | final Map<String, Schema> gcsOutputAvroSchemas = new HashMap<>();
168 | for (final String blobName : expectedBlobs) {
169 | final byte[] blobBytes = testBucketAccessor.readBytes(blobName, compression);
170 | try (SeekableInput sin = new SeekableByteArrayInput(blobBytes)) {
171 | final GenericDatumReader<GenericRecord> datumReader = new GenericDatumReader<>();
172 | try (DataFileReader<GenericRecord> reader = new DataFileReader<>(sin, datumReader)) {
173 | final List<GenericRecord> items = new ArrayList<>();
174 | reader.forEach(items::add);
175 | blobContents.put(blobName, items);
176 | gcsOutputAvroSchemas.put(blobName, reader.getSchema());
177 | }
178 | }
179 | }
180 |
181 | int cnt = 0;
182 | for (int i = 0; i < recordCountPerPartition; i++) {
183 | for (int partition = 0; partition < 4; partition++) {
184 | final String blobName = getAvroBlobName(topicName, partition, 0, compression);
185 | final Schema gcsOutputAvroSchema = gcsOutputAvroSchemas.get(blobName);
186 | final GenericData.Record expectedRecord = new GenericData.Record(gcsOutputAvroSchema);
187 | expectedRecord.put("key", new Utf8("key-" + cnt));
188 | final GenericData.Record valueRecord = new GenericData.Record(
189 | gcsOutputAvroSchema.getField("value").schema());
190 | valueRecord.put("name", new Utf8("user-" + cnt));
191 | expectedRecord.put("value", valueRecord);
192 | cnt += 1;
193 |
194 | final GenericRecord actualRecord = blobContents.get(blobName).get(i);
195 | assertThat(actualRecord).isEqualTo(expectedRecord);
196 | }
197 | }
198 | }
199 |
200 | @Test
201 | final void jsonlAvroOutputTest(final TestInfo testInfo)
202 | throws ExecutionException, InterruptedException, IOException {
203 | final var topicName = IntegrationBase.topicName(testInfo);
204 | final Map<String, String> connectorConfig = awsSpecificConfig(basicConnectorConfig(CONNECTOR_NAME), topicName);
205 | final String compression = "none";
206 | final String contentType = "jsonl";
207 | connectorConfig.put("format.output.fields", "key,value");
208 | connectorConfig.put("format.output.fields.value.encoding", "none");
209 | connectorConfig.put("key.converter", "io.confluent.connect.avro.AvroConverter");
210 | connectorConfig.put("value.converter", "io.confluent.connect.avro.AvroConverter");
211 | connectorConfig.put("value.converter.schemas.enable", "false");
212 | connectorConfig.put("file.compression.type", compression);
213 | connectorConfig.put("format.output.type", contentType);
214 | connectRunner.createConnector(connectorConfig);
215 |
216 | final int recordCountPerPartition = 10;
217 | produceRecords(recordCountPerPartition, topicName);
218 | waitForConnectToFinishProcessing();
219 |
220 | final List<String> expectedBlobs = Arrays.asList(
221 | getBlobName(topicName, 0, 0, compression),
222 | getBlobName(topicName, 1, 0, compression),
223 | getBlobName(topicName, 2, 0, compression),
224 | getBlobName(topicName, 3, 0, compression));
225 |
226 | for (final String blobName : expectedBlobs) {
227 | assertThat(testBucketAccessor.doesObjectExist(blobName)).isTrue();
228 | }
229 |
230 | final Map<String, List<String>> blobContents = new HashMap<>();
231 | for (final String blobName : expectedBlobs) {
232 | final List<String> items = new ArrayList<>(testBucketAccessor.readLines(blobName, compression));
233 | blobContents.put(blobName, items);
234 | }
235 |
236 | int cnt = 0;
237 | for (int i = 0; i < recordCountPerPartition; i++) {
238 | for (int partition = 0; partition < 4; partition++) {
239 | final String key = "key-" + cnt;
240 | final String value = "{" + "\"name\":\"user-" + cnt + "\"}";
241 | cnt += 1;
242 |
243 | final String blobName = getBlobName(topicName, partition, 0, "none");
244 | final String expectedLine = "{\"value\":" + value + ",\"key\":\"" + key + "\"}";
245 |
246 | assertThat(blobContents.get(blobName).get(i)).isEqualTo(expectedLine);
247 | }
248 | }
249 | }
250 |
251 | private void waitForConnectToFinishProcessing() throws InterruptedException {
252 | // TODO more robust way to detect that Connect finished processing
253 | Thread.sleep(OFFSET_FLUSH_INTERVAL_MS * 2);
254 | }
255 |
256 | private void produceRecords(final int recordCountPerPartition, final String topicName)
257 | throws ExecutionException, InterruptedException {
258 | final List<Future<RecordMetadata>> sendFutures = new ArrayList<>();
259 | int cnt = 0;
260 | for (int i = 0; i < recordCountPerPartition; i++) {
261 | for (int partition = 0; partition < 4; partition++) {
262 | final String key = "key-" + cnt;
263 | final GenericRecord value = new GenericData.Record(avroInputDataSchema);
264 | value.put("name", "user-" + cnt);
265 | cnt += 1;
266 |
267 | sendFutures.add(sendMessageAsync(producer, topicName, partition, key, value));
268 | }
269 | }
270 | producer.flush();
271 | for (final Future<RecordMetadata> sendFuture : sendFutures) {
272 | sendFuture.get();
273 | }
274 | }
275 |
276 | private KafkaProducer<String, GenericRecord> newProducer() {
277 | final Map<String, Object> producerProps = new HashMap<>();
278 | producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA.getBootstrapServers());
279 | producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
280 | "io.confluent.kafka.serializers.KafkaAvroSerializer");
281 | producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
282 | "io.confluent.kafka.serializers.KafkaAvroSerializer");
283 | producerProps.put("schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
284 | return new KafkaProducer<>(producerProps);
285 | }
286 |
287 | private Future<RecordMetadata> sendMessageAsync(final KafkaProducer<String, GenericRecord> producer,
288 | final String topicName,
289 | final int partition,
290 | final String key,
291 | final GenericRecord value) {
292 | final ProducerRecord<String, GenericRecord> msg = new ProducerRecord<>(
293 | topicName, partition, key, value);
294 | return producer.send(msg);
295 | }
296 |
297 | private Map<String, String> basicConnectorConfig(final String connectorName) {
298 | final Map<String, String> config = new HashMap<>();
299 | config.put("name", connectorName);
300 | config.put("key.converter", "io.confluent.connect.avro.AvroConverter");
301 | config.put("key.converter.schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
302 | config.put("value.converter", "io.confluent.connect.avro.AvroConverter");
303 | config.put("value.converter.schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
304 | config.put("tasks.max", "1");
305 | return config;
306 | }
307 |
308 | private Map<String, String> awsSpecificConfig(final Map<String, String> config, final String topicName) {
309 | config.put("connector.class", AivenKafkaConnectS3SinkConnector.class.getName());
310 | config.put("aws.access.key.id", S3_ACCESS_KEY_ID);
311 | config.put("aws.secret.access.key", S3_SECRET_ACCESS_KEY);
312 | config.put("aws.s3.endpoint", s3Endpoint);
313 | config.put("aws.s3.bucket.name", TEST_BUCKET_NAME);
314 | config.put("aws.s3.prefix", s3Prefix);
315 | config.put("topics", topicName);
316 | config.put("key.converter.schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
317 | config.put("value.converter.schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
318 | config.put("tasks.max", "1");
319 | return config;
320 | }
321 |
322 | private String getAvroBlobName(final String topicName, final int partition, final int startOffset,
323 | final String compression) {
324 | final String result = String.format("%s%s-%d-%020d.avro", s3Prefix, topicName, partition, startOffset);
325 | return result + CompressionType.forName(compression).extension();
326 | }
327 |
328 | // WARN: different from GCS
329 | private String getBlobName(final String topicName, final int partition, final int startOffset,
330 | final String compression) {
331 | final String result = String.format("%s%s-%d-%020d", s3Prefix, topicName, partition, startOffset);
332 | return result + CompressionType.forName(compression).extension();
333 | }
334 | }
335 |
--------------------------------------------------------------------------------
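A note on the key format asserted in the tests above: getAvroBlobName builds "<prefix><topic>-<partition>-<zero-padded start offset>.avro" and then appends the compression extension. A minimal sketch with hypothetical prefix and topic values:

    // Hypothetical illustration of the key produced by getAvroBlobName above,
    // assuming s3Prefix "test-prefix/", topic "some-topic", partition 0, offset 0.
    final String key = String.format("%s%s-%d-%020d.avro", "test-prefix/", "some-topic", 0, 0);
    // key == "test-prefix/some-topic-0-00000000000000000000.avro"
    // CompressionType.forName(compression).extension() then appends the suffix
    // (empty for "none"; ".gz" for gzip, going by the common config library).
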
/src/integration-test/java/io/aiven/kafka/connect/AvroParquetIntegrationTest.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 Aiven Oy
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.aiven.kafka.connect;
18 |
19 | import java.io.File;
20 | import java.io.IOException;
21 | import java.nio.file.Path;
22 | import java.nio.file.Paths;
23 | import java.time.ZonedDateTime;
24 | import java.time.format.DateTimeFormatter;
25 | import java.util.ArrayList;
26 | import java.util.Arrays;
27 | import java.util.HashMap;
28 | import java.util.List;
29 | import java.util.Map;
30 | import java.util.concurrent.ExecutionException;
31 | import java.util.concurrent.Future;
32 | import java.util.stream.Collectors;
33 |
34 | import org.apache.kafka.clients.admin.AdminClient;
35 | import org.apache.kafka.clients.producer.KafkaProducer;
36 | import org.apache.kafka.clients.producer.ProducerConfig;
37 | import org.apache.kafka.clients.producer.ProducerRecord;
38 | import org.apache.kafka.clients.producer.RecordMetadata;
39 |
40 | import io.aiven.kafka.connect.common.config.CompressionType;
41 | import io.aiven.kafka.connect.s3.AivenKafkaConnectS3SinkConnector;
42 | import io.aiven.kafka.connect.s3.SchemaRegistryContainer;
43 | import io.aiven.kafka.connect.s3.testutils.BucketAccessor;
44 |
45 | import com.amazonaws.services.s3.AmazonS3;
46 | import org.apache.avro.Schema;
47 | import org.apache.avro.SchemaBuilder;
48 | import org.apache.avro.generic.GenericData;
49 | import org.apache.avro.generic.GenericRecord;
50 | import org.junit.jupiter.api.AfterEach;
51 | import org.junit.jupiter.api.BeforeAll;
52 | import org.junit.jupiter.api.BeforeEach;
53 | import org.junit.jupiter.api.Test;
54 | import org.junit.jupiter.api.TestInfo;
55 | import org.junit.jupiter.api.io.TempDir;
56 | import org.testcontainers.containers.KafkaContainer;
57 | import org.testcontainers.containers.localstack.LocalStackContainer;
58 | import org.testcontainers.junit.jupiter.Container;
59 | import org.testcontainers.junit.jupiter.Testcontainers;
60 |
61 | import static org.assertj.core.api.Assertions.assertThat;
62 |
63 | @Testcontainers
64 | class AvroParquetIntegrationTest implements IntegrationBase {
65 | private static final String S3_ACCESS_KEY_ID = "test-key-id0";
66 | private static final String S3_SECRET_ACCESS_KEY = "test_secret_key0";
67 | private static final String TEST_BUCKET_NAME = "test-bucket0";
68 |
69 | private static final String CONNECTOR_NAME = "aiven-s3-sink-connector";
70 | private static final String COMMON_PREFIX = "s3-connector-for-apache-kafka-test-";
71 | private static final int OFFSET_FLUSH_INTERVAL_MS = 5000;
72 |
73 | private static String s3Endpoint;
74 | private static String s3Prefix;
75 | private static BucketAccessor testBucketAccessor;
76 | private static File pluginDir;
77 |
78 | @Container
79 | public static final LocalStackContainer LOCALSTACK = IntegrationBase.createS3Container();
80 | @Container
81 | private static final KafkaContainer KAFKA = IntegrationBase.createKafkaContainer();
82 | @Container
83 | private static final SchemaRegistryContainer SCHEMA_REGISTRY = new SchemaRegistryContainer(KAFKA);
84 | private AdminClient adminClient;
85 | private KafkaProducer<String, GenericRecord> producer;
86 | private ConnectRunner connectRunner;
87 |
88 | @BeforeAll
89 | static void setUpAll() throws IOException, InterruptedException {
90 | s3Prefix = COMMON_PREFIX
91 | + ZonedDateTime.now().format(DateTimeFormatter.ISO_LOCAL_DATE_TIME) + "/";
92 |
93 | final AmazonS3 s3 = IntegrationBase.createS3Client(LOCALSTACK);
94 | s3Endpoint = LOCALSTACK.getEndpoint().toString();
95 | testBucketAccessor = new BucketAccessor(s3, TEST_BUCKET_NAME);
96 | testBucketAccessor.createBucket();
97 |
98 | pluginDir = IntegrationBase.getPluginDir();
99 | IntegrationBase.extractConnectorPlugin(pluginDir);
100 |
101 | IntegrationBase.waitForRunningContainer(KAFKA);
102 | }
103 |
104 | @BeforeEach
105 | void setUp(final TestInfo testInfo) throws ExecutionException, InterruptedException {
106 | adminClient = newAdminClient(KAFKA);
107 | producer = newProducer();
108 |
109 | final var topicName = IntegrationBase.topicName(testInfo);
110 | IntegrationBase.createTopics(adminClient, List.of(topicName));
111 |
112 | connectRunner = newConnectRunner(KAFKA, pluginDir, OFFSET_FLUSH_INTERVAL_MS);
113 | connectRunner.start();
114 | }
115 |
116 | @AfterEach
117 | final void tearDown() {
118 | connectRunner.stop();
119 | adminClient.close();
120 | producer.close();
121 |
122 | connectRunner.awaitStop();
123 | }
124 |
125 | @Test
126 | final void allOutputFields(@TempDir final Path tmpDir, final TestInfo testInfo)
127 | throws ExecutionException, InterruptedException, IOException {
128 | final var topicName = IntegrationBase.topicName(testInfo);
129 | final String compression = "none";
130 | final Map<String, String> connectorConfig = awsSpecificConfig(basicConnectorConfig(compression), topicName);
131 | connectorConfig.put("format.output.fields", "key,value,offset,timestamp,headers");
132 | connectorConfig.put("format.output.fields.value.encoding", "none");
133 | connectRunner.createConnector(connectorConfig);
134 |
135 | final Schema valueSchema =
136 | SchemaBuilder.record("value")
137 | .fields()
138 | .name("name").type().stringType().noDefault()
139 | .name("value").type().stringType().noDefault()
140 | .endRecord();
141 |
142 | final List<Future<RecordMetadata>> sendFutures = new ArrayList<>();
143 | int cnt = 0;
144 | for (int i = 0; i < 10; i++) {
145 | for (int partition = 0; partition < 4; partition++) {
146 | final var key = "key-" + cnt;
147 | final GenericRecord value = new GenericData.Record(valueSchema);
148 | value.put("name", "user-" + cnt);
149 | value.put("value", "value-" + cnt);
150 | cnt += 1;
151 | sendFutures.add(sendMessageAsync(topicName, partition, key, value));
152 | }
153 | }
154 | producer.flush();
155 | for (final Future<RecordMetadata> sendFuture : sendFutures) {
156 | sendFuture.get();
157 | }
158 |
159 | // TODO more robust way to detect that Connect finished processing
160 | Thread.sleep(OFFSET_FLUSH_INTERVAL_MS * 2);
161 |
162 | final List<String> expectedBlobs = List.of(
163 | getBlobName(topicName, 0, 0, compression),
164 | getBlobName(topicName, 1, 0, compression),
165 | getBlobName(topicName, 2, 0, compression),
166 | getBlobName(topicName, 3, 0, compression));
167 | final Map<String, List<GenericRecord>> blobContents = new HashMap<>();
168 | for (final String blobName : expectedBlobs) {
169 | assertThat(testBucketAccessor.doesObjectExist(blobName)).isTrue();
170 | final var records =
171 | ParquetUtils.readRecords(
172 | tmpDir.resolve(Paths.get(blobName)),
173 | testBucketAccessor.readBytes(blobName)
174 | );
175 | blobContents.put(blobName, records);
176 | }
177 |
178 | cnt = 0;
179 | for (int i = 0; i < 10; i++) {
180 | for (int partition = 0; partition < 4; partition++) {
181 | final var name = "user-" + cnt;
182 | final var value = "value-" + cnt;
183 | final String blobName = getBlobName(topicName, partition, 0, "none");
184 | final GenericRecord record = blobContents.get(blobName).get(i);
185 | final var expectedKey = "key-" + cnt;
186 | final var expectedValue = "{\"name\": \"" + name + "\", \"value\": \"" + value + "\"}";
187 |
188 | assertThat(record.get("key").toString()).isEqualTo(expectedKey);
189 | assertThat(record.get("value").toString()).isEqualTo(expectedValue);
190 | assertThat(record.get("offset")).isNotNull();
191 | assertThat(record.get("timestamp")).isNotNull();
192 | assertThat(record.get("headers")).isNull();
193 |
194 | cnt += 1;
195 | }
196 | }
197 | }
198 |
199 | @Test
200 | final void valueComplexType(@TempDir final Path tmpDir, final TestInfo testInfo)
201 | throws ExecutionException, InterruptedException, IOException {
202 | final var topicName = IntegrationBase.topicName(testInfo);
203 | final String compression = "none";
204 | final Map<String, String> connectorConfig = awsSpecificConfig(basicConnectorConfig(compression), topicName);
205 | connectorConfig.put("format.output.fields", "value");
206 | connectorConfig.put("format.output.fields.value.encoding", "none");
207 | connectRunner.createConnector(connectorConfig);
208 |
209 | final Schema valueSchema =
210 | SchemaBuilder.record("value")
211 | .fields()
212 | .name("name").type().stringType().noDefault()
213 | .name("value").type().stringType().noDefault()
214 | .endRecord();
215 |
216 | final List<Future<RecordMetadata>> sendFutures = new ArrayList<>();
217 | int cnt = 0;
218 | for (int i = 0; i < 10; i++) {
219 | for (int partition = 0; partition < 4; partition++) {
220 | final var key = "key-" + cnt;
221 | final GenericRecord value = new GenericData.Record(valueSchema);
222 | value.put("name", "user-" + cnt);
223 | value.put("value", "value-" + cnt);
224 | cnt += 1;
225 | sendFutures.add(sendMessageAsync(topicName, partition, key, value));
226 | }
227 | }
228 | producer.flush();
229 | for (final Future<RecordMetadata> sendFuture : sendFutures) {
230 | sendFuture.get();
231 | }
232 |
233 | // TODO more robust way to detect that Connect finished processing
234 | Thread.sleep(OFFSET_FLUSH_INTERVAL_MS * 2);
235 |
236 | final List<String> expectedBlobs = List.of(
237 | getBlobName(topicName, 0, 0, compression),
238 | getBlobName(topicName, 1, 0, compression),
239 | getBlobName(topicName, 2, 0, compression),
240 | getBlobName(topicName, 3, 0, compression));
241 | final Map<String, List<GenericRecord>> blobContents = new HashMap<>();
242 | for (final String blobName : expectedBlobs) {
243 | final var records =
244 | ParquetUtils.readRecords(
245 | tmpDir.resolve(Paths.get(blobName)),
246 | testBucketAccessor.readBytes(blobName)
247 | );
248 | blobContents.put(blobName, records);
249 | }
250 | cnt = 0;
251 | for (int i = 0; i < 10; i++) {
252 | for (int partition = 0; partition < 4; partition++) {
253 | final var name = "user-" + cnt;
254 | final var value = "value-" + cnt;
255 | final String blobName = getBlobName(topicName, partition, 0, "none");
256 | final var record = blobContents.get(blobName).get(i);
257 | final var avroRecord = (GenericRecord) record.get("value");
258 |
259 | assertThat(avroRecord.get("name").toString()).isEqualTo(name);
260 | assertThat(avroRecord.get("value").toString()).isEqualTo(value);
261 |
262 | cnt += 1;
263 | }
264 | }
265 | }
266 |
267 | @Test
268 | final void schemaChanged(@TempDir final Path tmpDir, final TestInfo testInfo)
269 | throws ExecutionException, InterruptedException, IOException {
270 | final var topicName = IntegrationBase.topicName(testInfo);
271 | final String compression = "none";
272 | final Map<String, String> connectorConfig = awsSpecificConfig(basicConnectorConfig(compression), topicName);
273 | connectorConfig.put("format.output.fields", "value");
274 | connectorConfig.put("format.output.fields.value.encoding", "none");
275 | connectRunner.createConnector(connectorConfig);
276 |
277 | final Schema valueSchema =
278 | SchemaBuilder.record("value")
279 | .fields()
280 | .name("name").type().stringType().noDefault()
281 | .name("value").type().stringType().noDefault()
282 | .endRecord();
283 |
284 | final Schema newValueSchema =
285 | SchemaBuilder.record("value")
286 | .fields()
287 | .name("name").type().stringType().noDefault()
288 | .name("value").type().stringType().noDefault()
289 | .name("blocked").type().booleanType().booleanDefault(false)
290 | .endRecord();
291 |
292 | final List<Future<RecordMetadata>> sendFutures = new ArrayList<>();
293 | int cnt = 0;
294 | final var expectedRecords = new ArrayList<String>();
295 | for (int i = 0; i < 10; i++) {
296 | for (int partition = 0; partition < 4; partition++) {
297 | final var key = "key-" + cnt;
298 | final GenericRecord value;
299 | if (i < 5) {
300 | value = new GenericData.Record(valueSchema);
301 | value.put("name", "user-" + cnt);
302 | value.put("value", "value-" + cnt);
303 | } else {
304 | value = new GenericData.Record(newValueSchema);
305 | value.put("name", "user-" + cnt);
306 | value.put("value", "value-" + cnt);
307 | value.put("blocked", true);
308 | }
309 | expectedRecords.add(value.toString());
310 | cnt += 1;
311 | sendFutures.add(sendMessageAsync(topicName, partition, key, value));
312 | }
313 | }
314 | producer.flush();
315 | for (final Future<RecordMetadata> sendFuture : sendFutures) {
316 | sendFuture.get();
317 | }
318 |
319 | // TODO more robust way to detect that Connect finished processing
320 | Thread.sleep(OFFSET_FLUSH_INTERVAL_MS * 2);
321 |
322 | final List<String> expectedBlobs = Arrays.asList(
323 | getBlobName(topicName, 0, 0, compression),
324 | getBlobName(topicName, 0, 5, compression),
325 | getBlobName(topicName, 1, 0, compression),
326 | getBlobName(topicName, 1, 5, compression),
327 | getBlobName(topicName, 2, 0, compression),
328 | getBlobName(topicName, 2, 5, compression),
329 | getBlobName(topicName, 3, 0, compression),
330 | getBlobName(topicName, 3, 5, compression)
331 | );
332 | final var blobContents = new ArrayList<String>();
333 | for (final String blobName : expectedBlobs) {
334 | final var records =
335 | ParquetUtils.readRecords(
336 | tmpDir.resolve(Paths.get(blobName)),
337 | testBucketAccessor.readBytes(blobName)
338 | );
339 | blobContents.addAll(records.stream().map(r -> r.get("value").toString()).collect(Collectors.toList()));
340 | }
341 |
342 | assertThat(blobContents)
343 | .containsExactlyInAnyOrderElementsOf(expectedRecords);
344 | }
345 |
346 | private KafkaProducer<String, GenericRecord> newProducer() {
347 | final Map<String, Object> producerProps = new HashMap<>();
348 | producerProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, KAFKA.getBootstrapServers());
349 | producerProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG,
350 | "io.confluent.kafka.serializers.KafkaAvroSerializer");
351 | producerProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG,
352 | "io.confluent.kafka.serializers.KafkaAvroSerializer");
353 | producerProps.put("schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
354 | return new KafkaProducer<>(producerProps);
355 | }
356 |
357 | private Future<RecordMetadata> sendMessageAsync(final String topicName,
358 | final int partition,
359 | final String key,
360 | final GenericRecord value) {
361 | final ProducerRecord<String, GenericRecord> msg = new ProducerRecord<>(
362 | topicName, partition, key, value);
363 | return producer.send(msg);
364 | }
365 |
366 | private Map<String, String> basicConnectorConfig(final String compression) {
367 | final Map<String, String> config = new HashMap<>();
368 | config.put("name", CONNECTOR_NAME);
369 | config.put("key.converter", "io.confluent.connect.avro.AvroConverter");
370 | config.put("key.converter.schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
371 | config.put("value.converter", "io.confluent.connect.avro.AvroConverter");
372 | config.put("value.converter.schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
373 | config.put("tasks.max", "1");
374 | config.put("file.compression.type", compression);
375 | config.put("format.output.type", "parquet");
376 | return config;
377 | }
378 |
379 | private Map<String, String> awsSpecificConfig(final Map<String, String> config, final String topicName) {
380 | config.put("connector.class", AivenKafkaConnectS3SinkConnector.class.getName());
381 | config.put("aws.access.key.id", S3_ACCESS_KEY_ID);
382 | config.put("aws.secret.access.key", S3_SECRET_ACCESS_KEY);
383 | config.put("aws.s3.endpoint", s3Endpoint);
384 | config.put("aws.s3.bucket.name", TEST_BUCKET_NAME);
385 | config.put("aws.s3.prefix", s3Prefix);
386 | config.put("topics", topicName);
387 | config.put("key.converter.schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
388 | config.put("value.converter.schema.registry.url", SCHEMA_REGISTRY.getSchemaRegistryUrl());
389 | config.put("tasks.max", "1");
390 | return config;
391 | }
392 |
393 | // WARN: different from GCS
394 | private String getBlobName(final String topicName, final int partition, final int startOffset,
395 | final String compression) {
396 | final String result = String.format("%s%s-%d-%020d", s3Prefix, topicName, partition, startOffset);
397 | return result + CompressionType.forName(compression).extension();
398 | }
399 |
400 | }
401 |
--------------------------------------------------------------------------------
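The schemaChanged test above depends on the sink starting a new file when the value schema changes: the schema switches at record index 5, so each of the four partitions produces two objects, one starting at offset 0 and one at offset 5. A short sketch of the eight expected keys, assuming a hypothetical prefix and topic name and "none" compression (empty extension):

    // Hypothetical sketch of the expected object keys after the schema change,
    // mirroring the getBlobName format used in the test above.
    for (int partition = 0; partition < 4; partition++) {
        System.out.println(String.format("%s%s-%d-%020d", "test-prefix/", "some-topic", partition, 0));
        System.out.println(String.format("%s%s-%d-%020d", "test-prefix/", "some-topic", partition, 5));
    }
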
/src/integration-test/java/io/aiven/kafka/connect/ConnectRunner.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Aiven Oy
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.aiven.kafka.connect;
18 |
19 | import java.io.File;
20 | import java.util.HashMap;
21 | import java.util.Map;
22 | import java.util.concurrent.ExecutionException;
23 |
24 | import org.apache.kafka.common.utils.Time;
25 | import org.apache.kafka.connect.runtime.Connect;
26 | import org.apache.kafka.connect.runtime.ConnectorConfig;
27 | import org.apache.kafka.connect.runtime.Herder;
28 | import org.apache.kafka.connect.runtime.Worker;
29 | import org.apache.kafka.connect.runtime.isolation.Plugins;
30 | import org.apache.kafka.connect.runtime.rest.RestServer;
31 | import org.apache.kafka.connect.runtime.rest.entities.ConnectorInfo;
32 | import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
33 | import org.apache.kafka.connect.runtime.standalone.StandaloneHerder;
34 | import org.apache.kafka.connect.storage.MemoryOffsetBackingStore;
35 | import org.apache.kafka.connect.util.Callback;
36 | import org.apache.kafka.connect.util.FutureCallback;
37 |
38 | import org.slf4j.Logger;
39 | import org.slf4j.LoggerFactory;
40 |
41 | final class ConnectRunner {
42 | private static final Logger log = LoggerFactory.getLogger(ConnectRunner.class);
43 |
44 | private final File pluginDir;
45 | private final String bootstrapServers;
46 | private final int offsetFlushInterval;
47 |
48 | private Herder herder;
49 | private Connect connect;
50 |
51 | public ConnectRunner(final File pluginDir,
52 | final String bootstrapServers,
53 | final int offsetFlushIntervalMs) {
54 | this.pluginDir = pluginDir;
55 | this.bootstrapServers = bootstrapServers;
56 | this.offsetFlushInterval = offsetFlushIntervalMs;
57 | }
58 |
59 | void start() {
60 | final Map<String, String> workerProps = new HashMap<>();
61 | workerProps.put("bootstrap.servers", bootstrapServers);
62 |
63 | workerProps.put("offset.flush.interval.ms", Integer.toString(offsetFlushInterval));
64 |
65 | // These don't matter much (each connector sets its own converters), but need to be filled with valid classes.
66 | workerProps.put("key.converter", "org.apache.kafka.connect.converters.ByteArrayConverter");
67 | workerProps.put("value.converter", "org.apache.kafka.connect.converters.ByteArrayConverter");
68 | workerProps.put("internal.key.converter", "org.apache.kafka.connect.json.JsonConverter");
69 | workerProps.put("internal.key.converter.schemas.enable", "false");
70 | workerProps.put("internal.value.converter", "org.apache.kafka.connect.json.JsonConverter");
71 | workerProps.put("internal.value.converter.schemas.enable", "false");
72 |
73 | // Don't need it since we'll use MemoryOffsetBackingStore.
74 | workerProps.put("offset.storage.file.filename", "");
75 |
76 | workerProps.put("plugin.path", pluginDir.getPath());
77 |
78 | final Time time = Time.SYSTEM;
79 | final String workerId = "test-worker";
80 | final String kafkaClusterId = "test-cluster";
81 |
82 | final Plugins plugins = new Plugins(workerProps);
83 | final StandaloneConfig config = new StandaloneConfig(workerProps);
84 |
85 | final Worker worker = new Worker(
86 | workerId, time, plugins, config, new MemoryOffsetBackingStore());
87 | herder = new StandaloneHerder(worker, kafkaClusterId);
88 |
89 | final RestServer rest = new RestServer(config);
90 |
91 | connect = new Connect(herder, rest);
92 |
93 | connect.start();
94 | }
95 |
96 | void createConnector(final Map<String, String> config) throws ExecutionException, InterruptedException {
97 | assert herder != null;
98 |
99 | final FutureCallback<Herder.Created<ConnectorInfo>> cb = new FutureCallback<>(
100 | new Callback<Herder.Created<ConnectorInfo>>() {
101 | @Override
102 | public void onCompletion(final Throwable error, final Herder.Created<ConnectorInfo> info) {
103 | if (error != null) {
104 | log.error("Failed to create job");
105 | } else {
106 | log.info("Created connector {}", info.result().name());
107 | }
108 | }
109 | });
110 | herder.putConnectorConfig(
111 | config.get(ConnectorConfig.NAME_CONFIG),
112 | config, false, cb
113 | );
114 |
115 | final Herder.Created<ConnectorInfo> connectorInfoCreated = cb.get();
116 | assert connectorInfoCreated.created();
117 | }
118 |
119 | void stop() {
120 | connect.stop();
121 | }
122 |
123 | void awaitStop() {
124 | connect.awaitStop();
125 | }
126 | }
127 |
--------------------------------------------------------------------------------
/src/integration-test/java/io/aiven/kafka/connect/IntegrationBase.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Aiven Oy
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.aiven.kafka.connect;
18 |
19 | import java.io.File;
20 | import java.io.IOException;
21 | import java.nio.file.Files;
22 | import java.time.Duration;
23 | import java.util.List;
24 | import java.util.Properties;
25 | import java.util.concurrent.ExecutionException;
26 | import java.util.stream.Collectors;
27 |
28 | import org.apache.kafka.clients.admin.AdminClient;
29 | import org.apache.kafka.clients.admin.AdminClientConfig;
30 | import org.apache.kafka.clients.admin.NewTopic;
31 |
32 | import com.amazonaws.auth.AWSStaticCredentialsProvider;
33 | import com.amazonaws.auth.BasicAWSCredentials;
34 | import com.amazonaws.client.builder.AwsClientBuilder;
35 | import com.amazonaws.services.s3.AmazonS3;
36 | import com.amazonaws.services.s3.AmazonS3ClientBuilder;
37 | import com.github.dockerjava.api.model.Ulimit;
38 | import org.awaitility.Awaitility;
39 | import org.junit.jupiter.api.TestInfo;
40 | import org.testcontainers.containers.Container;
41 | import org.testcontainers.containers.KafkaContainer;
42 | import org.testcontainers.containers.Network;
43 | import org.testcontainers.containers.localstack.LocalStackContainer;
44 | import org.testcontainers.utility.DockerImageName;
45 |
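/**
 * Shared test fixtures: factory methods for the LocalStack S3 container, an AmazonS3
 * client pointed at it, a Kafka container, an AdminClient, plus helpers that extract
 * the built connector distribution into a plugin directory and create test topics.
 */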
46 | public interface IntegrationBase {
47 |
48 | static LocalStackContainer createS3Container() {
49 | return new LocalStackContainer(
50 | DockerImageName.parse("localstack/localstack:2.0.2")
51 | ).withServices(LocalStackContainer.Service.S3);
52 | }
53 |
54 | static AmazonS3 createS3Client(final LocalStackContainer localStackContainer) {
55 | return AmazonS3ClientBuilder
56 | .standard()
57 | .withEndpointConfiguration(
58 | new AwsClientBuilder.EndpointConfiguration(
59 | localStackContainer.getEndpointOverride(LocalStackContainer.Service.S3).toString(),
60 | localStackContainer.getRegion()
61 | )
62 | )
63 | .withCredentials(
64 | new AWSStaticCredentialsProvider(
65 | new BasicAWSCredentials(localStackContainer.getAccessKey(), localStackContainer.getSecretKey())
66 | )
67 | )
68 | .build();
69 | }
70 |
71 | default AdminClient newAdminClient(final KafkaContainer kafka) {
72 | final Properties adminClientConfig = new Properties();
73 | adminClientConfig.put(AdminClientConfig.BOOTSTRAP_SERVERS_CONFIG, kafka.getBootstrapServers());
74 | return AdminClient.create(adminClientConfig);
75 | }
76 |
77 | default ConnectRunner newConnectRunner(final KafkaContainer kafka,
78 | final File pluginDir,
79 | final int offsetFlushIntervalMs) {
80 | return new ConnectRunner(pluginDir, kafka.getBootstrapServers(), offsetFlushIntervalMs);
81 | }
82 |
83 |
84 | static void extractConnectorPlugin(final File pluginDir) throws IOException, InterruptedException {
85 | final File distFile = new File(System.getProperty("integration-test.distribution.file.path"));
86 | assert distFile.exists();
87 |
88 | final String cmd = String.format("tar -xf %s --strip-components=1 -C %s",
89 | distFile, pluginDir.toString());
90 | final Process p = Runtime.getRuntime().exec(cmd);
91 | assert p.waitFor() == 0;
92 | }
93 |
94 | static File getPluginDir() throws IOException {
95 | final File testDir = Files.createTempDirectory("s3-connector-for-apache-kafka-test-").toFile();
96 |
97 | final File pluginDir = new File(testDir, "plugins/s3-connector-for-apache-kafka/");
98 | assert pluginDir.mkdirs();
99 | return pluginDir;
100 | }
101 |
102 | static KafkaContainer createKafkaContainer() {
103 | return new KafkaContainer("5.2.1")
104 | .withEnv("KAFKA_AUTO_CREATE_TOPICS_ENABLE", "false")
105 | .withNetwork(Network.newNetwork())
106 | .withExposedPorts(KafkaContainer.KAFKA_PORT, 9092)
107 | .withCreateContainerCmdModifier(cmd ->
108 | cmd.getHostConfig().withUlimits(List.of(new Ulimit("nofile", 30000L, 30000L)))
109 | );
110 | }
111 |
112 | static String topicName(final TestInfo testInfo) {
113 | return testInfo.getTestMethod().get().getName() + "-" + testInfo.getDisplayName().hashCode();
114 | }
115 |
116 | static void createTopics(final AdminClient adminClient, final List<String> topicNames)
117 | throws ExecutionException, InterruptedException {
118 | final var newTopics = topicNames.stream()
119 | .map(s -> new NewTopic(s, 4, (short) 1))
120 | .collect(Collectors.toList());
121 | adminClient.createTopics(newTopics).all().get();
122 | }
123 |
124 | static void waitForRunningContainer(final Container<?> kafka) {
125 | Awaitility.await().atMost(Duration.ofMinutes(1)).until(kafka::isRunning);
126 | }
127 | }
128 |
--------------------------------------------------------------------------------
/src/integration-test/java/io/aiven/kafka/connect/ParquetUtils.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2021 Aiven Oy
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.aiven.kafka.connect;
18 |
19 | import java.io.IOException;
20 | import java.nio.channels.Channels;
21 | import java.nio.file.Files;
22 | import java.nio.file.Path;
23 | import java.util.ArrayList;
24 | import java.util.List;
25 |
26 | import org.apache.avro.generic.GenericRecord;
27 | import org.apache.parquet.avro.AvroParquetReader;
28 | import org.apache.parquet.io.DelegatingSeekableInputStream;
29 | import org.apache.parquet.io.InputFile;
30 | import org.apache.parquet.io.SeekableInputStream;
31 | import org.testcontainers.shaded.org.apache.commons.io.FileUtils;
32 |
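/**
 * Reads back all Avro {@link GenericRecord}s from raw Parquet bytes by spooling them
 * to a temporary file and exposing a seekable byte channel through Parquet's InputFile API.
 */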
33 | class ParquetUtils {
34 |
35 | static List<GenericRecord> readRecords(final Path tmpDir, final byte[] bytes) throws IOException {
36 | final var records = new ArrayList<GenericRecord>();
37 | final var parquetFile = tmpDir.resolve("parquet.file");
38 | FileUtils.writeByteArrayToFile(parquetFile.toFile(), bytes);
39 | final var seekableByteChannel = Files.newByteChannel(parquetFile);
40 | try (final var r = AvroParquetReader.<GenericRecord>builder(new InputFile() {
41 | @Override
42 | public long getLength() throws IOException {
43 | return seekableByteChannel.size();
44 | }
45 |
46 | @Override
47 | public SeekableInputStream newStream() throws IOException {
48 | return new DelegatingSeekableInputStream(Channels.newInputStream(seekableByteChannel)) {
49 | @Override
50 | public long getPos() throws IOException {
51 | return seekableByteChannel.position();
52 | }
53 |
54 | @Override
55 | public void seek(final long l) throws IOException {
56 | seekableByteChannel.position(l);
57 | }
58 | };
59 | }
60 |
61 | }).withCompatibility(false).build()) {
62 | var record = r.read();
63 | while (record != null) {
64 | records.add(record);
65 | record = r.read();
66 | }
67 | }
68 | return records;
69 | }
70 |
71 | }
72 |
--------------------------------------------------------------------------------
/src/integration-test/java/io/aiven/kafka/connect/s3/SchemaRegistryContainer.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Aiven Oy
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.aiven.kafka.connect.s3;
18 |
19 | import java.util.List;
20 |
21 | import com.github.dockerjava.api.model.Ulimit;
22 | import org.testcontainers.containers.GenericContainer;
23 | import org.testcontainers.containers.KafkaContainer;
24 | import org.testcontainers.utility.Base58;
25 |
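/**
 * Testcontainers wrapper around the Confluent Schema Registry image, attached to the
 * given Kafka container's network and configured to use it as the Kafka store.
 */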
26 | public final class SchemaRegistryContainer extends GenericContainer<SchemaRegistryContainer> {
27 | public static final int SCHEMA_REGISTRY_PORT = 8081;
28 |
29 | public SchemaRegistryContainer(final KafkaContainer kafka) {
30 | this("5.0.4", kafka);
31 | }
32 |
33 | public SchemaRegistryContainer(final String confluentPlatformVersion, final KafkaContainer kafka) {
34 | super("confluentinc/cp-schema-registry:" + confluentPlatformVersion);
35 |
36 | dependsOn(kafka);
37 | withNetwork(kafka.getNetwork());
38 | withNetworkAliases("schema-registry-" + Base58.randomString(6));
39 |
40 | withEnv("SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS",
41 | String.format("PLAINTEXT://%s:%s", kafka.getNetworkAliases().get(0), 9092));
42 |
43 | withExposedPorts(SCHEMA_REGISTRY_PORT);
44 | withEnv("SCHEMA_REGISTRY_HOST_NAME", "localhost");
45 |
46 | withCreateContainerCmdModifier(cmd ->
47 | cmd.getHostConfig().withUlimits(List.of(new Ulimit("nofile", 30000L, 30000L)))
48 | );
49 | }
50 |
51 | public String getSchemaRegistryUrl() {
52 | return String.format("http://%s:%s", getContainerIpAddress(), getMappedPort(SCHEMA_REGISTRY_PORT));
53 | }
54 | }
55 |
--------------------------------------------------------------------------------
/src/integration-test/resources/logback-test.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |
4 | <appender name="STDOUT"
5 | class="ch.qos.logback.core.ConsoleAppender">
6 | <encoder>
7 | <pattern>%d{HH:mm:ss.SSS} [%thread] %-5level %logger{36} - %msg%n</pattern>
8 | </encoder>
9 | </appender>
10 |
11 | <root level="INFO">
12 | <appender-ref ref="STDOUT"/>
13 | </root>
14 |
15 | </configuration>
16 |
17 |
--------------------------------------------------------------------------------
/src/main/java/io/aiven/kafka/connect/s3/AivenKafkaConnectS3SinkConnector.java:
--------------------------------------------------------------------------------
1 | /*
2 | * Copyright 2020 Aiven Oy
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
17 | package io.aiven.kafka.connect.s3;
18 |
19 | import java.util.ArrayList;
20 | import java.util.List;
21 | import java.util.Map;
22 | import java.util.Objects;
23 |
24 | import org.apache.kafka.common.config.ConfigDef;
25 | import org.apache.kafka.connect.connector.Task;
26 | import org.apache.kafka.connect.sink.SinkConnector;
27 |
28 | import io.aiven.kafka.connect.s3.config.S3SinkConfig;
29 |
30 | import org.slf4j.Logger;
31 | import org.slf4j.LoggerFactory;
32 |
33 | public class AivenKafkaConnectS3SinkConnector extends SinkConnector {
34 |
35 | private static final Logger LOGGER = LoggerFactory.getLogger(AivenKafkaConnectS3SinkConnector.class);
36 |
37 | private Map<String, String> configProperties;
38 |
39 | @Override
40 | public ConfigDef config() {
41 | return S3SinkConfig.configDef();
42 | }
43 |
44 | @Override
45 | public String version() {
46 | return Version.VERSION;
47 | }
48 |
49 | @Override
50 | public Class<? extends Task> taskClass() {
51 | return S3SinkTask.class;
52 | }
53 |
54 | @Override
55 | public List