├── .github
│   ├── CODEOWNERS
│   └── workflows
│       ├── main.yml
│       ├── codeql-analysis.yml
│       ├── build.yml
│       └── release.yml
├── .README_images
│   ├── DroneFly_logo.png
│   ├── drone-fly-after.png
│   └── drone-fly-before.png
├── drone-fly-app
│   ├── src
│   │   ├── main
│   │   │   ├── resources
│   │   │   │   ├── drone-fly-app.yml
│   │   │   │   └── banner.txt
│   │   │   └── java
│   │   │       └── com
│   │   │           └── expediagroup
│   │   │               └── dataplatform
│   │   │                   └── dronefly
│   │   │                       └── app
│   │   │                           ├── messaging
│   │   │                           │   └── MessageReaderAdapter.java
│   │   │                           ├── service
│   │   │                           │   ├── factory
│   │   │                           │   │   ├── HMSHandlerFactory.java
│   │   │                           │   │   └── ListenerCatalogFactory.java
│   │   │                           │   ├── ListenerCatalog.java
│   │   │                           │   ├── DroneFlyNotificationService.java
│   │   │                           │   ├── HiveEventConverterService.java
│   │   │                           │   └── listener
│   │   │                           │       └── LoggingMetastoreListener.java
│   │   │                           ├── DroneFly.java
│   │   │                           ├── DroneFlyRunner.java
│   │   │                           └── context
│   │   │                               └── CommonBeans.java
│   │   └── test
│   │       ├── resources
│   │       │   └── log4j2.xml
│   │       └── java
│   │           └── com
│   │               └── expediagroup
│   │                   └── dataplatform
│   │                       └── dronefly
│   │                           └── app
│   │                               ├── service
│   │                               │   ├── factory
│   │                               │   │   ├── HMSHandlerFactoryTest.java
│   │                               │   │   └── ListenerCatalogFactoryTest.java
│   │                               │   ├── HiveTableTestUtils.java
│   │                               │   ├── listener
│   │                               │   │   ├── DummyListener.java
│   │                               │   │   └── AnotherDummyListener.java
│   │                               │   ├── ListenerCatalogTest.java
│   │                               │   ├── DroneFlyNotificationServiceTest.java
│   │                               │   └── HiveEventConverterServiceTest.java
│   │                               ├── messaging
│   │                               │   └── MessageReaderAdapterTest.java
│   │                               └── DroneFlyRunnerTest.java
│   └── pom.xml
├── NOTICE
├── .gitignore
├── drone-fly-core
│   ├── src
│   │   ├── test
│   │   │   └── java
│   │   │       └── com
│   │   │           └── expediagroup
│   │   │               └── dataplatform
│   │   │                   └── dronefly
│   │   │                       └── core
│   │   │                           └── DroneFlyCoreTest.java
│   │   └── main
│   │       └── java
│   │           └── com
│   │               └── expediagroup
│   │                   └── dataplatform
│   │                       └── dronefly
│   │                           └── core
│   │                               ├── DroneFlyCore.java
│   │                               └── exception
│   │                                   └── DroneFlyException.java
│   └── pom.xml
├── drone-fly-integration-tests
│   ├── src
│   │   └── test
│   │       ├── resources
│   │       │   └── log4j2.xml
│   │       └── java
│   │           └── com
│   │               └── expediagroup
│   │                   └── dataplatform
│   │                       └── dronefly
│   │                           └── core
│   │                               └── integration
│   │                                   ├── DroneFlyIntegrationTestUtils.java
│   │                                   ├── DummyListener.java
│   │                                   └── DroneFlyIntegrationTest.java
│   └── pom.xml
├── CHANGELOG.md
├── CONTRIBUTING.md
├── CODE-OF-CONDUCT.md
├── README.md
├── pom.xml
└── LICENSE.txt
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | * @ExpediaGroup/apiary-committers
2 |
--------------------------------------------------------------------------------
/.README_images/DroneFly_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ExpediaGroup/drone-fly/HEAD/.README_images/DroneFly_logo.png
--------------------------------------------------------------------------------
/drone-fly-app/src/main/resources/drone-fly-app.yml:
--------------------------------------------------------------------------------
1 | management.endpoints.web.exposure.include: health,info,prometheus
2 |
--------------------------------------------------------------------------------
/.README_images/drone-fly-after.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ExpediaGroup/drone-fly/HEAD/.README_images/drone-fly-after.png
--------------------------------------------------------------------------------
/.README_images/drone-fly-before.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/ExpediaGroup/drone-fly/HEAD/.README_images/drone-fly-before.png
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Expedia, Inc.
2 | Copyright (C) 2020 Expedia, Inc.
3 |
4 | This product includes software developed by Expedia Inc. licensed under the Apache License, Version 2.0.
5 |
6 | This product includes software developed by The Apache Software Foundation (http://www.apache.org/) licensed under the Apache License, Version 2.0.
7 |
8 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/resources/banner.txt:
--------------------------------------------------------------------------------
1 | ______ ______ _____ _ _ _____ ______ _ __ __
2 | | _ \| ___ \| _ || \ | || ___| | ___|| | \ \ / /
3 | | | | || |_/ /| | | || \| || |__ ______ | |_ | | \ V /
4 | | | | || / | | | || . ` || __| |______|| _| | | \ /
5 | | |/ / | |\ \ \ \_/ /| |\ || |___ | | | |____ | |
6 | |___/ \_| \_| \___/ \_| \_/\____/ \_| \_____/ \_/
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Eclipse files
2 | .classpath
3 | .settings
4 | .project
5 | .springBeans
6 |
7 | # Checkstyle, findbugs and other eclipse plugins
8 | */.checkstyle
9 | */.fbExcludeFilterFile
10 | */.factorypath
11 | */.apt_generated/
12 |
13 | # Jenv configuration
14 | .java-version
15 |
16 | # Intellij
17 | .idea/
18 | *.iml
19 | *.iws
20 |
21 | # VS code
22 | .vscode/
23 |
24 | # Netbeans files
25 | nbactions.xml
26 |
27 | # Maven target folder
28 | target/
29 | bin/
30 |
31 | # MAC stuff
32 | .DS_Store
33 |
34 | # Generated pom
35 | core/dependency-reduced-pom.xml
36 |
37 | # Build script log
38 | /build.log
39 |
40 | */native-libs
41 |
42 | lib/*
43 | *.jar
44 |
45 |
--------------------------------------------------------------------------------
/drone-fly-core/src/test/java/com/expediagroup/dataplatform/dronefly/core/DroneFlyCoreTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.core;
17 |
18 | public class DroneFlyCoreTest {
19 | // Tests for DroneFlyCore
20 | }
21 |
--------------------------------------------------------------------------------
/drone-fly-core/src/main/java/com/expediagroup/dataplatform/dronefly/core/DroneFlyCore.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.core;
17 |
18 | public class DroneFlyCore {
19 | // Some core functionality will be implemented here.
20 |
21 | }
22 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/resources/log4j2.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/drone-fly-integration-tests/src/test/resources/log4j2.xml:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/drone-fly-core/src/main/java/com/expediagroup/dataplatform/dronefly/core/exception/DroneFlyException.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.core.exception;
17 |
18 | public class DroneFlyException extends RuntimeException {
19 |
20 | private static final long serialVersionUID = 1L;
21 |
22 | public DroneFlyException(String message, Exception e) {
23 | super(message, e);
24 | }
25 |
26 | public DroneFlyException(String message, Throwable e) {
27 | super(message, e);
28 | }
29 |
30 | public DroneFlyException(String message) {
31 | super(message);
32 | }
33 | }
34 |
--------------------------------------------------------------------------------
/.github/workflows/main.yml:
--------------------------------------------------------------------------------
1 | name: Automatic build
2 |
3 | on:
4 | pull_request:
5 | push:
6 | branches:
7 | - main
8 |
9 | jobs:
10 | test:
11 | name: Package and run all tests
12 | runs-on: ubuntu-latest
13 | steps:
14 | - uses: actions/checkout@v2
15 | with:
16 | fetch-depth: 0
17 | - name: Init Coveralls
18 | shell: bash
19 | run: |
20 | COVERALLS_TOKEN=${{ secrets.COVERALLS_REPO_TOKEN }}
21 | if [[ -z "${COVERALLS_TOKEN}" ]];
22 | then
23 | echo "Coveralls token not available"
24 | COVERALLS_SKIP=true
25 | else
26 | echo "Coveralls token available"
27 | COVERALLS_SKIP=false
28 | fi
29 | echo "COVERALLS_SKIP=${COVERALLS_SKIP}" >> $GITHUB_ENV
30 | - name: Set up JDK
31 | uses: actions/setup-java@v2
32 | with:
33 | distribution: 'adopt'
34 | java-version: 8
35 | java-package: jdk
36 | # this creates a settings.xml with the following server
37 | settings-path: ${{ github.workspace }}
38 | - name: Run Maven Targets
39 | run: mvn package jacoco:report coveralls:report --batch-mode --show-version --activate-profiles coveralls -Dcoveralls.skip=$COVERALLS_SKIP -DrepoToken=${{ secrets.COVERALLS_REPO_TOKEN }}
40 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/messaging/MessageReaderAdapter.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.messaging;
17 |
18 | import java.io.IOException;
19 |
20 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryListenerEvent;
21 | import com.expediagroup.apiary.extensions.events.metastore.kafka.messaging.KafkaMessageReader;
22 |
23 | public class MessageReaderAdapter {
24 |
25 | private final KafkaMessageReader delegate;
26 |
27 | public MessageReaderAdapter(KafkaMessageReader delegate) {
28 | this.delegate = delegate;
29 | }
30 |
31 | public ApiaryListenerEvent read() {
32 | return delegate.next();
33 | }
34 |
35 | public void close() throws IOException {
36 | delegate.close();
37 | }
38 | }
39 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactory.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service.factory;
17 |
18 | import org.apache.hadoop.hive.conf.HiveConf;
19 | import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
20 | import org.apache.hadoop.hive.metastore.api.MetaException;
21 | import org.springframework.beans.factory.annotation.Autowired;
22 | import org.springframework.stereotype.Component;
23 |
24 | @Component
25 | public class HMSHandlerFactory {
26 | private final HiveConf hiveConf;
27 |
28 | @Autowired
29 | public HMSHandlerFactory(HiveConf hiveConf) {
30 | this.hiveConf = hiveConf;
31 | }
32 |
33 | public HMSHandler newInstance() throws MetaException {
34 | return new HMSHandler("drone-fly", hiveConf, false);
35 | }
36 | }
37 |
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | # Changelog
2 | All notable changes to this project will be documented in this file.
3 |
4 | The format is based on [Keep a Changelog](http://keepachangelog.com/en/1.0.0/) and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.html).
5 |
6 | ## [1.0.8] - 2025-07-22
7 | ### Changed
8 | * Update to parent 3.0.1.
9 | * Upgrade parent pom to accommodate the new Sonatype deployment.
10 | * Set the drone-fly-app-exec.jar executable flag to false as it breaks deployment. (This jar is not used inside the Docker image.)
11 |
12 | ## [1.0.7] - 2025-07-22 [YANKED] (incomplete release)
13 |
14 | ## [1.0.6] - 2025-07-22 [YANKED] (incomplete release)
15 |
16 | ## [1.0.5] - 2025-06-06
17 | ### Changed
18 | * Upgrade `hive` version from `2.3.7` to `2.3.9`.
19 | * Hive metrics can now be sent.
20 |
21 | ## [1.0.2] - 2024-11-08
22 | ### Added
23 | * Support for consumer properties, allowing connection to a Kafka cloud provider.
24 |
25 | ## [1.0.0] - 2023-04-27
26 | ### Changed
27 | * Upgrade `Springboot` version from `2.3.3.RELEASE` to `2.7.10`.
28 | * Upgrade `Springframework` version from `5.2.8.RELEASE` to `5.3.25`.
29 | * Upgrade `Mockito` version from `2.25.1` to `3.12.4`.
30 |
31 | ## [0.0.3] - 2021-12-14
32 | ### Changed
33 | * Updated log4j version to 2.16.0 because of the zero-day vulnerability.
34 |
35 | ## [0.0.2] - 2021-10-13
36 | ### Added
37 | * Integration tests.
38 |
39 | ### Changed
40 | * Updated `springframework.boot.version` to `2.3.3.RELEASE` (was `2.1.3.RELEASE`).
41 | * Updated `springframework.version` to `5.2.8.RELEASE` (was `5.1.5.RELEASE`).
42 |
43 | ## [0.0.1] - 2020-08-03
44 | ### Added
45 | * First Release.
46 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/factory/HMSHandlerFactoryTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service.factory;
17 |
18 | import static org.assertj.core.api.Assertions.assertThat;
19 |
20 | import org.apache.hadoop.hive.conf.HiveConf;
21 | import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
22 | import org.apache.hadoop.hive.metastore.api.MetaException;
23 | import org.junit.jupiter.api.Test;
24 |
25 |
26 | public class HMSHandlerFactoryTest {
27 | private HMSHandlerFactory factory;
28 |
29 | @Test
30 | public void typical() throws MetaException {
31 | HiveConf conf = new HiveConf();
32 | conf.set("test-property", "test");
33 | factory = new HMSHandlerFactory(conf);
34 |
35 | HMSHandler hmsHandler = factory.newInstance();
36 |
37 | assertThat(hmsHandler.getName()).isEqualTo("drone-fly");
38 | assertThat(hmsHandler.getHiveConf().get("test-property")).isEqualTo("test");
39 | }
40 |
41 | }
42 |
--------------------------------------------------------------------------------
/.github/workflows/codeql-analysis.yml:
--------------------------------------------------------------------------------
1 | name: "Code scanning - action"
2 |
3 | on:
4 | pull_request:
5 | schedule:
6 | - cron: '0 0 * * 4'
7 |
8 | jobs:
9 | CodeQL-Build:
10 |
11 | runs-on: ubuntu-latest
12 |
13 | steps:
14 | - name: Checkout repository
15 | uses: actions/checkout@v2
16 | with:
17 | # We must fetch at least the immediate parents so that if this is
18 | # a pull request then we can checkout the head.
19 | fetch-depth: 2
20 |
21 | # If this run was triggered by a pull request event, then checkout
22 | # the head of the pull request instead of the merge commit.
23 | - run: git checkout HEAD^2
24 | if: ${{ github.event_name == 'pull_request' }}
25 |
26 | # Initializes the CodeQL tools for scanning.
27 | - name: Initialize CodeQL
28 | uses: github/codeql-action/init@v1
29 | with:
30 | languages: java
31 |
32 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
33 | # If this step fails, then you should remove it and run the build manually (see below)
34 | - name: Autobuild
35 | uses: github/codeql-action/autobuild@v1
36 |
37 | # ℹ️ Command-line programs to run using the OS shell.
38 | # 📚 https://git.io/JvXDl
39 |
40 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
41 | # and modify them (or add more) to build your code if your project
42 | # uses a compiled language
43 |
44 | #- run: |
45 | # make bootstrap
46 | # make release
47 |
48 | - name: Perform CodeQL Analysis
49 | uses: github/codeql-action/analyze@v1
50 |
--------------------------------------------------------------------------------
/drone-fly-core/pom.xml:
--------------------------------------------------------------------------------
1 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
2 |   <modelVersion>4.0.0</modelVersion>
3 |
4 |   <parent>
5 |     <groupId>com.expediagroup</groupId>
6 |     <artifactId>drone-fly-parent</artifactId>
7 |     <version>1.0.9-SNAPSHOT</version>
8 |   </parent>
9 |
10 |   <artifactId>drone-fly-core</artifactId>
11 |   <packaging>jar</packaging>
12 |   <name>${project.groupId}:${project.artifactId}</name>
13 |
14 |   <dependencies>
15 |     <dependency>
16 |       <groupId>org.springframework.boot</groupId>
17 |       <artifactId>spring-boot-starter-actuator</artifactId>
18 |       <exclusions>
19 |         <exclusion>
20 |           <groupId>org.springframework.boot</groupId>
21 |           <artifactId>spring-boot-starter-logging</artifactId>
22 |         </exclusion>
23 |       </exclusions>
24 |     </dependency>
25 |     <dependency>
26 |       <groupId>org.springframework.boot</groupId>
27 |       <artifactId>spring-boot-starter-web</artifactId>
28 |       <exclusions>
29 |         <exclusion>
30 |           <groupId>org.springframework.boot</groupId>
31 |           <artifactId>spring-boot-starter-logging</artifactId>
32 |         </exclusion>
33 |       </exclusions>
34 |     </dependency>
35 |     <dependency>
36 |       <groupId>org.springframework</groupId>
37 |       <artifactId>spring-test</artifactId>
38 |       <scope>test</scope>
39 |     </dependency>
40 |     <dependency>
41 |       <groupId>org.junit.jupiter</groupId>
42 |       <artifactId>junit-jupiter-params</artifactId>
43 |       <scope>test</scope>
44 |     </dependency>
45 |   </dependencies>
46 | </project>
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How To Contribute
2 |
3 | We'd love to accept your patches and contributions to this project. There are just a few guidelines you need to follow, which are described in detail below.
4 |
5 | ## 1. Fork this repo
6 |
7 | You should create a fork of this project in your account and work from there. You can create a fork by clicking the fork button in GitHub.
8 |
9 | ## 2. One feature, one branch
10 |
11 | Work for each new feature/issue should occur in its own branch. To create a new branch from the command line:
12 | ```shell
13 | git checkout -b my-new-feature
14 | ```
15 | where "my-new-feature" describes what you're working on.
16 |
17 | ## 3. Add unit tests
18 | If your contribution modifies existing code or adds new code, please add corresponding unit tests.
19 |
20 | ## 4. Ensure that the build passes
21 |
22 | Run
23 | ```shell
24 | mvn package
25 | ```
26 | and check that there are no errors.
27 |
28 | ## 5. Add documentation for new or updated functionality
29 |
30 | Please review all of the .md files in this project to see if they are impacted by your change and update them accordingly.
31 |
32 | ## 6. Add to CHANGELOG.md
33 |
34 | Any notable changes should be recorded in the CHANGELOG.md following the [Keep a Changelog](https://keepachangelog.com/en/1.0.0/) conventions.
35 |
36 | ## 7. Submit a pull request and describe the change
37 |
38 | Push your changes to your branch and open a pull request against the parent repo on GitHub. The project administrators will review your pull request and respond with feedback.
39 |
40 | # How your contribution gets merged
41 |
42 | Upon pull request submission, your code will be reviewed by the maintainers. They will confirm at least the following:
43 |
44 | - Tests run successfully (unit, coverage, integration, style).
45 | - Contribution policy has been followed.
46 |
47 | Two (human) reviewers will need to sign off on your pull request before it can be merged.
48 |
--------------------------------------------------------------------------------
/.github/workflows/build.yml:
--------------------------------------------------------------------------------
1 | name: Build with parameters
2 | on:
3 | workflow_dispatch:
4 | inputs:
5 | branch:
6 | description: "The branch to build a SNAPSHOT from."
7 | required: true
8 | default: "main"
9 | jobs:
10 | deploy:
11 | name: Build SNAPSHOT to Sonatype
12 | runs-on: ubuntu-24.04
13 |
14 | steps:
15 | - uses: actions/checkout@v2
16 | with:
17 | fetch-depth: 0
18 | ref: ${{ github.event.inputs.branch }}
19 |
20 | - name: Set up JDK
21 | uses: actions/setup-java@v2
22 | with:
23 | distribution: 'adopt'
24 | java-version: '8'
25 | java-package: jdk
26 | server-id: central # Value of the distributionManagement/repository/id field of the pom.xml
27 | server-username: SONATYPE_USERNAME # env variable for username in deploy
28 | server-password: SONATYPE_PASSWORD # env variable for token in deploy
29 | # only signed artifacts will be released to maven central. this sets up things for the maven-gpg-plugin
30 | gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} # Value of the GPG private key to import
31 | gpg-passphrase: GPG_PASSPHRASE # env variable for GPG private key passphrase
32 | # this creates a settings.xml with the following server
33 | settings-path: ${{ github.workspace }}
34 |
35 | - name: Run Maven Targets
36 | run: mvn deploy jacoco:report checkstyle:checkstyle spotbugs:spotbugs --settings $GITHUB_WORKSPACE/settings.xml --batch-mode --show-version --no-transfer-progress --activate-profiles sonatype-oss-release-github-actions
37 | env:
38 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
39 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
40 | DOCKERHUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}
41 | DOCKERHUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }}
42 | GPG_PASSPHRASE: ${{ secrets.GPG_PRIVATE_KEY_PASSPHRASE }}
43 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/messaging/MessageReaderAdapterTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.messaging;
17 |
18 | import static org.assertj.core.api.Assertions.assertThat;
19 | import static org.mockito.Mockito.verify;
20 | import static org.mockito.Mockito.when;
21 |
22 | import java.io.IOException;
23 |
24 | import org.junit.jupiter.api.BeforeEach;
25 | import org.junit.jupiter.api.Test;
26 | import org.junit.jupiter.api.extension.ExtendWith;
27 | import org.mockito.Mock;
28 | import org.mockito.junit.jupiter.MockitoExtension;
29 |
30 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryListenerEvent;
31 | import com.expediagroup.apiary.extensions.events.metastore.kafka.messaging.KafkaMessageReader;
32 |
33 | @ExtendWith(MockitoExtension.class)
34 | public class MessageReaderAdapterTest {
35 |
36 | private @Mock KafkaMessageReader delegate;
37 | private @Mock ApiaryListenerEvent event;
38 | private MessageReaderAdapter messageReaderAdapter;
39 |
40 | @BeforeEach
41 | public void init() {
42 | messageReaderAdapter = new MessageReaderAdapter(delegate);
43 | }
44 |
45 | @Test
46 | public void typicalRead() {
47 | when(delegate.next()).thenReturn(event);
48 | ApiaryListenerEvent result = messageReaderAdapter.read();
49 | verify(delegate).next();
50 | assertThat(result).isEqualTo(event);
51 | }
52 |
53 | @Test
54 | public void typicalClose() throws IOException {
55 | messageReaderAdapter.close();
56 | verify(delegate).close();
57 | }
58 |
59 | }
60 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/factory/ListenerCatalogFactory.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service.factory;
17 |
18 | import org.apache.commons.lang.StringUtils;
19 | import org.apache.hadoop.hive.conf.HiveConf;
20 | import org.slf4j.Logger;
21 | import org.slf4j.LoggerFactory;
22 |
23 | import com.expediagroup.dataplatform.dronefly.app.service.ListenerCatalog;
24 | import com.expediagroup.dataplatform.dronefly.app.service.listener.LoggingMetastoreListener;
25 |
26 | public class ListenerCatalogFactory {
27 | private static final Logger log = LoggerFactory.getLogger(ListenerCatalogFactory.class);
28 | private final HiveConf hiveConf;
29 |
30 | public ListenerCatalogFactory(HiveConf hiveConf) {
31 | this.hiveConf = hiveConf;
32 | }
33 |
34 | public ListenerCatalog newInstance(String confProvidedList) {
35 | String listenerImplList = confProvidedList;
36 | if (StringUtils.isBlank(listenerImplList)) {
37 | log.info("{apiary.listener.list} is empty. Going to look in hive-site.xml if it is provided on the classpath.");
38 | listenerImplList = hiveConf.getVar(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS);
39 | }
40 |
41 | if (StringUtils.isBlank(listenerImplList)) {
42 | listenerImplList = LoggingMetastoreListener.class.getName();
43 | log
44 | .warn(
45 | "No Hive metastore listeners have been provided as argument {apiary.listener.list} or hive-site.xml. Going to use: {}",
46 | listenerImplList);
47 |
48 | }
49 |
50 | return new ListenerCatalog(hiveConf, listenerImplList);
51 | }
52 |
53 | }
54 |
--------------------------------------------------------------------------------
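
`ListenerCatalogFactory.newInstance` resolves the listener list in three steps: the `apiary.listener.list` argument, then `METASTORE_EVENT_LISTENERS` from any `hive-site.xml` on the classpath, and finally the bundled `LoggingMetastoreListener`. A minimal usage sketch, assuming hypothetical listener class names that are not part of this repo:

```java
import org.apache.hadoop.hive.conf.HiveConf;

import com.expediagroup.dataplatform.dronefly.app.service.ListenerCatalog;
import com.expediagroup.dataplatform.dronefly.app.service.factory.ListenerCatalogFactory;

public class ListenerCatalogFactoryUsageSketch {
  public static void main(String[] args) {
    HiveConf conf = new HiveConf();
    // Comma-separated listener implementations; a blank value triggers the fallbacks described above.
    // The classes named here are illustrative and must be on the classpath to be instantiated.
    ListenerCatalog catalog = new ListenerCatalogFactory(conf)
        .newInstance("com.example.MyMetastoreListener,com.example.AuditListener");
    catalog.getListeners().forEach(listener -> System.out.println(listener.getClass().getName()));
  }
}
```
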
/.github/workflows/release.yml:
--------------------------------------------------------------------------------
1 | name: Release to Maven Central
2 | on:
3 | workflow_dispatch:
4 | inputs:
5 | branch:
6 | description: "The branch to release from."
7 | required: true
8 | default: "main"
9 | jobs:
10 | release:
11 | name: Release to Maven Central
12 | runs-on: ubuntu-24.04
13 |
14 | steps:
15 | - name: Checkout source code
16 | uses: actions/checkout@v2
17 | with:
18 | fetch-depth: 0
19 | ref: ${{ github.event.inputs.branch }}
20 | # We need a personal access token to be able to push to a protected branch
21 | token: ${{ secrets.GH_PERSONAL_ACCESS_TOKEN }}
22 |
23 | - name: Set up JDK
24 | uses: actions/setup-java@v2
25 | with:
26 | distribution: 'adopt'
27 | java-version: '8'
28 | java-package: jdk
29 | server-id: central # Value of the distributionManagement/repository/id field of the pom.xml
30 | server-username: SONATYPE_USERNAME # env variable for username in deploy
31 | server-password: SONATYPE_PASSWORD # env variable for token in deploy
32 | # only signed artifacts will be released to maven central. This sets up things for the maven-gpg-plugin
33 | gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }} # Value of the GPG private key to import
34 | gpg-passphrase: GPG_PASSPHRASE # env variable for GPG private key passphrase
35 | # this creates a settings.xml with the following server
36 | settings-path: ${{ github.workspace }}
37 |
38 | - name: Configure Git User
39 | run: |
40 | git config user.email "oss@expediagroup.com"
41 | git config user.name "eg-oss-ci"
42 | - name: Run Maven Targets
43 | run: mvn release:prepare release:perform --settings $GITHUB_WORKSPACE/settings.xml --activate-profiles sonatype-oss-release-github-actions --batch-mode --show-version --no-transfer-progress
44 | env:
45 | SONATYPE_PASSWORD: ${{ secrets.SONATYPE_PASSWORD }}
46 | SONATYPE_USERNAME: ${{ secrets.SONATYPE_USERNAME }}
47 | DOCKERHUB_USERNAME: ${{ secrets.DOCKER_HUB_USERNAME }}
48 | DOCKERHUB_PASSWORD: ${{ secrets.DOCKER_HUB_PASSWORD }}
49 | GPG_PASSPHRASE: ${{secrets.GPG_PRIVATE_KEY_PASSPHRASE}}
50 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFly.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app;
17 |
18 | import java.util.TimeZone;
19 |
20 | import org.springframework.beans.BeansException;
21 | import org.springframework.boot.autoconfigure.SpringBootApplication;
22 | import org.springframework.boot.builder.SpringApplicationBuilder;
23 | import org.springframework.boot.context.properties.EnableConfigurationProperties;
24 | import org.springframework.context.ApplicationContext;
25 | import org.springframework.context.ApplicationContextAware;
26 | import org.springframework.context.ConfigurableApplicationContext;
27 |
28 | import com.google.common.annotations.VisibleForTesting;
29 |
30 | @SpringBootApplication
31 | @EnableConfigurationProperties
32 | public class DroneFly implements ApplicationContextAware {
33 |
34 | private static ConfigurableApplicationContext context;
35 |
36 | public static void main(String[] args) {
37 | TimeZone.setDefault(TimeZone.getTimeZone("UTC"));
38 | new SpringApplicationBuilder(DroneFly.class)
39 | .properties("spring.config.additional-location:classpath:/drone-fly-app.yml")
40 | .properties("server.port:${endpoint.port:8008}")
41 | .build()
42 | .run(args);
43 | }
44 |
45 | @VisibleForTesting
46 | public static boolean isRunning() {
47 | return context != null && context.isRunning();
48 | }
49 |
50 | @VisibleForTesting
51 | public static void stop() {
52 | if (context == null) {
53 | throw new RuntimeException("Application context has not been started.");
54 | }
55 | context.close();
56 | }
57 |
58 | @Override
59 | public void setApplicationContext(ApplicationContext applicationContext) throws BeansException {
60 | context = (ConfigurableApplicationContext) applicationContext;
61 | }
62 | }
63 |
--------------------------------------------------------------------------------
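
`DroneFly.main` layers `drone-fly-app.yml` on top of the default Spring configuration and binds the HTTP port to the `endpoint.port` property, defaulting to 8008. The port can therefore be overridden through any Spring property source; an illustrative launch, not taken from this repo:

```java
import com.expediagroup.dataplatform.dronefly.app.DroneFly;

public class DroneFlyLaunchSketch {
  public static void main(String[] args) {
    // Overrides the ${endpoint.port:8008} placeholder used for server.port in DroneFly.main.
    DroneFly.main(new String[] {"--endpoint.port=9090"});
  }
}
```
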
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunner.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020-2025 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app;
17 |
18 | import java.io.IOException;
19 | import java.util.concurrent.atomic.AtomicBoolean;
20 |
21 | import javax.annotation.PreDestroy;
22 |
23 | import org.slf4j.Logger;
24 | import org.slf4j.LoggerFactory;
25 | import org.springframework.beans.factory.annotation.Autowired;
26 | import org.springframework.boot.ApplicationArguments;
27 | import org.springframework.boot.ApplicationRunner;
28 | import org.springframework.stereotype.Component;
29 |
30 | import com.expediagroup.dataplatform.dronefly.app.service.DroneFlyNotificationService;
31 | import com.expediagroup.dataplatform.dronefly.core.exception.DroneFlyException;
32 |
33 | @Component
34 | public class DroneFlyRunner implements ApplicationRunner {
35 |
36 | private static final Logger log = LoggerFactory.getLogger(DroneFlyRunner.class);
37 | private final DroneFlyNotificationService droneFlyNotificationService;
38 | private final AtomicBoolean running = new AtomicBoolean(false);
39 |
40 | @Autowired
41 | public DroneFlyRunner(DroneFlyNotificationService droneFlyNotificationService) {
42 | this.droneFlyNotificationService = droneFlyNotificationService;
43 | }
44 |
45 | @Override
46 | public void run(ApplicationArguments args) {
47 | running.set(true);
48 | while (running.get()) {
49 | try {
50 | droneFlyNotificationService.notifyListeners();
51 | } catch (Exception e) {
52 | log.error("Problem processing this event.", e);
53 | }
54 | }
55 | log.info("Drone Fly has stopped");
56 | }
57 |
58 | @PreDestroy
59 | public void destroy() {
60 | log.info("Shutting down Drone Fly...");
61 | running.set(false);
62 | try {
63 | droneFlyNotificationService.close();
64 | } catch (IOException e) {
65 | throw new DroneFlyException("Problem closing notification service.", e);
66 | }
67 |
68 | log.info("Drone Fly shutdown complete.");
69 | }
70 | }
71 |
--------------------------------------------------------------------------------
/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTestUtils.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.core.integration;
17 |
18 | import java.util.HashMap;
19 | import java.util.List;
20 | import java.util.Map;
21 |
22 | import org.apache.hadoop.hive.metastore.api.FieldSchema;
23 | import org.apache.hadoop.hive.metastore.api.Partition;
24 | import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
25 | import org.apache.hadoop.hive.metastore.api.Table;
26 |
27 | import com.google.common.collect.Lists;
28 |
29 | public class DroneFlyIntegrationTestUtils {
30 |
31 | static final String TOPIC = "apiary-events";
32 | static final String DATABASE = "database";
33 | static final String TABLE = "table";
34 |
35 | public static Table buildTable() {
36 | return buildTable(TABLE);
37 | }
38 |
39 | public static Table buildTable(String tableName) {
40 | List<FieldSchema> partitions = Lists.newArrayList();
41 | partitions.add(new FieldSchema("a", "string", "comment"));
42 | partitions.add(new FieldSchema("b", "string", "comment"));
43 | partitions.add(new FieldSchema("c", "string", "comment"));
44 | return new Table(tableName, DATABASE, "me", 1, 1, 1, new StorageDescriptor(), partitions, buildTableParameters(),
45 | "originalText", "expandedText", "tableType");
46 | }
47 |
48 | public static Partition buildPartition() {
49 | return buildPartition("partition");
50 | }
51 |
52 | public static Partition buildPartition(String partitionName) {
53 | List<String> values = Lists.newArrayList();
54 | values.add(partitionName + "1");
55 | values.add(partitionName + "2");
56 | StorageDescriptor sd = new StorageDescriptor();
57 | sd.setStoredAsSubDirectories(false);
58 | return new Partition(values, DATABASE, TABLE, 1, 1, sd, buildTableParameters());
59 | }
60 |
61 | public static Map<String, String> buildTableParameters() {
62 | Map<String, String> parameters = new HashMap<>();
63 | parameters.put("key1", "value1");
64 | parameters.put("key2", "value2");
65 | return parameters;
66 | }
67 |
68 | public static String buildQualifiedTableName() {
69 | return DATABASE + "." + TABLE;
70 | }
71 |
72 | }
73 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/HiveTableTestUtils.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service;
17 |
18 | import java.util.ArrayList;
19 | import java.util.Arrays;
20 | import java.util.HashMap;
21 | import java.util.List;
22 |
23 | import org.apache.hadoop.hive.metastore.TableType;
24 | import org.apache.hadoop.hive.metastore.api.FieldSchema;
25 | import org.apache.hadoop.hive.metastore.api.Partition;
26 | import org.apache.hadoop.hive.metastore.api.SerDeInfo;
27 | import org.apache.hadoop.hive.metastore.api.StorageDescriptor;
28 | import org.apache.hadoop.hive.metastore.api.Table;
29 |
30 | public class HiveTableTestUtils {
31 |
32 | private HiveTableTestUtils() {}
33 |
34 | public static final List<FieldSchema> PARTITION_COLUMNS = Arrays
35 | .asList(new FieldSchema("partition1", "string", ""), new FieldSchema("partition2", "string", ""));
36 |
37 | public static Table createPartitionedTable(String database, String table, String location) {
38 | Table hiveTable = new Table();
39 | hiveTable.setDbName(database);
40 | hiveTable.setTableName(table);
41 | hiveTable.setTableType(TableType.EXTERNAL_TABLE.name());
42 | hiveTable.putToParameters("EXTERNAL", "TRUE");
43 |
44 | hiveTable.setPartitionKeys(PARTITION_COLUMNS);
45 |
46 | List<FieldSchema> columns = new ArrayList<>();
47 | columns.add(new FieldSchema("test_col1", "string", ""));
48 | columns.add(new FieldSchema("test_col2", "string", ""));
49 | columns.add(new FieldSchema("test_col3", "string", ""));
50 |
51 | StorageDescriptor sd = new StorageDescriptor();
52 | sd.setCols(columns);
53 | sd.setLocation(location);
54 | sd.setParameters(new HashMap<>());
55 | sd.setSerdeInfo(new SerDeInfo());
56 |
57 | hiveTable.setSd(sd);
58 |
59 | return hiveTable;
60 | }
61 |
62 | public static Partition newPartition(Table hiveTable, List values, String location) {
63 | Partition partition = new Partition();
64 | partition.setDbName(hiveTable.getDbName());
65 | partition.setTableName(hiveTable.getTableName());
66 | partition.setValues(values);
67 | partition.setSd(new StorageDescriptor(hiveTable.getSd()));
68 | partition.getSd().setLocation(location);
69 | return partition;
70 | }
71 |
72 | }
73 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/ListenerCatalog.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2009-2020 Expedia, Inc and Apache Hive contributors.
3 | *
4 | * Based on {@code org.apache.hadoop.hive.metastore.MetaStoreUtils} from hive-metastore 2.3.7:
5 | *
6 | * https://github.com/apache/hive/blob/rel/release-2.3.7/metastore/src/java/org/apache/hadoop/hive/metastore/MetaStoreUtils.java#L1642
7 | *
8 | * Licensed under the Apache License, Version 2.0 (the "License");
9 | * you may not use this file except in compliance with the License.
10 | * You may obtain a copy of the License at
11 | *
12 | * http://www.apache.org/licenses/LICENSE-2.0
13 | *
14 | * Unless required by applicable law or agreed to in writing, software
15 | * distributed under the License is distributed on an "AS IS" BASIS,
16 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | * See the License for the specific language governing permissions and
18 | * limitations under the License.
19 | */
20 |
21 | package com.expediagroup.dataplatform.dronefly.app.service;
22 |
23 | import static com.expediagroup.apiary.extensions.events.metastore.common.Preconditions.checkNotEmpty;
24 |
25 | import java.util.ArrayList;
26 | import java.util.List;
27 |
28 | import org.apache.hadoop.conf.Configuration;
29 | import org.apache.hadoop.hive.common.JavaUtils;
30 | import org.apache.hadoop.hive.conf.HiveConf;
31 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
32 |
33 | import com.expediagroup.dataplatform.dronefly.core.exception.DroneFlyException;
34 |
35 | public class ListenerCatalog {
36 | private final List<MetaStoreEventListener> listeners;
37 |
38 | public ListenerCatalog(HiveConf conf, String listenerImplList) {
39 | checkNotEmpty(listenerImplList, "ListenerImplList cannot be null or empty");
40 | listeners = getMetaStoreListeners(MetaStoreEventListener.class, conf, listenerImplList.trim());
41 | }
42 |
43 | public List<MetaStoreEventListener> getListeners() {
44 | return listeners;
45 | }
46 |
47 | /**
48 | * Creates listener instances as per the configuration.
49 | *
50 | * @param clazz listener base type, used to fix the element type of the returned list
51 | * @param conf Hive configuration passed to each listener's constructor
52 | * @param listenerImplList comma-separated list of listener implementation class names
53 | * @return the instantiated listeners
54 | * @throws DroneFlyException if any listener cannot be instantiated
55 | */
56 | private <T extends MetaStoreEventListener> List<T> getMetaStoreListeners(Class<T> clazz, HiveConf conf, String listenerImplList)
57 | throws DroneFlyException {
58 |
59 | List<T> listeners = new ArrayList<>();
60 |
61 | String[] listenerImpls = listenerImplList.split(",");
62 | for (String listenerImpl : listenerImpls) {
63 | try {
64 | T listener = (T) Class
65 | .forName(listenerImpl.trim(), true, JavaUtils.getClassLoader())
66 | .getConstructor(Configuration.class)
67 | .newInstance(conf);
68 | listeners.add(listener);
69 | } catch (Exception e) {
70 | throw new DroneFlyException("Failed to instantiate listener named: " + listenerImpl + ", reason: ", e);
71 | }
72 | }
73 |
74 | return listeners;
75 | }
76 |
77 | }
78 |
--------------------------------------------------------------------------------
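
`ListenerCatalog` instantiates each configured class reflectively through a constructor that takes a Hadoop `Configuration`, so any listener it loads must expose one. A minimal sketch of a compatible listener (a hypothetical class, not part of this repo):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;

public class ExampleMetastoreListener extends MetaStoreEventListener {

  // ListenerCatalog looks up getConstructor(Configuration.class), so this constructor is required.
  public ExampleMetastoreListener(Configuration config) {
    super(config);
  }

  @Override
  public void onCreateTable(CreateTableEvent event) {
    System.out.println("Table created: " + event.getTable().getTableName());
  }
}
```
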
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/DroneFlyRunnerTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020-2025 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app;
17 |
18 | import static org.assertj.core.api.Assertions.fail;
19 | import static org.awaitility.Awaitility.await;
20 | import static org.mockito.Mockito.atLeast;
21 | import static org.mockito.Mockito.doNothing;
22 | import static org.mockito.Mockito.verify;
23 |
24 | import java.io.IOException;
25 | import java.util.concurrent.ExecutorService;
26 | import java.util.concurrent.Executors;
27 | import java.util.concurrent.TimeUnit;
28 |
29 | import org.awaitility.Duration;
30 | import org.junit.jupiter.api.BeforeEach;
31 | import org.junit.jupiter.api.Test;
32 | import org.junit.jupiter.api.extension.ExtendWith;
33 | import org.mockito.Mock;
34 | import org.mockito.junit.jupiter.MockitoExtension;
35 | import org.springframework.boot.ApplicationArguments;
36 |
37 | import com.expediagroup.dataplatform.dronefly.app.service.DroneFlyNotificationService;
38 |
39 | @ExtendWith(MockitoExtension.class)
40 | public class DroneFlyRunnerTest {
41 |
42 | @Mock
43 | private ApplicationArguments args;
44 |
45 | private @Mock DroneFlyNotificationService droneFlyNotificationService;
46 |
47 | private DroneFlyRunner runner;
48 | private final ExecutorService executor = Executors.newFixedThreadPool(1);
49 |
50 | @BeforeEach
51 | public void init() {
52 | runner = new DroneFlyRunner(droneFlyNotificationService);
53 | }
54 |
55 | @Test
56 | public void typical() throws IOException, InterruptedException {
57 | runRunner();
58 | await()
59 | .atMost(Duration.FIVE_SECONDS)
60 | .untilAsserted(() -> {
61 | verify(droneFlyNotificationService, atLeast(1)).notifyListeners();
62 | }
63 | );
64 | destroy();
65 | verify(droneFlyNotificationService).close();
66 | }
67 |
68 | @Test
69 | public void typicalRunWithException() throws Exception {
70 | doNothing().doThrow(new RuntimeException()).doNothing().when(droneFlyNotificationService).notifyListeners();
71 | runRunner();
72 | await()
73 | .atMost(Duration.FIVE_SECONDS)
74 | .untilAsserted(() -> {
75 | verify(droneFlyNotificationService, atLeast(3)).notifyListeners();
76 | }
77 | );
78 | destroy();
79 | verify(droneFlyNotificationService).close();
80 | }
81 |
82 | private void runRunner() {
83 | executor.execute(() -> {
84 | try {
85 | runner.run(args);
86 | } catch (Exception e) {
87 | fail("Exception thrown on run");
88 | }
89 | });
90 | }
91 |
92 | private void destroy() throws InterruptedException {
93 | runner.destroy();
94 | executor.awaitTermination(1, TimeUnit.SECONDS);
95 | }
96 | }
97 |
--------------------------------------------------------------------------------
/drone-fly-integration-tests/pom.xml:
--------------------------------------------------------------------------------
1 | <project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
2 |   <modelVersion>4.0.0</modelVersion>
3 |
4 |   <parent>
5 |     <groupId>com.expediagroup</groupId>
6 |     <artifactId>drone-fly-parent</artifactId>
7 |     <version>1.0.9-SNAPSHOT</version>
8 |   </parent>
9 |
10 |   <artifactId>drone-fly-integration-tests</artifactId>
11 |   <packaging>jar</packaging>
12 |   <name>${project.groupId}:${project.artifactId}</name>
13 |
14 |   <dependencies>
15 |     <dependency>
16 |       <groupId>com.expediagroup</groupId>
17 |       <artifactId>drone-fly-app</artifactId>
18 |       <version>${project.version}</version>
19 |       <scope>test</scope>
20 |       <exclusions>
21 |         <exclusion>
22 |           <groupId>com.tdunning</groupId>
23 |           <artifactId>json</artifactId>
24 |         </exclusion>
25 |         <exclusion>
26 |           <groupId>jdk.tools</groupId>
27 |           <artifactId>jdk.tools</artifactId>
28 |         </exclusion>
29 |       </exclusions>
30 |     </dependency>
31 |     <dependency>
32 |       <groupId>com.expediagroup.apiary</groupId>
33 |       <artifactId>kafka-metastore-listener</artifactId>
34 |       <version>6.0.2</version>
35 |       <scope>test</scope>
36 |       <exclusions>
37 |         <exclusion>
38 |           <groupId>org.slf4j</groupId>
39 |           <artifactId>slf4j-log4j12</artifactId>
40 |         </exclusion>
41 |       </exclusions>
42 |     </dependency>
43 |     <dependency>
44 |       <groupId>org.awaitility</groupId>
45 |       <artifactId>awaitility</artifactId>
46 |       <scope>test</scope>
47 |     </dependency>
48 |     <dependency>
49 |       <groupId>org.springframework.boot</groupId>
50 |       <artifactId>spring-boot-starter</artifactId>
51 |       <scope>test</scope>
52 |       <exclusions>
53 |         <exclusion>
54 |           <groupId>org.springframework.boot</groupId>
55 |           <artifactId>spring-boot-starter-logging</artifactId>
56 |         </exclusion>
57 |       </exclusions>
58 |     </dependency>
59 |     <dependency>
60 |       <groupId>org.springframework.kafka</groupId>
61 |       <artifactId>spring-kafka</artifactId>
62 |       <scope>test</scope>
63 |     </dependency>
64 |     <dependency>
65 |       <groupId>org.springframework.kafka</groupId>
66 |       <artifactId>spring-kafka-test</artifactId>
67 |       <scope>test</scope>
68 |     </dependency>
69 |     <dependency>
70 |       <groupId>org.springframework.boot</groupId>
71 |       <artifactId>spring-boot-starter-test</artifactId>
72 |       <scope>test</scope>
73 |       <exclusions>
74 |         <exclusion>
75 |           <groupId>org.junit.vintage</groupId>
76 |           <artifactId>junit-vintage-engine</artifactId>
77 |         </exclusion>
78 |       </exclusions>
79 |     </dependency>
80 |     <dependency>
81 |       <groupId>org.junit.jupiter</groupId>
82 |       <artifactId>junit-jupiter-params</artifactId>
83 |       <scope>test</scope>
84 |     </dependency>
85 |     <dependency>
86 |       <groupId>software.amazon.msk</groupId>
87 |       <artifactId>aws-msk-iam-auth</artifactId>
88 |       <version>${msk.iam.version}</version>
89 |     </dependency>
90 |     <dependency>
91 |       <groupId>io.dropwizard.metrics</groupId>
92 |       <artifactId>metrics-core</artifactId>
93 |       <version>${dropwizard.version}</version>
94 |       <scope>test</scope>
95 |     </dependency>
96 |   </dependencies>
97 | </project>
--------------------------------------------------------------------------------
/CODE-OF-CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | In the interest of fostering an open and welcoming environment, we as
6 | contributors and maintainers pledge to making participation in our project and
7 | our community a harassment-free experience for everyone, regardless of age, body
8 | size, disability, ethnicity, gender identity and expression, level of experience,
9 | nationality, personal appearance, race, religion, or sexual identity and
10 | orientation.
11 |
12 | ## Our Standards
13 |
14 | Examples of behaviour that contributes to creating a positive environment
15 | include:
16 |
17 | * Using welcoming and inclusive language
18 | * Being respectful of differing viewpoints and experiences
19 | * Gracefully accepting constructive criticism
20 | * Focusing on what is best for the community
21 | * Showing empathy towards other community members
22 |
23 | Examples of unacceptable behaviour by participants include:
24 |
25 | * The use of sexualised language or imagery and unwelcome sexual attention or
26 | advances
27 | * Trolling, insulting/derogatory comments, and personal or political attacks
28 | * Public or private harassment
29 | * Publishing others' private information, such as a physical or electronic
30 | address, without explicit permission
31 | * Other conduct which could reasonably be considered inappropriate in a
32 | professional setting
33 |
34 | ## Our Responsibilities
35 |
36 | Project maintainers are responsible for clarifying the standards of acceptable
37 | behaviour and are expected to take appropriate and fair corrective action in
38 | response to any instances of unacceptable behaviour.
39 |
40 | Project maintainers have the right and responsibility to remove, edit, or
41 | reject comments, commits, code, wiki edits, issues, and other contributions
42 | that are not aligned to this Code of Conduct, or to ban temporarily or
43 | permanently any contributor for other behaviours that they deem inappropriate,
44 | threatening, offensive, or harmful.
45 |
46 | ## Scope
47 |
48 | This Code of Conduct applies both within project spaces and in public spaces
49 | when an individual is representing the project or its community. Examples of
50 | representing a project or community include using an official project e-mail
51 | address, posting via an official social media account, or acting as an appointed
52 | representative at an online or offline event. Representation of a project may be
53 | further defined and clarified by project maintainers.
54 |
55 | ## Enforcement
56 |
57 | Instances of abusive, harassing, or otherwise unacceptable behaviour may be
58 | reported by contacting [a member of the project team](https://github.com/orgs/ExpediaGroup/teams/apiary-committers/members). All
59 | complaints will be reviewed and investigated and will result in a response that
60 | is deemed necessary and appropriate to the circumstances. The project team is
61 | obligated to maintain confidentiality with regard to the reporter of an incident.
62 | Further details of specific enforcement policies may be posted separately.
63 |
64 | Project maintainers who do not follow or enforce the Code of Conduct in good
65 | faith may face temporary or permanent repercussions as determined by other
66 | members of the project's leadership.
67 |
68 | ## Attribution
69 |
70 | This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
71 | available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
72 |
73 | [homepage]: https://www.contributor-covenant.org
74 |
75 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/DroneFlyNotificationService.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service;
17 |
18 | import java.io.IOException;
19 | import java.util.List;
20 |
21 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
22 | import org.apache.hadoop.hive.metastore.MetaStoreListenerNotifier;
23 | import org.apache.hadoop.hive.metastore.api.MetaException;
24 | import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
25 | import org.apache.hadoop.hive.metastore.events.ListenerEvent;
26 | import org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType;
27 | import org.slf4j.Logger;
28 | import org.slf4j.LoggerFactory;
29 | import org.springframework.beans.factory.annotation.Autowired;
30 | import org.springframework.stereotype.Component;
31 |
32 | import com.expediagroup.apiary.extensions.events.metastore.common.MetaStoreEventsException;
33 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryListenerEvent;
34 | import com.expediagroup.dataplatform.dronefly.app.messaging.MessageReaderAdapter;
35 | import com.expediagroup.dataplatform.dronefly.core.exception.DroneFlyException;
36 |
37 | @Component
38 | public class DroneFlyNotificationService {
39 | private static final Logger log = LoggerFactory.getLogger(DroneFlyNotificationService.class);
40 | private final MessageReaderAdapter reader;
41 | private final HiveEventConverterService converterService;
42 | private final ListenerCatalog listenerCatalog;
43 |
44 | @Autowired
45 | public DroneFlyNotificationService(
46 | MessageReaderAdapter reader,
47 | HiveEventConverterService converterService,
48 | ListenerCatalog listenerCatalog) {
49 | this.reader = reader;
50 | this.converterService = converterService;
51 | this.listenerCatalog = listenerCatalog;
52 | }
53 |
54 | public void notifyListeners() throws DroneFlyException {
55 | try {
56 | ApiaryListenerEvent event = reader.read();
57 | ListenerEvent hiveEvent = converterService.toHiveEvent(event);
58 |       List<MetaStoreEventListener> listeners = listenerCatalog.getListeners();
59 | log.info("Notifying event type: {}", event.getEventType().toString());
60 | log.debug("Qualified Table Name: {}.{}", event.getDatabaseName().toString(), event.getTableName().toString());
61 | log.debug("Listeners being notified: {}", listeners.size());
62 |
63 | // The following class notifies all the listeners loaded in a loop. It will stop notifying if one of the loaded
64 | // listeners throws an Exception. This is expected behaviour. If Drone Fly is deployed in Kubernetes containers
65 | // with only one listener loaded per instance, it won't be an issue.
66 | MetaStoreListenerNotifier.notifyEvent(listeners, getHiveEventType(event), hiveEvent);
67 | } catch (MetaStoreEventsException e) {
68 | throw new DroneFlyException("Cannot unmarshal this event. It will be ignored.", e);
69 | } catch (MetaException | NoSuchObjectException e) {
70 | throw new DroneFlyException("Hive event was received but Drone Fly failed to notify all the listeners.", e);
71 | }
72 | }
73 |
74 | private EventType getHiveEventType(ApiaryListenerEvent event) {
75 |     return EventType.valueOf(event.getEventType().name().substring(3)); // strip the "ON_" prefix, e.g. ON_CREATE_TABLE -> CREATE_TABLE
76 | }
77 |
78 | public void close() throws IOException {
79 | reader.close();
80 | }
81 |
82 | }
83 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/context/CommonBeans.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020-2025 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.context;
17 |
18 | import java.util.List;
19 | import java.util.Properties;
20 | import java.util.stream.Collectors;
21 |
22 | import org.apache.hadoop.hive.conf.HiveConf;
23 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
24 | import org.apache.hadoop.hive.metastore.api.MetaException;
25 | import org.slf4j.Logger;
26 | import org.slf4j.LoggerFactory;
27 | import org.springframework.beans.factory.annotation.Value;
28 | import org.springframework.boot.context.properties.ConfigurationProperties;
29 | import org.springframework.context.annotation.Bean;
30 | import org.springframework.context.annotation.Configuration;
31 | import org.springframework.context.annotation.Primary;
32 |
33 | import com.expediagroup.apiary.extensions.events.metastore.kafka.messaging.KafkaMessageReader;
34 | import com.expediagroup.apiary.extensions.events.metastore.kafka.messaging.KafkaMessageReader.KafkaMessageReaderBuilder;
35 | import com.expediagroup.dataplatform.dronefly.app.messaging.MessageReaderAdapter;
36 | import com.expediagroup.dataplatform.dronefly.app.service.ListenerCatalog;
37 | import com.expediagroup.dataplatform.dronefly.app.service.factory.ListenerCatalogFactory;
38 |
39 | @Configuration
40 | public class CommonBeans {
41 |
42 | private static final Logger log = LoggerFactory.getLogger(CommonBeans.class);
43 | public static final String CONSUMER_PROPERTIES_PREFIX = "apiary.messaging.consumer";
44 |
45 | @Value("${instance.name:drone-fly}")
46 | private String instanceName;
47 |
48 | @Value("${apiary.bootstrap.servers}")
49 | private String bootstrapServers;
50 |
51 | @Value("${apiary.kafka.topic.name}")
52 | private String topicName;
53 |
54 | @Value("${apiary.listener.list:}")
55 | private String confListenerList;
56 |
57 | @Bean
58 | public HiveConf hiveConf() {
59 | return new HiveConf();
60 | }
61 |
62 | @Bean
63 | @Primary
64 | @ConfigurationProperties(CONSUMER_PROPERTIES_PREFIX)
65 | public Properties getEnvProperties() {
66 | return new Properties();
67 | }
68 |
69 | @Bean
70 | public ListenerCatalog listenerCatalog(HiveConf conf) throws MetaException {
71 | ListenerCatalog listenerCatalog = new ListenerCatalogFactory(conf).newInstance(confListenerList);
72 |     List<MetaStoreEventListener> listenerList = listenerCatalog.getListeners();
73 | String listeners = listenerList.stream().map(x -> x.getClass().getName()).collect(Collectors.joining(", "));
74 | log.info("DroneFly is starting with {} listeners: {}", listenerList.size(), listeners);
75 | return listenerCatalog;
76 | }
77 |
78 | @Bean
79 | public MessageReaderAdapter messageReaderAdapter() {
80 | Properties consumerProperties = getConsumerProperties();
81 | KafkaMessageReader delegate = KafkaMessageReaderBuilder.
82 | builder(bootstrapServers, topicName, instanceName).
83 | withConsumerProperties(consumerProperties).
84 | build();
85 | return new MessageReaderAdapter(delegate);
86 | }
87 |
88 | private Properties getConsumerProperties() {
89 | Properties consumerProperties = new Properties();
90 | getEnvProperties().forEach((key, value) -> {
91 | consumerProperties.put(key.toString(), value.toString());
92 | log.info("Consumer property {} set with value: {}", key, value);
93 | });
94 | return consumerProperties;
95 | }
96 | }
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/factory/ListenerCatalogFactoryTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service.factory;
17 |
18 | import static org.assertj.core.api.Assertions.assertThat;
19 |
20 | import org.apache.hadoop.hive.conf.HiveConf;
21 | import org.junit.jupiter.api.BeforeEach;
22 | import org.junit.jupiter.api.Test;
23 |
24 | import com.expediagroup.dataplatform.dronefly.app.service.ListenerCatalog;
25 | import com.expediagroup.dataplatform.dronefly.app.service.listener.AnotherDummyListener;
26 |
27 | public class ListenerCatalogFactoryTest {
28 |
29 | private ListenerCatalogFactory listenerCatalogFactory;
30 |
31 | @BeforeEach
32 | public void init() {
33 | HiveConf hiveConf = new HiveConf();
34 | listenerCatalogFactory = new ListenerCatalogFactory(hiveConf);
35 | }
36 |
37 | @Test
38 | public void listenerImplListProvided() {
39 | String confProvidedList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener";
40 | ListenerCatalog listenerCatalog = listenerCatalogFactory.newInstance(confProvidedList);
41 |
42 | assertThat(listenerCatalog.getListeners().size()).isEqualTo(1);
43 | }
44 |
45 | @Test
46 | public void listenerImplListFromHiveConf() {
47 | HiveConf hiveConf = new HiveConf();
48 | String listenerImplList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener";
49 | hiveConf.set(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS.toString(), listenerImplList);
50 | ListenerCatalogFactory listenerCatalogFactory = new ListenerCatalogFactory(hiveConf);
51 |
52 | ListenerCatalog listenerCatalog = listenerCatalogFactory.newInstance("");
53 |
54 | assertThat(listenerCatalog.getListeners().size()).isEqualTo(1);
55 | }
56 |
57 | @Test
58 | public void configGivenPriorityOverHiveConf() {
59 | HiveConf hiveConf = new HiveConf();
60 | String listenerImplList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener";
61 | hiveConf.set(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS.toString(), listenerImplList);
62 | ListenerCatalogFactory listenerCatalogFactory = new ListenerCatalogFactory(hiveConf);
63 |
64 | ListenerCatalog listenerCatalog = listenerCatalogFactory
65 | .newInstance("com.expediagroup.dataplatform.dronefly.app.service.listener.AnotherDummyListener");
66 |
67 | assertThat(listenerCatalog.getListeners().size()).isEqualTo(1);
68 | assertThat(listenerCatalog.getListeners().get(0)).isInstanceOf(AnotherDummyListener.class);
69 | }
70 |
71 | @Test
72 | public void listenerImplListNotProvidedInConfOrHiveSite() {
73 | ListenerCatalog listenerCatalog = listenerCatalogFactory.newInstance(null);
74 | assertThat(listenerCatalog.getListeners().size()).isEqualTo(1);
75 | }
76 |
77 | @Test
78 | public void listenerImplListProvidedWithJustWhitespaces() {
79 | HiveConf hiveConf = new HiveConf();
80 | String listenerImplList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener";
81 | hiveConf.set(HiveConf.ConfVars.METASTORE_EVENT_LISTENERS.toString(), listenerImplList);
82 | ListenerCatalogFactory listenerCatalogFactory = new ListenerCatalogFactory(hiveConf);
83 |
84 | ListenerCatalog listenerCatalog = listenerCatalogFactory.newInstance("");
85 |
86 | assertThat(listenerCatalog.getListeners().size()).isEqualTo(1);
87 | }
88 |
89 | @Test
90 | public void nullListenerImplListProvided() {
91 | ListenerCatalog listenerCatalog = listenerCatalogFactory.newInstance(null);
92 | assertThat(listenerCatalog.getListeners().size()).isEqualTo(1);
93 | }
94 |
95 | }
96 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/listener/DummyListener.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service.listener;
17 |
18 | import java.util.ArrayList;
19 | import java.util.List;
20 |
21 | import org.apache.hadoop.conf.Configuration;
22 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
23 | import org.apache.hadoop.hive.metastore.api.MetaException;
24 | import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
25 | import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
26 | import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
27 | import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
28 | import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
29 | import org.apache.hadoop.hive.metastore.events.CreateFunctionEvent;
30 | import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
31 | import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
32 | import org.apache.hadoop.hive.metastore.events.DropFunctionEvent;
33 | import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
34 | import org.apache.hadoop.hive.metastore.events.DropTableEvent;
35 | import org.apache.hadoop.hive.metastore.events.ListenerEvent;
36 | import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent;
37 |
38 | public class DummyListener extends MetaStoreEventListener {
39 |
40 |   public final List<ListenerEvent> notifyList = new ArrayList<>();
41 |
42 | /**
43 | * @return The last event received, or null if no event was received.
44 | */
45 | public ListenerEvent getLastEvent() {
46 | if (notifyList.isEmpty()) {
47 | return null;
48 | } else {
49 | return notifyList.get(notifyList.size() - 1);
50 | }
51 | }
52 |
53 | public DummyListener(Configuration config) {
54 | super(config);
55 | }
56 |
57 | @Override
58 | public void onConfigChange(ConfigChangeEvent configChange) {
59 | addEvent(configChange);
60 | }
61 |
62 | @Override
63 | public void onAddPartition(AddPartitionEvent partitionEvent) throws MetaException {
64 | addEvent(partitionEvent);
65 | }
66 |
67 | @Override
68 | public void onCreateDatabase(CreateDatabaseEvent db) throws MetaException {
69 | addEvent(db);
70 | }
71 |
72 | @Override
73 | public void onCreateTable(CreateTableEvent table) throws MetaException {
74 | addEvent(table);
75 | }
76 |
77 | @Override
78 | public void onDropDatabase(DropDatabaseEvent db) throws MetaException {
79 | addEvent(db);
80 | }
81 |
82 | @Override
83 | public void onDropPartition(DropPartitionEvent partitionEvent) throws MetaException {
84 | addEvent(partitionEvent);
85 | }
86 |
87 | @Override
88 | public void onDropTable(DropTableEvent table) throws MetaException {
89 | addEvent(table);
90 | }
91 |
92 | @Override
93 | public void onAlterTable(AlterTableEvent event) throws MetaException {
94 | addEvent(event);
95 | }
96 |
97 | @Override
98 | public void onAlterPartition(AlterPartitionEvent event) throws MetaException {
99 | addEvent(event);
100 | }
101 |
102 | @Override
103 | public void onLoadPartitionDone(LoadPartitionDoneEvent partitionEvent) throws MetaException {
104 | addEvent(partitionEvent);
105 | }
106 |
107 | @Override
108 | public void onCreateFunction(CreateFunctionEvent fnEvent) throws MetaException {
109 | addEvent(fnEvent);
110 | }
111 |
112 | @Override
113 | public void onDropFunction(DropFunctionEvent fnEvent) throws MetaException {
114 | addEvent(fnEvent);
115 | }
116 |
117 | private void addEvent(ListenerEvent event) {
118 | notifyList.add(event);
119 | }
120 | }
121 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/listener/AnotherDummyListener.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service.listener;
17 |
18 | import java.util.ArrayList;
19 | import java.util.List;
20 |
21 | import org.apache.hadoop.conf.Configuration;
22 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
23 | import org.apache.hadoop.hive.metastore.api.MetaException;
24 | import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
25 | import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
26 | import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
27 | import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
28 | import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
29 | import org.apache.hadoop.hive.metastore.events.CreateFunctionEvent;
30 | import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
31 | import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
32 | import org.apache.hadoop.hive.metastore.events.DropFunctionEvent;
33 | import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
34 | import org.apache.hadoop.hive.metastore.events.DropTableEvent;
35 | import org.apache.hadoop.hive.metastore.events.ListenerEvent;
36 | import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent;
37 |
38 | public class AnotherDummyListener extends MetaStoreEventListener {
39 |
40 |   public final List<ListenerEvent> notifyList = new ArrayList<>();
41 |
42 | /**
43 | * @return The last event received, or null if no event was received.
44 | */
45 | public ListenerEvent getLastEvent() {
46 | if (notifyList.isEmpty()) {
47 | return null;
48 | } else {
49 | return notifyList.get(notifyList.size() - 1);
50 | }
51 | }
52 |
53 | public AnotherDummyListener(Configuration config) {
54 | super(config);
55 | }
56 |
57 | @Override
58 | public void onConfigChange(ConfigChangeEvent configChange) {
59 | addEvent(configChange);
60 | }
61 |
62 | @Override
63 | public void onAddPartition(AddPartitionEvent partitionEvent) throws MetaException {
64 | addEvent(partitionEvent);
65 | }
66 |
67 | @Override
68 | public void onCreateDatabase(CreateDatabaseEvent db) throws MetaException {
69 | addEvent(db);
70 | }
71 |
72 | @Override
73 | public void onCreateTable(CreateTableEvent table) throws MetaException {
74 | addEvent(table);
75 | }
76 |
77 | @Override
78 | public void onDropDatabase(DropDatabaseEvent db) throws MetaException {
79 | addEvent(db);
80 | }
81 |
82 | @Override
83 | public void onDropPartition(DropPartitionEvent partitionEvent) throws MetaException {
84 | addEvent(partitionEvent);
85 | }
86 |
87 | @Override
88 | public void onDropTable(DropTableEvent table) throws MetaException {
89 | addEvent(table);
90 | }
91 |
92 | @Override
93 | public void onAlterTable(AlterTableEvent event) throws MetaException {
94 | addEvent(event);
95 | }
96 |
97 | @Override
98 | public void onAlterPartition(AlterPartitionEvent event) throws MetaException {
99 | addEvent(event);
100 | }
101 |
102 | @Override
103 | public void onLoadPartitionDone(LoadPartitionDoneEvent partitionEvent) throws MetaException {
104 | addEvent(partitionEvent);
105 | }
106 |
107 | @Override
108 | public void onCreateFunction(CreateFunctionEvent fnEvent) throws MetaException {
109 | addEvent(fnEvent);
110 | }
111 |
112 | @Override
113 | public void onDropFunction(DropFunctionEvent fnEvent) throws MetaException {
114 | addEvent(fnEvent);
115 | }
116 |
117 | private void addEvent(ListenerEvent event) {
118 | notifyList.add(event);
119 | }
120 | }
121 |
--------------------------------------------------------------------------------
/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DummyListener.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020-2025 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.core.integration;
17 |
18 | import java.util.ArrayList;
19 | import java.util.List;
20 |
21 | import org.apache.hadoop.conf.Configuration;
22 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
23 | import org.apache.hadoop.hive.metastore.api.MetaException;
24 | import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
25 | import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
26 | import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
27 | import org.apache.hadoop.hive.metastore.events.ConfigChangeEvent;
28 | import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
29 | import org.apache.hadoop.hive.metastore.events.CreateFunctionEvent;
30 | import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
31 | import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
32 | import org.apache.hadoop.hive.metastore.events.DropFunctionEvent;
33 | import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
34 | import org.apache.hadoop.hive.metastore.events.DropTableEvent;
35 | import org.apache.hadoop.hive.metastore.events.ListenerEvent;
36 | import org.apache.hadoop.hive.metastore.events.LoadPartitionDoneEvent;
37 |
38 | import io.micrometer.core.instrument.Counter;
39 | import io.micrometer.core.instrument.Metrics;
40 |
41 | public class DummyListener extends MetaStoreEventListener {
42 |
43 |   public static final List<ListenerEvent> notifyList = new ArrayList<>();
44 | public static final Counter EVENT_COUNT_METRIC = Counter.builder("EVENT_COUNT_CUSTOM_METRIC")
45 | .register(Metrics.globalRegistry);
46 |
47 | /**
48 | * @return The last event received, or null if no event was received.
49 | */
50 | public static ListenerEvent getLastEvent() {
51 | if (notifyList.isEmpty()) {
52 | return null;
53 | } else {
54 | return notifyList.get(notifyList.size() - 1);
55 | }
56 | }
57 |
58 | /**
59 | * @return event at index i, or null if no event was received.
60 | */
61 | public static ListenerEvent get(int index) {
62 | if (notifyList.isEmpty()) {
63 | return null;
64 | } else {
65 | ListenerEvent listenerEvent = notifyList.get(index);
66 | EVENT_COUNT_METRIC.increment();
67 | return listenerEvent;
68 | }
69 | }
70 |
71 | public static int getNumEvents() {
72 | return notifyList.size();
73 | }
74 |
75 | public static void reset() {
76 | notifyList.clear();
77 | }
78 |
79 | public DummyListener(Configuration config) {
80 | super(config);
81 | }
82 |
83 | @Override
84 | public void onConfigChange(ConfigChangeEvent configChange) {
85 | addEvent(configChange);
86 | }
87 |
88 | @Override
89 | public void onAddPartition(AddPartitionEvent partitionEvent) throws MetaException {
90 | addEvent(partitionEvent);
91 | }
92 |
93 | @Override
94 | public void onCreateDatabase(CreateDatabaseEvent db) throws MetaException {
95 | addEvent(db);
96 | }
97 |
98 | @Override
99 | public void onCreateTable(CreateTableEvent table) throws MetaException {
100 | addEvent(table);
101 | }
102 |
103 | @Override
104 | public void onDropDatabase(DropDatabaseEvent db) throws MetaException {
105 | addEvent(db);
106 | }
107 |
108 | @Override
109 | public void onDropPartition(DropPartitionEvent partitionEvent) throws MetaException {
110 | addEvent(partitionEvent);
111 | }
112 |
113 | @Override
114 | public void onDropTable(DropTableEvent table) throws MetaException {
115 | addEvent(table);
116 | }
117 |
118 | @Override
119 | public void onAlterTable(AlterTableEvent event) throws MetaException {
120 | addEvent(event);
121 | }
122 |
123 | @Override
124 | public void onAlterPartition(AlterPartitionEvent event) throws MetaException {
125 | addEvent(event);
126 | }
127 |
128 | @Override
129 | public void onLoadPartitionDone(LoadPartitionDoneEvent partitionEvent) throws MetaException {
130 | addEvent(partitionEvent);
131 | }
132 |
133 | @Override
134 | public void onCreateFunction(CreateFunctionEvent fnEvent) throws MetaException {
135 | addEvent(fnEvent);
136 | }
137 |
138 | @Override
139 | public void onDropFunction(DropFunctionEvent fnEvent) throws MetaException {
140 | addEvent(fnEvent);
141 | }
142 |
143 | private void addEvent(ListenerEvent event) {
144 | notifyList.add(event);
145 | }
146 | }
147 |
--------------------------------------------------------------------------------
/drone-fly-app/pom.xml:
--------------------------------------------------------------------------------
1 |
2 | 4.0.0
3 |
4 |
5 | com.expediagroup
6 | drone-fly-parent
7 | 1.0.9-SNAPSHOT
8 |
9 |
10 | drone-fly-app
11 | jar
12 | ${project.groupId}:${project.artifactId}
13 |
14 |
15 | 1.11.532
16 | 0.2.5
17 | 2.3.9
18 | 8008
19 |
20 |
21 |
22 |
23 | com.expediagroup
24 | drone-fly-core
25 | ${project.version}
26 |
27 |
28 | com.expediagroup.apiary
29 | kafka-metastore-receiver
30 | 6.0.2
31 |
32 |
33 | jdk.tools
34 | jdk.tools
35 |
36 |
37 | org.slf4j
38 | slf4j-log4j12
39 |
40 |
41 | org.apache.logging.log4j
42 | log4j-slf4j-impl
43 |
44 |
45 | javax.servlet
46 | servlet-api
47 |
48 |
49 | org.eclipse.jetty.orbit
50 | javax.servlet
51 |
52 |
53 | org.apache.geronimo.specs
54 | geronimo-jaspic_1.0_spec
55 |
56 |
57 |
58 |
59 | io.micrometer
60 | micrometer-registry-prometheus
61 |
62 |
63 | org.apache.httpcomponents
64 | httpclient
65 |
66 |
67 | org.apache.hive
68 | hive-metastore
69 | ${hive.version}
70 |
71 |
72 | junit
73 | junit
74 |
75 |
76 | org.eclipse.jetty.aggregate
77 | jetty-all
78 |
79 |
80 | org.eclipse.jetty.orbit
81 | javax.servlet
82 |
83 |
84 | javax.servlet
85 | servlet-api
86 |
87 |
88 | jdk.tools
89 | jdk.tools
90 |
91 |
92 |
93 |
94 | org.yaml
95 | snakeyaml
96 |
97 |
98 | com.google.guava
99 | guava
100 | 27.1-jre
101 |
102 |
103 | software.amazon.msk
104 | aws-msk-iam-auth
105 | ${msk.iam.version}
106 |
107 |
108 | io.dropwizard.metrics
109 | metrics-core
110 | ${dropwizard.version}
111 |
112 |
113 | org.springframework
114 | spring-test
115 | test
116 |
117 |
118 | org.springframework.boot
119 | spring-boot-test
120 | test
121 |
122 |
123 | org.awaitility
124 | awaitility
125 | test
126 |
127 |
128 |
129 |
130 |
131 |
132 | org.springframework.boot
133 | spring-boot-maven-plugin
134 |
135 |
136 | com.google.cloud.tools
137 | jib-maven-plugin
138 |
139 |
140 | USE_CURRENT_TIMESTAMP
141 |
142 |
143 |
144 |
145 |
146 |
147 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/ListenerCatalogTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service;
17 |
18 | import static org.assertj.core.api.Assertions.assertThat;
19 | import static org.junit.jupiter.api.Assertions.assertThrows;
20 | import static org.junit.jupiter.api.Assertions.assertTrue;
21 |
22 | import java.util.List;
23 |
24 | import org.apache.hadoop.hive.conf.HiveConf;
25 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
26 | import org.apache.hadoop.hive.metastore.api.MetaException;
27 | import org.junit.jupiter.api.Test;
28 |
29 | import com.expediagroup.dataplatform.dronefly.app.service.listener.AnotherDummyListener;
30 | import com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener;
31 | import com.expediagroup.dataplatform.dronefly.core.exception.DroneFlyException;
32 |
33 | public class ListenerCatalogTest {
34 | private ListenerCatalog listenerCatalog;
35 |
36 | @Test
37 | public void typical() throws MetaException {
38 | String listenerImplList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener,"
39 | + "com.expediagroup.dataplatform.dronefly.app.service.listener.AnotherDummyListener";
40 |
41 | listenerCatalog = new ListenerCatalog(new HiveConf(), listenerImplList);
42 |     List<MetaStoreEventListener> result = listenerCatalog.getListeners();
43 |
44 | assertThat(result.size()).isEqualTo(2);
45 | assertThat(result.get(0)).isInstanceOf(DummyListener.class);
46 | assertThat(result.get(1)).isInstanceOf(AnotherDummyListener.class);
47 | }
48 |
49 | @Test
50 | public void oneListenerProvidedAndNotFound() throws MetaException {
51 | String listenerImplList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener1";
52 | DroneFlyException exception = assertThrows(DroneFlyException.class, () -> {
53 | listenerCatalog = new ListenerCatalog(new HiveConf(), listenerImplList);
54 | });
55 |
56 | assertTrue(exception
57 | .getMessage()
58 | .contains(
59 | "Failed to instantiate listener named: com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener1"));
60 | }
61 |
62 | @Test
63 | public void oneOutOfTwoListenersNotFound() throws MetaException {
64 | String listenerImplList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener,"
65 | + "com.expediagroup.dataplatform.dronefly.app.service.listener.AnotherDummyListener1";
66 | DroneFlyException exception = assertThrows(DroneFlyException.class, () -> {
67 | listenerCatalog = new ListenerCatalog(new HiveConf(), listenerImplList);
68 | });
69 |
70 | assertTrue(exception
71 | .getMessage()
72 | .contains(
73 | "Failed to instantiate listener named: com.expediagroup.dataplatform.dronefly.app.service.listener.AnotherDummyListener1"));
74 | }
75 |
76 | @Test
77 | public void whiteSpacesInTheMiddleOfListenerImplList() throws MetaException {
78 | String listenerImplList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener ,"
79 | + " com.expediagroup.dataplatform.dronefly.app.service.listener.AnotherDummyListener";
80 | listenerCatalog = new ListenerCatalog(new HiveConf(), listenerImplList);
81 |     List<MetaStoreEventListener> result = listenerCatalog.getListeners();
82 |
83 | assertThat(result.size()).isEqualTo(2);
84 | assertThat(result.get(0)).isInstanceOf(DummyListener.class);
85 | assertThat(result.get(1)).isInstanceOf(AnotherDummyListener.class);
86 | }
87 |
88 | @Test
89 | public void extraCommaAtTheEndOfListenerImplList() throws MetaException {
90 | String listenerImplList = "com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener,"
91 | + "com.expediagroup.dataplatform.dronefly.app.service.listener.AnotherDummyListener,";
92 | listenerCatalog = new ListenerCatalog(new HiveConf(), listenerImplList);
93 |     List<MetaStoreEventListener> result = listenerCatalog.getListeners();
94 |
95 | assertThat(result.size()).isEqualTo(2);
96 | assertThat(result.get(0)).isInstanceOf(DummyListener.class);
97 | assertThat(result.get(1)).isInstanceOf(AnotherDummyListener.class);
98 | }
99 |
100 | @Test
101 | public void emptyListenerImplList() throws MetaException {
102 | String listenerImplList = " ";
103 | IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> {
104 | listenerCatalog = new ListenerCatalog(new HiveConf(), listenerImplList);
105 | });
106 |
107 | assertTrue(exception.getMessage().contains("ListenerImplList cannot be null or empty"));
108 | }
109 |
110 | @Test
111 | public void nullListenerImplList() throws MetaException {
112 | String listenerImplList = null;
113 | IllegalArgumentException exception = assertThrows(IllegalArgumentException.class, () -> {
114 | listenerCatalog = new ListenerCatalog(new HiveConf(), listenerImplList);
115 | });
116 | assertTrue(exception.getMessage().contains("ListenerImplList cannot be null or empty"));
117 | }
118 |
119 | }
120 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterService.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service;
17 |
18 | import java.util.ArrayList;
19 | import java.util.List;
20 | import java.util.Map;
21 |
22 | import org.apache.hadoop.hive.metastore.api.InsertEventRequestData;
23 | import org.apache.hadoop.hive.metastore.api.MetaException;
24 | import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
25 | import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
26 | import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
27 | import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
28 | import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
29 | import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
30 | import org.apache.hadoop.hive.metastore.events.DropTableEvent;
31 | import org.apache.hadoop.hive.metastore.events.InsertEvent;
32 | import org.apache.hadoop.hive.metastore.events.ListenerEvent;
33 | import org.slf4j.Logger;
34 | import org.slf4j.LoggerFactory;
35 | import org.springframework.beans.factory.annotation.Autowired;
36 | import org.springframework.stereotype.Component;
37 |
38 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryAddPartitionEvent;
39 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryAlterPartitionEvent;
40 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryAlterTableEvent;
41 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryCreateTableEvent;
42 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryDropPartitionEvent;
43 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryDropTableEvent;
44 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryInsertEvent;
45 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryListenerEvent;
46 | import com.expediagroup.dataplatform.dronefly.app.service.factory.HMSHandlerFactory;
47 | import com.expediagroup.dataplatform.dronefly.core.exception.DroneFlyException;
48 |
49 | @Component
50 | public class HiveEventConverterService {
51 | private static final Logger log = LoggerFactory.getLogger(HiveEventConverterService.class);
52 |
53 | private final HMSHandlerFactory hmsHandlerFactory;
54 |
55 | @Autowired
56 | public HiveEventConverterService(HMSHandlerFactory hmsHandlerFactory) {
57 | this.hmsHandlerFactory = hmsHandlerFactory;
58 | }
59 |
60 | public ListenerEvent toHiveEvent(ApiaryListenerEvent serializableHiveEvent)
61 | throws MetaException, NoSuchObjectException {
62 | ListenerEvent hiveEvent = null;
63 |
64 | if (serializableHiveEvent == null) {
65 | return hiveEvent;
66 | }
67 |
68 | switch (serializableHiveEvent.getEventType()) {
69 | case ON_ADD_PARTITION: {
70 | ApiaryAddPartitionEvent addPartition = (ApiaryAddPartitionEvent) serializableHiveEvent;
71 | hiveEvent = new AddPartitionEvent(addPartition.getTable(), addPartition.getPartitions(), addPartition.getStatus(),
72 | hmsHandlerFactory.newInstance());
73 | break;
74 | }
75 | case ON_ALTER_PARTITION: {
76 | ApiaryAlterPartitionEvent alterPartition = (ApiaryAlterPartitionEvent) serializableHiveEvent;
77 | hiveEvent = new AlterPartitionEvent(alterPartition.getOldPartition(), alterPartition.getNewPartition(),
78 | alterPartition.getTable(), alterPartition.getStatus(), hmsHandlerFactory.newInstance());
79 | break;
80 | }
81 | case ON_DROP_PARTITION: {
82 | ApiaryDropPartitionEvent dropPartition = (ApiaryDropPartitionEvent) serializableHiveEvent;
83 | hiveEvent = new DropPartitionEvent(dropPartition.getTable(), dropPartition.getPartitions().get(0),
84 | dropPartition.getStatus(), dropPartition.getDeleteData(), hmsHandlerFactory.newInstance());
85 | break;
86 | }
87 | case ON_CREATE_TABLE: {
88 | ApiaryCreateTableEvent createTableEvent = (ApiaryCreateTableEvent) serializableHiveEvent;
89 | hiveEvent = new CreateTableEvent(createTableEvent.getTable(), createTableEvent.getStatus(),
90 | hmsHandlerFactory.newInstance());
91 | break;
92 | }
93 | case ON_ALTER_TABLE: {
94 | ApiaryAlterTableEvent alterTableEvent = (ApiaryAlterTableEvent) serializableHiveEvent;
95 | hiveEvent = new AlterTableEvent(alterTableEvent.getOldTable(), alterTableEvent.getNewTable(),
96 | alterTableEvent.getStatus(), hmsHandlerFactory.newInstance());
97 | break;
98 | }
99 | case ON_DROP_TABLE: {
100 | ApiaryDropTableEvent dropTable = (ApiaryDropTableEvent) serializableHiveEvent;
101 | hiveEvent = new DropTableEvent(dropTable.getTable(), dropTable.getStatus(), dropTable.getDeleteData(),
102 | hmsHandlerFactory.newInstance());
103 | break;
104 | }
105 |
106 | case ON_INSERT: {
107 | ApiaryInsertEvent insert = (ApiaryInsertEvent) serializableHiveEvent;
108 |
109 |         List<String> partVals = new ArrayList<>();
110 |         Map<String, String> keyValues = insert.getPartitionKeyValues();
111 |
112 | for (String value : keyValues.values()) {
113 | partVals.add(value);
114 | }
115 |
116 | InsertEventRequestData insertEventRequestData = new InsertEventRequestData(insert.getFiles());
117 | insertEventRequestData.setFilesAddedChecksum(insert.getFileChecksums());
118 |
119 | hiveEvent = new InsertEvent(insert.getDatabaseName(), insert.getTableName(), partVals, insertEventRequestData,
120 | insert.getStatus(), hmsHandlerFactory.newInstance());
121 | break;
122 | }
123 | default:
124 | throw new DroneFlyException("Unsupported event type: " + serializableHiveEvent.getEventType().toString());
125 | }
126 |
127 | return hiveEvent;
128 |
129 | }
130 | }
131 |
--------------------------------------------------------------------------------
/drone-fly-app/src/main/java/com/expediagroup/dataplatform/dronefly/app/service/listener/LoggingMetastoreListener.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service.listener;
17 |
18 | import org.apache.hadoop.conf.Configuration;
19 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
20 | import org.apache.hadoop.hive.metastore.api.MetaException;
21 | import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
22 | import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
23 | import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
24 | import org.apache.hadoop.hive.metastore.events.CreateDatabaseEvent;
25 | import org.apache.hadoop.hive.metastore.events.CreateFunctionEvent;
26 | import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
27 | import org.apache.hadoop.hive.metastore.events.DropDatabaseEvent;
28 | import org.apache.hadoop.hive.metastore.events.DropFunctionEvent;
29 | import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
30 | import org.apache.hadoop.hive.metastore.events.DropTableEvent;
31 | import org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType;
32 | import org.slf4j.Logger;
33 | import org.slf4j.LoggerFactory;
34 |
35 | public class LoggingMetastoreListener extends MetaStoreEventListener {
36 | private static final Logger log = LoggerFactory.getLogger(LoggingMetastoreListener.class);
37 |
38 | public LoggingMetastoreListener(Configuration config) {
39 | super(config);
40 | }
41 |
42 | @Override
43 | public void onAddPartition(AddPartitionEvent addPartitionEvent) throws MetaException {
44 | log
45 | .info("Event Type: {}, DB Name: {}, Table Name: {}, Status: {}", EventType.ADD_PARTITION.toString(),
46 | addPartitionEvent.getTable().getDbName(), addPartitionEvent.getTable().getTableName(),
47 | addPartitionEvent.getStatus());
48 | }
49 |
50 | @Override
51 | public void onCreateDatabase(CreateDatabaseEvent createDatabaseEvent) throws MetaException {
52 | log
53 | .info("Event Type: {}, DB Name: {}, DB Location: {}, Status: {}", EventType.CREATE_DATABASE.toString(),
54 | createDatabaseEvent.getDatabase().getName(), createDatabaseEvent.getDatabase().getLocationUri(),
55 | createDatabaseEvent.getStatus());
56 | }
57 |
58 | @Override
59 | public void onCreateTable(CreateTableEvent createTableEvent) throws MetaException {
60 | log
61 | .info("Event Type: {}, DB Name: {}, Table Name: {}, Status: {}", EventType.CREATE_TABLE.toString(),
62 | createTableEvent.getTable().getDbName(), createTableEvent.getTable().getTableName(),
63 | createTableEvent.getStatus());
64 | }
65 |
66 | @Override
67 | public void onDropDatabase(DropDatabaseEvent dropDatabaseEvent) throws MetaException {
68 | log
69 | .info("Event Type: {}, DB Name: {}, DB Location: {}, Status: {}", EventType.DROP_DATABASE.toString(),
70 | dropDatabaseEvent.getDatabase().getName(), dropDatabaseEvent.getDatabase().getLocationUri(),
71 | dropDatabaseEvent.getStatus());
72 | }
73 |
74 | @Override
75 | public void onDropPartition(DropPartitionEvent dropPartitionEvent) throws MetaException {
76 | log
77 | .info("Event Type: {}, DB Name: {}, Table Name: {}, Status: {}", EventType.DROP_PARTITION.toString(),
78 | dropPartitionEvent.getTable().getDbName(), dropPartitionEvent.getTable().getTableName(),
79 | dropPartitionEvent.getStatus());
80 | }
81 |
82 | @Override
83 | public void onDropTable(DropTableEvent dropTableEvent) throws MetaException {
84 | log
85 | .info("Event Type: {}, DB Name: {}, Table Name: {}, Status: {}", EventType.DROP_TABLE.toString(),
86 | dropTableEvent.getTable().getDbName(), dropTableEvent.getTable().getTableName(),
87 | dropTableEvent.getStatus());
88 | }
89 |
90 | @Override
91 | public void onAlterTable(AlterTableEvent alterTableEvent) throws MetaException {
92 | log
93 | .info(
94 |             "Event Type: {}, Old DB Name: {}, Old Table Name: {}, Old Table Location: {}, New DB Name: {}, New Table Name: {}, New Table Location: {}, Status: {}",
95 | EventType.ALTER_TABLE.toString(), alterTableEvent.getOldTable().getDbName(),
96 | alterTableEvent.getOldTable().getTableName(), alterTableEvent.getOldTable().getSd().getLocation(),
97 | alterTableEvent.getNewTable().getDbName(), alterTableEvent.getNewTable().getTableName(),
98 | alterTableEvent.getNewTable().getSd().getLocation(), alterTableEvent.getStatus());
99 |
100 | }
101 |
102 | @Override
103 | public void onAlterPartition(AlterPartitionEvent alterPartitionEvent) throws MetaException {
104 | log
105 | .info(
106 | "Event Type: {}, DB Name: {}, Table Name: {}, Old partition Location: {}, New partition Location: {}, Status: {}",
107 | EventType.ALTER_PARTITION.toString(), alterPartitionEvent.getOldPartition().getDbName(),
108 | alterPartitionEvent.getOldPartition().getTableName(),
109 | alterPartitionEvent.getOldPartition().getSd().getLocation(),
110 | alterPartitionEvent.getNewPartition().getSd().getLocation(), alterPartitionEvent.getStatus());
111 | }
112 |
113 | @Override
114 | public void onCreateFunction(CreateFunctionEvent createFunctionEvent) throws MetaException {
115 | log
116 | .info("Event Type: {}, Function Name: {}, Function Class Name: {}, Status: {}",
117 | EventType.CREATE_FUNCTION.toString(), createFunctionEvent.getFunction().getFunctionName(),
118 | createFunctionEvent.getFunction().getClassName(), createFunctionEvent.getStatus());
119 | }
120 |
121 | @Override
122 | public void onDropFunction(DropFunctionEvent dropFunctionEvent) throws MetaException {
123 | log
124 | .info("Event Type: {}, Function Name: {}, Function Class Name: {}, Status: {}",
125 |             EventType.DROP_FUNCTION.toString(), dropFunctionEvent.getFunction().getFunctionName(),
126 | dropFunctionEvent.getFunction().getClassName(), dropFunctionEvent.getStatus());
127 | }
128 |
129 | }
130 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/DroneFlyNotificationServiceTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service;
17 |
18 | import static org.assertj.core.api.Assertions.assertThat;
19 | import static org.junit.jupiter.api.Assertions.assertThrows;
20 | import static org.junit.jupiter.api.Assertions.assertTrue;
21 | import static org.mockito.ArgumentMatchers.any;
22 | import static org.mockito.Mockito.times;
23 | import static org.mockito.Mockito.verify;
24 | import static org.mockito.Mockito.when;
25 |
26 | import java.io.IOException;
27 | import java.util.ArrayList;
28 | import java.util.List;
29 |
30 | import org.apache.hadoop.hive.conf.HiveConf;
31 | import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
32 | import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
33 | import org.apache.hadoop.hive.metastore.api.MetaException;
34 | import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
35 | import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
36 | import org.junit.jupiter.api.BeforeEach;
37 | import org.junit.jupiter.api.Test;
38 | import org.junit.jupiter.api.extension.ExtendWith;
39 | import org.mockito.Mock;
40 | import org.mockito.Mockito;
41 | import org.mockito.junit.jupiter.MockitoExtension;
42 |
43 | import com.expediagroup.apiary.extensions.events.metastore.common.MetaStoreEventsException;
44 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryListenerEvent;
45 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryListenerEventFactory;
46 | import com.expediagroup.dataplatform.dronefly.app.messaging.MessageReaderAdapter;
47 | import com.expediagroup.dataplatform.dronefly.app.service.listener.DummyListener;
48 | import com.expediagroup.dataplatform.dronefly.core.exception.DroneFlyException;
49 |
50 | @ExtendWith(MockitoExtension.class)
51 | public class DroneFlyNotificationServiceTest {
52 | private @Mock MessageReaderAdapter reader;
53 | private @Mock ListenerCatalog listenerCatalog;
54 | private @Mock HiveEventConverterService converterService;
55 |
56 | private final DummyListener dummyListener = new DummyListener(new HiveConf());
57 |   private final List<MetaStoreEventListener> metastoreListeners = new ArrayList<>();
58 | private DroneFlyNotificationService droneFlyNotificationService;
59 | private CreateTableEvent createTableEvent;
60 | private ApiaryListenerEvent apiaryCreateTableEvent;
61 |
62 | @BeforeEach
63 | public void init() throws MetaException, NoSuchObjectException {
64 | createTableEvent = createTableEvent();
65 | apiaryCreateTableEvent = createApiaryListenerEvent(createTableEvent);
66 |
67 | when(reader.read()).thenReturn(apiaryCreateTableEvent);
68 |
69 | droneFlyNotificationService = new DroneFlyNotificationService(reader, converterService, listenerCatalog);
70 | }
71 |
72 | @Test
73 | public void typical() throws IOException, MetaException, NoSuchObjectException {
74 | metastoreListeners.add(dummyListener);
75 | when(converterService.toHiveEvent(apiaryCreateTableEvent)).thenReturn(createTableEvent);
76 | when(listenerCatalog.getListeners()).thenReturn(metastoreListeners);
77 | droneFlyNotificationService.notifyListeners();
78 | verify(reader).read();
79 | verify(listenerCatalog).getListeners();
80 |
81 | CreateTableEvent actual = (CreateTableEvent) dummyListener.getLastEvent();
82 |
83 | assertEvent(actual);
84 | destroy();
85 | verify(reader).close();
86 | }
87 |
88 | @Test
89 | public void multipleListenersLoaded() throws IOException, MetaException, NoSuchObjectException {
90 | when(converterService.toHiveEvent(apiaryCreateTableEvent)).thenReturn(createTableEvent);
91 | DummyListener listener2 = new DummyListener(new HiveConf());
92 | metastoreListeners.add(dummyListener);
93 | metastoreListeners.add(listener2);
94 | when(listenerCatalog.getListeners()).thenReturn(metastoreListeners);
95 |
96 | droneFlyNotificationService.notifyListeners();
97 | verify(reader, times(1)).read();
98 | verify(listenerCatalog, times(1)).getListeners();
99 |
100 | CreateTableEvent actual1 = (CreateTableEvent) dummyListener.getLastEvent();
101 | CreateTableEvent actual2 = (CreateTableEvent) listener2.getLastEvent();
102 |
103 | assertEvent(actual1);
104 | assertEvent(actual2);
105 |
106 | destroy();
107 | verify(reader).close();
108 | }
109 |
110 | @Test
111 | public void exceptionThrownWhileDeserializingEvent() throws IOException {
112 | when(reader.read()).thenThrow(new MetaStoreEventsException("Cannot deserialize hive event"));
113 |
114 | DroneFlyException exception = assertThrows(DroneFlyException.class, () -> {
115 | droneFlyNotificationService.notifyListeners();
116 | });
117 |
118 | assertTrue(exception.getMessage().contains("Cannot unmarshal this event. It will be ignored."));
119 |
120 | destroy();
121 | verify(reader).close();
122 | }
123 |
124 | @Test
125 | public void metaExceptionThrownWhileNotifying() throws MetaException, NoSuchObjectException, IOException {
126 | when(converterService.toHiveEvent(Mockito.any())).thenThrow(new MetaException("MetaException is thrown."));
127 |
128 | DroneFlyException exception = assertThrows(DroneFlyException.class, () -> {
129 | droneFlyNotificationService.notifyListeners();
130 | });
131 |
132 | assertTrue(
133 | exception.getMessage().contains("Hive event was received but Drone Fly failed to notify all the listeners."));
134 |
135 | destroy();
136 | verify(reader).close();
137 | }
138 |
139 | @Test
140 | public void noSuchObjectExceptionThrownWhileNotifying() throws MetaException, NoSuchObjectException, IOException {
141 | when(converterService.toHiveEvent(Mockito.any()))
142 | .thenThrow(new NoSuchObjectException("NoSuchObjectException is thrown."));
143 |
144 | DroneFlyException exception = assertThrows(DroneFlyException.class, () -> {
145 | droneFlyNotificationService.notifyListeners();
146 | });
147 |
148 | assertTrue(
149 | exception.getMessage().contains("Hive event was received but Drone Fly failed to notify all the listeners."));
150 |
151 | destroy();
152 | verify(reader).close();
153 | }
154 |
155 | @Test
156 | public void eventNotSupportedByConverter() throws MetaException, NoSuchObjectException, IOException {
157 | when(converterService.toHiveEvent(any())).thenThrow(new DroneFlyException("Unsupported event type: DROP_INDEX"));
158 |
159 | DroneFlyException exception = assertThrows(DroneFlyException.class, () -> {
160 | droneFlyNotificationService.notifyListeners();
161 | });
162 | assertTrue(exception.getMessage().contains("Unsupported event type: DROP_INDEX"));
163 |
164 | destroy();
165 | verify(reader).close();
166 | }
167 |
168 | @Test
169 | public void noListenersLoaded() throws IOException, MetaException, NoSuchObjectException {
170 |     when(listenerCatalog.getListeners()).thenReturn(new ArrayList<>());
171 | when(converterService.toHiveEvent(apiaryCreateTableEvent)).thenReturn(createTableEvent);
172 | droneFlyNotificationService.notifyListeners();
173 | verify(reader).read();
174 | verify(listenerCatalog).getListeners();
175 |
176 | CreateTableEvent actual = (CreateTableEvent) dummyListener.getLastEvent();
177 |
178 | assertThat(actual).isNull();
179 |
180 | destroy();
181 | verify(reader).close();
182 | }
183 |
184 | private void assertEvent(CreateTableEvent event) {
185 | assertThat(event.getTable().getDbName()).isEqualTo("test_db");
186 | assertThat(event.getTable().getTableName()).isEqualTo("test_table");
187 | assertThat(event.getTable().getSd().getLocation()).isEqualTo("s3://test_location");
188 | }
189 |
190 | private CreateTableEvent createTableEvent() throws MetaException {
191 | CreateTableEvent event = new CreateTableEvent(
192 | HiveTableTestUtils.createPartitionedTable("test_db", "test_table", "s3://test_location"), true,
193 | new HMSHandler("test", new HiveConf(), false));
194 | return event;
195 | }
196 |
197 | private ApiaryListenerEvent createApiaryListenerEvent(CreateTableEvent event) throws MetaException {
198 | return new ApiaryListenerEventFactory().create(event);
199 | }
200 |
201 | private void destroy() throws IOException {
202 | droneFlyNotificationService.close();
203 | }
204 | }
205 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | ![Drone Fly logo](.README_images/DroneFly_logo.png)
2 |
3 | A service which allows Hive metastore (HMS) `MetaStoreEventListener` implementations to be deployed in a separate context to the metastore's own.
4 |
5 | ## Overview
6 | [Maven Central](https://maven-badges.herokuapp.com/maven-central/com.expediagroup/drone-fly-app)
7 | [Build Status](https://github.com/ExpediaGroup/drone-fly/actions?query=workflow:"Build")
8 | [Coverage Status](https://coveralls.io/github/ExpediaGroup/drone-fly?branch=main)
9 | [License: Apache 2.0](https://opensource.org/licenses/Apache-2.0)
10 | [Docker Hub](https://hub.docker.com/r/expediagroup/drone-fly-app)
11 |
12 | Drone Fly is a distributed Hive metastore events forwarder service that allows users to deploy metastore listeners outside the Hive metastore service.
13 |
14 | With the advent of event-driven systems, the number of listeners that a user needs to install in the metastore is ever increasing. These listeners can be internal or provided by third-party tools for integration purposes. More and more processing is being added to these listeners to address various business use cases.
15 |
16 | Adding these listeners directly to the classpath of your Hive metastore couples them with it and can lead to performance degradation or, in the worst case, take down the entire metastore (e.g. by running out of memory, thread starvation, etc.). Drone Fly decouples your HMS from the event listeners by providing a virtual Hive context. The event listeners are placed on Drone Fly's classpath, and Drone Fly forwards the events it receives from the [Kafka metastore listener](https://github.com/ExpediaGroup/apiary-extensions/tree/main/apiary-metastore-events/kafka-metastore-events/kafka-metastore-listener) on to the respective listeners.
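
Since the listeners run unmodified inside Drone Fly's virtual Hive context, anything that extends Hive's `MetaStoreEventListener` can be deployed this way. The sketch below is purely illustrative (the package, class name and log message are hypothetical and not part of this project); it relies only on the standard Hive listener API:

```java
package com.example.listeners; // hypothetical package

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hive.metastore.MetaStoreEventListener;
import org.apache.hadoop.hive.metastore.api.MetaException;
import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MyAuditListener extends MetaStoreEventListener {

  private static final Logger log = LoggerFactory.getLogger(MyAuditListener.class);

  public MyAuditListener(Configuration config) {
    super(config);
  }

  @Override
  public void onCreateTable(CreateTableEvent event) throws MetaException {
    // Drone Fly invokes this callback exactly as the Hive metastore would.
    log.info("Table created: {}.{}", event.getTable().getDbName(), event.getTable().getTableName());
  }
}
```

Such a listener is then referenced by its fully qualified class name in the `apiary.listener.list` property described below.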
17 |
18 | ## Start using
19 |
20 | A Terraform module for Kubernetes deployment is available [here](https://github.com/ExpediaGroup/apiary-drone-fly).
21 |
22 | Docker images can be found in Expedia Group's [dockerhub](https://hub.docker.com/search/?q=expediagroup%2Fdrone-fly&type=image).
23 |
24 | ## System architecture
25 |
26 | The diagram below shows a typical Hive metastore setup without using Drone Fly. In this example, several Hive metastore listeners are installed, which send Hive events to other systems such as Apache Atlas, AWS SNS and Apache Kafka, as well as to custom implementations.
27 |
28 | ![Hive metastore setup before Drone Fly](.README_images/drone-fly-before.png)
29 |
30 | With Drone Fly, the setup gets modified as shown in the diagram below. The only listener installed in the Hive metastore context is the [Apiary Kafka Listener](https://github.com/ExpediaGroup/apiary-extensions/tree/main/apiary-metastore-events/kafka-metastore-events/kafka-metastore-listener). It forwards the Hive events on to Kafka, from which Drone Fly retrieves them. The other listeners are moved out into separate contexts and receive the messages from Drone Fly, which forwards them on as if they were native Hive events, so the listener code doesn't need to change at all.
31 |
32 | 
33 |
34 | Drone Fly can be run in Docker containers where each instance is initialized with a single listener, decoupling the listeners even further from one another.
35 |
36 | ## Usage
37 | ### Using with Docker
38 |
39 | To install a new HMS listener within the Drone Fly context, it is recommended that you build your Docker image using the Drone Fly base [Docker image](https://hub.docker.com/r/expediagroup/drone-fly-app).
40 |
41 | A sample Dockerfile to install the [Apiary-SNS-Listener](https://github.com/ExpediaGroup/apiary-extensions/tree/main/apiary-metastore-events/sns-metastore-events/apiary-metastore-listener) would be as follows:
42 |
43 | ```
44 | FROM expediagroup/drone-fly-app:0.0.1
45 |
46 | ENV APIARY_EXTENSIONS_VERSION 6.0.1
47 |
48 | ENV AWS_REGION us-east-1
49 | RUN cd /app/libs && \
50 | wget -q https://search.maven.org/remotecontent?filepath=com/expediagroup/apiary/apiary-metastore-listener/${APIARY_EXTENSIONS_VERSION}/apiary-metastore-listener-${APIARY_EXTENSIONS_VERSION}-all.jar -O apiary-metastore-listener-${APIARY_EXTENSIONS_VERSION}-all.jar
51 | ```
52 |
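The image can then be built and tagged in the usual way, for example (the image name is arbitrary):

```
docker build -t my-drone-fly-sns:latest .
```
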
53 | #### Running Drone Fly Docker image
54 |
55 | docker run --env APIARY_BOOTSTRAP_SERVERS="localhost:9092" \
56 | --env APIARY_LISTENER_LIST="com.expediagroup.sampleListener1,com.expediagroup.sampleListener2" \
57 | --env APIARY_KAFKA_TOPIC_NAME="dronefly" \
58 | expediagroup/drone-fly-app:<version>
59 |
60 | The [Drone Fly Terraform](https://github.com/ExpediaGroup/apiary-drone-fly) module can then be used to deploy your Docker image to a Kubernetes cluster.
61 |
62 |
63 | ### Using Uber Jar
64 |
65 | The Drone Fly build also produces an [uber jar](https://mvnrepository.com/artifact/com.expediagroup/drone-fly-app) so it can be started as a stand-alone Java service.
66 |
67 | #### Running Drone Fly Jar
68 |
69 | java -Dloader.path=lib/ -jar drone-fly-app-<version>-exec.jar \
70 | --apiary.bootstrap.servers=localhost:9092 \
71 | --apiary.kafka.topic.name=apiary \
72 | --apiary.listener.list="com.expediagroup.sampleListener1,com.expediagroup.sampleListener2"
73 |
74 | The properties `instance.name`, `apiary.bootstrap.servers`, `apiary.kafka.topic.name` and `apiary.listener.list` can also be provided in a Spring properties file.
75 |
76 | java -Dloader.path=lib/ -jar drone-fly-app-<version>-exec.jar --spring.config.location=file:///dronefly.properties
77 |
78 | The parameter `-Dloader.path` is the directory where Drone Fly will search for the configured HMS listeners.
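
As a sketch, a `dronefly.properties` file for the command above could contain the following (the values are examples only):

```
instance.name=drone-fly-1
apiary.bootstrap.servers=localhost:9092
apiary.kafka.topic.name=apiary
apiary.listener.list=com.expediagroup.sampleListener1,com.expediagroup.sampleListener2
```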
79 |
80 | ## Configuring Drone Fly
81 |
82 | ### Drone Fly configuration reference
83 | The table below describes all the available configuration values for Drone Fly.
84 |
85 | | Name | Description | Type | Default | Required |
86 | |------|-------------|------|---------|:--------:|
87 | | apiary.bootstrap.servers | Kafka bootstrap servers that receive Hive metastore events. | `string` | n/a | yes |
88 | | apiary.kafka.topic.name | Kafka topic name that receives Hive metastore events. | `string` | n/a | yes |
89 | | apiary.listener.list | Comma separated list of Hive metastore listeners to load from the classpath, e.g. `com.expedia.HMSListener1,com.expedia.HMSListener2` | `string` | `"com.expediagroup.dataplatform.dronefly.app.service.listener.LoggingMetastoreListener"` | no |
90 | | instance.name | Instance name for a Drone Fly instance, also used to derive the Kafka consumer group. In a multi-instance deployment, each Drone Fly instance therefore needs a unique `instance.name` to avoid all instances ending up in the same Kafka consumer group (see the example below this table). | `string` | `drone-fly` | no |
91 | | endpoint.port | Port on which Drone Fly Spring Boot app will start. | `string` | `8008` | no |
92 |
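For example, two Drone Fly instances consuming the same topic could each be started with a distinct `instance.name` so that they end up in separate Kafka consumer groups (the instance names and listeners below are illustrative):

```
java -Dloader.path=lib/ -jar drone-fly-app-<version>-exec.jar \
  --instance.name=drone-fly-1 \
  --apiary.bootstrap.servers=localhost:9092 \
  --apiary.kafka.topic.name=apiary \
  --apiary.listener.list="com.expediagroup.sampleListener1"

java -Dloader.path=lib/ -jar drone-fly-app-<version>-exec.jar \
  --instance.name=drone-fly-2 \
  --apiary.bootstrap.servers=localhost:9092 \
  --apiary.kafka.topic.name=apiary \
  --apiary.listener.list="com.expediagroup.sampleListener2"
```
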
93 | ### Additional configuration parameters
94 | The Kafka message reader supports additional properties that are passed straight through to the Kafka consumer builder.
95 | These are provided as properties (or environment variables) with the prefix `apiary.messaging.consumer.`.
96 |
97 | #### Example of passing consumer parameters when using a Kafka cloud provider
98 | - apiary.messaging.consumer.security.protocol=SSL
99 | - apiary.messaging.consumer.sasl.mechanism=AWS_MSK_IAM
100 | - apiary.messaging.consumer.sasl.jaas.config=software.amazon.msk.auth.iam.IAMLoginModule required;
101 | - apiary.messaging.consumer.sasl.client.callback.handler.class=software.amazon.msk.auth.iam.IAMClientCallbackHandler
102 |
103 | In this case the properties are passed through to the Kafka consumer so that it can connect to AWS MSK, which also requires the IAM library to be included as a dependency in the `pom.xml` file.
104 |
105 | java -Dloader.path=lib/ -jar drone-fly-app-<version>-exec.jar \
106 | --apiary.bootstrap.servers=localhost:9092 \
107 | --apiary.kafka.topic.name=apiary \
108 | --apiary.listener.list="com.expediagroup.sampleListener1,com.expediagroup.sampleListener2" \
109 | --apiary.messaging.consumer.security.protocol=SSL \
110 | --apiary.messaging.consumer.sasl.mechanism=AWS_MSK_IAM \
111 | --apiary.messaging.consumer.sasl.jaas.config=software.amazon.msk.auth.iam.IAMLoginModule required; \
112 | --apiary.messaging.consumer.sasl.client.callback.handler.class=software.amazon.msk.auth.iam.IAMClientCallbackHandler
113 |
114 | ## Metrics
115 |
116 | Drone Fly exposes standard [JVM and Kafka metrics](https://docs.spring.io/spring-boot/docs/current/reference/htmlsingle/#production-ready-metrics-meter) through the [Spring Boot Actuator Prometheus](https://docs.spring.io/spring-boot/docs/current/reference/html/production-ready-features.html#production-ready-metrics-export-prometheus) endpoint `/actuator/prometheus`.
117 |
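For example, the exposed metrics can be inspected with a plain HTTP request against the default port from the configuration reference above:

```
curl http://localhost:8008/actuator/prometheus
```
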
118 | ### Some useful metrics to track
119 |
120 | ```
121 | system_cpu_usage
122 | kafka_consumer_records_consumed_total_records_total
123 | jvm_memory_committed_bytes
124 | ```
125 |
126 |
127 | ## Legal
128 | This project is available under the [Apache 2.0 License](http://www.apache.org/licenses/LICENSE-2.0.html).
129 |
130 | Copyright 2020 Expedia, Inc.
131 |
--------------------------------------------------------------------------------
/drone-fly-integration-tests/src/test/java/com/expediagroup/dataplatform/dronefly/core/integration/DroneFlyIntegrationTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020-2025 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.core.integration;
17 |
18 | import static org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType.ADD_PARTITION;
19 | import static org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType.CREATE_TABLE;
20 | import static org.assertj.core.api.Assertions.assertThat;
21 | import static org.awaitility.Awaitility.await;
22 | import static org.junit.jupiter.api.Assertions.fail;
23 |
24 | import static com.expediagroup.apiary.extensions.events.metastore.kafka.messaging.KafkaProducerProperty.BOOTSTRAP_SERVERS;
25 | import static com.expediagroup.apiary.extensions.events.metastore.kafka.messaging.KafkaProducerProperty.CLIENT_ID;
26 | import static com.expediagroup.apiary.extensions.events.metastore.kafka.messaging.KafkaProducerProperty.TOPIC_NAME;
27 | import static com.expediagroup.dataplatform.dronefly.core.integration.DroneFlyIntegrationTestUtils.DATABASE;
28 | import static com.expediagroup.dataplatform.dronefly.core.integration.DroneFlyIntegrationTestUtils.TABLE;
29 | import static com.expediagroup.dataplatform.dronefly.core.integration.DroneFlyIntegrationTestUtils.TOPIC;
30 | import static com.expediagroup.dataplatform.dronefly.core.integration.DroneFlyIntegrationTestUtils.buildPartition;
31 | import static com.expediagroup.dataplatform.dronefly.core.integration.DroneFlyIntegrationTestUtils.buildTable;
32 | import static com.expediagroup.dataplatform.dronefly.core.integration.DroneFlyIntegrationTestUtils.buildTableParameters;
33 | import static com.expediagroup.dataplatform.dronefly.core.integration.DummyListener.EVENT_COUNT_METRIC;
34 |
35 | import java.util.ArrayList;
36 | import java.util.HashMap;
37 | import java.util.Iterator;
38 | import java.util.List;
39 | import java.util.Map;
40 | import java.util.concurrent.BlockingQueue;
41 | import java.util.concurrent.ExecutorService;
42 | import java.util.concurrent.Executors;
43 | import java.util.concurrent.LinkedBlockingQueue;
44 | import java.util.concurrent.TimeUnit;
45 |
46 | import org.apache.hadoop.conf.Configuration;
47 | import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
48 | import org.apache.hadoop.hive.metastore.api.Partition;
49 | import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
50 | import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
51 | import org.apache.hadoop.hive.metastore.events.ListenerEvent;
52 | import org.apache.hadoop.hive.metastore.messaging.EventMessage.EventType;
53 | import org.apache.kafka.clients.consumer.ConsumerRecord;
54 | import org.apache.kafka.common.serialization.StringDeserializer;
55 | import org.awaitility.Duration;
56 | import org.junit.jupiter.api.AfterAll;
57 | import org.junit.jupiter.api.AfterEach;
58 | import org.junit.jupiter.api.BeforeAll;
59 | import org.junit.jupiter.api.Test;
60 | import org.junit.jupiter.api.TestInstance;
61 | import org.junit.jupiter.api.extension.ExtendWith;
62 | import org.mockito.Mock;
63 | import org.springframework.beans.factory.annotation.Autowired;
64 | import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
65 | import org.springframework.kafka.listener.ContainerProperties;
66 | import org.springframework.kafka.listener.KafkaMessageListenerContainer;
67 | import org.springframework.kafka.listener.MessageListener;
68 | import org.springframework.kafka.test.EmbeddedKafkaBroker;
69 | import org.springframework.kafka.test.context.EmbeddedKafka;
70 | import org.springframework.kafka.test.utils.ContainerTestUtils;
71 | import org.springframework.kafka.test.utils.KafkaTestUtils;
72 | import org.springframework.test.context.junit.jupiter.SpringExtension;
73 |
74 | import com.google.common.collect.Lists;
75 |
76 | import com.expediagroup.apiary.extensions.events.metastore.kafka.listener.KafkaMetaStoreEventListener;
77 | import com.expediagroup.dataplatform.dronefly.app.DroneFly;
78 |
79 | @EmbeddedKafka(count = 1, controlledShutdown = true, topics = { TOPIC }, partitions = 1)
80 | @ExtendWith(SpringExtension.class)
81 | @TestInstance(TestInstance.Lifecycle.PER_CLASS)
82 | public class DroneFlyIntegrationTest {
83 |
84 | private @Mock HMSHandler hmsHandler;
85 |
86 | private final ExecutorService executorService = Executors.newFixedThreadPool(1);
87 | private static Configuration CONF = new Configuration();
88 |
89 | private KafkaMetaStoreEventListener kafkaMetaStoreEventListener;
90 |
91 | @Autowired
92 | private EmbeddedKafkaBroker embeddedKafkaBroker;
93 |
94 | private BlockingQueue<ConsumerRecord<String, String>> records;
95 |
96 | private KafkaMessageListenerContainer<String, String> container;
97 |
98 | @BeforeAll
99 | void setUp() throws InterruptedException {
100 | /**
101 | * The function initEmbeddedKafka() is required so that EmbeddedKafka waits for the consumer group assignment to
102 | * complete.
103 | * https://stackoverflow.com/questions/47312373/embeddedkafka-sending-messages-to-consumer-after-delay-in-subsequent-test
104 | */
105 | initEmbeddedKafka();
106 | System.setProperty("instance.name", "test");
107 | System.setProperty("apiary.bootstrap.servers", embeddedKafkaBroker.getBrokersAsString());
108 | System.setProperty("apiary.kafka.topic.name", TOPIC);
109 | System.setProperty("apiary.listener.list", "com.expediagroup.dataplatform.dronefly.core.integration.DummyListener");
110 | initKafkaListener();
111 |
112 | executorService.execute(() -> DroneFly.main(new String[] {}));
113 | await().atMost(Duration.FIVE_MINUTES).until(DroneFly::isRunning);
114 | }
115 |
116 | @AfterEach
117 | public void reset() {
118 | DummyListener.reset();
119 | }
120 |
121 | @AfterAll
122 | public void stop() throws InterruptedException {
123 | DroneFly.stop();
124 | executorService.awaitTermination(5, TimeUnit.SECONDS);
125 | }
126 |
127 | @Test
128 | public void typical() {
129 | AddPartitionEvent addPartitionEvent = new AddPartitionEvent(buildTable(), buildPartition(), true, hmsHandler);
130 | kafkaMetaStoreEventListener.onAddPartition(addPartitionEvent);
131 |
132 | CreateTableEvent createTableEvent = new CreateTableEvent(buildTable(), true, hmsHandler);
133 | kafkaMetaStoreEventListener.onCreateTable(createTableEvent);
134 |
135 | await().atMost(5, TimeUnit.SECONDS).until(() -> DummyListener.getNumEvents() > 1);
136 |
137 | assertThat(DummyListener.getNumEvents()).isEqualTo(2);
138 |
139 | ListenerEvent receivedEventOne = DummyListener.get(0);
140 | ListenerEvent receivedEventTwo = DummyListener.get(1);
141 |
142 | assertEvent(receivedEventOne, ADD_PARTITION);
143 | assertEvent(receivedEventTwo, CREATE_TABLE);
144 | assertThat(EVENT_COUNT_METRIC.count()).isEqualTo(2.0);
145 | }
146 |
147 | private void assertEvent(ListenerEvent event, EventType eventType) {
148 | assertThat(event.getStatus()).isTrue();
149 |
150 | switch (eventType) {
151 | case ADD_PARTITION:
152 | assertThat(event).isInstanceOf(AddPartitionEvent.class);
153 | AddPartitionEvent addPartitionEvent = (AddPartitionEvent) event;
154 | assertThat(addPartitionEvent.getTable().getDbName()).isEqualTo(DATABASE);
155 | assertThat(addPartitionEvent.getTable().getTableName()).isEqualTo(TABLE);
156 | Iterator<Partition> iterator = addPartitionEvent.getPartitionIterator();
157 | List<Partition> partitions = new ArrayList<>();
158 | while (iterator.hasNext()) {
159 | partitions.add(iterator.next());
160 | }
161 | assertThat(partitions).isEqualTo(Lists.newArrayList(buildPartition()));
162 | assertThat(addPartitionEvent.getTable().getParameters()).isEqualTo(buildTableParameters());
163 | break;
164 | case CREATE_TABLE:
165 | assertThat(event).isInstanceOf(CreateTableEvent.class);
166 | CreateTableEvent createTableEvent = (CreateTableEvent) event;
167 | assertThat(createTableEvent.getTable().getDbName()).isEqualTo(DATABASE);
168 | assertThat(createTableEvent.getTable().getTableName()).isEqualTo(TABLE);
169 | break;
170 | default:
171 | fail(String
172 | .format("Received an event with type: {%s} that is different than ADD_PARTITION or CREATE_TABLE.",
173 | eventType));
174 | break;
175 | }
176 | }
177 |
178 | private void initEmbeddedKafka() {
179 | Map<String, Object> configs = new HashMap<>(KafkaTestUtils.consumerProps("consumer", "false", embeddedKafkaBroker));
180 | DefaultKafkaConsumerFactory<String, String> consumerFactory = new DefaultKafkaConsumerFactory<>(configs,
181 | new StringDeserializer(), new StringDeserializer());
182 | ContainerProperties containerProperties = new ContainerProperties(TOPIC);
183 | container = new KafkaMessageListenerContainer<>(consumerFactory, containerProperties);
184 | records = new LinkedBlockingQueue<>();
185 | container.setupMessageListener((MessageListener<String, String>) records::add);
186 | container.start();
187 | ContainerTestUtils.waitForAssignment(container, embeddedKafkaBroker.getPartitionsPerTopic());
188 | }
189 |
190 | private void initKafkaListener() {
191 | CONF.set(BOOTSTRAP_SERVERS.key(), embeddedKafkaBroker.getBrokersAsString());
192 | CONF.set(CLIENT_ID.key(), "apiary-kafka-listener");
193 | CONF.set(TOPIC_NAME.key(), TOPIC);
194 |
195 | kafkaMetaStoreEventListener = new KafkaMetaStoreEventListener(CONF);
196 | }
197 | }
198 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 | 4.0.0
3 |
4 |
5 | com.expediagroup
6 | eg-oss-parent
7 | 3.0.1
8 |
9 |
10 | drone-fly-parent
11 | 1.0.9-SNAPSHOT
12 | 2020
13 | pom
14 | drone-fly
15 | https://github.com/ExpediaGroup/drone-fly
16 |
17 |
18 |
19 | drone-fly-core
20 | drone-fly-app
21 | drone-fly-integration-tests
22 |
23 |
24 |
25 | 3.12.2
26 | 3.1.6
27 | 5.6.0
28 | 3.12.4
29 | 2.7.10
30 | 5.3.25
31 | 1.2.3
32 | 3.2.4
33 | 3.4.3
34 | openjdk
35 | 8-jdk
36 | ${docker.from.image}:${docker.from.tag}
37 | expediagroup
38 | ${project.artifactId}
39 | ${dockerhub.url}/${docker.registry}/${docker.to.image}:${docker.to.tag}
40 | ${project.version}
41 | docker.io
42 | 2.17.1
43 | 1.1.9
44 | 3.1.0
45 | true
46 |
47 |
48 |
49 | scm:git:https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/ExpediaGroup/drone-fly.git
50 | scm:git:https://${GIT_USERNAME}:${GIT_PASSWORD}@github.com/ExpediaGroup/drone-fly.git
51 |
52 | https://github.com/ExpediaGroup/drone-fly
53 | drone-fly-parent-1.0.1
54 |
55 |
56 |
57 |
58 |
59 | org.springframework
60 | spring-core
61 | ${springframework.version}
62 |
63 |
64 | org.springframework
65 | spring-context
66 | ${springframework.version}
67 |
68 |
69 | org.springframework
70 | spring-beans
71 | ${springframework.version}
72 |
73 |
74 | org.springframework
75 | spring-aop
76 | ${springframework.version}
77 |
78 |
79 | org.springframework.boot
80 | spring-boot-dependencies
81 | ${springframework.boot.version}
82 | pom
83 | import
84 |
85 |
86 | ch.qos.logback
87 | logback-core
88 | ${logback.version}
89 |
90 |
91 |
92 |
93 | org.apache.logging.log4j
94 | log4j-core
95 | ${log4j2.version}
96 |
97 |
98 | org.apache.logging.log4j
99 | log4j-api
100 | ${log4j2.version}
101 |
102 |
103 | org.apache.logging.log4j
104 | log4j-web
105 | ${log4j2.version}
106 |
107 |
108 | org.apache.logging.log4j
109 | log4j-jul
110 | ${log4j2.version}
111 |
112 |
113 | org.apache.logging.log4j
114 | log4j-slf4j-impl
115 | ${log4j2.version}
116 |
117 |
118 | org.apache.logging.log4j
119 | log4j-1.2-api
120 | ${log4j2.version}
121 |
122 |
123 | org.apache.logging.log4j
124 | log4j-to-slf4j
125 | ${log4j2.version}
126 |
127 |
128 |
129 | org.awaitility
130 | awaitility
131 | ${awaitility.version}
132 | test
133 |
134 |
135 |
136 |
137 |
138 |
139 | org.junit.jupiter
140 | junit-jupiter
141 | ${junit.jupiter.version}
142 | test
143 |
144 |
145 | org.mockito
146 | mockito-core
147 | ${mockito.version}
148 | test
149 |
150 |
151 | org.mockito
152 | mockito-junit-jupiter
153 | ${mockito.version}
154 | test
155 |
156 |
157 | org.assertj
158 | assertj-core
159 | ${assertj.version}
160 | test
161 |
162 |
163 |
164 |
165 |
166 |
167 |
168 | com.google.cloud.tools
169 | jib-maven-plugin
170 | ${jib.maven.plugin.version}
171 |
172 |
173 | deploy
174 | deploy
175 |
176 | build
177 |
178 |
179 |
180 | package
181 | package
182 |
183 | dockerBuild
184 |
185 |
186 |
187 |
188 |
189 | ${docker.from.reference}
190 |
191 |
192 | amd64
193 | linux
194 |
195 |
196 | arm64
197 | linux
198 |
199 |
200 |
201 |
202 | ${docker.to.reference}
203 |
204 | ${DOCKERHUB_USERNAME}
205 | ${DOCKERHUB_PASSWORD}
206 |
207 |
208 |
209 | USE_CURRENT_TIMESTAMP
210 |
211 |
212 | ${docker.container.port}
213 |
214 |
215 |
216 |
217 |
218 | org.apache.maven.plugins
219 | maven-shade-plugin
220 | ${maven.shade.plugin.version}
221 |
222 |
223 | com.mycila
224 | license-maven-plugin
225 | ${license.maven.plugin.version}
226 |
227 |
228 |
229 | src/main/java/com/expediagroup/dataplatform/dronefly/app/service/ListenerCatalog.java
230 |
231 |
232 |
233 |
234 | org.springframework.boot
235 | spring-boot-maven-plugin
236 | ${springframework.boot.version}
237 |
238 | com.expediagroup.dataplatform.dronefly.app.DroneFly
239 | ZIP
240 |
241 |
245 |
246 | org.apache.hive
247 | hive-common
248 |
249 |
250 |
251 | false
252 | exec
253 | org.mortbay.jetty,org.eclipse.jetty,org.eclipse.jetty.aggregate,org.eclipse.jetty.orbit
254 |
255 |
256 |
257 | org.codehaus.jettison
258 | jettison
259 |
260 |
261 | javax.servlet
262 | servlet-api
263 |
264 |
265 |
266 |
267 |
268 |
269 | repackage
270 |
271 |
272 |
273 |
274 |
275 |
276 |
277 |
278 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/drone-fly-app/src/test/java/com/expediagroup/dataplatform/dronefly/app/service/HiveEventConverterServiceTest.java:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright (C) 2020 Expedia, Inc.
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * http://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 | package com.expediagroup.dataplatform.dronefly.app.service;
17 |
18 | import static org.assertj.core.api.Assertions.assertThat;
19 | import static org.mockito.ArgumentMatchers.any;
20 | import static org.mockito.Mockito.mock;
21 | import static org.mockito.Mockito.when;
22 |
23 | import java.util.Arrays;
24 | import java.util.List;
25 |
26 | import org.apache.hadoop.hive.conf.HiveConf;
27 | import org.apache.hadoop.hive.metastore.HiveMetaStore.HMSHandler;
28 | import org.apache.hadoop.hive.metastore.api.GetTableResult;
29 | import org.apache.hadoop.hive.metastore.api.InsertEventRequestData;
30 | import org.apache.hadoop.hive.metastore.api.MetaException;
31 | import org.apache.hadoop.hive.metastore.api.NoSuchObjectException;
32 | import org.apache.hadoop.hive.metastore.api.Partition;
33 | import org.apache.hadoop.hive.metastore.api.Table;
34 | import org.apache.hadoop.hive.metastore.events.AddPartitionEvent;
35 | import org.apache.hadoop.hive.metastore.events.AlterPartitionEvent;
36 | import org.apache.hadoop.hive.metastore.events.AlterTableEvent;
37 | import org.apache.hadoop.hive.metastore.events.CreateTableEvent;
38 | import org.apache.hadoop.hive.metastore.events.DropPartitionEvent;
39 | import org.apache.hadoop.hive.metastore.events.DropTableEvent;
40 | import org.apache.hadoop.hive.metastore.events.InsertEvent;
41 | import org.junit.jupiter.api.BeforeEach;
42 | import org.junit.jupiter.api.Test;
43 |
44 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryAddPartitionEvent;
45 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryAlterPartitionEvent;
46 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryAlterTableEvent;
47 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryCreateTableEvent;
48 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryDropPartitionEvent;
49 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryDropTableEvent;
50 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryInsertEvent;
51 | import com.expediagroup.apiary.extensions.events.metastore.event.ApiaryListenerEventFactory;
52 | import com.expediagroup.dataplatform.dronefly.app.service.factory.HMSHandlerFactory;
53 |
54 | public class HiveEventConverterServiceTest {
55 |
56 | private static final String APP_NAME = "drone-fly";
57 | private static final String DB_NAME = "test_db";
58 | private static final String TABLE_NAME = "test_table";
59 | private static final String TABLE_LOCATION = "s3://test_location/";
60 | private static final String OLD_TABLE_LOCATION = "s3://old_test_location";
61 | private static final List<String> PARTITION_VALUES = Arrays.asList("p1", "p2");
62 | private static final String PARTITION_LOCATION = "s3://test_location/partition";
63 | private static final String OLD_PARTITION_LOCATION = "s3://old_partition_test_location";
64 |
65 | private final ApiaryListenerEventFactory apiaryListenerEventFactory = new ApiaryListenerEventFactory();
66 | private final Table hiveTable = HiveTableTestUtils.createPartitionedTable(DB_NAME, TABLE_NAME, TABLE_LOCATION);
67 | private final Partition partition = HiveTableTestUtils.newPartition(hiveTable, PARTITION_VALUES, PARTITION_LOCATION);
68 | private HMSHandler hmsHandler;
69 |
70 | private HiveEventConverterService hiveEventConverterService;
71 |
72 | @BeforeEach
73 | public void init() throws MetaException {
74 | hiveEventConverterService = new HiveEventConverterService(new HMSHandlerFactory(new HiveConf()));
75 | }
76 |
77 | @Test
78 | public void createTableEvent() throws MetaException, NoSuchObjectException {
79 | CreateTableEvent createTableEvent = createCreateTableEvent();
80 | ApiaryCreateTableEvent apiaryCreateTableEvent = apiaryListenerEventFactory.create(createTableEvent);
81 | CreateTableEvent result = (CreateTableEvent) hiveEventConverterService.toHiveEvent(apiaryCreateTableEvent);
82 |
83 | assertThat(result.getHandler().getName()).isEqualTo(APP_NAME);
84 | assertThat(result.getTable().getDbName()).isEqualTo(DB_NAME);
85 | assertThat(result.getTable().getTableName()).isEqualTo(TABLE_NAME);
86 | assertThat(result.getTable().getSd().getLocation()).isEqualTo(TABLE_LOCATION);
87 | assertThat(result.getTable().getParameters().get("EXTERNAL")).isEqualTo("TRUE");
88 | }
89 |
90 | @Test
91 | public void dropTableEvent() throws MetaException, NoSuchObjectException {
92 | DropTableEvent dropTableEvent = createDropTableEvent();
93 | ApiaryDropTableEvent apiaryDropTableEvent = apiaryListenerEventFactory.create(dropTableEvent);
94 | DropTableEvent result = (DropTableEvent) hiveEventConverterService.toHiveEvent(apiaryDropTableEvent);
95 |
96 | assertThat(result.getHandler().getName()).isEqualTo(APP_NAME);
97 | assertThat(result.getTable().getDbName()).isEqualTo(DB_NAME);
98 | assertThat(result.getTable().getTableName()).isEqualTo(TABLE_NAME);
99 | assertThat(result.getTable().getSd().getLocation()).isEqualTo(TABLE_LOCATION);
100 | }
101 |
102 | @Test
103 | public void alterTableEvent() throws MetaException, NoSuchObjectException {
104 | AlterTableEvent alterTableEvent = createAlterTableEvent();
105 | ApiaryAlterTableEvent apiaryAlterTableEvent = apiaryListenerEventFactory.create(alterTableEvent);
106 | AlterTableEvent result = (AlterTableEvent) hiveEventConverterService.toHiveEvent(apiaryAlterTableEvent);
107 |
108 | assertThat(result.getHandler().getName()).isEqualTo(APP_NAME);
109 | assertThat(result.getNewTable().getDbName()).isEqualTo(DB_NAME);
110 | assertThat(result.getNewTable().getTableName()).isEqualTo(TABLE_NAME);
111 | assertThat(result.getNewTable().getSd().getLocation()).isEqualTo(TABLE_LOCATION);
112 |
113 | assertThat(result.getOldTable().getDbName()).isEqualTo(DB_NAME);
114 | assertThat(result.getOldTable().getTableName()).isEqualTo(TABLE_NAME);
115 | assertThat(result.getOldTable().getSd().getLocation()).isEqualTo(OLD_TABLE_LOCATION);
116 | }
117 |
118 | @Test
119 | public void addPartitionEvent() throws MetaException, NoSuchObjectException {
120 | AddPartitionEvent addPartitionEvent = createAddPartitionEvent();
121 | ApiaryAddPartitionEvent apiaryAddPartitionEvent = apiaryListenerEventFactory.create(addPartitionEvent);
122 | AddPartitionEvent result = (AddPartitionEvent) hiveEventConverterService.toHiveEvent(apiaryAddPartitionEvent);
123 |
124 | assertThat(result.getHandler().getName()).isEqualTo(APP_NAME);
125 | assertThat(result.getTable().getDbName()).isEqualTo(DB_NAME);
126 | assertThat(result.getTable().getTableName()).isEqualTo(TABLE_NAME);
127 | assertThat(result.getTable().getSd().getLocation()).isEqualTo(TABLE_LOCATION);
128 |
129 | Partition partitionResult = result.getPartitionIterator().next();
130 | assertThat(partitionResult.getValues()).isEqualTo(PARTITION_VALUES);
131 | assertThat(partitionResult.getSd().getLocation()).isEqualTo(PARTITION_LOCATION);
132 | }
133 |
134 | @Test
135 | public void dropPartitionEvent() throws MetaException, NoSuchObjectException {
136 | DropPartitionEvent dropPartitionEvent = createDropPartitionEvent();
137 | ApiaryDropPartitionEvent apiaryDropPartitionEvent = apiaryListenerEventFactory.create(dropPartitionEvent);
138 | DropPartitionEvent result = (DropPartitionEvent) hiveEventConverterService.toHiveEvent(apiaryDropPartitionEvent);
139 |
140 | assertThat(result.getHandler().getName()).isEqualTo(APP_NAME);
141 | assertThat(result.getTable().getDbName()).isEqualTo(DB_NAME);
142 | assertThat(result.getTable().getTableName()).isEqualTo(TABLE_NAME);
143 | assertThat(result.getTable().getSd().getLocation()).isEqualTo(TABLE_LOCATION);
144 |
145 | Partition partitionResult = result.getPartitionIterator().next();
146 | assertThat(partitionResult.getValues()).isEqualTo(PARTITION_VALUES);
147 | assertThat(partitionResult.getSd().getLocation()).isEqualTo(PARTITION_LOCATION);
148 | }
149 |
150 | @Test
151 | public void alterPartitionEvent() throws MetaException, NoSuchObjectException {
152 | AlterPartitionEvent alterPartitionEvent = createAlterPartitionEvent();
153 | ApiaryAlterPartitionEvent apiaryAlterPartitionEvent = apiaryListenerEventFactory.create(alterPartitionEvent);
154 | AlterPartitionEvent result = (AlterPartitionEvent) hiveEventConverterService.toHiveEvent(apiaryAlterPartitionEvent);
155 |
156 | assertThat(result.getHandler().getName()).isEqualTo(APP_NAME);
157 | assertThat(result.getTable().getDbName()).isEqualTo(DB_NAME);
158 | assertThat(result.getTable().getTableName()).isEqualTo(TABLE_NAME);
159 | assertThat(result.getTable().getSd().getLocation()).isEqualTo(TABLE_LOCATION);
160 |
161 | Partition newPartitionResult = result.getNewPartition();
162 | assertThat(newPartitionResult.getValues()).isEqualTo(PARTITION_VALUES);
163 | assertThat(newPartitionResult.getSd().getLocation()).isEqualTo(PARTITION_LOCATION);
164 |
165 | Partition oldPartitionResult = result.getOldPartition();
166 | assertThat(oldPartitionResult.getValues()).isEqualTo(PARTITION_VALUES);
167 | assertThat(oldPartitionResult.getSd().getLocation()).isEqualTo(OLD_PARTITION_LOCATION);
168 | }
169 |
170 | @Test
171 | public void insertEvent() throws MetaException, NoSuchObjectException {
172 | // Mocking here is necessary because of handler.get_table_req(req).getTable() call in InsertEvent constructor.
173 | HMSHandlerFactory hmsHandlerFactory = mock(HMSHandlerFactory.class);
174 | HMSHandler mockHmsHandler = mock(HMSHandler.class);
175 | GetTableResult gtr = mock(GetTableResult.class);
176 | when(mockHmsHandler.getName()).thenReturn(APP_NAME);
177 | when(mockHmsHandler.get_table_req(any())).thenReturn(gtr);
178 | when(gtr.getTable()).thenReturn(hiveTable);
179 | when(hmsHandlerFactory.newInstance()).thenReturn(mockHmsHandler);
180 |
181 | hiveEventConverterService = new HiveEventConverterService(hmsHandlerFactory);
182 |
183 | InsertEvent insertEvent = createInsertEvent(hmsHandlerFactory);
184 | ApiaryInsertEvent apiaryInsertEvent = apiaryListenerEventFactory.create(insertEvent);
185 | InsertEvent result = (InsertEvent) hiveEventConverterService.toHiveEvent(apiaryInsertEvent);
186 |
187 | assertThat(result.getHandler().getName()).isEqualTo(APP_NAME);
188 | assertThat(result.getDb()).isEqualTo(DB_NAME);
189 | assertThat(result.getTable()).isEqualTo(TABLE_NAME);
190 | assertThat(result.getFiles()).isEqualTo(Arrays.asList("file:/a/b.txt", "file:/a/c.txt"));
191 | assertThat(result.getFileChecksums()).isEqualTo(Arrays.asList("123", "456"));
192 | }
193 |
194 | @Test
195 | public void nullEvent() throws MetaException, NoSuchObjectException {
196 | CreateTableEvent result = (CreateTableEvent) hiveEventConverterService.toHiveEvent(null);
197 | assertThat(result).isNull();
198 | }
199 |
200 | private InsertEvent createInsertEvent(HMSHandlerFactory hmsHandlerFactory)
201 | throws MetaException, NoSuchObjectException {
202 | List<String> files = Arrays.asList("file:/a/b.txt", "file:/a/c.txt");
203 | List<String> fileChecksums = Arrays.asList("123", "456");
204 | InsertEventRequestData insertRequestData = new InsertEventRequestData(files);
205 | insertRequestData.setFilesAddedChecksum(fileChecksums);
206 | InsertEvent event = new InsertEvent(DB_NAME, TABLE_NAME, PARTITION_VALUES, insertRequestData, true,
207 | hmsHandlerFactory.newInstance());
208 | return event;
209 | }
210 |
211 | private AddPartitionEvent createAddPartitionEvent() throws MetaException {
212 | AddPartitionEvent event = new AddPartitionEvent(hiveTable, partition, true, hmsHandler);
213 | return event;
214 | }
215 |
216 | private AlterPartitionEvent createAlterPartitionEvent() throws MetaException {
217 | Partition oldPartition = HiveTableTestUtils.newPartition(hiveTable, PARTITION_VALUES, OLD_PARTITION_LOCATION);
218 | AlterPartitionEvent event = new AlterPartitionEvent(oldPartition, partition, hiveTable, true, hmsHandler);
219 | return event;
220 | }
221 |
222 | private DropPartitionEvent createDropPartitionEvent() throws MetaException {
223 | DropPartitionEvent event = new DropPartitionEvent(hiveTable, partition, true, false, hmsHandler);
224 | return event;
225 | }
226 |
227 | private CreateTableEvent createCreateTableEvent() throws MetaException {
228 | CreateTableEvent event = new CreateTableEvent(hiveTable, true, hmsHandler);
229 | return event;
230 | }
231 |
232 | private AlterTableEvent createAlterTableEvent() throws MetaException {
233 | Table oldTable = HiveTableTestUtils.createPartitionedTable(DB_NAME, TABLE_NAME, OLD_TABLE_LOCATION);
234 | AlterTableEvent event = new AlterTableEvent(oldTable, hiveTable, true, hmsHandler);
235 | return event;
236 | }
237 |
238 | private DropTableEvent createDropTableEvent() throws MetaException {
239 | DropTableEvent event = new DropTableEvent(hiveTable, true, false, hmsHandler);
240 | return event;
241 | }
242 |
243 | }
244 |
--------------------------------------------------------------------------------