├── .asf.yaml ├── .gitignore ├── .travis.yml ├── CHANGELOG.md ├── CONTRIBUTING.md ├── Dockerfile ├── LICENSE.txt ├── NOTICE.txt ├── README.md ├── action ├── kafkaFeed.js ├── kafkaFeedWeb.js ├── kafkaFeedWeb_package.json ├── kafkaFeed_package.json ├── kafkaProduce.py ├── lib │ ├── Database.js │ └── common.js ├── messageHubFeed.js ├── messageHubFeedWeb.js ├── messageHubFeedWeb_package.json ├── messageHubFeed_package.json └── messageHubProduce.py ├── build.gradle ├── docs ├── arch │ ├── README.md │ └── images │ │ ├── Arch-Provider-MHV1-Create.png │ │ ├── Arch-Provider-MHV1-Delete.png │ │ ├── Arch-Provider-MHV1-Read.png │ │ ├── Arch-Provider-MHV1-Update.png │ │ └── Arch-Provider.xml └── dev │ └── README.md ├── gradle ├── docker.gradle └── wrapper │ ├── gradle-wrapper.jar │ └── gradle-wrapper.properties ├── gradlew ├── gradlew.bat ├── installCatalog.sh ├── installKafka.sh ├── provider ├── app.py ├── authHandler.py ├── consumer.py ├── consumercollection.py ├── database.py ├── datetimeutils.py ├── health.py ├── service.py └── thedoctor.py ├── settings.gradle ├── tests ├── build.gradle ├── dat │ ├── createTriggerActions.js │ ├── createTriggerActionsFromEncodedMessage.js │ ├── createTriggerActionsFromKey.js │ ├── missingAdminURL.json │ ├── missingBrokers.json │ ├── missingPackageEndpoint.json │ ├── missingPassword.json │ ├── missingTopic.json │ ├── missingUser.json │ └── multipleValueTypes.json └── src │ └── test │ └── scala │ └── system │ ├── health │ └── BasicHealthTest.scala │ ├── packages │ ├── KafkaFeedTests.scala │ ├── KafkaFeedWebTests.scala │ ├── KafkaProduceTests.scala │ ├── MessageHubFeedTests.scala │ ├── MessageHubFeedWebTests.scala │ ├── MessageHubMultiWorkersTest.scala │ ├── MessageHubProduceTests.scala │ ├── MessagingServiceTests.scala │ └── actionHelper.scala │ ├── stress │ └── StressTest.scala │ └── utils │ └── KafkaUtils.scala └── tools ├── .gitignore ├── travis ├── build.sh ├── deploy.sh └── setup.sh └── verifyDBMigration ├── index.js └── package.json /.asf.yaml: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | # 17 | 18 | github: 19 | description: "Apache OpenWhisk package for communicating with Kafka or Message Hub" 20 | homepage: https://openwhisk.apache.org/ 21 | labels: 22 | - openwhisk 23 | - apache 24 | - serverless 25 | - faas 26 | - functions-as-a-service 27 | - cloud 28 | - serverless-architectures 29 | - serverless-functions 30 | protected_branches: 31 | master: 32 | required_status_checks: 33 | strict: false 34 | required_pull_request_reviews: 35 | required_approving_review_count: 1 36 | required_signatures: false 37 | enabled_merge_buttons: 38 | merge: false 39 | squash: true 40 | rebase: true 41 | features: 42 | issues: true 43 | 44 | notifications: 45 | commits: commits@openwhisk.apache.org 46 | issues_status: issues@openwhisk.apache.org 47 | issues_comment: issues@openwhisk.apache.org 48 | pullrequests_status: issues@openwhisk.apache.org 49 | pullrequests_comment: issues@openwhisk.apache.org 50 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | .gradle 2 | launchConfigurations/ 3 | *.pyc 4 | action/*.zip 5 | action/package.json 6 | tests/build 7 | action/node_modules/ 8 | action/package-lock.json 9 | package-lock.json 10 | 11 | # Eclipse 12 | bin/ 13 | **/.project 14 | .settings/ 15 | .classpath 16 | .cache-main 17 | .cache-tests 18 | 19 | .idea/ 20 | out/ 21 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 
16 | # 17 | 18 | sudo: required 19 | dist: xenial 20 | jdk: openjdk8 21 | language: java 22 | services: 23 | - docker 24 | 25 | notifications: 26 | email: false 27 | webhooks: 28 | urls: 29 | # travis2slack webhook to enable DMs on openwhisk-team.slack.com to PR authors with TravisCI results 30 | secure: "tQdW9TAFev+emlmTwNlE1eA4jcaYvsuVQHBGfcX0GZQZY38tTZd6nfF6UJAJbe604915fAHC2KqFXXUPj1Jo0eMcv71+0zIdc7Qp1bj53D6WZpqBJyGnsdRaCP56PYBcXGXz6PURzjWaq43gZKhUafcY3KIHJdygsV5wd4ynVSJR92rOSR29DPedxnzO41HlbVsZ/oAFasaCi3vJCrpapeul7SHJ+Sxbq+syNAMV+iAdzKBMI8KrMCQhTcRwKpVVUYZArX8gXGx7nuveztv4DOJ32MZSaDYeiLCuvxCWJkj4TfLgdpWok9PaUjrBg7HzNsIGAw4Kn0D7Vdy3UPU9gQshYF/4GC2/16BcCF9LDHAOCAkd2gpd/JuQJ11lhNdAKeKtmGw1NoWhi7BRmHUmbKOJcZoWTc+ng82xusIWqxSn5elDryDq16TgAZ3yt8W7nQ4WbuyJtGtKdxbOSYQhpJNZJWwKN1J8LcVIXwSvBEvK0gvxuj9oh7gyKlidU1/Y9wnJYFFDTdkjkTCFk9oTsPzSgolPa3itn05VkUTjMmd0XSjJ4xYFxoSXMOxS5PtPznyDC8O7ziyiV+fv6xy8GvwCGjCrrkhsCACg+kNmkreI9sd4XxWUR0l8scfO6Vcoqj5G1O74T9+ywCSuxv0JnL/A4u7B7j9/06JWpwekNjM=" 31 | 32 | env: 33 | global: 34 | - secure: "hTP+hA+drp7PDzQrHGqzEI0CJvy8b86MBI6bcKMPlSnGZ1/WzsiWBy67Fza8y06MXtFQjo0SIPruBL0l+HfMwTqz/sziVAyap19pzJr1KJr7sRLjXJVKsrll+gCRex9U0E/5Tg4UGzend8lnBFpEX5r4aqRmYPX2iLoV+DilyG2OEpjU88HUtbHeeSv63cZslfNAz4/5X4mMEmf1k+A3n0JEFbQBhYQzq8yLyQ04Tb98byOFpKclVucylEgK7EvKNvvDuqdUNtQgJX/Q0hcuBYLaHYiCrZpqGc69LSKNGnpL//y0m2oWbKV7eZcn9ziIqaFP2JX4+ZO8OKKE/Za1IQTDQPYxV2IG2ZccTYEgphwepNl9NQmF2SdXN8LCwcjSmxE5QufhmMN5jYmXTm0LjJyjfABe+Yvxl92Gu61hMMUd8bCE4jPj4VOLByD/U0j2qbX8H8nE+ysOHKNaeEsn7pmY0pVxOV53eshVKMq1hJHZelYlBAA7aWqj//h/BmqgIJpz8Zaa75fB0iv1MK4iU8zDNMcag1iViClQqg8QJxtkPRoWiEqC42YPVBYAeFRxAGkoKE4AjXCJKYvBelvGgf0quEgEt8hgjjizDcmdFYUz/MezE/Y+66R00m/pOOq1ibATWCeFhakL5Xa7U9YUQkI8hQrNpQleIxZOGWFDps0=" 35 | - secure: "L71uDU+KTMSCG88rOayessvTtj5XWPj8i3uY+zYa45rzntf1d10NRot/XpIRYu21K30UeND+Y+3hr25eIh894lilUTJJxrDqyjpgmUO5AMsnfZ8PsZY6v3YgW9FYuBLDUEHi4SV9crvp+0C/XNCvnRJkbESkyCeeGoUvIY1L8ckqMjX0+C4IPBuu/xSSXoiXwF0aV571wUTC9rvfJxDMQiCzfOx/TeUxPsoSC7FPcg7EWUJrzhvJcmZVbagPiw82lhRp5E6mN7debI09fEKTPVdIXNyoRMPEee+ksrjKxHriEBGQFakV4ROvQCKcK2EJ9Gg1MXb9m4tKruF2fgvQKRYwvgzOjyyESr+sfd5r7YqneApxKW+ZdhdzO20ZanAcUJ5OcFyNtEvdn8+rmXKEo9XQh5VEHWnIAYNmTy1V5mqOygt40LKwB1qrBvCf/TRwEu1rQhuFVwxnnWoxJoH/q2Rk+ycvSRyfODzz3GHHAZyPh8UpP1oAZyKqMowhb1RqZXympYFXkflIMswJJ9LQkA+gs1w9PjafkjbF2RSqDLNgwmP9NkGZdYGdnNOF8veeRAJ/FqgT7fa38sORrtwhdwqU9AVCk1O/FbvT3W16rhn2ewYKmhWAVxGqXU6OXnKvhtcO/RBan9JMED/boWvgpHCoefQpgmGPhOzNk9Qe0kM=" 36 | 37 | before_install: 38 | - ./tools/travis/setup.sh 39 | 40 | install: true 41 | 42 | script: 43 | - ./tools/travis/build.sh 44 | 45 | deploy: 46 | # deploy nightly 47 | - provider: script 48 | script: ./tools/travis/deploy.sh openwhisk kafkaprovider nightly 49 | on: 50 | branch: master 51 | # deploy tag 52 | - provider: script 53 | script: ./tools/travis/deploy.sh openwhisk kafkaprovider $TRAVIS_TAG 54 | on: 55 | tags: true 56 | all_branches: true 57 | -------------------------------------------------------------------------------- /CHANGELOG.md: -------------------------------------------------------------------------------- 1 | 19 | 20 | # Changelog 21 | 22 | ## 2.1.0 23 | + Handle URL params gently (#370) 24 | + Change nodejs:6 to nodejs:default. 
(#369) 25 | + Use Kafka client 1.3.0 (#363) 26 | + Handle cases when authKey does not exist in DB documents (#366) 27 | + Retry failed database changes (#365) 28 | + Do not skip last sequence if an exception occurs (#364) 29 | + Update existing trigger feeds on create instead of failing (#360) 30 | + Allow feed to be deleted if trigger does not exist (#359) 31 | + Change some log levels and allow log level to be set on startup (#357) 32 | + Do not update last canary everytime a database connection attempt occurs (#356) 33 | + Disable triggers for invalid auth when using custom auth handler (#354) 34 | + Ensure proper encoding for improper encoded keys (#353) 35 | + Use API key for authentication when username is token (#350) 36 | + Catch Doctor exceptions and do not persist consumer database connections (#343) 37 | + Disable spare connections (#342) 38 | + Set running state after brokers are connected (#340) 39 | + Reset consumer restart counter after 24 hours (#337) 40 | 41 | ## 2.0.1 42 | + Prevent parsing floats as Infinity and -Infinity (#332) 43 | + Upgrade base Python version (#330) 44 | 45 | ## 2.0.0-incubating 46 | 47 | * First Apache Release 48 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | 19 | [![License](https://img.shields.io/badge/license-Apache--2.0-blue.svg)](http://www.apache.org/licenses/LICENSE-2.0) 20 | 21 | # Contributing to Apache OpenWhisk 22 | 23 | Anyone can contribute to the OpenWhisk project, and we welcome your contributions. 24 | 25 | There are multiple ways to contribute: report bugs, improve the docs, and 26 | contribute code, but you must follow these prerequisites and guidelines: 27 | 28 | - [Contributor License Agreement](#contributor-license-agreement) 29 | - [Raising issues](#raising-issues) 30 | - [Coding Standards](#coding-standards) 31 | 32 | ### Contributor License Agreement 33 | 34 | All contributors must sign and submit an Apache CLA (Contributor License Agreement). 35 | 36 | Instructions on how to do this can be found here: 37 | [http://www.apache.org/licenses/#clas](http://www.apache.org/licenses/#clas) 38 | 39 | Once submitted, you will receive a confirmation email from the Apache Software Foundation (ASF) and be added to 40 | the following list: http://people.apache.org/unlistedclas.html. 41 | 42 | Project committers will use this list to verify pull requests (PRs) come from contributors that have signed a CLA. 43 | 44 | We look forward to your contributions! 45 | 46 | ## Raising issues 47 | 48 | Please raise any bug reports on the respective project repository's GitHub issue tracker. Be sure to search the 49 | list to see if your issue has already been raised. 50 | 51 | A good bug report is one that make it easy for us to understand what you were trying to do and what went wrong. 52 | Provide as much context as possible, so we can try to recreate the issue. 53 | 54 | ### Discussion 55 | 56 | Please use the project's developer email list to engage our community: 57 | [dev@openwhisk.apache.org](dev@openwhisk.apache.org) 58 | 59 | In addition, we provide a "dev" Slack team channel for conversations at: 60 | https://openwhisk-team.slack.com/messages/dev/ 61 | 62 | ### Coding standards 63 | 64 | Please ensure you follow the coding standards used throughout the existing 65 | code base. Some basic rules include: 66 | 67 | - all files must have the Apache license in the header. 
68 | - all PRs must have passing builds for all operating systems. 69 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | 18 | FROM python:2.7.16 19 | 20 | RUN apt-get update && apt-get upgrade -y 21 | 22 | # install librdkafka 23 | ENV LIBRDKAFKA_VERSION 1.3.0 24 | RUN git clone --depth 1 --branch v${LIBRDKAFKA_VERSION} https://github.com/edenhill/librdkafka.git librdkafka \ 25 | && cd librdkafka \ 26 | && ./configure \ 27 | && make \ 28 | && make install \ 29 | && make clean \ 30 | && ./configure --clean 31 | 32 | ENV CPLUS_INCLUDE_PATH /usr/local/include 33 | ENV LIBRARY_PATH /usr/local/lib 34 | ENV LD_LIBRARY_PATH /usr/local/lib 35 | 36 | RUN pip install gevent==1.1.2 flask==1.1.4 confluent-kafka==${LIBRDKAFKA_VERSION} \ 37 | requests==2.10.0 cloudant==2.5.0 psutil==5.0.0 38 | 39 | # while I expect these will be overridden during deployment, we might as well 40 | # set reasonable defaults 41 | ENV PORT 5000 42 | ENV LOCAL_DEV False 43 | ENV GENERIC_KAFKA True 44 | 45 | RUN mkdir -p /KafkaFeedProvider 46 | ADD provider/*.py /KafkaFeedProvider/ 47 | 48 | # Automatically curl the health endpoint every 5 minutes. 49 | # If the endpoint doesn't respond within 30 seconds, kill the main python process. 50 | # As of docker 1.12, a failed healthcheck never results in the container being 51 | # restarted. Killing the main process is a way to make the restart policy kicks in. 52 | HEALTHCHECK --interval=5m --timeout=1m CMD curl -m 30 --fail http://localhost:5000/health || killall python 53 | 54 | CMD ["/bin/bash", "-c", "cd KafkaFeedProvider && python -u app.py"] 55 | 56 | -------------------------------------------------------------------------------- /NOTICE.txt: -------------------------------------------------------------------------------- 1 | Apache OpenWhisk Package Kafka 2 | Copyright 2016-2021 The Apache Software Foundation 3 | 4 | This product includes software developed at 5 | The Apache Software Foundation (http://www.apache.org/). 6 | -------------------------------------------------------------------------------- /action/kafkaFeed.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | const common = require('./lib/common'); 19 | 20 | /** 21 | * Feed to listen to Kafka messages 22 | * @param {string} brokers - array of Kafka brokers 23 | * @param {string} topic - topic to subscribe to 24 | * @param {bool} isJSONData - attempt to parse messages as JSON 25 | * @param {bool} isBinaryKey - encode key as Base64 26 | * @param {bool} isBinaryValue - encode message as Base64 27 | * @param {string} endpoint - address to OpenWhisk deployment (expected to be bound at deployment) 28 | */ 29 | function main(params) { 30 | const endpoint = params.endpoint; 31 | const webActionName = 'kafkaFeedWeb'; 32 | 33 | var massagedParams = common.massageParamsForWeb(params); 34 | massagedParams.triggerName = common.getTriggerFQN(params.triggerName); 35 | 36 | if (params.lifecycleEvent === 'CREATE') { 37 | return common.createTrigger(endpoint, massagedParams, webActionName); 38 | } else if (params.lifecycleEvent === 'READ') { 39 | return common.getTrigger(endpoint, massagedParams, webActionName); 40 | } else if (params.lifecycleEvent === 'UPDATE') { 41 | return common.updateTrigger(endpoint, massagedParams, webActionName); 42 | } else if (params.lifecycleEvent === 'DELETE') { 43 | return common.deleteTrigger(endpoint, massagedParams, webActionName); 44 | } 45 | 46 | return { 47 | error: 'unsupported lifecycleEvent' 48 | }; 49 | } 50 | 51 | exports.main = main; 52 | -------------------------------------------------------------------------------- /action/kafkaFeedWeb.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | const common = require('./lib/common'); 19 | const Database = require('./lib/Database'); 20 | var moment = require('moment'); 21 | 22 | /** 23 | * Feed to listen to Kafka messages 24 | * @param {string} brokers - array of Kafka brokers 25 | * @param {string} topic - topic to subscribe to 26 | * @param {bool} isJSONData - attempt to parse messages as JSON 27 | * @param {bool} isBinaryKey - encode key as Base64 28 | * @param {bool} isBinaryValue - encode message as Base64 29 | * @param {string} endpoint - address to OpenWhisk deployment (expected to be bound at deployment) 30 | * @param {string} DB_URL - URL for the DB, must include authentication (expected to be bound at deployment) 31 | * @param {string} DB_NAME - DB name (expected to be bound at deployment) 32 | */ 33 | function main(params) { 34 | var promise = new Promise((resolve, reject) => { 35 | // hold off initializing this until definitely needed 36 | var db; 37 | 38 | if (params.__ow_method === "post") { 39 | var validatedParams; 40 | return validateParameters(params) 41 | .then(cleanParams => { 42 | validatedParams = cleanParams; 43 | 44 | console.log(`VALIDATED: ${JSON.stringify(validatedParams, null, 2)}`); 45 | db = new Database(params.DB_URL, params.DB_NAME); 46 | 47 | // do these in parallel! 48 | return Promise.all([ 49 | db.ensureTriggerIsUnique(validatedParams.triggerName), 50 | verifyTriggerAuth(validatedParams.triggerURL, params.authKey, true) 51 | ]); 52 | }) 53 | .then(() => { 54 | var workers = (params.workers || []); 55 | return db.getTriggerAssignment(workers) 56 | }) 57 | .then((worker) => { 58 | validatedParams['worker'] = worker; 59 | return db.recordTrigger(validatedParams); 60 | }) 61 | .then(() => { 62 | console.log('successfully wrote the trigger'); 63 | resolve(common.webResponse(200, validatedParams.uuid)); 64 | }) 65 | .catch(error => { 66 | console.log(`Failed to write the trigger ${error}`); 67 | 68 | // defaults to potentially be overridden 69 | var statusCode = 500; 70 | var body = error.toString(); 71 | 72 | if(error.validationError) { 73 | statusCode = 400; 74 | body = error.validationError; 75 | } else if(error.authError) { 76 | statusCode = 401; 77 | body = error.authError; 78 | } 79 | 80 | resolve(common.webResponse(statusCode, body)); 81 | }); 82 | } else if (params.__ow_method === "get") { 83 | const triggerURL = common.getTriggerURL(params.endpoint, params.triggerName); 84 | 85 | return verifyTriggerAuth(triggerURL, params.authKey, true) 86 | .then(() => { 87 | db = new Database(params.DB_URL, params.DB_NAME); 88 | return db.getTrigger(params.triggerName); 89 | }) 90 | .then((triggerDoc) => { 91 | var body = { 92 | config: { 93 | triggerName: triggerDoc.triggerName, 94 | topic: triggerDoc.topic, 95 | isJSONData: triggerDoc.isJSONData, 96 | isBinaryValue: triggerDoc.isBinaryValue, 97 | isBinaryKey: triggerDoc.isBinaryKey, 98 | brokers: triggerDoc.brokers 99 | }, 100 | status: { 101 | active: triggerDoc.status.active, 102 | dateChanged: moment(triggerDoc.status.dateChanged).utc().valueOf(), 103 | dateChangedISO: moment(triggerDoc.status.dateChanged).utc().format(), 104 | reason: triggerDoc.status.reason 105 | } 106 | } 107 | resolve(common.webResponse(200, body, 'application/json')); 108 | }) 109 | .catch(error => { 110 | resolve(common.webResponse(500, error.toString())); 111 | }); 112 | } else if (params.__ow_method === "put") { 113 | const triggerURL = common.getTriggerURL(params.endpoint, params.triggerName); 114 | 115 | return verifyTriggerAuth(triggerURL, params.authKey, 
true) 116 | .then(() => { 117 | db = new Database(params.DB_URL, params.DB_NAME); 118 | return db.getTrigger(params.triggerName); 119 | }) 120 | .then(triggerDoc => { 121 | if (!triggerDoc.status.active) { 122 | return resolve(common.webResponse(400, `${params.triggerName} cannot be updated because it is disabled`)); 123 | } 124 | 125 | return common.performUpdateParameterValidation(params, triggerDoc) 126 | .then(updatedParams => { 127 | return db.disableTrigger(triggerDoc) 128 | .then(() => db.getTrigger(params.triggerName)) 129 | .then(doc => db.updateTrigger(doc, updatedParams)); 130 | }); 131 | }) 132 | .then(() => { 133 | console.log('successfully updated the trigger'); 134 | resolve(common.webResponse(200, 'updated trigger')); 135 | }) 136 | .catch(error => { 137 | console.log(`Failed to update trigger ${error}`); 138 | var statusCode = 500; 139 | var body = error.toString(); 140 | 141 | if (error.validationError) { 142 | statusCode = 400; 143 | body = error.validationError; 144 | } 145 | resolve(common.webResponse(statusCode, body)); 146 | }); 147 | } else if (params.__ow_method === "delete") { 148 | const triggerURL = common.getTriggerURL(params.endpoint, params.triggerName); 149 | 150 | return verifyTriggerAuth(triggerURL, params.authKey, false) 151 | .then(() => { 152 | db = new Database(params.DB_URL, params.DB_NAME); 153 | return db.deleteTrigger(params.triggerName); 154 | }) 155 | .then(() => { 156 | console.log('successfully deleted the trigger'); 157 | resolve(common.webResponse(200, 'deleted trigger')); 158 | }) 159 | .catch(error => { 160 | console.log(`Failed to remove trigger ${error}`); 161 | resolve(common.webResponse(500, error.toString())); 162 | }); 163 | } else { 164 | resolve(common.webResponse(400, 'unsupported lifecycleEvent')); 165 | } 166 | }); 167 | 168 | return promise; 169 | } 170 | 171 | function validateParameters(rawParams) { 172 | var promise = new Promise((resolve, reject) => { 173 | var validatedParams; 174 | 175 | var commonValidationResult = common.performCommonParameterValidation(rawParams); 176 | if(commonValidationResult.validationError) { 177 | reject(commonValidationResult); 178 | return; 179 | } else { 180 | validatedParams = commonValidationResult.validatedParams; 181 | } 182 | 183 | // brokers 184 | if (rawParams.brokers) { 185 | validatedParams.brokers = common.validateBrokerParam(rawParams.brokers); 186 | if (!validatedParams.brokers) { 187 | reject( { validationError: "You must supply a 'brokers' parameter as an array of Message Hub brokers." }); 188 | return; 189 | } 190 | } else { 191 | reject( { validationError: "You must supply a 'brokers' parameter." 
}); 192 | return; 193 | } 194 | 195 | validatedParams.isMessageHub = false; 196 | 197 | resolve(validatedParams); 198 | }); 199 | 200 | return promise; 201 | } 202 | 203 | function verifyTriggerAuth(triggerURL, apiKey, rejectNotFound) { 204 | var auth = apiKey.split(':'); 205 | return common.verifyTriggerAuth(triggerURL, { user: auth[0], pass: auth[1] }, rejectNotFound); 206 | } 207 | 208 | exports.main = main; 209 | -------------------------------------------------------------------------------- /action/kafkaFeedWeb_package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafkaFeedWeb", 3 | "version": "1.0.0", 4 | "main": "kafkaFeedWeb.js", 5 | "dependencies": { 6 | "moment": "^2.26.0", 7 | "nano": "^8.2.2", 8 | "request-promise": "^4.2.5" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /action/kafkaFeed_package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "kafkaFeed", 3 | "version": "1.0.0", 4 | "main": "kafkaFeed.js", 5 | "dependencies": { 6 | "moment": "^2.26.0", 7 | "nano": "^8.2.2", 8 | "request-promise": "^4.2.5" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /action/kafkaProduce.py: -------------------------------------------------------------------------------- 1 | """Kafka message producer. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | """ 20 | 21 | import base64 22 | import logging 23 | import math 24 | import os 25 | import sys 26 | import time 27 | import traceback 28 | 29 | from kafka import KafkaProducer 30 | from kafka.errors import NoBrokersAvailable, KafkaTimeoutError, AuthenticationFailedError 31 | from kafka.version import __version__ 32 | from random import shuffle 33 | 34 | 35 | logging.basicConfig(stream=sys.stdout, level=logging.INFO, 36 | format='%(levelname)-8s %(asctime)s %(message)s', 37 | datefmt='[%H:%M:%S]') 38 | 39 | max_cached_producers = 10 40 | 41 | def main(params): 42 | producer = None 43 | logging.info("Using kafka-python %s", str(__version__)) 44 | 45 | logging.info("Validating parameters") 46 | validationResult = validateParams(params) 47 | if validationResult[0] != True: 48 | return {'error': validationResult[1]} 49 | else: 50 | validatedParams = validationResult[1] 51 | 52 | attempt = 0 53 | max_attempts = 3 54 | 55 | result = {"success": True} 56 | 57 | while attempt < max_attempts: 58 | attempt += 1 59 | logging.info("Starting attempt {}".format(attempt)) 60 | 61 | try: 62 | logging.info("Getting producer") 63 | 64 | # set a client timeout that allows for 3 connection retries while still 65 | # reserving 10s for the actual send 66 | producer_timeout_ms = math.floor(getRemainingTime(reservedTime=10) / max_attempts * 1000) 67 | producer = getProducer(validatedParams, producer_timeout_ms) 68 | 69 | topic = validatedParams['topic'] 70 | logging.info("Finding topic {}".format(topic)) 71 | partition_info = producer.partitions_for(topic) 72 | logging.info("Found topic {} with partition(s) {}".format(topic, partition_info)) 73 | 74 | break 75 | except Exception as e: 76 | if attempt == max_attempts: 77 | producer = None 78 | logging.warning(e) 79 | traceback.print_exc(limit=5) 80 | result = getResultForException(e) 81 | 82 | # we successfully connected and found the topic metadata... let's send! 83 | if producer is not None: 84 | try: 85 | logging.info("Producing message") 86 | 87 | # only use the key parameter if it is present 88 | value = validatedParams['value'] 89 | if 'key' in validatedParams: 90 | messageKey = validatedParams['key'] 91 | future = producer.send( 92 | topic, bytes(value, 'utf-8'), key=bytes(messageKey, 'utf-8')) 93 | else: 94 | future = producer.send(topic, bytes(value, 'utf-8')) 95 | 96 | # future should wait all of the remaining time 97 | future_time_seconds = math.floor(getRemainingTime()) 98 | sent = future.get(timeout=future_time_seconds) 99 | msg = "Successfully sent message to {}:{} at offset {}".format( 100 | sent.topic, sent.partition, sent.offset) 101 | logging.info(msg) 102 | result = {"success": True, "message": msg} 103 | except Exception as e: 104 | logging.warning(e) 105 | traceback.print_exc(limit=5) 106 | result = getResultForException(e) 107 | 108 | return result 109 | 110 | def getResultForException(e): 111 | if isinstance(e, KafkaTimeoutError): 112 | return {'error': 'Timed out communicating with Message Hub'} 113 | elif isinstance(e, AuthenticationFailedError): 114 | return {'error': 'Authentication failed'} 115 | elif isinstance(e, NoBrokersAvailable): 116 | return {'error': 'No brokers available. 
Check that your supplied brokers are correct and available.'} 117 | else: 118 | return {'error': '{}'.format(e)} 119 | 120 | 121 | def validateParams(params): 122 | validatedParams = params.copy() 123 | requiredParams = ['brokers', 'topic', 'value'] 124 | missingParams = [] 125 | 126 | for requiredParam in requiredParams: 127 | if requiredParam not in params: 128 | missingParams.append(requiredParam) 129 | 130 | if len(missingParams) > 0: 131 | return (False, "You must supply all of the following parameters: {}".format(', '.join(missingParams))) 132 | 133 | if isinstance(params['brokers'], str): 134 | # turn it into a List 135 | validatedParams['brokers'] = params['brokers'].split(',') 136 | 137 | shuffle(validatedParams['brokers']) 138 | 139 | if 'base64DecodeValue' in params and params['base64DecodeValue'] == True: 140 | try: 141 | validatedParams['value'] = base64.b64decode(params['value']).decode('utf-8') 142 | except: 143 | return (False, "value parameter is not Base64 encoded") 144 | 145 | if len(validatedParams['value']) == 0: 146 | return (False, "value parameter is not Base64 encoded") 147 | 148 | if 'base64DecodeKey' in params and params['base64DecodeKey'] == True: 149 | try: 150 | validatedParams['key'] = base64.b64decode(params['key']).decode('utf-8') 151 | except: 152 | return (False, "key parameter is not Base64 encoded") 153 | 154 | if len(validatedParams['key']) == 0: 155 | return (False, "key parameter is not Base64 encoded") 156 | 157 | return (True, validatedParams) 158 | 159 | def getProducer(validatedParams, timeout_ms): 160 | connectionHash = getConnectionHash(validatedParams) 161 | 162 | if globals().get("cached_producers") is None: 163 | logging.info("dictionary was None") 164 | globals()["cached_producers"] = dict() 165 | 166 | # remove arbitrary connection to make room for new one 167 | if len(globals()["cached_producers"]) == max_cached_producers: 168 | poppedProducer = globals()["cached_producers"].popitem()[1] 169 | poppedProducer.close(timeout=1) 170 | logging.info("Removed cached producer") 171 | 172 | if connectionHash not in globals()["cached_producers"]: 173 | logging.info("cache miss") 174 | # create a new connection 175 | 176 | producer = KafkaProducer( 177 | api_version_auto_timeout_ms=15000, 178 | batch_size=0, 179 | bootstrap_servers=validatedParams['brokers'], 180 | max_block_ms=timeout_ms, 181 | request_timeout_ms=timeout_ms, 182 | ) 183 | 184 | logging.info("Created producer") 185 | 186 | # store the producer globally for subsequent invocations 187 | globals()["cached_producers"][connectionHash] = producer 188 | 189 | # return it 190 | return producer 191 | else: 192 | logging.info("Reusing existing producer") 193 | return globals()["cached_producers"][connectionHash] 194 | 195 | 196 | def getConnectionHash(params): 197 | # always use the sorted brokers to combat the effects of shuffle() 198 | brokers = params['brokers'] 199 | brokers.sort() 200 | brokersString = ",".join(brokers) 201 | 202 | return brokersString 203 | 204 | # return the remaining time (in seconds) until the action will expire, 205 | # optionally reserving some time (also in seconds). 
206 | def getRemainingTime(reservedTime=0): 207 | deadlineSeconds = int(os.getenv('__OW_DEADLINE', 60000)) / 1000 208 | remaining = deadlineSeconds - time.time() - reservedTime 209 | 210 | # ensure value is at least zero 211 | # yes, this is a little paranoid 212 | return max(remaining, 0) 213 | -------------------------------------------------------------------------------- /action/lib/Database.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | // constructor for DB object - a thin, promise-loving wrapper around nano 19 | module.exports = function(dbURL, dbName) { 20 | var nano = require('nano')(dbURL); 21 | this.db = nano.db.use(dbName); 22 | 23 | const designDoc = "filters"; 24 | const assignmentView = "by-worker"; 25 | 26 | this.getTrigger = function(triggerFQN) { 27 | return new Promise((resolve, reject) => { 28 | this.db.get(triggerFQN, (err, result) => { 29 | if(err) { 30 | reject(err); 31 | } else { 32 | resolve(result); 33 | } 34 | }); 35 | }); 36 | }; 37 | 38 | this.ensureTriggerIsUnique = function(triggerFQN) { 39 | return this.getTrigger(this.db, triggerFQN) 40 | .then(result => { 41 | return Promise.reject('Trigger already exists'); 42 | }) 43 | .catch(err => { 44 | // turn that frown upside-down! 
45 | return true; 46 | }); 47 | }; 48 | 49 | this.recordTrigger = function(params) { 50 | console.log('recording trigger'); 51 | 52 | params['_id'] = params.triggerName; 53 | params['status'] = { 54 | 'active': true, 55 | 'dateChanged': Date.now() 56 | }; 57 | 58 | return new Promise((resolve, reject) => { 59 | this.db.insert(params, (err, result) => { 60 | if(err) { 61 | if(err.statusCode && err.statusCode === 409) { 62 | this.getTrigger(params.triggerName) 63 | .then(doc => this.disableTrigger(doc)) 64 | .then(() => this.getTrigger(params.triggerName)) 65 | .then(doc => this.updateTrigger(params, {_rev: doc._rev})) 66 | .then(result => resolve(result)) 67 | .catch(err => reject(err)); 68 | } else { 69 | reject(err); 70 | } 71 | } else { 72 | resolve(result); 73 | } 74 | }); 75 | }); 76 | }; 77 | 78 | this.deleteTrigger = function(triggerFQN) { 79 | return this.getTrigger(triggerFQN) 80 | .then(doc => { 81 | return new Promise((resolve, reject) => { 82 | this.db.destroy(doc._id, doc._rev, (err, result) => { 83 | if(err) { 84 | reject(err); 85 | } else { 86 | resolve(result); 87 | } 88 | }); 89 | }); 90 | }) 91 | }; 92 | 93 | this.getTriggerAssignment = function(workers) { 94 | 95 | return new Promise((resolve, reject) => { 96 | var assignment = workers[0] || 'worker0'; 97 | 98 | if (workers.length > 1) { 99 | this.db.view(designDoc, assignmentView, {group: true}, (err, result) => { 100 | if (err) { 101 | reject(err); 102 | } else { 103 | // a map between available workers and their number of assigned triggers 104 | // values will be populated with the results of the assignment view 105 | var counter = {}; 106 | workers.forEach(worker => { 107 | counter[worker] = 0; 108 | }); 109 | 110 | // update counter values with the number of assigned triggers 111 | // for each worker 112 | result.rows.forEach(row => { 113 | if (row.key in counter) { 114 | counter[row.key] = row.value; 115 | } 116 | }); 117 | 118 | // find which of the available workers has the least number of 119 | // assigned triggers 120 | for (availableWorker in counter) { 121 | if (counter[availableWorker] < counter[assignment]) { 122 | assignment = availableWorker; 123 | } 124 | } 125 | resolve(assignment); 126 | } 127 | }); 128 | } else { 129 | resolve(assignment); 130 | } 131 | }); 132 | }; 133 | 134 | this.disableTrigger = function(existing) { 135 | return new Promise((resolve, reject) => { 136 | var message = 'Automatically disabled trigger while updating'; 137 | var status = { 138 | 'active': false, 139 | 'dateChanged': Date.now(), 140 | 'reason': {'kind': 'AUTO', 'statusCode': undefined, 'message': message} 141 | }; 142 | existing.status = status; 143 | this.db.insert(existing, (err, result) => { 144 | if (err) { 145 | reject(err); 146 | } else { 147 | resolve(result); 148 | } 149 | }); 150 | }) 151 | }; 152 | 153 | this.updateTrigger = function(existing, params) { 154 | for (var key in params) { 155 | if (params[key] !== undefined) { 156 | existing[key] = params[key]; 157 | } 158 | } 159 | var status = { 160 | 'active': true, 161 | 'dateChanged': Date.now() 162 | }; 163 | existing.status = status; 164 | 165 | return new Promise((resolve, reject) => { 166 | this.db.insert(existing, (err, result) => { 167 | if(err) { 168 | reject(err); 169 | } else { 170 | resolve(result); 171 | } 172 | }); 173 | }); 174 | }; 175 | }; 176 | -------------------------------------------------------------------------------- /action/messageHubFeed.js: -------------------------------------------------------------------------------- 1 | /* 2 | * 
Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | const common = require('./lib/common'); 19 | 20 | /** 21 | * Feed to listen to MessageHub messages 22 | * @param {string} kafka_brokers_sasl - array of Message Hub brokers 23 | * @param {string} username - Kafka username 24 | * @param {string} password - Kafka password 25 | * @param {string} topic - topic to subscribe to 26 | * @param {bool} isJSONData - attempt to parse messages as JSON 27 | * @param {bool} isBinaryKey - encode key as Base64 28 | * @param {bool} isBinaryValue - encode message as Base64 29 | * @param {string} endpoint - address to OpenWhisk deployment (expected to be bound at deployment) 30 | */ 31 | function main(params) { 32 | const endpoint = params.endpoint; 33 | const webActionName = 'messageHubFeedWeb' 34 | 35 | var massagedParams = common.massageParamsForWeb(params); 36 | massagedParams.triggerName = common.getTriggerFQN(params.triggerName); 37 | 38 | var iamKey = process.env.__OW_IAM_NAMESPACE_API_KEY; 39 | massagedParams.authKey = iamKey || params.authKey; 40 | massagedParams.isIamKey = iamKey !== undefined; 41 | 42 | if (massagedParams.isIamKey) { 43 | massagedParams.iamUrl = process.env.__OW_IAM_API_URL; 44 | massagedParams.namespaceCRN = process.env.__OW_NAMESPACE_CRN; 45 | } 46 | 47 | if (params.lifecycleEvent === 'CREATE') { 48 | return common.createTrigger(endpoint, massagedParams, webActionName); 49 | } else if (params.lifecycleEvent === 'READ') { 50 | return common.getTrigger(endpoint, massagedParams, webActionName); 51 | } else if (params.lifecycleEvent === 'UPDATE') { 52 | return common.updateTrigger(endpoint, massagedParams, webActionName); 53 | } else if (params.lifecycleEvent === 'DELETE') { 54 | return common.deleteTrigger(endpoint, massagedParams, webActionName); 55 | } 56 | 57 | return { 58 | error: 'unsupported lifecycleEvent' 59 | }; 60 | } 61 | 62 | exports.main = main; 63 | -------------------------------------------------------------------------------- /action/messageHubFeedWeb_package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "messageHubFeedWeb", 3 | "version": "1.0.0", 4 | "main": "messageHubFeedWeb.js", 5 | "dependencies": { 6 | "@ibm-functions/iam-token-manager": "^1.0.0", 7 | "moment": "^2.26.0", 8 | "nano": "^8.2.2", 9 | "request-promise": "^4.2.5" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /action/messageHubFeed_package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "messageHubFeed", 3 | "version": "1.0.0", 4 | "main": "messageHubFeed.js", 5 | "dependencies": { 6 | "@ibm-functions/iam-token-manager": "^1.0.0", 7 | "moment": 
"^2.26.0", 8 | "nano": "^8.2.2", 9 | "request-promise": "^4.2.5" 10 | } 11 | } 12 | -------------------------------------------------------------------------------- /action/messageHubProduce.py: -------------------------------------------------------------------------------- 1 | """MessageHub producer. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | """ 20 | 21 | import base64 22 | import logging 23 | import math 24 | import os 25 | import ssl 26 | import sys 27 | import time 28 | import traceback 29 | 30 | from kafka import KafkaProducer 31 | from kafka.errors import NoBrokersAvailable, KafkaTimeoutError, AuthenticationFailedError 32 | from kafka.version import __version__ 33 | from random import shuffle 34 | 35 | 36 | logging.basicConfig(stream=sys.stdout, level=logging.INFO, 37 | format='%(levelname)-8s %(asctime)s %(message)s', 38 | datefmt='[%H:%M:%S]') 39 | 40 | max_cached_producers = 10 41 | 42 | def main(params): 43 | producer = None 44 | logging.info("Using kafka-python %s", str(__version__)) 45 | 46 | logging.info("Validating parameters") 47 | validationResult = validateParams(params) 48 | if validationResult[0] != True: 49 | return {'error': validationResult[1]} 50 | else: 51 | validatedParams = validationResult[1] 52 | 53 | attempt = 0 54 | max_attempts = 3 55 | 56 | result = {"success": True} 57 | 58 | while attempt < max_attempts: 59 | attempt += 1 60 | logging.info("Starting attempt {}".format(attempt)) 61 | 62 | try: 63 | logging.info("Getting producer") 64 | 65 | # set a client timeout that allows for 3 connection retries while still 66 | # reserving 10s for the actual send 67 | producer_timeout_ms = math.floor(getRemainingTime(reservedTime=10) / max_attempts * 1000) 68 | producer = getProducer(validatedParams, producer_timeout_ms) 69 | 70 | topic = validatedParams['topic'] 71 | logging.info("Finding topic {}".format(topic)) 72 | partition_info = producer.partitions_for(topic) 73 | logging.info("Found topic {} with partition(s) {}".format(topic, partition_info)) 74 | 75 | break 76 | except Exception as e: 77 | if attempt == max_attempts: 78 | producer = None 79 | logging.warning(e) 80 | traceback.print_exc(limit=5) 81 | result = getResultForException(e) 82 | 83 | # we successfully connected and found the topic metadata... let's send! 
84 | if producer is not None: 85 | try: 86 | logging.info("Producing message") 87 | 88 | # only use the key parameter if it is present 89 | value = validatedParams['value'] 90 | if 'key' in validatedParams: 91 | messageKey = validatedParams['key'] 92 | future = producer.send( 93 | topic, bytes(value, 'utf-8'), key=bytes(messageKey, 'utf-8')) 94 | else: 95 | future = producer.send(topic, bytes(value, 'utf-8')) 96 | 97 | # future should wait all of the remaining time 98 | future_time_seconds = math.floor(getRemainingTime()) 99 | sent = future.get(timeout=future_time_seconds) 100 | msg = "Successfully sent message to {}:{} at offset {}".format( 101 | sent.topic, sent.partition, sent.offset) 102 | logging.info(msg) 103 | result = {"success": True, "message": msg} 104 | except Exception as e: 105 | logging.warning(e) 106 | traceback.print_exc(limit=5) 107 | result = getResultForException(e) 108 | 109 | return result 110 | 111 | def getResultForException(e): 112 | if isinstance(e, KafkaTimeoutError): 113 | return {'error': 'Timed out communicating with Message Hub'} 114 | elif isinstance(e, AuthenticationFailedError): 115 | return {'error': 'Authentication failed'} 116 | elif isinstance(e, NoBrokersAvailable): 117 | return {'error': 'No brokers available. Check that your supplied brokers are correct and available.'} 118 | else: 119 | return {'error': '{}'.format(e)} 120 | 121 | 122 | def validateParams(params): 123 | validatedParams = params.copy() 124 | requiredParams = ['kafka_brokers_sasl', 'user', 'password', 'topic', 'value'] 125 | missingParams = [] 126 | 127 | for requiredParam in requiredParams: 128 | if requiredParam not in params: 129 | missingParams.append(requiredParam) 130 | 131 | if len(missingParams) > 0: 132 | return (False, "You must supply all of the following parameters: {}".format(', '.join(missingParams))) 133 | 134 | if isinstance(params['kafka_brokers_sasl'], str): 135 | # turn it into a List 136 | validatedParams['kafka_brokers_sasl'] = params['kafka_brokers_sasl'].split(',') 137 | 138 | shuffle(validatedParams['kafka_brokers_sasl']) 139 | 140 | if 'base64DecodeValue' in params and params['base64DecodeValue'] == True: 141 | try: 142 | validatedParams['value'] = base64.b64decode(params['value']).decode('utf-8') 143 | except: 144 | return (False, "value parameter is not Base64 encoded") 145 | 146 | if len(validatedParams['value']) == 0: 147 | return (False, "value parameter is not Base64 encoded") 148 | 149 | if 'base64DecodeKey' in params and params['base64DecodeKey'] == True: 150 | try: 151 | validatedParams['key'] = base64.b64decode(params['key']).decode('utf-8') 152 | except: 153 | return (False, "key parameter is not Base64 encoded") 154 | 155 | if len(validatedParams['key']) == 0: 156 | return (False, "key parameter is not Base64 encoded") 157 | 158 | return (True, validatedParams) 159 | 160 | def getProducer(validatedParams, timeout_ms): 161 | connectionHash = getConnectionHash(validatedParams) 162 | 163 | if globals().get("cached_producers") is None: 164 | logging.info("dictionary was None") 165 | globals()["cached_producers"] = dict() 166 | 167 | # remove arbitrary connection to make room for new one 168 | if len(globals()["cached_producers"]) == max_cached_producers: 169 | poppedProducer = globals()["cached_producers"].popitem()[1] 170 | poppedProducer.close(timeout=1) 171 | logging.info("Removed cached producer") 172 | 173 | if connectionHash not in globals()["cached_producers"]: 174 | logging.info("cache miss") 175 | # create a new connection 176 | 
sasl_mechanism = 'PLAIN' 177 | security_protocol = 'SASL_SSL' 178 | 179 | # Create a new context using system defaults, disable all but TLS1.2 180 | context = ssl.create_default_context() 181 | context.options &= ssl.OP_NO_TLSv1 182 | context.options &= ssl.OP_NO_TLSv1_1 183 | 184 | producer = KafkaProducer( 185 | api_version=(0, 10), 186 | batch_size=0, 187 | bootstrap_servers=validatedParams['kafka_brokers_sasl'], 188 | max_block_ms=timeout_ms, 189 | request_timeout_ms=timeout_ms, 190 | sasl_plain_username=validatedParams['user'], 191 | sasl_plain_password=validatedParams['password'], 192 | security_protocol=security_protocol, 193 | ssl_context=context, 194 | sasl_mechanism=sasl_mechanism 195 | ) 196 | 197 | logging.info("Created producer") 198 | 199 | # store the producer globally for subsequent invocations 200 | globals()["cached_producers"][connectionHash] = producer 201 | 202 | # return it 203 | return producer 204 | else: 205 | logging.info("Reusing existing producer") 206 | return globals()["cached_producers"][connectionHash] 207 | 208 | def getConnectionHash(params): 209 | apiKey = "{}:{}".format(params['user'], params['password']) 210 | return apiKey 211 | 212 | # return the remaining time (in seconds) until the action will expire, 213 | # optionally reserving some time (also in seconds). 214 | def getRemainingTime(reservedTime=0): 215 | deadlineSeconds = int(os.getenv('__OW_DEADLINE', 60000)) / 1000 216 | remaining = deadlineSeconds - time.time() - reservedTime 217 | 218 | # ensure value is at least zero 219 | # yes, this is a little paranoid 220 | return max(remaining, 0) 221 | -------------------------------------------------------------------------------- /build.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | ext.dockerImageName = 'catalog_kafkatrigger' 19 | apply from: 'gradle/docker.gradle' 20 | -------------------------------------------------------------------------------- /docs/arch/README.md: -------------------------------------------------------------------------------- 1 | 19 | 20 | ## MessageHub Trigger Provider Architecture 21 | 22 | ### Create Trigger Feed 23 | ![MessageHub Trigger Create](images/Arch-Provider-MHV1-Create.png) 24 | 25 | **Scenario:** User wants to create a trigger `trigger1` for MessageHub service instance `instance1`, using Credentials `Credential-1` and use rule `rule1` to invoke action `action1` with messages from topic `topic1`. 26 | 27 | 1. Developer creates a MessageHub service `instance1` 28 | 2. Developer creates topic `topic1` in MessageHub service `instance1` 29 | 3. Developer creates Credential key `Credential-1` for MessageHub `instance1` 30 | 4. 
Developer creates trigger `trigger1` on OpenWhisk; the trigger stores the annotation `feed` with the feedAction name from the system package or a bound package (`/whisk.system/messagingWeb/messageHubFeed`). 31 | 5. Developer invokes action feedAction to create the trigger feed passing input parameters (lifeCycle:`CREATE`, `trigger1`, Credentials1, Options:`topic1`). 32 | 6. The feedAction invokes feedWebAction forwarding the input parameters. 33 | 7. The feedWebAction inserts the trigger feed doc into the DB for worker group 0 (feedWebAction protects DB credentials). 34 | 8. DB insertion notifies workers group 0 via the Cloudant/CouchDB changes API; workers listen on a DB view with a filter for their group `worker0` and get the DB doc. 35 | 9. A Kafka Consumer is created on each worker in a consumer group and starts polling for messages on `topic1` from `instance1` using `Credentials-1`. 36 | 10. Developer creates `rule1` indicating that when `trigger1` fires, `action1` is invoked. 37 | 11. Event source produces messages on `topic1`. 38 | 12. Both consumers will batch the messages from `topic1` and fire `trigger1`. 39 | - The fire is done with an HTTP request containing the batch of messages in the body. 40 | - A consumer will not poll for more messages and will not commit a batch of messages until the HTTP request gets a response from the OpenWhisk trigger endpoint. 41 | - Consumers in the same consumer group get assigned a set of partitions; each consumer on each worker host will get a unique set of messages, avoiding duplicate messages being included in trigger fires. 42 | 13. OpenWhisk will process the trigger fire for `trigger1`, finds `rule1`, and invokes `action1` with messages from topic `topic1`. 43 | 44 | ### Update Trigger Feed 45 | ![MessageHub Trigger Update](images/Arch-Provider-MHV1-Update.png) 46 | 47 | **Scenario:** User wants to update trigger `trigger1` to change from topic `topic1` to topic `topic2`. 48 | 49 | 1. Developer creates topic `topic2` in MessageHub service `instance1`. 50 | 2. Developer gets the annotation `feed` from trigger `trigger1`. 51 | 3. Developer invokes feedAction to update the trigger feed passing input parameters (lifeCycle:`UPDATE`, `trigger1`, Options:`topic2`). 52 | 4. The feedAction invokes feedWebAction forwarding the input parameters. 53 | 5. The feedWebAction inserts the trigger feed doc into the DB for worker group 0 (feedWebAction protects DB credentials). 54 | 6. DB insertion notifies workers group 0 via the Cloudant/CouchDB changes API; workers listen on a DB view with a filter for their group `worker0` and get the DB doc. 55 | 7. The Kafka Consumer is re-created on each worker in a consumer group and starts polling for messages on `topic2` from `instance1` using `Credentials-1`. 56 | 8. Event source produces messages on `topic2`. 57 | 9. Both consumers will now handle `topic2` instead of `topic1`. 58 | 10. OpenWhisk will process the trigger fire for `trigger1`, finds the rule `rule1` and invokes `action1` with messages from topic `topic2`. 59 | 60 | ### Read Trigger Feed 61 | ![MessageHub Trigger Read](images/Arch-Provider-MHV1-Read.png) 62 | 63 | **Scenario:** User wants to read the configuration and status for trigger `trigger1`. 64 | 65 | 1. Developer gets the annotation `feed` from trigger `trigger1`. 66 | 2. Developer invokes feedAction to read the trigger feed passing input parameters (lifeCycle:`READ`, `trigger1`). 67 | 3. The feedAction invokes feedWebAction forwarding the input parameters. 68 | 4.
The feedWebAction gets the trigger feed doc from the DB (feedWebAction protects DB credentials). 69 | 5. The DB returns the trigger feed doc for `trigger1`. 70 | 6. The feedWebAction returns a response to feedAction. 71 | 7. The feedAction returns the response (config, status) to the Developer. 72 | 73 | ### Delete Trigger Feed 74 | ![MessageHub Trigger Delete](images/Arch-Provider-MHV1-Delete.png) 75 | 76 | **Scenario:** User wants to delete trigger `trigger1`. 77 | 78 | 1. Developer deletes rule `rule1`. 79 | 2. Developer gets the annotation `feed` from trigger `trigger1`. 80 | 3. Developer invokes feedAction to delete the trigger feed passing input parameters (lifeCycle:`DELETE`, `trigger1`). 81 | 4. The feedAction invokes feedWebAction forwarding the input parameters. 82 | 5. The feedWebAction updates the trigger feed doc in the DB with a field `delete:true` (feedWebAction protects DB credentials). 83 | 6. DB update notifies workers group 0 via the Cloudant/CouchDB changes API; workers listen on a DB view with a filter for their group `worker0` and get the DB doc. The Kafka consumers for `trigger1/topic2` get destroyed. 84 | 7. The feedWebAction deletes the trigger feed doc from the DB. 85 | 8. The Developer deletes trigger `trigger1`. 86 | -------------------------------------------------------------------------------- /docs/arch/images/Arch-Provider-MHV1-Create.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/docs/arch/images/Arch-Provider-MHV1-Create.png -------------------------------------------------------------------------------- /docs/arch/images/Arch-Provider-MHV1-Delete.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/docs/arch/images/Arch-Provider-MHV1-Delete.png -------------------------------------------------------------------------------- /docs/arch/images/Arch-Provider-MHV1-Read.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/docs/arch/images/Arch-Provider-MHV1-Read.png -------------------------------------------------------------------------------- /docs/arch/images/Arch-Provider-MHV1-Update.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/docs/arch/images/Arch-Provider-MHV1-Update.png -------------------------------------------------------------------------------- /docs/dev/README.md: -------------------------------------------------------------------------------- 1 | 19 | 20 | # Development and Testing 21 | ## Build 22 | Building the Kafka feed provider is a simple matter of running a `docker build` command from the root of the project. I suggest tagging the image with a memorable name, like "kafkafeedprovider": 23 | 24 | ``` sh 25 | docker build -t kafkafeedprovider . 26 | ``` 27 | 28 | ## Run 29 | Now we need to start the provider service. This is also a simple matter of running a `docker run` command, but the details are a little tricky. The service relies on a number of environment variables in order to operate properly.
-------------------------------------------------------------------------------- /docs/arch/images/Arch-Provider-MHV1-Create.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/docs/arch/images/Arch-Provider-MHV1-Create.png -------------------------------------------------------------------------------- /docs/arch/images/Arch-Provider-MHV1-Delete.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/docs/arch/images/Arch-Provider-MHV1-Delete.png -------------------------------------------------------------------------------- /docs/arch/images/Arch-Provider-MHV1-Read.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/docs/arch/images/Arch-Provider-MHV1-Read.png -------------------------------------------------------------------------------- /docs/arch/images/Arch-Provider-MHV1-Update.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/docs/arch/images/Arch-Provider-MHV1-Update.png -------------------------------------------------------------------------------- /docs/dev/README.md: -------------------------------------------------------------------------------- 1 | 19 | 20 | # Development and Testing 21 | ## Build 22 | Building the Kafka feed provider is a simple matter of running a `docker build` command from the root of the project. I suggest tagging the image with a memorable name, like "kafkafeedprovider": 23 | 24 | ``` sh 25 | docker build -t kafkafeedprovider . 26 | ``` 27 | 28 | ## Run 29 | Now we need to start the provider service. This is also a simple matter of running a `docker run` command, but the details are a little tricky. The service relies on a number of environment variables in order to operate properly. They are outlined below: 30 | 31 | ### Mandatory Environment Variables 32 | |Name|Type|Description| 33 | |---|---|---| 34 | |DB_URL|URL|The base URL for persistent storage (either CouchDB or Cloudant)| 35 | |DB_USER|String|Username for your DB credentials| 36 | |DB_PASS|String|Password for your DB credentials| 37 | 38 | ### Optional Environment Variables 39 | |Name|Type|Description| 40 | |---|---|---| 41 | |INSTANCE|String|A unique identifier for this service. This is useful to differentiate log messages if you run multiple instances of the service| 42 | |LOCAL_DEV|Boolean|If you are using a locally-deployed OpenWhisk core system, it likely has a self-signed certificate. Set `LOCAL_DEV` to `true` to allow firing triggers without checking the certificate validity. *Do not use this for production systems!*| 43 | |PAYLOAD_LIMIT|Integer (default=900000)|The maximum payload size, in bytes, allowed during message batching. This value should be less than your OpenWhisk deployment's payload limit.| 44 | |WORKER|String|The ID of this running instance. Useful when running multiple instances. This should be of the form `workerX`, e.g. `worker0`.| 45 | |DB_PREFIX|String|A prefix to be prepended to the default DB name| 46 | 47 | With that in mind, starting the feed service might look something like: 48 | 49 | ```sh 50 | docker run -e DB_URL=https://myDbHost -e DB_USER=MyDbUser -e DB_PASS=MySuperSecret -e DB_PREFIX=ow_ -p 80:5000 kafkafeedprovider 51 | ``` 52 | 53 | This example will start the provider service with the specified DB details. The container provides a number of RESTful endpoints which can be accessed on port 5000 _inside_ the container. To expose this port to the rest of the world, `-p 80:5000` tells Docker to map port 80 of the host machine to port 5000 inside this new container. 54 | 55 | After issuing the `docker run` command, you can confirm the service started correctly by inspecting the container with a `docker logs` command. 56 | 
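You can also probe the running container over HTTP as a quick sanity check, assuming the `-p 80:5000` mapping from the example above; the `/` and `/health` routes are served by the Flask app in `provider/app.py`:

```sh
# Basic liveness check against the provider container
curl http://localhost/
# Detailed health report (consumers, CPU, memory, disk, canary age)
curl http://localhost/health
```
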
57 | # Install Actions 58 | The provided actions also need to be installed to your OpenWhisk deployment. We have automated this with two different shell scripts, one for Message Hub related actions, and one for generic Kafka related actions. These scripts are `installCatalog.sh` and `installKafka.sh`, respectively. 59 | 60 | Each script requires a number of arguments, which are outlined below: 61 | 62 | |Name|Description| 63 | |---|---| 64 | |authKey|The OpenWhisk auth key to use when installing the actions. Typically this would be the auth key for `whisk.system`| 65 | |edgehost|The IP address or hostname of the OpenWhisk core system.| 66 | |dburl|The full URL (including credentials) of the CouchDB or Cloudant account used by the feed service.| 67 | |dbprefix|A prefix to be prepended to the default DB name (ow_kafka_triggers) that will be created by the provider service.| 68 | |apihost|The hostname or IP address of the core OpenWhisk system that will be used as the hostname for all trigger URLs. In most cases, this will be the same as `edgehost`.| 69 | 70 | An example run might look something like: 71 | 72 | ```sh 73 | ./installKafka.sh MyOpenWhiskAuthKey 10.0.1.5 https://cloudant_user:cloudant_pw@cloudant.com staging_db_prefix 10.0.1.5 74 | ``` 75 | 76 | In addition, when running multiple instances, the following argument is required: 77 | |Name|Description| 78 | |---|---| 79 | |workers|An array of the IDs of the running instances, with each ID of the form `workerX`, e.g. `["worker0", "worker1"]`| 80 | 81 | When running multiple instances, an example run might look something like: 82 | 83 | ```sh 84 | ./installKafka.sh MyOpenWhiskAuthKey 10.0.1.5 https://cloudant_user:cloudant_pw@cloudant.com staging_db_prefix 10.0.1.5 "[\"worker0\", \"worker1\"]" 85 | ``` 86 | 87 | # Testing 88 | To run the automated test suite, you can issue a Gradle command. There are some tests which talk directly to the provider service over REST, and so these tests must know the IP address and port of the running service. This is done by providing the `-Dhealth_url`, `-Dhost` and `-Dport` arguments to Gradle: 89 | 90 | ```sh 91 | ./gradlew :tests:test -Dhealth_url=http://127.0.0.1/health -Dhost=127.0.0.1 -Dport=80 92 | ``` 93 | 94 | The value of `host` must be the IP/hostname of the Docker host running the service provider container, and `port` must be the exposed port number. Additionally, the `OPENWHISK_HOME` environment variable must be set to the root of the local OpenWhisk directory. Ex: `export OPENWHISK_HOME=`. 95 | -------------------------------------------------------------------------------- /gradle/docker.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | import groovy.time.* 19 | 20 | /** 21 | * Utility to build docker images based in gradle projects 22 | * 23 | * This extends gradle's 'application' plugin logic with a 'distDocker' task which builds 24 | * a docker image from the Dockerfile of the project that applies this file. The image 25 | * is automatically tagged and pushed if a tag and/or a registry is given. 26 | * 27 | * Parameters that can be set on project level: 28 | * - dockerImageName (required): The name of the image to build (e.g. controller) 29 | * - dockerRegistry (optional): The registry to push to 30 | * - dockerImageTag (optional, default 'latest'): The tag for the image 31 | * - dockerImagePrefix (optional, default 'whisk'): The prefix for the image, 32 | * 'controller' becomes 'whisk/controller' per default 33 | * - dockerTimeout (optional, default 840): Timeout for docker operations in seconds 34 | * - dockerRetries (optional, default 3): How many times to retry docker operations 35 | * - dockerBinary (optional, default 'docker'): The binary to execute docker commands 36 | * - dockerBuildArgs (optional, default ''): Project specific custom docker build arguments 37 | * - dockerHost (optional): The docker host to run commands on, default behaviour is 38 | * docker's own DOCKER_HOST environment variable 39 | */ 40 | 41 | ext { 42 | dockerRegistry = project.hasProperty('dockerRegistry') ? 
dockerRegistry + '/' : '' 43 | dockerImageTag = project.hasProperty('dockerImageTag') ? dockerImageTag : 'latest' 44 | dockerImagePrefix = project.hasProperty('dockerImagePrefix') ? dockerImagePrefix : 'whisk' 45 | dockerTimeout = project.hasProperty('dockerTimeout') ? dockerTimeout.toInteger() : 840 46 | dockerRetries = project.hasProperty('dockerRetries') ? dockerRetries.toInteger() : 3 47 | dockerBinary = project.hasProperty('dockerBinary') ? [dockerBinary] : ['docker'] 48 | dockerBuildArg = ['build'] 49 | } 50 | ext.dockerTaggedImageName = dockerRegistry + dockerImagePrefix + '/' + dockerImageName + ':' + dockerImageTag 51 | 52 | if(project.hasProperty('dockerHost')) { 53 | dockerBinary += ['--host', project.dockerHost] 54 | } 55 | 56 | if(project.hasProperty('dockerBuildArgs')) { 57 | dockerBuildArgs.each { arg -> 58 | dockerBuildArg += ['--build-arg', arg] 59 | } 60 | } 61 | 62 | task distDocker { 63 | doLast { 64 | def start = new Date() 65 | def cmd = dockerBinary + dockerBuildArg + ['-t', dockerImageName, project.buildscript.sourceFile.getParentFile().getAbsolutePath()] 66 | retry(cmd, dockerRetries, dockerTimeout) 67 | println("Building '${dockerImageName}' took ${TimeCategory.minus(new Date(), start)}") 68 | } 69 | } 70 | 71 | task distDockerCoverage() { 72 | doLast { 73 | def start = new Date() 74 | //Copy the scoverage runtime jars 75 | copy {from configurations.scoverage - configurations.compile; into "build/tmp/docker-coverage/ext-lib"} 76 | //Copy the scoverage prepared jars 77 | coverageDirs.each {dir -> 78 | copy {from file(dir); into "build/tmp/docker-coverage/classes"} 79 | } 80 | 81 | def buildArgs = [ 82 | "OW_ROOT_DIR=${project.rootProject.projectDir.absolutePath}" 83 | ] 84 | def dockerImageNameOrig = dockerImageName 85 | dockerImageName = "$dockerImageName-cov" 86 | 87 | //Use absolute paths for dockerFile and build directory 88 | String dockerFileDir = project.buildscript.sourceFile.getParentFile().getAbsolutePath() 89 | String dockerFile = "$dockerFileDir/Dockerfile.cov" 90 | 91 | def cmd = dockerBinary + prepareBuildArgs(buildArgs) + ['-f', dockerFile, '-t', dockerImageName, dockerFileDir] 92 | retry(cmd, dockerRetries, dockerTimeout) 93 | println("Building '${dockerImageName}' took ${TimeCategory.minus(new Date(), start)}") 94 | 95 | //Replace the original image with coverage one 96 | project.ext.dockerTaggedImageName = dockerImagePrefix + '/' + dockerImageNameOrig + ':' + "cov" 97 | } 98 | finalizedBy('tagImage') 99 | } 100 | 101 | def prepareBuildArgs(List buildArgs) { 102 | def result = ['build'] 103 | if(project.hasProperty('dockerBuildArgs')) { 104 | buildArgs.addAll(dockerBuildArgs) 105 | } 106 | buildArgs.each {arg -> 107 | result += ['--build-arg', arg] 108 | } 109 | result 110 | } 111 | 112 | task tagImage { 113 | doLast { 114 | def versionString = (dockerBinary + ['-v']).execute().text 115 | def matched = (versionString =~ /(\d+)\.(\d+)\.(\d+)/) 116 | 117 | def major = matched[0][1] as int 118 | def minor = matched[0][2] as int 119 | 120 | def dockerCmd = ['tag'] 121 | if(major == 1 && minor < 12) { 122 | dockerCmd += ['-f'] 123 | } 124 | retry(dockerBinary + dockerCmd + [dockerImageName, dockerTaggedImageName], dockerRetries, dockerTimeout) 125 | } 126 | } 127 | 128 | task pushImage { 129 | doLast { 130 | def cmd = dockerBinary + ['push', dockerTaggedImageName] 131 | retry(cmd, dockerRetries, dockerTimeout) 132 | } 133 | } 134 | pushImage.dependsOn tagImage 135 | pushImage.onlyIf { dockerRegistry != '' } 136 | distDocker.finalizedBy pushImage 137 | 
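// Usage sketch (illustrative comment only, not used by the build itself): the project
// properties documented in the header above can be supplied on the command line with -P,
// for example something like
//
//   ./gradlew distDocker -PdockerRegistry=registry.example.com -PdockerImageTag=mytag
//
// which builds the image and, because a registry was given, tags it as
// <registry>/<prefix>/<name>:<tag> and pushes it (see pushImage.onlyIf above).
// The registry and tag values shown here are placeholders.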
138 | def retry(cmd, retries, timeout) { 139 | println("${new Date()}: Executing '${cmd.join(" ")}'") 140 | def proc = cmd.execute() 141 | proc.consumeProcessOutput(System.out, System.err) 142 | proc.waitForOrKill(timeout * 1000) 143 | if(proc.exitValue() != 0) { 144 | def message = "${new Date()}: Command '${cmd.join(" ")}' failed with exitCode ${proc.exitValue()}" 145 | if(proc.exitValue() == 143) { // 143 means the process was killed (SIGTERM signal) 146 | message = "${new Date()}: Command '${cmd.join(" ")}' was killed after ${timeout} seconds" 147 | } 148 | 149 | if(retries > 1) { 150 | println("${message}, ${retries-1} retries left, retrying...") 151 | retry(cmd, retries-1, timeout) 152 | } 153 | else { 154 | println("${message}, no more retries left, aborting...") 155 | throw new GradleException(message) 156 | } 157 | } 158 | } 159 | -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.jar: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/apache/openwhisk-package-kafka/194f1ddf5f393139d0b5c9c263287ae2ae57dbdb/gradle/wrapper/gradle-wrapper.jar -------------------------------------------------------------------------------- /gradle/wrapper/gradle-wrapper.properties: -------------------------------------------------------------------------------- 1 | # 2 | # Licensed to the Apache Software Foundation (ASF) under one or more 3 | # contributor license agreements. See the NOTICE file distributed with 4 | # this work for additional information regarding copyright ownership. 5 | # The ASF licenses this file to You under the Apache License, Version 2.0 6 | # (the "License"); you may not use this file except in compliance with 7 | # the License. You may obtain a copy of the License at 8 | # 9 | # http://www.apache.org/licenses/LICENSE-2.0 10 | # 11 | # Unless required by applicable law or agreed to in writing, software 12 | # distributed under the License is distributed on an "AS IS" BASIS, 13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | # See the License for the specific language governing permissions and 15 | # limitations under the License. 16 | # 17 | distributionBase=GRADLE_USER_HOME 18 | distributionPath=wrapper/dists 19 | zipStoreBase=GRADLE_USER_HOME 20 | zipStorePath=wrapper/dists 21 | distributionUrl=https\://services.gradle.org/distributions/gradle-5.5.1-bin.zip 22 | -------------------------------------------------------------------------------- /gradlew: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env sh 2 | 3 | # 4 | # Copyright 2015 the original author or authors. 5 | # 6 | # Licensed under the Apache License, Version 2.0 (the "License"); 7 | # you may not use this file except in compliance with the License. 8 | # You may obtain a copy of the License at 9 | # 10 | # http://www.apache.org/licenses/LICENSE-2.0 11 | # 12 | # Unless required by applicable law or agreed to in writing, software 13 | # distributed under the License is distributed on an "AS IS" BASIS, 14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 15 | # See the License for the specific language governing permissions and 16 | # limitations under the License. 
17 | # 18 | 19 | ############################################################################## 20 | ## 21 | ## Gradle start up script for UN*X 22 | ## 23 | ############################################################################## 24 | 25 | # Attempt to set APP_HOME 26 | # Resolve links: $0 may be a link 27 | PRG="$0" 28 | # Need this for relative symlinks. 29 | while [ -h "$PRG" ] ; do 30 | ls=`ls -ld "$PRG"` 31 | link=`expr "$ls" : '.*-> \(.*\)$'` 32 | if expr "$link" : '/.*' > /dev/null; then 33 | PRG="$link" 34 | else 35 | PRG=`dirname "$PRG"`"/$link" 36 | fi 37 | done 38 | SAVED="`pwd`" 39 | cd "`dirname \"$PRG\"`/" >/dev/null 40 | APP_HOME="`pwd -P`" 41 | cd "$SAVED" >/dev/null 42 | 43 | APP_NAME="Gradle" 44 | APP_BASE_NAME=`basename "$0"` 45 | 46 | # Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 47 | DEFAULT_JVM_OPTS='-Dfile.encoding=UTF-8' 48 | 49 | # Use the maximum available, or set MAX_FD != -1 to use that value. 50 | MAX_FD="maximum" 51 | 52 | warn () { 53 | echo "$*" 54 | } 55 | 56 | die () { 57 | echo 58 | echo "$*" 59 | echo 60 | exit 1 61 | } 62 | 63 | # OS specific support (must be 'true' or 'false'). 64 | cygwin=false 65 | msys=false 66 | darwin=false 67 | nonstop=false 68 | case "`uname`" in 69 | CYGWIN* ) 70 | cygwin=true 71 | ;; 72 | Darwin* ) 73 | darwin=true 74 | ;; 75 | MINGW* ) 76 | msys=true 77 | ;; 78 | NONSTOP* ) 79 | nonstop=true 80 | ;; 81 | esac 82 | 83 | CLASSPATH=$APP_HOME/gradle/wrapper/gradle-wrapper.jar 84 | 85 | # Determine the Java command to use to start the JVM. 86 | if [ -n "$JAVA_HOME" ] ; then 87 | if [ -x "$JAVA_HOME/jre/sh/java" ] ; then 88 | # IBM's JDK on AIX uses strange locations for the executables 89 | JAVACMD="$JAVA_HOME/jre/sh/java" 90 | else 91 | JAVACMD="$JAVA_HOME/bin/java" 92 | fi 93 | if [ ! -x "$JAVACMD" ] ; then 94 | die "ERROR: JAVA_HOME is set to an invalid directory: $JAVA_HOME 95 | 96 | Please set the JAVA_HOME variable in your environment to match the 97 | location of your Java installation." 98 | fi 99 | else 100 | JAVACMD="java" 101 | which java >/dev/null 2>&1 || die "ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 102 | 103 | Please set the JAVA_HOME variable in your environment to match the 104 | location of your Java installation." 105 | fi 106 | 107 | # Increase the maximum file descriptors if we can. 108 | if [ "$cygwin" = "false" -a "$darwin" = "false" -a "$nonstop" = "false" ] ; then 109 | MAX_FD_LIMIT=`ulimit -H -n` 110 | if [ $? -eq 0 ] ; then 111 | if [ "$MAX_FD" = "maximum" -o "$MAX_FD" = "max" ] ; then 112 | MAX_FD="$MAX_FD_LIMIT" 113 | fi 114 | ulimit -n $MAX_FD 115 | if [ $? 
-ne 0 ] ; then 116 | warn "Could not set maximum file descriptor limit: $MAX_FD" 117 | fi 118 | else 119 | warn "Could not query maximum file descriptor limit: $MAX_FD_LIMIT" 120 | fi 121 | fi 122 | 123 | # For Darwin, add options to specify how the application appears in the dock 124 | if $darwin; then 125 | GRADLE_OPTS="$GRADLE_OPTS \"-Xdock:name=$APP_NAME\" \"-Xdock:icon=$APP_HOME/media/gradle.icns\"" 126 | fi 127 | 128 | # For Cygwin, switch paths to Windows format before running java 129 | if $cygwin ; then 130 | APP_HOME=`cygpath --path --mixed "$APP_HOME"` 131 | CLASSPATH=`cygpath --path --mixed "$CLASSPATH"` 132 | JAVACMD=`cygpath --unix "$JAVACMD"` 133 | 134 | # We build the pattern for arguments to be converted via cygpath 135 | ROOTDIRSRAW=`find -L / -maxdepth 1 -mindepth 1 -type d 2>/dev/null` 136 | SEP="" 137 | for dir in $ROOTDIRSRAW ; do 138 | ROOTDIRS="$ROOTDIRS$SEP$dir" 139 | SEP="|" 140 | done 141 | OURCYGPATTERN="(^($ROOTDIRS))" 142 | # Add a user-defined pattern to the cygpath arguments 143 | if [ "$GRADLE_CYGPATTERN" != "" ] ; then 144 | OURCYGPATTERN="$OURCYGPATTERN|($GRADLE_CYGPATTERN)" 145 | fi 146 | # Now convert the arguments - kludge to limit ourselves to /bin/sh 147 | i=0 148 | for arg in "$@" ; do 149 | CHECK=`echo "$arg"|egrep -c "$OURCYGPATTERN" -` 150 | CHECK2=`echo "$arg"|egrep -c "^-"` ### Determine if an option 151 | 152 | if [ $CHECK -ne 0 ] && [ $CHECK2 -eq 0 ] ; then ### Added a condition 153 | eval `echo args$i`=`cygpath --path --ignore --mixed "$arg"` 154 | else 155 | eval `echo args$i`="\"$arg\"" 156 | fi 157 | i=$((i+1)) 158 | done 159 | case $i in 160 | (0) set -- ;; 161 | (1) set -- "$args0" ;; 162 | (2) set -- "$args0" "$args1" ;; 163 | (3) set -- "$args0" "$args1" "$args2" ;; 164 | (4) set -- "$args0" "$args1" "$args2" "$args3" ;; 165 | (5) set -- "$args0" "$args1" "$args2" "$args3" "$args4" ;; 166 | (6) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" ;; 167 | (7) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" ;; 168 | (8) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" ;; 169 | (9) set -- "$args0" "$args1" "$args2" "$args3" "$args4" "$args5" "$args6" "$args7" "$args8" ;; 170 | esac 171 | fi 172 | 173 | # Escape application args 174 | save () { 175 | for i do printf %s\\n "$i" | sed "s/'/'\\\\''/g;1s/^/'/;\$s/\$/' \\\\/" ; done 176 | echo " " 177 | } 178 | APP_ARGS=$(save "$@") 179 | 180 | # Collect all arguments for the java command, following the shell quoting and substitution rules 181 | eval set -- $DEFAULT_JVM_OPTS $JAVA_OPTS $GRADLE_OPTS "\"-Dorg.gradle.appname=$APP_BASE_NAME\"" -classpath "\"$CLASSPATH\"" org.gradle.wrapper.GradleWrapperMain "$APP_ARGS" 182 | 183 | # by default we should be in the correct project dir, but when run from Finder on Mac, the cwd is wrong 184 | if [ "$(uname)" = "Darwin" ] && [ "$HOME" = "$PWD" ]; then 185 | cd "$(dirname "$0")" 186 | fi 187 | 188 | exec "$JAVACMD" "$@" 189 | -------------------------------------------------------------------------------- /gradlew.bat: -------------------------------------------------------------------------------- 1 | @rem 2 | @rem Copyright 2015 the original author or authors. 3 | @rem 4 | @rem Licensed under the Apache License, Version 2.0 (the "License"); 5 | @rem you may not use this file except in compliance with the License. 
6 | @rem You may obtain a copy of the License at 7 | @rem 8 | @rem http://www.apache.org/licenses/LICENSE-2.0 9 | @rem 10 | @rem Unless required by applicable law or agreed to in writing, software 11 | @rem distributed under the License is distributed on an "AS IS" BASIS, 12 | @rem WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | @rem See the License for the specific language governing permissions and 14 | @rem limitations under the License. 15 | @rem 16 | 17 | @if "%DEBUG%" == "" @echo off 18 | @rem ########################################################################## 19 | @rem 20 | @rem Gradle startup script for Windows 21 | @rem 22 | @rem ########################################################################## 23 | 24 | @rem Set local scope for the variables with windows NT shell 25 | if "%OS%"=="Windows_NT" setlocal 26 | 27 | set DIRNAME=%~dp0 28 | if "%DIRNAME%" == "" set DIRNAME=. 29 | set APP_BASE_NAME=%~n0 30 | set APP_HOME=%DIRNAME% 31 | 32 | @rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script. 33 | set DEFAULT_JVM_OPTS=-Dfile.encoding=UTF-8 34 | 35 | @rem Find java.exe 36 | if defined JAVA_HOME goto findJavaFromJavaHome 37 | 38 | set JAVA_EXE=java.exe 39 | %JAVA_EXE% -version >NUL 2>&1 40 | if "%ERRORLEVEL%" == "0" goto init 41 | 42 | echo. 43 | echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH. 44 | echo. 45 | echo Please set the JAVA_HOME variable in your environment to match the 46 | echo location of your Java installation. 47 | 48 | goto fail 49 | 50 | :findJavaFromJavaHome 51 | set JAVA_HOME=%JAVA_HOME:"=% 52 | set JAVA_EXE=%JAVA_HOME%/bin/java.exe 53 | 54 | if exist "%JAVA_EXE%" goto init 55 | 56 | echo. 57 | echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME% 58 | echo. 59 | echo Please set the JAVA_HOME variable in your environment to match the 60 | echo location of your Java installation. 61 | 62 | goto fail 63 | 64 | :init 65 | @rem Get command-line arguments, handling Windows variants 66 | 67 | if not "%OS%" == "Windows_NT" goto win9xME_args 68 | 69 | :win9xME_args 70 | @rem Slurp the command line arguments. 71 | set CMD_LINE_ARGS= 72 | set _SKIP=2 73 | 74 | :win9xME_args_slurp 75 | if "x%~1" == "x" goto execute 76 | 77 | set CMD_LINE_ARGS=%* 78 | 79 | :execute 80 | @rem Setup the command line 81 | 82 | set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar 83 | 84 | @rem Execute Gradle 85 | "%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS% 86 | 87 | :end 88 | @rem End local scope for the variables with windows NT shell 89 | if "%ERRORLEVEL%"=="0" goto mainEnd 90 | 91 | :fail 92 | rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of 93 | rem the _cmd.exe /c_ return code! 94 | if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1 95 | exit /b 1 96 | 97 | :mainEnd 98 | if "%OS%"=="Windows_NT" endlocal 99 | 100 | :omega 101 | -------------------------------------------------------------------------------- /installCatalog.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Licensed to the Apache Software Foundation (ASF) under one or more 5 | # contributor license agreements. See the NOTICE file distributed with 6 | # this work for additional information regarding copyright ownership. 
7 | # The ASF licenses this file to You under the Apache License, Version 2.0 8 | # (the "License"); you may not use this file except in compliance with 9 | # the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | # 19 | 20 | # use the command line interface to install standard actions deployed 21 | # automatically 22 | 23 | # To run this command 24 | # ./installCatalog.sh 25 | # authkey and apihost are found in $HOME/.wskprops 26 | 27 | set -e 28 | set -x 29 | 30 | : ${OPENWHISK_HOME:?"OPENWHISK_HOME must be set and non-empty"} 31 | WSK_CLI="$OPENWHISK_HOME/bin/wsk" 32 | 33 | if [ $# -eq 0 ] 34 | then 35 | echo "Usage: ./installCatalog.sh " 36 | fi 37 | 38 | AUTH="$1" 39 | EDGEHOST="$2" 40 | DB_URL="$3" 41 | DB_NAME="${4}ow_kafka_triggers" 42 | APIHOST="$5" 43 | WORKERS="$6" 44 | INSTALL_PRODUCE_ACTION=${INSTALL_PRODUCE_ACTION:="true"} 45 | ACTION_RUNTIME_VERSION=${ACTION_RUNTIME_VERSION:="nodejs:default"} 46 | 47 | # If the auth key file exists, read the key in the file. Otherwise, take the 48 | # first argument as the key itself. 49 | if [ -f "$AUTH" ]; then 50 | AUTH=`cat $AUTH` 51 | fi 52 | 53 | # Make sure that the APIHOST is not empty. 54 | : ${APIHOST:?"APIHOST must be set and non-empty"} 55 | 56 | PACKAGE_HOME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 57 | 58 | export WSK_CONFIG_FILE= # override local property file to avoid namespace clashes 59 | 60 | echo Installing the Message Hub package and feed action. 
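# The call below creates or updates the shared 'messaging' package. Its 'parameters'
# annotation documents the bind-time inputs expected by the Message Hub feed
# (kafka_brokers_sasl, user, password, topic, kafka_admin_url, endpoint), and the
# 'endpoint' parameter is bound to the API host passed to this script.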
61 | 62 | $WSK_CLI -i --apihost "$EDGEHOST" package update messaging \ 63 | --auth "$AUTH" \ 64 | --shared yes \ 65 | -a parameters '[ {"name":"kafka_brokers_sasl", "required":true, "updatable":false, "description": "Array of Message Hub brokers", "bindTime":true},{"name":"user", "required":true, "updatable":false, "description": "Message Hub username", "bindTime":true},{"name":"password", "required":true, "updatable": false, "description": "Message Hub password", "bindTime":true, "type":"password"},{"name":"topic", "required":true, "updatable":false, "description": "Topic to subscribe to"},{"name":"isJSONData", "required":false, "updatable":true, "description": "Attempt to parse message value as JSON"},{"name":"isBinaryKey", "required":false, "updatable":true, "description": "Encode key as Base64"},{"name":"isBinaryValue", "required":false, "updatable":true, "description": "Encode message value as Base64"},{"name":"endpoint", "required":true, "updatable":false, "description": "Hostname and port of OpenWhisk deployment"},{"name":"kafka_admin_url", "required":true, "updatable":false, "description": "Your Message Hub admin REST URL", "bindTime":true}]' \ 66 | -p bluemixServiceName 'messagehub' \ 67 | -p endpoint "$APIHOST" 68 | 69 | # make messageHubFeed.zip 70 | OLD_PATH=`pwd` 71 | cd action 72 | 73 | if [ -e messageHubFeed.zip ] 74 | then 75 | rm -rf messageHubFeed.zip 76 | fi 77 | 78 | cp -f messageHubFeed_package.json package.json 79 | rm -rf node_modules 80 | npm install 81 | zip -r messageHubFeed.zip lib package.json messageHubFeed.js node_modules -q 82 | 83 | $WSK_CLI -i --apihost "$EDGEHOST" action update --kind "$ACTION_RUNTIME_VERSION" messaging/messageHubFeed "$PACKAGE_HOME/action/messageHubFeed.zip" \ 84 | --auth "$AUTH" \ 85 | -a feed true \ 86 | -a description 'Feed to list to Message Hub messages' \ 87 | -a parameters '[ {"name":"kafka_brokers_sasl", "required":true, "description": "Array of Message Hub brokers"},{"name":"user", "required":true, "description": "Message Hub username"},{"name":"password", "required":true, "description": "Message Hub password", "type":"password"},{"name":"topic", "required":true, "description": "Topic to subscribe to"},{"name":"isJSONData", "required":false, "description": "Attempt to parse message value as JSON"},{"name":"isBinaryKey", "required":false, "description": "Encode key as Base64"},{"name":"isBinaryValue", "required":false, "description": "Encode message value as Base64"},{"name":"endpoint", "required":true, "description": "Hostname and port of OpenWhisk deployment"},{"name":"kafka_admin_url", "required":true, "description": "Your Message Hub admin REST URL"}]' \ 88 | -a sampleInput '{"kafka_brokers_sasl":"[\"kafka01-prod01.messagehub.services.us-south.bluemix.net:9093\"]", "username":"someUsername", "password":"somePassword", "topic":"mytopic", "isJSONData": "false", "endpoint":"openwhisk.ng.bluemix.net", "kafka_admin_url":"https://kafka-admin-prod01.messagehub.services.us-south.bluemix.net:443"}' 89 | 90 | # create messagingWeb package and web version of feed action 91 | if [ -n "$WORKERS" ]; 92 | then 93 | $WSK_CLI -i --apihost "$EDGEHOST" package update messagingWeb \ 94 | --auth "$AUTH" \ 95 | --shared no \ 96 | -p endpoint "$APIHOST" \ 97 | -p DB_URL "$DB_URL" \ 98 | -p DB_NAME "$DB_NAME" \ 99 | -p workers "$WORKERS" 100 | else 101 | $WSK_CLI -i --apihost "$EDGEHOST" package update messagingWeb \ 102 | --auth "$AUTH" \ 103 | --shared no \ 104 | -p endpoint "$APIHOST" \ 105 | -p DB_URL "$DB_URL" \ 106 | -p DB_NAME "$DB_NAME" 107 | 
fi 108 | 109 | # make messageHubFeedWeb.zip 110 | 111 | if [ -e messageHubFeedWeb.zip ] 112 | then 113 | rm -rf messageHubFeedWeb.zip 114 | fi 115 | 116 | cp -f messageHubFeedWeb_package.json package.json 117 | rm -rf node_modules 118 | npm install 119 | zip -r messageHubFeedWeb.zip lib package.json messageHubFeedWeb.js node_modules -q 120 | 121 | cd $OLD_PATH 122 | 123 | 124 | $WSK_CLI -i --apihost "$EDGEHOST" action update --kind "$ACTION_RUNTIME_VERSION" messagingWeb/messageHubFeedWeb "$PACKAGE_HOME/action/messageHubFeedWeb.zip" \ 125 | --auth "$AUTH" \ 126 | --web true \ 127 | -a description 'Write a new trigger to MH provider DB' \ 128 | -a parameters '[ {"name":"kafka_brokers_sasl", "required":true, "description": "Array of Message Hub brokers"},{"name":"user", "required":true, "description": "Message Hub username"},{"name":"password", "required":true, "description": "Message Hub password", "type":"password"},{"name":"topic", "required":true, "description": "Topic to subscribe to"},{"name":"isJSONData", "required":false, "description": "Attempt to parse message value as JSON"},{"name":"isBinaryKey", "required":false, "description": "Encode key as Base64"},{"name":"isBinaryValue", "required":false, "description": "Encode message value as Base64"},{"name":"endpoint", "required":true, "description": "Hostname and port of OpenWhisk deployment"},{"name":"kafka_admin_url", "required":true, "description": "Your Message Hub admin REST URL"}]' 129 | 130 | if [ $INSTALL_PRODUCE_ACTION == "true" ]; then 131 | $WSK_CLI -i --apihost "$EDGEHOST" action update messaging/messageHubProduce "$PACKAGE_HOME/action/messageHubProduce.py" \ 132 | --auth "$AUTH" \ 133 | --kind python:3 \ 134 | -a deprecated true \ 135 | -a description 'Deprecated - Produce a message to Message Hub' \ 136 | -a parameters '[ {"name":"kafka_brokers_sasl", "required":true, "description": "Array of Message Hub brokers"},{"name":"user", "required":true, "description": "Message Hub username"},{"name":"password", "required":true, "description": "Message Hub password", "type":"password"},{"name":"topic", "required":true, "description": "Topic that you wish to produce a message to"},{"name":"value", "required":true, "description": "The value for the message you want to produce"},{"name":"key", "required":false, "description": "The key for the message you want to produce"},{"name":"base64DecodeValue", "required":false, "description": "If true, the message will be produced with a Base64 decoded version of the value parameter"},{"name":"base64DecodeKey", "required":false, "description": "If true, the message will be produced with a Base64 decoded version of the key parameter"}]' \ 137 | -a sampleInput '{"kafka_brokers_sasl":"[\"kafka01-prod01.messagehub.services.us-south.bluemix.net:9093\"]", "username":"someUsername", "password":"somePassword", "topic":"mytopic", "value": "This is my message"}' 138 | fi 139 | -------------------------------------------------------------------------------- /installKafka.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Licensed to the Apache Software Foundation (ASF) under one or more 5 | # contributor license agreements. See the NOTICE file distributed with 6 | # this work for additional information regarding copyright ownership. 7 | # The ASF licenses this file to You under the Apache License, Version 2.0 8 | # (the "License"); you may not use this file except in compliance with 9 | # the License. 
You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | # 19 | 20 | # use the command line interface to install standard actions deployed 21 | # automatically 22 | 23 | # To run this command 24 | # ./installKafka.sh 25 | # authkey and apihost are found in $HOME/.wskprops 26 | 27 | set -e 28 | set -x 29 | 30 | : ${OPENWHISK_HOME:?"OPENWHISK_HOME must be set and non-empty"} 31 | WSK_CLI="$OPENWHISK_HOME/bin/wsk" 32 | 33 | if [ $# -eq 0 ] 34 | then 35 | echo "Usage: ./installKafka.sh " 36 | fi 37 | 38 | AUTH="$1" 39 | EDGEHOST="$2" 40 | DB_URL="$3" 41 | DB_NAME="${4}ow_kafka_triggers" 42 | APIHOST="$5" 43 | WORKERS="$6" 44 | 45 | # If the auth key file exists, read the key in the file. Otherwise, take the 46 | # first argument as the key itself. 47 | if [ -f "$AUTH" ]; then 48 | AUTH=`cat $AUTH` 49 | fi 50 | 51 | # Make sure that the APIHOST is not empty. 52 | : ${APIHOST:?"APIHOST must be set and non-empty"} 53 | 54 | PACKAGE_HOME="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" 55 | 56 | export WSK_CONFIG_FILE= # override local property file to avoid namespace clashes 57 | 58 | echo Installing the Kafka package and feed action. 59 | 60 | $WSK_CLI -i --apihost "$EDGEHOST" package update messaging \ 61 | --auth "$AUTH" \ 62 | --shared yes \ 63 | -p endpoint "$APIHOST" 64 | 65 | # make kafkaFeed.zip 66 | OLD_PATH=`pwd` 67 | cd action 68 | 69 | if [ -e kafkaFeed.zip ] 70 | then 71 | rm -rf kafkaFeed.zip 72 | fi 73 | 74 | cp -f kafkaFeed_package.json package.json 75 | rm -rf node_modules 76 | npm install 77 | zip -r kafkaFeed.zip lib package.json kafkaFeed.js node_modules 78 | cd $OLD_PATH 79 | 80 | $WSK_CLI -i --apihost "$EDGEHOST" action update --kind nodejs:default messaging/kafkaFeed "$PACKAGE_HOME/action/kafkaFeed.zip" \ 81 | --auth "$AUTH" \ 82 | -a feed true \ 83 | -a description 'Feed to listen to Kafka messages' \ 84 | -a parameters '[ {"name":"brokers", "required":true, "updatable":false, "description": "Array of Kafka brokers"}, {"name":"topic", "required":true, "updatable":false, "description": "Topic to subscribe to"}, {"name":"isJSONData", "required":false, "updatable":true, "description": "Attempt to parse message value as JSON"}, {"name":"isBinaryKey", "required":false, "updatable":true, "description": "Encode key as Base64"}, {"name":"isBinaryValue", "required":false, "updatable":true, "description": "Encode message value as Base64"}, {"name":"endpoint", "required":true, "updatable":false, "description": "Hostname and port of OpenWhisk deployment"}]' \ 85 | -a sampleInput '{"brokers":"[\"127.0.0.1:9093\"]", "topic":"mytopic", "isJSONData":"false", "endpoint": "openwhisk.ng.bluemix.net"}' 86 | 87 | # create messagingWebDedicated package and web version of feed action 88 | $WSK_CLI -i --apihost "$EDGEHOST" package update messagingWebDedicated \ 89 | --auth "$AUTH" \ 90 | --shared no \ 91 | -p endpoint "$APIHOST" 92 | 93 | # rebind package 94 | $WSK_CLI -i --apihost "$EDGEHOST" package delete messagingWeb --auth "$AUTH" 95 | if [ -n "$WORKERS" ]; 96 | then 97 | $WSK_CLI -i --apihost "$EDGEHOST" package bind messagingWebDedicated messagingWeb \ 98 | --auth "$AUTH" \ 99 | -p endpoint "$APIHOST" 
\ 100 | -p DB_URL "$DB_URL" \ 101 | -p DB_NAME "$DB_NAME" \ 102 | -p workers "$WORKERS" 103 | else 104 | $WSK_CLI -i --apihost "$EDGEHOST" package bind messagingWebDedicated messagingWeb \ 105 | --auth "$AUTH" \ 106 | -p endpoint "$APIHOST" \ 107 | -p DB_URL "$DB_URL" \ 108 | -p DB_NAME "$DB_NAME" 109 | fi 110 | 111 | # make kafkaFeedWeb.zip 112 | OLD_PATH=`pwd` 113 | cd action 114 | 115 | if [ -e kafkaFeedWeb.zip ] 116 | then 117 | rm -rf kafkaFeedWeb.zip 118 | fi 119 | 120 | cp -f kafkaFeedWeb_package.json package.json 121 | rm -rf node_modules 122 | npm install 123 | zip -r kafkaFeedWeb.zip lib package.json kafkaFeedWeb.js node_modules 124 | 125 | cd $OLD_PATH 126 | 127 | 128 | $WSK_CLI -i --apihost "$EDGEHOST" action update --kind nodejs:default messagingWebDedicated/kafkaFeedWeb "$PACKAGE_HOME/action/kafkaFeedWeb.zip" \ 129 | --auth "$AUTH" \ 130 | --web true \ 131 | -a description 'Write a new trigger to Kafka provider DB' \ 132 | -a parameters '[ {"name":"brokers", "required":true, "description": "Array of Kafka brokers"},{"name":"topic", "required":true, "description": "Topic to subscribe to"},{"name":"isJSONData", "required":false, "description": "Attempt to parse message value as JSON"},{"name":"isBinaryKey", "required":false, "description": "Encode key as Base64"},{"name":"isBinaryValue", "required":false, "description": "Encode message value as Base64"},{"name":"endpoint", "required":true, "description": "Hostname and port of OpenWhisk deployment"}]' 133 | 134 | $WSK_CLI -i --apihost "$EDGEHOST" action update messaging/kafkaProduce "$PACKAGE_HOME/action/kafkaProduce.py" \ 135 | --auth "$AUTH" \ 136 | --kind python:3 \ 137 | -a deprecated true \ 138 | -a description 'Deprecated - Produce a message to a Kafka cluster' \ 139 | -a parameters '[ {"name":"brokers", "required":true, "description": "Array of Kafka brokers"},{"name":"topic", "required":true, "description": "Topic that you want to produce a message to"},{"name":"value", "required":true, "description": "The value for the message you want to produce"},{"name":"key", "required":false, "description": "The key for the message you want to produce"},{"name":"base64DecodeValue", "required":false, "description": "If true, the message will be produced with a Base64 decoded version of the value parameter"},{"name":"base64DecodeKey", "required":false, "description": "If true, the message will be produced with a Base64 decoded version of the key parameter"}]' \ 140 | -a sampleInput '{"brokers":"[\"127.0.0.1:9093\"]", "topic":"mytopic", "value": "This is my message"}' 141 | -------------------------------------------------------------------------------- /provider/app.py: -------------------------------------------------------------------------------- 1 | """Flask application. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | """ 20 | 21 | import logging 22 | import os 23 | 24 | from flask import Flask, jsonify 25 | from consumercollection import ConsumerCollection 26 | from database import Database 27 | from thedoctor import TheDoctor 28 | from health import generateHealthReport 29 | from gevent.wsgi import WSGIServer 30 | from service import Service 31 | 32 | 33 | app = Flask(__name__) 34 | app.debug = False 35 | 36 | database = None 37 | consumers = ConsumerCollection() 38 | feedService = None 39 | 40 | 41 | @app.route('/') 42 | def testRoute(): 43 | return jsonify('Hi!') 44 | 45 | 46 | # TODO call TheDoctor.isAlive() and report on that 47 | @app.route('/health') 48 | def healthRoute(): 49 | return jsonify(generateHealthReport(consumers, feedService.lastCanaryTime)) 50 | 51 | 52 | def main(): 53 | logLevels = { 54 | "info": logging.INFO, 55 | "debug": logging.DEBUG, 56 | "error": logging.ERROR, 57 | "warning": logging.WARNING, 58 | "critical": logging.CRITICAL 59 | } 60 | logger = logging.getLogger() 61 | logger.setLevel(logLevels.get(os.getenv('LOG_LEVEL', "info"))) 62 | 63 | component = os.getenv('INSTANCE', 'messageHubTrigger-0') 64 | 65 | # Make sure we log to the console 66 | streamHandler = logging.StreamHandler() 67 | formatter = logging.Formatter('[%(asctime)s.%(msecs)03dZ] [%(levelname)s] [??] [kafkatriggers] %(message)s', datefmt="%Y-%m-%dT%H:%M:%S") 68 | streamHandler.setFormatter(formatter) 69 | logger.addHandler(streamHandler) 70 | 71 | # also log to file if /logs is present 72 | if os.path.isdir('/logs'): 73 | fh = logging.FileHandler('/logs/{}_logs.log'.format(component)) 74 | fh.setFormatter(formatter) 75 | logger.addHandler(fh) 76 | 77 | local_dev = os.getenv('LOCAL_DEV', 'False') 78 | logging.debug('LOCAL_DEV is {} {}'.format(local_dev, type(local_dev))) 79 | global check_ssl 80 | check_ssl = (local_dev == 'False') 81 | logging.info('check_ssl is {} {}'.format(check_ssl, type(check_ssl))) 82 | 83 | generic_kafka = os.getenv('GENERIC_KAFKA', 'True') 84 | logging.debug('GENERIC_KAFKA is {} {}'.format(generic_kafka, type(generic_kafka))) 85 | global enable_generic_kafka 86 | enable_generic_kafka = (generic_kafka == 'True') 87 | logging.info('enable_generic_kafka is {} {}'.format(enable_generic_kafka, type(enable_generic_kafka))) 88 | 89 | global database 90 | database = Database() 91 | database.migrate() 92 | 93 | TheDoctor(consumers).start() 94 | 95 | global feedService 96 | feedService = Service(consumers) 97 | feedService.start() 98 | 99 | port = int(os.getenv('PORT', 5000)) 100 | server = WSGIServer(('', port), app, log=logging.getLogger()) 101 | server.serve_forever() 102 | 103 | if __name__ == '__main__': 104 | main() 105 | -------------------------------------------------------------------------------- /provider/authHandler.py: -------------------------------------------------------------------------------- 1 | """IAMAuth class. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. 
You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | """ 20 | 21 | import requests 22 | import time 23 | 24 | from requests.auth import AuthBase 25 | 26 | class AuthHandlerException(Exception): 27 | def __init__(self, response): 28 | self.response = response 29 | 30 | class IAMAuth(AuthBase): 31 | 32 | def __init__(self, authKey, endpoint): 33 | self.authKey = authKey 34 | self.endpoint = endpoint 35 | self.tokenInfo = {} 36 | 37 | def __call__(self, r): 38 | r.headers['Authorization'] = 'Bearer {}'.format(self.__getToken()) 39 | return r 40 | 41 | def __getToken(self): 42 | if 'expires_in' not in self.tokenInfo or self.__isRefreshTokenExpired(): 43 | response = self.__requestToken() 44 | if response.ok and 'access_token' in response.json(): 45 | self.tokenInfo = response.json() 46 | return self.tokenInfo['access_token'] 47 | else: 48 | raise AuthHandlerException(response) 49 | elif self.__isTokenExpired(): 50 | response = self.__refreshToken() 51 | if response.ok and 'access_token' in response.json(): 52 | self.tokenInfo = response.json() 53 | return self.tokenInfo['access_token'] 54 | else: 55 | raise AuthHandlerException(response) 56 | else: 57 | return self.tokenInfo['access_token'] 58 | 59 | def __requestToken(self): 60 | headers = { 61 | 'Content-type': 'application/x-www-form-urlencoded', 62 | 'Authorization': 'Basic Yng6Yng=' 63 | } 64 | payload = { 65 | 'grant_type': 'urn:ibm:params:oauth:grant-type:apikey', 66 | 'apikey': self.authKey 67 | } 68 | 69 | return self.__sendRequest(payload, headers) 70 | 71 | def __refreshToken(self): 72 | headers = { 73 | 'Content-type': 'application/x-www-form-urlencoded', 74 | 'Authorization': 'Basic Yng6Yng=' 75 | } 76 | payload = { 77 | 'grant_type': 'refresh_token', 78 | 'refresh_token': self.tokenInfo['refresh_token'] 79 | } 80 | 81 | return self.__sendRequest(payload, headers) 82 | 83 | 84 | def __isTokenExpired(self): 85 | if 'expires_in' not in self.tokenInfo or 'expiration' not in self.tokenInfo: 86 | return True 87 | 88 | fractionOfTtl = 0.8 89 | timeToLive = self.tokenInfo['expires_in'] 90 | expireTime = self.tokenInfo['expiration'] 91 | currentTime = int(time.time()) 92 | refreshTime = expireTime - (timeToLive * (1.0 - fractionOfTtl)) 93 | 94 | return refreshTime < currentTime 95 | 96 | def __isRefreshTokenExpired(self): 97 | if 'expiration' not in self.tokenInfo: 98 | return True 99 | 100 | sevenDays = 7 * 24 * 3600 101 | currentTime = int(time.time()) 102 | newTokenTime = self.tokenInfo['expiration'] + sevenDays 103 | 104 | return newTokenTime < currentTime 105 | 106 | def __sendRequest(self, payload, headers): 107 | response = requests.post(self.endpoint, data=payload, headers=headers) 108 | return response 109 | -------------------------------------------------------------------------------- /provider/consumercollection.py: -------------------------------------------------------------------------------- 1 | """ConsumerCollection class. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. 
See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | """ 20 | 21 | from threading import Lock 22 | 23 | # basically a thread safe wrapper around dict 24 | # I'm told that dict is already thread safe, but 25 | # this is just for my own piece of mind. 26 | 27 | 28 | class ConsumerCollection: 29 | 30 | def __init__(self): 31 | self.consumers = dict() 32 | self.lock = Lock() 33 | 34 | def getCopyForRead(self): 35 | with self.lock: 36 | copy = self.consumers.copy() 37 | 38 | return copy 39 | 40 | def hasConsumerForTrigger(self, triggerFQN): 41 | with self.lock: 42 | hasConsumer = triggerFQN in self.consumers 43 | 44 | return hasConsumer 45 | 46 | def getConsumerForTrigger(self, triggerFQN): 47 | with self.lock: 48 | consumer = self.consumers.get(triggerFQN) 49 | 50 | return consumer 51 | 52 | def addConsumerForTrigger(self, triggerFQN, consumer): 53 | with self.lock: 54 | self.consumers[triggerFQN] = consumer 55 | 56 | def removeConsumerForTrigger(self, triggerFQN): 57 | with self.lock: 58 | del self.consumers[triggerFQN] 59 | -------------------------------------------------------------------------------- /provider/database.py: -------------------------------------------------------------------------------- 1 | """Database class. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | """ 20 | 21 | import logging 22 | import os 23 | import time 24 | 25 | from cloudant.client import CouchDB 26 | from cloudant.client import CouchDatabase 27 | from cloudant.result import Result 28 | from datetime import datetime 29 | 30 | 31 | class Database: 32 | db_prefix = os.getenv('DB_PREFIX', '') 33 | dbname = db_prefix + 'ow_kafka_triggers' 34 | 35 | username = os.environ['DB_USER'] 36 | password = os.environ['DB_PASS'] 37 | url = os.environ['DB_URL'] 38 | 39 | filters_design_doc_id = '_design/filters' 40 | only_triggers_view_id = 'only-triggers' 41 | by_worker_view_id = 'by-worker' 42 | 43 | instance = os.getenv('INSTANCE', 'messageHubTrigger-0') 44 | canaryId = "canary-{}".format(instance) 45 | 46 | def __init__(self, timeout=None): 47 | self.client = CouchDB(self.username, self.password, url=self.url, timeout=timeout, auto_renew=True) 48 | self.client.connect() 49 | 50 | self.database = CouchDatabase(self.client, self.dbname) 51 | 52 | if self.database.exists(): 53 | logging.info('Database exists - connecting to it.') 54 | else: 55 | logging.warn('Database does not exist - creating it.') 56 | self.database.create() 57 | 58 | def destroy(self): 59 | if self.client is not None: 60 | self.client.disconnect() 61 | self.client = None 62 | 63 | def disableTrigger(self, triggerFQN, status_code, message='Automatically disabled after receiving a {} status code when firing the trigger.'): 64 | try: 65 | document = self.database[triggerFQN] 66 | 67 | if document.exists(): 68 | logging.info('Found trigger to disable from DB: {}'.format(triggerFQN)) 69 | 70 | status = { 71 | 'active': False, 72 | 'dateChanged': long(time.time() * 1000), 73 | 'reason': { 74 | 'kind': 'AUTO', 75 | 'statusCode': status_code, 76 | 'message': message.format(status_code) 77 | } 78 | } 79 | 80 | document['status'] = status 81 | document.save() 82 | 83 | logging.info('{} Successfully recorded trigger as disabled.'.format(triggerFQN)) 84 | except Exception as e: 85 | logging.error('[{}] Uncaught exception while disabling trigger: {}'.format(triggerFQN, e)) 86 | 87 | def changesFeed(self, timeout, since=None): 88 | if since == None: 89 | return self.database.infinite_changes(include_docs=True, heartbeat=(timeout*1000)) 90 | else: 91 | return self.database.infinite_changes(include_docs=True, heartbeat=(timeout*1000), since=since) 92 | 93 | def createCanary(self): 94 | maxRetries = 3 95 | retryCount = 0 96 | 97 | while retryCount < maxRetries: 98 | try: 99 | if self.canaryId in self.database.keys(remote=True): 100 | # update the timestamp to cause a document change 101 | logging.debug("[database] Canary doc exists, updating it.") 102 | 103 | myCanaryDocument = self.database[self.canaryId] 104 | myCanaryDocument["canary-timestamp"] = datetime.now().isoformat() 105 | myCanaryDocument.save() 106 | 107 | return 108 | else: 109 | # create the canary doc for this instance 110 | logging.debug("[database] Canary doc does not exist, creating it.") 111 | 112 | document = dict() 113 | document['_id'] = self.canaryId 114 | document['canary-timestamp'] = datetime.now().isoformat() 115 | 116 | result = self.database.create_document(document) 117 | logging.debug('[canary] Successfully wrote canary to DB') 118 | 119 | return 120 | except Exception as e: 121 | retryCount += 1 122 | logging.error( 123 | '[canary] Uncaught exception while writing canary document: {}'.format(e)) 124 | 125 | logging.error('[canary] Retried and failed {} times to create a canary'.format(maxRetries)) 126 | 127 | def migrate(self): 128 | 
logging.info('Starting DB migration') 129 | 130 | by_worker_view = { 131 | 'map': """function(doc) { 132 | if(doc.triggerURL && (!doc.status || doc.status.active)) { 133 | emit(doc.worker || 'worker0', 1); 134 | } 135 | }""", 136 | 'reduce': '_count' 137 | } 138 | 139 | filtersDesignDoc = self.database.get_design_document(self.filters_design_doc_id) 140 | 141 | if filtersDesignDoc.exists(): 142 | if self.by_worker_view_id not in filtersDesignDoc["views"]: 143 | filtersDesignDoc["views"][self.by_worker_view_id] = by_worker_view 144 | logging.info('Updating the design doc') 145 | filtersDesignDoc.save() 146 | else: 147 | logging.info('Creating the design doc') 148 | 149 | self.database.create_document({ 150 | '_id': self.filters_design_doc_id, 151 | 'views': { 152 | self.only_triggers_view_id: { 153 | 'map': """function (doc) { 154 | if(doc.triggerURL) { 155 | emit(doc._id, 1); 156 | } 157 | }""" 158 | }, 159 | self.by_worker_view_id: by_worker_view 160 | } 161 | }) 162 | 163 | logging.info('Database migration complete') 164 | -------------------------------------------------------------------------------- /provider/datetimeutils.py: -------------------------------------------------------------------------------- 1 | """datetime utilities. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | """ 20 | 21 | from datetime import datetime 22 | 23 | 24 | def secondsSince(someDateTime): 25 | delta = datetime.now() - someDateTime 26 | return delta.total_seconds() 27 | -------------------------------------------------------------------------------- /provider/health.py: -------------------------------------------------------------------------------- 1 | """Health reporter. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | """ 20 | 21 | # https://pythonhosted.org/psutil/ 22 | import psutil 23 | 24 | from datetime import datetime 25 | from datetimeutils import secondsSince 26 | 27 | MILLISECONDS_IN_SECOND = 1000 28 | MEGABYTE = 10 ** 6 29 | START_TIME = datetime.now() 30 | CPU_INTERVAL = 0.5 31 | 32 | 33 | def getSwapMemory(): 34 | total, used, free, percent, sin, sout = psutil.swap_memory() 35 | swap_memory = {} 36 | swap_memory['total'] = '%d MB' % (total / MEGABYTE) 37 | swap_memory['used'] = '%d MB' % (used / MEGABYTE) 38 | swap_memory['free'] = '%d MB' % (free / MEGABYTE) 39 | swap_memory['percent'] = '%d MB' % (percent / MEGABYTE) 40 | swap_memory['sin'] = '%d MB' % (sin / MEGABYTE) 41 | swap_memory['sout'] = '%d MB' % (sout / MEGABYTE) 42 | 43 | return swap_memory 44 | 45 | 46 | def getVirtualMemory(): 47 | total, available, percent, used, free, active, inactive, buffers, cached, shared = psutil.virtual_memory() 48 | virtual_memory = {} 49 | virtual_memory['total'] = '%d MB' % (total / MEGABYTE) 50 | virtual_memory['available'] = '%d MB' % (available / MEGABYTE) 51 | virtual_memory['used'] = '%d MB' % (used / MEGABYTE) 52 | virtual_memory['free'] = '%d MB' % (free / MEGABYTE) 53 | virtual_memory['percent'] = '%d MB' % (percent / MEGABYTE) 54 | virtual_memory['active'] = '%d MB' % (active / MEGABYTE) 55 | virtual_memory['inactive'] = '%d MB' % (inactive / MEGABYTE) 56 | virtual_memory['buffers'] = '%d MB' % (buffers / MEGABYTE) 57 | virtual_memory['cached'] = '%d MB' % (cached / MEGABYTE) 58 | virtual_memory['shared'] = '%d MB' % (shared / MEGABYTE) 59 | 60 | return virtual_memory 61 | 62 | 63 | def getCPUTimes(): 64 | user, nice, system, idle, iowait, irq, softirq, steal, guest, guest_nice = psutil.cpu_times() 65 | cpu_times = {} 66 | cpu_times['user'] = '%d seconds' % user 67 | cpu_times['nice'] = '%d seconds' % nice 68 | cpu_times['system'] = '%d seconds' % system 69 | cpu_times['idle'] = '%d seconds' % idle 70 | cpu_times['iowait'] = '%d seconds' % iowait 71 | cpu_times['irq'] = '%d seconds' % irq 72 | cpu_times['softirq'] = '%d seconds' % softirq 73 | cpu_times['steal'] = '%d seconds' % steal 74 | cpu_times['guest'] = '%d seconds' % guest 75 | cpu_times['guest_nice'] = '%d seconds' % guest_nice 76 | 77 | return cpu_times 78 | 79 | 80 | def getCPUPercent(): 81 | return '%d%%' % psutil.cpu_percent(interval=CPU_INTERVAL) 82 | 83 | 84 | def getDiskUsage(): 85 | total, used, free, percent = psutil.disk_usage('/') 86 | disk_usage = {} 87 | disk_usage['total'] = '%d MB' % (total / MEGABYTE) 88 | disk_usage['used'] = '%d MB' % (used / MEGABYTE) 89 | disk_usage['free'] = '%d MB' % (free / MEGABYTE) 90 | disk_usage['percent'] = '%d MB' % (percent / MEGABYTE) 91 | 92 | return disk_usage 93 | 94 | 95 | def getDiskIOCounters(): 96 | read_count, write_count, read_bytes, write_bytes, read_time, write_time, read_merged_count, write_merged_count,\ 97 | busy_time = psutil.disk_io_counters() 98 | disk_io_counters = {} 99 | disk_io_counters['read_count'] = read_count 100 | disk_io_counters['write_count'] = write_count 101 | disk_io_counters['read_bytes'] = '%d MB' % (read_bytes / MEGABYTE) 102 | disk_io_counters['write_bytes'] = '%d MB' % (write_bytes / MEGABYTE) 103 | disk_io_counters['read_time'] = '%d seconds' % (read_time / MILLISECONDS_IN_SECOND) 104 | disk_io_counters['write_time'] = '%d seconds' % (write_time / MILLISECONDS_IN_SECOND) 105 | disk_io_counters['read_merged_count'] = read_merged_count 106 | disk_io_counters['write_merged_count'] = write_merged_count 107 | disk_io_counters['busy_time'] 
= '%d seconds' % (busy_time / MILLISECONDS_IN_SECOND) 108 | 109 | return disk_io_counters 110 | 111 | def getNetworkIOCounters(): 112 | bytes_sent, bytes_recv, packets_sent, packets_recv, errin, errout, dropin, dropout = psutil.net_io_counters() 113 | net_io_counters = {} 114 | net_io_counters['bytes_sent'] = '%d MB' % (bytes_sent / MEGABYTE) 115 | net_io_counters['bytes_recv'] = '%d MB' % (bytes_recv / MEGABYTE) 116 | net_io_counters['packets_sent'] = packets_sent 117 | net_io_counters['packets_recv'] = packets_recv 118 | net_io_counters['errin'] = errin 119 | net_io_counters['errout'] = errout 120 | net_io_counters['dropin'] = dropin 121 | net_io_counters['dropout'] = dropout 122 | 123 | return net_io_counters 124 | 125 | 126 | def getUpdateTime(): 127 | currentTime = datetime.now() 128 | delta = currentTime - START_TIME 129 | uptimeSeconds = int(round(delta.total_seconds())) 130 | 131 | return '%d seconds' % uptimeSeconds 132 | 133 | 134 | def getConsumers(consumers): 135 | consumerReports = [] 136 | 137 | consumerCopyRO = consumers.getCopyForRead() 138 | for consumerId in consumerCopyRO: 139 | consumer = consumerCopyRO[consumerId] 140 | consumerInfo = {} 141 | consumerInfo[consumer.params['uuid']] = { 142 | 'currentState': consumer.currentState(), 143 | 'desiredState': consumer.desiredState(), 144 | 'secondsSinceLastPoll': consumer.secondsSinceLastPoll(), 145 | 'restartCount': consumer.restartCount() 146 | } 147 | consumerReports.append(consumerInfo) 148 | 149 | return consumerReports 150 | 151 | 152 | def generateHealthReport(consumers, lastCanaryTime): 153 | healthReport = {} 154 | healthReport['last_db_canary'] = secondsSince(lastCanaryTime) 155 | healthReport['uptime'] = getUpdateTime() 156 | healthReport['cpu_times'] = getCPUTimes() 157 | healthReport['cpu_percent'] = getCPUPercent() 158 | healthReport['virtual_memory'] = getVirtualMemory() 159 | healthReport['swap_memory'] = getSwapMemory() 160 | healthReport['disk_usage'] = getDiskUsage() 161 | healthReport['disk_io_counters'] = getDiskIOCounters() 162 | healthReport['net_io_counters'] = getNetworkIOCounters() 163 | healthReport['consumers'] = getConsumers(consumers) 164 | 165 | return healthReport 166 | -------------------------------------------------------------------------------- /provider/service.py: -------------------------------------------------------------------------------- 1 | """Service class, CanaryDocumentGenerator class. 2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 
18 | */ 19 | """ 20 | 21 | import logging 22 | import os 23 | import time 24 | 25 | from consumer import Consumer 26 | from database import Database 27 | from datetime import datetime 28 | from datetimeutils import secondsSince 29 | from requests.exceptions import ConnectionError, ReadTimeout 30 | from threading import Thread 31 | 32 | # How often to produce canary documents 33 | canaryInterval = 60 # seconds 34 | 35 | # How long the changes feed should poll before timing out 36 | changesFeedTimeout = 30 # seconds 37 | 38 | 39 | class Service (Thread): 40 | def __init__(self, consumers): 41 | Thread.__init__(self) 42 | self.daemon = True 43 | 44 | self.database = None 45 | self.lastSequence = None 46 | self.canaryGenerator = CanaryDocumentGenerator() 47 | 48 | self.consumers = consumers 49 | self.workerId = os.getenv("WORKER", "worker0") 50 | 51 | def run(self): 52 | self.canaryGenerator.start() 53 | self.lastCanaryTime = datetime.now() 54 | 55 | while True: 56 | try: 57 | if self.database is not None: 58 | logging.info("Shutting down existing DB client") 59 | self.database.destroy() 60 | 61 | logging.info("Starting changes feed") 62 | self.database = Database(timeout=changesFeedTimeout) 63 | self.changes = self.database.changesFeed(timeout=changesFeedTimeout, since=self.lastSequence) 64 | 65 | for change in self.changes: 66 | # change could be None because the changes feed will timeout 67 | # if it hasn't detected any changes. This timeout allows us to 68 | # check whether or not the feed is capable of detecting canary 69 | # documents 70 | if change != None: 71 | self.__handleDocChange(change) 72 | 73 | # Record the sequence in case the changes feed needs to be 74 | # restarted. This way the new feed can pick up right where 75 | # the old one left off. 76 | self.lastSequence = change['seq'] 77 | except Exception as e: 78 | logging.error('[canary] Exception caught from changes feed. 
Restarting changes feed...') 79 | logging.error(e) 80 | self.stopChangesFeed() 81 | 82 | logging.debug("[changes] I made it out of the changes loop!") 83 | 84 | def __handleDocChange(self, change): 85 | retry = True 86 | retryCount = 0 87 | maxRetries = 5 88 | 89 | while retry: 90 | try: 91 | if "deleted" in change and change["deleted"] == True: 92 | logging.info('[changes] Found a delete') 93 | consumer = self.consumers.getConsumerForTrigger(change['id']) 94 | if consumer != None: 95 | if consumer.desiredState() == Consumer.State.Disabled: 96 | # just remove it from memory 97 | logging.info('[{}] Removing disabled trigger'.format(consumer.trigger)) 98 | self.consumers.removeConsumerForTrigger(consumer.trigger) 99 | else: 100 | logging.info('[{}] Shutting down running trigger'.format(consumer.trigger)) 101 | consumer.shutdown() 102 | # since we can't use a filter function for the feed (then 103 | # you don't get deletes) we need to manually verify this 104 | # is a valid trigger doc that has changed 105 | elif 'triggerURL' in change['doc']: 106 | logging.info('[changes] Found a change in a trigger document') 107 | document = change['doc'] 108 | triggerIsAssignedToMe = self.__isTriggerDocAssignedToMe(document) 109 | 110 | if not self.consumers.hasConsumerForTrigger(change["id"]): 111 | if triggerIsAssignedToMe: 112 | logging.info('[{}] Found a new trigger to create'.format(change["id"])) 113 | self.createAndRunConsumer(document) 114 | else: 115 | logging.info("[{}] Found a new trigger, but is assigned to another worker: {}".format(change["id"], document["worker"])) 116 | else: 117 | existingConsumer = self.consumers.getConsumerForTrigger(change["id"]) 118 | 119 | if existingConsumer.desiredState() == Consumer.State.Running and not self.__isTriggerDocActive(document): 120 | # running trigger should become disabled 121 | # this should be done regardless of which worker the document claims to be assigned to 122 | logging.info('[{}] Existing running trigger should become disabled'.format(change["id"])) 123 | existingConsumer.disable() 124 | elif triggerIsAssignedToMe: 125 | logging.info('[{}] Found a change to an existing trigger'.format(change["id"])) 126 | 127 | if existingConsumer.desiredState() == Consumer.State.Dead and self.__isTriggerDocActive(document): 128 | # if a delete occurs followed quickly by a create the consumer might get stuck in a dead state, 129 | # so we need to forcefully delete the process before recreating it. 
130 | logging.info('[{}] A create event occurred for a trigger that is shutting down'.format(change["id"])) 131 | 132 | if existingConsumer.process.is_alive(): 133 | logging.info('[{}] Joining dead process.'.format(existingConsumer.trigger)) 134 | existingConsumer.process.join(1) 135 | else: 136 | logging.info('[{}] Process is already dead.'.format(existingConsumer.trigger)) 137 | 138 | self.consumers.removeConsumerForTrigger(existingConsumer.trigger) 139 | self.createAndRunConsumer(document) 140 | elif existingConsumer.desiredState() == Consumer.State.Disabled and self.__isTriggerDocActive(document): 141 | # disabled trigger has become active 142 | logging.info('[{}] Existing disabled trigger should become active'.format(change["id"])) 143 | self.createAndRunConsumer(document) 144 | else: 145 | # trigger has become reassigned to a different worker 146 | logging.info("[{}] Shutting down trigger as it has been re-assigned to {}".format(change["id"], document["worker"])) 147 | existingConsumer.shutdown() 148 | elif 'canary-timestamp' in change['doc']: 149 | # found a canary - update lastCanaryTime 150 | logging.info('[canary] I found a canary. The last one was {} seconds ago.'.format(secondsSince(self.lastCanaryTime))) 151 | self.lastCanaryTime = datetime.now() 152 | else: 153 | logging.debug('[changes] Found a change for a non-trigger document') 154 | 155 | retry = False 156 | except Exception as e: 157 | logging.error('[{}] Exception caught while handling change.'.format(change["id"])) 158 | logging.error(e) 159 | 160 | if retry: 161 | retryCount += 1 162 | 163 | if retryCount >= maxRetries: 164 | logging.warn('[{}] Maximum number of retries exceeded for failed change.'.format(change["id"])) 165 | retry = False 166 | 167 | def __isTriggerDocAssignedToMe(self, doc): 168 | if "worker" in doc: 169 | return doc["worker"] == self.workerId 170 | else: 171 | return self.workerId == "worker0" 172 | 173 | def stopChangesFeed(self): 174 | if self.changes != None: 175 | self.changes.stop() 176 | self.changes = None 177 | 178 | def createAndRunConsumer(self, doc): 179 | triggerFQN = doc['_id'] 180 | 181 | # Create a representation for this trigger, even if it is disabled 182 | # This allows it to appear in /health as well as allow it to be deleted 183 | # Creating this object is lightweight and does not initialize any connections 184 | consumer = Consumer(triggerFQN, doc) 185 | self.consumers.addConsumerForTrigger(triggerFQN, consumer) 186 | 187 | if self.__isTriggerDocActive(doc): 188 | logging.info('[{}] Trigger was determined to be active, starting...'.format(triggerFQN)) 189 | consumer.start() 190 | else: 191 | logging.info('[{}] Trigger was determined to be disabled, not starting...'.format(triggerFQN)) 192 | 193 | def __isTriggerDocActive(self, doc): 194 | return ('status' not in doc or doc['status']['active'] == True) 195 | 196 | 197 | class CanaryDocumentGenerator (Thread): 198 | def __init__(self): 199 | Thread.__init__(self) 200 | self.daemon = True 201 | self.database = Database() 202 | 203 | def run(self): 204 | while True: 205 | # create a new canary document every so often 206 | self.database.createCanary() 207 | time.sleep(canaryInterval) 208 | 209 | logging.error('[canary generator] Exited the main loop!') 210 | -------------------------------------------------------------------------------- /provider/thedoctor.py: -------------------------------------------------------------------------------- 1 | """TheDoctor class. 
2 | 3 | /* 4 | * Licensed to the Apache Software Foundation (ASF) under one or more 5 | * contributor license agreements. See the NOTICE file distributed with 6 | * this work for additional information regarding copyright ownership. 7 | * The ASF licenses this file to You under the Apache License, Version 2.0 8 | * (the "License"); you may not use this file except in compliance with 9 | * the License. You may obtain a copy of the License at 10 | * 11 | * http://www.apache.org/licenses/LICENSE-2.0 12 | * 13 | * Unless required by applicable law or agreed to in writing, software 14 | * distributed under the License is distributed on an "AS IS" BASIS, 15 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | * See the License for the specific language governing permissions and 17 | * limitations under the License. 18 | */ 19 | """ 20 | 21 | import logging 22 | import time 23 | 24 | from consumer import Consumer 25 | from threading import Thread 26 | 27 | 28 | class TheDoctor (Thread): 29 | # maximum time to allow a consumer to not successfully poll() before restarting 30 | # this value must be greater than the total amount of time a consumer might retry firing a trigger 31 | poll_timeout_seconds = 200 32 | 33 | # interval between the Doctor making rounds 34 | sleepy_time_seconds = 2 35 | 36 | def __init__(self, consumerCollection): 37 | Thread.__init__(self) 38 | 39 | self.daemon = True 40 | self.consumerCollection = consumerCollection 41 | 42 | def run(self): 43 | logging.info('[Doctor] The Doctor is in!') 44 | 45 | while True: 46 | try: 47 | consumers = self.consumerCollection.getCopyForRead() 48 | 49 | for consumerId in consumers: 50 | consumer = consumers[consumerId] 51 | logging.debug('[Doctor] [{}] Consumer is in state: {}'.format(consumerId, consumer.currentState())) 52 | 53 | if consumer.currentState() == Consumer.State.Dead and consumer.desiredState() == Consumer.State.Running: 54 | # well this is unexpected... 55 | logging.error('[Doctor][{}] Consumer is dead, but should be alive!'.format(consumerId)) 56 | consumer.restart() 57 | elif consumer.currentState() == Consumer.State.Dead and consumer.desiredState() == Consumer.State.Dead: 58 | # Bring out yer dead... 59 | if consumer.process.is_alive(): 60 | logging.info('[{}] Joining dead process.'.format(consumer.trigger)) 61 | # if you don't first join the process, it'll be left hanging around as a "defunct" process 62 | consumer.process.join(1) 63 | else: 64 | logging.info('[{}] Process is already dead.'.format(consumer.trigger)) 65 | 66 | logging.info('[{}] Removing dead consumer from the collection.'.format(consumer.trigger)) 67 | self.consumerCollection.removeConsumerForTrigger(consumer.trigger) 68 | elif consumer.secondsSinceLastPoll() > self.poll_timeout_seconds and consumer.desiredState() == Consumer.State.Running: 69 | # there seems to be an issue with the kafka-python client where it gets into an 70 | # error-handling loop. This causes poll() to never complete, but also does not 71 | # throw an exception. 72 | logging.error('[Doctor][{}] Consumer timed-out, but should be alive! 
Restarting consumer.'.format(consumerId)) 73 | consumer.restart() 74 | 75 | time.sleep(self.sleepy_time_seconds) 76 | except Exception as e: 77 | logging.error("[Doctor] Uncaught exception: {}".format(e)) 78 | -------------------------------------------------------------------------------- /settings.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | include 'tests' 19 | 20 | rootProject.name = 'openwhisk-package-kafka' 21 | 22 | gradle.ext.openwhisk = [ 23 | version: '1.0.1-SNAPSHOT' 24 | ] 25 | 26 | gradle.ext.scala = [ 27 | version: '2.12.7', 28 | compileFlags: ['-feature', '-unchecked', '-deprecation', '-Xfatal-warnings', '-Ywarn-unused-import'] 29 | ] 30 | -------------------------------------------------------------------------------- /tests/build.gradle: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | apply plugin: 'scala' 19 | apply plugin: 'eclipse' 20 | compileTestScala.options.encoding = 'UTF-8' 21 | 22 | repositories { 23 | mavenCentral() 24 | flatDir { 25 | dirs 'libs' 26 | } 27 | mavenLocal() 28 | } 29 | 30 | def commonConfiguration = { 31 | systemProperty 'testthreads', System.getProperty('testthreads', '1') 32 | systemProperty 'health_url', System.getProperty('health_url', '') 33 | systemProperty 'host', System.getProperty('host', '') 34 | systemProperty 'port', System.getProperty('port', '') 35 | systemProperty 'trigger.suffix', System.getProperty('trigger.suffix', '') 36 | systemProperty 'max.retries', System.getProperty('max.retries', '60') 37 | testLogging { 38 | events "passed", "skipped", "failed" 39 | showStandardStreams = true 40 | exceptionFormat = 'full' 41 | } 42 | outputs.upToDateWhen { false } // force tests to run everytime 43 | } 44 | 45 | test { 46 | configure commonConfiguration 47 | exclude 'system/stress/**' 48 | } 49 | 50 | task testWithoutCredentials(type: Test) { 51 | configure commonConfiguration 52 | } 53 | 54 | task stress(type: Test) { 55 | configure commonConfiguration 56 | include 'system/stress/**' 57 | } 58 | 59 | task testHealth(type: Test) { 60 | configure commonConfiguration 61 | systemProperty 'test.router', 'true' 62 | include 'system/health/**' 63 | } 64 | 65 | task testWithProducer(type: Test) { 66 | configure commonConfiguration 67 | exclude 'system/packages/MessageHubProduceTests.class' 68 | exclude 'system/packages/MessagingServiceTests.class' 69 | exclude 'system/stress/**' 70 | } 71 | 72 | task testWithProducerAndProducerAction(type: Test) { 73 | configure commonConfiguration 74 | exclude 'system/stress/**' 75 | exclude 'system/packages/MessagingServiceTests.class' 76 | } 77 | 78 | task testMessageHub(type: Test) { 79 | configure commonConfiguration 80 | exclude 'system/stress/**' 81 | exclude 'system/health/**' 82 | exclude 'system/packages/MessagingServiceTests.class' 83 | exclude 'system/packages/KafkaFeedTests.class' 84 | exclude 'system/packages/KafkaFeedWebTests.class' 85 | exclude 'system/packages/KafkaProduceTests.class' 86 | } 87 | 88 | dependencies { 89 | compile "org.scala-lang:scala-library:${gradle.scala.version}" 90 | compile 'com.ibm.messagehub:messagehub.login:1.0.0' 91 | compile "org.apache.openwhisk:openwhisk-tests:${gradle.openwhisk.version}:tests" 92 | compile "org.apache.openwhisk:openwhisk-tests:${gradle.openwhisk.version}:test-sources" 93 | } 94 | 95 | tasks.withType(ScalaCompile) { 96 | scalaCompileOptions.additionalParameters = gradle.scala.compileFlags 97 | } 98 | -------------------------------------------------------------------------------- /tests/dat/createTriggerActions.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | var openwhisk = require('openwhisk'); 19 | 20 | function main(params) { 21 | console.log(JSON.stringify(params)); 22 | var name = params.messages[0].value; 23 | var ow = openwhisk({ignore_certs: true}); 24 | return ow.triggers.create({name: name}); 25 | } 26 | -------------------------------------------------------------------------------- /tests/dat/createTriggerActionsFromEncodedMessage.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | var openwhisk = require('openwhisk'); 19 | 20 | function main(params) { 21 | console.log(JSON.stringify(params)); 22 | var name = Buffer.from(params.messages[0].value, 'base64').toString('ascii'); 23 | var ow = openwhisk({ignore_certs: true}); 24 | return ow.triggers.create({name: name}); 25 | } 26 | -------------------------------------------------------------------------------- /tests/dat/createTriggerActionsFromKey.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License.
16 | */ 17 | 18 | var openwhisk = require('openwhisk'); 19 | 20 | function main(params) { 21 | console.log(JSON.stringify(params)); 22 | var name = params.messages[0].key; 23 | var ow = openwhisk({ignore_certs: true}); 24 | return ow.triggers.create({name: name}); 25 | } 26 | -------------------------------------------------------------------------------- /tests/dat/missingAdminURL.json: -------------------------------------------------------------------------------- 1 | { 2 | "kafka_brokers_sasl": [ 3 | "someBroker" 4 | ], 5 | "topic": "someTopic", 6 | "user": "someUsername", 7 | "password": "somePassword", 8 | "api_key": "someKey", 9 | "package_endpoint": "someEndpoint", 10 | "triggerName": "/someNamespace/someTrigger", 11 | "lifecycleEvent": "CREATE" 12 | } 13 | -------------------------------------------------------------------------------- /tests/dat/missingBrokers.json: -------------------------------------------------------------------------------- 1 | { 2 | "topic": "someTopic", 3 | "user": "someUsername", 4 | "password": "somePassword", 5 | "kafka_admin_url": "someURL", 6 | "api_key": "someKey", 7 | "package_endpoint": "someEndpoint", 8 | "triggerName": "/someNamespace/someTrigger", 9 | "lifecycleEvent": "CREATE" 10 | } 11 | -------------------------------------------------------------------------------- /tests/dat/missingPackageEndpoint.json: -------------------------------------------------------------------------------- 1 | { 2 | "kafka_brokers_sasl": [ 3 | "someBroker" 4 | ], 5 | "topic": "someTopic", 6 | "user": "someUser", 7 | "password": "somePassword", 8 | "kafka_admin_url": "someURL", 9 | "api_key": "someKey", 10 | "triggerName": "/someNamespace/someTrigger", 11 | "lifecycleEvent": "CREATE" 12 | } 13 | -------------------------------------------------------------------------------- /tests/dat/missingPassword.json: -------------------------------------------------------------------------------- 1 | { 2 | "kafka_brokers_sasl": [ 3 | "someBroker" 4 | ], 5 | "topic": "someTopic", 6 | "user": "someUsername", 7 | "kafka_admin_url": "someURL", 8 | "api_key": "someKey", 9 | "package_endpoint": "someEndpoint", 10 | "triggerName": "/someNamespace/someTrigger", 11 | "lifecycleEvent": "CREATE" 12 | } 13 | -------------------------------------------------------------------------------- /tests/dat/missingTopic.json: -------------------------------------------------------------------------------- 1 | { 2 | "kafka_brokers_sasl": [ 3 | "someBroker" 4 | ], 5 | "user": "someUsername", 6 | "password": "somePassword", 7 | "kafka_admin_url": "someURL", 8 | "api_key": "someKey", 9 | "package_endpoint": "someEndpoint", 10 | "triggerName": "/someNamespace/someTrigger", 11 | "lifecycleEvent": "CREATE" 12 | } 13 | -------------------------------------------------------------------------------- /tests/dat/missingUser.json: -------------------------------------------------------------------------------- 1 | { 2 | "kafka_brokers_sasl": [ 3 | "someBroker" 4 | ], 5 | "topic": "someTopic", 6 | "password": "somePassword", 7 | "kafka_admin_url": "someURL", 8 | "api_key": "someKey", 9 | "package_endpoint": "someEndpoint", 10 | "triggerName": "/someNamespace/someTrigger", 11 | "lifecycleEvent": "CREATE" 12 | } 13 | -------------------------------------------------------------------------------- /tests/dat/multipleValueTypes.json: -------------------------------------------------------------------------------- 1 | { 2 | "kafka_brokers_sasl": [ 3 | "someBroker" 4 | ], 5 | "brokers": [ 6 | "someBroker" 7 | ], 8 
| "topic": "someTopic", 9 | "user": "someUser", 10 | "password": "somePassword", 11 | "kafka_admin_url": "someURL", 12 | "api_key": "someKey", 13 | "package_endpoint": "someEndpoint", 14 | "triggerName": "/_/someTrigger", 15 | "lifecycleEvent": "CREATE", 16 | "isJSONData": true, 17 | "isBinaryValue": true 18 | } 19 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/health/BasicHealthTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package system.health 19 | 20 | import common.TestUtils.NOT_FOUND 21 | import common._ 22 | import org.junit.runner.RunWith 23 | import org.scalatest.junit.JUnitRunner 24 | import org.scalatest.{BeforeAndAfterAll, FlatSpec, Inside, Matchers} 25 | import spray.json.DefaultJsonProtocol._ 26 | import spray.json._ 27 | import system.utils.KafkaUtils 28 | import org.apache.openwhisk.utils.retry 29 | import org.apache.openwhisk.core.entity.Annotations 30 | 31 | import scala.concurrent.duration.DurationInt 32 | import scala.language.postfixOps 33 | 34 | @RunWith(classOf[JUnitRunner]) 35 | class BasicHealthTest 36 | extends FlatSpec 37 | with Matchers 38 | with WskActorSystem 39 | with BeforeAndAfterAll 40 | with TestHelpers 41 | with WskTestHelpers 42 | with Inside 43 | with JsHelpers 44 | with KafkaUtils { 45 | 46 | val topic = "test" 47 | val sessionTimeout = 10 seconds 48 | 49 | implicit val wskprops = WskProps() 50 | val wsk = new Wsk() 51 | 52 | val messagingPackage = "/whisk.system/messaging" 53 | val messageHubFeed = "messageHubFeed" 54 | val messageHubProduce = "messageHubProduce" 55 | val actionName = s"$messagingPackage/$messageHubFeed" 56 | val maxRetries = System.getProperty("max.retries", "60").toInt 57 | 58 | behavior of "Message Hub feed" 59 | 60 | it should "create a consumer and fire a trigger when a message is posted to messagehub" in withAssetCleaner(wskprops) { 61 | val currentTime = s"${System.currentTimeMillis}" 62 | 63 | (wp, assetHelper) => 64 | val triggerName = s"/_/dummyMessageHubTrigger-$currentTime" 65 | 66 | createTrigger(assetHelper, triggerName, parameters = Map( 67 | "user" -> getAsJson("user"), 68 | "password" -> getAsJson("password"), 69 | "api_key" -> getAsJson("api_key"), 70 | "kafka_admin_url" -> getAsJson("kafka_admin_url"), 71 | "kafka_brokers_sasl" -> getAsJson("brokers"), 72 | "topic" -> topic.toJson 73 | )) 74 | 75 | // This action creates a trigger if it gets executed. 76 | // The name of the trigger will be the message, that has been send to kafka. 77 | // We only create this trigger to verify, that the action has been executed after sending the message to kafka. 
78 | val defaultAction = Some("dat/createTriggerActions.js") 79 | val defaultActionName = s"helloKafka-$currentTime" 80 | 81 | assetHelper.withCleaner(wsk.action, defaultActionName) { (action, name) => 82 | action.create(name, defaultAction, annotations = Map(Annotations.ProvideApiKeyAnnotationName -> JsBoolean(true))) 83 | } 84 | 85 | assetHelper.withCleaner(wsk.rule, s"dummyMessageHub-helloKafka-$currentTime") { (rule, name) => 86 | rule.create(name, trigger = triggerName, action = defaultActionName) 87 | } 88 | 89 | // key to use for the produced message 90 | val key = "TheKey" 91 | 92 | val verificationName = s"trigger-$currentTime" 93 | 94 | // Check that the verification trigger does not exist before the action ran. 95 | // This will also clean up the trigger after the test. 96 | assetHelper.withCleaner(wsk.trigger, verificationName) { (trigger, name) => 97 | trigger.get(name, NOT_FOUND) 98 | } 99 | 100 | produceMessage(topic, key, verificationName) 101 | 102 | // Check if the trigger, that should have been created as reaction on the kafka-message, has been created. 103 | // The trigger should have been created by the action, that has been triggered by the kafka message. 104 | // If we cannot find it, the most probably the action did not run. 105 | retry(wsk.trigger.get(verificationName), 60, Some(1.second)) 106 | } 107 | 108 | it should "return correct status and configuration" in withAssetCleaner(wskprops) { 109 | val currentTime = s"${System.currentTimeMillis}" 110 | 111 | (wp, assetHelper) => 112 | val triggerName = s"/_/dummyMessageHubTrigger-$currentTime" 113 | println(s"Creating trigger $triggerName") 114 | 115 | val username = getAsJson("user") 116 | val password = getAsJson("password") 117 | val admin_url = getAsJson("kafka_admin_url") 118 | val brokers = getAsJson("brokers") 119 | 120 | val feedCreationResult = assetHelper.withCleaner(wsk.trigger, triggerName) { 121 | (trigger, _) => 122 | trigger.create(triggerName, feed = Some(actionName), parameters = Map( 123 | "user" -> username, 124 | "password" -> password, 125 | "api_key" -> getAsJson("api_key"), 126 | "kafka_admin_url" -> admin_url, 127 | "kafka_brokers_sasl" -> brokers, 128 | "topic" -> topic.toJson, 129 | "isBinaryKey" -> false.toJson, 130 | "isBinaryValue" -> false.toJson 131 | )) 132 | } 133 | 134 | withActivation(wsk.activation, feedCreationResult, initialWait = 5 seconds, totalWait = 60 seconds) { 135 | activation => 136 | // should be successful 137 | activation.response.success shouldBe true 138 | } 139 | 140 | val run = wsk.action.invoke(actionName, parameters = Map( 141 | "triggerName" -> triggerName.toJson, 142 | "lifecycleEvent" -> "READ".toJson, 143 | "authKey" -> wp.authKey.toJson 144 | )) 145 | 146 | withActivation(wsk.activation, run) { 147 | activation => 148 | activation.response.success shouldBe true 149 | 150 | inside (activation.response.result) { 151 | case Some(result) => 152 | val config = result.getFields("config").head.asInstanceOf[JsObject].fields 153 | val status = result.getFields("status").head.asInstanceOf[JsObject].fields 154 | 155 | config should contain("kafka_brokers_sasl" -> brokers) 156 | config should contain("isBinaryKey" -> false.toJson) 157 | config should contain("isBinaryValue" -> false.toJson) 158 | config should contain("isJSONData" -> false.toJson) 159 | config should contain("kafka_admin_url" -> admin_url) 160 | config should contain("password" -> password) 161 | config should contain("topic" -> topic.toJson) 162 | config should contain("user" -> username) 163 | 
config("triggerName").convertTo[String].split("/").last should equal (triggerName.split("/").last) 164 | config should not { 165 | contain key "authKey" 166 | contain key "triggerURL" 167 | contain key "uuid" 168 | contain key "worker" 169 | } 170 | status should contain("active" -> true.toJson) 171 | status should contain key "dateChanged" 172 | status should not(contain key "reason") 173 | } 174 | } 175 | } 176 | } 177 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/packages/KafkaFeedTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | package system.packages 18 | 19 | import org.junit.runner.RunWith 20 | import org.scalatest.BeforeAndAfterAll 21 | import org.scalatest.FlatSpec 22 | import org.scalatest.Matchers 23 | import org.scalatest.junit.JUnitRunner 24 | import spray.json._ 25 | 26 | import common.JsHelpers 27 | import common.TestHelpers 28 | import common.Wsk 29 | import common.WskActorSystem 30 | import common.WskProps 31 | import common.WskTestHelpers 32 | import ActionHelper._ 33 | 34 | @RunWith(classOf[JUnitRunner]) 35 | class KafkaFeedTests 36 | extends FlatSpec 37 | with Matchers 38 | with WskActorSystem 39 | with BeforeAndAfterAll 40 | with TestHelpers 41 | with WskTestHelpers 42 | with JsHelpers { 43 | 44 | implicit val wskprops = WskProps() 45 | val wsk = new Wsk() 46 | 47 | val messagingPackage = "/whisk.system/messaging" 48 | val kafkaFeed = "kafkaFeed" 49 | val actionName = s"${messagingPackage}/${kafkaFeed}" 50 | 51 | behavior of "Kafka feed action" 52 | 53 | it should "reject invocation when topic argument is missing" in { 54 | val expectedOutput = JsObject( 55 | "error" -> JsString("You must supply a 'topic' parameter.") 56 | ) 57 | 58 | runActionWithExpectedResult(actionName, "dat/missingTopic.json", expectedOutput, false) 59 | } 60 | 61 | it should "reject invocation when brokers argument is missing" in { 62 | val expectedOutput = JsObject( 63 | "error" -> JsString("You must supply a 'brokers' parameter.") 64 | ) 65 | 66 | runActionWithExpectedResult(actionName, "dat/missingBrokers.json", expectedOutput, false) 67 | } 68 | 69 | it should "reject invocation when isJSONData and isBinaryValue are both enable" in { 70 | val expectedOutput = JsObject( 71 | "error" -> JsString("isJSONData and isBinaryValue cannot both be enabled.") 72 | ) 73 | 74 | runActionWithExpectedResult(actionName, "dat/multipleValueTypes.json", expectedOutput, false) 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/packages/KafkaFeedWebTests.scala: 
-------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | package system.packages 18 | 19 | import org.junit.runner.RunWith 20 | 21 | import org.scalatest.BeforeAndAfter 22 | import org.scalatest.FlatSpec 23 | import org.scalatest.Matchers 24 | import org.scalatest.junit.JUnitRunner 25 | 26 | import io.restassured.RestAssured 27 | import io.restassured.config.SSLConfig 28 | 29 | import common.Wsk 30 | import common.WskProps 31 | import common.TestUtils.FORBIDDEN 32 | 33 | import spray.json._ 34 | 35 | @RunWith(classOf[JUnitRunner]) 36 | class KafkaFeedWebTests 37 | extends FlatSpec 38 | with BeforeAndAfter 39 | with Matchers { 40 | 41 | val wskprops = WskProps() 42 | 43 | val webAction = "/whisk.system/messagingWeb/kafkaFeedWeb" 44 | val webActionURL = s"https://${wskprops.apihost}/api/v1/web${webAction}.http" 45 | 46 | val completeParams = JsObject( 47 | "triggerName" -> JsString("/invalidNamespace/invalidTrigger"), 48 | "topic" -> JsString("someTopic"), 49 | "brokers" -> JsArray(JsString("someBroker")), 50 | "user" -> JsString("someUsername"), 51 | "password" -> JsString("somePassword"), 52 | "kafka_admin_url" -> JsString("https://kafka-admin-prod01.messagehub.services.us-south.bluemix.net:443"), 53 | "authKey" -> JsString("DoesNotWork") 54 | ) 55 | 56 | def makePostCallWithExpectedResult(params: JsObject, expectedResult: String, expectedCode: Int) = { 57 | val response = RestAssured.given() 58 | .contentType("application/json\r\n") 59 | .config(RestAssured.config().sslConfig(new SSLConfig().relaxedHTTPSValidation())) 60 | .body(params.toString()) 61 | .post(webActionURL) 62 | assert(response.statusCode() == expectedCode) 63 | response.body.asString shouldBe expectedResult 64 | } 65 | 66 | def makeDeleteCallWithExpectedResult(expectedResult: String, expectedCode: Int) = { 67 | val response = RestAssured.given().contentType("application/json\r\n").config(RestAssured.config().sslConfig(new SSLConfig().relaxedHTTPSValidation())).delete(webActionURL) 68 | assert(response.statusCode() == expectedCode) 69 | response.body.asString shouldBe expectedResult 70 | } 71 | 72 | behavior of "Kafka feed web action" 73 | 74 | it should "not be obtainable using the CLI" in { 75 | val wsk = new Wsk() 76 | implicit val wp = wskprops 77 | 78 | wsk.action.get(webAction, FORBIDDEN) 79 | } 80 | 81 | it should "reject post of a trigger due to missing brokers argument" in { 82 | val params = JsObject(completeParams.fields - "brokers") 83 | 84 | makePostCallWithExpectedResult(params, "You must supply a 'brokers' parameter.", 400) 85 | } 86 | 87 | it should "reject post of a trigger due to missing topic argument" in { 88 | val params = 
JsObject(completeParams.fields - "topic") 89 | 90 | makePostCallWithExpectedResult(params, "You must supply a 'topic' parameter.", 400) 91 | } 92 | 93 | it should "reject post of a trigger due to missing triggerName argument" in { 94 | val params = JsObject(completeParams.fields - "triggerName") 95 | 96 | makePostCallWithExpectedResult(params, "You must supply a 'triggerName' parameter.", 400) 97 | } 98 | 99 | it should "reject post of a trigger when authentication fails" in { 100 | makePostCallWithExpectedResult(completeParams, "You are not authorized for this trigger.", 401) 101 | } 102 | 103 | // it should "reject delete of a trigger that does not exist" in { 104 | // val expectedJSON = JsObject( 105 | // "triggerName" -> JsString("/invalidNamespace/invalidTrigger"), 106 | // "error" -> JsString("not found") 107 | // ) 108 | // 109 | // makeDeleteCallWithExpectedResult(expectedJSON, 404) 110 | // } 111 | } 112 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/packages/KafkaProduceTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package system.packages 19 | 20 | import system.utils.KafkaUtils 21 | 22 | import scala.concurrent.duration.DurationInt 23 | import scala.language.postfixOps 24 | 25 | import org.junit.runner.RunWith 26 | import org.scalatest.BeforeAndAfterAll 27 | import org.scalatest.FlatSpec 28 | import org.scalatest.Matchers 29 | import org.scalatest.junit.JUnitRunner 30 | 31 | import common.JsHelpers 32 | import common.TestHelpers 33 | import common.Wsk 34 | import common.WskActorSystem 35 | import common.WskProps 36 | import common.WskTestHelpers 37 | import spray.json.DefaultJsonProtocol._ 38 | import spray.json._ 39 | 40 | @RunWith(classOf[JUnitRunner]) 41 | class KafkaProduceTests 42 | extends FlatSpec 43 | with Matchers 44 | with WskActorSystem 45 | with BeforeAndAfterAll 46 | with TestHelpers 47 | with WskTestHelpers 48 | with JsHelpers 49 | with KafkaUtils { 50 | 51 | val topic = "test" 52 | val sessionTimeout = 10 seconds 53 | 54 | implicit val wskprops = WskProps() 55 | val wsk = new Wsk() 56 | 57 | val actionName = "kafkaProduceAction" 58 | val actionFile = "../action/kafkaProduce.py" 59 | 60 | behavior of "Kafka Produce action" 61 | 62 | override def beforeAll() { 63 | wsk.action.create(actionName, Some(actionFile)) 64 | super.beforeAll() 65 | } 66 | 67 | override def afterAll() { 68 | wsk.action.delete(actionName) 69 | super.afterAll() 70 | } 71 | 72 | def testMissingParameter(missingParam : String) = { 73 | var fullParamsMap = Map( 74 | "topic" -> topic.toJson, 75 | "brokers" -> getAsJson("brokers"), 76 | "value" -> "This will fail".toJson) 77 | var missingParamsMap = fullParamsMap.filterKeys(_ != missingParam) 78 | 79 | withActivation(wsk.activation, wsk.action.invoke(actionName, missingParamsMap)) { 80 | activation => 81 | activation.response.success shouldBe false 82 | activation.response.result.get.toString should include(missingParam) 83 | } 84 | } 85 | 86 | it should "Require brokers parameter" in { 87 | testMissingParameter("brokers") 88 | } 89 | 90 | it should "Require topic parameter" in { 91 | testMissingParameter("topic") 92 | } 93 | 94 | it should "Require value parameter" in { 95 | testMissingParameter("value") 96 | } 97 | } 98 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/packages/MessageHubFeedWebTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | package system.packages 18 | 19 | import org.junit.runner.RunWith 20 | 21 | import org.scalatest.BeforeAndAfter 22 | import org.scalatest.FlatSpec 23 | import org.scalatest.Matchers 24 | import org.scalatest.junit.JUnitRunner 25 | 26 | import io.restassured.RestAssured 27 | import io.restassured.config.SSLConfig 28 | 29 | import common.Wsk 30 | import common.WskProps 31 | import common.TestUtils.FORBIDDEN 32 | 33 | import spray.json._ 34 | 35 | @RunWith(classOf[JUnitRunner]) 36 | class MessageHubFeedWebTests 37 | extends FlatSpec 38 | with BeforeAndAfter 39 | with Matchers { 40 | 41 | val wskprops = WskProps() 42 | 43 | val webAction = "/whisk.system/messagingWeb/messageHubFeedWeb" 44 | val webActionURL = s"https://${wskprops.apihost}/api/v1/web${webAction}.http" 45 | 46 | val completeParams = JsObject( 47 | "triggerName" -> JsString("/invalidNamespace/invalidTrigger"), 48 | "topic" -> JsString("someTopic"), 49 | "kafka_brokers_sasl" -> JsArray(JsString("someBroker")), 50 | "user" -> JsString("someUsername"), 51 | "password" -> JsString("somePassword"), 52 | "kafka_admin_url" -> JsString("https://kafka-admin-prod01.messagehub.services.us-south.bluemix.net:443"), 53 | "authKey" -> JsString("DoesNotWork") 54 | ) 55 | 56 | def makePostCallWithExpectedResult(params: JsObject, expectedResult: String, expectedCode: Int) = { 57 | val response = RestAssured.given() 58 | .contentType("application/json\r\n") 59 | .config(RestAssured.config().sslConfig(new SSLConfig().relaxedHTTPSValidation())) 60 | .body(params.toString()) 61 | .post(webActionURL) 62 | assert(response.statusCode() == expectedCode) 63 | response.body.asString shouldBe expectedResult 64 | } 65 | 66 | def makeDeleteCallWithExpectedResult(expectedResult: String, expectedCode: Int) = { 67 | val response = RestAssured.given().contentType("application/json\r\n").config(RestAssured.config().sslConfig(new SSLConfig().relaxedHTTPSValidation())).delete(webActionURL) 68 | assert(response.statusCode() == expectedCode) 69 | response.body.asString shouldBe expectedResult 70 | } 71 | 72 | behavior of "Message Hub feed web action" 73 | 74 | it should "not be obtainable using the CLI" in { 75 | val wsk = new Wsk() 76 | implicit val wp = wskprops 77 | 78 | wsk.action.get(webAction, FORBIDDEN) 79 | } 80 | 81 | it should "reject post of a trigger due to missing kafka_brokers_sasl argument" in { 82 | val params = JsObject(completeParams.fields - "kafka_brokers_sasl") 83 | 84 | makePostCallWithExpectedResult(params, "You must supply a 'kafka_brokers_sasl' parameter.", 400) 85 | } 86 | 87 | it should "reject post of a trigger due to missing topic argument" in { 88 | val params = JsObject(completeParams.fields - "topic") 89 | 90 | makePostCallWithExpectedResult(params, "You must supply a 'topic' parameter.", 400) 91 | } 92 | 93 | it should "reject post of a trigger due to missing triggerName argument" in { 94 | val params = JsObject(completeParams.fields - "triggerName") 95 | 96 | makePostCallWithExpectedResult(params, "You must supply a 'triggerName' parameter.", 400) 97 | } 98 | 99 | it should "reject post of a trigger due to missing user argument" in { 100 | val params = JsObject(completeParams.fields - "user") 101 | 102 | makePostCallWithExpectedResult(params, "You must supply a 'user' parameter to authenticate with Message Hub.", 400) 103 | } 104 | 105 | it should "reject post of a trigger due to missing password argument" in { 106 | val params = JsObject(completeParams.fields - "password") 107 | 108 | 
makePostCallWithExpectedResult(params, "You must supply a 'password' parameter to authenticate with Message Hub.", 400) 109 | } 110 | 111 | it should "reject post of a trigger when authentication fails" in { 112 | makePostCallWithExpectedResult(completeParams, "You are not authorized for this trigger.", 401) 113 | } 114 | 115 | // it should "reject delete of a trigger that does not exist" in { 116 | // val expectedJSON = JsObject( 117 | // "triggerName" -> JsString("/invalidNamespace/invalidTrigger"), 118 | // "error" -> JsString("not found") 119 | // ) 120 | // 121 | // makeDeleteCallWithExpectedResult(expectedJSON, 404) 122 | // } 123 | } 124 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/packages/MessageHubMultiWorkersTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | package system.packages 18 | 19 | import system.utils.KafkaUtils 20 | 21 | import scala.concurrent.duration.DurationInt 22 | import org.junit.runner.RunWith 23 | import org.scalatest.BeforeAndAfterAll 24 | import org.scalatest.FlatSpec 25 | import org.scalatest.Matchers 26 | import org.scalatest.junit.JUnitRunner 27 | import common.JsHelpers 28 | import common.TestHelpers 29 | import common.StreamLogging 30 | import common.WhiskProperties 31 | import common.Wsk 32 | import common.WskActorSystem 33 | import common.WskProps 34 | import common.WskTestHelpers 35 | import spray.json.DefaultJsonProtocol._ 36 | import spray.json._ 37 | import org.apache.openwhisk.core.WhiskConfig 38 | import org.apache.openwhisk.core.database.test.ExtendedCouchDbRestClient 39 | import org.apache.openwhisk.utils.{JsHelpers, retry} 40 | 41 | import scala.concurrent.Await 42 | 43 | @RunWith(classOf[JUnitRunner]) 44 | class MessageHubMultiWorkersTest extends FlatSpec 45 | with Matchers 46 | with WskActorSystem 47 | with BeforeAndAfterAll 48 | with TestHelpers 49 | with WskTestHelpers 50 | with JsHelpers 51 | with StreamLogging 52 | with KafkaUtils { 53 | 54 | val topic = "test" 55 | 56 | implicit val wskprops = WskProps() 57 | val wsk = new Wsk() 58 | 59 | val messagingPackage = "/whisk.system/messaging" 60 | val messageHubFeed = "messageHubFeed" 61 | val dbProtocol = WhiskProperties.getProperty("db.protocol") 62 | val dbHost = WhiskProperties.getProperty("db.host") 63 | val dbPort = WhiskProperties.getProperty("db.port").toInt 64 | val dbUsername = WhiskProperties.getProperty("db.username") 65 | val dbPassword = WhiskProperties.getProperty("db.password") 66 | val dbPrefix = WhiskProperties.getProperty(WhiskConfig.dbPrefix) 67 | val dbName = s"${dbPrefix}ow_kafka_triggers" 68 | val client = new 
ExtendedCouchDbRestClient(dbProtocol, dbHost, dbPort, dbUsername, dbPassword, dbName) 69 | 70 | behavior of "Message Hub Feed" 71 | 72 | ignore should "assign two triggers to same worker when only worker0 is available" in withAssetCleaner(wskprops) { 73 | 74 | (wp, assetHelper) => 75 | val firstTrigger = s"firstTrigger-${System.currentTimeMillis()}" 76 | val secondTrigger = s"secondTrigger-${System.currentTimeMillis()}" 77 | 78 | val worker0 = s"worker${System.currentTimeMillis()}" 79 | 80 | val parameters = constructParams(List(worker0)) 81 | 82 | createTrigger(assetHelper, firstTrigger, parameters) 83 | createTrigger(assetHelper, secondTrigger, parameters) 84 | 85 | retry({ 86 | val result = Await.result(client.getAllDocs(includeDocs = Some(true)), 15.seconds) 87 | result should be('right) 88 | val documents = result.right.get.fields("rows").convertTo[List[JsObject]] 89 | 90 | validateTriggerAssignment(documents, firstTrigger, worker0) 91 | validateTriggerAssignment(documents, secondTrigger, worker0) 92 | }) 93 | } 94 | 95 | ignore should "assign a trigger to worker0 and a trigger to worker1 when both workers are available" in withAssetCleaner(wskprops) { 96 | 97 | (wp, assetHelper) => 98 | val firstTrigger = s"firstTrigger-${System.currentTimeMillis()}" 99 | val secondTrigger = s"secondTrigger-${System.currentTimeMillis()}" 100 | 101 | val worker0 = s"worker${System.currentTimeMillis()}" 102 | val worker1 = s"worker${System.currentTimeMillis()}" 103 | 104 | val parameters = constructParams(List(worker0, worker1)) 105 | 106 | createTrigger(assetHelper, firstTrigger, parameters) 107 | createTrigger(assetHelper, secondTrigger, parameters) 108 | 109 | retry({ 110 | val result = Await.result(client.getAllDocs(includeDocs = Some(true)), 15.seconds) 111 | result should be('right) 112 | val documents = result.right.get.fields("rows").convertTo[List[JsObject]] 113 | 114 | validateTriggerAssignment(documents, firstTrigger, worker0) 115 | validateTriggerAssignment(documents, secondTrigger, worker1) 116 | }) 117 | } 118 | 119 | ignore should "assign a trigger to worker1 when worker0 is removed and there is an assignment imbalance" in withAssetCleaner(wskprops) { 120 | 121 | (wp, assetHelper) => 122 | val firstTrigger = s"firstTrigger-${System.currentTimeMillis()}" 123 | val secondTrigger = s"secondTrigger-${System.currentTimeMillis()}" 124 | val thirdTrigger = s"thirdTrigger-${System.currentTimeMillis()}" 125 | val fourthTrigger = s"fourthTrigger-${System.currentTimeMillis()}" 126 | 127 | val worker0 = s"worker${System.currentTimeMillis()}" 128 | val worker1 = s"worker${System.currentTimeMillis()}" 129 | 130 | val parameters = constructParams(List(worker1)) 131 | 132 | createTrigger(assetHelper, firstTrigger, parameters) 133 | createTrigger(assetHelper, secondTrigger, parameters) 134 | createTrigger(assetHelper, thirdTrigger, parameters = constructParams(List(worker0, worker1))) 135 | createTrigger(assetHelper, fourthTrigger, parameters = constructParams(List(worker1))) 136 | 137 | retry({ 138 | val result = Await.result(client.getAllDocs(includeDocs = Some(true)), 15.seconds) 139 | result should be('right) 140 | val documents = result.right.get.fields("rows").convertTo[List[JsObject]] 141 | 142 | validateTriggerAssignment(documents, firstTrigger, worker1) 143 | validateTriggerAssignment(documents, secondTrigger, worker1) 144 | validateTriggerAssignment(documents, thirdTrigger, worker0) 145 | validateTriggerAssignment(documents, fourthTrigger, worker1) 146 | }) 147 | } 148 | 149 | ignore should 
"balance the load across workers when a worker is added" in withAssetCleaner(wskprops) { 150 | 151 | (wp, assetHelper) => 152 | val firstTrigger = s"firstTrigger-${System.currentTimeMillis()}" 153 | val secondTrigger = s"secondTrigger-${System.currentTimeMillis()}" 154 | val thirdTrigger = s"thirdTrigger-${System.currentTimeMillis()}" 155 | val fourthTrigger = s"fourthTrigger-${System.currentTimeMillis()}" 156 | val fifthTrigger = s"fifthTrigger-${System.currentTimeMillis()}" 157 | val sixthTrigger = s"sixthTrigger-${System.currentTimeMillis()}" 158 | 159 | val worker0 = s"worker${System.currentTimeMillis()}" 160 | val worker1 = s"worker${System.currentTimeMillis()}" 161 | 162 | val parameters = constructParams(List(worker0)) 163 | val updatedParameters = constructParams(List(worker0, worker1)) 164 | 165 | createTrigger(assetHelper, firstTrigger, parameters) 166 | createTrigger(assetHelper, secondTrigger, parameters) 167 | createTrigger(assetHelper, thirdTrigger, updatedParameters) 168 | createTrigger(assetHelper, fourthTrigger, updatedParameters) 169 | createTrigger(assetHelper, fifthTrigger, updatedParameters) 170 | createTrigger(assetHelper, sixthTrigger, updatedParameters) 171 | 172 | retry({ 173 | val result = Await.result(client.getAllDocs(includeDocs = Some(true)), 15.seconds) 174 | result should be('right) 175 | val documents = result.right.get.fields("rows").convertTo[List[JsObject]] 176 | 177 | validateTriggerAssignment(documents, firstTrigger, worker0) 178 | validateTriggerAssignment(documents, secondTrigger, worker0) 179 | validateTriggerAssignment(documents, thirdTrigger, worker1) 180 | validateTriggerAssignment(documents, fourthTrigger, worker1) 181 | validateTriggerAssignment(documents, fifthTrigger, worker0) 182 | validateTriggerAssignment(documents, sixthTrigger, worker1) 183 | }) 184 | } 185 | 186 | def constructParams(workers: List[String]) = { 187 | Map( 188 | "user" -> getAsJson("user"), 189 | "password" -> getAsJson("password"), 190 | "api_key" -> getAsJson("api_key"), 191 | "kafka_admin_url" -> getAsJson("kafka_admin_url"), 192 | "kafka_brokers_sasl" -> getAsJson("brokers"), 193 | "topic" -> topic.toJson, 194 | "workers" -> workers.toJson 195 | ) 196 | } 197 | 198 | def validateTriggerAssignment(documents: List[JsObject], trigger: String, worker: String) = { 199 | val doc = documents.filter(_.fields("id").convertTo[String].contains(trigger)) 200 | JsHelpers.getFieldPath(doc(0), "doc", "worker") shouldBe Some(JsString(worker)) 201 | } 202 | } 203 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/packages/MessageHubProduceTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package system.packages 19 | 20 | import system.utils.KafkaUtils 21 | 22 | import scala.concurrent.duration.DurationInt 23 | import scala.language.postfixOps 24 | 25 | import org.junit.runner.RunWith 26 | import org.scalatest.BeforeAndAfterAll 27 | import org.scalatest.FlatSpec 28 | import org.scalatest.Matchers 29 | import org.scalatest.junit.JUnitRunner 30 | 31 | import common.JsHelpers 32 | import common.TestHelpers 33 | import common.TestUtils.NOT_FOUND 34 | import common.Wsk 35 | import common.WskActorSystem 36 | import common.WskProps 37 | import common.WskTestHelpers 38 | 39 | import spray.json.DefaultJsonProtocol._ 40 | import spray.json._ 41 | 42 | import java.util.Base64 43 | import java.nio.charset.StandardCharsets 44 | 45 | import org.apache.openwhisk.utils.retry 46 | import org.apache.openwhisk.core.entity.Annotations 47 | 48 | @RunWith(classOf[JUnitRunner]) 49 | class MessageHubProduceTests 50 | extends FlatSpec 51 | with Matchers 52 | with WskActorSystem 53 | with BeforeAndAfterAll 54 | with TestHelpers 55 | with WskTestHelpers 56 | with JsHelpers 57 | with KafkaUtils { 58 | 59 | val topic = "test" 60 | val sessionTimeout = 10 seconds 61 | 62 | implicit val wskprops = WskProps() 63 | val wsk = new Wsk() 64 | 65 | val messagingPackage = "/whisk.system/messaging" 66 | val messageHubFeed = "messageHubFeed" 67 | val messageHubProduce = "messageHubProduce" 68 | val consumerInitTime = 10000 // ms 69 | val maxRetries = System.getProperty("max.retries", "60").toInt 70 | 71 | // these parameter values are 100% valid and should work as-is 72 | val validParameters = Map( 73 | "user" -> getAsJson("user"), 74 | "password" -> getAsJson("password"), 75 | "topic" -> topic.toJson, 76 | "kafka_brokers_sasl" -> getAsJson("brokers"), 77 | "value" -> "Big Trouble is actually a really good Tim Allen movie. 
Seriously.".toJson) 78 | 79 | behavior of "Message Hub Produce action" 80 | 81 | def testMissingParameter(missingParam : String) = { 82 | val missingParamsMap = validParameters.filterKeys(_ != missingParam) 83 | 84 | withActivation(wsk.activation, wsk.action.invoke(s"$messagingPackage/$messageHubProduce", missingParamsMap)) { 85 | activation => 86 | activation.response.success shouldBe false 87 | activation.response.result.get.toString should include(missingParam) 88 | } 89 | } 90 | 91 | it should "Require kafka_brokers_sasl parameter" in { 92 | testMissingParameter("kafka_brokers_sasl") 93 | } 94 | 95 | it should "Require user parameter" in { 96 | testMissingParameter("user") 97 | } 98 | 99 | it should "Require password parameter" in { 100 | testMissingParameter("password") 101 | } 102 | 103 | it should "Require topic parameter" in { 104 | testMissingParameter("topic") 105 | } 106 | 107 | it should "Require value parameter" in { 108 | testMissingParameter("value") 109 | } 110 | 111 | it should "Reject trying to decode a non-base64 key" in { 112 | val badKeyParams = validParameters + ("key" -> "?".toJson) + ("base64DecodeKey" -> true.toJson) 113 | 114 | withActivation(wsk.activation, wsk.action.invoke(s"$messagingPackage/$messageHubProduce", badKeyParams)) { 115 | activation => 116 | activation.response.success shouldBe false 117 | activation.response.result.get.toString should include("key parameter is not Base64 encoded") 118 | } 119 | } 120 | 121 | it should "Reject trying to decode a non-base64 value" in { 122 | val badValueParams = validParameters + ("value" -> "?".toJson) + ("base64DecodeValue" -> true.toJson) 123 | 124 | withActivation(wsk.activation, wsk.action.invoke(s"$messagingPackage/$messageHubProduce", badValueParams)) { 125 | activation => 126 | activation.response.success shouldBe false 127 | activation.response.result.get.toString should include("value parameter is not Base64 encoded") 128 | } 129 | } 130 | 131 | it should "Post a message with a binary value" in withAssetCleaner(wskprops) { 132 | val currentTime = s"${System.currentTimeMillis}" 133 | 134 | (wp, assetHelper) => 135 | val triggerName = s"/_/binaryValueTrigger-$currentTime" 136 | 137 | createTrigger(assetHelper, triggerName, parameters = Map( 138 | "user" -> getAsJson("user"), 139 | "password" -> getAsJson("password"), 140 | "api_key" -> getAsJson("api_key"), 141 | "kafka_admin_url" -> getAsJson("kafka_admin_url"), 142 | "kafka_brokers_sasl" -> getAsJson("brokers"), 143 | "topic" -> topic.toJson)) 144 | 145 | val defaultAction = Some("dat/createTriggerActions.js") 146 | val defaultActionName = s"helloKafka-${currentTime}" 147 | 148 | assetHelper.withCleaner(wsk.action, defaultActionName) { (action, name) => 149 | action.create(name, defaultAction, annotations = Map(Annotations.ProvideApiKeyAnnotationName -> JsBoolean(true))) 150 | } 151 | 152 | assetHelper.withCleaner(wsk.rule, s"dummyMessageHub-helloKafka-$currentTime") { (rule, name) => 153 | rule.create(name, trigger = triggerName, action = defaultActionName) 154 | } 155 | 156 | val verificationName = s"trigger-$currentTime" 157 | 158 | assetHelper.withCleaner(wsk.trigger, verificationName) { (trigger, name) => 159 | trigger.get(name, NOT_FOUND) 160 | } 161 | 162 | // produce message 163 | val encodedMessage = Base64.getEncoder.encodeToString(verificationName.getBytes(StandardCharsets.UTF_8)) 164 | val base64ValueParams = validParameters + ("base64DecodeValue" -> true.toJson) + ("value" -> encodedMessage.toJson) 165 | 166 | println("Producing a message") 
167 | withActivation(wsk.activation, wsk.action.invoke(s"$messagingPackage/$messageHubProduce", base64ValueParams)) { 168 | _.response.success shouldBe true 169 | } 170 | 171 | retry(wsk.trigger.get(verificationName), 60, Some(1.second)) 172 | } 173 | 174 | it should "Post a message with a binary key" in withAssetCleaner(wskprops) { 175 | // create trigger 176 | val currentTime = s"${System.currentTimeMillis}" 177 | 178 | (wp, assetHelper) => 179 | val triggerName = s"/_/binaryKeyTrigger-$currentTime" 180 | 181 | createTrigger(assetHelper, triggerName, parameters = Map( 182 | "user" -> getAsJson("user"), 183 | "password" -> getAsJson("password"), 184 | "api_key" -> getAsJson("api_key"), 185 | "kafka_admin_url" -> getAsJson("kafka_admin_url"), 186 | "kafka_brokers_sasl" -> getAsJson("brokers"), 187 | "topic" -> topic.toJson)) 188 | 189 | val defaultAction = Some("dat/createTriggerActionsFromKey.js") 190 | val defaultActionName = s"helloKafka-${currentTime}" 191 | 192 | assetHelper.withCleaner(wsk.action, defaultActionName) { (action, name) => 193 | action.create(name, defaultAction, annotations = Map(Annotations.ProvideApiKeyAnnotationName -> JsBoolean(true))) 194 | } 195 | 196 | assetHelper.withCleaner(wsk.rule, s"dummyMessageHub-helloKafka-$currentTime") { (rule, name) => 197 | rule.create(name, trigger = triggerName, action = defaultActionName) 198 | } 199 | 200 | val verificationName = s"trigger-$currentTime" 201 | 202 | assetHelper.withCleaner(wsk.trigger, verificationName) { (trigger, name) => 203 | trigger.get(name, NOT_FOUND) 204 | } 205 | 206 | // produce message 207 | val encodedKey = Base64.getEncoder.encodeToString(verificationName.getBytes(StandardCharsets.UTF_8)) 208 | val base64ValueParams = validParameters + ("base64DecodeKey" -> true.toJson) + ("key" -> encodedKey.toJson) 209 | 210 | println("Producing a message") 211 | withActivation(wsk.activation, wsk.action.invoke(s"$messagingPackage/$messageHubProduce", base64ValueParams)) { 212 | _.response.success shouldBe true 213 | } 214 | 215 | retry(wsk.trigger.get(verificationName), 60, Some(1.second)) 216 | } 217 | } 218 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/packages/MessagingServiceTests.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | package system.packages 18 | 19 | import org.junit.runner.RunWith 20 | import org.scalatest.BeforeAndAfter 21 | import org.scalatest.FlatSpec 22 | import org.scalatest.Matchers 23 | import org.scalatest.junit.JUnitRunner 24 | import io.restassured.RestAssured 25 | 26 | @RunWith(classOf[JUnitRunner]) 27 | class MessagingServiceTests 28 | extends FlatSpec 29 | with BeforeAndAfter 30 | with Matchers { 31 | 32 | val healthEndpoint = "/health" 33 | 34 | val getMessagingAddress = 35 | if (System.getProperty("host") != "" && System.getProperty("port") != "") { 36 | "http://" + System.getProperty("host") + ":" + System.getProperty("port") 37 | } 38 | 39 | behavior of "Messaging feed provider endpoint" 40 | 41 | it should "return status code HTTP 200 OK from /health endpoint" in { 42 | val response = RestAssured.given().get(getMessagingAddress + healthEndpoint) 43 | 44 | assert(response.statusCode() == 200 && response.asString().contains("consumers")) 45 | } 46 | } 47 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/packages/actionHelper.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package system.packages 19 | 20 | import spray.json._ 21 | 22 | import common.Wsk 23 | import common.WskProps 24 | import common.WskTestHelpers 25 | 26 | object ActionHelper extends WskTestHelpers { 27 | 28 | implicit val wskprops = WskProps() 29 | val wsk = new Wsk() 30 | 31 | def runActionWithExpectedResult(actionName: String, inputFile: String, expectedOutput: JsObject, success: Boolean): Unit = { 32 | val run = wsk.action.invoke(actionName, parameterFile = Some(inputFile)) 33 | 34 | withActivation(wsk.activation, run) { 35 | activation => 36 | activation.response.result shouldBe Some(expectedOutput) 37 | activation.response.success shouldBe success 38 | } 39 | } 40 | 41 | } 42 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/stress/StressTest.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | package system.stress 19 | 20 | import system.utils.KafkaUtils 21 | 22 | import scala.concurrent.duration.DurationInt 23 | import scala.language.postfixOps 24 | 25 | import org.junit.runner.RunWith 26 | import org.scalatest.FlatSpec 27 | import org.scalatest.Matchers 28 | import org.scalatest.junit.JUnitRunner 29 | 30 | import common.TestHelpers 31 | import common.Wsk 32 | import common.WskActorSystem 33 | import common.WskProps 34 | import common.WskTestHelpers 35 | import spray.json.DefaultJsonProtocol._ 36 | import spray.json._ 37 | 38 | 39 | @RunWith(classOf[JUnitRunner]) 40 | class BasicStressTest 41 | extends FlatSpec 42 | with Matchers 43 | with WskActorSystem 44 | with TestHelpers 45 | with WskTestHelpers 46 | with KafkaUtils { 47 | 48 | val topic = "test" 49 | val sessionTimeout = 10 seconds 50 | 51 | implicit val wskprops = WskProps() 52 | val wsk = new Wsk() 53 | 54 | val messagingPackage = "/whisk.system/messaging" 55 | val messageHubFeed = "messageHubFeed" 56 | val messageHubProduce = "messageHubProduce" 57 | 58 | behavior of "Message Hub provider" 59 | 60 | it should "rapidly create and delete many triggers" in { 61 | stressTriggerCreateAndDelete(totalIterations = 100, keepNthTrigger = 5) 62 | } 63 | 64 | /* 65 | * Recursively create and delete (potentially) lots of triggers 66 | * 67 | * @param totalIterations The total number of triggers to create 68 | * @param keepNthTrigger Optionally, do not delete the trigger created on every N iterations 69 | * @param currentIteration Used for recursion 70 | * @param storedTriggers The list of trigger names that were created, but not deleted (see keepNthTrigger) 71 | */ 72 | def stressTriggerCreateAndDelete(totalIterations : Int, keepNthTrigger : Int, currentIteration : Int = 0, storedTriggers : List[String] = List[String]()) { 73 | if(currentIteration < totalIterations) { 74 | val currentTime = s"${System.currentTimeMillis}" 75 | 76 | // use this to print non-zero-based iteration numbers you know... 
for humans 77 | val iterationLabel = currentIteration + 1 78 | 79 | val triggerName = s"/_/dummyMessageHubTrigger-$currentTime" 80 | println(s"\nCreating trigger #${iterationLabel}: ${triggerName}") 81 | val feedCreationResult = wsk.trigger.create(triggerName, feed = Some(s"$messagingPackage/$messageHubFeed"), parameters = Map( 82 | "user" -> getAsJson("user"), 83 | "password" -> getAsJson("password"), 84 | "api_key" -> getAsJson("api_key"), 85 | "kafka_admin_url" -> getAsJson("kafka_admin_url"), 86 | "kafka_brokers_sasl" -> getAsJson("brokers"), 87 | "topic" -> topic.toJson)) 88 | 89 | println("Waiting for trigger create") 90 | withActivation(wsk.activation, feedCreationResult, initialWait = 5 seconds, totalWait = 60 seconds) { 91 | activation => 92 | // should be successful 93 | activation.response.success shouldBe true 94 | } 95 | 96 | // optionally allow triggers to pile up on the provider 97 | if((iterationLabel % keepNthTrigger) != 0) { 98 | println("Deleting trigger") 99 | val feedDeletionResult = wsk.trigger.delete(triggerName) 100 | feedDeletionResult.stdout should include("ok") 101 | stressTriggerCreateAndDelete(totalIterations, keepNthTrigger, currentIteration + 1, storedTriggers) 102 | } else { 103 | println("I think I'll keep this trigger...") 104 | stressTriggerCreateAndDelete(totalIterations, keepNthTrigger, currentIteration + 1, triggerName :: storedTriggers) 105 | } 106 | } else { 107 | println("\nCompleted all iterations, now cleaning up stored triggers.") 108 | for(triggerName <- storedTriggers) { 109 | println(s"Deleting trigger: ${triggerName}") 110 | val feedDeletionResult = wsk.trigger.delete(triggerName) 111 | feedDeletionResult.stdout should include("ok") 112 | } 113 | } 114 | } 115 | } 116 | -------------------------------------------------------------------------------- /tests/src/test/scala/system/utils/KafkaUtils.scala: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 
16 | */ 17 | 18 | package system.utils 19 | 20 | import java.util.HashMap 21 | import java.util.Properties 22 | import java.util.concurrent.{TimeUnit, TimeoutException} 23 | 24 | import io.restassured.RestAssured 25 | import io.restassured.config.{RestAssuredConfig, SSLConfig} 26 | import javax.security.auth.login.Configuration 27 | import javax.security.auth.login.AppConfigurationEntry 28 | import org.apache.kafka.clients.producer.KafkaProducer 29 | 30 | import scala.collection.mutable.ListBuffer 31 | import spray.json.DefaultJsonProtocol._ 32 | import spray.json._ 33 | import system.packages.ActionHelper._ 34 | import org.apache.openwhisk.utils.JsHelpers 35 | 36 | import scala.concurrent.duration.DurationInt 37 | import common.TestHelpers 38 | import common.TestUtils 39 | import common.WskTestHelpers 40 | import common.ActivationResult 41 | import org.apache.openwhisk.utils.retry 42 | import org.apache.kafka.clients.producer.ProducerRecord 43 | 44 | trait KafkaUtils extends TestHelpers with WskTestHelpers { 45 | lazy val messageHubProps = KafkaUtils.initializeMessageHub() 46 | 47 | def createProducer() : KafkaProducer[String, String] = { 48 | // currently only supporting MH 49 | new KafkaProducer[String, String](KafkaUtils.asKafkaProducerProps(this.messageHubProps)) 50 | } 51 | 52 | def apply(key : String) = { 53 | this.messageHubProps.getOrElse(key, "") 54 | } 55 | 56 | def getAsJson(key : String) = { 57 | key match { 58 | case key if key == "brokers" => this(key).asInstanceOf[List[String]].toJson 59 | case key => this(key).asInstanceOf[String].toJson 60 | } 61 | } 62 | 63 | val sslconfig = { 64 | val inner = new SSLConfig().allowAllHostnames() 65 | val config = inner.relaxedHTTPSValidation() 66 | new RestAssuredConfig().sslConfig(config) 67 | } 68 | 69 | def createTrigger(assetHelper: AssetCleaner, name: String, parameters: Map[String, spray.json.JsValue]): String = { 70 | println(s"Creating trigger $name") 71 | 72 | val feedCreationResult = assetHelper.withCleaner(wsk.trigger, name) { 73 | (trigger, _) => 74 | trigger.create(name, feed = Some(s"/whisk.system/messaging/messageHubFeed"), parameters = parameters) 75 | } 76 | 77 | val activation = wsk.parseJsonString(feedCreationResult.stdout.substring(0, feedCreationResult.stdout.indexOf("ok: created trigger"))).convertTo[ActivationResult] 78 | 79 | // should be successful 80 | activation.response.success shouldBe true 81 | 82 | // It takes a moment for the consumer to fully initialize. 
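        // consumerInitTime is 10 seconds (defined in the companion object below); after
        // the sleep, consumerExists() polls every /health endpoint listed in the
        // health_url system property until the new consumer reports currentState "Running".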
83 | println("Giving the consumer a moment to get ready") 84 | Thread.sleep(KafkaUtils.consumerInitTime) 85 | 86 | val uuid = activation.response.result.get.fields.get("uuid").get.toString().replaceAll("\"", "") 87 | consumerExists(uuid) 88 | 89 | uuid 90 | } 91 | 92 | 93 | def consumerExists(uuid: String) = { 94 | println("Checking health endpoint(s) for existence of consumer uuid") 95 | // get /health endpoint(s) and ensure it contains the new uuid 96 | val healthUrls: Array[String] = System.getProperty("health_url").split("\\s*,\\s*").filterNot(_.isEmpty) 97 | assert(healthUrls.size != 0) 98 | 99 | retry({ 100 | val uuids: Array[(String, JsValue)] = healthUrls.flatMap(u => { 101 | val response = RestAssured.given().config(sslconfig).get(u) 102 | assert(response.statusCode() == 200) 103 | 104 | response.asString() 105 | .parseJson 106 | .asJsObject 107 | .getFields("consumers") 108 | .head 109 | .convertTo[JsArray] 110 | .elements 111 | .flatMap(c => { 112 | val consumer = c.asJsObject.fields.head 113 | consumer match { 114 | case (u, v) if u == uuid && v.asJsObject.getFields("currentState").head == "Running".toJson => Some(consumer) 115 | case _ => None 116 | } 117 | }) 118 | }) 119 | 120 | assert(uuids.nonEmpty) 121 | }, N = 60, waitBeforeRetry = Some(1.second)) 122 | } 123 | 124 | def produceMessage(topic: String, key: String, value: String) = { 125 | println(s"Producing message with key: $key and value: $value") 126 | val producer = createProducer() 127 | val record = new ProducerRecord(topic, key, value) 128 | val future = producer.send(record) 129 | 130 | producer.flush() 131 | producer.close() 132 | 133 | try { 134 | val result = future.get(60, TimeUnit.SECONDS) 135 | 136 | println(s"Produced message to topic: ${result.topic()} on partition: ${result.partition()} at offset: ${result.offset()} with timestamp: ${result.timestamp()}.") 137 | } catch { 138 | case e: TimeoutException => 139 | fail(s"TimeoutException received waiting for message to be produced to topic: $topic with key: $key and value: $value. 
${e.getMessage}") 140 | case e: Exception => throw e 141 | } 142 | } 143 | } 144 | 145 | object KafkaUtils { 146 | val consumerInitTime = 10000 // ms 147 | 148 | def asKafkaProducerProps(props : Map[String,Object]) : Properties = { 149 | val requiredKeys = List("brokers", 150 | "user", 151 | "password", 152 | "key.serializer", 153 | "value.serializer", 154 | "security.protocol", 155 | "max.request.size") 156 | 157 | val propertyMap = props.filterKeys( 158 | requiredKeys.contains(_) 159 | ).map( 160 | tuple => 161 | tuple match { 162 | // transform "brokers" key to "bootstrap.servers" 163 | case (k, v) if k == "brokers" => ("bootstrap.servers", v.asInstanceOf[List[String]].mkString(",")) 164 | case _ => tuple 165 | } 166 | ) 167 | 168 | val kafkaProducerProps = new Properties() 169 | for ((k, v) <- propertyMap) kafkaProducerProps.put(k, v) 170 | 171 | kafkaProducerProps 172 | } 173 | 174 | def messagesInActivation(activation : JsObject, field: String, value: String) : Array[JsObject] = { 175 | val messages = JsHelpers.getFieldPath(activation, "response", "result", "messages").getOrElse(JsArray.empty).convertTo[Array[JsObject]] 176 | messages.filter { 177 | JsHelpers.getFieldPath(_, field) == Some(value.toJson) 178 | } 179 | } 180 | 181 | private def initializeMessageHub() = { 182 | // get the vcap stuff 183 | var credentials = TestUtils.getCredentials("message_hub") 184 | 185 | // initialize the set of tuples to go into the resulting Map 186 | val user = ("user", credentials.get("user").getAsString()) 187 | val password = ("password", credentials.get("password").getAsString()) 188 | val kafka_admin_url = ("kafka_admin_url", credentials.get("kafka_admin_url").getAsString()) 189 | val api_key = ("api_key", credentials.get("api_key").getAsString()) 190 | val security_protocol = ("security.protocol", "SASL_SSL"); 191 | val keySerializer = ("key.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 192 | val valueSerializer = ("value.serializer", "org.apache.kafka.common.serialization.StringSerializer"); 193 | val maxRequestSize = ("max.request.size", "3000000"); 194 | var brokerList = new ListBuffer[String]() 195 | val jsonArray = credentials.get("kafka_brokers_sasl").getAsJsonArray() 196 | val brokerIterator = jsonArray.iterator() 197 | while(brokerIterator.hasNext()) { 198 | val current = brokerIterator.next().getAsString 199 | brokerList += current 200 | } 201 | 202 | val brokers = ("brokers", brokerList.toList) 203 | 204 | System.setProperty("java.security.auth.login.config", "") 205 | setMessageHubSecurityConfiguration(user._2, password._2) 206 | 207 | Map(user, password, kafka_admin_url, api_key, brokers, security_protocol, keySerializer, valueSerializer, maxRequestSize) 208 | } 209 | 210 | private def setMessageHubSecurityConfiguration(user: String, password: String) = { 211 | val map = new HashMap[String, String]() 212 | map.put("serviceName", "kafka") 213 | map.put("username", user) 214 | map.put("password", password) 215 | Configuration.setConfiguration(new Configuration() 216 | { 217 | def getAppConfigurationEntry(name: String): Array[AppConfigurationEntry] = Array( 218 | new AppConfigurationEntry ( 219 | "com.ibm.messagehub.login.MessageHubLoginModule", 220 | AppConfigurationEntry.LoginModuleControlFlag.REQUIRED, map)) 221 | }) 222 | } 223 | } 224 | -------------------------------------------------------------------------------- /tools/.gitignore: -------------------------------------------------------------------------------- 1 | **/node_modules 2 | 
-------------------------------------------------------------------------------- /tools/travis/build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Licensed to the Apache Software Foundation (ASF) under one or more 5 | # contributor license agreements. See the NOTICE file distributed with 6 | # this work for additional information regarding copyright ownership. 7 | # The ASF licenses this file to You under the Apache License, Version 2.0 8 | # (the "License"); you may not use this file except in compliance with 9 | # the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | # 19 | 20 | set -e 21 | 22 | # Build script for Travis-CI. 23 | 24 | SCRIPTDIR=$(cd $(dirname "$0") && pwd) 25 | ROOTDIR="$SCRIPTDIR/../.." 26 | UTILDIR="$ROOTDIR/../openwhisk-utilities" 27 | 28 | # run scancode 29 | cd $UTILDIR 30 | scancode/scanCode.py $ROOTDIR 31 | -------------------------------------------------------------------------------- /tools/travis/deploy.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Licensed to the Apache Software Foundation (ASF) under one or more 5 | # contributor license agreements. See the NOTICE file distributed with 6 | # this work for additional information regarding copyright ownership. 7 | # The ASF licenses this file to You under the Apache License, Version 2.0 8 | # (the "License"); you may not use this file except in compliance with 9 | # the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | # 19 | 20 | set -eu 21 | 22 | dockerhub_image_prefix="$1" 23 | dockerhub_image_name="$2" 24 | dockerhub_image_tag="$3" 25 | dockerhub_image="${dockerhub_image_prefix}/${dockerhub_image_name}:${dockerhub_image_tag}" 26 | 27 | docker login -u "${DOCKER_USER}" -p "${DOCKER_PASSWORD}" 28 | 29 | echo docker build . --tag ${dockerhub_image} 30 | docker build . 
--tag ${dockerhub_image} 31 | 32 | echo docker push ${dockerhub_image} 33 | docker push ${dockerhub_image} 34 | 35 | # if image tag is nightly, also push a tag with the hash commit 36 | if [ ${dockerhub_image_tag} == "nightly" ]; then 37 | short_commit=`git rev-parse --short HEAD` 38 | dockerhub_githash_image="${dockerhub_image_prefix}/${dockerhub_image_name}:${short_commit}" 39 | 40 | echo docker tag ${dockerhub_image} ${dockerhub_githash_image} 41 | docker tag ${dockerhub_image} ${dockerhub_githash_image} 42 | 43 | echo docker push ${dockerhub_githash_image} 44 | docker push ${dockerhub_githash_image} 45 | fi 46 | -------------------------------------------------------------------------------- /tools/travis/setup.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # 4 | # Licensed to the Apache Software Foundation (ASF) under one or more 5 | # contributor license agreements. See the NOTICE file distributed with 6 | # this work for additional information regarding copyright ownership. 7 | # The ASF licenses this file to You under the Apache License, Version 2.0 8 | # (the "License"); you may not use this file except in compliance with 9 | # the License. You may obtain a copy of the License at 10 | # 11 | # http://www.apache.org/licenses/LICENSE-2.0 12 | # 13 | # Unless required by applicable law or agreed to in writing, software 14 | # distributed under the License is distributed on an "AS IS" BASIS, 15 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 16 | # See the License for the specific language governing permissions and 17 | # limitations under the License. 18 | # 19 | 20 | SCRIPTDIR=$(cd $(dirname "$0") && pwd) 21 | HOMEDIR="$SCRIPTDIR/../../../" 22 | WHISKDIR="$HOMEDIR/openwhisk" 23 | 24 | sudo gpasswd -a travis docker 25 | sudo -E bash -c 'echo '\''DOCKER_OPTS="-H tcp://0.0.0.0:4243 -H unix:///var/run/docker.sock --api-enable-cors --storage-driver=aufs"'\'' > /etc/default/docker' 26 | 27 | # Docker 28 | sudo apt-get -y update -qq 29 | sudo apt-get -o Dpkg::Options::="--force-confold" --force-yes -y install docker-engine=1.12.0-0~trusty 30 | sudo service docker restart 31 | echo "Docker Version:" 32 | docker version 33 | echo "Docker Info:" 34 | docker info 35 | 36 | # Python 37 | sudo apt-get -y install python-pip 38 | pip install --user jsonschema 39 | 40 | # Ansible 41 | pip install --user ansible==2.1.2.0 42 | 43 | # clone OpenWhisk repo. in order to run scanCode.py 44 | cd $HOMEDIR 45 | git clone https://github.com/apache/openwhisk-utilities.git 46 | 47 | # OpenWhisk stuff 48 | cd $HOMEDIR 49 | git clone https://github.com/apache/openwhisk.git openwhisk 50 | cd $WHISKDIR 51 | 52 | TERM=dumb ./gradlew \ 53 | :common:scala:install \ 54 | :core:controller:install \ 55 | :core:invoker:install \ 56 | :tests:install 57 | -------------------------------------------------------------------------------- /tools/verifyDBMigration/index.js: -------------------------------------------------------------------------------- 1 | /* 2 | * Licensed to the Apache Software Foundation (ASF) under one or more 3 | * contributor license agreements. See the NOTICE file distributed with 4 | * this work for additional information regarding copyright ownership. 5 | * The ASF licenses this file to You under the Apache License, Version 2.0 6 | * (the "License"); you may not use this file except in compliance with 7 | * the License. 
You may obtain a copy of the License at 8 | * 9 | * http://www.apache.org/licenses/LICENSE-2.0 10 | * 11 | * Unless required by applicable law or agreed to in writing, software 12 | * distributed under the License is distributed on an "AS IS" BASIS, 13 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 14 | * See the License for the specific language governing permissions and 15 | * limitations under the License. 16 | */ 17 | 18 | var shell = require('shelljs'); 19 | 20 | // will be populated by processArgs() 21 | var cloudant, 22 | cloudant_user, 23 | cloudant_pass, 24 | db_name; 25 | 26 | function processArgs() { 27 | if(process.argv.length != 5) { 28 | console.log('USAGE: node index.js CLOUDANT_USER CLOUDANT_PASS DB_NAME'); 29 | process.exit(1); 30 | } else { 31 | cloudant_user = process.argv[2]; 32 | cloudant_pass = process.argv[3]; 33 | db_name = process.argv[4]; 34 | 35 | const Cloudant = require('cloudant'); 36 | cloudant = Cloudant({ 37 | url: `https://${cloudant_user}:${cloudant_pass}@${cloudant_user}.cloudant.com`, 38 | plugin: 'promises' 39 | }); 40 | } 41 | } 42 | 43 | function verifyDBMigration() { 44 | return verifyDBCreatedWithDesignDoc("container0", false) 45 | .then(() => { 46 | return verifyDBCreatedWithDesignDoc("container1", true); 47 | }) 48 | .catch(err => console.error(`Failed to validate migration: ${JSON.stringify(err)}`)); 49 | } 50 | 51 | function verifyDBCreatedWithDesignDoc(containerName, letContainerCreateDB) { 52 | return destroyDBIfNeeded() 53 | .then(() => { 54 | if(!letContainerCreateDB) { 55 | console.log(`Creating DB`); 56 | return cloudant.db.create(db_name) 57 | } else { 58 | console.log(`Letting the container create the DB`); 59 | } 60 | }) 61 | .then(() => { 62 | console.log(`Firing up the docker container`); 63 | return startDockerContainer(containerName) 64 | }) 65 | .then(() => { 66 | return verifyView(); 67 | }); 68 | } 69 | 70 | function destroyDBIfNeeded() { 71 | console.log('destroying db'); 72 | return cloudant.db.list() 73 | .then(existingDBs => { 74 | if(existingDBs.indexOf(db_name) >= 0) { 75 | console.log(`${db_name} already exists - DESTROY!`); 76 | return cloudant.db.destroy(db_name); 77 | } 78 | }); 79 | } 80 | 81 | function startDockerContainer(containerName) { 82 | var dockerStartStopPromise = new Promise((resolve, reject) => { 83 | var returnCode = shell.exec(`docker run -d --name ${containerName} -e CLOUDANT_USER=${cloudant_user} -e CLOUDANT_PASS=${cloudant_pass} openwhisk/kafkaprovider`).code 84 | 85 | if(returnCode != 0) { 86 | reject(`Failed to start docker container: ${returnCode}`); 87 | return; 88 | } 89 | 90 | console.log("Giving the container some time to start up..."); 91 | setTimeout(function() { 92 | console.log("Stopping the container"); 93 | var returnCode = shell.exec(`docker stop ${containerName}`).code; 94 | if(returnCode != 0) { 95 | reject('Failed to stop docker container'); 96 | return; 97 | } 98 | 99 | console.log("Deleting the container"); 100 | returnCode = shell.exec(`docker rm ${containerName}`).code; 101 | if(returnCode != 0) { 102 | reject('Failed to delete container'); 103 | return; 104 | } 105 | 106 | resolve(); 107 | }, 20000); 108 | }); 109 | 110 | return dockerStartStopPromise; 111 | } 112 | 113 | function verifyView() { 114 | var db = cloudant.db.use(db_name); 115 | 116 | console.log('Verifying view exists and works as expected'); 117 | return ensureViewReturns(db, 0) 118 | .then(() => { 119 | return db.insert({ 120 | triggerURL: 'this is the only property needed by the 
view' 121 | }); 122 | }) 123 | .then(() => { 124 | // give it a few extra seconds to make sure the view is indexed 125 | return new Promise((resolve, reject) => { 126 | setTimeout(() => { 127 | ensureViewReturns(db, 1) 128 | .then(resolve) 129 | .catch(reject); 130 | }, 3000); 131 | }); 132 | }); 133 | } 134 | 135 | function ensureViewReturns(db, expectedNumberOfRows) { 136 | return db.view('filters', 'only-triggers', {include_docs: false}) 137 | .then(results => { 138 | if(results.rows.length != expectedNumberOfRows) { 139 | return Promise.reject(`Expected view to contain ${expectedNumberOfRows} rows but got ${results.rows.length}`); 140 | } 141 | }); 142 | } 143 | 144 | processArgs(); 145 | verifyDBMigration() 146 | .then(() => console.log('done!')) 147 | .catch(err => console.error(JSON.stringify(err))); 148 | -------------------------------------------------------------------------------- /tools/verifyDBMigration/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "verifyDBMigration", 3 | "version": "0.0.1", 4 | "dependencies": { 5 | "cloudant": "^1.7.1", 6 | "shelljs": "^0.7.7" 7 | } 8 | } 9 | --------------------------------------------------------------------------------
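For reference, the verifyDBMigration tool above is run as node index.js CLOUDANT_USER CLOUDANT_PASS DB_NAME after installing its cloudant and shelljs dependencies. It exercises the openwhisk/kafkaprovider image twice: once against a database created up front (container0) and once letting the container create the database itself (container1), and in both cases asserts that the filters/only-triggers view exists and indexes exactly the documents that carry a triggerURL field.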