├── .gitignore ├── .gitmodules ├── README.md ├── deck └── .gitkeep ├── metadata.yml ├── resources ├── beam-on-kda │ └── cdk │ │ ├── .gitignore │ │ ├── .npmignore │ │ ├── README.md │ │ ├── bin │ │ └── cdk.ts │ │ ├── cdk.json │ │ ├── cdk.out │ │ ├── streaming-analytics-workshop-beam-event-engine.template.json │ │ ├── streaming-analytics-workshop-beam.template.json │ │ └── tree.json │ │ ├── jest.config.js │ │ ├── lambda │ │ ├── add-approximate-arrival-time.js │ │ ├── build-pipeline-helper.py │ │ ├── empty-bucket.py │ │ ├── get-emr-master-id.py │ │ └── stop-kda-app.py │ │ ├── lib │ │ ├── cloud9-dev-environment.ts │ │ ├── empty-bucket.ts │ │ ├── github-build-pipeline.ts │ │ ├── windows-dev-environment.ts │ │ └── workshop-infrastructure.ts │ │ ├── package-lock.json │ │ ├── package.json │ │ ├── test │ │ └── cdk.test.ts │ │ └── tsconfig.json └── flink-on-kda │ ├── cdk │ ├── .gitignore │ ├── .npmignore │ ├── README.md │ ├── bin │ │ └── cdk.ts │ ├── cdk.json │ ├── cdk.out │ │ └── StreamingAnalyticsWorkshop.template.json │ ├── jest.config.js │ ├── lambda │ │ ├── build-pipeline-helper.py │ │ └── empty-bucket.py │ ├── lib │ │ ├── build-workshop-resources.ts │ │ ├── empty-bucket.ts │ │ ├── github-build-pipeline.ts │ │ └── workshop-infrastructure.ts │ ├── package-lock.json │ ├── package.json │ ├── test │ │ └── cdk.test.ts │ └── tsconfig.json │ ├── index.py │ ├── sample-dashboard.yaml │ └── step-scaling.yaml └── workshop ├── .hugo_build.lock ├── buildspec.yml ├── config.toml ├── content ├── _index.en.md ├── beam-on-kda │ ├── _index.en.md │ ├── beam-on-kda │ │ ├── _index.en.md │ │ ├── flink-dashboard.en.md │ │ ├── jackson-dependencies.en.md │ │ ├── runtime-parameters.en.md │ │ └── temporary-credentials.en.md │ ├── cleanup │ │ └── _index.en.md │ ├── configure-dev-env │ │ ├── _index.en.md │ │ └── configure-cloud9.en.md │ ├── create-infrastructure │ │ ├── _index.en.md │ │ ├── create-stream.en.md │ │ ├── firehose │ │ │ ├── _index.en.md │ │ │ ├── choose-destination.en.md │ │ │ ├── 
configure-settings.en.md │ │ │ ├── name-and-source.en.md │ │ │ ├── process-records.en.md │ │ │ └── review.en.md │ │ └── ingest-data.en.md │ ├── deploy-batch-pipeline │ │ ├── _index.en.md │ │ ├── configure-kda-batch.en.md │ │ └── execute-beam-emr.md │ ├── deploy-streaming-pipeline │ │ ├── _index.en.md │ │ ├── beam-pipeline-details.en.md │ │ ├── build-uber-jar.en.md │ │ ├── configure-kda-app.en.md │ │ ├── create-kda-app.en.md │ │ ├── start-kda-app.en.md │ │ └── update-kda-app.en.md │ ├── getting-started │ │ ├── _index.en.md │ │ ├── aws-event.en.md │ │ └── self-paced.en.md │ ├── monitoring-logging-profiling │ │ ├── _index.en.md │ │ ├── cw-metrics.en.md │ │ ├── inspect-flink-application-logs.en.md │ │ └── profile-flink-application.en.md │ └── overview │ │ └── _index.en.md ├── flink-on-kda-studio │ ├── 1_Getting_Started │ │ ├── _index.en.md │ │ ├── aws_event.en.md │ │ └── self_paced.en.md │ ├── 2_Enviorment_Set_Up │ │ └── _index.en.md │ ├── 3_Send_Data_to_Kinesis_Data_Stream │ │ └── _index.en.md │ ├── 4_Interactive_Notebook │ │ └── _index.en.md │ ├── 5_Deployable_Notebook │ │ └── _index.en.md │ ├── 6_Clean_Up │ │ └── _index.en.md │ └── _index.en.md ├── flink-on-kda │ ├── _index.en.md │ ├── advanced-scale-and-monitor │ │ ├── _index.en.md │ │ ├── cleanup.md │ │ ├── flink-dashboard │ │ │ ├── _index.en.md │ │ │ ├── access-dashboard.md │ │ │ ├── dashboard-job-insights.md │ │ │ ├── dashboard-task-insights.md │ │ │ └── troubleshooting-with-dashboard.md │ │ ├── monitoring │ │ │ ├── _index.en.md │ │ │ ├── cfn-launch-dashboard.md │ │ │ ├── metrics-deep-dive.md │ │ │ └── metrics-to-monitor.md │ │ └── scaling │ │ │ ├── _index.en.md │ │ │ ├── advanced-scaling-components.md │ │ │ ├── getting-started.md │ │ │ └── resources.md │ ├── build-visualization │ │ ├── _index.en.md │ │ ├── create-elasticsearch-indices.en.md │ │ ├── import-kibana-dashboard.en.md │ │ └── ingest-data-elasticsearch.en.md │ ├── cleanup │ │ └── _index.en.md │ ├── configure │ │ ├── _index.en.md │ │ ├── 
configure-intellij.en.md │ │ ├── connect-rdp.en.md │ │ └── retrieve-password.en.md │ ├── deploy-to-kda │ │ ├── _index.en.md │ │ ├── build-uber-jar.en.md │ │ ├── configure-kda-app.en.md │ │ ├── create-kda-app.en.md │ │ └── start-kda-app.en.md │ ├── extend-flink-program │ │ ├── _index.en.md │ │ ├── add-aes-sink.en.md │ │ ├── event-time.en.md │ │ ├── kda-parameters.en.md │ │ └── window-aggregation.en.md │ ├── getting-started │ │ ├── _index.en.md │ │ ├── aws-event.en.md │ │ └── self-paced.en.md │ ├── ingest-events │ │ ├── _index.en.md │ │ ├── create-stream.en.md │ │ └── ingest-data.en.md │ ├── introduction │ │ └── _index.en.md │ ├── local-flink-development │ │ ├── _index.en.md │ │ ├── anatomy-flink-program.en.md │ │ ├── debug-code.en.md │ │ └── execute-code.en.md │ └── scale-monitor │ │ ├── _index.en.md │ │ ├── cw-metrics.en.md │ │ ├── inspect-flink-application-logs.en.md │ │ ├── scale-kda.en.md │ │ └── scale-kinesis.en.md └── more-resources.en.md ├── layouts ├── .DS_Store ├── index.json └── partials │ └── custom-footer.html ├── static ├── images │ ├── beam-on-kda │ │ ├── apn-logo.jpg │ │ ├── aws-open-source.jpg │ │ ├── beam-on-kda │ │ │ ├── app-properties.png │ │ │ └── beam-app-properties.png │ │ ├── cf-emr-connect.png │ │ ├── cfn-1-create-stack.png │ │ ├── cfn-2-stack-details.png │ │ ├── cfn-4-confirm-capabilities.png │ │ ├── cfn-5-completed.png │ │ ├── cfn-6-parameters.png │ │ ├── cfn-6-secrets.png │ │ ├── cfn_c9_output.png │ │ ├── clean-cf.png │ │ ├── cloud9-3-ingest.png │ │ ├── cloudformation-launch-stack.png │ │ ├── cw-dashboard-1-filter.png │ │ ├── cw-dashboard-2-select-metrics.png │ │ ├── cw-dashboard-3-metric-properties.png │ │ ├── cw-dashboard-4-configure-log.png │ │ ├── cw-dashboard-5-scale-kds.png │ │ ├── cw-dashboard-6-log-result.png │ │ ├── cw-dashboard-total-number-trips.png │ │ ├── cw-dashboard-trips-backfilled.png │ │ ├── cw-dashboard-trips-by-borough.png │ │ ├── emr-copy-dns-name.png │ │ ├── emr-flink-dashboard-job.png │ │ ├── 
emr-flink-dashboard-overview.png │ │ ├── emr-resource-manager-application.png │ │ ├── emr-resource-manager.png │ │ ├── intellij-1-welcome.png │ │ ├── intellij-2-clone.png │ │ ├── intellij-3-ingest.png │ │ ├── intellij-7-maven-package.png │ │ ├── kda-batch-flink-dashboard.png │ │ ├── kda-cf.png │ │ ├── kda-config.png │ │ ├── kda-config2.png │ │ ├── kda-configure-review.png │ │ ├── kda-create-app.png │ │ ├── kda-create-review.png │ │ ├── kda-flink-streaming-dahsboard.png │ │ ├── kda-monitor-novpc.png │ │ ├── kda-monitoring.png │ │ ├── kda-nographlog.png │ │ ├── kda-nographlogerror.png │ │ ├── kda-patch-job-graph.png │ │ ├── kda-prop-grp1.png │ │ ├── kda-property-group.png │ │ ├── kda-running-beamapp.png │ │ ├── kda-update-config-batch.png │ │ ├── kda-updateborough.png │ │ ├── kds-create-stream-active.png │ │ ├── kds-create-stream.png │ │ ├── kfh-cf.png │ │ ├── kfh-cflambda.png │ │ ├── kfh-check.png │ │ ├── kfh-check2.png │ │ ├── kfh-configure.png │ │ ├── kfh-create.png │ │ ├── kfh-lambda.png │ │ ├── kfh-process.png │ │ ├── kfh-review.png │ │ ├── kfh-s3.png │ │ ├── kfh-selectsources3.png │ │ ├── kinesis-check.png │ │ ├── kinesis-welcome-create-kda.png │ │ ├── kinesis-welcome-create-stream.png │ │ ├── overview-beamarchitecture.png │ │ ├── overview-cw.png │ │ ├── profiler-group-1.png │ │ ├── profiler-group-2.png │ │ ├── profiler-group-3.png │ │ ├── profiler-group-4.png │ │ ├── profiler-group-5.png │ │ ├── remote-desktop-1-create.png │ │ ├── remote-desktop-2-credentials.png │ │ └── secrets-manager-retrieve-secret.png │ ├── flink-on-kda-studio │ │ ├── S3_final_view.png │ │ ├── attach_policies.png │ │ ├── build.png │ │ ├── close_welcome_message.png │ │ ├── cloud_shell_button.png │ │ ├── delete_data_stream.png │ │ ├── delete_s3.png │ │ ├── delete_streaming_app.png │ │ ├── delete_studio.png │ │ ├── deploy.png │ │ ├── deployed_run.png │ │ ├── event_engine_1.png │ │ ├── event_engine_2.png │ │ ├── event_engine_3.png │ │ ├── event_engine_4.png │ │ ├── event_engine_5.png │ │ ├── 
glue_catalog.png │ │ ├── glue_database_select.png │ │ ├── glue_name_database.png │ │ ├── interactive_notebook.png │ │ ├── producer_notebook.png │ │ ├── select_IAM.png │ │ ├── studio_set_up.png │ │ ├── workshop_architecture.png │ │ └── zeppelin_1.png │ └── flink-on-kda │ │ ├── advanced-monitoring-cw-1-uptime-and-downtime.png │ │ ├── advanced-monitoring-cw-2-checkpoint.png │ │ ├── advanced-monitoring-cw-3-cpumemory.png │ │ ├── advanced-monitoring-cw-4-app-progress.png │ │ ├── advanced-monitoring-cw-5-source-and-sink.png │ │ ├── advanced-monitoring-index-1-cw-dashboard.png │ │ ├── advanced-monitoring-index-3-flink-dashboard.png │ │ ├── advanced-monitoring-metrics-1-cfn-launch.png │ │ ├── advanced-monitoring-metrics-2-cfn-launch.png │ │ ├── advanced-monitoring-metrics-3-cfn-launch.png │ │ ├── advanced-monitoring-metrics-4-cfn-launch.png │ │ ├── advanced-monitoring-metrics-5-cfn-launch.png │ │ ├── advanced-scaling-cfn-1-create-stack.png │ │ ├── advanced-scaling-cfn-2-stack-details.png │ │ ├── advanced-scaling-cfn-3-capabilities.png │ │ ├── advanced-scaling-cw-alarm.png │ │ ├── advanced-scaling-cw-edit-scale-out.png │ │ ├── advanced-scaling-cw-scale-out.png │ │ ├── aes-domain-details.png │ │ ├── apn-logo.jpg │ │ ├── aws-open-source.jpg │ │ ├── cfn-1-create-stack.png │ │ ├── cfn-2-stack-details.png │ │ ├── cfn-4-confirm-capabilities.png │ │ ├── cfn-5-completed.png │ │ ├── cfn-6-parameters.png │ │ ├── click-on-window-box.gif │ │ ├── cloudformation-launch-stack.png │ │ ├── cw-alarm-in-alarm.png │ │ ├── cw-dashboard-1-filter.png │ │ ├── cw-dashboard-2-select-metrics.png │ │ ├── cw-dashboard-3-metric-properties.png │ │ ├── cw-dashboard-4-configure-log.png │ │ ├── cw-dashboard-5-scale-kds.png │ │ ├── cw-dashboard-6-scale-kda.png │ │ ├── cw-in-alarm-2.png │ │ ├── cw-in-alarm.png │ │ ├── enhanced-monitoring-1.jpg │ │ ├── flink-dashboard-backpressure.png │ │ ├── flink-dashboard-checkpoint-history.png │ │ ├── flink-dashboard-checkpoints-tab.png │ │ ├── 
flink-dashboard-exception-tab.png │ │ ├── flink-dashboard-exception.png │ │ ├── flink-dashboard-operator-subtasks.png │ │ ├── flink-dashboard-task-managers.png │ │ ├── flink-dashboard-watermarks.png │ │ ├── flink-dashboard-wide-view.png │ │ ├── flink-web-dashboard-task-list.png │ │ ├── flink-web-dashboard-tasks.png │ │ ├── flink-web-dashboard-top-level-tabs.png │ │ ├── high-level-architecture.png │ │ ├── iam-1-create-user.png │ │ ├── iam-2-attach-policy.png │ │ ├── iam-3-create-user.png │ │ ├── iam-4-save-url.png │ │ ├── iam-kda-1-create-role.png │ │ ├── iam-kda-2-select-service.png │ │ ├── iam-kda-3-select-permissions.png │ │ ├── iam-kda-4-review.png │ │ ├── intellij-1-welcome.png │ │ ├── intellij-10-configuration-details.png │ │ ├── intellij-11-replay-lag.png │ │ ├── intellij-12-pom-reimport.png │ │ ├── intellij-13-replay-lag-copy.png │ │ ├── intellij-2-clone.png │ │ ├── intellij-3-ingest.png │ │ ├── intellij-4-execute-flink.png │ │ ├── intellij-5-execute-flink-output.png │ │ ├── intellij-6-debug.png │ │ ├── intellij-7-maven-package.png │ │ ├── intellij-8-maven-install.png │ │ ├── intellij-9-edit-configuration.png │ │ ├── kda-configure-scaling.png │ │ ├── kda-configure.png │ │ ├── kda-create-app-permissions.png │ │ ├── kda-create-app.png │ │ ├── kda-monitoring.png │ │ ├── kda-property-group.png │ │ ├── kda-running-app.png │ │ ├── kda-scale-app.png │ │ ├── kds-create-stream.png │ │ ├── kds-update-shards.png │ │ ├── kibana-1-create-index.png │ │ ├── kibana-2-import-objects.png │ │ ├── kibana-3-upload-json.png │ │ ├── kibana-4-default-index.png │ │ ├── kibana-5-visualizatio-partial.png │ │ ├── kibana-visualization.png │ │ ├── kinesis-welcome-create-kda.png │ │ ├── kinesis-welcome-create-stream.png │ │ ├── open-flink-dashboard.png │ │ ├── parallel-data-flow-flink.png │ │ ├── parallelism-updated-cw-alarm-scale-out.png │ │ ├── remote-desktop-1-create.png │ │ ├── remote-desktop-2-credentials.png │ │ ├── secrets-manager-retrieve-secret.png │ │ └── 
workshop-architecture.png └── js │ └── kinesis.js └── themes └── .DS_Store /.gitignore: -------------------------------------------------------------------------------- 1 | .DS_Store 2 | */.DS_Store 3 | .vscode/ 4 | .hugo_build.lock -------------------------------------------------------------------------------- /.gitmodules: -------------------------------------------------------------------------------- 1 | [submodule "workshop/themes/learn"] 2 | path = workshop/themes/learn 3 | url = https://github.com/matcornic/hugo-theme-learn.git 4 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | ## Streaming Analytics Workshop 2 | 3 | ⛔️ DEPRECATED ⛔️ 4 | 5 | The sources of this repository have been previously used to generate the https://streaming-analytics.workshop.aws/ webpage. This is no longer the case and therefore the repository is set to archive mode. 6 | -------------------------------------------------------------------------------- /deck/.gitkeep: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/deck/.gitkeep -------------------------------------------------------------------------------- /metadata.yml: -------------------------------------------------------------------------------- 1 | name: streaming-analytics 2 | title: Streaming Analytics Workshop 3 | description: Analyze the telemetry data of a taxi fleet in New York City in near-real time to optimize the fleet operation 4 | categories: 5 | - Analytics 6 | services: 7 | - Kinesis Data Streams 8 | - Kinesis Data Analytics 9 | - Amazon Elasticsearch Service 10 | level: 400 11 | duration: 120 12 | #cost - Cost in USD. 
If the content is offered without cost, enter 0 13 | cost: 0 14 | author: shausma 15 | audience: 16 | - IT Professional 17 | - Developer 18 | -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/.gitignore: -------------------------------------------------------------------------------- 1 | 2 | *.js 3 | *.d.ts 4 | node_modules 5 | cdk.out/manifest.json 6 | cdk.out/cdk.out 7 | 8 | !lambda/*.js 9 | 10 | # CDK asset staging directory 11 | .cdk.staging 12 | -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/README.md: -------------------------------------------------------------------------------- 1 | # Useful commands 2 | 3 | - `npm run build` compile typescript to js 4 | - `npm run watch` watch for changes and compile 5 | - `npm run test` perform the jest unit tests 6 | - `cdk deploy` deploy this stack to your default AWS account/region 7 | - `cdk diff` compare deployed stack with current state 8 | - `cdk synth` emits the synthesized CloudFormation template 9 | 10 | ## First time setup with local CDK Deployment 11 | 12 | To set up local CDK and deploy against an AWS profile of your choice, need to install CDK and install dependencies. Once this is done run cdk deploy, note the cf file is > 50KiB so need to upload to a S3 bucket first. The CDK deploy takes about 10 minutes to run. 
13 | 14 | - Follow this link to install [AWS CDK](https://docs.aws.amazon.com/cdk/latest/guide/getting_started.html) 15 | - Add your AWS Account and credentials as a profile to `~/.aws/credentials` and `~/.aws/config` 16 | 17 | ```bash 18 | ~/.aws/credentials 19 | [myprofile] 20 | aws_access_key_id = MY_AWS_ACCESSKEY 21 | aws_secret_access_key = MY_AWS_SECRETKEY 22 | aws_session_token = MY_AWS_SESSION_TOKEN 23 | 24 | ~/.aws/config 25 | [profile myprofile] 26 | output = json 27 | region = eu-west-1 28 | ``` 29 | 30 | Once this is setup run the following to deploy: 31 | 32 | ```bash 33 | $ cd resources/cdk 34 | $ cdk --version 35 | 1.51.0 (build 8c2d53c) 36 | 37 | 1. Setup npm dependencies 38 | $ npm install 39 | $ npm watch 40 | 41 | 2. Deploy the CDK stack 42 | $ cdk bootstrap aws://[AWSACCOUNTID]/eu-west-1 --profile myprofile 43 | $ cdk deploy --profile myprofile 44 | ... 45 | 46 | The following o/p will be created upon completion - check the cloud formation stack in aws account 47 | 48 | beam-workshop.WindowsDevEnvironmentInstanceIp9C0E1056 = 49 | beam-workshop.WindowsDevEnvironmentInstanceLoginCredentials9CFB9646 = 50 | beam-workshop.S3Bucket = 51 | beam-workshop.ApplicationTerminatedTopicName = 52 | beam-workshop.InputS3Pattern = 53 | beam-workshop.FirehoseTransformationLambda = 54 | beam-workshop.BeamConsumerJarPath = target/amazon-kinesis-analytics-beam-taxi-consumer-1.0-SNAPSHOT.jar 55 | 56 | 3. 
For Diffs run 57 | $ cdk diff --profile myprofile 58 | ``` 59 | -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/bin/cdk.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import 'source-map-support/register'; 3 | import cdk = require('@aws-cdk/core'); 4 | import { WorkshopInfrastructure } from '../lib/workshop-infrastructure'; 5 | 6 | const app = new cdk.App(); 7 | 8 | const synthDate = new Date().toISOString().split('T')[0]; 9 | 10 | const dependencies = { 11 | kinesisReplayVersion: 'release-0.1.0', 12 | beamApplicationVersion: 'release-0.2.0', 13 | beamApplicationJarFile: 'amazon-kinesis-analytics-beam-taxi-consumer-0.2.0.jar', 14 | } 15 | 16 | new WorkshopInfrastructure(app, 'streaming-analytics-workshop-beam', { 17 | appName: 'beam-workshop', 18 | description: `Creates all resources and compiles all artifacts that are required to run the beam workshop (shausma-beam-workshop-self-paced-${synthDate})`, 19 | eventEngine: false, 20 | ...dependencies 21 | }); 22 | 23 | new WorkshopInfrastructure(app, 'streaming-analytics-workshop-beam-event-engine', { 24 | appName: 'beam-workshop', 25 | description: `Creates all resources and compiles all artifacts that are required to run the beam workshop (shausma-beam-workshop-ee-${synthDate})`, 26 | eventEngine: true, 27 | ...dependencies 28 | }); -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/cdk.ts" 3 | } 4 | -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "roots": [ 3 | "/test" 4 | ], 5 | testMatch: [ '**/*.test.ts'], 6 | "transform": { 7 | 
"^.+\\.tsx?$": "ts-jest" 8 | }, 9 | } 10 | -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/lambda/add-approximate-arrival-time.js: -------------------------------------------------------------------------------- 1 | 'use strict'; 2 | console.log('Loading function'); 3 | 4 | exports.handler = (event, context, callback) => { 5 | /* Process the list of records and transform them */ 6 | const output = event.records.map((record) => ({ 7 | recordId: record.recordId, 8 | result: 'Ok', 9 | data: enrichPayload(record), 10 | })); 11 | 12 | console.log(`Processing completed. Successful records ${output.length}.`); 13 | 14 | callback(null, { records: output }); 15 | }; 16 | 17 | 18 | function enrichPayload(record) { 19 | const payload = JSON.parse(Buffer.from(record.data, 'base64').toString('utf8')); 20 | const timestamp = new Date(record.kinesisRecordMetadata.approximateArrivalTimestamp).toISOString(); 21 | 22 | const enrichedPayload = Object.assign({approximate_arrival_timestamp: timestamp}, payload); 23 | 24 | return Buffer.from(JSON.stringify(enrichedPayload)+"\n").toString('base64'); 25 | } -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/lambda/build-pipeline-helper.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import boto3 4 | import traceback 5 | import cfnresponse 6 | import urllib.request 7 | 8 | s3client = boto3.client('s3') 9 | code_pipeline = boto3.client('codepipeline') 10 | 11 | 12 | def download_sources(event, context): 13 | url = os.environ['url'] 14 | bucket = os.environ['bucket'] 15 | key = os.environ['key'] 16 | 17 | try: 18 | if event['RequestType'] != 'Delete': 19 | req = urllib.request.Request(url) 20 | response = urllib.request.urlopen(req) 21 | 22 | s3client.put_object(Bucket=bucket, Key=key, Body=response.read()) 23 | 24 | cfnresponse.send(event, context, 
cfnresponse.SUCCESS, {}) 25 | except Exception: 26 | traceback.print_exc() 27 | 28 | cfnresponse.send(event, context, cfnresponse.FAILED, {}) 29 | 30 | 31 | def notify_build_success(event, context): 32 | job_id = event['CodePipeline.job']['id'] 33 | 34 | url = os.environ['waitHandleUrl'] 35 | headers = { "Content-Type": "" } 36 | data = { "Status": "SUCCESS", "Reason": "Compilation Succeeded", "UniqueId": job_id, "Data": "Compilation Succeeded" } 37 | 38 | try: 39 | req = urllib.request.Request(url, headers=headers, data=bytes(json.dumps(data), encoding="utf-8"), method='PUT') 40 | response = urllib.request.urlopen(req) 41 | 42 | code_pipeline.put_job_success_result(jobId=job_id) 43 | except Exception: 44 | traceback.print_exc() 45 | 46 | code_pipeline.put_job_failure_result(jobId=job_id, failureDetails={'type': 'JobFailed'}) 47 | -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/lambda/empty-bucket.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import boto3 4 | import traceback 5 | import cfnresponse 6 | 7 | def empty_bucket(event, context): 8 | bucket_name = os.environ['bucket_name'] 9 | 10 | try: 11 | if event['RequestType'] == 'Delete': 12 | print("empty bucket: " + bucket_name) 13 | 14 | bucket = boto3.resource('s3').Bucket(bucket_name) 15 | bucket.object_versions.delete() 16 | 17 | cfnresponse.send(event, context, cfnresponse.SUCCESS, {}) 18 | except Exception: 19 | traceback.print_exc() 20 | 21 | cfnresponse.send(event, context, cfnresponse.FAILED, {}) -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/lambda/get-emr-master-id.py: -------------------------------------------------------------------------------- 1 | import os 2 | import boto3 3 | import cfnresponse 4 | 5 | client = boto3.client('emr') 6 | 7 | def get_instance_id(event, context): 8 | response = 
client.list_instances( 9 | ClusterId=event['ResourceProperties']['EmrId'], 10 | InstanceGroupTypes=['MASTER'] 11 | ) 12 | 13 | instance_id = response['Instances'][0]['Ec2InstanceId'] 14 | 15 | responseData = { 'EmrMasterInstanceId': instance_id } 16 | 17 | cfnresponse.send(event, context, cfnresponse.SUCCESS, responseData) -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/lambda/stop-kda-app.py: -------------------------------------------------------------------------------- 1 | import os 2 | import boto3 3 | 4 | client = boto3.client('kinesisanalyticsv2') 5 | 6 | def empty_bucket(event, context): 7 | response = client.stop_application( 8 | ApplicationName=os.environ['application_name'] 9 | ) -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/lib/cloud9-dev-environment.ts: -------------------------------------------------------------------------------- 1 | import cdk = require('@aws-cdk/core'); 2 | import ec2 = require('@aws-cdk/aws-ec2'); 3 | import iam = require('@aws-cdk/aws-iam'); 4 | import s3 = require('@aws-cdk/aws-s3'); 5 | import c9 = require('@aws-cdk/aws-cloud9'); 6 | import { GithubBuildPipeline } from "./github-build-pipeline"; 7 | 8 | export interface Cloud9DevEnvironmentProps extends cdk.StackProps { 9 | vpc: ec2.Vpc, 10 | bucket: s3.Bucket, 11 | beamSourceRepositoryUrl: string, 12 | kinesisReplayVersion: string, 13 | eventEngine: boolean 14 | } 15 | 16 | export class Cloud9DevEnvironment extends cdk.Construct { 17 | 18 | constructor(scope: cdk.Construct, id: string, props: Cloud9DevEnvironmentProps) { 19 | super(scope, id); 20 | 21 | new GithubBuildPipeline(this, 'KinesisReplayBuildPipeline', { 22 | url: `https://github.com/aws-samples/amazon-kinesis-replay/archive/refs/tags/${props.kinesisReplayVersion}.zip`, 23 | bucket: props.bucket, 24 | extract: true 25 | }); 26 | 27 | const owner = props.eventEngine ? 
{ownerArn: `arn:aws:iam::${cdk.Aws.ACCOUNT_ID}:assumed-role/TeamRole/MasterKey`} : {}; 28 | 29 | const c9env = new c9.CfnEnvironmentEC2(this, 'Cloud9Instance', { 30 | instanceType: 't3.large', 31 | subnetId: props.vpc.publicSubnets[0].subnetId, 32 | description: 'Cloud9 environment for Apache Beam on KDA workshop', 33 | name: cdk.Aws.STACK_NAME, 34 | repositories: [ 35 | { 36 | pathComponent: '/code', 37 | repositoryUrl: props.beamSourceRepositoryUrl 38 | } 39 | ], 40 | ...owner 41 | }) 42 | 43 | new cdk.CfnOutput(this, 'ReplayJarS3Url', { 44 | exportName: 'ReplayJarS3Url', 45 | description: 'S3 Url for the replay jar file', 46 | value: props.bucket.s3UrlForObject('target') 47 | }); 48 | } 49 | } -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/lib/empty-bucket.ts: -------------------------------------------------------------------------------- 1 | import fs = require('fs'); 2 | import cdk = require('@aws-cdk/core'); 3 | import s3 = require('@aws-cdk/aws-s3'); 4 | import lambda = require('@aws-cdk/aws-lambda'); 5 | import cfn = require('@aws-cdk/aws-cloudformation'); 6 | import { Duration } from '@aws-cdk/core'; 7 | import { CustomResourceProvider } from '@aws-cdk/aws-cloudformation'; 8 | 9 | 10 | export interface EmptyBucketOnDeleteProps { 11 | bucket: s3.Bucket, 12 | } 13 | 14 | export class EmptyBucketOnDelete extends cdk.Construct { 15 | customResource: cfn.CfnCustomResource; 16 | 17 | constructor(scope: cdk.Construct, id: string, props: EmptyBucketOnDeleteProps) { 18 | super(scope, id); 19 | 20 | const lambdaSource = fs.readFileSync('lambda/empty-bucket.py').toString(); 21 | 22 | const emptyBucketLambda = new lambda.Function(this, 'EmptyBucketLambda', { 23 | runtime: lambda.Runtime.PYTHON_3_7, 24 | timeout: Duration.minutes(15), 25 | code: lambda.Code.fromInline(lambdaSource), 26 | handler: 'index.empty_bucket', 27 | memorySize: 512, 28 | environment: { 29 | bucket_name: props.bucket.bucketName, 30 | } 
31 | }); 32 | 33 | props.bucket.grantReadWrite(emptyBucketLambda); 34 | 35 | this.customResource = new cfn.CfnCustomResource(this, 'EmptyBucketResource', { 36 | serviceToken: CustomResourceProvider.lambda(emptyBucketLambda).serviceToken 37 | }); 38 | } 39 | } -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk", 3 | "version": "0.1.0", 4 | "bin": { 5 | "cdk": "bin/cdk.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@aws-cdk/assert": "^1.88.0", 15 | "@types/jest": "^26.0.10", 16 | "@types/node": "10.17.27", 17 | "aws-cdk": "1.147.0", 18 | "jest": "^26.4.2", 19 | "ts-jest": "^26.2.0", 20 | "ts-node": "^9.0.0", 21 | "typescript": "~3.9.7" 22 | }, 23 | "dependencies": { 24 | "@aws-cdk/aws-autoscaling": "^1.56.0", 25 | "@aws-cdk/aws-cloud9": "^1.115.0", 26 | "@aws-cdk/aws-cloudwatch-actions": "^1.56.0", 27 | "@aws-cdk/aws-codebuild": "^1.56.0", 28 | "@aws-cdk/aws-codepipeline": "^1.56.0", 29 | "@aws-cdk/aws-codepipeline-actions": "^1.56.0", 30 | "@aws-cdk/aws-ec2": "^1.56.0", 31 | "@aws-cdk/aws-elasticsearch": "^1.56.0", 32 | "@aws-cdk/aws-emr": "^1.56.0", 33 | "@aws-cdk/aws-iam": "^1.56.0", 34 | "@aws-cdk/aws-lambda": "^1.56.0", 35 | "@aws-cdk/aws-s3": "^1.56.0", 36 | "@aws-cdk/aws-secretsmanager": "^1.56.0", 37 | "@aws-cdk/core": "^1.56.0", 38 | "@types/node": "^13.13.15", 39 | "fs": "0.0.1-security", 40 | "source-map-support": "^0.5.19" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/test/cdk.test.ts: -------------------------------------------------------------------------------- 1 | import { expect as expectCDK, matchTemplate, MatchStyle } from '@aws-cdk/assert'; 2 | import cdk = require('@aws-cdk/core'); 3 | 4 | 
test('Empty Stack', () => { 5 | const app = new cdk.App(); 6 | // WHEN 7 | const stack = new cdk.Stack(app, 'MyTestStack'); 8 | // THEN 9 | expectCDK(stack).to(matchTemplate({ 10 | "Resources": {} 11 | }, MatchStyle.EXACT)) 12 | }); -------------------------------------------------------------------------------- /resources/beam-on-kda/cdk/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target":"ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2016", "es2017.object", "es2017.string"], 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | "noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | "noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization":false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } 24 | -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/.gitignore: -------------------------------------------------------------------------------- 1 | *.js 2 | !jest.config.js 3 | *.d.ts 4 | node_modules 5 | 6 | # CDK asset staging directory 7 | .cdk.staging 8 | cdk.out/* 9 | !cdk.out/StreamingAnalyticsWorkshop.template.json 10 | -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/.npmignore: -------------------------------------------------------------------------------- 1 | *.ts 2 | !*.d.ts 3 | 4 | # CDK asset staging directory 5 | .cdk.staging 6 | cdk.out 7 | -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/README.md: -------------------------------------------------------------------------------- 1 | # Useful 
commands 2 | 3 | * `npm run build` compile typescript to js 4 | * `npm run watch` watch for changes and compile 5 | * `npm run test` perform the jest unit tests 6 | * `cdk deploy` deploy this stack to your default AWS account/region 7 | * `cdk diff` compare deployed stack with current state 8 | * `cdk synth` emits the synthesized CloudFormation template 9 | -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/bin/cdk.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import 'source-map-support/register'; 3 | import cdk = require('@aws-cdk/core'); 4 | import { WorkshopInfrastructure } from '../lib/workshop-infrastructure'; 5 | import { BuildWorkshopResources } from '../lib/build-workshop-resources'; 6 | 7 | const app = new cdk.App(); 8 | 9 | const synthDate = new Date().toISOString().split('T')[0]; 10 | 11 | new WorkshopInfrastructure(app, 'StreamingAnalyticsWorkshop', { 12 | description: `Creates all resources and compiles all artifacts that are required to run the streaming analytics workshop (shausma-streaming-analytics-workshop-self-paced-${synthDate})`, 13 | kinesisReplayVersion: 'release-0.1.0', 14 | consumerApplicationVersion: 'release-0.2.1', 15 | consumerApplicationJarObject: 'amazon-kinesis-analytics-taxi-consumer-0.2.1.jar' 16 | }); 17 | 18 | new WorkshopInfrastructure(app, 'StreamingAnalyticsWorkshopEventEngine', { 19 | description: `Creates all resources and compiles all artifacts that are required to run the streaming analytics workshop (shausma-streaming-analytics-workshop-event-engine-${synthDate})`, 20 | kinesisReplayVersion: 'release-0.1.0', 21 | consumerApplicationVersion: 'release-0.2.1', 22 | consumerApplicationJarObject: 'amazon-kinesis-analytics-taxi-consumer-0.2.1.jar' 23 | }); 24 | 25 | new BuildWorkshopResources(app, 'BuildStreamingAnalyticsWorkshopResources'); 
-------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/cdk.json: -------------------------------------------------------------------------------- 1 | { 2 | "app": "npx ts-node bin/cdk.ts" 3 | } 4 | -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/jest.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | "roots": [ 3 | "/test" 4 | ], 5 | testMatch: [ '**/*.test.ts'], 6 | "transform": { 7 | "^.+\\.tsx?$": "ts-jest" 8 | }, 9 | } 10 | -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/lambda/build-pipeline-helper.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import boto3 4 | import traceback 5 | import cfnresponse 6 | import urllib.request 7 | 8 | s3client = boto3.client('s3') 9 | code_pipeline = boto3.client('codepipeline') 10 | 11 | 12 | def download_sources(event, context): 13 | url = os.environ['url'] 14 | bucket = os.environ['bucket'] 15 | key = os.environ['key'] 16 | 17 | try: 18 | if event['RequestType'] != 'Delete': 19 | req = urllib.request.Request(url) 20 | response = urllib.request.urlopen(req) 21 | 22 | s3client.put_object(Bucket=bucket, Key=key, Body=response.read()) 23 | 24 | cfnresponse.send(event, context, cfnresponse.SUCCESS, {}) 25 | except Exception: 26 | traceback.print_exc() 27 | 28 | cfnresponse.send(event, context, cfnresponse.FAILED, {}) 29 | 30 | 31 | def notify_build_success(event, context): 32 | job_id = event['CodePipeline.job']['id'] 33 | 34 | url = os.environ['waitHandleUrl'] 35 | headers = { "Content-Type": "" } 36 | data = { "Status": "SUCCESS", "Reason": "Compilation Succeeded", "UniqueId": job_id, "Data": "Compilation Succeeded" } 37 | 38 | try: 39 | req = urllib.request.Request(url, headers=headers, data=bytes(json.dumps(data), 
encoding="utf-8"), method='PUT') 40 | response = urllib.request.urlopen(req) 41 | 42 | code_pipeline.put_job_success_result(jobId=job_id) 43 | except Exception: 44 | traceback.print_exc() 45 | 46 | code_pipeline.put_job_failure_result(jobId=job_id, failureDetails={'type': 'JobFailed'}) 47 | -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/lambda/empty-bucket.py: -------------------------------------------------------------------------------- 1 | import os 2 | import json 3 | import boto3 4 | import traceback 5 | import cfnresponse 6 | 7 | def empty_bucket(event, context): 8 | bucket_name = os.environ['bucket_name'] 9 | 10 | try: 11 | if event['RequestType'] == 'Delete': 12 | print("empty bucket: " + bucket_name) 13 | 14 | bucket = boto3.resource('s3').Bucket(bucket_name) 15 | bucket.object_versions.delete() 16 | 17 | cfnresponse.send(event, context, cfnresponse.SUCCESS, {}) 18 | except Exception: 19 | traceback.print_exc() 20 | 21 | cfnresponse.send(event, context, cfnresponse.FAILED, {}) -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/lib/empty-bucket.ts: -------------------------------------------------------------------------------- 1 | import fs = require('fs'); 2 | import cdk = require('@aws-cdk/core'); 3 | import s3 = require('@aws-cdk/aws-s3'); 4 | import lambda = require('@aws-cdk/aws-lambda'); 5 | import cfn = require('@aws-cdk/aws-cloudformation'); 6 | import { Duration } from '@aws-cdk/core'; 7 | import { CustomResourceProvider } from '@aws-cdk/aws-cloudformation'; 8 | 9 | 10 | export interface EmptyBucketOnDeleteProps { 11 | bucket: s3.Bucket, 12 | } 13 | 14 | export class EmptyBucketOnDelete extends cdk.Construct { 15 | customResource: cfn.CfnCustomResource; 16 | 17 | constructor(scope: cdk.Construct, id: string, props: EmptyBucketOnDeleteProps) { 18 | super(scope, id); 19 | 20 | const lambdaSource = 
fs.readFileSync('lambda/empty-bucket.py').toString(); 21 | 22 | const emptyBucketLambda = new lambda.Function(this, 'EmptyBucketLambda', { 23 | runtime: lambda.Runtime.PYTHON_3_7, 24 | timeout: Duration.minutes(15), 25 | code: lambda.Code.inline(lambdaSource), 26 | handler: 'index.empty_bucket', 27 | memorySize: 512, 28 | environment: { 29 | bucket_name: props.bucket.bucketName, 30 | } 31 | }); 32 | 33 | props.bucket.grantReadWrite(emptyBucketLambda); 34 | 35 | this.customResource = new cfn.CfnCustomResource(this, 'EmptyBucketResource', { 36 | serviceToken: CustomResourceProvider.lambda(emptyBucketLambda).serviceToken 37 | }); 38 | } 39 | } -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "cdk", 3 | "version": "0.1.0", 4 | "bin": { 5 | "cdk": "bin/cdk.js" 6 | }, 7 | "scripts": { 8 | "build": "tsc", 9 | "watch": "tsc -w", 10 | "test": "jest", 11 | "cdk": "cdk" 12 | }, 13 | "devDependencies": { 14 | "@aws-cdk/assert": "^1.83.0", 15 | "@types/jest": "^26.0.10", 16 | "@types/node": "10.17.27", 17 | "aws-cdk": "1.147.0", 18 | "jest": "^26.4.2", 19 | "ts-jest": "^26.2.0", 20 | "ts-node": "^9.0.0", 21 | "typescript": "~3.9.7" 22 | }, 23 | "dependencies": { 24 | "@aws-cdk/aws-autoscaling": "^1.32.2", 25 | "@aws-cdk/aws-codebuild": "^1.32.2", 26 | "@aws-cdk/aws-codepipeline": "^1.32.2", 27 | "@aws-cdk/aws-codepipeline-actions": "^1.32.2", 28 | "@aws-cdk/aws-ec2": "^1.32.2", 29 | "@aws-cdk/aws-elasticsearch": "^1.32.2", 30 | "@aws-cdk/aws-iam": "^1.32.2", 31 | "@aws-cdk/aws-lambda": "^1.32.2", 32 | "@aws-cdk/aws-s3": "^1.32.2", 33 | "@aws-cdk/aws-secretsmanager": "^1.32.2", 34 | "@aws-cdk/core": "^1.83.0", 35 | "source-map-support": "^0.5.16" 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/test/cdk.test.ts: 
-------------------------------------------------------------------------------- 1 | import { expect as expectCDK, matchTemplate, MatchStyle } from '@aws-cdk/assert'; 2 | import cdk = require('@aws-cdk/core'); 3 | 4 | test('Empty Stack', () => { 5 | const app = new cdk.App(); 6 | // WHEN 7 | const stack = new cdk.Stack(app, 'MyTestStack'); 8 | // THEN 9 | expectCDK(stack).to(matchTemplate({ 10 | "Resources": {} 11 | }, MatchStyle.EXACT)) 12 | }); -------------------------------------------------------------------------------- /resources/flink-on-kda/cdk/tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "compilerOptions": { 3 | "target":"ES2018", 4 | "module": "commonjs", 5 | "lib": ["es2016", "es2017.object", "es2017.string"], 6 | "declaration": true, 7 | "strict": true, 8 | "noImplicitAny": true, 9 | "strictNullChecks": true, 10 | "noImplicitThis": true, 11 | "alwaysStrict": true, 12 | "noUnusedLocals": false, 13 | "noUnusedParameters": false, 14 | "noImplicitReturns": true, 15 | "noFallthroughCasesInSwitch": false, 16 | "inlineSourceMap": true, 17 | "inlineSources": true, 18 | "experimentalDecorators": true, 19 | "strictPropertyInitialization":false, 20 | "typeRoots": ["./node_modules/@types"] 21 | }, 22 | "exclude": ["cdk.out"] 23 | } 24 | -------------------------------------------------------------------------------- /workshop/.hugo_build.lock: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/.hugo_build.lock -------------------------------------------------------------------------------- /workshop/buildspec.yml: -------------------------------------------------------------------------------- 1 | version: 0.2 2 | phases: 3 | install: 4 | runtime-versions: 5 | golang: 1.12 6 | nodejs: 10 7 | commands: 8 | - echo Entered the install phase... 
9 | - apt-get -qq update && apt-get -qq install curl 10 | - apt-get -qq install asciidoctor 11 | - curl -s -L https://github.com/gohugoio/hugo/releases/download/v0.64.1/hugo_0.64.1_Linux-64bit.deb -o hugo.deb 12 | - dpkg -i hugo.deb 13 | finally: 14 | - echo Installation done 15 | build: 16 | commands: 17 | - echo Entered the build phase ... 18 | - echo Build started on `date` 19 | - cd $CODEBUILD_SRC_DIR/workshop 20 | - hugo --baseURL https://$WORKSHOP_NAME.$BASE_DOMAIN_NAME --quiet 21 | finally: 22 | - echo Building the HTML files finished 23 | artifacts: 24 | files: 25 | - "**/*" 26 | - "*" 27 | base-directory: $CODEBUILD_SRC_DIR/workshop/public/ 28 | discard-paths: no -------------------------------------------------------------------------------- /workshop/config.toml: -------------------------------------------------------------------------------- 1 | RelativeURLs=true 2 | CanonifyURLs=true 3 | languageCode = "en-US" 4 | defaultContentLanguage = "en" 5 | defaultContentLanguageInSubdir = false 6 | disableLanguages = ["de"] 7 | 8 | title = "Streaming Analytics Workshop" 9 | theme = "learn" 10 | metaDataFormat = "yaml" 11 | 12 | uglyurls = true 13 | sectionPagesMenu = "main" 14 | pygmentsCodeFences = true 15 | pygmentsStyle = "monokai" 16 | 17 | [params] 18 | editURL = "https://github.com/aws-samples/streaming-analytics-workshop/edit/mainline/workshop/content/" 19 | description = "Streaming Analytics Workshop" 20 | author = "Steffen Hausmann" 21 | disableBreadcrumb = false 22 | disableNextPrev = false 23 | themeVariant = "aws" 24 | disableSearch = false 25 | disableAssetsBusting = true 26 | disableLanguageSwitchingButton = false 27 | disableShortcutsTitle = true 28 | disableInlineCopyToClipBoard = false 29 | 30 | [outputs] 31 | home = [ "HTML", "RSS", "JSON" ] 32 | 33 | [blackfriday] 34 | plainIDAnchors = true 35 | hrefTargetBlank = true 36 | 37 | [markup] 38 | defaultMarkdownHandler = "goldmark" 39 | [markup.goldmark] 40 | [markup.goldmark.renderer] 41 | unsafe 
= true 42 | 43 | [Languages] 44 | [Languages.en] 45 | title = "Streaming Analytics Workshop" 46 | weight = 1 47 | languageName = "English" 48 | 49 | [[Languages.en.menu.shortcuts]] 50 | name = " More Resources" 51 | url = "more-resources/" 52 | weight = 10 53 | 54 | [[Languages.en.menu.shortcuts]] 55 | name = " GitHub Project" 56 | url = "https://github.com/aws-samples/streaming-analytics-workshop" 57 | weight = 30 58 | 59 | [[Languages.en.menu.shortcuts]] 60 | name = " Have questions?" 61 | url = "mailto:streaming-analytics-workshop@amazon.com" 62 | weight = 40 63 | 64 | 65 | [Languages.de] 66 | title = "Streaming Analytics Workshop" 67 | weight = 1 68 | languageName = "Deutsch" 69 | -------------------------------------------------------------------------------- /workshop/content/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Streaming Analytics Workshop" 3 | chapter: true 4 | weight: 1 5 | --- 6 | 7 | This is a collection of workshops and resources for running streaming analytics workloads on AWS. 8 | 9 | In the workshop [Apache Flink on Amazon Kinesis Data Analytics]({{< relref "flink-on-kda/" >}}) you will learn how to deploy, operate, and scale an Apache Flink application with Kinesis Data Analytics. You will also explore the basic concepts of Apache Flink and running Flink applications in a fully managed environment on AWS. 10 | 11 | In the workshop [Apache Flink on Amazon Kinesis Data Analytics Studio]({{< relref "flink-on-kda-studio/" >}}) you will learn how to build Apache Flink applications via. Kinesis Data Analytics Studio Notebooks. Flink is a framework for real time data processing. In this workshop you will explore Flink programing patterns with Flink's support for SQL. After building a Flink application you will deploy your notebook as a long running application. 
12 | 13 | In the workshop [Apache Beam on Amazon Kinesis Data Analytics]({{< relref "beam-on-kda/" >}}) you will learn how you can leverage Beam’s expressive programming model to unify batch and streaming. You will also learn how AWS can help you to effectively build and operate Beam based streaming architectures with low operational overhead in a fully managed environment on AWS. -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Apache Beam on Amazon Kinesis Data Analytics" 3 | menuTitle: "Apache Beam on KDA" 4 | chapter: true 5 | weight: 20 6 | url: "/beam-on-kda/" 7 | --- 8 | 9 | In this workshop, we explore an end to end example that combines batch and streaming aspects in one uniform Apache Beam pipeline. We start to analyze incoming taxi trip events in near real time with an Apache Beam pipeline. We then show how to archive the trip data to Amazon S3 for long term storage. We subsequently explain how to read the historic data from S3 and backfill new metrics by executing the same Beam pipeline in a batch fashion. Along the way, you also learn how you can deploy and execute the Beam pipeline with Amazon Kinesis Data Analytics in a fully managed environment. 10 | 11 | ![Overview Beam Architecture](/images/beam-on-kda/overview-beamarchitecture.png) 12 | 13 | So you will not only learn how you can leverage Apache Beam’s expressive programming model to unify batch and streaming you will also learn how AWS can help you to effectively build and operate Beam based streaming architectures with low operational overhead. 
14 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/beam-on-kda/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Beam on KDA" 3 | date = 2020-07-10T10:27:19+02:00 4 | weight = 45 5 | chapter = false 6 | +++ 7 | 8 | While running Apache Beam applications on top of Kinesis Data Analytics is no different from running Beam applications in any Apache Flink environment, there are a few important aspects that developers need to keep in mind. In this section, you'll learn how to efficiently leverage the integrations with the AWS ecosystem effectively. -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/beam-on-kda/flink-dashboard.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Accessing the Flink Dashboard" 3 | date = 2020-07-10T10:27:19+02:00 4 | weight = 10 5 | chapter = false 6 | +++ 7 | 8 | Amazon Kinesis Data Analytics exposes the Flink dashboard so that you can gain additional insights. The dashboard allows you to explore the details of the pipeline while it is executed. You can, for instance, obtain detailled metrics for individual operators and checkoint information. 9 | 10 | 1. Select the running application in the [Kinesis Data Analytics console](https://console.aws.amazon.com/kinesisanalytics) and press the **Open Apache Flink dashboard** next to the application name 11 | 12 | 1. 
Select the running Flink application and explore the operator details in the dashboard 13 | 14 | ![](/images/beam-on-kda/kda-flink-streaming-dahsboard.png) -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/beam-on-kda/jackson-dependencies.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Jackson dependencies" 3 | date = 2020-07-10T10:27:19+02:00 4 | weight = 30 5 | chapter = false 6 | +++ 7 | 8 | When running your Beam applications with an Apache Flink runner, you may experience dependency conflicts for Jackson 2.x. You need to ensure that all Jackson dependencies are leveraging the same version. Otherwise you may experience `NoSuchMethodErrors` during runtime. 9 | 10 | ``` 11 | java.lang.NoSuchMethodError: 'com.fasterxml.jackson.core.json.JsonWriteContext 12 | ``` 13 | 14 | For Apache Flink 1.11, this can be fixed by including the following depencency, that explicitely pulls in the correct version. 15 | 16 | {{< highlight xml >}} 17 | 18 | com.fasterxml.jackson.dataformat 19 | jackson-dataformat-cbor 20 | 2.12.1 21 | 22 | {{< / highlight >}} -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/beam-on-kda/runtime-parameters.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Passing runtime parameters into the pipeline" 3 | menuTitle = "Runtime parameters" 4 | date = 2020-07-10T10:27:19+02:00 5 | weight = 10 6 | chapter = false 7 | +++ 8 | 9 | Amazon Kinesis Data Analytics (KDA) provides a managed Flink environment that is based on the open source Flink version. Hence, running a Beam pipeline on KDA is almost identical to running a Beam pipeline with a Flink runner on a self managed Flink cluster. 10 | 11 | One of the differences is the way you pass runtime parameters into the pipeline. 
As you don't have access to the environment, you cannot specify parameters when you are invoking the Flink command. Instead, you can configure application properties, that get exposed to the application during runtime. You've already specified parameters through application properties when you created the KDA application. 12 | 13 | In the Beam application, these properties are obtained from the environment. 14 | 15 | {{< highlight java >}} 16 | Map applicationProperties = KinesisAnalyticsRuntime.getApplicationProperties(); 17 | {{< / highlight >}} 18 | 19 | And then the properties are converted to a common `PipelineOption` with a [helper method](https://github.com/aws-samples/amazon-kinesis-analytics-beam-taxi-consumer/blob/release-0.2.0/src/main/java/com/amazonaws/samples/beam/taxi/count/TaxiCountOptions.java#L60). 20 | 21 | {{< highlight java >}} 22 | String[] kinesisArgs = TaxiCountOptions.argsFromKinesisApplicationProperties( 23 | args, "BeamApplicationProperties"); 24 | 25 | TaxiCountOptions options = PipelineOptionsFactory.fromArgs( 26 | ArrayUtils.addAll(args, kinesisArgs)).as(TaxiCountOptions.class); 27 | 28 | options.setRunner(FlinkRunner.class); 29 | options.setAwsRegion(Regions.getCurrentRegion().getName()); 30 | 31 | PipelineOptionsValidator.validate(TaxiCountOptions.class, options); 32 | {{< / highlight >}} 33 | 34 | 35 | You'll notice the `KinesisAnalyticsRuntime` class above; in order to access this class in your code, be sure to add the following dependency in your `pom.xml`: 36 | 37 | {{< highlight xml >}} 38 | 39 | com.amazonaws 40 | aws-kinesisanalytics-runtime 41 | ${kda.version} 42 | 43 | {{< / highlight >}} 44 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/beam-on-kda/temporary-credentials.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Leveraging temporary credentials" 3 | menuTitle = "Temporary credentials" 4 | date 
= 2020-07-10T10:27:19+02:00 5 | weight = 20 6 | chapter = false 7 | +++ 8 | 9 | You can configure your Beam IO connectors to pull temporary credentials from the KDA environment. Here's a snippet illustrating how to configure the `KinesisIO` connector. 10 | 11 | 12 | {{< highlight java >}} 13 | input = p 14 | .apply("Kinesis source", KinesisIO 15 | .read() 16 | .withAWSClientsProvider(new DefaultCredentialsProviderClientsProvider()) 17 | ... 18 | ) 19 | {{< / highlight >}} 20 | 21 | Leveraging temporary credentials has the advantage that you don't need to hard coding credentials in your code. Instead, you access permissions for an [IAM Role](https://docs.aws.amazon.com/IAM/latest/UserGuide/id_roles.html) that is then associated with your KDA application. The temporary credentials are then exposed through the evenvironment and will be automatically picked up by the `DefaultCredentialsProviderClientsProvider`. -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/cleanup/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Cleanup" 3 | date = 2020-07-10T11:00:27+02:00 4 | weight = 90 5 | chapter = true 6 | +++ 7 | 8 | This chapter is to delete the resources after completing the workshop. If running a hosted AWS workshop, the resources and account will be available for the rest of the day to allow you to continue the workshop in your own time. 9 | 10 | If you are running in your own account please remove all resources that you have created. Start with deleting the Kinesis Data Analytics application **beam-workshop**, then delete the Kinesis Firehose delivery stream **beam-workshop-s3**, and then the Kinesis data stream **beam-workshop**. Next, delete the Amazon CodeGuru profiling group **flink-beam-app**. 
Last, delete the CloudFormation template **beam-workshop**
If you are attending an AWS hosted event, there is only one development environment available in the account. Once you have found the development environment, click on the **Open IDE** button. 10 | 11 | 1. The code [repository](https://github.com/aws-samples/amazon-kinesis-analytics-beam-taxi-consumer) is automatically cloned into your environment the first time you open the IDE. 12 | 13 | 1. Open a terminal window by choosing **Window** and then **New Terminal** form the menu bar at the top. Execute the following commands to configure your environment. 14 | {{< highlight bash >}} 15 | sh ./code/misc/prepare-env.sh -s $C9_PROJECT 16 | {{< / highlight >}} 17 | 18 | 1. The last output that is generated should look like the following: 19 | {{< highlight bash >}} 20 | download: s3://beam-workshop-historictripsef9d049f-8lf5u3xscqz4/target/amazon-kinesis-replay-0.1.0.jar to replay/amazon-kinesis-replay-0.1.0.jar 21 | download: s3://beam-workshop-historictripsef9d049f-8lf5u3xscqz4/target/amazon-kinesis-analytics-beam-taxi-consumer-0.2.0.jar to replay/amazon-kinesis-analytics-beam-taxi-consumer-0.2.0.jar 22 | {{< / highlight >}} 23 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/create-infrastructure/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Create Ingestion Infrastructure" 3 | date: 2020-07-10T10:14:06+02:00 4 | weight: 30 5 | chapter: false 6 | --- 7 | 8 | In this chapter, you will set up the infrastructure within AWS to focus on the ingestion, streaming and transformation part of the architecture. 9 | 10 | An [Amazon Kinesis data stream](https://aws.amazon.com/kinesis/data-streams/) is created which serves as a short term streaming store for the events that are created by the taxi fleet. You then start producing a historic set of taxi trips into the Kinesis data stream. 
11 | 12 | The Kinesis data stream (`beam-workshop`) serves as a buffer that decouples the producers from the consumers. It is a common pattern for architectures that are analyzing streaming data to separate producers from consumers by means of a streaming store. In this way, the architecture becomes more robust. Producers and consumers can be scaled independently and producers can still persist events into the data stream even if the consumers are currently experiencing operational problems or the consuming application needs to be updated. 13 | 14 | In addition, [Amazon Kinesis Data Firehose](https://aws.amazon.com/kinesis/data-firehose/) (`beam-workshop-s3`) is used to perform any enrichment and transformation on the data prior to it being loaded into data lakes, data stores and analytical tools. These data transformations are performed by invoking [AWS Lambda](https://aws.amazon.com/lambda/), a serverless platform that runs code without provisioning or managing servers. For this architecture we will store the transformed data into Amazon S3 so that it can be read by our Beam pipeline. 15 | 16 | This infrastructure also allows you to experiment and adopt new technologies in the future. Multiple independent applications can concurrently consume the data stored in the Kinesis data stream. You can then test how a new version of an existing application performs with a copy of the production traffic. But you can also introduce a different tool and technology stack to analyze the data, again without affecting the existing production application. 17 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/create-infrastructure/create-stream.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Create a Kinesis data stream" 3 | chapter: false 4 | weight: 31 5 | --- 6 | 7 | You start with creating a Kinesis data stream. 8 | 9 | 1. 
Navigate to the [Kinesis Console](https://console.aws.amazon.com/kinesis) 10 | 11 | 1. If displayed, up press **Get Started** in the service welcome dialog 12 | 13 | 1. Select **Create data stream** to navigate to the Amazon Kinesis Data Stream service: 14 | ![](/images/beam-on-kda/kinesis-welcome-create-stream.png) 15 | 16 | 1. Choose `beam-workshop` as **Kinesis stream name** 17 | 18 | 1. Enter `4` as the **Number of shards**. 19 | 20 | {{% notice info %}} 21 | A Shard is the base throughput unit of an Amazon Kinesis data stream. One shard provides a capacity of 1MB/sec data input and 2MB/sec data output. One shard can support up to 1000 PUT records per second. You will specify the number of shards needed when you create a data stream. For example, we create a data stream with four shards. This data stream has a throughput of 4MB/sec data input and 8MB/sec data output, and allows up to 4000 put records per second. You can monitor shard-level metrics in Amazon Kinesis Data Streams and add or remove shards from your data stream dynamically as your data throughput changes by resharding the data stream. 22 | {{% /notice %}} 23 | 24 | 1. Select **Create Kinesis stream** at the bottom of the page 25 | ![](/images/beam-on-kda/kds-create-stream.png) 26 | 27 | 1. After a few moments, the data stream should have been created successfully and be in an _Active_ state 28 | ![](/images/beam-on-kda/kds-create-stream-active.png) 29 | 30 | 1. We are now ready to create the Firehose delivery stream. 
31 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/create-infrastructure/firehose/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Create a Firehose delivery stream" 3 | chapter: false 4 | weight: 32 5 | --- 6 | 7 | Next you create an Amazon Kinesis Data Firehose delivery stream to allow for transforming and enrichment of the source data to eventually store into an S3 bucket. The delivery stream will consume data from the data stream that has been created earlier. The delivery stream also enriches the events that are persisted to Amazon S3 by means of an AWS Lambda function that has already been pre-created for the lab. 8 | 9 | Each event that is persisted in the Kinesis stream is automatically assigned an _approximate arrival timestamp_ in the event meta data. The Lambda function is simply adding the _approximate arrival timestamp_ from the meta data into the payload of the message when it is written to Amazon S3. 10 | 11 | The Lambda function that does this can be found under [Services->Lambda](https://console.aws.amazon.com/lambda/) and as _\*EnrichEventsLambda\*_ in the middle of the name. 12 | 13 | ```js 14 | function enrichPayload(record) { 15 | const payload = JSON.parse( 16 | Buffer.from(record.data, "base64").toString("utf8") 17 | ); 18 | const timestamp = new Date( 19 | record.kinesisRecordMetadata.approximateArrivalTimestamp 20 | ).toISOString(); 21 | 22 | const enrichedPayload = Object.assign( 23 | { approximate_arrival_timestamp: timestamp }, 24 | payload 25 | ); 26 | 27 | return Buffer.from(JSON.stringify(enrichedPayload) + "\n").toString("base64"); 28 | } 29 | ``` 30 | 31 | In this way, the streaming and the batch pipeline can refer to the same timestamp so we can get the same result for the batch and streaming pipeline. Follow the steps in this chapter to create the delivery stream. 
32 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/create-infrastructure/firehose/choose-destination.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Choose a Destination" 3 | chapter: false 4 | weight: 30 5 | --- 6 | 7 | In this step you pick the destination for the transformed records. 8 | 9 | 1. Select **Amazon S3** as the Destination type 10 | 11 | 1. For the S3 Bucket name select the pre-created S3 bucket whose name contains `historictrips` in the middle 12 | 13 | 1. For **Prefix** enter `historic-trip-events/` so that all the transformed records are easily identifiable in Amazon S3 14 | 15 | 1. Leave all other options as default 16 | 17 | 1. Select **Next** to move onto the _Configure Settings_ screen. 18 | 19 | ![](/images/beam-on-kda/kfh-s3.png) 20 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/create-infrastructure/firehose/configure-settings.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Configure Settings" 3 | chapter: false 4 | weight: 32 5 | --- 6 | 7 | You then set the buffering hints that determine how often Firehose flushes data to the destination 8 | 9 | 1. Set the **Buffer Interval** to `60` seconds. 10 | 11 | {{% notice info %}} 12 | Amazon Kinesis Data Firehose buffers incoming streaming data to a certain size or for a certain period of time before delivering it to destinations. Buffer size is in MBs and ranges from 1MB to 128MB for Amazon S3 destination Buffer interval is in seconds and ranges from 60 seconds to 900 seconds. Increasing the buffers size allows us to gather data before delivering to ensure all data is delivered to the destination S3 bucket. 13 | {{% /notice %}} 14 | 15 | 1. Enable **S3 compression** by selecting *GZIP* to optimise the storage of data in Amazon S3. 
Buffer size is applied before compression.
Leave the **Record Format Conversion** as **Disabled** 14 | 15 | 1. Select **Next** to move onto the _Choose a destination_ screen. 16 | 17 | ![](/images/beam-on-kda/kfh-process.png) 18 | 19 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/create-infrastructure/firehose/review.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Review Settings" 3 | chapter: false 4 | weight: 50 5 | --- 6 | 7 | Once you have checked all the settings select **Create delivery stream** to create the Firehose Delivery Stream. After a few minutes you will see a Firehose Delivery Stream called `beam-workshop-s3` created on the Kinesis Dashboard. Click on the stream name to see more details and use this page to monitor activity in the later stages. 8 | 9 | ![](/images/beam-on-kda/kfh-check.png) 10 | 11 | You are now ready to move to the next stage where replay data into these infrastructure components. 12 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/create-infrastructure/ingest-data.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Replay historic taxi trips" 3 | chapter: false 4 | weight: 32 5 | --- 6 | 7 | We will use a Java application to replay a historic set of taxi trips made in NYC that are stored in objects in Amazon S3 into the Kinesis stream. 8 | The Java application has already been compiled and loaded onto the Cloud9 development environment. If you are interested in the details of the application, you can obtain the sources, including a CloudFormation template to build the Java application with Amazon CodePipeline, from [GitHub](https://github.com/aws-samples/amazon-kinesis-analytics-beam-taxi-consumer). 9 | 10 | 1. Navigate back to the terminal window in the Cloud9 IDE. 11 | 12 | 2. 
Enter the following command into the terminal prompt to start producing events into the Kinesis data stream created earlier, `beam-workshop`. 13 | 14 | {{< highlight plain >}} 15 | java -jar ./replay/amazon-kinesis-replay-*.jar -objectPrefix artifacts/kinesis-analytics-taxi-consumer/taxi-trips-partitioned.json.lz4/dropoff_year=2018/ -speedup 720 -streamName beam-workshop 16 | {{< /highlight >}} 17 | 18 | You should see lines similar to the one below in the terminal if the above operation was successful. Please keep the ingestion running while you continue with the next step. 19 | 20 | {{< highlight plain >}} 21 | INFO StreamPopulator - all events with dropoff time until 2018-01-01T04:36:21Z have been sent (3442.0 events/sec, PT-1S replay lag) 22 | {{< /highlight >}} 23 | 24 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/deploy-batch-pipeline/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Deploy Batch Pipeline" 3 | date = 2020-07-10T10:27:27+02:00 4 | weight = 70 5 | chapter = false 6 | +++ 7 | 8 | In the previous section, you have deployed a Beam application that is generating statistics on the number of incoming trips. At first the application only generated metrics for the entire city. This got then adapted to so that the application in now generating more fine grained metrics that are broken down per borough. 9 | 10 | However, the new metrics are only generated for newly arriving events. In this section, we will execute the Beam pipeline in a batch fashion on [Kinesis Data Analytics](https://aws.amazon.com/de/kinesis/data-analytics/) to backfill the metric with the historic data that has been persisted to Amazon S3. 
11 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/deploy-batch-pipeline/configure-kda-batch.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Configure Beam pipeline in batch mode" 3 | menuTitle = "Configure Beam pipeline" 4 | date = 2020-07-10T10:27:27+02:00 5 | weight = 20 6 | +++ 7 | 8 | 9 | 1. Navigate to the [AWS CloudFormation console](https://console.aws.amazon.com/cloudformation) and select the CloudFormation template that has been created for the workshop. Click on the **Outputs** tab and copy the value of the key *InputS3Pattern*. 10 | 11 | 1. Then, navigate to the [Kinesis Data Analytics console](https://console.aws.amazon.com/kinesisanalytics/home#/applications/dashboard) and select the application you have created earlier 12 | 13 | 1. Press the blue **Configure** button next to the name of the application 14 | 15 | 1. Expand the properties section, select the **BeamApplicationProperties** group and click on **Edit group** 16 | 17 | 1. Change the value of *Source* to `s3`and add a row with the *Key* `InputS3Pattern` and value you have copied from the output section of the CloudFormation template. 18 | 19 | ![KDA batch configuration](/images/beam-on-kda/kda-update-config-batch.png) 20 | 21 | 1. Confirm these changes by pressing **Save**. Apply the changes to the application by pressing **Update** at the bottom of the page. 22 | 23 | Changing these options will change the source of the application, but it will leave the remaining code unchanged. Instead of reading events in a streaming fashion from a data stream, the aplication will now read the historic data set from Amazon S3. 
24 | 25 | {{< highlight java >}} 26 | input = p 27 | .apply("S3 source", TextIO 28 | .read() 29 | .from(options.getInputS3Pattern()) 30 | ) 31 | .apply("Parse S3 events",ParDo.of(new EventParser.S3Parser())); 32 | {{< / highlight >}} 33 | 34 | Once you have updated the settings, your application will be redeployed which takes 1-2 minutes. -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/deploy-batch-pipeline/execute-beam-emr.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Execute the batch pipeline" 3 | date = 2020-07-10T10:27:27+02:00 4 | weight = 30 5 | +++ 6 | 7 | 1. Once the update is completed and the application is running again, inspect the updated job graph. You can see how the structure of the application graph changed, as the Beam pipeline has generated a different job graph for the batch application. 8 | 9 | ![KDA batch job graph](/images/beam-on-kda/kda-patch-job-graph.png) 10 | 11 | 1. Navigate to the Flink dashboard by clicking on **Open Apache Flink dashboard** at the top of the page 12 | 13 | 1. Select the only job from the **Runing Job List** to track the progress of the execution 14 | 15 | ![KDA batch job graph](/images/beam-on-kda/kda-batch-flink-dashboard.png) 16 | 17 | 1. Once the pipeline has completed, you can inspect the metrics for the boroughs that have been generated in [Amazon CloudWatch](https://console.aws.amazon.com/cloudwatch/home?dashboards:). The graph shows count of taxi trips that are backfilled. 
18 | 19 | ![](/images/beam-on-kda/cw-dashboard-trips-backfilled.png) 20 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/deploy-streaming-pipeline/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Deploy Streaming Pipeline" 3 | menuTitle: "Deploy Streaming Pipeline" 4 | chapter: false 5 | weight: 40 6 | --- 7 | 8 | In this chapter you will compile the Beam pipeline to a Jar file. The Beam pipeline is configured with the Apache Flink Runner, so that it can be executed with Kinesis Data Analytics in a managed Apache Flink environment. You will then create a Kinesis Data Analytics application and execute the Beam pipeline so that it populates an [Amazon CloudWatch](https://aws.amazon.com/cloudwatch/) dashboard with the total number of taxi trips made in NYC in a certain time interval. 9 | 10 | One the application is producing output, you will change the output of the Beam pipeline to generate more fine grained metrics on the number of trips per borough. 11 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/deploy-streaming-pipeline/build-uber-jar.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Build Beam Pipeline As Jar" 3 | chapter: false 4 | weight: 41 5 | --- 6 | 7 | You first need to package the Flink application and all its dependencies into a Jar file that can be deployed to an Apache Flink environment. 8 | 9 | 1. Open a new **Terminal** window in Cloud9 IDE. Build the jar file by running the following commands in a terminal window from AWS Cloud9 development environment: 10 | {{< highlight bash >}} 11 | cd code 12 | mvn clean compile package 13 | {{< / highlight >}} 14 | 15 | 16 | 1. Maven will now build a jar file that includes all the dependencies of the Flink application. 
The jar file is stored in the **target** directory and can be deployed to Amazon Kinesis Data Analytics. Once built we can upload this jar file to Amazon S3. We have already done this for you in this workshop for a pre-built jar file. 17 | 18 | 1. The build takes a couple of minutes to complete. The resulting jar file can be deployed on a Flink cluster and on Kinesis Data Analytics, respectively. For the purpose of this workshop, the jar has already been build and uploaded to S3, so you don't need to wait unitl the build completes. -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/deploy-streaming-pipeline/create-kda-app.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Create application" 3 | chapter: false 4 | weight: 42 5 | --- 6 | 7 | You can then execute the Beam pipeline with Amazon Kinesis Data Analytics in a fully managed Apache Flink environment. A Kinesis Analytics application basically consists of a reference to the Flink application in Amazon S3 and some additional configuration data. Once the Kinesis Data Analytics application has been created, it can be deployed and executed by the services in a fully managed environment. 8 | 9 | 1. Navigate to the [Kinesis Data Analytics Console](https://console.aws.amazon.com/kinesisanalytics) and press **Create application** 10 | 11 | 1. Enter `beam-workshop` as the **Application name** 12 | 13 | 14 | 1. Select _Apache Flink 1.11_ as the **Runtime**. 15 | 16 | 1. Choose _Choose from IAM roles that Kinesis Kinesis Analytics can assume_ as **Access permissions**. If you are attending an AWS hosted event, select the only role that is suggested. If you are running the workshop on your own, select the role that contains `KdaRole`. 17 | 18 | ![Create KDA Application](/images/beam-on-kda/kda-create-app.png) 19 | 20 | 1. Choose *Production* as the **Template for application settings** 21 | 22 | 1. 
Create the application by pressing the blue **Create Application** button. This will take you to the overview screen where you will configure further properties of the application. 23 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/deploy-streaming-pipeline/start-kda-app.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Start application" 3 | chapter: false 4 | weight: 45 5 | --- 6 | 7 | The application is finally ready for execution through the Kinesis Data Analytics service in a fully managed Flink environment. You can now start the execution and send events into the Kinesis data stream. The Flink application will then continuously process the data that is ingested into the data stream and send derived insights to Amazon CloudWatch for visualization. 8 | 9 | 1. Once the update has completed, press **Run** on the resulting page and confirm that you want to run the application *without a snapshot* by choosing **Run** again. The application will now start in the background, which can take a couple of minutes. Once it is running, you can inspect the operator graph of the Flink application. 10 | 11 | ![kda-running-app](/images/beam-on-kda/kda-running-beamapp.png) 12 | 13 | {{% notice tip %}} 14 | If you don't see the object graph after the Kinesis data analytics application is running please hit refresh in the browser. You can further debug the application by inspecting the application logs that are exposed through Amazon CloudWatch. 15 | {{% /notice %}} 16 | 17 | 2. Make sure that you are still producing trip events into the Kinesis data stream. In case the application is no longer running, you can restart it by executing the following command in the console window of IntelliJ. 
18 | 19 | {{< highlight plain >}} 20 | java -jar ./replay/amazon-kinesis-replay-*.jar -objectPrefix artifacts/kinesis-analytics-taxi-consumer/taxi-trips-partitioned.json.lz4 -aggregate -streamName beam-workshop -speedup 720 21 | {{< /highlight >}} 22 | 23 | 1. Navigate to the [Amazon CloudWatch Console](https://console.aws.amazon.com/cloudwatch) and select **Dashboards** 24 | 25 | 1. Click on the dashboard whose name contains `BeamWorkshopDashboard`. The dashboard has already been pre-created for you and it contains two widgets, one displaying the total number of taxi trips and one for the number of taxi trips by borough. For now, the Beam application only generates the total amount, but we are going to change that in the next section. 26 | 27 | ![](/images/beam-on-kda/cw-dashboard-total-number-trips.png) 28 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/deploy-streaming-pipeline/update-kda-app.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Update application parameters" 3 | chapter: false 4 | weight: 45 5 | --- 6 | 7 | We will now update the application in Kinesis Data Analytics to start counting trips per borough by updating the configuration. This shows how we can use Beam to count trips that cumulates by this new [metric](https://github.com/aws-samples/amazon-kinesis-analytics-beam-taxi-consumer/blob/678096fcd8451f0d4d98871a3d3d97c63384d1fa/src/main/java/com/amazonaws/samples/beam/taxi/count/TaxiCount.java#L127). 8 | 9 | The Beam Java class that does is [PartitionByBorough.java](https://github.com/aws-samples/amazon-kinesis-analytics-beam-taxi-consumer/blob/master/src/main/java/com/amazonaws/samples/beam/taxi/count/PartitionByBorough.java). 10 | 11 | 1. Go back to the Kinesis Data Analytics application and update the configuration. Select **Confiuration** and Scroll down to the **Properties** section you created earlier. 
Select **Edit group** for the group **BeamApplicationProperties**. 12 | 13 | 1. Update the property `OutputBoroughs` to `true`. Select **update** and wait a few minutes for the operator graph to update. 14 | 15 | ![kda-updateborough](/images/beam-on-kda/kda-updateborough.png) 16 | 17 | 18 | Changing this setting will change the internal application logic. Initially, the Beam application was using a global window to count all trip events in a 5 second window. 19 | 20 | {{< highlight java >}} 21 | metrics = window 22 | .apply("Count globally", Combine 23 | .globally(Count.combineFn()) 24 | .withoutDefaults() 25 | ) 26 | {{< / highlight >}} 27 | 28 | By adapting the parameter, the application will now first partition the trip events by their respective brough and then count the trip event per borough in the same 5 second window. 29 | 30 | {{< highlight java >}} 31 | metrics = window 32 | .apply("Partition by borough", ParDo.of(new PartitionByBorough())) 33 | .apply("Count per borough", Count.perKey()) 34 | {{< / highlight >}} 35 | 36 | In both cases, the result is sent to Amazon CloudWatch for visualization. 37 | 38 | 3. Go back to the [CloudWatch dashboard](https://console.aws.amazon.com/cloudwatch/), and hit refresh in top right. In the bottom graph you should start seeing trips that are counted per borough, similar to this. It appears for this data set the borough of Manhattan has the most trips! 
39 | 40 | ![cw-dashboard-trips-by-borough](/images/beam-on-kda/cw-dashboard-trips-by-borough.png) 41 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/getting-started/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Getting started" 3 | chapter: false 4 | disableToc: false 5 | weight: 10 6 | --- 7 | 8 | Once you have successfully configured your environment with the browser, you can start the workshop by following one of the following depending on whether you are... 9 | 10 | - ...[attending an AWS hosted event (using AWS provided hashes)]({{< relref "./aws-event/" >}}) 11 | - ...[running the workshop on your own (in your own account)]({{< relref "./self-paced/" >}}) 12 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/getting-started/aws-event.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "...at an AWS event" 3 | weight: 11 4 | --- 5 | 6 | {{% notice warning %}} 7 | Only complete this section if you are at an AWS hosted event (such as re:Invent, 8 | Loft, Immersion Day, or any other event hosted by an AWS employee). If you 9 | are running the workshop on your own, go to: [Start the workshop on your own]({{< relref "./self-paced/" >}}). 10 | {{% /notice %}} 11 | 12 | ### Login to AWS Workshop Portal 13 | 14 | This workshop creates an AWS acccount and the development environment that is used throughout the workshop. You will need the **Participant Hash** provided upon entry, and your email address to track your unique session. 15 | 16 | Connect to the portal by clicking the button or browsing to [https://dashboard.eventengine.run/](https://dashboard.eventengine.run/). 
17 | 18 | Once you have completed the step above, you can head straight to [**Configure development environment**]({{< ref "../configure-dev-env/" >}}) 19 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/getting-started/self-paced.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "...on your own" 3 | weight: 12 4 | --- 5 | 6 | {{% notice warning %}} 7 | Only complete this section if you are running the workshop on your own. If you are at an AWS hosted event (such as re:Invent, 8 | Loft, Immersion Day, or any other event hosted by an AWS employee), continue with [**Configure development environment**]({{< ref "../configure-dev-env/" >}}). 9 | {{% /notice %}} 10 | 11 | ### Running the workshop on your own 12 | 13 | 1. Launch the following CloudFormation template to create a prepared development environment in your account. Supported regions include US East (N. Virginia), US West (Oregon), Asia Pacific (Tokyo), Asia Pacific (Sydney), EU (Ireland), EU (Frankfurt), and any other region supporting these resource types: **Amazon Code Pipeline**, **Amazon CloudWatch Service**, and **Amazon Kinesis Data Analytics**. 14 | 15 | [![Launch CloudFormation Stack](/images/beam-on-kda/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home#/stacks/new?stackName=beam-workshop&templateURL=https://aws-streaming-artifacts.s3.amazonaws.com/streaming-analytics-workshop/cfn-templates/streaming-analytics-workshop-beam.template.json) 16 | 17 | 1. In the following dialog, choose **Next** 18 | 19 | ![Launch stack](/images/beam-on-kda/cfn-1-create-stack.png) 20 | 21 | 1. Choose `beam-workshop` as **Stack name** and confirm with **Next** 22 | 23 | 1. Accept all default values on the next dialog page by clicking **Next** at the bottom of the page 24 | 25 | 1. 
On the last page of the dialog, confirm that CloudFormation may create IAM resource by selecting **I acknowledge that AWS CloudFormation might create IAM resources**. Click on **Create stack** at the bottom of the page. 26 | 27 | ![Lauch stack](/images/beam-on-kda/cfn-4-confirm-capabilities.png) 28 | 29 | It takes approximately 10 minutes for the stack associated with this CloudFormation template to deploy. You can continue with the next step [Configure development environment]({{< ref "../configure-dev-env/" >}}) while the stack is being deployed. 30 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/monitoring-logging-profiling/_index.en.md: -------------------------------------------------------------------------------- 1 | +++ 2 | title = "Optional: Monitoring, Logging, and Profiling" 3 | menuTitle = "Optional: Monitoring and Profiling" 4 | date = 2020-07-10T10:26:21+02:00 5 | weight = 80 6 | chapter = true 7 | +++ 8 | 9 | In this chapter, you will set up observability dashboards in Amazon CloudWatch to inspect an track important Beam and Kinesis metrics. This can used to track cpu, milliseconds behind latest. You will also set-up inspection of logs from Kinesis Data Analytics so you can identify any operational issues. Finally, you will learn how to use Amazon CodeGuru profiler to gain advanced insights about your Apache Beam application managed by Amazon Kinesis Data Analytics. 10 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/monitoring-logging-profiling/cw-metrics.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Monitor important metrics" 3 | chapter: false 4 | weight: 71 5 | --- 6 | 7 | Now that the Kinesis data stream is scaling, we'll take a look at how we can identify this overloading situation by looking at the metrics of the stream. 
To this end, we'll create a CloudWatch dashboard that displays some of the relevant metrics. 8 | 9 | 1. Navigate to the [CloudWatch console](https://console.aws.amazon.com/cloudwatch), click on **Dashboards** in the navigation pane on the left, and select the *BeamWorkshopDashboard* dashboard you have been using earlier. 10 | 11 | 1. Click on **Add widget** and choose the **Line** widget to compare metrics over time. Confirm with **Next**. 12 | 13 | 1. In the next dialog, choose *metrics* and confirm with **Configure** 14 | 15 | 1. Enter `beam-workshop` in the search field and select **Kinesis > Stream metrics** 16 | 17 | ![](/images/beam-on-kda/cw-dashboard-1-filter.png) 18 | 19 | 1. Select the **IncomingRecords** and **GetRecords.IteratorAgeMilliseconds** metrics and navigate to **Graphed metrics** 20 | 21 | ![](/images/beam-on-kda/cw-dashboard-2-select-metrics.png) 22 | 23 | 1. Adjust the period of both metrics to **1 Minute** and change the **Statistic** to _Sum_ and _Maximum_, respectively. Switch the **Y Axis** of the GetRecords.IteratorAgeMilliseconds metric and confirm with **Create**. 24 | 25 | ![](/images/beam-on-kda/cw-dashboard-3-metric-properties.png) 26 | 27 | The dashboard now shows two metrics: the amount of incoming messages and the millisBehindLatest metric. The metric reports the time difference between the oldest record currently read by the Kinesis Data Analytics application and the latest record in the stream according to the ingestion time in milliseconds. So it indicates how much behind the processing is from the tip of the stream. 28 | 29 | Other important metrics include, `ReadProvisionedThroughputExceeded` and `WriteProvisionedThroughputExceeded`. You can add them to the dashboard as well to see how the producer is throttled. 
30 | 31 | ![](/images/beam-on-kda/cw-dashboard-5-scale-kds.png) 32 | -------------------------------------------------------------------------------- /workshop/content/beam-on-kda/monitoring-logging-profiling/inspect-flink-application-logs.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Inspect Flink application logs" 3 | chapter: false 4 | weight: 72 5 | --- 6 | 7 | In addition to operational metrics, you can configure the Kinesis Data Analytics application to write messages to CloudWatch Logs. This capability seamlessly integrates with common logging frameworks, such as Apache Log4j and the Simple Logging Facade for Java (SLF4J). So it is useful for debugging and identifying the cause of operational issues. 8 | 9 | As you have enabled logging for your Kinesis Data Analytics application, the logs are sent to a CloudWatch Logs stream. The dashboard already contains a widget to display errors from these logs. You will now add a widget that queries and displays the complete log messages. 10 | 11 | 1. On the CloudWatch dashboard, select **Add widget** 12 | 13 | 1. In the dialog, choose the **Logs table** widget and confirm with **Configure** 14 | 15 | 1. Search for `beam-workshop` and select the log stream of the KDA application named as: `aws/kinesis-analytics/beam-workshop` 16 | 17 | ![](/images/beam-on-kda/cw-dashboard-4-configure-log.png) 18 | 19 | You will now have two additional widgets in your dashboard: 20 | 21 | ![](/images/beam-on-kda/cw-dashboard-6-log-result.png) 22 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda-studio/1_Getting_Started/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "1. 
Getting started" 3 | chapter: false 4 | disableToc: false 5 | menuTitle: "Getting started" 6 | weight: 10 7 | --- 8 | 9 | An AWS account and access to the AWS web console is required to complete this workshop. 10 | 11 | If you are are attending an AWS hosted event (re:Invent, Loft, Immersion Day, or any other event hosted by an AWS employee) follow the instructions provided at [AWS Event]({{}}) 12 | 13 | If you are running this workshop on your own follow the instructions provided at [Self Paced]({{}}) -------------------------------------------------------------------------------- /workshop/content/flink-on-kda-studio/1_Getting_Started/aws_event.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "AWS Event" 3 | weight: 10 4 | --- 5 | 6 | {{% notice warning %}} 7 | Only complete this section if you are at an AWS hosted event (such as re:Invent, Loft, Immersion Day, or any other event hosted by an AWS employee). If you are running the workshop on your own, go to [Self Paced]({{}}). 8 | {{% /notice %}} 9 | 10 | #### Log into AWS Console via. AWS Workshop Portal 11 | 12 | Your instructor has already created an AWS account for you. Your instructor should provide you a participant hash. 13 | 14 | Once you have your participant hash go to [https://dashboard.eventengine.run/](https://dashboard.eventengine.run/) 15 | 16 | Enter your participant has and select Accept Terms & Login 17 | 18 | ![Event_Engine_Login_Hash](/images/flink-on-kda-studio/event_engine_1.png) 19 | 20 | Select how you want to log in. Selecting Email One-Time Password (OTP) is recommended. However you can also use your Amazon.com retail account to login. 21 | 22 | ![Event_Engine_Login_Email](/images/flink-on-kda-studio/event_engine_2.png) 23 | 24 | Follow the prompts to complete the login process. Once you have sucssfully loged in. You will see the following screen. 
Select AWS Console 25 | 26 | ![Event_Engine_Access_Console](/images/flink-on-kda-studio/event_engine_3.png) 27 | 28 | A window will open. From the window select Open AWS Console 29 | 30 | ![Event_Engine_Access_Console](/images/flink-on-kda-studio/event_engine_4.png) 31 | 32 | This will open the AWS Console in a new window on your web browser. If you can see the home page for the AWS Console as depicted below you have sucssfully logged into your AWS account. 33 | 34 | ![Event_Engine_AWS_Console](/images/flink-on-kda-studio/event_engine_5.png) 35 | 36 | Now that you have successfully logged into your AWS account and are able to access the AWS Console, lets begin the next step [Environment Set Up]({{}}) -------------------------------------------------------------------------------- /workshop/content/flink-on-kda-studio/1_Getting_Started/self_paced.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Self Paced" 3 | weight: 10 4 | --- 5 | 6 | {{% notice warning %}} 7 | Only complete this section if you are running the workshop on your own. If you are at an AWS hosted event (such as re:Invent, 8 | Loft, Immersion Day, or any other event hosted by an AWS employee), continue with [AWS Event]({{}}). 9 | {{% /notice %}} 10 | 11 | #### Running the Workshop on Your Own 12 | 13 | You will need an AWS Account with web console access to complete the workshop. 
14 | 15 | If you do not have an AWS account [Create an AWS Account](https://aws.amazon.com/getting-started/) 16 | 17 | As pictured below you should be able to access the [AWS Console](https://console.aws.amazon.com/) 18 | 19 | ![Event_Engine_AWS_Console](/images/flink-on-kda-studio/event_engine_5.png) 20 | 21 | Once you have can access the [AWS Console](https://console.aws.amazon.com/), lets begin the next step [Enviorment Set Up]({{}}) -------------------------------------------------------------------------------- /workshop/content/flink-on-kda-studio/3_Send_Data_to_Kinesis_Data_Stream/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "3. Send Data to Kinesis Data Stream" 3 | chapter: false 4 | disableToc: false 5 | menuTitle: "Send Data to Kinesis Data Stream" 6 | weight: 10 7 | --- 8 | 9 | #### Upload Notebooks to KDA Studio 10 | 11 | 1. Go to the [Kinesis Data Analytics Console](https://console.aws.amazon.com/kinesisanalytics/home) 12 | 2. Click on **Studio** 13 | 14 | ![KDA_Stuio](/images/flink-on-kda-studio/studio_set_up.png) 15 | 16 | 3. Click on the KDA studio instance you created 17 | 4. Click **Run** 18 | 5. Click on **Open in Apache Zeppelin** 19 | 6. Click on **Import note** and upload [Nyc_Taxi_Produce_KDA_Zeppelin_Notebook.zpln](https://sharkech-public.s3.amazonaws.com/flink-on-kda/Nyc_Taxi_Produce_KDA_Zeppelin_Notebook.zpln). Open the notebook once uploaded 20 | 21 | ![KDA_Stuio](/images/flink-on-kda-studio/zeppelin_1.png) 22 | 23 | 7. Follow the steps outlined in the notebook. Run each cell in the uploaded notebook. To run a cell hit the small play button in the top left corner of each paragraph 24 | 25 | ![Producer_Notebook](/images/flink-on-kda-studio/producer_notebook.png) 26 | 27 | You are now sucssfully sending data to the Kinesis Data Stream you created earlier. Leave a browser window open with the notebook running. 
This will ensure it continues to send data to your Kinesis Data Stream as you work on the next section of the lab. 28 | 29 | **Note** the [Nyc_Taxi_Produce_KDA_Zeppelin_Notebook.zpln](https://sharkech-public.s3.amazonaws.com/flink-on-kda/Nyc_Taxi_Produce_KDA_Zeppelin_Notebook.zpln) will send data to Kinesis Data Stream for approx 30 minutes. You may need to periodically rerun the notebook to sample data sending to Kinesis Data Stream. If you are working on the subsequent notebook and do not see any results check that the [Nyc_Taxi_Produce_KDA_Zeppelin_Notebook.zpln](https://sharkech-public.s3.amazonaws.com/flink-on-kda/Nyc_Taxi_Produce_KDA_Zeppelin_Notebook.zpln) is still running and does not need to be restarted. 30 | 31 | When you are ready proceed to the next step [Interactive Notebook]({{}}) -------------------------------------------------------------------------------- /workshop/content/flink-on-kda-studio/4_Interactive_Notebook/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "4. Interactive Notebook" 3 | chapter: false 4 | disableToc: false 5 | menuTitle: "Interactive Notebook" 6 | weight: 10 7 | --- 8 | 9 | #### Upload Notebooks to KDA Studio 10 | 11 | 1. Go to the [Kinesis Data Analytics Console](https://console.aws.amazon.com/kinesisanalytics/home) 12 | 2. Click on **Studio** 13 | 14 | ![KDA_Studio](/images/flink-on-kda-studio/studio_set_up.png) 15 | 16 | 3. Click on the KDA studio instance you created 17 | 4. Click on **Open in Apache Zeppelin** 18 | 5. Click on **Import note** and upload [sql_1.13.zpln](https://sharkech-public.s3.amazonaws.com/flink-on-kda/sql_1.13.zpln) 19 | 20 | ![KDA_Studio](/images/flink-on-kda-studio/zeppelin_1.png) 21 | 22 | 6. Follow the steps outlined in the notebook. Run each cell in the uploaded notebook. 
To run a cell hit the small play button in the top left corner of each paragraph 23 | 24 | ![Interactive_Notebook](/images/flink-on-kda-studio/interactive_notebook.png) 25 | 26 | When you complete running the entire interactive notebook proceed to the next step [Deployable Notebook]({{}}) -------------------------------------------------------------------------------- /workshop/content/flink-on-kda-studio/5_Deployable_Notebook/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "5. Deployable Notebook" 3 | chapter: false 4 | menuTitle: "Deployable Notebook" 5 | disableToc: false 6 | weight: 10 7 | --- 8 | 9 | #### Upload Notebooks to KDA Studio 10 | 11 | 1. Go to the [Kinesis Data Analytics Console](https://console.aws.amazon.com/kinesisanalytics/home) 12 | 2. Click on **Studio** 13 | 14 | ![KDA_Studio](/images/flink-on-kda-studio/studio_set_up.png) 15 | 16 | 3. Click on the KDA studio instance you created 17 | 4. Click on **Open in Apache Zeppelin** 18 | 5. Click on **Import note** and upload [deployable_1.13.zpln](https://sharkech-public.s3.amazonaws.com/flink-on-kda/deployable_1.13.zpln) 19 | 20 | ![KDA_Studio](/images/flink-on-kda-studio/zeppelin_1.png) 21 | 22 | 6. Open the [deployable_1.13.zpln](https://sharkech-public.s3.amazonaws.com/flink-on-kda/deployable_1.13.zpln) 23 | 24 | #### Build Notebook into Flink Application 25 | 26 | 1. Click on **Actions for ...** 27 | 2. Select Build deployable_1.13 and export to Amazon S3 28 | 29 | ![KDA_Studio](/images/flink-on-kda-studio/build.png) 30 | 31 | The build will take several minutes to complete 32 | 33 | #### Deploy Flink Application 34 | 35 | 1. Click on **Actions for ...** 36 | 2. Select deploy deployable_1.13 as Kinesis Analytics application. Leave all defaults and click **Create Streaming Application** 37 | 38 | ![KDA_Studio](/images/flink-on-kda-studio/deploy.png) 39 | 40 | The deployment will take several minutes to complete. 
Once the deployment is complete: 41 | 42 | 3. Go to the [Kinesis Data Analytics Console](https://console.aws.amazon.com/kinesisanalytics/home). You will see your application listed under the Streaming applications section 43 | 4. If your application is not already running start your application by selecting it and clicking run. Leave all defaults and click **Run** 44 | 45 | 46 | ![KDA_Application_Deployed](/images/flink-on-kda-studio/deployed_run.png) 47 | 48 | Your studio notebook is now deployed as a Kinesis Data Analytics application! 49 | 50 | The application will write data to your S3 bucket. Send data to Kinesis via the [Nyc_Taxi_Produce_KDA_Zeppelin_Notebook.zpln](https://sharkech-public.s3.amazonaws.com/flink-on-kda/Nyc_Taxi_Produce_KDA_Zeppelin_Notebook.zpln) and the deployed application will continue to stream the data to S3. 51 | 52 | When you are ready proceed to the next step [Clean Up]({{}}) if you want to delete the resources we used for this workshop -------------------------------------------------------------------------------- /workshop/content/flink-on-kda-studio/6_Clean_Up/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "6. Clean Up" 3 | menuTitle: "Clean Up" 4 | chapter: false 5 | weight: 10 6 | --- 7 | 8 | Now that we have completed the workshop you can clean up (delete) the resources we created if you want to. 9 | 10 | #### Delete Kinesis Data Stream 11 | 12 | 1. Go to the [Kinesis Data Streams Console](https://console.aws.amazon.com/kinesis/home?region=us-east-1#/streams/list) 13 | 2. Select the data stream 14 | 3. Click on **Actions** 15 | 4. Select Delete 16 | 17 | ![delete_data_stream](/images/flink-on-kda-studio/delete_data_stream.png) 18 | 19 | #### Delete S3 Bucket 20 | 21 | 1. Go to the [S3 Console](https://s3.console.aws.amazon.com/s3/home) 22 | 2. Select the S3 bucket 23 | 3. Click on **Empty** and follow the prompts to empty the bucket 24 | 4. 
Click on **Delete** and follow the prompts to delete the bucket 25 | 26 | ![delete_s3](/images/flink-on-kda-studio/delete_s3.png) 27 | 28 | #### Delete a KDA Studio + Application 29 | 30 | 1. Go to the [Kinesis Data Analytics Console](https://console.aws.amazon.com/kinesisanalytics/home) 31 | 2. Select the streaming application 32 | 3. Click on **Actions** 33 | 4. Select Delete and follow the prompts to delete the application 34 | 35 | ![delete_streaming_app](/images/flink-on-kda-studio/delete_streaming_app.png) 36 | 37 | 5. Click on **Studio** 38 | 6. Select the studio notebook(s) 39 | 7. Click on **Actions** 40 | 8. Select delete 41 | 42 | ![delete_studio](/images/flink-on-kda-studio/delete_studio.png) 43 | 44 | You have completed deleting the resources used in this workshop -------------------------------------------------------------------------------- /workshop/content/flink-on-kda-studio/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Apache Flink on Amazon Kinesis Data Analytics Studio" 3 | menuTitle: "Apache Flink on KDA Studio" 4 | chapter: true 5 | weight: 10 6 | url: "/flink-on-kda-studio/" 7 | --- 8 | 9 | In this lab we will explore Kinesis Data Analytics (KDA) via KDA Studio Notebooks. KDA Studio Notebooks provide an interactive development experience for Apache Flink. Studio notebooks allow us to easily develop Flink applications and then deploy them as long running KDA applications. 10 | 11 | For this lab we will stream and analyze the [NYC Taxi Cab trips data set](https://www1.nyc.gov/site/tlc/about/tlc-trip-record-data.page) with the [SQL language in Flink](https://nightlies.apache.org/flink/flink-docs-release-1.13/docs/dev/table/sql/overview/). 12 | 13 | We will implement the following architecture 14 | 15 | ![Flink on KDA Studio Architecture](/images/flink-on-kda-studio/workshop_architecture.png) 16 | 17 | This workshop has 6 steps. A brief description of each is provided below. 
18 | 1. [Getting Started]({{}}) - Create and/or log into your AWS account 19 | 2. [Environment Set Up]({{}}) - Deploy the necessary AWS resources for this workshop 20 | 3. [Send Data to Kinesis Data Stream]({{}}) - Set up and run a KDA notebook that will send sample NYC Taxi Cab data to a Kinesis Data Stream 21 | 4. [Interactive Flink Studio Notebook]({{}}) - Run an interactive KDA notebook that will cover key concepts for programming Flink applications via Flink's support for the SQL language 22 | 5. [Deployable Flink Studio Notebook]({{}}) - Build and deploy a KDA notebook as a long running KDA application 23 | 6. [Environment Clean Up]({{}}) - Delete the resources you created in your AWS account 24 | 25 | When you are ready continue on to [Getting Started]({{}}) 26 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Apache Flink on Amazon Kinesis Data Analytics" 3 | menuTitle: "Apache Flink on KDA" 4 | chapter: true 5 | weight: 10 6 | url: "/flink-on-kda/" 7 | --- 8 | 9 | In this workshop, you will build an end-to-end streaming architecture to ingest, analyze, and visualize streaming data in near real-time. You set out to improve the operations of a taxi company in New York City. You'll analyze the telemetry data of a taxi fleet in New York City in near-real time to optimize their fleet operations. 10 | 11 | ![Kibana dashboard](/images/flink-on-kda/kibana-visualization.png) 12 | 13 | You will not only learn how to deploy, operate, and scale an [Apache Flink][1] application with [Kinesis Data Analytics for Apache Flink][2], but also explore the basic concepts of Apache Flink and running Flink applications in a fully managed environment on AWS. 
14 | 15 | [1]: https://flink.apache.org/ 16 | 17 | [2]: https://aws.amazon.com/kinesis/data-analytics/ 18 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Advanced Scaling and Monitoring" 3 | chapter: false 4 | weight: 80 5 | --- 6 | 7 | There are many important operational metrics to measure when assessing the performance of a running Apache Flink Application. In this section, we will look at the important CloudWatch metrics for Kinesis Data Analytics for Apache Flink applications, what they mean and what appropriate alarms might be for each. 8 | 9 | ![Picture of CloudWatch Monitoring Dashboard](/images/flink-on-kda/advanced-monitoring-index-1-cw-dashboard.png?classes=border,shadow) 10 | 11 | --------------------------------- 12 | 13 | Next, we will utilize these metrics to influence the scaling behavior of the Apache Flink application. Using autoscaling groups, we will see how to utilize the `numRecordsInPerSecond` metric to scale up or down Flink Applications automatically. 14 | 15 | ![CW Alarm](/images/flink-on-kda/cw-in-alarm.png) 16 | 17 | ---------------------------------- 18 | 19 | Finally, we will dive into the Apache Flink Dashboard to look at Backpressure, Checkpointing and other Operational Performance Indicators. 
20 | 21 | ![Picture of Flink Dashboard](/images/flink-on-kda/advanced-monitoring-index-3-flink-dashboard.png?classes=border,shadow) -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/cleanup.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Cleanup" 3 | chapter: false 4 | weight: 300 5 | --- 6 | 7 | To clean up the resources in this section, simply delete the CloudFormation templates that we created: 8 | - Advanced Monitoring Dashboard 9 | - Advanced Scaling CloudWatch Alarms 10 | 11 | 12 | The Flink Dashboard is made available to you simply by running the Kinesis Data Analytics Application 13 | 14 | 15 | ## Nice work! -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/flink-dashboard/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Flink Dashboard" 3 | chapter: false 4 | weight: 200 5 | --- 6 | 7 | ### Let's dive deep into the Flink Web Dashboard, showing how to utilize it to identify common issues within your KDA Flink Applications. 8 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/flink-dashboard/access-dashboard.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Accessing the Dashboard" 3 | chapter: false 4 | weight: 210 5 | --- 6 | 7 | #### To start, navigate to the streaming-analytics-workshop application we've created previously and click the button which says `Open Apache Flink Dashboard`. 8 | 9 | ![Open Flink Web Dashboard](/images/flink-on-kda/open-flink-dashboard.png) 10 | 11 | 12 | #### This will open a new window. 
13 | 14 | ![Flink Dashboard Wide View](/images/flink-on-kda/flink-dashboard-wide-view.png?classes=border,shadow) 15 | 16 | #### This is a running Apache Flink Web Dashboard you would access in any standard Apache Flink workload. 17 | 18 | {{% notice tip %}} 19 | The Flink Dashboard made available to Kinesis Data Analytics for Apache Flink is set to Read-Only, so tasks like deploying jars and modifying configuration are not enabled. 20 | {{% /notice %}} 21 | 22 | Take note of the following components of the Flink Dashboard, as they can be useful for troubleshooting your Flink applications. 23 | 24 | 25 | 1. [LEFT] Running Jobs - Your running application name and the duration it has been running without manual restarts. 26 | 2. [LEFT] Task Managers - The compute nodes that are running the tasks within the application. 27 | 3. [RIGHT] Running Job List - Contains the list of jobs running - should only ever be one within KDA 28 | 1. The Start Time of the Application 29 | 2. Duration the job has been running 30 | 3. Number of Tasks (total subtasks of the application) 31 | 4. Status - Running, Failing, Stopped, Cancelled 32 | 33 | ### Within this exercise, we'll be looking at a few common areas to look when assessing the health of your application using the Apache Flink Web Dashboard. -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/monitoring/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Advanced Monitoring" 3 | chapter: false 4 | weight: 80 5 | --- 6 | 7 | ## Let's dive deep into how to monitor the metrics of your Kinesis Data Analytics for Apache Flink Application. 
-------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/monitoring/cfn-launch-dashboard.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Launch the CloudWatch Dashboard" 3 | chapter: true 4 | weight: 100 5 | --- 6 | 7 | There are quite a few metrics to keep track of and create widgets for. To simplify things, you can launch a CloudFormation Template for the running application which highlights each of these metrics. Let's do that now. 8 | 9 | [![Launch CloudFormation Stack](/images/flink-on-kda/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home#/stacks/new?stackName=kda-advanced-monitoring-dashboard&templateURL=https://aws-data-analytics-workshops.s3.amazonaws.com/kinesis/templates/streaming-analytics-workshop/sample-dashboard.yaml) 10 | 11 | 12 | Click the link above to Launch The Stack, and fill in the necessary details about the workload we've built thus far. 13 | 14 | {{% notice tip %}} 15 | If you haven't built the resources for this lab, please navigate to the [lab which includes these instructions.](/flink-on-kda/getting-started/) 16 | {{% /notice %}} 17 | 18 | ### On the new CloudFormation setup page, click next. 19 | 20 | ![CloudFormation Setup Screenshot](/images/flink-on-kda/advanced-monitoring-metrics-1-cfn-launch.png) 21 | 22 | ### Fill out the necessary details for your existing workload. 23 | 24 | ![CloudFormation Setup Screenshot](/images/flink-on-kda/advanced-monitoring-metrics-2-cfn-launch.png) 25 | 26 | 27 | ### Hit Next and then Create Stack. 
28 | 29 | ![CloudFormation Setup Screenshot](/images/flink-on-kda/advanced-monitoring-metrics-4-cfn-launch.png) 30 | 31 | 32 | ![CloudFormation Setup Screenshot](/images/flink-on-kda/advanced-monitoring-metrics-5-cfn-launch.png) 33 | 34 | 35 | This dashboard should take less than 30 seconds to launch, so open up the outputs tab once it finishes and click on the CloudWatch Dashboard link. I'll meet you on the next page! 36 | 37 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/scaling/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Advanced Scaling" 3 | chapter: false 4 | weight: 120 5 | --- 6 | 7 | ## Let's dive deep into how to scale your Kinesis Data Analytics Application based on the previously discussed metrics. 8 | 9 | Currently, the only way to scale a Kinesis Data Analytics Application automatically is to use the built-in [Autoscale feature](https://docs.aws.amazon.com/kinesisanalytics/latest/java/how-scaling.html#how-scaling-auto). This feature will monitor your application's CPU usage over time, and if it is above 80% for some period of time, will increase the overall parallelism of your application. There will be downtime experienced during this scale up, and an application developer should take this into account when using the Autoscaling feature. 10 | 11 | This is an excellent and useful feature of KDA for Apache Flink, however some applications need to scale based on other factors--not CPU. In this session, we'll take a look at an external way to scale your KDA application based on IncomingRecords or millisBehindLatest metrics on the source Kinesis Data Stream. 
-------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/scaling/getting-started.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Getting Started" 3 | chapter: true 4 | weight: 130 5 | --- 6 | 7 | 8 | To add functionality of scaling based on other metrics, we will utilize [Application Autoscaling](https://docs.aws.amazon.com/autoscaling/application/userguide/what-is-application-auto-scaling.html) to specify our scaling policy as well as other attributes such as cooldown periods. We can also take advantage of any of the autoscaling types--step scaling, target tracking scaling, and schedule-based scaling. The CloudFormation template we will launch will cover step-scaling, but you can review further functionality in [this Github Repository](https://github.com/aws-samples/kda-flink-app-autoscaling). 9 | 10 | ![Application Autoscaling CW Alarm](/images/flink-on-kda/enhanced-monitoring-1.jpg) 11 | 12 | 13 | [![Launch CloudFormation Stack](/images/flink-on-kda/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home#/stacks/new?stackName=kda-advanced-scaling&templateURL=https://aws-data-analytics-workshops.s3.amazonaws.com/kinesis/templates/streaming-analytics-workshop/step-scaling.yaml) 14 | 15 | Let's create this stack and dig into the details of how it works. Click the link above to Launch The Stack, and fill in the necessary details about the workload we've built thus far. 16 | 17 | The above image illustrates the resources that will be created during the launch of this CloudFormation Template. For a more detailed list, view the Resources Tab in the CloudFormation console after launched, or view in designer before launching. 
18 | 19 | 20 | ![Create Stack](/images/flink-on-kda/advanced-scaling-cfn-1-create-stack.png) 21 | 22 | 23 | ## Fill out necessary stack details 24 | - Most should be pre-filled for you, but double check to see if they are correct for your workload. 25 | ![Stack Details](/images/flink-on-kda/advanced-scaling-cfn-2-stack-details.png) 26 | 27 | #### Hit next, skipping Advanced Options 28 | 29 | ## On the last page, allow permissions for CloudFormation to auto-create IAM resources, and the CAPABILITY_AUTO_EXPAND capability. 30 | ![Allow Capabilities](/images/flink-on-kda/advanced-scaling-cfn-3-capabilities.png) 31 | 32 | The stack should take no longer than 5 minutes to complete. Look for the `CREATE_COMPLETE` message in the stack events for a signal to move forward. 33 | 34 | ## Let's dive deeper into what we've just launched... 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 | 48 | 49 | 50 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/advanced-scale-and-monitor/scaling/resources.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Additional Resources" 3 | chapter: true 4 | weight: 150 5 | --- 6 | 7 | ## Congratulations! 8 | #### You have now seen what it takes to autoscale your applications based on a metric other than CPU Utilization. Feel free to use the step-scaling or target-tracking CloudFormation templates to customize your applications as you see fit. 9 | 10 | #### Further details can be found on [GitHub](https://github.com/aws-samples/kda-flink-app-autoscaling). 
11 | 12 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/build-visualization/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Build visualizations" 3 | chapter: false 4 | weight: 55 5 | --- 6 | 7 | You'll now configure Elasticsearch indices and create visualizations in Kibana so that you can finally start sending events with the Flink application to Elasticsearch. 8 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/build-visualization/import-kibana-dashboard.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Import Kibana dashboard" 3 | chapter: false 4 | weight: 20 5 | --- 6 | 7 | Now that the Elasticsearch indices are in place, you can start to visualize the data in Kibana. As a shortcut for this workshop, we have already created a dashboard that you can import through the Kibana web interface. 8 | 9 | 1. Navigate to the Kibana management screen, select **Saved Object**, and then click on **Import** 10 | 11 | ![](/images/flink-on-kda/kibana-2-import-objects.png) 12 | 13 | 1. A JSON file containing the definition of a suitable Kibana dashboard has already been downloaded to the *workshop-resources* folder located on the Windows desktop. Select the `streaming-analytics-workshop-dashboard.json` document from that folder and confirm with **Import** and **Done**. 14 | 15 | ![](/images/flink-on-kda/kibana-3-upload-json.png) 16 | 17 | 1. After the visualization has been imported, select **Index Pattern**, select **pickup_count**, and choose it as the default index by clicking on the star. 
18 | 19 | ![](/images/flink-on-kda/kibana-4-default-index.png) 20 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/build-visualization/ingest-data-elasticsearch.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Ingest data to Elasticsearch" 3 | chapter: false 4 | weight: 30 5 | --- 6 | 7 | You can now finally inspect the results that are generated by the Flink application into Elasticsearch and inspect the visualization in Kibana. 8 | 9 | 1. Terminate the Java producer application that is still producing data into the Kinesis data stream. Go to the terminal pane in IntelliJ and press *CTRL-C* until the producer terminates. 10 | 11 | 1. Navigate to the `main` method of the `ProcessTaxiStream` class and run it. However, as soon as it has started to execute, you can terminate it again by clicking the red square. 12 | 13 | 1. Edit the runtime parameters of the `main` method by choosing **Edit Configurations** 14 | 15 | ![](/images/flink-on-kda/intellij-9-edit-configuration.png) 16 | 17 | 1. Under **Program arguments**, enter `--ElasticsearchEndpoint` followed by the **ElasticsearchEndpoint** that you have noted earlier from the Elasticsearch Service console 18 | 19 | ![](/images/flink-on-kda/intellij-10-configuration-details.png) 20 | 21 | 1. Confirm with **OK** and execute the program again by clicking on the green arrow 22 | 23 | 1. Restart the Java producer application by executing the following command in the Terminal pane of IntelliJ 24 | 25 | {{< highlight plain >}} 26 | java -jar C:\Users\Administrator\Desktop\workshop-resources\amazon-kinesis-replay-0.1.0.jar -objectPrefix artifacts/kinesis-analytics-taxi-consumer/taxi-trips-partitioned.json.lz4 -aggregate -streamName streaming-analytics-workshop -speedup 600 27 | {{< /highlight >}} 28 | 29 | 1. 
Navigate to the Dashboard in Kibana and click on **nyc-tlc-dashboard** to view the visualization of the data generated by the Flink application 30 | 31 | ![](/images/flink-on-kda/kibana-5-visualizatio-partial.png) 32 | 33 | {{% notice info %}} 34 | If you cannot see any new data in the visualization, you may need to adapt the time range in the upper left corner of the Kibana dashboard. The output of the Java producer application will tell you the time of events that are currently produced into the Kinesis stream. 35 | In addition, make sure to check both the Jar and the Flink application are running. 36 | {{% /notice %}} 37 | 38 | 1. Once you have verified that the data generated by the Flink application is visualized by Kibana, stop the Flink application in IntelliJ. Also terminate the Java producer application again by navigating to the Terminal in IntelliJ and pressing *Ctrl-C*. 39 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/cleanup/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Cleanup" 3 | chapter: false 4 | weight: 80 5 | --- 6 | 7 | Congratulations! You have not only built a reliable, scalable, and highly available streaming application based on Apache Flink and Kinesis Data Analytics. You also scaled the different components while ingesting and analyzing up to 25k events per second in near-real time. In large parts, this scenario was enabled by using managed services, so you didn’t need to spend time on provisioning and configuring the underlying infrastructure. 8 | 9 | 10 | #### Be frugal! 11 | 12 | If you are running the workshop on your own and have completed all steps, make sure you delete the resources you have created to avoid causing unnecessary costs. 13 | 14 | First, delete the Kinesis data analytics application. 
Once the application has been deleted, also delete the Kinesis data stream and the CloudWatch dashboard. Finally delete the CloudFormation template and verify that all resources have been successfully removed. 15 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/configure/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Configure development environment" 3 | menuTitle: "Configure dev environment" 4 | chapter: false 5 | weight: 30 6 | --- 7 | 8 | In this chapter, you will configure the development environment that you will use throughout the workshop. To have a consistent experience for all attendees, you will connect to a preconfigured EC2 Instance running Windows over RDP and configure IntelliJ as the IDE. 9 | 10 | 11 | {{% notice info %}} 12 | If you don't like Windows or IntelliJ and are tempted to use your laptop for the lab, **please don't**. You will produce more than 12 MB/sec into a Kinesis data stream; if you use your own laptop for that, the procedure will either fail, or break the conference network, or both. 13 | {{% /notice %}} 14 | 15 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/configure/configure-intellij.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Configure Intellij" 3 | chapter: false 4 | weight: 40 5 | --- 6 | 7 | Now that you have successfully connected to the Windows instance, the next step is to configure the development environment Intellij. This includes cloning the sources of the Flink application from [this Github repository](https://github.com/aws-samples/amazon-kinesis-analytics-taxi-consumer) that is used in the following steps to analyze the incoming taxi data in real time. 8 | 9 | 1. Double click on the Intellij icon on the Desktop 10 | 11 | 1. 
When prompted, do not import any settings, confirm that you have read and accept the terms of the User Agreement, and choose to send (or not to send, depending on your preference) user statistics 12 | 13 | 1. Accept the defaults by choosing **Skip Remaining and Set Defaults** in the Customize IntelliJ dialog 14 | 15 | 1. On the welcome screen, choose **Check out from Version Control** and **Git** 16 | 17 | ![Launch stack](/images/flink-on-kda/intellij-1-welcome.png) 18 | 19 | 1. In the clone repository dialog, insert `https://github.com/aws-samples/amazon-kinesis-analytics-taxi-consumer` as **URL** and confirm with **Clone** 20 | 21 | ![Launch stack](/images/flink-on-kda/intellij-2-clone.png) 22 | 23 | 1. When asked whether to open the IntelliJ IDEA project file, choose **Yes**. Note that you will get a dependency failure after the completion of this step. This is expected and we have a remedy in the upcoming steps. For now move on to the next steps as the dependencies are being loaded. 24 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/configure/connect-rdp.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Connect to Windows desktop" 3 | chapter: false 4 | weight: 30 5 | --- 6 | 7 | 1. Start the Remote Desktop Client and click **Add Desktop** 8 | 9 | 1. Enter the public ip address of the EC2 instance you obtained from the output section of the CloudFormation template and confirm with **Add** 10 | 11 | ![Launch stack](/images/flink-on-kda/remote-desktop-1-create.png) 12 | 13 | 1. Double click on the desktop icon and enter `Administrator` as **Username** and the password you've retrieved earlier from the output section of the CloudFormation template as **Password** 14 | 15 | ![Launch stack](/images/flink-on-kda/remote-desktop-2-credentials.png) 16 | 17 | 1. 
When prompted if you want to continue with the warning that the root certificate couldn't be verified, press **Continue** 18 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/configure/retrieve-password.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Retrieve Windows password" 3 | chapter: false 4 | weight: 10 5 | --- 6 | 7 | In order to connect to the Windows instance, you need to retrieve the corresponding password. The Windows instance has been configured to set a random password on the first startup. The password is also stored securely in the AWS Secrets Manager from where you will obtain it. 8 | 9 | 1. Navigate to the [CloudFormation console](https://console.aws.amazon.com/cloudformation/home) and select the correct CloudFormation stack. If you are running the workshop on your own the Stack is called **streaming-analytics-workshop** and if you are attending an AWS hosted event, there is only one Stack available in the account. 10 | 11 | 1. Verify that the CloudFormation stack was created successfully and is in the state **CREATE_COMPLETED**. Then, navigate to the **Output** section of the CloudFormation template. 12 | 13 | ![Launch stack](/images/flink-on-kda/cfn-5-completed.png) 14 | 15 | 1. Take a note of the public ip address of the EC2 instance containing the development environment and open the link to AWS Secrets Manager in a new browser tab 16 | 17 | ![Launch stack](/images/flink-on-kda/cfn-6-parameters.png) 18 | 19 | 1. 
In the AWS Secrets Manager console, click on **Retrieve secret value** to obtain the Administrator password that has been set for the instance 20 | 21 | ![Lauch stack](/images/flink-on-kda/secrets-manager-retrieve-secret.png) 22 | 23 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/deploy-to-kda/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Deploy to KDA" 3 | chapter: false 4 | weight: 60 5 | --- 6 | 7 | Now that the visualization are available in Kibana and the Flink application is working correctly, we will deploy the Flink application into a proper Flink environment that is managed by Kinesis Data Analytics. We can then increase the amount of data that is sent to the Kinesis stream by 7x to around 12 MB/second. 8 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/deploy-to-kda/build-uber-jar.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Build Uber Jar" 3 | chapter: false 4 | weight: 20 5 | --- 6 | 7 | You first need to package the Flink application and all its dependencies into a Jar file that can be deployed to a Flink evnironment. 8 | 9 | 1. Expand the Maven pane and click on the **m** to execute `clean package` as a Maven goal 10 | 11 | ![](/images/flink-on-kda/intellij-7-maven-package.png) 12 | 13 | 1. Maven will now build a jar file that includes all the dependencies of the Flink application. The jar file is stored in the *target* directory and can be deployed to KDA. 14 | 15 | {{% notice info %}} 16 | If you are feeling lucky, you can upload the generated jar file to an S3 bucket and subsequently deploy it to KDA. However, the following steps will use a precompiled jar file that is tested and works as intended. 
17 | {{% /notice %}} 18 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/deploy-to-kda/configure-kda-app.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Configure KDA application" 3 | chapter: false 4 | weight: 50 5 | --- 6 | 7 | The skeleton of the application has now been created. But you still need to adapt important configuration options, including the location of the Jar file on S3, the name of the Kinesis data stream to read from, and the Elasticsearch endpoint. 8 | 9 | 1. On the resulting page press the blue **Configure** button to configure the Kinesis Analytics application. 10 | 11 | 1. Enter the bucket and prefix of the compiled jar file under **Amazon S3 bucket** and **Path to Amazon S3 object**. You can obtain the correct values from the Output section of the CloudFormation template under **FlinkApplicationJarBucket** and **FlinkApplicationJarObject**. 12 | 13 | ![Configure KDA Application](/images/flink-on-kda/kda-configure.png) 14 | 15 | 1. Expand the **Properties** section and select **Create group**. 16 | 17 | 1. In the resulting dialog, choose `FlinkApplicationProperties` as **Group ID** and add the following two key/value pairs and confirm with **Save**: 18 | 1. `InputStreamName` with the name of the Kinesis stream you've created earlier, eg, `streaming-analytics-workshop` 19 | 1. `ElasticsearchEndpoint` with the Elasticsearch https endpoint you have noted earlier 20 | 21 | ![Configure Property Group](/images/flink-on-kda/kda-property-group.png) 22 | 23 | 1. Expand the **Monitoring** section and verify that **Task** is selected as **Monitoring metrics level** and **CloudWatch logging** is eneabled. 24 | 25 | ![Configure Monitoring](/images/flink-on-kda/kda-monitoring.png) 26 | 27 | 1. 
Expand the **Scaling** section and reduce the **Parallelism** to `1` 28 | 29 | ![Configure Scaling](/images/flink-on-kda/kda-configure-scaling.png) 30 | 31 | 1. Keep the default settings **VPC Connectivity** and press the blue **Update** button at the bottom of the page to update the properties of the application. 32 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/deploy-to-kda/create-kda-app.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Create KDA application" 3 | chapter: false 4 | weight: 40 5 | --- 6 | 7 | You can then create the Kinesis Analytics application. A Kinesis Analytics for Java application basically consists of a reference to the Flink application in S3 and some additional configuration data. Once the Kinesis Analytics for Java application has been created, it can be deployed and executed by the services in a fully managed environment. 8 | 9 | 1. Navigate to the [Kinesis console](https://console.aws.amazon.com/kinesis/home). Select *Kinesis Data Analytics* and press **Create application**. 10 | 11 | ![Kinesis Dashboard](/images/flink-on-kda/kinesis-welcome-create-kda.png) 12 | 13 | 1. Enter `streaming-analytics-workshop` as the **Application name** and select *Apache Flink 1.11* as the **Runtime**. 14 | 15 | ![Create KDA Application](/images/flink-on-kda/kda-create-app.png) 16 | 17 | 1. Choose *Choose from IAM roles that Kinesis Analytics can assume* as **Access permissions**. If you are attending an AWS hosted event, select the only role that is suggested. If you are running the workshop on your own, select the role that starts with `streaming-analytics-workshop`. In case you need to choose between several roles and wonder which one to select, you can obtain the name of the correct role from the **KinesisAnalyticsServiceRole** output of the CloudFormation template. 
18 | 19 | ![Create KDA Application](/images/flink-on-kda/kda-create-app-permissions.png) 20 | 21 | 1. Create the application by pressing the blue **Create Application** button 22 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/deploy-to-kda/start-kda-app.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Start KDA application" 3 | chapter: false 4 | weight: 60 5 | --- 6 | 7 | The application is finally ready for execution through the Kinesis Data Analytics service in a fully managed Flink environment. You can now start the execution and send events into the Kinesis data stream, this time with a substantially higher throughput. The Flink application will then continuously process the data that is ingested into the data stream and send derived insights to Elasticsearch for visualization. 8 | 9 | {{% notice warning %}} 10 | Before you proceed, make sure that you are currently not producing any events into the Kinesis stream. Navigate to the Terminal pane in IntelliJ and if the Java producer application is still running, terminate it by pressing *Ctrl-C*. 11 | {{% /notice %}} 12 | 13 | 1. Once the update has completed, press **Run** on the resulting page and confirm that you want to run the application by choosing **Run** again. 14 | 15 | 1. When promted, confirm to *Run without snapshot* and press the blue **Run** button. The application will now start in the background, which can take a couple of minutes. Once it is running, you can inspect the operator graph of the Flink application. 16 | 17 | ![](/images/flink-on-kda/kda-running-app.png) 18 | 19 | 2. You can now start producing data into the Kinesis stream again. However, this time we can substantially increase the speedup factor and hence the throughput of events. 
20 | 21 | {{< highlight plain >}} 22 | java -jar C:\Users\Administrator\Desktop\workshop-resources\amazon-kinesis-replay-0.1.0.jar -objectPrefix artifacts/kinesis-analytics-taxi-consumer/taxi-trips-partitioned.json.lz4 -aggregate -streamName streaming-analytics-workshop -speedup 4320 23 | {{< /highlight >}} 24 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/extend-flink-program/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Extend the Flink program" 3 | chapter: false 4 | weight: 50 5 | --- 6 | 7 | In this chapter, we discuss how to extend the existing Flink application. The current version is just echoing the events that are ingested into the data stream. However, the goal is to analyze the incoming information to provide timely insights into the current condition of the taxi fleet. 8 | 9 | You can learn about the required advanced streaming concepts and how they are implemented in Apache Flink from the [Flink documentation](https://ci.apache.org/projects/flink/flink-docs-release-1.9/dev/datastream_api.html). If you are attending an AWS hosted event, we will cover topics like, event time, watermarks, time windows, and processing semantics in a presentation. If you are running the workshop on your own, you can refer to the [Flink documentation](https://ci.apache.org/projects/flink/flink-docs-stable/) and the [online training](https://training.ververica.com/) available from the original creators of Apache Flink to learn more. 10 | 11 | In the following, you will learn how to generate statistics about hot spots, ie, places in New York that are currently requesting a high number of taxi trip so that operators can proactively send empty taxis there. You will also see how to derive the average time it takes taxis to the airports La Guardia and JFK. 
The actual changes have already been implemented in the `ProcessTaxiStream` class that you can already open in Intellij for your reference. 12 | 13 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/extend-flink-program/add-aes-sink.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Add Elasticsearch sink" 3 | chapter: false 4 | weight: 30 5 | --- 6 | 7 | Finally, we want to send the derived insights to Elasticsearch and Kibana for visualization. To this end, we use an Elasticsearch sink that has been extended to sign the requests with IAM credentials so that they are accepted by Amazon Elasticsearch Service. 8 | 9 | {{< highlight java >}} 10 | if (parameter.has("ElasticsearchEndpoint")) { 11 | String elasticsearchEndpoint = parameter.get("ElasticsearchEndpoint"); 12 | final String region = parameter.get("Region", DEFAULT_REGION_NAME); 13 | 14 | //remove trailling / 15 | if (elasticsearchEndpoint.endsWith(("/"))) { 16 | elasticsearchEndpoint = elasticsearchEndpoint.substring(0, elasticsearchEndpoint.length()-1); 17 | } 18 | 19 | pickupCounts.addSink(AmazonElasticsearchSink.buildElasticsearchSink( 20 | elasticsearchEndpoint, region, "pickup_count", "_doc")); 21 | 22 | tripDurations.addSink(AmazonElasticsearchSink.buildElasticsearchSink( 23 | elasticsearchEndpoint, region, "trip_duration", "_doc")); 24 | } 25 | {{< / highlight >}} 26 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/extend-flink-program/event-time.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Enable event time" 3 | chapter: false 4 | weight: 10 5 | --- 6 | 7 | Using event time for window operators provides much more stable semantics compared to processing time, as it is more robust against reordering of events and late arriving events. 
To activate event time processing, we first need to configure the Flink execution environment appropriately. 8 | 9 | {{< highlight java >}} 10 | if (parameter.get("EventTime", "true").equals("true")) { 11 | env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime); 12 | } 13 | {{< / highlight >}} 14 | 15 | In addition to merely activating event time, we need to tell the Flink program, how it can extract the watermark information from the input stream. In this example, the stream contains special watermark events that inform the Flink runtime that all following events will have a timestamp larger than the current watermark. 16 | 17 | Extracting watermark information and generating watermarks is done with the `TimestampAssigner` class. The class is also responsible for assigning the correct time to individual events. 18 | 19 | {{< highlight java >}} 20 | DataStream trips = kinesisStream 21 | //extract watermarks from watermark events 22 | .assignTimestampsAndWatermarks(new AssignerWithPunctuatedWatermarksAdapter.Strategy<>(new TimestampAssigner())) 23 | ... 24 | {{< / highlight >}} 25 | 26 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/extend-flink-program/kda-parameters.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Read parameters from Application Properties" 3 | menuTitle: "Read parameters" 4 | chapter: false 5 | weight: 5 6 | --- 7 | 8 | Eventually, we'd like to deploy the program to KDA. However, instead of logging in to a cluster and directly submitting a job to the Flink runtime, you upload the respective fat JAR to Amazon S3 and start the execution by calling an API. So we don't get access to the Flink cluster and we hence need to adapt the way we pass parameters to the Flink application. 9 | 10 | If the Flink application is executed with Kinesis Data Analytics, we can obtain parameters from the Kinesis Analytics runtime. 
To this end, we can call the `KinesisAnalyticsRuntime.getApplicationProperties()` function to retrieve the parameters that are passed to the service through an API. 11 | 12 | If the Flink application is executed in the local development environment, we continue to use the command line arguments that are specified when the `main` method is invoked. 13 | 14 | {{< highlight java >}} 15 | ParameterTool parameter; 16 | 17 | if (env instanceof LocalStreamEnvironment) { 18 | //read the parameters specified from the command line args 19 | parameter = ParameterTool.fromArgs(args); 20 | } else { 21 | //read the parameters from the Kinesis Analytics environment 22 | Map applicationProperties = KinesisAnalyticsRuntime.getApplicationProperties(); 23 | 24 | Properties flinkProperties = applicationProperties.get("FlinkApplicationProperties"); 25 | 26 | if (flinkProperties == null) { 27 | throw new RuntimeException("Unable to load FlinkApplicationProperties properties from the runtime."); 28 | } 29 | 30 | parameter = ParameterToolUtils.fromApplicationProperties(flinkProperties); 31 | } 32 | {{< / highlight >}} 33 | 34 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/getting-started/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Getting started" 3 | chapter: false 4 | disableToc: false 5 | weight: 20 6 | --- 7 | 8 | Unless you have already an RDP client available on your laptop, you need to install one now so that you can connect to a Windows instance that you will use during the workshop. Follow the specific instructions for your operating system to install an RDP client, preferably Microsoft Remote Desktop 10. 9 | 10 | - **Windows Setup:** Microsoft Remote Desktop is likely already included in your Windows operating system. To verify, type `mstsc` at a Command Prompt window. 
If it's not already included, you can obtain it through the [Microsoft Store](https://www.microsoft.com/p/microsoft-remotedesktop/9wzdncrfj3ps). 11 | 12 | - **MacOS Setup**: Microsoft Remote Desktop 10 for macOS is available from the [Mac App Store](https://apps.apple.com/app/microsoft-remote-desktop-10/id1295203466). 13 | 14 | - **Linux Setup:** Microsoft Remote Desktop is not available for Linux, but you can use [Remmina](https://remmina.org/) instead. 15 | 16 | 17 | Once you have successfully configured the RDP client, you can start the workshop by following one of the following depending on whether you are... 18 | 19 | * ...[attending an AWS hosted event (using AWS provided hashes)]({{< relref "./aws-event/" >}}) 20 | * ...[running the workshop on your own (in your own account)]({{< relref "./self-paced/" >}}) 21 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/getting-started/aws-event.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "...at an AWS event" 3 | weight: 10 4 | --- 5 | 6 | {{% notice warning %}} 7 | Only complete this section if you are at an AWS hosted event (such as re:Invent, 8 | Loft, Immersion Day, or any other event hosted by an AWS employee). If you 9 | are running the workshop on your own, go to: [Start the workshop on your own]({{< relref "./self-paced/" >}}). 10 | {{% /notice %}} 11 | 12 | ### Login to AWS Workshop Portal 13 | 14 | This workshop creates an AWS acccount and the development environment that is used throughout the workshop. You will need the **Participant Hash** provided upon entry, and your email address to track your unique session. 15 | 16 | Connect to the portal by clicking the button or browsing to [https://dashboard.eventengine.run/](https://dashboard.eventengine.run/). 
17 | 18 | Once you have completed the step above, you can head straight to [**Configure development environment**]({{< ref "../configure/" >}}) 19 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/getting-started/self-paced.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "...on your own" 3 | weight: 20 4 | --- 5 | 6 | {{% notice warning %}} 7 | Only complete this section if you are running the workshop on your own. If you are at an AWS hosted event (such as re:Invent, 8 | Loft, Immersion Day, or any other event hosted by an AWS employee), continue with [**Configure development environment**]({{}}). 9 | {{% /notice %}} 10 | 11 | ### Running the workshop on your own 12 | 13 | 1. Launch the following CloudFormation template to create a prepared development environment in your account. Supported regions include US East (N. Virginia), US West (Oregon), Asia Pacific (Tokyo), Asia Pacific (Sydney), EU (Ireland), EU (Frankfurt), and any other region supporting these resource types: **Amazon Code Pipeline**, **Amazon Elasticsearch Service**, and **Amazon Kinesis Data Analytics**. 14 | 15 | [![Launch CloudFormation Stack](/images/flink-on-kda/cloudformation-launch-stack.png)](https://console.aws.amazon.com/cloudformation/home#/stacks/new?stackName=streaming-analytics-workshop&templateURL=https://aws-streaming-artifacts.s3.amazonaws.com/streaming-analytics-workshop/cfn-templates/StreamingAnalyticsWorkshop.template.json) 16 | 17 | 1. In the following dialog, choose **Next** 18 | 19 | ![Launch stack](/images/flink-on-kda/cfn-1-create-stack.png) 20 | 21 | 1. Choose `streaming-analytics-workshop` as **Stack name** and confirm with **Next** 22 | 23 | 1. Accept all default values on the next dialog page by clicking **Next** at the bottom of the page 24 | 25 | 1. 
On the last page of the dialog, confirm that CloudFormation may create IAM resource by selecting **I acknowledge that AWS CloudFormation might create IAM resources**. Click on **Create stack** at the bottom of the page. 26 | 27 | ![Lauch stack](/images/flink-on-kda/cfn-4-confirm-capabilities.png) 28 | 29 | It takes approximately 10 minutes for the stack associated with this CloudFormation template to deploy. You can continue with the next step [Configure development environment]({{< ref "../configure/" >}}) while the stack is being deployed. 30 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/ingest-events/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Ingest events" 3 | chapter: false 4 | weight: 35 5 | --- 6 | 7 | This chapter focusses on the ingestion part of the architecture. You first create a Kinesis data stream, which serves as a short term streaming store for the events that are created by the taxi fleet. You then start producing a historic set of taxi trips into the Kinesis data stream. 8 | 9 | The Kinesis data stream serves as a buffer that decouples the producers from the consumers. It is a common pattern for architectures that are analyzing streaming data to separate producers from consumers by means of a streaming store. In this way, the architecture becomes more robust. Producers and consumers can be scaled independently and producers can still persist events into the data stream even if the consumers are currently experiencing operational problems or the consuming application needs to be updated. 10 | 11 | This architecture also allows you to experiment and adopt new technologies in the future. Multiple independent applications can concurrently consume the data stored in the Kinesis data stream. You can then test how a new version of an existing application performs with a copy of the production traffic. 
But you can also introduce a different tool and technology stack to analyze the data, again without affecting the existing production application. 12 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/ingest-events/create-stream.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Create a Kinesis data stream" 3 | chapter: false 4 | weight: 11 5 | --- 6 | 7 | You start with creating a Kinesis data stream. 8 | 9 | 1. Navigate to the [Kinesis Console](https://console.aws.amazon.com/kinesis) 10 | 11 | 1. If displayed, up press **Get Started** in the service welcome dialog 12 | 13 | 1. Select **Create data stream** to navigate to the Amazon Kinesis Data Stream service: 14 | ![](/images/flink-on-kda/kinesis-welcome-create-stream.png) 15 | 16 | 1. Choose `streaming-analytics-workshop` as **Kinesis stream name** 17 | 18 | 1. Enter `8` as the **Number of shards** 19 | 20 | 1. Select **Create Kinesis stream** at the bottom of the page 21 | ![](/images/flink-on-kda/kds-create-stream.png) 22 | 23 | 1. After a few moments, the data stream should have been created successfully and be in an *Active* state 24 | 25 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/ingest-events/ingest-data.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Replay historic taxi trips" 3 | chapter: false 4 | weight: 16 5 | --- 6 | 7 | We will use a Java application to replay a historic set of taxi trips made in NYC that are stored in objects in Amazon S3 into the Kinesis stream. 8 | The Java application has already been compiled and loaded onto the EC2 instance. 
If you are interested in the details of the application, you can obtain the sources, including a CloudFormation template to build the Java application with Amazon CodePipeline, from [GitHub](https://github.com/aws-samples/amazon-kinesis-replay). 9 | 10 | 1. Click on the **Terminal** icon in the bottom of Intellij to open a shell 11 | 12 | ![](/images/flink-on-kda/intellij-3-ingest.png) 13 | 14 | 1. Enter the following command into the terminal prompt to start producing events into the Kinesis data stream 15 | 16 | {{< highlight plain >}} 17 | java -jar C:\Users\Administrator\Desktop\workshop-resources\amazon-kinesis-replay-0.1.0.jar -objectPrefix artifacts/kinesis-analytics-taxi-consumer/taxi-trips-partitioned.json.lz4 -aggregate -streamName streaming-analytics-workshop -speedup 600 18 | {{< /highlight >}} 19 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/introduction/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Introduction" 3 | chapter: false 4 | weight: 10 5 | --- 6 | 7 | We use a scenario to analyze the telemetry data of a taxi fleet in New York City in near real-time to optimize the fleet operation. 8 | 9 | In this scenario, every taxi in the fleet is capturing information about completed trips. The tracked information includes the pickup and drop-off locations, number of passengers, and generated revenue. This information is produced into a Kinesis data stream as a simple JSON blob. 10 | 11 | From there, the data is processed and analyzed to identify areas that are currently requesting a high number of taxi rides. The derived insights are finally visualized in a dashboard for operators to inspect. 
12 | 13 | ![Architecture](/images/flink-on-kda/workshop-architecture.png) 14 | 15 | Throughout the course of this workshop, you will build a fully managed infrastructure that can analyze the data in near-time, ie, within seconds, while being scalable and highly available. The architecture will leverage Amazon Kinesis Data Stream as a streaming store, [Amazon Kinesis Data Analytics](https://aws.amazon.com/kinesis/data-analytics/) to run an [Apache Flink](https://flink.apache.org) application in a fully managed environment, and [Amazon Elasticsearch Service](https://aws.amazon.com/elasticsearch-service/) and [Kibana](https://aws.amazon.com/elasticsearch-service/the-elk-stack/kibana/) for visualization. 16 | 17 | Along the way, we will learn about basic Flink concepts and common patterns for streaming analytics. We will also cover how KDA for Apache Flink is different from a self-managed environment and how to effectively operate and monitor streaming architectures. 18 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/local-flink-development/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Local Flink development" 3 | chapter: false 4 | weight: 40 5 | --- 6 | 7 | In this chapter, you learn how to configure the development environment for the local development and execution of Apache Flink programs. We'll also discuss the basic structure of an Apache Flink program. 
8 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/local-flink-development/anatomy-flink-program.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Anatomy of the Flink program" 3 | chapter: false 4 | weight: 40 5 | --- 6 | 7 | According to the [Flink documentation](https://ci.apache.org/projects/flink/flink-docs-stable/dev/api_concepts.html#anatomy-of-a-flink-program), a Flink program consists of the following basic parts: 8 | 9 | 1. Obtain an execution environment, 10 | 1. Load/create the initial data, 11 | 1. Specify transformations on this data, 12 | 1. Specify where to put the results of your computations, and 13 | 1. Trigger the program execution 14 | 15 | If you are attending an AWS hosted event, we will cover the different parts in a presentation format. If you are running the workshop on your own, please refer to the official [Flink documentation](https://ci.apache.org/projects/flink/flink-docs-stable/dev/api_concepts.html#anatomy-of-a-flink-program) to learn more about the anatomy of a Flink program. 16 | 17 | {{% notice info %}} 18 | If you are attending an AWS hosted event and please pause here and wait for the presentation. Please make yourself noticeable, we will start with the presentation once enough attendees have reached this point. 19 | {{% /notice %}} 20 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/local-flink-development/debug-code.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Debug Flink code" 3 | chapter: false 4 | weight: 30 5 | --- 6 | 7 | In addition to simply executing the Flink application, you can also set breakpoints and debug the application locally, which is very convenient when implementing and verifying the business logic. 8 | 9 | 1. 
Expand the utils package and navigate to and double click the `GeoUtils` class 10 | 11 | 1. Set a breakpoint by clicking left of a code line, eg, Line 35 12 | 13 | 1. Start the debugging process by clicking the green bug next in the upper right corner of the IDE 14 | 15 | ![](/images/flink-on-kda/intellij-6-debug.png) 16 | 17 | 1. Step through the code and inspect the content of the variables and the stack trace 18 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/local-flink-development/execute-code.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Execute the Flink program" 3 | chapter: false 4 | weight: 20 5 | --- 6 | 7 | Now that everything is set up correctly, you can execute the basic Flink program that has been cloned from the Github repository. It will simply read the events that have been sent to the Kinesis stream and print them on standard out. 8 | 9 | 10 | 1. Click on **Project** in the upper left of the IntelliJ window 11 | 12 | 1. Navigate to and double click on the `ProcessTaxiStreamLocal` class by gradually expanding the tree structure of the project: *amazon-kinesis-analytics-taxi-consumer* -> *src* -> *main* -> *java* -> *com.amazonaws.samples.kaja.taxi.consumer* 13 | 14 | 1. Start the execution of the program by clicking on the green arrow next to the `main` method and then choosing **Run** 15 | 16 | ![](/images/flink-on-kda/intellij-4-execute-flink.png) 17 | 18 | 1. Once the Flink job has initialized, the *TripEvents* that are received from the Kinesis stream are displayed in the lower part of the IDE 19 | 20 | ![](/images/flink-on-kda/intellij-5-execute-flink-output.png) 21 | 22 | 1. 
Stop the local execution of the Flink application by pressing the red square or by choosing *Run* -> *Stop* from the menu 23 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/scale-monitor/_index.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Scale and Monitor" 3 | chapter: false 4 | weight: 70 5 | --- 6 | 7 | Now that the KDA application is running and sending results to Elasticsearch, we'll take a look at operational aspects, such as monitoring and scaling. 8 | 9 | Good operations is even more important for streaming applications that it is for batch based analytic applications. The throughput of incoming events is often substantially varying for these kind of applications. If the resources of the Flink application are not adapted accordingly, the application may fall substantially behind. It may then generate results that are no longer relevant as they are already too old when the overloaded application is eventually able to produce them. 10 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/scale-monitor/cw-metrics.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Monitor important metrics" 3 | chapter: false 4 | weight: 20 5 | --- 6 | 7 | Now that the Kinesis data stream is scaling, we'll take a look at how we can identify this overloading situation by looking at the metrics of the stream. To this end, we'll create a CloudWatch dashboard that displays some of the relevant metrics. 8 | 9 | 1. Navigate to the [CloudWatch console](https://console.aws.amazon.com/cloudwatch), select **Dashboards** in the navigation pane on the left, and click on **Create dashboard** 10 | 11 | 1. In the create dashboard dialog, enter `streaming-analytics-workshop` as **Dashboard name** and confirm with **Create dashboard** 12 | 13 | 1. 
Choose the **Line** widget and confirm with **Configure** 14 | 15 | 1. Enter `streaming-analytics-workshop` in the search field and select **Kinesis > Stream metrics** 16 | 17 | ![](/images/flink-on-kda/cw-dashboard-1-filter.png) 18 | 19 | 1. Select the **IncomingRecords** and **GetRecords.IteratorAgeMilliseconds** metrics and navigate to **Graphed metrics** 20 | 21 | ![](/images/flink-on-kda/cw-dashboard-2-select-metrics.png) 22 | 23 | 1. Adjust the period of both metrics to **1 Minute** and change the **Statistic** to *Sum* and *Maximum*, respectively. Switch the **Y Axis** of the GetRecords.IteratorAgeMilliseconds metric and confirm with **Update**. 24 | 25 | ![](/images/flink-on-kda/cw-dashboard-3-metric-properties.png) 26 | 27 | The dashboard now shows two metrics: the amount of incoming messages and the millisBehindLatest metric. The metric reports the time difference between the oldest record currently read by the Kinesis Data Analytics application and the latest record in the stream according to the ingestion time in milliseconds. So it indicates how much behind the processing is from the tip of the stream. 28 | 29 | Other important metrics include, `ReadProvisionedThroughputExceeded` and `WriteProvisionedThroughputExceeded`. You can add them to the dashboard as well to see how the producer is throttled. 30 | 31 | ![](/images/flink-on-kda/cw-dashboard-5-scale-kds.png) 32 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/scale-monitor/inspect-flink-application-logs.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Inspect Flink application logs" 3 | chapter: false 4 | weight: 30 5 | --- 6 | 7 | When you were running the Flink program locally in IntelliJ, you could obtain (debug) output directly from the IDE. However, we cannot log in to the infrastructure that runs the KDA application to get access to these logs. 
8 | 9 | In addition to operational metrics, you can configure the Kinesis Data Analytics application to write messages to CloudWatch Logs. This capability seamlessly integrates with common logging frameworks, such as Apache Log4j and the Simple Logging Facade for Java (SLF4J). So it is useful for debugging and identifying the cause of operational issues. 10 | 11 | As you have enabled logging for your Kinesis Data Analytics application, the logs are sent to a CloudWatch Logs stream. 12 | 13 | 1. On the CloudWatch dashboard, select **Add widget** 14 | 15 | 1. In the dialog, choose the **Query results** widget and confirm with **Configure** 16 | 17 | 1. Search for `streaming-analytics-workshop` and select the log stream of the KDA application 18 | 19 | ![](/images/flink-on-kda/cw-dashboard-4-configure-log.png) 20 | 21 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/scale-monitor/scale-kda.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "(Auto)scale the Flink application" 3 | chapter: false 4 | weight: 40 5 | --- 6 | 7 | As you have increased the throughput of the Kinesis stream by doubling the number of shards, more events are sent into the stream. However, as a direct result, more events need to be processed. So now the Kinesis Data Analytics application becomes overloaded and can no longer keep up with the increased number of incoming events. You can observe this through the millisBehindLatest metric, which is published to CloudWatch. 8 | 9 | In contrast to Kinesis Data Streams, Kinesis Data Analytics natively supports auto scaling. After 10-15 minutes, you can see the effect of the scaling activities in the metrics. The millisBehindLatest metric starts to decrease until it reaches zero, when the processing has caught up with the tip of the Kinesis data stream. 
10 | 11 | ![](/images/flink-on-kda/cw-dashboard-6-scale-kda.png) 12 | 13 | Notice how the millisBehindLatest metric spikes just before it starts to decline. This is caused by the way that scaling a Kinesis Data Analytics application works today. To scale a running application, the internal state of the application is persisted into a so-called savepoint. This savepoint is exposed as a snapshot by Kinesis Data Analytics. Subsequently, the running instance of the application is terminated, and a new instance of the same application with more resources and a higher parallelism is created. The new instance of the application then populates its internal state from the snapshot and resumes the processing from where the now terminated instance left off. 14 | 15 | The KDA application takes 10-15 minutes to scale; if it hasn't scaled yet, you can scale it manually. 16 | 17 | 1. Navigate to the Kinesis Data Analytics application. Under **Scaling** adapt the **Parallelism** to 2 and confirm the operation with **Update**. 18 | 19 | ![](/images/flink-on-kda/kda-scale-app.png) 20 | -------------------------------------------------------------------------------- /workshop/content/flink-on-kda/scale-monitor/scale-kinesis.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "Scale the Kinesis stream" 3 | chapter: false 4 | weight: 10 5 | --- 6 | 7 | When you closely inspect the output of the producer application, you will notice that it is experiencing write provisioned throughput exceeded exceptions and cannot send data fast enough. For the purpose of the workshop, the Kinesis data stream has been deliberately under provisioned. As this would be a major problem for production environments, you'll now scale the stream to increase its capacity. 8 | 9 | ![](/images/flink-on-kda/intellij-13-replay-lag-copy.png) 10 | 11 | 1. 
Go to the [Kinesis Data Streams console](https://console.aws.amazon.com/kinesis/home#/streams/list) and click on the *streaming-analytics-workshop* data stream 12 | 13 | 1. Navigate to the **Configuration** tab and choose **Edit** under **Stream capacity** 14 | 15 | ![](/images/flink-on-kda/kds-update-shards.png) 16 | 17 | 1. In the resulting dialog, change the *Number of open shards* to `16` and confirm with **Save changes** 18 | 19 | 1. The scaling operation will add additional shards to the Kinesis stream, effectively doubling its throughput 20 | -------------------------------------------------------------------------------- /workshop/content/more-resources.en.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: "More Resources" 3 | disableToc: true 4 | --- 5 | 6 | Discover more AWS resources for building and running your application on AWS: 7 | 8 | #### Blog Posts 9 | 10 | - [Streaming ETL with Apache Flink and Amazon Kinesis Data Analytics](https://aws.amazon.com/blogs/big-data/streaming-etl-with-apache-flink-and-amazon-kinesis-data-analytics/) 11 | - [Build and run streaming applications with Apache Flink and Amazon Kinesis Data Analytics for Java Applications](https://aws.amazon.com/blogs/big-data/build-and-run-streaming-applications-with-apache-flink-and-amazon-kinesis-data-analytics-for-java-applications/) 12 | - [Build a Real-time Stream Processing Pipeline with Apache Flink on AWS](https://aws.amazon.com/blogs/big-data/build-a-real-time-stream-processing-pipeline-with-apache-flink-on-aws/) 13 | 14 | #### Videos 15 | 16 | - [Build and run streaming applications with Apache Flink and Amazon Kinesis Data Analytics (Flink Forward Europe 2019)](https://www.youtube.com/watch?v=c03_TaW2pR0) 17 | - [Unify Batch and Stream Processing with Apache Beam on AWS (Beam Summit Europe 2019)](https://www.youtube.com/watch?v=eCgZRJqdt_I) 18 | - [Build a Real-time Stream Processing Pipeline with Apache Flink on AWS (Flink 
Forward Berlin 2017)](https://www.youtube.com/watch?v=tmdEe3jpUX8) 19 | - [Build Your First Big Data Application on AWS (AWS Summit Berlin 2019)](https://www.youtube.com/watch?v=PalRSu8Pqn0) 20 | - [Enhanced Monitoring and Automatic Scaling for Apache Flink](https://aws.amazon.com/blogs/big-data/enhanced-monitoring-and-automatic-scaling-for-apache-flink/) 21 | #### Source Code 22 | 23 | - [amazon-kinesis-analytics-taxi-consumer](https://github.com/aws-samples/amazon-kinesis-analytics-taxi-consumer) 24 | - [amazon-kinesis-analytics-beam-taxi-consumer](https://github.com/aws-samples/amazon-kinesis-analytics-beam-taxi-consumer) 25 | - [amazon-kinesis-replay](https://github.com/aws-samples/amazon-kinesis-replay) 26 | - [autoscaling-kda](https://github.com/aws-samples/kda-flink-app-autoscaling) 27 | 28 | #### More Workshops 29 | 30 | * [Amazon ECS Workshop](https://ecsworkshop.com) - Learn how to use Stelligent Mu to deploy a microservice architecture that runs in AWS Fargate 31 | * [Amazon Lightsail Workshop](https://lightsailworkshop.com) - If you are getting started with the cloud and looking for a way to run an extremely low cost environment Lightsail is perfect. Learn how to deploy to Amazon Lightsail with this workshop. 32 | -------------------------------------------------------------------------------- /workshop/layouts/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/layouts/.DS_Store -------------------------------------------------------------------------------- /workshop/layouts/partials/custom-footer.html: -------------------------------------------------------------------------------- 1 |
2 |
3 |
4 |
5 |
6 |
7 | 8 | 9 | -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/apn-logo.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/apn-logo.jpg -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/aws-open-source.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/aws-open-source.jpg -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/beam-on-kda/app-properties.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/beam-on-kda/app-properties.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/beam-on-kda/beam-app-properties.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/beam-on-kda/beam-app-properties.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cf-emr-connect.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cf-emr-connect.png 
-------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cfn-1-create-stack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cfn-1-create-stack.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cfn-2-stack-details.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cfn-2-stack-details.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cfn-4-confirm-capabilities.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cfn-4-confirm-capabilities.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cfn-5-completed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cfn-5-completed.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cfn-6-parameters.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cfn-6-parameters.png 
-------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cfn-6-secrets.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cfn-6-secrets.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cfn_c9_output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cfn_c9_output.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/clean-cf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/clean-cf.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cloud9-3-ingest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cloud9-3-ingest.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cloudformation-launch-stack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cloudformation-launch-stack.png 
-------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-1-filter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-1-filter.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-2-select-metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-2-select-metrics.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-3-metric-properties.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-3-metric-properties.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-4-configure-log.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-4-configure-log.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-5-scale-kds.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-5-scale-kds.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-6-log-result.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-6-log-result.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-total-number-trips.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-total-number-trips.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-trips-backfilled.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-trips-backfilled.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/cw-dashboard-trips-by-borough.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/cw-dashboard-trips-by-borough.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/emr-copy-dns-name.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/emr-copy-dns-name.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/emr-flink-dashboard-job.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/emr-flink-dashboard-job.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/emr-flink-dashboard-overview.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/emr-flink-dashboard-overview.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/emr-resource-manager-application.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/emr-resource-manager-application.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/emr-resource-manager.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/emr-resource-manager.png -------------------------------------------------------------------------------- 
/workshop/static/images/beam-on-kda/intellij-1-welcome.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/intellij-1-welcome.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/intellij-2-clone.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/intellij-2-clone.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/intellij-3-ingest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/intellij-3-ingest.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/intellij-7-maven-package.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/intellij-7-maven-package.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-batch-flink-dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-batch-flink-dashboard.png -------------------------------------------------------------------------------- 
/workshop/static/images/beam-on-kda/kda-cf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-cf.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-config.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-config.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-config2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-config2.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-configure-review.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-configure-review.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-create-app.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-create-app.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-create-review.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-create-review.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-flink-streaming-dahsboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-flink-streaming-dahsboard.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-monitor-novpc.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-monitor-novpc.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-monitoring.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-monitoring.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-nographlog.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-nographlog.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-nographlogerror.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-nographlogerror.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-patch-job-graph.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-patch-job-graph.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-prop-grp1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-prop-grp1.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-property-group.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-property-group.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-running-beamapp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-running-beamapp.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-update-config-batch.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-update-config-batch.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kda-updateborough.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kda-updateborough.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kds-create-stream-active.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kds-create-stream-active.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kds-create-stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kds-create-stream.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-cf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-cf.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-cflambda.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-cflambda.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-check.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-check.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-check2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-check2.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-configure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-configure.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-create.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-create.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-lambda.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-lambda.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-process.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-process.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-review.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-review.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-s3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-s3.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kfh-selectsources3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kfh-selectsources3.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kinesis-check.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kinesis-check.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kinesis-welcome-create-kda.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kinesis-welcome-create-kda.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/kinesis-welcome-create-stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/kinesis-welcome-create-stream.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/overview-beamarchitecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/overview-beamarchitecture.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/overview-cw.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/overview-cw.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/profiler-group-1.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/profiler-group-1.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/profiler-group-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/profiler-group-2.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/profiler-group-3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/profiler-group-3.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/profiler-group-4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/profiler-group-4.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/profiler-group-5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/profiler-group-5.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/remote-desktop-1-create.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/remote-desktop-1-create.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/remote-desktop-2-credentials.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/remote-desktop-2-credentials.png -------------------------------------------------------------------------------- /workshop/static/images/beam-on-kda/secrets-manager-retrieve-secret.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/beam-on-kda/secrets-manager-retrieve-secret.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/S3_final_view.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/S3_final_view.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/attach_policies.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/attach_policies.png -------------------------------------------------------------------------------- 
/workshop/static/images/flink-on-kda-studio/build.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/build.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/close_welcome_message.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/close_welcome_message.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/cloud_shell_button.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/cloud_shell_button.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/delete_data_stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/delete_data_stream.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/delete_s3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/delete_s3.png 
-------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/delete_streaming_app.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/delete_streaming_app.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/delete_studio.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/delete_studio.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/deploy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/deploy.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/deployed_run.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/deployed_run.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/event_engine_1.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/event_engine_1.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/event_engine_2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/event_engine_2.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/event_engine_3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/event_engine_3.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/event_engine_4.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/event_engine_4.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/event_engine_5.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/event_engine_5.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/glue_catalog.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/glue_catalog.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/glue_database_select.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/glue_database_select.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/glue_name_database.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/glue_name_database.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/interactive_notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/interactive_notebook.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/producer_notebook.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/producer_notebook.png -------------------------------------------------------------------------------- 
/workshop/static/images/flink-on-kda-studio/select_IAM.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/select_IAM.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/studio_set_up.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/studio_set_up.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/workshop_architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/workshop_architecture.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda-studio/zeppelin_1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda-studio/zeppelin_1.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-cw-1-uptime-and-downtime.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-cw-1-uptime-and-downtime.png 
-------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-cw-2-checkpoint.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-cw-2-checkpoint.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-cw-3-cpumemory.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-cw-3-cpumemory.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-cw-4-app-progress.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-cw-4-app-progress.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-cw-5-source-and-sink.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-cw-5-source-and-sink.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-index-1-cw-dashboard.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-index-1-cw-dashboard.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-index-3-flink-dashboard.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-index-3-flink-dashboard.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-metrics-1-cfn-launch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-metrics-1-cfn-launch.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-metrics-2-cfn-launch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-metrics-2-cfn-launch.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-metrics-3-cfn-launch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-metrics-3-cfn-launch.png 
-------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-metrics-4-cfn-launch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-metrics-4-cfn-launch.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-monitoring-metrics-5-cfn-launch.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-monitoring-metrics-5-cfn-launch.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-scaling-cfn-1-create-stack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-scaling-cfn-1-create-stack.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-scaling-cfn-2-stack-details.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-scaling-cfn-2-stack-details.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-scaling-cfn-3-capabilities.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-scaling-cfn-3-capabilities.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-scaling-cw-alarm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-scaling-cw-alarm.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-scaling-cw-edit-scale-out.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-scaling-cw-edit-scale-out.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/advanced-scaling-cw-scale-out.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/advanced-scaling-cw-scale-out.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/aes-domain-details.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/aes-domain-details.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/apn-logo.jpg: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/apn-logo.jpg -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/aws-open-source.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/aws-open-source.jpg -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cfn-1-create-stack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cfn-1-create-stack.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cfn-2-stack-details.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cfn-2-stack-details.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cfn-4-confirm-capabilities.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cfn-4-confirm-capabilities.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cfn-5-completed.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cfn-5-completed.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cfn-6-parameters.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cfn-6-parameters.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/click-on-window-box.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/click-on-window-box.gif -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cloudformation-launch-stack.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cloudformation-launch-stack.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cw-alarm-in-alarm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-alarm-in-alarm.png -------------------------------------------------------------------------------- 
/workshop/static/images/flink-on-kda/cw-dashboard-1-filter.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-dashboard-1-filter.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cw-dashboard-2-select-metrics.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-dashboard-2-select-metrics.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cw-dashboard-3-metric-properties.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-dashboard-3-metric-properties.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cw-dashboard-4-configure-log.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-dashboard-4-configure-log.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cw-dashboard-5-scale-kds.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-dashboard-5-scale-kds.png 
-------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cw-dashboard-6-scale-kda.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-dashboard-6-scale-kda.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cw-in-alarm-2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-in-alarm-2.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/cw-in-alarm.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/cw-in-alarm.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/enhanced-monitoring-1.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/enhanced-monitoring-1.jpg -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-dashboard-backpressure.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-backpressure.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-dashboard-checkpoint-history.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-checkpoint-history.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-dashboard-checkpoints-tab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-checkpoints-tab.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-dashboard-exception-tab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-exception-tab.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-dashboard-exception.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-exception.png -------------------------------------------------------------------------------- 
/workshop/static/images/flink-on-kda/flink-dashboard-operator-subtasks.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-operator-subtasks.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-dashboard-task-managers.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-task-managers.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-dashboard-watermarks.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-watermarks.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-dashboard-wide-view.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-dashboard-wide-view.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-web-dashboard-task-list.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-web-dashboard-task-list.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-web-dashboard-tasks.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-web-dashboard-tasks.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/flink-web-dashboard-top-level-tabs.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/flink-web-dashboard-top-level-tabs.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/high-level-architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/high-level-architecture.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/iam-1-create-user.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/iam-1-create-user.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/iam-2-attach-policy.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/iam-2-attach-policy.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/iam-3-create-user.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/iam-3-create-user.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/iam-4-save-url.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/iam-4-save-url.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/iam-kda-1-create-role.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/iam-kda-1-create-role.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/iam-kda-2-select-service.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/iam-kda-2-select-service.png -------------------------------------------------------------------------------- 
/workshop/static/images/flink-on-kda/iam-kda-3-select-permissions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/iam-kda-3-select-permissions.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/iam-kda-4-review.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/iam-kda-4-review.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-1-welcome.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-1-welcome.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-10-configuration-details.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-10-configuration-details.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-11-replay-lag.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-11-replay-lag.png 
-------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-12-pom-reimport.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-12-pom-reimport.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-13-replay-lag-copy.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-13-replay-lag-copy.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-2-clone.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-2-clone.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-3-ingest.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-3-ingest.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-4-execute-flink.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-4-execute-flink.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-5-execute-flink-output.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-5-execute-flink-output.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-6-debug.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-6-debug.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-7-maven-package.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-7-maven-package.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-8-maven-install.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-8-maven-install.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/intellij-9-edit-configuration.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/intellij-9-edit-configuration.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kda-configure-scaling.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kda-configure-scaling.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kda-configure.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kda-configure.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kda-create-app-permissions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kda-create-app-permissions.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kda-create-app.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kda-create-app.png -------------------------------------------------------------------------------- 
/workshop/static/images/flink-on-kda/kda-monitoring.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kda-monitoring.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kda-property-group.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kda-property-group.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kda-running-app.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kda-running-app.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kda-scale-app.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kda-scale-app.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kds-create-stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kds-create-stream.png -------------------------------------------------------------------------------- 
/workshop/static/images/flink-on-kda/kds-update-shards.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kds-update-shards.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kibana-1-create-index.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kibana-1-create-index.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kibana-2-import-objects.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kibana-2-import-objects.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kibana-3-upload-json.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kibana-3-upload-json.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kibana-4-default-index.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kibana-4-default-index.png 
-------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kibana-5-visualizatio-partial.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kibana-5-visualizatio-partial.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kibana-visualization.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kibana-visualization.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kinesis-welcome-create-kda.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kinesis-welcome-create-kda.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/kinesis-welcome-create-stream.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/kinesis-welcome-create-stream.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/open-flink-dashboard.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/open-flink-dashboard.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/parallel-data-flow-flink.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/parallel-data-flow-flink.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/parallelism-updated-cw-alarm-scale-out.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/parallelism-updated-cw-alarm-scale-out.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/remote-desktop-1-create.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/remote-desktop-1-create.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/remote-desktop-2-credentials.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/remote-desktop-2-credentials.png -------------------------------------------------------------------------------- 
/workshop/static/images/flink-on-kda/secrets-manager-retrieve-secret.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/secrets-manager-retrieve-secret.png -------------------------------------------------------------------------------- /workshop/static/images/flink-on-kda/workshop-architecture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/static/images/flink-on-kda/workshop-architecture.png -------------------------------------------------------------------------------- /workshop/themes/.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/aws-samples/streaming-analytics-workshop/df0aeffee3506e5ecbf41055d4696f9dada5faa7/workshop/themes/.DS_Store --------------------------------------------------------------------------------