├── .asf.yaml
├── .gitignore
├── LICENSE
├── NOTICE
├── README.md
├── publish.sh
├── site
├── .gitignore
├── Gemfile
├── _config.yml
├── _data
│ ├── contributors.yml
│ ├── navigation.yml
│ ├── project.yml
│ └── releases.yml
├── _includes
│ ├── JB
│ │ ├── analytics
│ │ ├── analytics-providers
│ │ │ ├── getclicky
│ │ │ ├── google-classic
│ │ │ ├── google-universal
│ │ │ ├── mixpanel
│ │ │ └── piwik
│ │ ├── categories_list
│ │ ├── comments
│ │ ├── comments-providers
│ │ │ ├── disqus
│ │ │ ├── facebook
│ │ │ ├── intensedebate
│ │ │ └── livefyre
│ │ ├── liquid_raw
│ │ ├── pages_list
│ │ ├── posts_collate
│ │ ├── setup
│ │ ├── sharing
│ │ └── tags_list
│ └── themes
│ │ ├── apache-clean
│ │ ├── _navigation.html
│ │ ├── default.html
│ │ ├── footer.html
│ │ ├── page.html
│ │ ├── post.html
│ │ └── settings.yml
│ │ └── apache
│ │ ├── _navigation.html
│ │ ├── default.html
│ │ ├── footer.html
│ │ ├── page.html
│ │ ├── post.html
│ │ └── settings.yml
├── _layouts
│ ├── default.html
│ ├── page.html
│ └── post.html
├── _sass
│ ├── _base.scss
│ ├── _layout.scss
│ └── _syntax-highlighting.scss
├── assets
│ ├── images
│ │ └── important.png
│ └── themes
│ │ ├── apache-clean
│ │ ├── bootstrap
│ │ │ ├── css
│ │ │ │ ├── bootstrap-theme.css
│ │ │ │ ├── bootstrap-theme.min.css
│ │ │ │ └── bootstrap.css
│ │ │ ├── fonts
│ │ │ │ ├── glyphicons-halflings-regular.eot
│ │ │ │ ├── glyphicons-halflings-regular.svg
│ │ │ │ ├── glyphicons-halflings-regular.ttf
│ │ │ │ └── glyphicons-halflings-regular.woff
│ │ │ ├── img
│ │ │ │ ├── glyphicons-halflings-white.png
│ │ │ │ └── glyphicons-halflings.png
│ │ │ └── js
│ │ │ │ ├── bootstrap.js
│ │ │ │ └── bootstrap.min.js
│ │ ├── css
│ │ │ ├── style.css
│ │ │ └── syntax.css
│ │ ├── img
│ │ │ ├── apache_logo.png
│ │ │ ├── apache_logo_800.png
│ │ │ ├── body-bg.jpg
│ │ │ ├── egg-logo.png
│ │ │ ├── favicon.png
│ │ │ ├── header-bg.jpg
│ │ │ ├── highlight-bg.jpg
│ │ │ ├── logo.png
│ │ │ ├── new-black.png
│ │ │ └── sidebar-bg.jpg
│ │ └── jquery
│ │ │ ├── jquery-2.1.1.js
│ │ │ └── jquery-2.1.1.min.js
│ │ └── apache
│ │ ├── bootstrap
│ │ ├── css
│ │ │ ├── bootstrap-theme.css
│ │ │ ├── bootstrap-theme.min.css
│ │ │ └── bootstrap.css
│ │ ├── fonts
│ │ │ ├── glyphicons-halflings-regular.eot
│ │ │ ├── glyphicons-halflings-regular.svg
│ │ │ ├── glyphicons-halflings-regular.ttf
│ │ │ └── glyphicons-halflings-regular.woff
│ │ ├── img
│ │ │ ├── glyphicons-halflings-white.png
│ │ │ └── glyphicons-halflings.png
│ │ └── js
│ │ │ ├── bootstrap.js
│ │ │ └── bootstrap.min.js
│ │ ├── css
│ │ ├── style.css
│ │ └── syntax.css
│ │ ├── img
│ │ ├── apache_logo.png
│ │ ├── apache_logo_800.png
│ │ ├── body-bg.jpg
│ │ ├── egg-logo.png
│ │ ├── favicon.png
│ │ ├── header-bg.jpg
│ │ ├── highlight-bg.jpg
│ │ ├── logo.png
│ │ ├── new-black.png
│ │ └── sidebar-bg.jpg
│ │ └── jquery
│ │ ├── jquery-2.1.1.js
│ │ └── jquery-2.1.1.min.js
├── community-members.md
├── community.md
├── contributing-extensions.md
├── contributing.md
├── css
│ └── main.scss
├── docs
│ ├── flink
│ │ ├── 1.0
│ │ │ ├── documentation.md
│ │ │ ├── flink-streaming-activemq.md
│ │ │ ├── flink-streaming-akka.md
│ │ │ ├── flink-streaming-flume.md
│ │ │ ├── flink-streaming-netty.md
│ │ │ └── flink-streaming-redis.md
│ │ ├── 1.1.0
│ │ │ ├── documentation.md
│ │ │ ├── flink-streaming-activemq.md
│ │ │ ├── flink-streaming-akka.md
│ │ │ ├── flink-streaming-flume.md
│ │ │ ├── flink-streaming-influxdb.md
│ │ │ ├── flink-streaming-influxdb2.md
│ │ │ ├── flink-streaming-kudu.md
│ │ │ ├── flink-streaming-netty.md
│ │ │ ├── flink-streaming-pinot.md
│ │ │ └── flink-streaming-redis.md
│ │ ├── current
│ │ │ ├── documentation.md
│ │ │ ├── flink-streaming-activemq.md
│ │ │ ├── flink-streaming-akka.md
│ │ │ ├── flink-streaming-flume.md
│ │ │ ├── flink-streaming-influxdb.md
│ │ │ ├── flink-streaming-influxdb2.md
│ │ │ ├── flink-streaming-kudu.md
│ │ │ ├── flink-streaming-netty.md
│ │ │ ├── flink-streaming-pinot.md
│ │ │ └── flink-streaming-redis.md
│ │ ├── overview.md
│ │ └── templates
│ │ │ ├── flink-streaming-activemq.template
│ │ │ ├── flink-streaming-akka.template
│ │ │ ├── flink-streaming-flume.template
│ │ │ ├── flink-streaming-influxdb.template
│ │ │ ├── flink-streaming-influxdb2.template
│ │ │ ├── flink-streaming-kudu.template
│ │ │ ├── flink-streaming-netty.template
│ │ │ ├── flink-streaming-pinot.template
│ │ │ └── flink-streaming-redis.template
│ └── spark
│ │ ├── 2.0.0
│ │ ├── documentation.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.0.1
│ │ ├── documentation.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.0.2
│ │ ├── documentation.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.1.0
│ │ ├── documentation.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.1.1
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.1.2
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.1.3
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.2.0
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.2.1
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.2.2
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.2.3
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.3.0
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubnub.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.3.1
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubnub.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.3.2
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubnub.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.3.3
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubnub.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.3.4
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubnub.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── 2.4.0
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubnub.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── current
│ │ ├── documentation.md
│ │ ├── spark-sql-cloudant.md
│ │ ├── spark-sql-streaming-akka.md
│ │ ├── spark-sql-streaming-mqtt.md
│ │ ├── spark-streaming-akka.md
│ │ ├── spark-streaming-mqtt.md
│ │ ├── spark-streaming-pubnub.md
│ │ ├── spark-streaming-pubsub.md
│ │ ├── spark-streaming-twitter.md
│ │ └── spark-streaming-zeromq.md
│ │ ├── overview.md
│ │ └── templates
│ │ ├── spark-sql-cloudant.template
│ │ ├── spark-sql-streaming-akka.template
│ │ ├── spark-sql-streaming-mqtt.template
│ │ ├── spark-streaming-akka.template
│ │ ├── spark-streaming-mqtt.template
│ │ ├── spark-streaming-pubnub.template
│ │ ├── spark-streaming-pubsub.template
│ │ ├── spark-streaming-twitter.template
│ │ └── spark-streaming-zeromq.template
├── downloads
│ ├── flink.md
│ └── spark.md
├── feed.xml
├── history.md
├── index.md
├── privacy-policy.md
└── releases
│ └── spark
│ ├── 2.0.0
│ └── release-notes.md
│ ├── 2.0.1
│ └── release-notes.md
│ ├── 2.0.2
│ └── release-notes.md
│ ├── 2.1.0
│ └── release-notes.md
│ ├── 2.3.3
│ └── release-notes.md
│ └── 2.3.4
│ └── release-notes.md
└── update-doc.sh
/.asf.yaml:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one
2 | # or more contributor license agreements. See the NOTICE file
3 | # distributed with this work for additional information
4 | # regarding copyright ownership. The ASF licenses this file
5 | # to you under the Apache License, Version 2.0 (the
6 | # "License"); you may not use this file except in compliance
7 | # with the License. You may obtain a copy of the License at
8 | #
9 | # http://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing,
12 | # software distributed under the License is distributed on an
13 | # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14 | # KIND, either express or implied. See the License for the
15 | # specific language governing permissions and limitations
16 | # under the License.
17 |
18 | publish:
19 | whoami: asf-site
20 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to you under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | # Mac
18 | .DS_Store
19 |
20 | # Eclipse
21 | .classpath
22 | .project
23 | .settings/
24 | target/
25 |
26 | # Intellij
27 | .idea/
28 | .idea_modules/
29 | *.iml
30 | *.iws
31 | *.class
32 | *.log
33 |
34 | # ignore published content (after running ./publish.sh)
35 | site/_site/
36 | site/.bundle
37 | content/
38 |
39 | # Others
40 | *~
41 | .sass-cache
42 | .jekyll-metadata
43 | .checkstyle
44 | .fbExcludeFilterFile
45 |
--------------------------------------------------------------------------------
/NOTICE:
--------------------------------------------------------------------------------
1 | Apache Website Template
2 | Copyright [2016-2018] The Apache Software Foundation
3 |
4 | This product includes software developed at
5 | The Apache Software Foundation (http://www.apache.org/).
6 |
--------------------------------------------------------------------------------
/publish.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | #
3 | # Licensed to the Apache Software Foundation (ASF) under one or more
4 | # contributor license agreements. See the NOTICE file distributed with
5 | # this work for additional information regarding copyright ownership.
6 | # The ASF licenses this file to you under the Apache License, Version 2.0
7 | # (the "License"); you may not use this file except in compliance with
8 | # the License. You may obtain a copy of the License at
9 | #
10 | # http://www.apache.org/licenses/LICENSE-2.0
11 | #
12 | # Unless required by applicable law or agreed to in writing, software
13 | # distributed under the License is distributed on an "AS IS" BASIS,
14 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 | # See the License for the specific language governing permissions and
16 | # limitations under the License.
17 | #
18 |
19 | set -e
20 | cd site
21 | #bundle exec jekyll clean
22 | rm -rf _site
23 | bundle exec jekyll build -d _site
24 | COMMIT_HASH=`git rev-parse HEAD`
25 | cd ..
26 | git checkout asf-site
27 | git branch --set-upstream-to=origin/asf-site asf-site
28 | git pull --rebase
29 | rm -rf content
30 | mkdir content
31 | mv site/_site/* content
32 | git add content
33 | echo "Publishing changes from master branch $COMMIT_HASH"
34 | git commit -a -m "Publishing from $COMMIT_HASH"
35 | echo "> > >"
36 | echo " "
37 | echo "You are now on the asf-site branch"
38 | echo "Run git push origin asf-site to update the live site."
39 | echo " "
40 | echo " "
41 | set +e
42 |
--------------------------------------------------------------------------------
/site/.gitignore:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to you under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | # Mac
18 | .DS_Store
19 |
20 | # Eclipse
21 | .classpath
22 | .project
23 | .settings/
24 | target/
25 |
26 | # Intellij
27 | .idea/
28 | .idea_modules/
29 | *.iml
30 | *.iws
31 | *.class
32 | *.log
33 |
34 | # generated site
35 | _site
36 |
37 | # Others
38 | *~
39 | .sass-cache
40 | .jekyll-metadata
41 | .checkstyle
42 | .fbExcludeFilterFile
43 |
44 | Gemfile.lock
45 |
46 |
--------------------------------------------------------------------------------
/site/Gemfile:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to you under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | source 'https://rubygems.org'
17 | ruby "2.7.0"
18 | gem 'jekyll', '= 3.9.0'
19 |
20 | gem 'github-pages'
21 | gem 'rouge'
22 | gem 'jekyll-oembed', :require => 'jekyll_oembed'
23 | # End Gemfile
24 |
--------------------------------------------------------------------------------
/site/_config.yml:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to you under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | title: Apache Bahir
17 | email: dev@bahir.apache.org
18 | description: > # this means to ignore newlines until "baseurl:"
19 | Apache Bahir provides extensions to distributed analytic platforms such as Apache Spark and Apache Flink.
20 | baseurl: "" # the subpath of your site, e.g. /blog
21 | url: "http://bahir.apache.org" # the base hostname & protocol for your site
22 | #twitter_username: ApacheBahir
23 | github_username: apache
24 |
25 | # Build settings
26 | markdown: kramdown
27 | kramdown:
28 | parse_block_html: true
29 |
30 | permalink: pretty
31 | excerpt_separator: ""
32 |
33 | repository: https://github.com/apache/bahir-website
34 | destination: _site
35 | exclude: [README.md,Gemfile*,docs/*/templates]
36 | keep_files: [".git", ".svn", "apidocs"]
37 |
38 | # if 'analytics_on' is true, analytics section will be rendered on the HTML pages
39 | JB:
40 | ASSET_PATH: "/assets/themes/apache-clean"
41 | analytics:
42 | provider: google_universal
43 | google_universal:
44 | domain: bahir.apache.org
45 | tracking_id: UA-79140859-1
46 |
47 | # These allow the documentation to be updated with newer releases
48 | SPARK_VERSION: 2.4.0-SNAPSHOT
49 | SCALA_BINARY_VERSION: "2.11"
50 | SCALA_VERSION: "2.11.11"
51 |
--------------------------------------------------------------------------------
/site/_data/releases.yml:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to you under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 |
17 | - version: 2.3.4
18 | date: 09/23/2019
19 |
20 | - version: 2.3.3
21 | date: 06/08/2019
22 |
23 | - version: 2.2.3
24 | date: 06/08/2019
25 |
26 | - version: 2.3.2
27 | date: 12/04/2018
28 |
29 | - version: 2.3.1
30 | date: 12/04/2018
31 |
32 | - version: 2.3.0
33 | date: 12/04/2018
34 |
35 | - version: 2.2.2
36 | date: 11/13/2018
37 |
38 | - version: 2.2.1
39 | date: 06/25/2018
40 |
41 | - version: 2.2.0
42 | date: 08/22/2017
43 |
44 | - version: 2.1.3
45 | date: 11/13/2018
46 |
47 | - version: 2.1.2
48 | date: 06/07/2018
49 |
50 | - version: 2.1.1
51 | date: 07/17/2017
52 |
53 | - version: 2.1.0
54 | date: 02/22/2017
55 |
56 | - version: 2.0.2
57 | date: 01/27/2017
58 |
59 | - version: 2.0.1
60 | date: 11/27/2016
61 |
62 | - version: 2.0.0
63 | date: 08/10/2016
64 |
65 | - version: 2.0.0-preview
66 | date: 07/01/2016
67 |
--------------------------------------------------------------------------------
/site/_includes/JB/analytics:
--------------------------------------------------------------------------------
1 | {% if site.JB.analytics.provider and page.JB.analytics != false %}
2 |
3 | {% case site.JB.analytics.provider %}
4 | {% when "google_classic" %}
5 | {% include JB/analytics-providers/google-classic %}
6 | {% when "google_universal" %}
7 | {% include JB/analytics-providers/google-universal %}
8 | {% when "getclicky" %}
9 | {% include JB/analytics-providers/getclicky %}
10 | {% when "mixpanel" %}
11 | {% include JB/analytics-providers/mixpanel %}
12 | {% when "piwik" %}
13 | {% include JB/analytics-providers/piwik %}
14 | {% when "custom" %}
15 | {% include custom/analytics %}
16 | {% endcase %}
17 |
18 | {% endif %}
--------------------------------------------------------------------------------
/site/_includes/JB/analytics-providers/getclicky:
--------------------------------------------------------------------------------
1 |
12 |
13 |
--------------------------------------------------------------------------------
/site/_includes/JB/analytics-providers/google-classic:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/site/_includes/JB/analytics-providers/google-universal:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/site/_includes/JB/analytics-providers/mixpanel:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/site/_includes/JB/analytics-providers/piwik:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/site/_includes/JB/categories_list:
--------------------------------------------------------------------------------
1 | {% comment %}{% endcomment %}
19 |
20 | {% if site.JB.categories_list.provider == "custom" %}
21 | {% include custom/categories_list %}
22 | {% else %}
23 | {% if categories_list.first[0] == null %}
24 | {% for category in categories_list %}
25 |
26 | {{ category | join: "/" }} {{ site.categories[category].size }}
27 |
28 | {% endfor %}
29 | {% else %}
30 | {% for category in categories_list %}
31 |
32 | {{ category[0] | join: "/" }} {{ category[1].size }}
33 |
34 | {% endfor %}
35 | {% endif %}
36 | {% endif %}
37 | {% assign categories_list = nil %}
--------------------------------------------------------------------------------
/site/_includes/JB/comments:
--------------------------------------------------------------------------------
1 | {% if site.JB.comments.provider and page.comments != false %}
2 |
3 | {% case site.JB.comments.provider %}
4 | {% when "disqus" %}
5 | {% include JB/comments-providers/disqus %}
6 | {% when "livefyre" %}
7 | {% include JB/comments-providers/livefyre %}
8 | {% when "intensedebate" %}
9 | {% include JB/comments-providers/intensedebate %}
10 | {% when "facebook" %}
11 | {% include JB/comments-providers/facebook %}
12 | {% when "custom" %}
13 | {% include custom/comments %}
14 | {% endcase %}
15 |
16 | {% endif %}
--------------------------------------------------------------------------------
/site/_includes/JB/comments-providers/disqus:
--------------------------------------------------------------------------------
1 |
2 |
13 | Please enable JavaScript to view the comments powered by Disqus.
14 | blog comments powered by
15 |
--------------------------------------------------------------------------------
/site/_includes/JB/comments-providers/facebook:
--------------------------------------------------------------------------------
1 |
2 |
9 |
--------------------------------------------------------------------------------
/site/_includes/JB/comments-providers/intensedebate:
--------------------------------------------------------------------------------
1 |
6 |
7 |
--------------------------------------------------------------------------------
/site/_includes/JB/comments-providers/livefyre:
--------------------------------------------------------------------------------
1 |
2 |
--------------------------------------------------------------------------------
/site/_includes/JB/liquid_raw:
--------------------------------------------------------------------------------
1 | {% comment%}{% endcomment%}
26 |
27 | {% if site.JB.liquid_raw.provider == "custom" %}
28 | {% include custom/liquid_raw %}
29 | {% else %}
30 | {{text | replace:"|.", "{" | replace:".|", "}" | replace:">", "&gt;" | replace:"<", "&lt;" }}
31 | {% endif %}
32 | {% assign text = nil %}
--------------------------------------------------------------------------------
/site/_includes/JB/pages_list:
--------------------------------------------------------------------------------
1 | {% comment %}{% endcomment %}
22 |
23 | {% if site.JB.pages_list.provider == "custom" %}
24 | {% include custom/pages_list %}
25 | {% else %}
26 | {% for node in pages_list %}
27 | {% if node.title != null %}
28 | {% if group == null or group == node.group %}
29 | {% if page.url == node.url %}
30 | {{node.title}}
31 | {% else %}
32 | {{node.title}}
33 | {% endif %}
34 | {% endif %}
35 | {% endif %}
36 | {% endfor %}
37 | {% endif %}
38 | {% assign pages_list = nil %}
39 | {% assign group = nil %}
--------------------------------------------------------------------------------
/site/_includes/JB/posts_collate:
--------------------------------------------------------------------------------
1 | {% comment %}{% endcomment %}
19 |
20 | {% if site.JB.posts_collate.provider == "custom" %}
21 | {% include custom/posts_collate %}
22 | {% else %}
23 | {% for post in posts_collate %}
24 | {% capture this_year %}{{ post.date | date: "%Y" }}{% endcapture %}
25 | {% capture this_month %}{{ post.date | date: "%B" }}{% endcapture %}
26 | {% capture next_year %}{{ post.previous.date | date: "%Y" }}{% endcapture %}
27 | {% capture next_month %}{{ post.previous.date | date: "%B" }}{% endcapture %}
28 |
29 | {% if forloop.first %}
30 | {{this_year}}
31 | {{this_month}}
32 |
33 | {% endif %}
34 |
35 | {{ post.date | date: "%B %e, %Y" }} » {{ post.title }}
36 |
37 | {% if forloop.last %}
38 |
39 | {% else %}
40 | {% if this_year != next_year %}
41 |
42 | {{next_year}}
43 | {{next_month}}
44 |
45 | {% else %}
46 | {% if this_month != next_month %}
47 |
48 | {{next_month}}
49 |
50 | {% endif %}
51 | {% endif %}
52 | {% endif %}
53 | {% endfor %}
54 | {% endif %}
55 | {% assign posts_collate = nil %}
--------------------------------------------------------------------------------
/site/_includes/JB/setup:
--------------------------------------------------------------------------------
1 | {% capture jbcache %}
2 |
5 | {% if site.JB.setup.provider == "custom" %}
6 | {% include custom/setup %}
7 | {% else %}
8 | {% if site.safe and site.JB.BASE_PATH and site.JB.BASE_PATH != '' %}
9 | {% assign BASE_PATH = site.JB.BASE_PATH %}
10 | {% assign HOME_PATH = site.JB.BASE_PATH %}
11 | {% else %}
12 | {% assign BASE_PATH = nil %}
13 | {% assign HOME_PATH = "/" %}
14 | {% endif %}
15 |
16 | {% if site.JB.ASSET_PATH %}
17 | {% assign ASSET_PATH = site.JB.ASSET_PATH %}
18 | {% else %}
19 | {% capture ASSET_PATH %}{{ BASE_PATH }}/assets/themes/{{ layout.theme.name }}{% endcapture %}
20 | {% endif %}
21 | {% endif %}
22 | {% endcapture %}{% assign jbcache = nil %}
23 |
--------------------------------------------------------------------------------
/site/_includes/JB/sharing:
--------------------------------------------------------------------------------
1 | {% if site.safe and site.JB.sharing.provider and page.JB.sharing != false %}
2 |
3 | {% case site.JB.sharing.provider %}
4 | {% when "custom" %}
5 | {% include custom/sharing %}
6 | {% endcase %}
7 |
8 | {% endif %}
--------------------------------------------------------------------------------
/site/_includes/JB/tags_list:
--------------------------------------------------------------------------------
1 | {% comment %}{% endcomment %}
19 |
20 | {% if site.JB.tags_list.provider == "custom" %}
21 | {% include custom/tags_list %}
22 | {% else %}
23 | {% if tags_list.first[0] == null %}
24 | {% for tag in tags_list %}
25 | {{ tag }} {{ site.tags[tag].size }}
26 | {% endfor %}
27 | {% else %}
28 | {% for tag in tags_list %}
29 | {{ tag[0] }} {{ tag[1].size }}
30 | {% endfor %}
31 | {% endif %}
32 | {% endif %}
33 | {% assign tags_list = nil %}
34 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache-clean/default.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | {{ page.title }}
6 | {% if page.description %} {% endif %}
7 |
8 |
9 |
10 |
11 |
12 |
13 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 | {% include themes/apache-clean/_navigation.html %}
38 |
39 |
40 |
41 | {{ content }}
42 |
43 |
44 |
45 |
46 | {% include themes/apache-clean/footer.html %}
47 |
48 |
49 |
50 | {% include JB/analytics %}
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache-clean/footer.html:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache-clean/page.html:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 | {{ content }}
9 |
10 |
11 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache-clean/post.html:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
7 |
8 | {{ page.date | date_to_long_string }}
9 |
10 |
11 | {{ content }}
12 |
13 |
14 | {% unless page.categories == empty %}
15 |
16 |
17 | {% assign categories_list = page.categories %}
18 | {% include JB/categories_list %}
19 |
20 | {% endunless %}
21 |
22 | {% unless page.tags == empty %}
23 |
24 |
25 | {% assign tags_list = page.tags %}
26 | {% include JB/tags_list %}
27 |
28 | {% endunless %}
29 |
30 |
31 |
46 |
47 | {% include JB/comments %}
48 |
49 |
50 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache-clean/settings.yml:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to you under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | theme :
17 | name : apache-clean
18 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache/_navigation.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
15 |
16 |
17 |
18 | {% for entry in site.data.navigation.topnav %}
19 |
20 | {% if entry.subcategories %}
21 | {{ entry.title }}
22 |
30 | {% else %}
31 | {% if entry.url contains "http" %}
32 | {% assign target = "_blank" %}{% else %}
33 | {% assign target = "_self" %}{% endif %}
34 | {{ entry.title }}
35 | {% endif %}
36 |
37 | {% endfor %}
38 |
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 | {% if page.title == "Home" %}
47 |
61 | {% endif %}
62 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache/default.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | {{ page.title }}
6 | {% if page.description %} {% endif %}
7 |
8 |
9 |
10 |
11 |
12 |
13 |
16 |
17 |
18 |
19 |
20 |
21 |
22 |
28 |
29 |
30 |
31 |
32 |
33 |
34 |
35 |
36 |
37 | {% include themes/apache/_navigation.html %}
38 |
39 |
40 |
41 | {{ content }}
42 |
43 |
44 |
45 |
46 | {% include themes/apache/footer.html %}
47 |
48 |
49 |
50 | {% include JB/analytics %}
51 |
52 |
53 |
54 |
55 |
56 |
57 |
58 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache/footer.html:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache/page.html:
--------------------------------------------------------------------------------
1 |
5 |
6 |
7 |
8 | {{ content }}
9 |
10 |
11 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache/post.html:
--------------------------------------------------------------------------------
1 |
4 |
5 |
6 |
7 |
8 | {{ page.date | date_to_long_string }}
9 |
10 |
11 | {{ content }}
12 |
13 |
14 | {% unless page.categories == empty %}
15 |
16 |
17 | {% assign categories_list = page.categories %}
18 | {% include JB/categories_list %}
19 |
20 | {% endunless %}
21 |
22 | {% unless page.tags == empty %}
23 |
24 |
25 | {% assign tags_list = page.tags %}
26 | {% include JB/tags_list %}
27 |
28 | {% endunless %}
29 |
30 |
31 |
46 |
47 | {% include JB/comments %}
48 |
49 |
50 |
--------------------------------------------------------------------------------
/site/_includes/themes/apache/settings.yml:
--------------------------------------------------------------------------------
1 | # Licensed to the Apache Software Foundation (ASF) under one or more
2 | # contributor license agreements. See the NOTICE file distributed with
3 | # this work for additional information regarding copyright ownership.
4 | # The ASF licenses this file to you under the Apache License, Version 2.0
5 | # (the "License"); you may not use this file except in compliance with
6 | # the License. You may obtain a copy of the License at
7 | #
8 | # http://www.apache.org/licenses/LICENSE-2.0
9 | #
10 | # Unless required by applicable law or agreed to in writing, software
11 | # distributed under the License is distributed on an "AS IS" BASIS,
12 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | # See the License for the specific language governing permissions and
14 | # limitations under the License.
15 | #
16 | theme :
17 | name : apache
18 |
--------------------------------------------------------------------------------
/site/_layouts/default.html:
--------------------------------------------------------------------------------
1 | ---
2 | theme :
3 | name : apache-clean
4 | ---
5 | {% include JB/setup %}
6 | {% include themes/apache-clean/default.html %}
7 |
--------------------------------------------------------------------------------
/site/_layouts/page.html:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | ---
4 | {% include JB/setup %}
5 | {% include themes/apache-clean/page.html %}
6 |
--------------------------------------------------------------------------------
/site/_layouts/post.html:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | ---
4 | {% include JB/setup %}
5 | {% include themes/apache-clean/post.html %}
6 |
--------------------------------------------------------------------------------
/site/assets/images/important.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/images/important.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/bootstrap/fonts/glyphicons-halflings-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/bootstrap/fonts/glyphicons-halflings-regular.eot
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/bootstrap/fonts/glyphicons-halflings-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/bootstrap/fonts/glyphicons-halflings-regular.ttf
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/bootstrap/fonts/glyphicons-halflings-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/bootstrap/fonts/glyphicons-halflings-regular.woff
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/bootstrap/img/glyphicons-halflings-white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/bootstrap/img/glyphicons-halflings-white.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/bootstrap/img/glyphicons-halflings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/bootstrap/img/glyphicons-halflings.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/apache_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/apache_logo.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/apache_logo_800.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/apache_logo_800.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/body-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/body-bg.jpg
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/egg-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/egg-logo.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/favicon.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/header-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/header-bg.jpg
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/highlight-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/highlight-bg.jpg
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/logo.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/new-black.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/new-black.png
--------------------------------------------------------------------------------
/site/assets/themes/apache-clean/img/sidebar-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache-clean/img/sidebar-bg.jpg
--------------------------------------------------------------------------------
/site/assets/themes/apache/bootstrap/fonts/glyphicons-halflings-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/bootstrap/fonts/glyphicons-halflings-regular.eot
--------------------------------------------------------------------------------
/site/assets/themes/apache/bootstrap/fonts/glyphicons-halflings-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/bootstrap/fonts/glyphicons-halflings-regular.ttf
--------------------------------------------------------------------------------
/site/assets/themes/apache/bootstrap/fonts/glyphicons-halflings-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/bootstrap/fonts/glyphicons-halflings-regular.woff
--------------------------------------------------------------------------------
/site/assets/themes/apache/bootstrap/img/glyphicons-halflings-white.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/bootstrap/img/glyphicons-halflings-white.png
--------------------------------------------------------------------------------
/site/assets/themes/apache/bootstrap/img/glyphicons-halflings.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/bootstrap/img/glyphicons-halflings.png
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/apache_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/apache_logo.png
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/apache_logo_800.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/apache_logo_800.png
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/body-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/body-bg.jpg
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/egg-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/egg-logo.png
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/favicon.png
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/header-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/header-bg.jpg
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/highlight-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/highlight-bg.jpg
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/logo.png
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/new-black.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/new-black.png
--------------------------------------------------------------------------------
/site/assets/themes/apache/img/sidebar-bg.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/apache/bahir-website/f84894d6480d8464cee63c1994a382b9efc245e4/site/assets/themes/apache/img/sidebar-bg.jpg
--------------------------------------------------------------------------------
/site/community-members.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Community Members
4 | description: Project Community Page
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # {{ site.data.project.short_name }} Team Members
29 |
30 | {% if site.data.contributors %}
31 |
32 |
33 |
34 |
35 | Full Name
36 | Apache ID
37 | GitHub
38 | Role
39 | Affiliation
40 |
41 |
42 |
43 | {% for member in site.data.contributors %}
44 |
45 |
46 | {{member.name}}
47 | {{member.apacheId}}
48 | {{ member.githubId }}
49 | {{member.role}}
50 | {{member.org}}
51 |
52 | {% endfor %}
53 |
54 |
55 | {% endif %}
56 |
57 |
--------------------------------------------------------------------------------
/site/css/main.scss:
--------------------------------------------------------------------------------
1 | ---
2 | # Only the main Sass file needs front matter (the dashes are enough)
3 | ---
4 | @charset "utf-8";
5 |
6 |
7 |
8 | // Our variables
9 | $base-font-family: "Helvetica Neue", Helvetica, Arial, sans-serif;
10 | $base-font-size: 16px;
11 | $base-font-weight: 400;
12 | $small-font-size: $base-font-size * 0.875;
13 | $base-line-height: 1.5;
14 |
15 | $spacing-unit: 30px;
16 |
17 | $text-color: #111;
18 | $background-color: #fdfdfd;
19 | $brand-color: #2a7ae2;
20 |
21 | $grey-color: #828282;
22 | $grey-color-light: lighten($grey-color, 40%);
23 | $grey-color-dark: darken($grey-color, 25%);
24 |
25 | // Width of the content area
26 | $content-width: 800px;
27 |
28 | $on-palm: 600px;
29 | $on-laptop: 800px;
30 |
31 |
32 |
33 | // Use media queries like this:
34 | // @include media-query($on-palm) {
35 | // .wrapper {
36 | // padding-right: $spacing-unit / 2;
37 | // padding-left: $spacing-unit / 2;
38 | // }
39 | // }
40 | @mixin media-query($device) {
41 | @media screen and (max-width: $device) {
42 | @content;
43 | }
44 | }
45 |
46 |
47 |
48 | // Import partials from `sass_dir` (defaults to `_sass`)
49 | @import
50 | "base",
51 | "layout",
52 | "syntax-highlighting"
53 | ;
54 |
--------------------------------------------------------------------------------
/site/docs/flink/1.0/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Flink (1.0)
4 | description: Extensions for Apache Flink (1.0)
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Flink
29 |
30 |
31 |
32 | #### Streaming Connectors
33 |
34 | [ActiveMQ connector](../flink-streaming-activemq)
35 |
36 | [Akka connector](../flink-streaming-akka)
37 |
38 | [Flume connector](../flink-streaming-flume)
39 |
40 | [Netty connector](../flink-streaming-netty)
41 |
42 | [Redis connector](../flink-streaming-redis)
43 |
--------------------------------------------------------------------------------
/site/docs/flink/1.0/flink-streaming-activemq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for ActiveMQ
4 | description: Apache Flink Streaming Connector for ActiveMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink ActiveMQ Connector
29 |
30 | This connector provides a source and sink to [Apache ActiveMQ](http://activemq.apache.org/)™.
31 | To use this connector, add the following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-activemq_2.11
36 | 1.0
37 |
38 |
39 | *Version Compatibility*: This module is compatible with ActiveMQ 5.14.0.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | The source class is called `AMQSource`, and the sink is `AMQSink`.
45 |
--------------------------------------------------------------------------------
/site/docs/flink/1.0/flink-streaming-akka.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Akka
4 | description: Apache Flink Streaming Connector for Akka
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink Akka Connector
29 |
30 | This connector provides a sink to [Akka](http://akka.io/) source actors in an ActorSystem.
31 | To use this connector, add the following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-akka_2.11
36 | 1.0
37 |
38 |
39 | *Version Compatibility*: This module is compatible with Akka 2.0+.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | ## Configuration
45 |
46 | The configurations for the Receiver Actor System in the Flink Akka connector can be created using the standard Typesafe `Config` (`com.typesafe.config.Config`) object.
47 |
48 | To enable acknowledgements, the custom configuration `akka.remote.auto-ack` can be used.
49 |
50 | The user can set any of the default configurations allowed by Akka as well as custom configurations allowed by the connector.
51 |
52 | A sample configuration can be defined as follows:
53 |
54 | String configFile = getClass().getClassLoader()
55 | .getResource("feeder_actor.conf").getFile();
56 | Config config = ConfigFactory.parseFile(new File(configFile));
57 |
58 | ## Message Types
59 |
60 | There are 3 different kinds of message types which the receiver Actor in the Flink Akka connector can receive.
61 |
62 | - message containing `Iterable` data
63 |
64 | - message containing generic `Object` data
65 |
66 | - message containing generic `Object` data and a `Timestamp` value passed as `Tuple2`.
67 |
--------------------------------------------------------------------------------
/site/docs/flink/1.0/flink-streaming-flume.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Apache Flume
4 | description: Apache Flink Streaming Connector for Apache Flume
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink Flume Connector
29 |
30 | This connector provides a sink that can send data to [Apache Flume](https://flume.apache.org/)™. To use this connector, add the
31 | following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-flume_2.11
36 | 1.0
37 |
38 |
39 | *Version Compatibility*: This module is compatible with Flume 1.5.0.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | To create a `FlumeSink` instantiate the following constructor:
45 |
46 | FlumeSink(String host, int port, SerializationSchema schema)
47 |
--------------------------------------------------------------------------------
/site/docs/flink/1.1.0/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Flink (1.1.0)
4 | description: Extensions for Apache Flink (1.1.0)
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Flink
29 |
30 |
31 |
32 | #### Streaming Connectors
33 |
34 | [ActiveMQ connector](../flink-streaming-activemq)
35 |
36 | [Akka connector](../flink-streaming-akka)
37 |
38 | [Flume connector](../flink-streaming-flume)
39 |
40 | [InfluxDB connector](../flink-streaming-influxdb)
41 |
42 | [InfluxDB2 connector](../flink-streaming-influxdb2)
43 |
44 | [Kudu connector](../flink-streaming-kudu)
45 |
46 | [Netty connector](../flink-streaming-netty)
47 |
48 | [Pinot connector](../flink-streaming-pinot)
49 |
50 | [Redis connector](../flink-streaming-redis)
51 |
--------------------------------------------------------------------------------
/site/docs/flink/1.1.0/flink-streaming-activemq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for ActiveMQ
4 | description: Apache Flink Streaming Connector for ActiveMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink ActiveMQ Connector
29 |
30 | This connector provides a source and sink to [Apache ActiveMQ](http://activemq.apache.org/)™.
31 | To use this connector, add the following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-activemq_2.11
36 | 1.1.0
37 |
38 |
39 | *Version Compatibility*: This module is compatible with ActiveMQ 5.14.0.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | The source class is called `AMQSource`, and the sink is `AMQSink`.
45 |
--------------------------------------------------------------------------------
/site/docs/flink/1.1.0/flink-streaming-akka.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Akka
4 | description: Apache Flink Streaming Connector for Akka
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink Akka Connector
29 |
30 | This connector provides a sink to [Akka](http://akka.io/) source actors in an ActorSystem.
31 | To use this connector, add the following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-akka_2.11
36 | 1.1.0
37 |
38 |
39 | *Version Compatibility*: This module is compatible with Akka 2.0+.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | ## Configuration
45 |
46 | The configurations for the Receiver Actor System in the Flink Akka connector can be created using the standard Typesafe `Config` (`com.typesafe.config.Config`) object.
47 |
48 | To enable acknowledgements, the custom configuration `akka.remote.auto-ack` can be used.
49 |
50 | The user can set any of the default configurations allowed by Akka as well as custom configurations allowed by the connector.
51 |
52 | A sample configuration can be defined as follows:
53 |
54 | String configFile = getClass().getClassLoader()
55 | .getResource("feeder_actor.conf").getFile();
56 | Config config = ConfigFactory.parseFile(new File(configFile));
57 |
58 | ## Message Types
59 |
60 | There are 3 different kinds of message types which the receiver Actor in the Flink Akka connector can receive.
61 |
62 | - message containing `Iterable` data
63 |
64 | - message containing generic `Object` data
65 |
66 | - message containing generic `Object` data and a `Timestamp` value passed as `Tuple2`.
67 |
--------------------------------------------------------------------------------
/site/docs/flink/1.1.0/flink-streaming-flume.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Apache Flume
4 | description: Apache Flink Streaming Connector for Apache Flume
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink Flume Connector
29 |
30 | This connector provides a sink that can send data to [Apache Flume](https://flume.apache.org/)™. To use this connector, add the
31 | following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-flume_2.11
36 | 1.1.0
37 |
38 |
39 | *Version Compatibility*: This module is compatible with Flume 1.8.0.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | To create a `FlumeSink` instantiate the following constructor:
45 |
46 | FlumeSink(String host, int port, SerializationSchema schema)
47 |
48 |
--------------------------------------------------------------------------------
/site/docs/flink/1.1.0/flink-streaming-influxdb.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for InfluxDB
4 | description: Apache Flink Streaming Connector for InfluxDB
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink InfluxDB Connector
29 |
30 | This connector provides a sink that can send data to [InfluxDB](https://www.influxdata.com/). To use this connector, add the
31 | following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-influxdb_2.11
36 | 1.1.0
37 |
38 |
39 | *Version Compatibility*: This module is compatible with InfluxDB 1.3.x.
40 | *Requirements*: Java 1.8+
41 |
42 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
43 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.3/dev/linking.html).
44 |
45 | ## Installing InfluxDB
46 | Follow the instructions from the [InfluxDB download page](https://portal.influxdata.com/downloads#influxdb).
47 |
48 | ## Examples
49 |
50 | ### JAVA API
51 |
52 | DataStream dataStream = ...
53 | InfluxDBConfig influxDBConfig = InfluxDBConfig.builder(String host, String username, String password, String dbName)
54 | dataStream.addSink(new InfluxDBSink(influxDBConfig));
55 |
56 |
57 | See end-to-end examples at [InfluxDB Examples](https://github.com/apache/bahir-flink/tree/master/flink-connector-influxdb/examples)
58 |
59 |
60 |
--------------------------------------------------------------------------------
/site/docs/flink/current/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Flink (1.2-SNAPSHOT)
4 | description: Extensions for Apache Flink (1.2-SNAPSHOT)
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Flink
29 |
30 |
31 |
32 | #### Streaming Connectors
33 |
34 | [ActiveMQ connector](../flink-streaming-activemq)
35 |
36 | [Akka connector](../flink-streaming-akka)
37 |
38 | [Flume connector](../flink-streaming-flume)
39 |
40 | [InfluxDB connector](../flink-streaming-influxdb)
41 |
42 | [InfluxDB2 connector](../flink-streaming-influxdb2)
43 |
44 | [Kudu connector](../flink-streaming-kudu)
45 |
46 | [Netty connector](../flink-streaming-netty)
47 |
48 | [Pinot connector](../flink-streaming-pinot)
49 |
50 | [Redis connector](../flink-streaming-redis)
51 |
--------------------------------------------------------------------------------
/site/docs/flink/current/flink-streaming-activemq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for ActiveMQ
4 | description: Apache Flink Streaming Connector for ActiveMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink ActiveMQ Connector
29 |
30 | This connector provides a source and sink to [Apache ActiveMQ](http://activemq.apache.org/)™.
31 | To use this connector, add the following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-activemq_2.11
36 | 1.2-SNAPSHOT
37 |
38 |
39 | *Version Compatibility*: This module is compatible with ActiveMQ 5.14.0.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | The source class is called `AMQSource`, and the sink is `AMQSink`.
45 |
--------------------------------------------------------------------------------
/site/docs/flink/current/flink-streaming-akka.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Akka
4 | description: Apache Flink Streaming Connector for Akka
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink Akka Connector
29 |
30 | This connector provides a sink to [Akka](http://akka.io/) source actors in an ActorSystem.
31 | To use this connector, add the following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-akka_2.11
36 | 1.2-SNAPSHOT
37 |
38 |
39 | *Version Compatibility*: This module is compatible with Akka 2.0+.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | ## Configuration
45 |
46 | The configurations for the Receiver Actor System in Flink Akka connector can be created using the standard typesafe `Config (com.typesafe.config.Config)` object.
47 |
48 | To enable acknowledgements, the custom configuration `akka.remote.auto-ack` can be used.
49 |
50 | The user can set any of the default configurations allowed by Akka as well as custom configurations allowed by the connector.
51 |
52 | A sample configuration can be defined as follows:
53 |
54 | String configFile = getClass().getClassLoader()
55 | .getResource("feeder_actor.conf").getFile();
56 | Config config = ConfigFactory.parseFile(new File(configFile));
57 |
58 | ## Message Types
59 |
60 | There are 3 different kind of message types which the receiver Actor in Flink Akka connector can receive.
61 |
62 | - message containing `Iterable` data
63 |
64 | - message containing generic `Object` data
65 |
66 | - message containing generic `Object` data and a `Timestamp` value passed as `Tuple2`.
67 |
--------------------------------------------------------------------------------
/site/docs/flink/current/flink-streaming-flume.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Apache Flume
4 | description: Apache Flink Streaming Connector for Apache Flume
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink Flume Connector
29 |
30 | This connector provides a sink that can send data to [Apache Flume](https://flume.apache.org/)™. To use this connector, add the
31 | following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-flume_2.11
36 | 1.2-SNAPSHOT
37 |
38 |
39 | *Version Compatibility*: This module is compatible with Flume 1.8.0.
40 |
41 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
42 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
43 |
44 | To create a `FlumeSink` instantiate the following constructor:
45 |
46 | FlumeSink(String host, int port, SerializationSchema schema)
47 |
48 |
--------------------------------------------------------------------------------
/site/docs/flink/current/flink-streaming-influxdb.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for InfluxDB
4 | description: Apache Flink Streaming Connector for InfluxDB
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Flink InfluxDB Connector
29 |
30 | This connector provides a sink that can send data to [InfluxDB](https://www.influxdata.com/). To use this connector, add the
31 | following dependency to your project:
32 |
33 |
34 | org.apache.bahir
35 | flink-connector-influxdb_2.11
36 | 1.2-SNAPSHOT
37 |
38 |
39 | *Version Compatibility*: This module is compatible with InfluxDB 1.3.x.
40 | *Requirements*: Java 1.8+
41 |
42 | Note that the streaming connectors are not part of the binary distribution of Flink. You need to link them into your job jar for cluster execution.
43 | See how to link with them for cluster execution [here](https://ci.apache.org/projects/flink/flink-docs-release-1.2/dev/linking.html).
44 |
45 | ## Installing InfluxDB
46 | Follow the instructions from the [InfluxDB download page](https://portal.influxdata.com/downloads#influxdb).
47 |
48 | ## Examples
49 |
50 | ### JAVA API
51 |
52 | DataStream dataStream = ...
53 | InfluxDBConfig influxDBConfig = InfluxDBConfig.builder(String host, String username, String password, String dbName)
54 | dataStream.addSink(new InfluxDBSink(influxDBConfig));
55 |
56 |
57 | See end-to-end examples at [InfluxDB Examples](https://github.com/apache/bahir-flink/tree/master/flink-connector-influxdb/examples)
58 |
59 |
60 |
--------------------------------------------------------------------------------
/site/docs/flink/overview.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Flink
4 | description: Extensions for Apache Flink
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Flink
29 |
30 | - [Current - 1.2-SNAPSHOT](/docs/flink/current/documentation)
31 | - [1.1.0](/docs/flink/1.1.0/documentation)
32 | - [1.0](/docs/flink/1.0/documentation)
33 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-activemq.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for ActiveMQ
4 | description: Apache Flink Streaming Connector for ActiveMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-akka.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Akka
4 | description: Apache Flink Streaming Connector for Akka
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-flume.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Apache Flume
4 | description: Apache Flink Streaming Connector for Apache Flume
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-influxdb.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for InfluxDB
4 | description: Apache Flink Streaming Connector for InfluxDB
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-influxdb2.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for InfluxDB2
4 | description: Apache Flink Streaming Connector for InfluxDB2
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-kudu.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Apache Kudu
4 | description: Apache Flink Streaming Connector for Apache Kudu
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-netty.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Netty
4 | description: Apache Flink Streaming Connector for Netty
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-pinot.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Pinot
4 | description: Apache Flink Streaming Connector for Pinot
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/flink/templates/flink-streaming-redis.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Apache Flink Streaming Connector for Redis
4 | description: Apache Flink Streaming Connector for Redis
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/spark/2.0.0/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark 2.0.0
29 |
30 |
31 |
32 | #### Structured Streaming Data Sources
33 |
34 | [MQTT data source](../spark-sql-streaming-mqtt)
35 |
36 |
37 |
38 | #### Discretized Streams (DStreams) Connectors
39 |
40 | [Akka connector](../spark-streaming-akka)
41 |
42 | [MQTT connector](../spark-streaming-mqtt)
43 |
44 | [Twitter connector](../spark-streaming-twitter)
45 |
46 | [ZeroMQ connector](../spark-streaming-zeromq)
--------------------------------------------------------------------------------
/site/docs/spark/2.0.0/spark-streaming-twitter.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming Twitter
4 | description: Spark Streaming Twitter
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading social data from [twitter](http://twitter.com/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-twitter" % "2.0.0"
35 |
36 | Using Maven:
37 |
38 |
39 | org.apache.bahir
40 | spark-streaming-twitter_2.11
41 | 2.0.0
42 |
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-twitter_2.11:2.0.0
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 |
55 | ## Examples
56 |
57 | `TwitterUtils` uses Twitter4j to get the public stream of tweets using [Twitter's Streaming API](https://dev.twitter.com/docs/streaming-apis). Authentication information
58 | can be provided by any of the [methods](http://twitter4j.org/en/configuration.html) supported by Twitter4J library. You can import the `TwitterUtils` class and create a DStream with `TwitterUtils.createStream` as shown below.
59 |
60 | ### Scala API
61 |
62 | import org.apache.spark.streaming.twitter._
63 |
64 | TwitterUtils.createStream(ssc, None)
65 |
66 | ### Java API
67 |
68 | import org.apache.spark.streaming.twitter.*;
69 |
70 | TwitterUtils.createStream(jssc);
71 |
72 |
73 | You can also either get the public stream, or get the filtered stream based on keywords.
74 | See end-to-end examples at [Twitter Examples](https://github.com/apache/bahir/tree/master/streaming-twitter/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.0.0/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.0.0"
35 |
36 | Using Maven:
37 |
38 |
39 | org.apache.bahir
40 | spark-streaming-zeromq_2.11
41 | 2.0.0
42 |
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.0.0
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.0.1/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark 2.0.1
29 |
30 |
31 |
32 | #### Structured Streaming Data Sources
33 |
34 | [MQTT data source](../spark-sql-streaming-mqtt)
35 |
36 |
37 |
38 | #### Discretized Streams (DStreams) Connectors
39 |
40 | [Akka connector](../spark-streaming-akka)
41 |
42 | [MQTT connector](../spark-streaming-mqtt)
43 |
44 | [Twitter connector](../spark-streaming-twitter)
45 |
46 | [ZeroMQ connector](../spark-streaming-zeromq)
--------------------------------------------------------------------------------
/site/docs/spark/2.0.1/spark-streaming-twitter.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming Twitter
4 | description: Spark Streaming Twitter
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading social data from [twitter](http://twitter.com/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-twitter" % "2.0.1"
35 |
36 | Using Maven:
37 |
38 |
39 | org.apache.bahir
40 | spark-streaming-twitter_2.11
41 | 2.0.1
42 |
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-twitter_2.11:2.0.1
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 |
55 | ## Examples
56 |
57 | `TwitterUtils` uses Twitter4j to get the public stream of tweets using [Twitter's Streaming API](https://dev.twitter.com/docs/streaming-apis). Authentication information
58 | can be provided by any of the [methods](http://twitter4j.org/en/configuration.html) supported by Twitter4J library. You can import the `TwitterUtils` class and create a DStream with `TwitterUtils.createStream` as shown below.
59 |
60 | ### Scala API
61 |
62 | import org.apache.spark.streaming.twitter._
63 |
64 | TwitterUtils.createStream(ssc, None)
65 |
66 | ### Java API
67 |
68 | import org.apache.spark.streaming.twitter.*;
69 |
70 | TwitterUtils.createStream(jssc);
71 |
72 |
73 | You can also either get the public stream, or get the filtered stream based on keywords.
74 | See end-to-end examples at [Twitter Examples](https://github.com/apache/bahir/tree/master/streaming-twitter/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.0.1/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.0.1"
35 |
36 | Using Maven:
37 |
38 |
39 | org.apache.bahir
40 | spark-streaming-zeromq_2.11
41 | 2.0.1
42 |
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.0.1
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.0.2/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark 2.0.2
29 |
30 |
31 |
32 | #### Structured Streaming Data Sources
33 |
34 | [MQTT data source](../spark-sql-streaming-mqtt)
35 |
36 |
37 |
38 | #### Discretized Streams (DStreams) Connectors
39 |
40 | [Akka connector](../spark-streaming-akka)
41 |
42 | [MQTT connector](../spark-streaming-mqtt)
43 |
44 | [Twitter connector](../spark-streaming-twitter)
45 |
46 | [ZeroMQ connector](../spark-streaming-zeromq)
--------------------------------------------------------------------------------
/site/docs/spark/2.0.2/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.0.2"
35 |
36 | Using Maven:
37 |
38 |
39 | org.apache.bahir
40 | spark-streaming-zeromq_2.11
41 | 2.0.2
42 |
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.0.2
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.1.0/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark 2.1.0
29 |
30 |
31 |
32 | #### Structured Streaming Data Sources
33 |
34 | [MQTT data source](../spark-sql-streaming-mqtt)
35 |
36 |
37 |
38 | #### Discretized Streams (DStreams) Connectors
39 |
40 | [Akka connector](../spark-streaming-akka)
41 |
42 | [MQTT connector](../spark-streaming-mqtt)
43 |
44 | [Twitter connector](../spark-streaming-twitter)
45 |
46 | [ZeroMQ connector](../spark-streaming-zeromq)
--------------------------------------------------------------------------------
/site/docs/spark/2.1.0/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.1.0"
35 |
36 | Using Maven:
37 |
38 |
39 | org.apache.bahir
40 | spark-streaming-zeromq_2.11
41 | 2.1.0
42 |
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.1.0
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.1.1/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark 2.1.1
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Akka connector](../spark-streaming-akka)
49 |
50 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
51 |
52 | [MQTT connector](../spark-streaming-mqtt)
53 |
54 | [Twitter connector](../spark-streaming-twitter)
55 |
56 | [ZeroMQ connector](../spark-streaming-zeromq)
57 |
--------------------------------------------------------------------------------
/site/docs/spark/2.1.1/spark-streaming-pubsub.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming Google Pub-Sub
4 | description: Spark Streaming Google Pub-Sub
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 | A library for reading data from [Google Cloud Pub/Sub](https://cloud.google.com/pubsub/) using Spark Streaming.
28 |
29 | ## Linking
30 |
31 | Using SBT:
32 |
33 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-pubsub" % "2.1.1"
34 |
35 | Using Maven:
36 |
37 |
38 | org.apache.bahir
39 | spark-streaming-pubsub_2.11
40 | 2.1.1
41 |
42 |
43 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
44 | For example, to include it when starting the spark shell:
45 |
46 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-pubsub_2.11:2.1.1
47 |
48 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
49 | The `--packages` argument can also be used with `bin/spark-submit`.
50 |
51 | ## Examples
52 |
53 | First you need to create a credential using SparkGCPCredentials; it supports four types of credentials:
54 | * application default
55 | `SparkGCPCredentials.builder.build()`
56 | * json type service account
57 | `SparkGCPCredentials.builder.jsonServiceAccount(PATH_TO_JSON_KEY).build()`
58 | * p12 type service account
59 | `SparkGCPCredentials.builder.p12ServiceAccount(PATH_TO_P12_KEY, EMAIL_ACCOUNT).build()`
60 | * metadata service account (running on Dataproc)
61 | `SparkGCPCredentials.builder.metadataServiceAccount().build()`
62 |
63 | ### Scala API
64 |
65 | val lines = PubsubUtils.createStream(ssc, projectId, subscriptionName, credential, ..)
66 |
67 | ### Java API
68 |
69 | JavaDStream lines = PubsubUtils.createStream(jssc, projectId, subscriptionName, credential...)
70 |
71 | See end-to-end examples at [Google Cloud Pubsub Examples](https://github.com/apache/bahir/tree/master/streaming-pubsub/examples)
72 |
--------------------------------------------------------------------------------
/site/docs/spark/2.1.1/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.1.1"
35 |
36 | Using Maven:
37 |
38 |
39 | org.apache.bahir
40 | spark-streaming-zeromq_2.11
41 | 2.1.1
42 |
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.1.1
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
66 |
--------------------------------------------------------------------------------
/site/docs/spark/2.1.2/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [MQTT connector](../spark-streaming-mqtt)
55 |
56 | [Twitter connector](../spark-streaming-twitter)
57 |
58 | [ZeroMQ connector](../spark-streaming-zeromq)
59 |
--------------------------------------------------------------------------------
/site/docs/spark/2.1.2/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.1.2"
35 |
36 | Using Maven:
37 |
38 |     <dependency>
39 |         <groupId>org.apache.bahir</groupId>
40 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
41 |         <version>2.1.2</version>
42 |     </dependency>
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.1.2
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.1.3/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [MQTT connector](../spark-streaming-mqtt)
55 |
56 | [Twitter connector](../spark-streaming-twitter)
57 |
58 | [ZeroMQ connector](../spark-streaming-zeromq)
59 |
--------------------------------------------------------------------------------
/site/docs/spark/2.1.3/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.1.3"
35 |
36 | Using Maven:
37 |
38 |     <dependency>
39 |         <groupId>org.apache.bahir</groupId>
40 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
41 |         <version>2.1.3</version>
42 |     </dependency>
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.1.3
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.2.0/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark 2.2.0
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [MQTT connector](../spark-streaming-mqtt)
55 |
56 | [Twitter connector](../spark-streaming-twitter)
57 |
58 | [ZeroMQ connector](../spark-streaming-zeromq)
59 |
--------------------------------------------------------------------------------
/site/docs/spark/2.2.0/spark-streaming-pubsub.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming Google Pub-Sub
4 | description: Spark Streaming Google Pub-Sub
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 | A library for reading data from [Google Cloud Pub/Sub](https://cloud.google.com/pubsub/) using Spark Streaming.
28 |
29 | ## Linking
30 |
31 | Using SBT:
32 |
33 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-pubsub" % "2.2.0"
34 |
35 | Using Maven:
36 |
37 |     <dependency>
38 |         <groupId>org.apache.bahir</groupId>
39 |         <artifactId>spark-streaming-pubsub_2.11</artifactId>
40 |         <version>2.2.0</version>
41 |     </dependency>
42 |
43 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
44 | For example, to include it when starting the spark shell:
45 |
46 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-pubsub_2.11:2.2.0
47 |
48 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
49 | The `--packages` argument can also be used with `bin/spark-submit`.
50 |
51 | ## Examples
52 |
53 | First you need to create credential by SparkGCPCredentials, it support four type of credentials
54 | * application default
55 | `SparkGCPCredentials.builder.build()`
56 | * json type service account
57 | `SparkGCPCredentials.builder.jsonServiceAccount(PATH_TO_JSON_KEY).build()`
58 | * p12 type service account
59 | `SparkGCPCredentials.builder.p12ServiceAccount(PATH_TO_P12_KEY, EMAIL_ACCOUNT).build()`
60 | * metadata service account(running on dataproc)
61 | `SparkGCPCredentials.builder.metadataServiceAccount().build()`
62 |
63 | ### Scala API
64 |
65 | val lines = PubsubUtils.createStream(ssc, projectId, subscriptionName, credential, ..)
66 |
67 | ### Java API
68 |
69 | JavaDStream lines = PubsubUtils.createStream(jssc, projectId, subscriptionName, credential...)
70 |
71 | See end-to-end examples at [Google Cloud Pubsub Examples](https://github.com/apache/bahir/tree/master/streaming-pubsub/examples)
72 |
--------------------------------------------------------------------------------
/site/docs/spark/2.2.0/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.2.0"
35 |
36 | Using Maven:
37 |
38 |     <dependency>
39 |         <groupId>org.apache.bahir</groupId>
40 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
41 |         <version>2.2.0</version>
42 |     </dependency>
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.2.0
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
66 |
--------------------------------------------------------------------------------
/site/docs/spark/2.2.1/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [MQTT connector](../spark-streaming-mqtt)
55 |
56 | [Twitter connector](../spark-streaming-twitter)
57 |
58 | [ZeroMQ connector](../spark-streaming-zeromq)
59 |
--------------------------------------------------------------------------------
/site/docs/spark/2.2.1/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.2.1"
35 |
36 | Using Maven:
37 |
38 |     <dependency>
39 |         <groupId>org.apache.bahir</groupId>
40 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
41 |         <version>2.2.1</version>
42 |     </dependency>
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.2.1
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.2.2/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [MQTT connector](../spark-streaming-mqtt)
55 |
56 | [Twitter connector](../spark-streaming-twitter)
57 |
58 | [ZeroMQ connector](../spark-streaming-zeromq)
59 |
--------------------------------------------------------------------------------
/site/docs/spark/2.2.2/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.2.2"
35 |
36 | Using Maven:
37 |
38 |     <dependency>
39 |         <groupId>org.apache.bahir</groupId>
40 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
41 |         <version>2.2.2</version>
42 |     </dependency>
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.2.2
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.2.3/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [MQTT connector](../spark-streaming-mqtt)
55 |
56 | [Twitter connector](../spark-streaming-twitter)
57 |
58 | [ZeroMQ connector](../spark-streaming-zeromq)
59 |
--------------------------------------------------------------------------------
/site/docs/spark/2.2.3/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
29 |
30 | ## Linking
31 |
32 | Using SBT:
33 |
34 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.2.3"
35 |
36 | Using Maven:
37 |
38 |     <dependency>
39 |         <groupId>org.apache.bahir</groupId>
40 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
41 |         <version>2.2.3</version>
42 |     </dependency>
43 |
44 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
45 | For example, to include it when starting the spark shell:
46 |
47 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.2.3
48 |
49 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
50 | The `--packages` argument can also be used with `bin/spark-submit`.
51 |
52 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
53 |
54 | ## Examples
55 |
56 |
57 | ### Scala API
58 |
59 | val lines = ZeroMQUtils.createStream(ssc, ...)
60 |
61 | ### Java API
62 |
63 | JavaDStream lines = ZeroMQUtils.createStream(jssc, ...);
64 |
65 | See end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples)
--------------------------------------------------------------------------------
/site/docs/spark/2.3.0/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt) {:height="36px" width="36px"} (new Sink)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [Cloud PubNub connector](../spark-streaming-pubnub) {:height="36px" width="36px"}
55 |
56 | [MQTT connector](../spark-streaming-mqtt)
57 |
58 | [Twitter connector](../spark-streaming-twitter)
59 |
60 | [ZeroMQ connector](../spark-streaming-zeromq) {:height="36px" width="36px"} (Enhanced Implementation)
61 |
--------------------------------------------------------------------------------
/site/docs/spark/2.3.0/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 | # Spark Streaming ZeroMQ Connector
28 |
29 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
30 |
31 | ## Linking
32 |
33 | Using SBT:
34 |
35 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.3.0"
36 |
37 | Using Maven:
38 |
39 |     <dependency>
40 |         <groupId>org.apache.bahir</groupId>
41 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
42 |         <version>2.3.0</version>
43 |     </dependency>
44 |
45 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
46 | For example, to include it when starting the spark shell:
47 |
48 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.3.0
49 |
50 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
51 | The `--packages` argument can also be used with `bin/spark-submit`.
52 |
53 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
54 |
55 | ## Examples
56 |
57 | Review end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples).
58 |
59 | ### Scala API
60 |
61 | import org.apache.spark.streaming.zeromq.ZeroMQUtils
62 |
63 | val lines = ZeroMQUtils.createTextStream(
64 | ssc, "tcp://server:5555", true, Seq("my-topic".getBytes)
65 | )
66 |
67 | ### Java API
68 |
69 | import org.apache.spark.storage.StorageLevel;
70 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
71 | import org.apache.spark.streaming.zeromq.ZeroMQUtils;
72 |
73 | JavaReceiverInputDStream test1 = ZeroMQUtils.createJavaStream(
74 |         ssc, "tcp://server:5555", true, Arrays.asList("my-topic".getBytes()),
75 | StorageLevel.MEMORY_AND_DISK_SER_2()
76 | );
--------------------------------------------------------------------------------
/site/docs/spark/2.3.1/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt) {:height="36px" width="36px"} (new Sink)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [Cloud PubNub connector](../spark-streaming-pubnub) {:height="36px" width="36px"}
55 |
56 | [MQTT connector](../spark-streaming-mqtt)
57 |
58 | [Twitter connector](../spark-streaming-twitter)
59 |
60 | [ZeroMQ connector](../spark-streaming-zeromq) {:height="36px" width="36px"} (Enhanced Implementation)
61 |
--------------------------------------------------------------------------------
/site/docs/spark/2.3.1/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 | # Spark Streaming ZeroMQ Connector
28 |
29 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
30 |
31 | ## Linking
32 |
33 | Using SBT:
34 |
35 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.3.1"
36 |
37 | Using Maven:
38 |
39 |     <dependency>
40 |         <groupId>org.apache.bahir</groupId>
41 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
42 |         <version>2.3.1</version>
43 |     </dependency>
44 |
45 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
46 | For example, to include it when starting the spark shell:
47 |
48 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.3.1
49 |
50 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
51 | The `--packages` argument can also be used with `bin/spark-submit`.
52 |
53 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
54 |
55 | ## Examples
56 |
57 | Review end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples).
58 |
59 | ### Scala API
60 |
61 | import org.apache.spark.streaming.zeromq.ZeroMQUtils
62 |
63 | val lines = ZeroMQUtils.createTextStream(
64 | ssc, "tcp://server:5555", true, Seq("my-topic".getBytes)
65 | )
66 |
67 | ### Java API
68 |
69 | import org.apache.spark.storage.StorageLevel;
70 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
71 | import org.apache.spark.streaming.zeromq.ZeroMQUtils;
72 |
73 | JavaReceiverInputDStream test1 = ZeroMQUtils.createJavaStream(
74 |         ssc, "tcp://server:5555", true, Arrays.asList("my-topic".getBytes()),
75 | StorageLevel.MEMORY_AND_DISK_SER_2()
76 | );
--------------------------------------------------------------------------------
/site/docs/spark/2.3.2/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt) {:height="36px" width="36px"} (new Sink)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [Cloud PubNub connector](../spark-streaming-pubnub) {:height="36px" width="36px"}
55 |
56 | [MQTT connector](../spark-streaming-mqtt)
57 |
58 | [Twitter connector](../spark-streaming-twitter)
59 |
60 | [ZeroMQ connector](../spark-streaming-zeromq) {:height="36px" width="36px"} (Enhanced Implementation)
61 |
--------------------------------------------------------------------------------
/site/docs/spark/2.3.2/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 | # Spark Streaming ZeroMQ Connector
28 |
29 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
30 |
31 | ## Linking
32 |
33 | Using SBT:
34 |
35 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.3.2"
36 |
37 | Using Maven:
38 |
39 |     <dependency>
40 |         <groupId>org.apache.bahir</groupId>
41 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
42 |         <version>2.3.2</version>
43 |     </dependency>
44 |
45 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
46 | For example, to include it when starting the spark shell:
47 |
48 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.3.2
49 |
50 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
51 | The `--packages` argument can also be used with `bin/spark-submit`.
52 |
53 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
54 |
55 | ## Examples
56 |
57 | Review end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples).
58 |
59 | ### Scala API
60 |
61 | import org.apache.spark.streaming.zeromq.ZeroMQUtils
62 |
63 | val lines = ZeroMQUtils.createTextStream(
64 | ssc, "tcp://server:5555", true, Seq("my-topic".getBytes)
65 | )
66 |
67 | ### Java API
68 |
69 | import org.apache.spark.storage.StorageLevel;
70 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
71 | import org.apache.spark.streaming.zeromq.ZeroMQUtils;
72 |
73 | JavaReceiverInputDStream test1 = ZeroMQUtils.createJavaStream(
74 |         ssc, "tcp://server:5555", true, Arrays.asList("my-topic".getBytes()),
75 | StorageLevel.MEMORY_AND_DISK_SER_2()
76 | );
--------------------------------------------------------------------------------
/site/docs/spark/2.3.3/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt) {:height="36px" width="36px"} (new Sink)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [Cloud PubNub connector](../spark-streaming-pubnub) {:height="36px" width="36px"}
55 |
56 | [MQTT connector](../spark-streaming-mqtt)
57 |
58 | [Twitter connector](../spark-streaming-twitter)
59 |
60 | [ZeroMQ connector](../spark-streaming-zeromq) {:height="36px" width="36px"} (Enhanced Implementation)
61 |
--------------------------------------------------------------------------------
/site/docs/spark/2.3.3/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 | # Spark Streaming ZeroMQ Connector
28 |
29 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
30 |
31 | ## Linking
32 |
33 | Using SBT:
34 |
35 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.3.3"
36 |
37 | Using Maven:
38 |
39 |     <dependency>
40 |         <groupId>org.apache.bahir</groupId>
41 |         <artifactId>spark-streaming-zeromq_2.11</artifactId>
42 |         <version>2.3.3</version>
43 |     </dependency>
44 |
45 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
46 | For example, to include it when starting the spark shell:
47 |
48 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.3.3
49 |
50 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
51 | The `--packages` argument can also be used with `bin/spark-submit`.
52 |
53 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
54 |
55 | ## Examples
56 |
57 | Review end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples).
58 |
59 | ### Scala API
60 |
61 | import org.apache.spark.streaming.zeromq.ZeroMQUtils
62 |
63 | val lines = ZeroMQUtils.createTextStream(
64 | ssc, "tcp://server:5555", true, Seq("my-topic".getBytes)
65 | )
66 |
67 | ### Java API
68 |
69 | import org.apache.spark.storage.StorageLevel;
70 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
71 | import org.apache.spark.streaming.zeromq.ZeroMQUtils;
72 |
73 | JavaReceiverInputDStream test1 = ZeroMQUtils.createJavaStream(
 74 |         ssc, "tcp://server:5555", true, Arrays.asList("my-topic".getBytes()),
75 | StorageLevel.MEMORY_AND_DISK_SER_2()
76 | );
--------------------------------------------------------------------------------
/site/docs/spark/2.3.4/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt) {:height="36px" width="36px"} (new Sink)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [Cloud PubNub connector](../spark-streaming-pubnub) {:height="36px" width="36px"}
55 |
56 | [MQTT connector](../spark-streaming-mqtt)
57 |
58 | [Twitter connector](../spark-streaming-twitter)
59 |
60 | [ZeroMQ connector](../spark-streaming-zeromq) {:height="36px" width="36px"} (Enhanced Implementation)
61 |
--------------------------------------------------------------------------------
/site/docs/spark/2.3.4/spark-streaming-zeromq.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 | # Spark Streaming ZeroMQ Connector
28 |
29 | A library for reading data from [ZeroMQ](http://zeromq.org/) using Spark Streaming.
30 |
31 | ## Linking
32 |
33 | Using SBT:
34 |
35 | libraryDependencies += "org.apache.bahir" %% "spark-streaming-zeromq" % "2.3.4"
36 |
37 | Using Maven:
38 |
39 |
40 | org.apache.bahir
41 | spark-streaming-zeromq_2.11
42 | 2.3.4
43 |
44 |
45 | This library can also be added to Spark jobs launched through `spark-shell` or `spark-submit` by using the `--packages` command line option.
46 | For example, to include it when starting the spark shell:
47 |
48 | $ bin/spark-shell --packages org.apache.bahir:spark-streaming-zeromq_2.11:2.3.4
49 |
50 | Unlike using `--jars`, using `--packages` ensures that this library and its dependencies will be added to the classpath.
51 | The `--packages` argument can also be used with `bin/spark-submit`.
52 |
53 | This library is cross-published for Scala 2.10 and Scala 2.11, so users should replace the proper Scala version (2.10 or 2.11) in the commands listed above.
54 |
55 | ## Examples
56 |
57 | Review end-to-end examples at [ZeroMQ Examples](https://github.com/apache/bahir/tree/master/streaming-zeromq/examples).
58 |
59 | ### Scala API
60 |
61 | import org.apache.spark.streaming.zeromq.ZeroMQUtils
62 |
63 | val lines = ZeroMQUtils.createTextStream(
64 | ssc, "tcp://server:5555", true, Seq("my-topic".getBytes)
65 | )
66 |
67 | ### Java API
68 |
69 | import org.apache.spark.storage.StorageLevel;
70 | import org.apache.spark.streaming.api.java.JavaReceiverInputDStream;
71 | import org.apache.spark.streaming.zeromq.ZeroMQUtils;
72 |
73 | JavaReceiverInputDStream test1 = ZeroMQUtils.createJavaStream(
 74 |         ssc, "tcp://server:5555", true, Arrays.asList("my-topic".getBytes()),
75 | StorageLevel.MEMORY_AND_DISK_SER_2()
76 | );
--------------------------------------------------------------------------------
/site/docs/spark/2.4.0/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt) {:height="36px" width="36px"} (new Sink)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [Cloud PubNub connector](../spark-streaming-pubnub) {:height="36px" width="36px"}
55 |
56 | [MQTT connector](../spark-streaming-mqtt)
57 |
58 | [Twitter connector](../spark-streaming-twitter)
59 |
60 | [ZeroMQ connector](../spark-streaming-zeromq) {:height="36px" width="36px"} (Enhanced Implementation)
61 |
--------------------------------------------------------------------------------
/site/docs/spark/current/documentation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 |
31 |
32 | #### SQL Data Sources
33 |
34 | [Apache CouchDB/Cloudant data source](../spark-sql-cloudant)
35 |
36 |
37 |
38 | #### Structured Streaming Data Sources
39 |
40 | [Akka data source](../spark-sql-streaming-akka)
41 |
42 | [MQTT data source](../spark-sql-streaming-mqtt) {:height="36px" width="36px"} (new Sink)
43 |
44 |
45 |
46 | #### Discretized Streams (DStreams) Connectors
47 |
48 | [Apache CouchDB/Cloudant connector](../spark-sql-cloudant)
49 |
50 | [Akka connector](../spark-streaming-akka)
51 |
52 | [Google Cloud Pub/Sub connector](../spark-streaming-pubsub)
53 |
54 | [Cloud PubNub connector](../spark-streaming-pubnub) {:height="36px" width="36px"}
55 |
56 | [MQTT connector](../spark-streaming-mqtt)
57 |
58 | [Twitter connector](../spark-streaming-twitter)
59 |
60 | [ZeroMQ connector](../spark-streaming-zeromq) {:height="36px" width="36px"} (Enhanced Implementation)
61 |
--------------------------------------------------------------------------------
/site/docs/spark/overview.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Extensions for Apache Spark
4 | description: Extensions for Apache Spark
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ### Apache Bahir Extensions for Apache Spark
29 |
30 | - [Current - 2.4.0-SNAPSHOT](/docs/spark/current/documentation)
31 | - [2.4.0](/docs/spark/2.4.0/documentation)
32 | - [2.3.4](/docs/spark/2.3.4/documentation)
33 | - [2.3.3](/docs/spark/2.3.3/documentation)
34 | - [2.3.2](/docs/spark/2.3.2/documentation)
35 | - [2.3.1](/docs/spark/2.3.1/documentation)
36 | - [2.3.0](/docs/spark/2.3.0/documentation)
37 | - [2.2.3](/docs/spark/2.2.3/documentation)
38 | - [2.2.2](/docs/spark/2.2.2/documentation)
39 | - [2.2.1](/docs/spark/2.2.1/documentation)
40 | - [2.2.0](/docs/spark/2.2.0/documentation)
41 | - [2.1.3](/docs/spark/2.1.3/documentation)
42 | - [2.1.2](/docs/spark/2.1.2/documentation)
43 | - [2.1.1](/docs/spark/2.1.1/documentation)
44 | - [2.1.0](/docs/spark/2.1.0/documentation)
45 | - [2.0.2](/docs/spark/2.0.2/documentation)
46 | - [2.0.1](/docs/spark/2.0.1/documentation)
47 | - [2.0.0](/docs/spark/2.0.0/documentation)
48 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-sql-cloudant.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Data Source for Apache CouchDB/Cloudant
4 | description: Spark Data Source for Apache CouchDB/Cloudant
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-sql-streaming-akka.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Structured Streaming Akka
4 | description: Spark Structured Streaming Akka
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-sql-streaming-mqtt.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Structured Streaming MQTT
4 | description: Spark Structured Streaming MQTT
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-streaming-akka.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming Akka
4 | description: Spark Streaming Akka
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-streaming-mqtt.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
 3 | title: Spark Streaming MQTT
 4 | description: Spark Streaming MQTT
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-streaming-pubnub.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
 3 | title: Spark Streaming PubNub
 4 | description: Spark Streaming PubNub
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-streaming-pubsub.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
 3 | title: Spark Streaming Google Pub-Sub
 4 | description: Spark Streaming Google Pub-Sub
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-streaming-twitter.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming Twitter
4 | description: Spark Streaming Twitter
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
--------------------------------------------------------------------------------
/site/docs/spark/templates/spark-streaming-zeromq.template:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Spark Streaming ZeroMQ
4 | description: Spark Streaming ZeroMQ
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
--------------------------------------------------------------------------------
/site/feed.xml:
--------------------------------------------------------------------------------
1 | ---
2 | layout: null
3 | ---
4 |
5 |
6 |
7 | {{ site.title | xml_escape }}
8 | {{ site.description | xml_escape }}
9 | {{ site.url }}{{ site.baseurl }}/
10 |
11 | {{ site.time | date_to_rfc822 }}
12 | {{ site.time | date_to_rfc822 }}
13 | Jekyll v{{ jekyll.version }}
14 | {% for post in site.posts limit:10 %}
15 | -
16 |
{{ post.title | xml_escape }}
17 | {{ post.content | xml_escape }}
18 | {{ post.date | date_to_rfc822 }}
19 | {{ post.url | prepend: site.baseurl | prepend: site.url }}
20 | {{ post.url | prepend: site.baseurl | prepend: site.url }}
21 | {% for tag in post.tags %}
22 | {{ tag | xml_escape }}
23 | {% endfor %}
24 | {% for cat in post.categories %}
25 | {{ cat | xml_escape }}
26 | {% endfor %}
27 |
28 | {% endfor %}
29 |
30 |
31 |
--------------------------------------------------------------------------------
/site/history.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: History
4 | description: Release History
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ## {{ site.data.project.name }} Downloads
29 |
30 | For a full list of releases, see
31 | github .
32 | Downloads are available on the
33 | [downloads page]({{ site.baseurl }}/download.html).
34 |
35 | ## 0.2.0 / 2015-11-10
36 | {: #v0-2-0}
37 |
38 | Our second release!
39 |
40 | New features
41 |
42 | * [FOO-911 ]
43 | Add a variant of `FooSchema` that does not cache sub-objects
44 | * [FOO-845 ]
45 | Derive `FOO` return type by a customizable policy
46 |
47 | ## 0.1.0 / 2015-09-25
48 | {: #v0-1-0}
49 |
50 | Our first release!
51 |
--------------------------------------------------------------------------------
/site/privacy-policy.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Privacy Policy
4 | description: Apache Bahir website privacy policy
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | ## {{ site.data.project.name }} Privacy Policy
29 |
30 |
31 |
32 | Information about your use of this website is collected using server access logs and a tracking cookie. The collected information consists of the following:
33 |
34 | The IP address from which you access the website;
35 | The type of browser and operating system you use to access our site;
36 | The date and time you access our site;
37 | The pages you visit; and
38 | The addresses of pages from where you followed a link to our site.
39 |
40 | Part of this information is gathered using a tracking cookie set by the [Google Analytics](http://www.google.com/analytics/) service and handled by Google as described in their [privacy policy](http://www.google.com/privacy.html). See your browser documentation for instructions on how to disable the cookie if you prefer not to share this data with Google.
41 |
42 | We use the gathered information to help us make our site more useful to visitors and to better understand how and when our site is used. We do not track or collect personally identifiable information or associate gathered data with any personally identifying information from other sources.
43 |
44 | By using this website, you consent to the collection of this data in the manner and for the purpose described above.
--------------------------------------------------------------------------------
/site/releases/spark/2.0.0/release-notes.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Bahir Release 2.0.0
4 | description: Release Notes for Apache Bahir 2.0.0
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
29 | # Apache Bahir 2.0 for Apache Spark 2.0
30 |
31 | ## Bug
32 |
33 | [BAHIR-13] - Update spark tags dependency
34 | [BAHIR-14] - Cleanup maven pom from Spark dependencies
35 | [BAHIR-15] - Enable RAT on Bahir builds
36 | [BAHIR-16] - Build issues due to log4j properties not found
37 | [BAHIR-18] - Include examples in Maven test build
38 | [BAHIR-23] - Build should fail on Checkstyle violation
39 | [BAHIR-24] - Fix MQTT Python code
40 | [BAHIR-38] - Spark-submit does not use latest locally installed Bahir packages
41 | [BAHIR-42] - Refactor sql-streaming-mqtt examples to follow other projects pattern
42 | [BAHIR-43] - Add missing apache license header to sql-mqtt file
43 | [BAHIR-44] - Add new sql-streaming-mqtt to distribution
44 |
45 | ## Improvement
46 |
47 | [BAHIR-36] - Update readme.md with build instructions
48 |
49 | ## New Feature
50 |
51 | [BAHIR-2] - Create initial build for Bahir components
52 | [BAHIR-39] - MQTT as a streaming source for SQL Streaming
53 |
54 | ## Task
55 |
56 | [BAHIR-17] - Prepare release based on Apache Spark 2.0.0-preview
57 | [BAHIR-22] - Add script to run examples
58 | [BAHIR-35] - Include Python code in the binary jars for use with "--packages ..."
59 | [BAHIR-37] - Prepare release based on Apache Spark 2.0.0
60 |
61 | ## Sub-task
62 |
63 | [BAHIR-19] - Create Bahir source distribution
64 | [BAHIR-20] - Create release script
65 | [BAHIR-21] - Create script to change build between scala 2.10 and 2.11
66 | [BAHIR-28] - Add documentation for streaming-akka connector
67 | [BAHIR-29] - Add documentation for streaming-mqtt connector
68 | [BAHIR-30] - Add documentation for streaming-twitter connector
69 | [BAHIR-31] - Add documentation for streaming-zeromq connector
70 |
--------------------------------------------------------------------------------
/site/releases/spark/2.0.1/release-notes.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Bahir Release 2.0.1
4 | description: Release Notes for Apache Bahir 2.0.1
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 | # Apache Bahir 2.0.1 for Apache Spark 2.0.1
29 |
30 | ## Bug
31 |
32 | [BAHIR-52] - Update extension documentation formats for code sections
33 | [BAHIR-69] - Release script fails intermittently when publishing to staging maven repo
34 |
35 | ## Improvement
36 |
37 | [BAHIR-51] - Add additional MQTT options/parameters to MQTTInputDStream and MqttStreamSource
38 | [BAHIR-53] - Add additional MQTT options/parameters to MQTTInputDStream
39 | [BAHIR-61] - Enable release script to publish release by a specific rc tag
40 |
41 | ## Task
42 |
43 | [BAHIR-62] - Prepare release based on Apache Spark 2.0.1
44 |
--------------------------------------------------------------------------------
/site/releases/spark/2.0.2/release-notes.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Bahir Release 2.0.2
4 | description: Release Notes for Apache Bahir 2.0.2
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
29 | # Apache Bahir 2.0.2 for Apache Spark 2.0.2
30 |
31 | ## Task
32 |
33 | [BAHIR-82] - Prepare release based on Apache Spark 2.0.2
34 |
35 | ## Sub-Task
36 |
37 | [BAHIR-64] - Add test that Akka streaming connector can receive data
38 | [BAHIR-69] - Clean build between different scala versions
39 |
40 | ## Test
41 |
42 | [BAHIR-83] - Flaky test in BasicMQTTSourceSuite
43 | [BAHIR-84] - Build log flooded with test log messages
44 |
--------------------------------------------------------------------------------
/site/releases/spark/2.1.0/release-notes.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
3 | title: Bahir Release 2.1.0
4 | description: Release Notes for Apache Bahir 2.1.0
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
29 | # Apache Bahir 2.1 for Apache Spark 2.1
30 |
31 | ## Task
32 |
33 | [BAHIR-87] - Prepare release based on Apache Spark 2.1.0
34 | [MINOR] - Update Scaladoc in MQTTWordCount example
35 | [MINOR] - Fix Maven artifact IDs in README.md files
36 |
37 |
--------------------------------------------------------------------------------
/site/releases/spark/2.3.3/release-notes.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
 3 | title: Bahir Release 2.3.3
 4 | description: Release Notes for Apache Bahir 2.3.3
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
29 | # Apache Bahir for Apache Spark 2.3.3
30 |
31 | Below are all the enhancements and bug fixes for the 2.3.x releases.
32 |
33 |
34 | Release Notes - Bahir - Version Spark-2.3.0
35 |
36 | * [BAHIR-46] - Handle re-delivery of message in MQTT structured streaming source.
37 | * [BAHIR-104] - MQTT Dstream returned by the new multi topic support API is not a pairRDD
38 | * [BAHIR-128] - Test failing sporadically in sql-cloudant's CloudantChangesDFSuite
39 | * [BAHIR-139] - Scala-maven-plugin does not respect Java compile level
40 | * [BAHIR-152] - License header not enforced for Java sources
41 | * [BAHIR-159] - Our project needs to use org.apache.bahir:spark-sql-streaming-mqtt_2.11:2.2.0, which has dependency conflict problem.
42 | * [BAHIR-165] - The avro messages to streaming-mqtt gives negative value.
43 | * [BAHIR-66] - Add test that ZeroMQ streaming connector can receive data
44 | * [BAHIR-164] - Spark Streaming with MQTT fails with Spark 2.3.0
45 | * [BAHIR-166] - Migrate akka sql streaming source to datasource v2 API
46 | * [BAHIR-49] - Add MQTTSink to SQL Streaming MQTT.
47 | * [BAHIR-182] - Create PubNub extension for Apache Spark Streaming
48 | * [BAHIR-137] - Load performance improvements for _changes API in sql-cloudant
49 | * [BAHIR-154] - Refactor sql-cloudant to use Cloudant's java-cloudant features
50 | * [BAHIR-181] - username and password should be available for pyspark when using mqtt streaming
51 | * [BAHIR-123] - Fix errors to support the latest version of Play JSON library for sql-cloudant
52 | * [BAHIR-138] - Fix sql-cloudant deprecation messages
53 | * [BAHIR-150] - Jenkins PR builder should not abort build after first failed module
54 |
55 |
56 |
57 |
58 |
59 |
60 |
61 |
62 |
63 |
64 |
65 |
66 |
67 |
68 |
69 |
70 |
71 |
72 |
73 |
74 |
75 |
76 |
77 |
78 |
79 |
80 |
81 |
82 |
83 |
84 |
85 |
86 |
87 |
88 |
89 |
--------------------------------------------------------------------------------
/site/releases/spark/2.3.4/release-notes.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: page
 3 | title: Bahir Release 2.3.4
 4 | description: Release Notes for Apache Bahir 2.3.4
5 | group: nav-right
6 | ---
7 |
25 |
26 | {% include JB/setup %}
27 |
28 |
29 | # Apache Bahir for Apache Spark 2.3.4
30 |
31 | Below are all the enhancements and bug fixes for the 2.3.x releases.
32 |
33 |
34 | Release Notes - Bahir - Version Spark-2.3.0
35 |
36 | * [BAHIR-46] - Handle re-delivery of message in MQTT structured streaming source.
37 | * [BAHIR-104] - MQTT Dstream returned by the new multi topic support API is not a pairRDD
38 | * [BAHIR-128] - Test failing sporadically in sql-cloudant's CloudantChangesDFSuite
39 | * [BAHIR-139] - Scala-maven-plugin does not respect Java compile level
40 | * [BAHIR-152] - License header not enforced for Java sources
41 | * [BAHIR-159] - Our project needs to use org.apache.bahir:spark-sql-streaming-mqtt_2.11:2.2.0, which has dependency conflict problem.
42 | * [BAHIR-165] - The avro messages to streaming-mqtt gives negative value.
43 | * [BAHIR-66] - Add test that ZeroMQ streaming connector can receive data
44 | * [BAHIR-164] - Spark Streaming with MQTT fails with Spark 2.3.0
45 | * [BAHIR-166] - Migrate akka sql streaming source to datasource v2 API
46 | * [BAHIR-49] - Add MQTTSink to SQL Streaming MQTT.
47 | * [BAHIR-182] - Create PubNub extension for Apache Spark Streaming
48 | * [BAHIR-137] - Load performance improvements for _changes API in sql-cloudant
49 | * [BAHIR-154] - Refactor sql-cloudant to use Cloudant's java-cloudant features
50 | * [BAHIR-181] - username and password should be available for pyspark when using mqtt streaming
51 | * [BAHIR-123] - Fix errors to support the latest version of Play JSON library for sql-cloudant
52 | * [BAHIR-138] - Fix sql-cloudant deprecation messages
53 | * [BAHIR-150] - Jenkins PR builder should not abort build after first failed module
54 |
--------------------------------------------------------------------------------