├── .gitignore
├── .gitmodules
├── LICENSE.md
├── README.md
├── docker-compose.yml
├── env
│   └── default-rest.env
├── logstash
│   └── logstash.conf
└── usecases
    ├── bbc
    │   ├── annotations.js
    │   ├── fulltext.js
    │   ├── map.js
    │   ├── nominatim.js
    │   └── rss.js
    ├── slackbot
    │   ├── search.js
    │   ├── slack-reply.js
    │   └── slack-source.js
    └── twitter
        ├── render.js
        ├── sentiment.js
        └── twitter.js

/.gitignore:
--------------------------------------------------------------------------------
data/
rest.env
docker-compose-deploy.yml
Makefile
--------------------------------------------------------------------------------
/.gitmodules:
--------------------------------------------------------------------------------
[submodule "exynize-rest"]
	path = exynize-rest
	url = git@github.com:Exynize/exynize-rest.git
[submodule "exynize-ui"]
	path = exynize-ui
	url = git@github.com:Exynize/exynize-ui.git
[submodule "exynize-runner"]
	path = exynize-runner
	url = git@github.com:Exynize/exynize-runner.git
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
# License

The Exynize platform, including all of its components and all of its derivatives, is available under different
licensing options designed to accommodate the needs of various users.

## Community license (GPL-3.0)

The Exynize platform licensed under the GNU General Public License v3 (GPL-3.0) is appropriate for the development
of applications based on the Exynize platform, provided you can comply with the terms and conditions
of the GNU General Public License v3 (GPL-3.0).

## Commercial license

The Exynize platform licensed under the commercial license is appropriate for the development of proprietary/commercial
software where you do not want to share any source code with third parties or otherwise cannot comply with the terms
of the GPL-3.0.
To obtain the commercial license, please contact team@exynize.com.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
# Exynize platform

> Exynize platform - easy creation of complex processing pipelines for your data.

## About Exynize platform

The Exynize platform aims to simplify the workflow and allow rapid creation of data processing pipelines and visualisations.
The current version of the platform allows:

- constructing pipelines right in your browser with very little effort,
- writing processing components as if you were dealing with a single data item,
- re-using existing processing modules in new pipelines,
- creating real-time processing and visualisation without thinking about doing real-time at all,
- spending time on doing actual work, not on fiddling with scaffolding.

More info on the platform, as well as some demos of its capabilities, can be found in the following article on Medium:

> [Building data processing and visualisation pipelines in the browser with Exynize](https://medium.com/the-data-experience/building-data-processing-and-visualisation-pipelines-in-the-browser-with-exynize-372ab15e848c#.cq73g7k7q)

## Getting started

This is the deployment repository for the Exynize platform.
It contains a docker-compose file that can be used to easily set up your own copy of the Exynize platform.

If you are interested in the separate platform components, they can be found in the following repositories:

- [Exynize UI](https://github.com/Exynize/exynize-ui)
- [Exynize REST](https://github.com/Exynize/exynize-rest)
- [Exynize Runner](https://github.com/Exynize/exynize-runner)

### Requirements

For the Exynize platform to function properly, you'll need to have the following installed:

- Docker v1.10 or later
- Docker Compose v1.6 or later

### Installation

1. Clone the repository and cd into the new folder: `git clone git@github.com:Exynize/exynize-platform.git && cd exynize-platform`
2. Execute `git submodule init && git submodule update` to get the latest sources for the platform components
3. Execute `docker-compose up` to start the Exynize platform
4. Navigate to `http://your.docker.address` in your browser to see the platform UI

### Configuration

To apply your custom config, follow these steps:

1. Copy `./env/default-rest.env` and edit the variables to your liking
2. Add a new entry for your file to the `rest.env_file` section in `docker-compose.yml`, so that it looks like this:

   ```yml
   rest:
     build: exynize-rest
     depends_on:
       - rdb
       - rabbit
     links:
       - rdb
       - rabbit
     volumes:
       - ./data/static:/opt/app/src/static
     environment:
       - NODE_ENV=production
     env_file:
       - ./env/default-rest.env
       - ./my-rest.env
   ```

3. Restart the containers

### RethinkDB admin UI

By default you can access the RethinkDB admin UI at `http://your.docker.address:8080`.
This can be disabled by commenting out the `rdb.ports` entry in `docker-compose.yml`.

### RabbitMQ admin UI

By default you can access the RabbitMQ admin UI at `http://your.docker.address:8081`.
This can be disabled by commenting out the `rabbit.ports` entry in `docker-compose.yml`; a sketch of both entries disabled is shown after the License section below.

## License

Dual licensed under GPL-3.0 and a commercial license.
See the LICENSE.md file for more details.
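As a quick reference for the two admin UI sections above, here is a minimal sketch of how the relevant entries in `docker-compose.yml` look with both admin UIs disabled (the service definitions are taken from this repository's compose file; everything not shown stays unchanged):

```yml
  # database
  rdb:
    image: rethinkdb
    volumes:
      - ./data/db:/data
    # ports:
    #   - "8080:8080"
  # message bus
  rabbit:
    image: rabbitmq:management
    # ports:
    #   - "8081:15672"
```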

--------------------------------------------------------------------------------
/docker-compose.yml:
--------------------------------------------------------------------------------
version: '2'
services:
  # Exynize UI
  ui:
    build: exynize-ui
    ports:
      - "80:3000"
    environment:
      - NODE_ENV=production
    logging:
      driver: gelf
      options:
        gelf-address: udp://localhost:12201
        tag: "{{.ImageName}}/{{.Name}}/{{.ID}}"
  # Exynize REST
  rest:
    build: exynize-rest
    depends_on:
      - logstash
      - rdb
      - rabbit
    volumes:
      - ./data/static:/opt/app/src/static
    environment:
      - NODE_ENV=production
    env_file:
      - ./env/default-rest.env
    logging:
      driver: gelf
      options:
        gelf-address: udp://localhost:12201
        tag: "{{.ImageName}}/{{.Name}}/{{.ID}}"
  # Exynize runner
  runner:
    build: exynize-runner
    depends_on:
      - logstash
      - rabbit
    environment:
      - NODE_ENV=production
      - RABBITMQ_NODENAME=rabbit
    logging:
      driver: gelf
      options:
        gelf-address: udp://localhost:12201
        tag: "{{.ImageName}}/{{.Name}}/{{.ID}}"

  # database
  rdb:
    image: rethinkdb
    volumes:
      - ./data/db:/data
    ports:
      - "8080:8080"
  # message bus
  rabbit:
    image: rabbitmq:management
    ports:
      - "8081:15672"

  # ELK logging stack
  elasticsearch:
    image: elasticsearch
    command: elasticsearch -Des.network.host=0.0.0.0
  logstash:
    image: logstash
    command: logstash -f /etc/logstash/conf.d/logstash.conf
    volumes:
      - ./logstash:/etc/logstash/conf.d
    ports:
      - "12201:12201"
      - "12201:12201/udp"
  kibana:
    image: kibana
    ports:
      - "5601:5601"
    environment:
      - ELASTICSEARCH_URL=http://elasticsearch:9200
--------------------------------------------------------------------------------
/env/default-rest.env:
--------------------------------------------------------------------------------
EXYNIZE_DB_HOST=rdb
# auth password salt
EXYNIZE_AUTH_SALT=set-your-salt-here
# JWT secret
EXYNIZE_JWT_SECRET=set-your-secret-here
# hostname
EXYNIZE_HOST=docker.dev
# rabbit node
RABBITMQ_NODENAME=rabbit
# mail config
EXYNIZE_MAIL_VALIDATION=0
EXYNIZE_MAIL_HOST=mail.server.net
EXYNIZE_MAIL_PORT=465
EXYNIZE_MAIL_SECURE=true
EXYNIZE_MAIL_USER=bot@server.com
EXYNIZE_MAIL_PASS=password
--------------------------------------------------------------------------------
/logstash/logstash.conf:
--------------------------------------------------------------------------------
input {
  gelf {}
}

filter {
  if [short_message] =~ "error" {
    mutate { replace => { type => "error" } }
  } else if [short_message] =~ "info" {
    mutate { replace => { type => "info" } }
  } else {
    mutate { replace => { type => "other" } }
  }
}

output {
  elasticsearch {
    hosts => ['elasticsearch']
  }
}
--------------------------------------------------------------------------------
/usecases/bbc/annotations.js:
--------------------------------------------------------------------------------
import _ from 'lodash';
import request from 'request';

// FOX NLP tool API url
const foxUrl = 'http://fox-demo.aksw.org/call/ner/entities';

export default (data) => Rx.Observable.create(obs => {
    const json = {
        input: data.text,
        type: 'text',
        task: 'ner',
        output: 'JSON-LD',
    };
    request({
        method: 'POST',
        url: foxUrl,
        headers: {
            'Content-Type': 'application/json',
        },
        body: JSON.stringify(json),
    }, (err, res, body) => {
        if (err) {
            obs.onError(err);
            return;
        }

        if (res && res.statusCode !== 200) {
            obs.onError(`Error code: ${res.statusCode}, ${res.statusMessage}`);
            return;
        }

        const result = JSON.parse(body);
        const entries = result['@graph'] ? result['@graph'] : [];
        const annotations = entries.map(it => ({
            types: it['@type'] ? it['@type']
                .map(t => t.indexOf(':') !== -1 ? t.split(':')[1] : t)
                .map(t => t.toLowerCase())
                .map(_.capitalize)
                .filter(t => t !== 'Annotation') : [],
            name: it['ann:body'],
            beginIndex: typeof it.beginIndex === 'string' ? [it.beginIndex] : it.beginIndex,
            endIndex: typeof it.endIndex === 'string' ? [it.endIndex] : it.endIndex,
        }));
        data.annotations = annotations;
        obs.onNext(data);
        obs.onCompleted();
    });
});
--------------------------------------------------------------------------------
/usecases/bbc/fulltext.js:
--------------------------------------------------------------------------------
import request from 'superagent';
import cheerio from 'cheerio';

const cleanText = text => text
    .replace(/[\n\r\t]+/g, ' ')
    .replace(/\s+/g, ' ')
    .replace(/(\w)\.([A-Z0-9_])/g, '$1. $2');

const cleanHtml = html => html
    .replace(/[\n\r\t]+/g, ' ')
    // NOTE: the two tag-matching patterns below were not recoverable from the
    // source; stripping <br> tags and HTML comments is an assumed reconstruction
    .replace(/<br\s*\/?>/g, ' ')
    .replace(/<!--[\s\S]*?-->/g, ' ')
    .replace(/\s+/g, ' ');

export default (data) => {
    return Rx.Observable.create(obs => {
        const {link} = data;
        request
            .get(link)
            .end((err, res) => {
                if (err) {
                    return obs.onError(err);
                }

                const $ = cheerio.load(res.text);
                $('script').remove();
                $('object').remove();
                // try to extract only the article text (BBC news selector)
                let obj = $('.story-body__inner');
                if (!obj || !obj.length) {
                    obj = $('body');
                }
                // cleanup
                $('figure', obj).remove();
                // get html and text
                const resHtml = cleanHtml(obj.html());
                const resText = cleanText(obj.text());

                // assign to data
                data.text = resText;
                data.html = resHtml;

                // send
                obs.onNext(data);
                obs.onCompleted();
            });
    });
};
--------------------------------------------------------------------------------
/usecases/bbc/map.js:
--------------------------------------------------------------------------------
import L from 'leaflet';
import 'leaflet/dist/leaflet.css';

const styleGray = '#cccccc';
const styleGreen = '#5cb85c';
const styleRed = '#d9534f';

const mapConfig = {
    minZoom: 2,
    maxZoom: 20,
    layers: [
        L.tileLayer(
            'http://{s}.tile.openstreetmap.org/{z}/{x}/{y}.png',
            {
                attribution: '© OpenStreetMap' +
                    ' contributors, CC-BY-SA',
            }
        )
    ],
    attributionControl: false,
};

// NOTE: the original markup around title and description was not recoverable;
// the <b>/<p> wrappers are an assumed reconstruction
const popup = (it) => `
    <b>${it.title}</b>
    <p>${it.description}</p>
`;

export default () => React.createClass({
    componentDidMount() {
        // init
        this.map = L.map(this.refs.map, mapConfig);
        this.map.setView([-10, 10], 2);
    },
    componentWillReceiveProps(props) {
        // render items
        props.data.forEach(this.renderItem);
    },
    renderItem(it) {
        if (!it.places) {
            return;
        }
        // go over locations
        it.places.forEach((loc) => {
            // do not render locations with -1 as lat or lon
            if (loc.lat === -1 || loc.lon === -1) {
                return;
            }

            const color = it.sentiment.score === 0 ? styleGray :
                it.sentiment.score > 0 ? styleGreen : styleRed;
            const marker = L.circle([loc.lat, loc.lon], 100000, {
                stroke: false,
                fillColor: color,
                fillOpacity: 0.8,
                className: 'leaflet-marker-animated',
            }).addTo(this.map);
            marker.bindPopup(popup(it));
        });
    },

    render() {
        // NOTE: the original JSX was not recoverable; a bare full-height container
        // div with ref="map" (used in componentDidMount above) is assumed here
        return (
            <div ref="map" style={{height: '100%'}}/>
        );
    },
});
--------------------------------------------------------------------------------
/usecases/bbc/nominatim.js:
--------------------------------------------------------------------------------
import _ from 'lodash';
import nominatim from 'nominatim';

const observableSearch = Rx.Observable.fromNodeCallback(nominatim.search);

export default (inputData) => Rx.Observable.return(inputData)
    .flatMap(data => {
        // if there are no annotations - just pass the data on unchanged
        if (!data.annotations) {
            return Rx.Observable.return(data);
        }

        if (!data.places) {
            data.places = [];
        }

        return Rx.Observable.merge(data.annotations.map(annotation => {
            if (_.includes(annotation.types, 'Location')) {
                return observableSearch({q: annotation.name})
                    .map(([opt, results]) => {
                        if (results && results[0]) {
                            return {
                                name: opt.q,
                                lat: results[0].lat,
                                lon: results[0].lon,
                            };
                        }

                        return undefined;
                    });
            }

            return Rx.Observable.return(undefined);
        }))
            .filter(loc => loc !== undefined)
            .reduce((acc, place) => [place, ...acc], [])
            .map(places => {
                data.places = places;
                return data;
            });
    });
--------------------------------------------------------------------------------
/usecases/bbc/rss.js:
--------------------------------------------------------------------------------
import FeedParser from 'feedparser';
import request from 'request';

export default (url, obs) => {
    const req = request(url);
    const feedparser = new FeedParser();

    // handle errors
    req.on('error', err => obs.onError(err));
    feedparser.on('error', err => obs.onError(err));

    // pipe request into feedparser
    req.on('response', function(res) {
        const stream = this;
        if (res.statusCode !== 200) {
            return this.emit('error', new Error('Bad status code'));
        }
        stream.pipe(feedparser);
    });

    // process articles
    feedparser.on('readable', function() {
        const stream = this;
        let item;
        while (item = stream.read()) {
            obs.onNext(item);
        }
        // finish
        // obs.onCompleted();
    });
    // trigger end once done
    feedparser.on('end', () => obs.onCompleted());
};
--------------------------------------------------------------------------------
/usecases/slackbot/search.js:
--------------------------------------------------------------------------------
import {DDG} from 'node-ddg-api';

const ddg = new DDG('exynize-ddg');

export default (data) => Rx.Observable
    .return(data)
    .flatMap(input => Rx.Observable.create(obs => {
        ddg.instantAnswer(input.text, {skip_disambig: '0'}, (err, res) => {
            if (err) {
                obs.onError(err);
                return;
            }

            obs.onNext(Object.assign({}, data, res));
            obs.onCompleted();
        });
    }));
--------------------------------------------------------------------------------
/usecases/slackbot/slack-reply.js:
--------------------------------------------------------------------------------
import Botkit from 'botkit';

export default (token, data) => Rx.Observable.create(obs => {
    const controller = Botkit.slackbot({debug: false});
    const bot = controller.spawn({token}).startRTM(err => {
        if (err) {
            obs.onError(err);
            return;
        }

        const result = data.Abstract ? data.Abstract :
            data.RelatedTopics[0] ? data.RelatedTopics[0].Text :
            false;

        bot.reply({
            type: data.type,
            channel: data.channel,
            user: data.user,
            text: data.text,
            ts: data.ts,
            team: data.team,
            event: data.event,
        }, result ? `Here's what I found: \n> ${result}` : `Sorry, couldn't find anything :(`);
        obs.onCompleted();
    });

    return () => {
        bot.closeRTM();
    };
});
--------------------------------------------------------------------------------
/usecases/slackbot/slack-source.js:
--------------------------------------------------------------------------------
import Botkit from 'botkit';

export default (token, obs) => {
    const controller = Botkit.slackbot({debug: false});
    controller.spawn({token}).startRTM(err => err && obs.onError(err));
    controller.on('direct_mention', (bot, message) => {
        obs.onNext(message);
    });
};
--------------------------------------------------------------------------------
/usecases/twitter/render.js:
--------------------------------------------------------------------------------
// NOTE: most of the static JSX markup in this file was not recoverable; the
// dynamic expressions and comments are original, while the surrounding
// Bootstrap-style tags and class names are an assumed reconstruction
const renderTweet = (tweet) => (
    <div className="media" key={tweet.id}>
        <div className="media-left">
            <span className={'label label-' + (
                tweet.sentiment.score > 0 ? 'success' : 'danger'
            )}>
                {tweet.sentiment.score}
            </span>
        </div>
        <div className="media-body">
            <h4 className="media-heading">{tweet.username}</h4>
            <p>{tweet.text}</p>
        </div>
    </div>
);

const overallSentiment = (collection) => (
    <div>
        <span>Positive: </span>
        <span>
            {collection.reduce((sum, it) => sum += it.sentiment.score > 0 ? it.sentiment.score : 0, 0)}
        </span>
        <span>Negative: </span>
        <span>
            {collection.reduce((sum, it) => sum += it.sentiment.score < 0 ? it.sentiment.score : 0, 0)}
        </span>
        <span>Total: </span>
        <span>
            {collection.reduce((sum, it) => sum += it.sentiment.score, 0)}
        </span>
    </div>
);

export default () => React.createClass({
    render() {
        const iphone = this.props.data.filter(tweet => tweet.text.toLowerCase().indexOf('iphone') !== -1);
        const galaxy = this.props.data.filter(tweet => tweet.text.toLowerCase().indexOf('galaxy') !== -1);

        return (
            <div className="row">
                {/* iphone col */}
                <div className="col-md-6">
                    <h3>iPhone 6s</h3>
                    {overallSentiment(iphone)}
                    {iphone.slice(0, 10).map(renderTweet)}
                </div>

                {/* galaxy col */}
                <div className="col-md-6">
                    <h3>Galaxy S6</h3>
                    {overallSentiment(galaxy)}
                    {galaxy.slice(0, 10).map(renderTweet)}
                </div>
            </div>
        );
    }
});
--------------------------------------------------------------------------------
/usecases/twitter/sentiment.js:
--------------------------------------------------------------------------------
import sentiment from 'sentiment';

export default (data) => {
    const res = sentiment(data.text);
    data.sentiment = {
        score: res.score,
        comparative: res.comparative,
    };
    return Rx.Observable.return(data);
};
--------------------------------------------------------------------------------
/usecases/twitter/twitter.js:
--------------------------------------------------------------------------------
import Twit from 'twit';

export default (
    consumer_key,
    consumer_secret,
    access_token,
    access_token_secret,
    filter_lang,
    keyword,
    obs
) => {
    const T = new Twit({
        consumer_key,
        consumer_secret,
        access_token,
        access_token_secret,
    });

    const stream = T.stream('statuses/filter', {track: keyword});
    stream.on('tweet', (tweet) => {
        const data = {
            id: tweet.id,
            created_at: tweet.created_at,
            text: tweet.text,
            username: tweet.user.name,
            lang: tweet.lang,
            url: `https://twitter.com/${tweet.user.screen_name}/status/${tweet.id_str}`,
        };
        if (filter_lang === data.lang) {
            obs.onNext(data);
        }
    });
    stream.on('error', error => obs.onError(error));
};
--------------------------------------------------------------------------------
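The use-case components above appear to follow a single runner contract: source components receive their parameters plus an observer, processor components receive a data item and return an RxJS observable, and render components return a React component that reads its `data` prop; `Rx` and `React` are referenced as globals provided by the platform. For experimenting with a pipeline outside the platform, a minimal local wiring sketch for the Twitter use case could look like the following (assumptions, not part of this repository: RxJS v4 installed as `rx`, Babel or similar handling the ES-module syntax, and placeholder Twitter credentials):

```js
// Local test harness sketch - NOT part of the platform runner.
import Rx from 'rx';
global.Rx = Rx; // the components reference Rx as a global, as they do on the platform

import twitterSource from './usecases/twitter/twitter';
import addSentiment from './usecases/twitter/sentiment';

// wrap the source component in an observable and pass the observer through,
// which is roughly what the platform runner does for source components
const tweets = Rx.Observable.create(obs => twitterSource(
    'consumer-key-here',            // placeholder credentials
    'consumer-secret-here',
    'access-token-here',
    'access-token-secret-here',
    'en',                           // filter_lang
    'iphone,galaxy',                // keyword(s) to track
    obs
));

// run every tweet through the sentiment processor and print the result
tweets
    .flatMap(addSentiment)
    .subscribe(
        tweet => console.log(tweet.username, tweet.sentiment.score, tweet.text),
        err => console.error('stream error:', err)
    );
```

The same pattern applies to the BBC and Slack use cases: compose the source with each processor via `flatMap`, then feed the resulting stream into the render component's `data` prop.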