├── .gitignore
├── LICENSE
├── README.md
├── apache-bigdata
├── 1.flask-hadoop
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── core-site.xml
│ ├── hdfs-site.xml
│ ├── lowercase.py
│ ├── mapred-site.xml
│ ├── screenshot1.png
│ ├── screenshot2.png
│ ├── ssh_config
│ ├── start-all.sh
│ ├── text.txt
│ ├── wordcount.py
│ └── yarn-site.xml
├── 2.flask-kafka
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── producer.py
│ ├── server.properties
│ └── supervisord.conf
├── 3.flask-hadoop-hive
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── core-site.xml
│ ├── hdfs-site.xml
│ ├── mapred-site.xml
│ ├── sample.txt
│ ├── ssh_config
│ ├── start-all.sh
│ └── yarn-site.xml
├── 4.Pyspark-jupyter-hadoop
│ ├── Dockerfile
│ ├── Iris.csv
│ ├── README.md
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── core-site.xml
│ ├── hadoop-storage.png
│ ├── hdfs-site.xml
│ ├── mapred-site.xml
│ ├── ssh_config
│ ├── start-all.sh
│ ├── test-iris-hadoop.ipynb
│ ├── test-pyspark.ipynb
│ └── yarn-site.xml
├── 5.flink-jupyter
│ ├── Dockerfile
│ ├── KEYS
│ ├── README.md
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── dashboard.png
│ ├── jobmanager.sh
│ ├── taskmanager.png
│ └── taskmanager.sh
└── 6.redis-storm
│ ├── Dockerfile
│ ├── README.md
│ ├── conf
│ └── storm.yaml
│ ├── docker-compose.yml
│ ├── screenshot.png
│ └── tasks
│ └── wordcount
│ ├── .gitignore
│ ├── README.md
│ ├── config.json
│ ├── fabfile.py
│ ├── project.clj
│ ├── ps.py
│ ├── src
│ ├── bolts
│ │ ├── __init__.py
│ │ └── wordcount.py
│ └── spouts
│ │ ├── __init__.py
│ │ └── words.py
│ ├── start.sh
│ ├── supervisord.conf
│ ├── supervisord.log
│ ├── supervisord.pid
│ ├── topologies
│ └── wordcount.py
│ └── virtualenvs
│ └── wordcount.txt
├── basic-backend
├── 1.flask-hello
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── eventlet.sh
│ └── requirements.txt
├── 2.flask-mongodb
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── eventlet.sh
│ └── requirements.txt
├── 3.flask-rest-api
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── eventlet.sh
│ └── requirements.txt
├── 4.flask-redis-pubsub
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── eventlet.sh
│ ├── gunicorn.pid
│ └── requirements.txt
├── 5.flask-mysql-rest-api
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── eventlet.sh
│ ├── requirements.txt
│ └── sample.sql
├── 6.flask-postgres-rest-api
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── eventlet.sh
│ ├── requirements.txt
│ └── sample.sql
├── 7.flask-elasticsearch
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ ├── down
│ │ └── supervisord.conf
│ ├── eventlet.sh
│ └── requirements.txt
├── 8.flask-gunicorn-logstash
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── elasticsearch.yml
│ ├── gunicorn.conf
│ ├── kibana.yml
│ ├── logstash.conf
│ └── supervisord.conf
└── 9.mlflow-nginx
│ ├── .htpasswd
│ ├── Dockerfile
│ ├── README.md
│ ├── compose
│ ├── bash
│ ├── build
│ ├── develop
│ ├── docker-compose.yml
│ └── down
│ ├── mlflow-config.conf
│ ├── mlflow.png
│ └── mlruns
│ └── 0
│ ├── 97a8d84dedce44dbba3f3a60453f1d49
│ ├── artifacts
│ │ └── test.txt
│ ├── meta.yaml
│ ├── metrics
│ │ └── foo
│ └── params
│ │ └── param1
│ └── meta.yaml
├── basic
├── 1.autopep8
│ ├── README.md
│ └── malaya
│ │ ├── __init__.py
│ │ ├── main.py
│ │ ├── num2word.py
│ │ ├── sentiment.py
│ │ ├── tatabahasa.py
│ │ ├── topic.py
│ │ ├── utils.py
│ │ └── word2vec.py
└── 2.graph-dependencies
│ ├── README.md
│ ├── malaya-graph.png
│ ├── malaya.dot
│ ├── malaya.svg
│ ├── malaya
│ ├── __init__.py
│ ├── main.py
│ ├── num2word.py
│ ├── sentiment.py
│ ├── tatabahasa.py
│ ├── topic.py
│ ├── utils.py
│ └── word2vec.py
│ ├── pyan.py
│ └── pyan
│ ├── __init__.py
│ ├── analyzer.py
│ ├── anutils.py
│ ├── main.py
│ ├── node.py
│ ├── visgraph.py
│ └── writers.py
├── misc
├── elasticsearch-cerebro
│ ├── Dockerfile
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── elasticsearch.yml
│ └── kibana.yml
├── elasticsearch-kibana
│ ├── Dockerfile
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── elasticsearch.yml
│ └── kibana.yml
├── hadoop-cluster-luigi
│ ├── core-site.xml
│ ├── docker-compose.yml
│ ├── hdfs-site.xml
│ ├── mapred-site.xml
│ ├── master
│ │ ├── Dockerfile
│ │ ├── luigi.cfg
│ │ └── start-all.sh
│ ├── slave
│ │ ├── Dockerfile
│ │ └── start-all.sh
│ ├── slaves
│ ├── ssh_config
│ └── yarn-site.xml
├── jupyter-notebook
│ ├── Dockerfile
│ ├── README.md
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── jupyter.png
│ └── requirements.txt
├── jupyterhub-github
│ ├── docker-compose.yml
│ └── jupyterhub
│ │ ├── Dockerfile
│ │ ├── jupyterhub_config.py
│ │ ├── jupyterhub_config_github.py
│ │ ├── requirements.txt
│ │ └── start.sh
├── jupyterhub
│ ├── Dockerfile
│ ├── README.md
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── jupyterhub.png
│ └── requirements.txt
├── kafka-cluster
│ ├── .gitignore
│ ├── Dockerfile
│ ├── README.md
│ ├── docker-compose.yml
│ ├── jupyter
│ │ ├── Dockerfile
│ │ ├── test-dynamic-group-consumer.ipynb
│ │ └── test-manual-partition-consumer.ipynb
│ ├── kafka_2.11-2.0.0.tgz
│ ├── server.properties
│ ├── server2.properties
│ └── server3.properties
└── storm
│ ├── README.md
│ ├── docker-compose.yml
│ ├── screenshot
│ ├── 1.png
│ ├── 2.png
│ └── 3.png
│ ├── storm
│ ├── Dockerfile
│ ├── conf
│ │ └── storm.yaml
│ └── docker-entrypoint.sh
│ └── streamparse
│ ├── Dockerfile
│ ├── entrypoint.sh
│ └── tasks
│ └── wordcount
│ ├── .gitignore
│ ├── README.md
│ ├── config.json
│ ├── fabfile.py
│ ├── project.clj
│ ├── ps.py
│ ├── src
│ ├── bolts
│ │ ├── __init__.py
│ │ └── wordcount.py
│ └── spouts
│ │ ├── __init__.py
│ │ └── words.py
│ ├── topologies
│ └── wordcount.py
│ └── virtualenvs
│ └── wordcount.txt
├── piping
├── 1.sentiment-tweetpy-elasticsearch
│ ├── Dockerfile
│ ├── README.md
│ ├── compose
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── elasticsearch.yml
│ ├── kibana.png
│ ├── kibana.yml
│ ├── sentiment
│ │ ├── bayes.p
│ │ ├── bayes_bm.p
│ │ ├── eventlet.sh
│ │ ├── server.py
│ │ ├── stop-word-kerulnet
│ │ ├── tatabahasa.py
│ │ ├── tfidf.p
│ │ └── tfidf_bm.p
│ ├── supervisord.conf
│ └── twitter-streaming.py
├── 2.luigi-crawler-sentiment-elasticsearch
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── core.py
│ ├── crawling.py
│ ├── dependency.png
│ ├── download_nltk.py
│ ├── elasticsearch.yml
│ ├── kibana.png
│ ├── kibana.yml
│ ├── luigi.png
│ ├── sentiment
│ │ ├── bayes.p
│ │ ├── bayes_bm.p
│ │ ├── stop-word-kerulnet
│ │ ├── tfidf.p
│ │ └── tfidf_bm.p
│ ├── supervisord.conf
│ └── tatabahasa.py
└── 3.airflow-elasticsearch
│ ├── Dockerfile
│ ├── README.md
│ ├── airflow.cfg
│ ├── app.py
│ ├── bash
│ ├── big-text.txt
│ ├── dags
│ ├── sentiment_to_elastic.py
│ ├── test_print.py
│ └── test_xcom.py
│ ├── docker-compose.yml
│ ├── elasticsearch.yml
│ ├── fast-text-sentiment.json
│ ├── screenshot
│ ├── 1.png
│ ├── 2.png
│ ├── 3.png
│ ├── 4.png
│ ├── 5.png
│ └── 6.png
│ ├── sentiment.pb
│ ├── start.sh
│ ├── supervisord.conf
│ └── unittests.cfg
├── scaling-backend
├── 1.flask-socketio-scale-redis
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── gunicorn-eventlet.sh
│ ├── requirements.txt
│ ├── stress-test-socketio.ipynb
│ └── uwsgi-gevent.sh
├── 2.flask-nginx-loadbalancer
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ └── load-balancer.conf
├── 3.flask-socketio-redis-nginx-loadbalancer
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── load-balancer.conf
│ ├── requirements.txt
│ └── stress-test-socketio.ipynb
├── 4.rabbitmq-multiple-celery-flask
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ │ ├── bash
│ │ ├── build
│ │ ├── develop
│ │ ├── docker-compose.yml
│ │ └── down
│ ├── screenshot
│ │ ├── front.png
│ │ ├── graphs.png
│ │ ├── long-task.png
│ │ └── tasks.png
│ └── supervisord.conf
└── 5.flask-gunicorn-haproxy
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── docker-compose.yml
│ ├── initial.py
│ └── start.sh
├── stresstest
└── 1.Locust-Stresstest
│ ├── Dockerfile
│ ├── README.md
│ ├── app.py
│ ├── compose
│ ├── build
│ ├── develop
│ ├── docker-compose.yml
│ └── down
│ ├── eventlet.sh
│ ├── locust_file.py
│ ├── requirements.txt
│ ├── screenshot1.png
│ ├── screenshot2.png
│ └── supervisord.conf
└── unit-test
└── 1.pytest-flask
├── .coveragerc
├── Dockerfile
├── README.md
├── calculation.png
├── compose
├── build
└── docker-compose.yml
├── coverage.png
├── report
├── coverage_html.js
├── index.html
├── jquery.ba-throttle-debounce.min.js
├── jquery.hotkeys.js
├── jquery.isonscreen.js
├── jquery.min.js
├── jquery.tablesorter.min.js
├── keybd_closed.png
├── keybd_open.png
├── status.json
├── style.css
├── web___init___py.html
└── web_calculation_py.html
├── tests
├── __init__.py
├── conftest.py
└── test_api.py
└── web
├── __init__.py
└── calculation.py
/.gitignore:
--------------------------------------------------------------------------------
1 | *DS_Store
2 | *__pycache__
3 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2018 Devcon
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV HADOOP_HOME /opt/hadoop
4 | ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
5 |
6 | RUN apt-get update && apt-get install -y \
7 | python3 \
8 | python3-pip \
9 | python3-wheel \
10 | openjdk-8-jdk \
11 | supervisor \
12 | wget
13 |
14 | RUN wget http://www-eu.apache.org/dist/hadoop/common/hadoop-3.1.1/hadoop-3.1.1.tar.gz && \
15 | tar -xzf hadoop-3.1.1.tar.gz && \
16 | mv hadoop-3.1.1 $HADOOP_HOME && \
17 | for user in hadoop hdfs yarn mapred; do \
18 | useradd -U -M -d /opt/hadoop/ --shell /bin/bash ${user}; \
19 | done && \
20 | for user in root hdfs yarn mapred; do \
21 | usermod -G hadoop ${user}; \
22 | done && \
23 | echo "export JAVA_HOME=$JAVA_HOME" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
24 | echo "export HDFS_DATANODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
25 | echo "export HDFS_NAMENODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
26 | echo "export HDFS_SECONDARYNAMENODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
27 | echo "export YARN_RESOURCEMANAGER_USER=root" >> $HADOOP_HOME/etc/hadoop/yarn-env.sh && \
28 | echo "export YARN_NODEMANAGER_USER=root" >> $HADOOP_HOME/etc/hadoop/yarn-env.sh && \
29 | echo "PATH=$PATH:$HADOOP_HOME/bin" >> ~/.bashrc
30 |
31 | RUN apt-get install openssh-client -y && \
32 | ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa && \
33 | cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys && \
34 | chmod 0600 ~/.ssh/authorized_keys
35 |
36 | ADD *xml $HADOOP_HOME/etc/hadoop/
37 |
38 | ADD ssh_config /root/.ssh/config
39 |
40 | RUN pip3 install Flask werkzeug pydoop==2.0a3
41 |
42 | WORKDIR /app
43 |
44 | COPY . /app
45 |
46 | RUN apt-get install ssh -y
47 |
48 | ENV LC_ALL C.UTF-8
49 | ENV LANG C.UTF-8
50 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Request the server,
9 | ```bash
10 | curl --form file=@1.flask-hadoop/text.txt localhost:5000/lowercase
11 | ```
12 | ```text
13 | ["husein\nbin\nzolkepli\n"]
14 | ```
15 |
16 | 
17 |
18 | 
19 |
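20 | `app.py` also exposes a `/wordcount` endpoint. Below is a minimal client sketch; it assumes the service is reachable on `localhost:5000` and that the `requests` library is installed on your host (it is not part of this project):
21 |
22 | ```python
23 | # Hypothetical client for the /wordcount endpoint defined in app.py.
24 | # Assumes the Flask container is reachable at localhost:5000.
25 | import requests
26 |
27 | with open('text.txt', 'rb') as f:
28 |     # Upload the file the same way the curl example above does.
29 |     r = requests.post('http://localhost:5000/wordcount', files={'file': f})
30 |
31 | # The endpoint returns the MapReduce output files as a JSON list of strings.
32 | print(r.json())
33 | ```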
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | from werkzeug.utils import secure_filename
3 | import os
4 | import pydoop.hdfs as hdfs
5 | import json
6 |
7 | app = Flask(__name__)
8 | app.config['UPLOAD_FOLDER'] = os.getcwd()
9 |
10 | print(hdfs.hdfs().list_directory('/user'))
11 |
12 |
13 | @app.route('/')
14 | def hello_world():
15 | return 'Hey, we have Flask in a Docker container!'
16 |
17 |
18 | @app.route('/test', methods = ['POST'])
19 | def test():
20 | f = request.files['file']
21 | f.save(
22 | os.path.join(app.config['UPLOAD_FOLDER'], secure_filename(f.filename))
23 | )
24 | return f.filename
25 |
26 |
27 | @app.route('/wordcount', methods = ['POST'])
28 | def wordcount():
29 | f = request.files['file']
30 | f.save(
31 | os.path.join(app.config['UPLOAD_FOLDER'], secure_filename(f.filename))
32 | )
33 | with open(f.filename, 'r') as fopen:
34 | hdfs.dump(fopen.read(), '/user/input_wordcount/text')
35 | os.system(
36 | 'pydoop script -c combiner wordcount.py /user/input_wordcount /user/output_wordcount'
37 | )
38 | list_files = hdfs.hdfs().list_directory('/user/output_wordcount')
39 | return json.dumps(
40 | [
41 | hdfs.load(file['name'], mode = 'rt')
42 | for file in list_files
43 | if 'SUCCESS' not in file['name']
44 | ]
45 | )
46 |
47 |
48 | @app.route('/lowercase', methods = ['POST'])
49 | def lowercase():
50 | f = request.files['file']
51 | f.save(
52 | os.path.join(app.config['UPLOAD_FOLDER'], secure_filename(f.filename))
53 | )
54 | with open(f.filename, 'r') as fopen:
55 | hdfs.dump(fopen.read(), '/user/input_lowercase/text')
56 | os.system(
57 | "pydoop script --num-reducers 0 -t '' lowercase.py /user/input_lowercase /user/output_lowercase"
58 | )
59 | list_files = hdfs.hdfs().list_directory('/user/output_lowercase')
60 | return json.dumps(
61 | [
62 | hdfs.load(file['name'], mode = 'rt')
63 | for file in list_files
64 | if 'SUCCESS' not in file['name']
65 | ]
66 | )
67 |
68 |
69 | if __name__ == '__main__':
70 | app.run(debug = True, host = '0.0.0.0', port = 5000)
71 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec flask-hadoop bash
4 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | bash start-all.sh
4 | python3 app.py
5 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-hadoop:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000'
12 | - '8088:8088'
13 | - '9000:9000'
14 | - '9870:9870'
15 | - '9864:9864'
16 | - '19888:19888'
17 | - '8042:8042'
18 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/core-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>fs.defaultFS</name>
6 |         <value>hdfs://localhost:9000</value>
7 |     </property>
8 | </configuration>
9 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/hdfs-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>dfs.replication</name>
6 |         <value>1</value>
7 |     </property>
8 |     <property>
9 |         <name>dfs.webhdfs.enabled</name>
10 |         <value>true</value>
11 |     </property>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/lowercase.py:
--------------------------------------------------------------------------------
1 | def mapper(_, record, writer):
2 | writer.emit('', record.lower())
3 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/mapred-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>mapreduce.framework.name</name>
6 |         <value>yarn</value>
7 |     </property>
8 |     <property>
9 |         <name>yarn.app.mapreduce.am.env</name>
10 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
11 |     </property>
12 |     <property>
13 |         <name>mapreduce.map.env</name>
14 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
15 |     </property>
16 |     <property>
17 |         <name>mapreduce.reduce.env</name>
18 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
19 |     </property>
20 | </configuration>
21 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/screenshot1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/1.flask-hadoop/screenshot1.png
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/screenshot2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/1.flask-hadoop/screenshot2.png
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/ssh_config:
--------------------------------------------------------------------------------
1 | Host *
2 | UserKnownHostsFile /dev/null
3 | StrictHostKeyChecking no
4 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/start-all.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | /etc/init.d/ssh start
4 |
5 | $HADOOP_HOME/bin/hdfs namenode -format
6 |
7 | $HADOOP_HOME/sbin/start-yarn.sh
8 | $HADOOP_HOME/sbin/start-dfs.sh
9 |
10 | $HADOOP_HOME/bin/hdfs dfs -mkdir /user
11 | $HADOOP_HOME/bin/hdfs dfs -mkdir /user/input_wordcount
12 | $HADOOP_HOME/bin/hdfs dfs -mkdir /user/input_lowercase
13 | $HADOOP_HOME/bin/hdfs dfs -put $HADOOP_HOME/etc/hadoop/*.xml /user
14 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/text.txt:
--------------------------------------------------------------------------------
1 | HUSEIN
2 | BIN
3 | ZOLKEPLI
4 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/wordcount.py:
--------------------------------------------------------------------------------
1 | def mapper(_, text, writer):
2 | for word in text.split():
3 | writer.emit(word, '1')
4 |
5 |
6 | def reducer(word, icounts, writer):
7 | writer.emit(word, sum(map(int, icounts)))
8 |
9 |
10 | def combiner(word, icounts, writer):
11 | writer.count('combiner calls', 1)
12 | reducer(word, icounts, writer)
13 |
--------------------------------------------------------------------------------
/apache-bigdata/1.flask-hadoop/yarn-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <property>
4 |         <name>yarn.nodemanager.aux-services</name>
5 |         <value>mapreduce_shuffle</value>
6 |     </property>
7 |     <property>
8 |         <name>yarn.resourcemanager.address</name>
9 |         <value>127.0.0.1:8032</value>
10 |     </property>
11 | </configuration>
12 |
--------------------------------------------------------------------------------
/apache-bigdata/2.flask-kafka/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip \
6 | python3-wheel \
7 | openjdk-8-jdk \
8 | wget \
9 | supervisor
10 |
11 | RUN pip3 install bs4 requests lxml Flask kafka-python
12 |
13 | ADD . /code
14 |
15 | WORKDIR /code
16 |
17 | RUN wget http://www-us.apache.org/dist/kafka/2.0.0/kafka_2.11-2.0.0.tgz
18 |
19 | RUN tar -xvzf kafka_2.11-2.0.0.tgz
20 |
21 | RUN cp server.properties kafka_2.11-2.0.0/config/
22 |
--------------------------------------------------------------------------------
/apache-bigdata/2.flask-kafka/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | from kafka import KafkaConsumer
3 | import json
4 | import os
5 |
6 | app = Flask(__name__)
7 |
8 |
9 | @app.route('/')
10 | def hello_world():
11 | return 'Hey, we have Flask in a Docker container!'
12 |
13 |
14 | @app.route('/topic/<topic>/')
15 | def get_topics(topic):
16 | consumer = KafkaConsumer(
17 | topic,
18 | auto_offset_reset = 'earliest',
19 | bootstrap_servers = ['localhost:9092'],
20 | api_version = (0, 10),
21 | consumer_timeout_ms = 1000,
22 | )
23 | return json.dumps(
24 | [json.loads(msg.value.decode('utf-8')) for msg in consumer]
25 | )
26 |
27 |
28 | if __name__ == '__main__':
29 | app.run(debug = True, host = '0.0.0.0', port = 5000)
30 |
--------------------------------------------------------------------------------
/apache-bigdata/2.flask-kafka/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec kafka-flask bash
4 |
--------------------------------------------------------------------------------
/apache-bigdata/2.flask-kafka/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/apache-bigdata/2.flask-kafka/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | supervisord -c supervisord.conf
4 | python3 producer.py
5 | python3 app.py
6 |
--------------------------------------------------------------------------------
/apache-bigdata/2.flask-kafka/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | kafka-flask:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000'
12 | - '2181:2181'
13 | - '9092:9092'
14 | - '9000:9000'
15 |
--------------------------------------------------------------------------------
/apache-bigdata/2.flask-kafka/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/apache-bigdata/2.flask-kafka/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:zookeeper]
4 | command = /code/kafka_2.11-2.0.0/bin/zookeeper-server-start.sh /code/kafka_2.11-2.0.0/config/zookeeper.properties
5 | stopasgroup = true
6 | autorestart = true
7 |
8 | [program:kafka]
9 | command = /code/kafka_2.11-2.0.0/bin/kafka-server-start.sh /code/kafka_2.11-2.0.0/config/server.properties
10 | stopasgroup = true
11 | autorestart = true
12 |
13 | [inet_http_server]
14 | port = 0.0.0.0:9000
15 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV HADOOP_HOME /opt/hadoop
4 | ENV HIVE_HOME /opt/hive
5 | ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
6 |
7 | RUN apt-get update && apt-get install -y \
8 | python3 \
9 | python3-pip \
10 | python3-wheel \
11 | openjdk-8-jdk \
12 | ssh \
13 | wget
14 |
15 | RUN wget http://www-eu.apache.org/dist/hadoop/common/hadoop-3.1.1/hadoop-3.1.1.tar.gz && \
16 | tar -xzf hadoop-3.1.1.tar.gz && \
17 | mv hadoop-3.1.1 $HADOOP_HOME && \
18 | for user in hadoop hdfs yarn mapred; do \
19 | useradd -U -M -d /opt/hadoop/ --shell /bin/bash ${user}; \
20 | done && \
21 | for user in root hdfs yarn mapred; do \
22 | usermod -G hadoop ${user}; \
23 | done && \
24 | echo "export JAVA_HOME=$JAVA_HOME" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
25 | echo "export HDFS_DATANODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
26 | echo "export HDFS_NAMENODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
27 | echo "export HDFS_SECONDARYNAMENODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
28 | echo "export YARN_RESOURCEMANAGER_USER=root" >> $HADOOP_HOME/etc/hadoop/yarn-env.sh && \
29 | echo "export YARN_NODEMANAGER_USER=root" >> $HADOOP_HOME/etc/hadoop/yarn-env.sh && \
30 | echo "PATH=$PATH:$HADOOP_HOME/bin" >> ~/.bashrc
31 |
32 | RUN apt-get install openssh-client -y && \
33 | ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa && \
34 | cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys && \
35 | chmod 0600 ~/.ssh/authorized_keys
36 |
37 | ADD *xml $HADOOP_HOME/etc/hadoop/
38 |
39 | ADD ssh_config /root/.ssh/config
40 |
41 | RUN pip3 install Flask PyHive
42 |
43 | RUN wget http://www-eu.apache.org/dist/hive/hive-3.1.0/apache-hive-3.1.0-bin.tar.gz
44 | RUN tar -xzf apache-hive-3.1.0-bin.tar.gz && mv apache-hive-3.1.0-bin $HIVE_HOME
45 |
46 | WORKDIR /app
47 |
48 | COPY . /app
49 |
50 | RUN apt-get install libsasl2-dev gcc -y
51 |
52 | RUN pip3 install thrift pyhive[hive]
53 |
54 | ENV LC_ALL C.UTF-8
55 | ENV LANG C.UTF-8
56 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Open new terminal,
9 | ```bash
10 | compose/bash
11 | python3 app.py
12 | curl http://localhost:5000/employee/Gopal/
13 | ```
14 | ```text
15 | [[1201, "Gopal", "45000", "Technical manager"]]
16 | ```
17 |
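18 | For quick checks you can also query Hive directly with PyHive (installed in the Dockerfile). A minimal sketch, assuming it is run inside the container (`compose/bash`) where HiveServer2 listens on `localhost:10000` and the `employee` table from `app.py` already exists:
19 |
20 | ```python
21 | # Minimal PyHive sketch against the employee table created by app.py.
22 | # Assumes HiveServer2 is already up (started by start-all.sh).
23 | from pyhive import hive
24 |
25 | cursor = hive.connect('localhost').cursor()
26 | cursor.execute('SELECT * FROM employee LIMIT 5')
27 | for row in cursor.fetchall():
28 |     print(row)
29 | ```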
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | import os
3 | import json
4 | from pyhive import hive
5 |
6 | cursor = hive.connect('localhost').cursor()
7 | cursor.execute(
8 | "CREATE TABLE IF NOT EXISTS employee ( eid int, name String, salary String, destignation String) COMMENT 'employee details' ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n' STORED AS TEXTFILE"
9 | )
10 | cursor.execute(
11 | "LOAD DATA LOCAL INPATH 'sample.txt' OVERWRITE INTO TABLE employee"
12 | )
13 |
14 | app = Flask(__name__)
15 |
16 |
17 | @app.route('/')
18 | def hello_world():
19 | return 'Hey, we have Flask in a Docker container!'
20 |
21 |
22 | @app.route('/employee/<person>/')
23 | def get(person):
24 | cursor.execute("SELECT * FROM employee WHERE name = '%s'" % (person))
25 | return json.dumps(list(cursor.fetchall()))
26 |
27 |
28 | if __name__ == '__main__':
29 | app.run(debug = True, host = '0.0.0.0', port = 5000)
30 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec flask-hadoop-hive bash
4 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | bash start-all.sh
4 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-hadoop-hive:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000'
12 | - '8088:8088'
13 | - '9000:9000'
14 | - '9870:9870'
15 | - '9864:9864'
16 | - '19888:19888'
17 | - '10000:10000'
18 | - '8042:8042'
19 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/core-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>fs.defaultFS</name>
6 |         <value>hdfs://localhost:9000</value>
7 |     </property>
8 |     <property>
9 |         <name>hadoop.proxyuser.root.hosts</name>
10 |         <value>*</value>
11 |     </property>
12 |     <property>
13 |         <name>hadoop.proxyuser.root.groups</name>
14 |         <value>*</value>
15 |     </property>
16 | </configuration>
17 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/hdfs-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>dfs.replication</name>
6 |         <value>1</value>
7 |     </property>
8 |     <property>
9 |         <name>dfs.webhdfs.enabled</name>
10 |         <value>true</value>
11 |     </property>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/mapred-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>mapreduce.framework.name</name>
6 |         <value>yarn</value>
7 |     </property>
8 |     <property>
9 |         <name>yarn.app.mapreduce.am.env</name>
10 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
11 |     </property>
12 |     <property>
13 |         <name>mapreduce.map.env</name>
14 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
15 |     </property>
16 |     <property>
17 |         <name>mapreduce.reduce.env</name>
18 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
19 |     </property>
20 | </configuration>
21 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/sample.txt:
--------------------------------------------------------------------------------
1 | 1201,Gopal,45000,Technical manager
2 | 1202,Manisha,45000,Proof reader
3 | 1203,Masthanvali,40000,Technical writer
4 | 1204,Kiran,40000,Hr Admin
5 | 1205,Kranthi,30000,tOp Admin
6 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/ssh_config:
--------------------------------------------------------------------------------
1 | Host *
2 | UserKnownHostsFile /dev/null
3 | StrictHostKeyChecking no
4 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/start-all.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | /etc/init.d/ssh start
4 |
5 | $HADOOP_HOME/bin/hdfs namenode -format
6 |
7 | $HADOOP_HOME/sbin/start-yarn.sh
8 | $HADOOP_HOME/sbin/start-dfs.sh
9 |
10 | $HADOOP_HOME/bin/hdfs dfs -mkdir -p /user/hive/warehouse
11 | $HADOOP_HOME/bin/hdfs dfs -chmod 765 /user/hive/warehouse
12 | $HIVE_HOME/bin/schematool -initSchema -dbType derby
13 | $HIVE_HOME/bin/hive --service hiveserver2 --hiveconf hive.server2.thrift.port=10000 --hiveconf hive.root.logger=INFO,console
14 |
--------------------------------------------------------------------------------
/apache-bigdata/3.flask-hadoop-hive/yarn-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <property>
4 |         <name>yarn.nodemanager.aux-services</name>
5 |         <value>mapreduce_shuffle</value>
6 |     </property>
7 |     <property>
8 |         <name>yarn.resourcemanager.address</name>
9 |         <value>127.0.0.1:8032</value>
10 |     </property>
11 | </configuration>
12 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV HADOOP_HOME /opt/hadoop
4 | ENV SPARK_HOME /opt/spark
5 | ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
6 |
7 | RUN apt-get update && apt-get install -y \
8 | python3 \
9 | python3-pip \
10 | python3-wheel \
11 | openjdk-8-jdk \
12 | ssh \
13 | wget
14 |
15 | RUN wget https://www-eu.apache.org/dist/spark/spark-2.3.2/spark-2.3.2-bin-hadoop2.7.tgz && \
16 | tar -xzf spark-2.3.2-bin-hadoop2.7.tgz && \
17 | mv spark-2.3.2-bin-hadoop2.7 $SPARK_HOME
18 |
19 | RUN pip3 install jupyter pyspark
20 |
21 | RUN wget http://www-eu.apache.org/dist/hadoop/common/hadoop-3.1.1/hadoop-3.1.1.tar.gz && \
22 | tar -xzf hadoop-3.1.1.tar.gz && \
23 | mv hadoop-3.1.1 $HADOOP_HOME && \
24 | for user in hadoop hdfs yarn mapred; do \
25 | useradd -U -M -d /opt/hadoop/ --shell /bin/bash ${user}; \
26 | done && \
27 | for user in root hdfs yarn mapred; do \
28 | usermod -G hadoop ${user}; \
29 | done && \
30 | echo "export JAVA_HOME=$JAVA_HOME" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
31 | echo "export HDFS_DATANODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
32 | echo "export HDFS_NAMENODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
33 | echo "export HDFS_SECONDARYNAMENODE_USER=root" >> $HADOOP_HOME/etc/hadoop/hadoop-env.sh && \
34 | echo "export YARN_RESOURCEMANAGER_USER=root" >> $HADOOP_HOME/etc/hadoop/yarn-env.sh && \
35 | echo "export YARN_NODEMANAGER_USER=root" >> $HADOOP_HOME/etc/hadoop/yarn-env.sh && \
36 | echo "PATH=$PATH:$HADOOP_HOME/bin" >> ~/.bashrc
37 |
38 | RUN apt-get install openssh-client -y && \
39 | ssh-keygen -t rsa -P '' -f ~/.ssh/id_rsa && \
40 | cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys && \
41 | chmod 0600 ~/.ssh/authorized_keys
42 |
43 | ADD *xml $HADOOP_HOME/etc/hadoop/
44 |
45 | ADD ssh_config /root/.ssh/config
46 |
47 | WORKDIR /app
48 |
49 | COPY . /app
50 |
51 | ENV LC_ALL C.UTF-8
52 | ENV LANG C.UTF-8
53 |
54 | RUN pip3 install pandas matplotlib seaborn
55 |
56 | RUN ln -s /usr/bin/python3 /usr/bin/python
57 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Open jupyter notebook from link provided in terminal,
9 | ```text
10 | pyspark_1 | Copy/paste this URL into your browser when you connect for the first time,
11 | pyspark_1 | to login with a token:
12 | pyspark_1 | http://(4d8ed5c57e9f or 127.0.0.1):8080/?token=b7a118cfcc021d8f1bffa9ba94dd18337c13a12ce6ad6c6c
13 | ```
14 |
15 | There are some notebook examples showing how to read a CSV file from HDFS.
16 |
17 | 
18 |
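19 | For reference, a minimal sketch of reading `Iris.csv` from HDFS with PySpark, assuming `start-all.sh` has already uploaded the file to `/user` and the NameNode listens on `localhost:9000` (see `core-site.xml`):
20 |
21 | ```python
22 | # Minimal PySpark sketch: read the Iris.csv that start-all.sh put into HDFS.
23 | # Assumes fs.defaultFS points at hdfs://localhost:9000 (core-site.xml).
24 | from pyspark.sql import SparkSession
25 |
26 | spark = SparkSession.builder.appName('iris-hdfs').getOrCreate()
27 | df = spark.read.csv('hdfs://localhost:9000/user/Iris.csv', header=True, inferSchema=True)
28 | df.show(5)
29 | spark.stop()
30 | ```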
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec pyspark bash
4 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | bash start-all.sh
4 | jupyter notebook --ip=0.0.0.0 --port=8080 --allow-root
5 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | pyspark:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | ports:
9 | - '8080:8080'
10 | - '8088:8088'
11 | - '9000:9000'
12 | - '9870:9870'
13 | - '9864:9864'
14 | - '19888:19888'
15 | - '8042:8042'
16 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/core-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>fs.defaultFS</name>
6 |         <value>hdfs://localhost:9000</value>
7 |     </property>
8 | </configuration>
9 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/hadoop-storage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/4.Pyspark-jupyter-hadoop/hadoop-storage.png
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/hdfs-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>dfs.replication</name>
6 |         <value>1</value>
7 |     </property>
8 |     <property>
9 |         <name>dfs.webhdfs.enabled</name>
10 |         <value>true</value>
11 |     </property>
12 | </configuration>
13 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/mapred-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
3 | <configuration>
4 |     <property>
5 |         <name>mapreduce.framework.name</name>
6 |         <value>yarn</value>
7 |     </property>
8 |     <property>
9 |         <name>yarn.app.mapreduce.am.env</name>
10 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
11 |     </property>
12 |     <property>
13 |         <name>mapreduce.map.env</name>
14 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
15 |     </property>
16 |     <property>
17 |         <name>mapreduce.reduce.env</name>
18 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
19 |     </property>
20 | </configuration>
21 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/ssh_config:
--------------------------------------------------------------------------------
1 | Host *
2 | UserKnownHostsFile /dev/null
3 | StrictHostKeyChecking no
4 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/start-all.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | /etc/init.d/ssh start
4 |
5 | $HADOOP_HOME/bin/hdfs namenode -format
6 |
7 | $HADOOP_HOME/sbin/start-yarn.sh
8 | $HADOOP_HOME/sbin/start-dfs.sh
9 |
10 | $HADOOP_HOME/bin/hdfs dfs -mkdir /user
11 | $HADOOP_HOME/bin/hdfs dfs -put $HADOOP_HOME/etc/hadoop/*.xml /user
12 | $HADOOP_HOME/bin/hdfs dfs -put Iris.csv /user
13 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/test-pyspark.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "name": "stdout",
10 | "output_type": "stream",
11 | "text": [
12 | "3.14125384\n"
13 | ]
14 | }
15 | ],
16 | "source": [
17 | "import pyspark\n",
18 | "import random\n",
19 | "sc = pyspark.SparkContext(appName=\"Pi\")\n",
20 | "num_samples = 100000000\n",
21 | "def inside(p): \n",
22 | " x, y = random.random(), random.random()\n",
23 | " return x*x + y*y < 1\n",
24 | "count = sc.parallelize(range(0, num_samples)).filter(inside).count()\n",
25 | "pi = 4 * count / num_samples\n",
26 | "print(pi)\n",
27 | "sc.stop()"
28 | ]
29 | },
30 | {
31 | "cell_type": "code",
32 | "execution_count": null,
33 | "metadata": {},
34 | "outputs": [],
35 | "source": []
36 | }
37 | ],
38 | "metadata": {
39 | "kernelspec": {
40 | "display_name": "Python 3",
41 | "language": "python",
42 | "name": "python3"
43 | },
44 | "language_info": {
45 | "codemirror_mode": {
46 | "name": "ipython",
47 | "version": 3
48 | },
49 | "file_extension": ".py",
50 | "mimetype": "text/x-python",
51 | "name": "python",
52 | "nbconvert_exporter": "python",
53 | "pygments_lexer": "ipython3",
54 | "version": "3.5.2"
55 | }
56 | },
57 | "nbformat": 4,
58 | "nbformat_minor": 2
59 | }
60 |
--------------------------------------------------------------------------------
/apache-bigdata/4.Pyspark-jupyter-hadoop/yarn-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0" encoding="UTF-8"?>
2 | <configuration>
3 |     <property>
4 |         <name>yarn.nodemanager.aux-services</name>
5 |         <value>mapreduce_shuffle</value>
6 |     </property>
7 |     <property>
8 |         <name>yarn.resourcemanager.address</name>
9 |         <value>127.0.0.1:8032</value>
10 |     </property>
11 | </configuration>
12 |
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3-pip \
5 | openjdk-8-jdk-headless \
6 | supervisor \
7 | wget \
8 | libsnappy1v5
9 |
10 | # Grab gosu for easy step-down from root
11 | ENV GOSU_VERSION 1.7
12 | RUN set -ex; \
13 | wget -nv -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture)"; \
14 | wget -nv -O /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$(dpkg --print-architecture).asc"; \
15 | export GNUPGHOME="$(mktemp -d)"; \
16 | for server in $(shuf -e ha.pool.sks-keyservers.net \
17 | hkp://p80.pool.sks-keyservers.net:80 \
18 | keyserver.ubuntu.com \
19 | hkp://keyserver.ubuntu.com:80 \
20 | pgp.mit.edu) ; do \
21 | gpg --batch --keyserver "$server" --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4 && break || : ; \
22 | done && \
23 | gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu; \
24 | rm -rf "$GNUPGHOME" /usr/local/bin/gosu.asc; \
25 | chmod +x /usr/local/bin/gosu; \
26 | gosu nobody true
27 |
28 | # Configure Flink version
29 | ENV FLINK_VERSION=1.7.0 \
30 | HADOOP_SCALA_VARIANT=hadoop28-scala_2.12
31 |
32 | # Prepare environment
33 | ENV FLINK_HOME=/opt/flink
34 | ENV PATH=$FLINK_HOME/bin:$PATH
35 | RUN groupadd --system --gid=9999 flink && \
36 | useradd --system --home-dir $FLINK_HOME --uid=9999 --gid=flink flink
37 | WORKDIR $FLINK_HOME
38 |
39 | ENV FLINK_URL_FILE_PATH=flink/flink-${FLINK_VERSION}/flink-${FLINK_VERSION}-bin-${HADOOP_SCALA_VARIANT}.tgz
40 | # Not all mirrors have the .asc files
41 | ENV FLINK_TGZ_URL=https://www.apache.org/dyn/closer.cgi?action=download&filename=${FLINK_URL_FILE_PATH} \
42 | FLINK_ASC_URL=https://www.apache.org/dist/${FLINK_URL_FILE_PATH}.asc
43 |
44 | # For GPG verification instead of relying on key servers
45 | COPY KEYS /KEYS
46 |
47 | # Install Flink
48 | RUN set -ex; \
49 | wget -nv -O flink.tgz "$FLINK_TGZ_URL"; \
50 | wget -nv -O flink.tgz.asc "$FLINK_ASC_URL"; \
51 | \
52 | export GNUPGHOME="$(mktemp -d)"; \
53 | gpg --batch --import /KEYS; \
54 | gpg --batch --verify flink.tgz.asc flink.tgz; \
55 | rm -rf "$GNUPGHOME" flink.tgz.asc; \
56 | \
57 | tar -xf flink.tgz --strip-components=1; \
58 | rm flink.tgz; \
59 | \
60 | chown -R flink:flink .;
61 |
62 | RUN pip3 install jupyter
63 |
64 | WORKDIR /app
65 |
66 | COPY . /app
67 |
68 | ENV LC_ALL C.UTF-8
69 | ENV LANG C.UTF-8
70 |
71 | RUN jupyter notebook --generate-config
72 |
73 | RUN echo "" >> /root/.jupyter/jupyter_notebook_config.py
74 | RUN echo "c.NotebookApp.token = ''" >> /root/.jupyter/jupyter_notebook_config.py
75 |
76 | EXPOSE 6123 8081
77 |
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run [compose/build](compose/build) to build the container.
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Visit [localhost:8090](http://localhost:8090) for the Flink dashboard.
9 |
10 | 
11 |
12 | 
13 |
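14 | To check from a script that the cluster came up, you can hit the Flink REST API that backs the dashboard. A minimal sketch, assuming the `/overview` endpoint is available in this Flink version and that `requests` is installed on the host (it is not part of this project):
15 |
16 | ```python
17 | # Minimal sketch: query the Flink REST API exposed on the mapped port 8090.
18 | # The /overview endpoint is assumed; adjust if your Flink version differs.
19 | import requests
20 |
21 | overview = requests.get('http://localhost:8090/overview').json()
22 | # Expect at least one task manager and some free slots once taskmanager.sh ran.
23 | print(overview)
24 | ```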
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec tf-flink bash
4 |
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | bash jobmanager.sh
4 | bash taskmanager.sh
5 | jupyter notebook --ip=0.0.0.0 --port=9090 --allow-root
6 |
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | tf-flink:
4 | build:
5 | context: ..
6 | command: compose/develop
7 |     environment:
8 |       PYTHONPATH: .
9 |       JOB_MANAGER_RPC_ADDRESS: localhost
10 |     volumes:
11 |       - ../:/app
12 |     expose:
13 |       - '6123'
14 |       - '6121'
15 |       - '6122'
16 |     ports:
17 |       - '8090:8081'
18 |       - '9090:9090'
19 |
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/5.flink-jupyter/dashboard.png
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/jobmanager.sh:
--------------------------------------------------------------------------------
1 | echo "Starting Job Manager"
2 | sed -i -e "s/jobmanager.rpc.address: localhost/jobmanager.rpc.address: ${JOB_MANAGER_RPC_ADDRESS}/g" "$FLINK_HOME/conf/flink-conf.yaml"
3 | echo "blob.server.port: 6124" >> "$FLINK_HOME/conf/flink-conf.yaml"
4 | echo "query.server.port: 6125" >> "$FLINK_HOME/conf/flink-conf.yaml"
5 | echo "config file: " && grep '^[^\n#]' "$FLINK_HOME/conf/flink-conf.yaml"
6 | $FLINK_HOME/bin/jobmanager.sh start
7 |
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/taskmanager.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/5.flink-jupyter/taskmanager.png
--------------------------------------------------------------------------------
/apache-bigdata/5.flink-jupyter/taskmanager.sh:
--------------------------------------------------------------------------------
1 | TASK_MANAGER_NUMBER_OF_TASK_SLOTS=${TASK_MANAGER_NUMBER_OF_TASK_SLOTS:-$(grep -c ^processor /proc/cpuinfo)}
2 | sed -i -e "s/jobmanager.rpc.address: localhost/jobmanager.rpc.address: ${JOB_MANAGER_RPC_ADDRESS}/g" "$FLINK_HOME/conf/flink-conf.yaml"
3 | sed -i -e "s/taskmanager.numberOfTaskSlots: 1/taskmanager.numberOfTaskSlots: $TASK_MANAGER_NUMBER_OF_TASK_SLOTS/g" "$FLINK_HOME/conf/flink-conf.yaml"
4 | echo "blob.server.port: 6124" >> "$FLINK_HOME/conf/flink-conf.yaml"
5 | echo "query.server.port: 6125" >> "$FLINK_HOME/conf/flink-conf.yaml"
6 | echo "Starting Task Manager"
7 | echo "config file: " && grep '^[^\n#]' "$FLINK_HOME/conf/flink-conf.yaml"
8 | $FLINK_HOME/bin/taskmanager.sh start
9 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 |
3 | RUN apt-get update && \
4 | apt-get upgrade -y && \
5 | apt-get install openjdk-8-jre-headless -y && \
6 | apt-get install locales -y && \
7 | update-locale LANG=C.UTF-8 LC_MESSAGES=POSIX && \
8 | locale-gen en_US.UTF-8 && \
9 | dpkg-reconfigure locales && \
10 | apt-get clean all
11 |
12 | RUN apt-get install -y curl python-dev build-essential
13 | RUN apt-get install -y python3-pip
14 | RUN apt-get install -y libssl-dev
15 | RUN apt-get install -y libffi-dev
16 | RUN apt-get install -y wget
17 |
18 |
19 | # download and install Leiningen
20 | ENV LEIN_ROOT=1
21 | RUN curl https://raw.githubusercontent.com/technomancy/leiningen/stable/bin/lein > ./lein
22 | RUN chmod a+x ./lein
23 | RUN mv ./lein /usr/bin/lein
24 | RUN lein version
25 |
26 | RUN pip3 install streamparse -U
27 |
28 | ENV STORM_USER=storm \
29 | STORM_CONF_DIR=/conf \
30 | STORM_DATA_DIR=/data \
31 | STORM_LOG_DIR=/logs
32 |
33 | WORKDIR /opt
34 |
35 | # Add a user and make dirs
36 | RUN set -x \
37 | && useradd "$STORM_USER" \
38 | && mkdir -p "$STORM_CONF_DIR" "$STORM_DATA_DIR" "$STORM_LOG_DIR" \
39 | && chown -R "$STORM_USER:$STORM_USER" "$STORM_CONF_DIR" "$STORM_DATA_DIR" "$STORM_LOG_DIR"
40 |
41 | ARG DISTRO_NAME=apache-storm-1.2.1
42 |
43 | # Download Apache Storm, verify its PGP signature, untar and clean up
44 | RUN set -x \
45 | && wget -q "http://www.apache.org/dist/storm/$DISTRO_NAME/$DISTRO_NAME.tar.gz" \
46 | && tar -xzf "$DISTRO_NAME.tar.gz" \
47 | && chown -R "$STORM_USER:$STORM_USER" "$DISTRO_NAME"
48 |
49 |
50 | ENV PATH /opt/"$DISTRO_NAME"/bin/:$PATH
51 |
52 | RUN apt-get install -y inetutils-ping supervisor
53 | RUN update-ca-certificates -f
54 | WORKDIR /tasks/wordcount
55 |
56 | RUN pip3 install psutil redis
57 |
58 | #ENTRYPOINT ["/bin/bash"]
59 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | Make sure your machine has enough CPU and memory; otherwise Apache Storm will run very slowly.
4 |
5 | 1. Run `Docker compose`,
6 | ```bash
7 | docker-compose up --build
8 | ```
9 |
10 | 2. Check [supervisord](http://localhost:9005)
11 |
12 | 3. Check [Storm UI](http://localhost:8085)
13 |
14 | 
15 |
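16 | The wordcount bolt keeps its counts in the Redis sorted set `words` (see `tasks/wordcount/src/bolts/wordcount.py`). A minimal sketch for inspecting it, assuming it runs inside the storm container (where the hostname `redis` resolves and the `redis` package is installed by the Dockerfile):
17 |
18 | ```python
19 | # Minimal sketch: read back the word counts written by RedisWordCountBolt.
20 | # Assumes the redis service name from docker-compose.yml resolves as 'redis'.
21 | from redis import StrictRedis
22 |
23 | r = StrictRedis(host='redis')
24 | # Top 10 words by count from the 'words' sorted set.
25 | for word, count in r.zrevrange('words', 0, 9, withscores=True):
26 |     print(word.decode(), int(count))
27 | ```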
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/conf/storm.yaml:
--------------------------------------------------------------------------------
1 | storm.zookeeper.servers:
2 | - "zookeeper"
3 |
4 | nimbus.childopts: "-Xmx1024m -Djava.net.preferIPv4Stack=true"
5 |
6 | ui.childopts: "-Xmx768m -Djava.net.preferIPv4Stack=true"
7 |
8 | supervisor.childopts: "-Djava.net.preferIPv4Stack=true"
9 | worker.childopts: "-Xmx768m -Djava.net.preferIPv4Stack=true"
10 |
11 | storm.log.dir: "/logs"
12 | storm.local.dir: "/data"
13 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 |
3 | services:
4 | redis:
5 | image: redis:5-alpine
6 | container_name: redis
7 | restart: always
8 |
9 | zookeeper:
10 | image: zookeeper:3.4
11 | container_name: zookeeper
12 | restart: always
13 |
14 | storm:
15 | build: .
16 | container_name: storm
17 | command: bash start.sh
18 | depends_on:
19 | - zookeeper
20 | links:
21 | - zookeeper
22 | restart: always
23 | ports:
24 | - '8089:8000'
25 | - '6627:6627'
26 | - '9005:9000'
27 | - '8085:8080'
28 | volumes:
29 | - ./conf:/conf
30 | - ./tasks:/tasks
31 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/screenshot.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/6.redis-storm/screenshot.png
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | _build
3 | _resources
4 | logs
5 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/6.redis-storm/tasks/wordcount/README.md
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "serializer": "json",
3 | "topology_specs": "topologies/",
4 | "virtualenv_specs": "virtualenvs/",
5 | "envs": {
6 | "prod": {
7 | "user": "",
8 | "ssh_password": "",
9 | "use_ssh_for_nimbus": false,
10 | "use_virtualenv": false,
11 | "nimbus": "localhost",
12 | "workers": [],
13 | "log": {
14 | "path": "",
15 | "max_bytes": 1000000,
16 | "backup_count": 10,
17 | "level": "info"
18 | },
19 | "virtualenv_root": ""
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/fabfile.py:
--------------------------------------------------------------------------------
1 | def pre_submit(topology_name, env_name, env_config, options):
2 | """Override this function to perform custom actions prior to topology
3 | submission. No SSH tunnels will be active when this function is called."""
4 | pass
5 |
6 |
7 | def post_submit(topo_name, env_name, env_config, options):
8 | """Override this function to perform custom actions after topology
9 | submission. Note that the SSH tunnel to Nimbus will still be active
10 | when this function is called."""
11 | pass
12 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/project.clj:
--------------------------------------------------------------------------------
1 | (defproject wordcount "0.0.1-SNAPSHOT"
2 | :resource-paths ["_resources"]
3 | :target-path "_build"
4 | :min-lein-version "2.0.0"
5 | :jvm-opts ["-client"]
6 | :dependencies [[org.apache.storm/storm-core "1.2.1"]
7 | [org.apache.storm/flux-core "1.2.1"]]
8 | :jar-exclusions [#"log4j\.properties" #"org\.apache\.storm\.(?!flux)" #"trident" #"META-INF" #"meta-inf" #"\.yaml"]
9 | :uberjar-exclusions [#"log4j\.properties" #"org\.apache\.storm\.(?!flux)" #"trident" #"META-INF" #"meta-inf" #"\.yaml"]
10 | )
11 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/ps.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import psutil
3 |
4 |
5 | def pidmonitor():
6 | processes = ['streamparse.run', 'java']
7 | for pid in psutil.pids():
8 | proc = psutil.Process(pid)
9 | for process in processes:
10 | if process in proc.cmdline():
11 | cmdline = proc.cmdline()
12 | main_proc = cmdline[0]
13 | details = []
14 | if main_proc == 'java':
15 | details.append('[storm]')
16 | elif main_proc == 'python':
17 | details.extend(cmdline[2:4])
18 | for detail in details:
19 | if 'Spout' in detail:
20 | details.append('[spout]')
21 | if 'Bolt' in detail:
22 | details.append('[bolt]')
23 | print(main_proc, ' '.join(details))
24 | print('=> CPU% {}'.format(proc.cpu_percent(interval = 0.2)))
25 |
26 |
27 | while True:
28 | try:
29 | pidmonitor()
30 | except Exception as e:
31 | print(e)
32 | pass
33 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/src/bolts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/6.redis-storm/tasks/wordcount/src/bolts/__init__.py
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/src/bolts/wordcount.py:
--------------------------------------------------------------------------------
1 | import os
2 | from redis import StrictRedis
3 | from streamparse import Bolt
4 |
5 |
6 | class RedisWordCountBolt(Bolt):
7 | outputs = ['word', 'count']
8 |
9 | def initialize(self, conf, ctx):
10 | self.redis = StrictRedis(host = 'redis')
11 | self.total = 0
12 |
13 | def _increment(self, word, inc_by):
14 | self.total += inc_by
15 | return self.redis.zincrby('words', word, inc_by)
16 |
17 | def process(self, tup):
18 | word = tup.values[0]
19 | count = self._increment(word, 10 if word == 'dog' else 1)
20 | if self.total % 1000 == 0:
21 | self.logger.info('counted %i words', self.total)
22 | self.emit([word, count])
23 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/src/spouts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/apache-bigdata/6.redis-storm/tasks/wordcount/src/spouts/__init__.py
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/src/spouts/words.py:
--------------------------------------------------------------------------------
1 | from itertools import cycle
2 |
3 | from streamparse import Spout
4 |
5 |
6 | class WordSpout(Spout):
7 | outputs = ['word']
8 |
9 | def initialize(self, stormconf, context):
10 | self.words = cycle(['dog', 'cat', 'zebra', 'elephant'])
11 |
12 | def next_tuple(self):
13 | word = next(self.words)
14 | self.emit([word])
15 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/start.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | supervisord -c supervisord.conf
4 |
5 | while true ; do
6 | if sparse submit --debug -e prod -n wordcount -w 4; then
7 | break
8 | fi
9 | sleep 2
10 | done
11 |
12 | export PYTHONUNBUFFERED=1
13 | python3 ps.py
14 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:nimbus]
4 | command = storm nimbus
5 | stopasgroup = true
6 | autorestart = true
7 |
8 | [program:supervisor]
9 | command = storm supervisor
10 | stopasgroup = true
11 | autorestart = true
12 |
13 | [program:ui]
14 | command = storm ui
15 | stopasgroup = true
16 | autorestart = true
17 |
18 | [inet_http_server]
19 | port = 0.0.0.0:9000
20 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/supervisord.pid:
--------------------------------------------------------------------------------
1 | 14
2 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/topologies/wordcount.py:
--------------------------------------------------------------------------------
1 | """
2 | Word count topology
3 | """
4 |
5 | from streamparse import Grouping, Topology
6 |
7 | from bolts.wordcount import RedisWordCountBolt
8 | from spouts.words import WordSpout
9 |
10 |
11 | class WordCount(Topology):
12 | word_spout = WordSpout.spec()
13 | count_bolt = RedisWordCountBolt.spec(
14 | inputs = {word_spout: Grouping.fields('word')}, par = 2
15 | )
16 |
--------------------------------------------------------------------------------
/apache-bigdata/6.redis-storm/tasks/wordcount/virtualenvs/wordcount.txt:
--------------------------------------------------------------------------------
1 | streamparse # always required for streamparse projects
2 |
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel
9 |
10 | COPY requirements.txt ./
11 | RUN pip3 install -r requirements.txt
12 |
13 | ENV LC_ALL C.UTF-8
14 | ENV LANG C.UTF-8
15 |
16 | WORKDIR /app
17 |
18 | COPY . /app
19 |
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Try some examples,
9 | ```bash
10 | curl localhost:5000/ -X GET
11 | ```
12 | ```text
13 | Hey, we have Flask in a Docker container!
14 | ```
15 | ```bash
16 | curl localhost:5000/members/husein/
17 | ```
18 | ```text
19 | husein
20 | ```
21 |
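3. When you are done, stop and remove the containers with the helper script in `compose/`,
```bash
compose/down
```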
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route('/')
7 | def hello_world():
8 | return 'Hey, we have Flask in a Docker container!'
9 |
10 |
11 | @app.route('/members')
12 | def members():
13 | return 'you can put anything after /members/'
14 |
15 |
16 | @app.route('/members/<string:name>/')
17 | def getMember(name):
18 | return name
19 |
20 |
21 | if __name__ == '__main__':
22 | app.run(debug = True, host = '0.0.0.0', port = 5000)
23 |
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | bash eventlet.sh
4 |
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-hello:
4 | build:
5 | context: ..
6 | target: base
7 | command: bash eventlet.sh
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask
12 | volumes:
13 | - ..:/app
14 |
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:5000
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid app:app
3 |
4 |
--------------------------------------------------------------------------------
/basic-backend/1.flask-hello/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | eventlet
3 | gunicorn
4 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel
9 |
10 | COPY requirements.txt ./
11 | RUN pip3 install -r requirements.txt
12 |
13 | ENV LC_ALL C.UTF-8
14 | ENV LANG C.UTF-8
15 |
16 | RUN apt-key adv --keyserver hkp://keyserver.ubuntu.com:80 --recv EA312927
17 |
18 | RUN echo "deb http://repo.mongodb.org/apt/ubuntu xenial/mongodb-org/3.2 multiverse" | tee /etc/apt/sources.list.d/mongodb-org-3.2.list
19 |
20 | RUN apt-get update && \
21 | apt-get install -y mongodb-org && \
22 | rm -rf /var/lib/apt/lists/*
23 |
24 | EXPOSE 27017
25 | EXPOSE 28017
26 |
27 | RUN mkdir -p /data/db
28 |
29 | WORKDIR /app
30 |
31 | COPY . /app
32 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Try some examples,
9 | ```bash
10 | curl localhost:5000/ -X GET
11 | ```
12 | ```text
13 | Hey, we have Flask with MongoDB in a Docker container!
14 | ```
15 | ```bash
16 | curl localhost:5000/insert?name=husein -X GET
17 | ```
18 | ```text
19 | done inserted husein
20 | ```
21 | ```bash
22 | curl localhost:5000/get?name=husein -X GET
23 | ```
24 | ```text
25 | husein
26 | ```
27 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | from pymongo import MongoClient
3 | import json
4 |
5 | app = Flask(__name__)
6 | client = MongoClient('localhost', 27017)
7 | db = client.test_database
8 | posts = db.posts
9 |
10 |
11 | @app.route('/')
12 | def hello_world():
13 | return 'Hey, we have Flask with MongoDB in a Docker container!'
14 |
15 |
16 | @app.route('/insert', methods = ['GET'])
17 | def insert():
18 | if not request.args.get('name'):
19 | return 'insert name'
20 | posts.insert_one({'name': request.args.get('name')})
21 | return 'done inserted ' + request.args.get('name')
22 |
23 |
24 | @app.route('/get', methods = ['GET'])
25 | def get():
26 | if not request.args.get('name'):
27 | return 'insert name'
28 | try:
29 | return posts.find_one({'name': request.args.get('name')})['name']
30 | except:
31 | return 'not found'
32 |
33 |
34 | if __name__ == '__main__':
35 | app.run(debug = True, host = '0.0.0.0', port = 5000)
36 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | /usr/bin/mongod --fork --logpath /var/log/mongod.log
4 | bash eventlet.sh
5 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-mongodb:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '27017:27017' # mongodb
12 | - '5000:5000' # flask
13 | volumes:
14 | - ..:/app
15 | - ..:/data/db
16 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:5000
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid app:app
3 |
4 |
--------------------------------------------------------------------------------
/basic-backend/2.flask-mongodb/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | pymongo
3 | eventlet
4 | gunicorn
5 |
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel
9 |
10 | COPY requirements.txt ./
11 | RUN pip3 install -r requirements.txt
12 |
13 | ENV LC_ALL C.UTF-8
14 | ENV LANG C.UTF-8
15 |
16 | WORKDIR /app
17 |
18 | COPY . /app
19 |
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Try some examples,
9 | ```bash
10 | curl localhost:5000 -X GET
11 | ```
12 | ```text
13 | {"hello": "world"}
14 | ```
15 | ```bash
16 | curl localhost:5000/todo1 -d "data=take milk" -X PUT
17 | curl localhost:5000/todo1 -X GET
18 | ```
19 | ```text
20 | {"todo1": "take milk"}
21 | ```
22 |
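3. The app also registers `Todo` and `TodoList` resources on `/todos`; a quick sketch of exercising them (responses depend on the seeded `TODOS` dict),
```bash
curl localhost:5000/todos -X GET                      # list the seeded todos
curl localhost:5000/todos -d "task=buy milk" -X POST  # appends todo4
curl localhost:5000/todos/todo2 -d "task=ship it" -X PUT
curl localhost:5000/todos/todo3 -X DELETE
```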
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | from flask_restful import reqparse, abort, Api, Resource
3 |
4 | app = Flask(__name__)
5 | api = Api(app)
6 |
7 | # simple in-memory caching; do not do this in a real deployment, use a database
8 | todos = {}
9 | TODOS = {
10 | 'todo1': {'task': 'build an API'},
11 | 'todo2': {'task': '?????'},
12 | 'todo3': {'task': 'profit!'},
13 | }
14 |
15 |
16 | def abort_if_todo_doesnt_exist(todo_id):
17 | if todo_id not in TODOS:
18 | abort(404, message = "Todo {} doesn't exist".format(todo_id))
19 |
20 |
21 | parser = reqparse.RequestParser()
22 | parser.add_argument('task')
23 |
24 |
25 | class HelloWorld(Resource):
26 | def get(self):
27 | return {'hello': 'world'}
28 |
29 |
30 | class TodoSimple(Resource):
31 | def get(self, todo_id):
32 | try:
33 | return {todo_id: todos[todo_id]}
34 | except:
35 | return {'error': 'todo not found'}
36 |
37 | def put(self, todo_id):
38 | todos[todo_id] = request.form['data']
39 | return {todo_id: todos[todo_id]}
40 |
41 |
42 | class Todo(Resource):
43 | def get(self, todo_id):
44 | abort_if_todo_doesnt_exist(todo_id)
45 | return TODOS[todo_id]
46 |
47 | def delete(self, todo_id):
48 | abort_if_todo_doesnt_exist(todo_id)
49 | del TODOS[todo_id]
50 | return '', 204
51 |
52 | def put(self, todo_id):
53 | args = parser.parse_args()
54 | task = {'task': args['task']}
55 | TODOS[todo_id] = task
56 | return task, 201
57 |
58 |
59 | class TodoList(Resource):
60 | def get(self):
61 | return TODOS
62 |
63 | def post(self):
64 | args = parser.parse_args()
65 | todo_id = int(max(TODOS.keys()).lstrip('todo')) + 1
66 | todo_id = 'todo%i' % todo_id
67 | TODOS[todo_id] = {'task': args['task']}
68 | return TODOS[todo_id], 201
69 |
70 |
71 | api.add_resource(HelloWorld, '/')
72 | api.add_resource(TodoSimple, '/<string:todo_id>')
73 | api.add_resource(Todo, '/todos/<string:todo_id>')
74 | api.add_resource(TodoList, '/todos')
75 |
76 | if __name__ == '__main__':
77 | app.run(debug = True, host = '0.0.0.0', port = 5000)
78 |
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | bash eventlet.sh
4 |
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-rest:
4 | build:
5 | context: ..
6 | target: base
7 | command: bash eventlet.sh
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask
12 | volumes:
13 | - ..:/app
14 |
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:5000
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid app:app
3 |
4 |
--------------------------------------------------------------------------------
/basic-backend/3.flask-rest-api/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | flask_restful
3 | eventlet
4 | gunicorn
5 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel \
9 | wget \
10 | cmake
11 |
12 | COPY requirements.txt ./
13 | RUN pip3 install -r requirements.txt
14 |
15 | ENV LC_ALL C.UTF-8
16 | ENV LANG C.UTF-8
17 |
18 | RUN wget http://download.redis.io/redis-stable.tar.gz && tar xvzf redis-stable.tar.gz
19 |
20 | RUN cd redis-stable && make install
21 |
22 | WORKDIR /app
23 |
24 | COPY . /app
25 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Try some examples,
9 | ```bash
10 | curl localhost:5000 -X GET
11 | ```
12 | ```text
13 | Hey, we have Flask with Redis in a Docker container!
14 | ```
15 | ```bash
16 | curl localhost:5000/first-channel -d "data=from first channel" -X PUT
17 | ```
18 | ```text
19 | "from first channel"
20 | ```
21 |
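3. The service also exposes simple key/value and channel-management endpoints; a sketch based on `app.py`,
```bash
curl "localhost:5000/insert?name=husein&job=devops" -X GET   # stores the extra args as JSON under key `husein`
curl "localhost:5000/get?name=husein" -X GET
curl localhost:5000/first-channel -X GET                      # read a pending message on the channel, if any
curl localhost:5000/third-channel -X POST                     # subscribe a new channel
```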
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | from flask_restful import reqparse, abort, Api, Resource
3 | import redis
4 | import json
5 |
6 | app = Flask(__name__)
7 | api = Api(app)
8 | r = redis.StrictRedis(host = 'localhost', port = 6379, db = 0)
9 | publishers = {'first-channel': r.pubsub(), 'second-channel': r.pubsub()}
10 | publishers['first-channel'].subscribe('first-channel')
11 | publishers['second-channel'].subscribe('second-channel')
12 |
13 |
14 | @app.route('/')
15 | def hello_world():
16 | return 'Hey, we have Flask with Redis in a Docker container!'
17 |
18 |
19 | @app.route('/insert', methods = ['GET'])
20 | def insert():
21 | dictionary = request.args.to_dict()
22 | filtered_dictionary = {
23 | i: dictionary[i] for i in dictionary.keys() if i not in ['name']
24 | }
25 | r.set(dictionary['name'], json.dumps(filtered_dictionary))
26 | return 'inserted into redis'
27 |
28 |
29 | @app.route('/get', methods = ['GET'])
30 | def get():
31 | try:
32 | return r.get(request.args.get('name')).decode('utf-8')
33 | except:
34 | return 'not found'
35 |
36 |
37 | class Publishers(Resource):
38 | def get(self, id):
39 | try:
40 | return publishers[id].get_message()['data'].decode('utf-8')
41 | except:
42 | return publishers[id].get_message()['data']
43 |
44 | def put(self, id):
45 | r.publish(id, request.form['data'])
46 | return request.form['data']
47 |
48 | def post(self, id):
49 | publishers[id] = r.pubsub()
50 | publishers[id].subscribe(id)
51 | return id, 201
52 |
53 |
54 | api.add_resource(Publishers, '/<string:id>')
55 |
56 | if __name__ == '__main__':
57 | app.run(debug = True, host = '0.0.0.0', port = 5000)
58 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | redis-server --daemonize yes
4 | bash eventlet.sh
5 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-redis:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask
12 | volumes:
13 | - ..:/app
14 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:5000
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid app:app
3 |
4 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/gunicorn.pid:
--------------------------------------------------------------------------------
1 | 14
2 |
--------------------------------------------------------------------------------
/basic-backend/4.flask-redis-pubsub/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | redis
3 | flask_restful
4 | eventlet
5 | gunicorn
6 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV MYSQL_PWD husein
4 | RUN echo "mysql-server mysql-server/root_password password $MYSQL_PWD" | debconf-set-selections
5 | RUN echo "mysql-server mysql-server/root_password_again password $MYSQL_PWD" | debconf-set-selections
6 |
7 | RUN apt-get update && apt-get install -y \
8 | curl \
9 | git \
10 | build-essential libssl-dev libffi-dev \
11 | python3-dev \
12 | mysql-server \
13 | python3 \
14 | python3-pip \
15 | python3-wheel
16 |
17 | COPY requirements.txt ./
18 | RUN pip3 install -r requirements.txt
19 |
20 | ENV LC_ALL C.UTF-8
21 | ENV LANG C.UTF-8
22 |
23 | WORKDIR /app
24 |
25 | COPY . /app
26 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Try some examples,
9 | ```bash
10 | curl localhost:5000/ -d "username=huseinzol05&first_name=husein&last_name=zolkepli&password=comel" -X PUT
11 | ```
12 | ```text
13 | "success {\"password\": \"comel\", \"first_name\": \"husein\", \"last_name\": \"zolkepli\", \"username\": \"huseinzol05\"}"
14 | ```
15 | ```bash
16 | curl localhost:5000/ -d "username=huseinzol05" -X GET
17 | ```
18 | ```text
19 | "[10001, \"huseinzol05\", \"husein\", \"zolkepli\", \"comel\"]"
20 | ```
21 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | import pymysql
3 | import json
4 | from flask_restful import reqparse, abort, Api, Resource
5 |
6 | app = Flask(__name__)
7 | api = Api(app)
8 |
9 | required = ['username', 'first_name', 'last_name', 'password']
10 | connection = pymysql.connect(
11 | host = 'localhost', user = 'root', password = 'husein', db = 'testdb'
12 | )
13 | cursor = connection.cursor()
14 |
15 |
16 | def abort_if_todo_doesnt_exist(todo_id):
17 | if todo_id not in TODOS:
18 | abort(404, message = "Todo {} doesn't exist".format(todo_id))
19 |
20 |
21 | class Mysql(Resource):
22 | def get(self):
23 | sql = 'SELECT * FROM user_details WHERE username=%s'
24 | cursor.execute(sql, (request.form['username'],))
25 | result = cursor.fetchone()
26 | return json.dumps(result)
27 |
28 | def put(self):
29 | to_dict = request.form.to_dict()
30 | keys = ['`%s`' % (i) for i in to_dict.keys()]
31 | if sum([i in required for i in to_dict.keys()]) != len(required):
32 | return 'not enough parameters'
33 | sql = (
34 | 'INSERT INTO user_details ('
35 | + ','.join(keys)
36 | + ') VALUES ('
37 | + ','.join(['%s'] * len(keys))
38 | + ')'
39 | )
40 | cursor.execute(sql, tuple([to_dict[i] for i in to_dict.keys()]))
41 | connection.commit()
42 | return 'success ' + json.dumps(to_dict)
43 |
44 |
45 | api.add_resource(Mysql, '/')
46 |
47 | if __name__ == '__main__':
48 | app.run(debug = True, host = '0.0.0.0', port = 5000)
49 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | /etc/init.d/mysql start
4 | mysql -u root -e "create database testdb;"
5 | mysql testdb < sample.sql
6 | bash eventlet.sh
7 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-mysql:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '3306:3306' # mysql
12 | - '5000:5000' # flask
13 | volumes:
14 | - ..:/app
15 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:5000
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid app:app
3 |
4 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | eventlet
3 | gunicorn
4 | flask_restful
5 | PyMySQL
6 |
--------------------------------------------------------------------------------
/basic-backend/5.flask-mysql-rest-api/sample.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- Database: `samplevideo_db`
3 | --
4 |
5 | -- --------------------------------------------------------
6 |
7 | --
8 | -- Table structure for table `user_details`
9 | --
10 |
11 | CREATE TABLE IF NOT EXISTS `user_details` (
12 | `user_id` int(11) NOT NULL AUTO_INCREMENT,
13 | `username` varchar(255) DEFAULT NULL,
14 | `first_name` varchar(50) DEFAULT NULL,
15 | `last_name` varchar(50) DEFAULT NULL,
16 | `password` varchar(50) DEFAULT NULL,
17 | PRIMARY KEY (`user_id`)
18 | ) ENGINE=MyISAM DEFAULT CHARSET=latin1 AUTO_INCREMENT=10001 ;
19 |
20 | --
21 | -- Dumping data for table `user_details`
22 | --
23 |
24 | INSERT INTO `user_details` (`user_id`, `username`, `first_name`, `last_name`, `password`) VALUES
25 | (1, 'rogers63', 'david', 'john', 'e6a33eee180b07e563d74fee8c2c66b8');
26 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV POSTGRES_USER root
4 | ENV POSTGRES_PASSWORD root
5 |
6 | RUN apt-get update && apt-get install -y \
7 | curl \
8 | git \
9 | postgresql postgresql-contrib \
10 | python3 \
11 | python3-pip \
12 | python3-wheel
13 |
14 | COPY requirements.txt ./
15 | RUN pip3 install -r requirements.txt
16 |
17 | ENV LC_ALL C.UTF-8
18 | ENV LANG C.UTF-8
19 |
20 | WORKDIR /app
21 |
22 | COPY . /app
23 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Try some examples,
9 | ```bash
10 | curl localhost:5000/ -d "username=huseinzol05&first_name=husein&last_name=zolkepli&password=comel" -X PUT
11 | ```
12 | ```text
13 | "success {\"password\": \"comel\", \"first_name\": \"husein\", \"last_name\": \"zolkepli\", \"username\": \"huseinzol05\"}"
14 | ```
15 | ```bash
16 | curl localhost:5000/ -d "username=huseinzol05" -X GET
17 | ```
18 | ```text
19 | "[10001, \"huseinzol05\", \"husein\", \"zolkepli\", \"comel\"]"
20 | ```
21 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request
2 | import psycopg2
3 | import json
4 | from flask_restful import reqparse, abort, Api, Resource
5 |
6 | app = Flask(__name__)
7 | api = Api(app)
8 |
9 | required = ['username', 'first_name', 'last_name', 'pass']
10 | connection = psycopg2.connect(
11 | host = 'localhost', database = 'testdb', user = 'root', password = 'root'
12 | )
13 | cursor = connection.cursor()
14 |
15 |
16 | def abort_if_todo_doesnt_exist(todo_id):
17 | if todo_id not in TODOS:
18 | abort(404, message = "Todo {} doesn't exist".format(todo_id))
19 |
20 |
21 | class Postgres(Resource):
22 | def get(self):
23 | sql = "SELECT * FROM user_details WHERE username='%s'" % (
24 | request.form['username']
25 | )
26 | cursor.execute(sql)
27 | result = cursor.fetchone()
28 | return json.dumps(result)
29 |
30 | def put(self):
31 | to_dict = request.form.to_dict()
32 | keys = ['%s' % (i) for i in to_dict.keys()]
33 | if sum([i in required for i in to_dict.keys()]) != len(required):
34 | return 'not enough parameters'
35 | sql = (
36 | 'INSERT INTO user_details ('
37 | + ','.join(keys)
38 | + ') VALUES ('
39 | + ','.join(['%s'] * len(keys))
40 | + ')'
41 | )
42 | cursor.execute(sql, tuple([to_dict[i] for i in to_dict.keys()]))
43 | connection.commit()
44 | return 'success ' + json.dumps(to_dict)
45 |
46 |
47 | api.add_resource(Postgres, '/')
48 |
49 | if __name__ == '__main__':
50 | app.run(debug = True, host = '0.0.0.0', port = 5000)
51 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec flask-postgres bash
4 |
5 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | service postgresql start
4 | su -c "psql -c \"CREATE ROLE root WITH LOGIN PASSWORD 'root' \"" postgres
5 | su -c "psql -c \"ALTER USER root WITH SUPERUSER \"" postgres
6 | createdb testdb
7 | psql -d testdb -f sample.sql
8 | bash eventlet.sh
9 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-postgres:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask
12 | volumes:
13 | - ..:/app
14 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:5000
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid app:app
3 |
4 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | eventlet
3 | gunicorn
4 | flask_restful
5 | psycopg2
6 |
--------------------------------------------------------------------------------
/basic-backend/6.flask-postgres-rest-api/sample.sql:
--------------------------------------------------------------------------------
1 | --
2 | -- Database: `samplevideo_db`
3 | --
4 |
5 | -- --------------------------------------------------------
6 |
7 | --
8 | -- Table structure for table `user_details`
9 | --
10 |
11 | CREATE TABLE IF NOT EXISTS user_details (
12 | username varchar(255) DEFAULT NULL,
13 | first_name varchar(50) DEFAULT NULL,
14 | last_name varchar(50) DEFAULT NULL,
15 | pass varchar(50) DEFAULT NULL
16 | );
17 |
18 | --
19 | -- Dumping data for table `user_details`
20 | --
21 |
22 | INSERT INTO user_details (username, first_name, last_name, pass) VALUES
23 | ('rogers63', 'david', 'john', 'e6a33eee180b07e563d74fee8c2c66b8');
24 |
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel \
9 | wget \
10 | openjdk-8-jdk-headless \
11 | unzip
12 |
13 | COPY requirements.txt ./
14 | RUN pip3 install -r requirements.txt
15 |
16 | ENV LC_ALL C.UTF-8
17 | ENV LANG C.UTF-8
18 |
19 | RUN apt-get update && apt-get install -y curl supervisor
20 |
21 | WORKDIR /app
22 |
23 | RUN wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-6.3.2.zip
24 |
25 | RUN unzip elasticsearch-6.3.2.zip
26 |
27 | COPY . /app
28 |
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Try some examples,
9 | ```bash
10 | curl localhost:9200/recipes/_search?q=title:salad -X GET
11 | ```
12 | ```text
13 | {"took":62,"timed_out":false,"_shards":{"total":1,"successful":1,"skipped":0,"failed":0},"hits":{"total":10,"max_score":0.054237623,"hits":[{"_index":"recipes","_type":"salads","_id":"LtlzD2UBBv9LAuM_3gMX","_score":0.054237623,"_source":{"ingredients": [{"step": "1/4 cup basil leaves"}, {"step": "4 cups 1/2-inch cubes watermelon"}, {"step": "2 teaspoons lemon juice"}, {"step": "1/4 teaspoon kosher salt"}, {"step": "1/4 teaspoon chili powder"}], "description": "A quick salad of watermelon and basil. The chili powder plays well with the sweetness of the melon.", "submitter": "Chefthompson.com", "title": "Watermelon Basil Salad", "calories": "10"}}
14 | ```
15 |
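3. To confirm the index exists before searching, the standard Elasticsearch cat API works too,
```bash
curl localhost:9200/_cat/indices?v -X GET
```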
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | useradd elastic
4 | chmod 777 -R elasticsearch-6.3.2
5 | #supervisord --nodaemon -c develop/supervisord.conf
6 | su -c "elasticsearch-6.3.2/bin/elasticsearch -d" elastic
7 | python3 app.py
8 |
9 |
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask-elastic:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '9200:9200' # elastic
12 | - '5000:5000' # flask
13 |
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/compose/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:elasticsearch]
4 | command = su -c "elasticsearch-6.3.2/bin/elasticsearch" elastic
5 | stopasgroup = true
6 | autorestart = true
7 |
8 | [program:flask]
9 | command = bash eventlet.sh
10 | stopasgroup = true
11 | autorestart = true
12 |
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:5000
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid app:app
3 |
4 |
--------------------------------------------------------------------------------
/basic-backend/7.flask-elasticsearch/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | eventlet
3 | gunicorn
4 | elasticsearch
5 | bs4
6 | requests
7 | lxml
8 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip \
6 | python3-wheel \
7 | openjdk-8-jdk-headless \
8 | wget \
9 | apt-transport-https \
10 | supervisor
11 |
12 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
13 |
14 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
15 |
16 | RUN apt-get update && apt-get install -y elasticsearch
17 |
18 | RUN apt-get update && apt-get install -y kibana
19 |
20 | RUN apt-get update && apt-get install -y logstash
21 |
22 | RUN pip3 install Flask python-json-logger gunicorn python3-logstash
23 |
24 | WORKDIR /app
25 |
26 | COPY . /app
27 |
28 | RUN cp elasticsearch.yml /etc/elasticsearch/
29 |
30 | RUN cp kibana.yml /etc/kibana/
31 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Request url,
9 | ```html
10 | http://localhost:9200/_cat/indices?v
11 | ```
12 |
13 | 3. Check index,
14 | ```text
15 | health status index uuid pri rep docs.count docs.deleted store.size pri.store.size
16 | yellow open logstash-2018.09.30 IL6UjeHTTCKdL8be5hpOUw 5 1 0 0 460b 460b
17 | ```
18 |
19 | 4. Search on the index,
20 | ```html
21 | http://localhost:9200/logstash-2018.09.30/_search
22 | ```
23 |
24 | ```text
25 | {"took":147,"timed_out":false,"_shards":{"total":5,"successful":5,"skipped":0,"failed":0},"hits":{"total":4,"max_score":1.0,"hits":[{"_index":"logstash-2018.09.30","_type":"doc","_id":"lUw4KGYBBCQZE1CyH2wT","_score":1.0,"_source":{"@timestamp":"2018-09-30T02:04:18.472Z","host":"localhost","@version":"1","port":36286,"message":"172.22.0.1 - - [30/Sep/2018:02:04:18 +0000] \"GET / HTTP/1.1\" 200 41 \"-\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36\""}},{"_index":"logstash-2018.09.30","_type":"doc","_id":"k0w4KGYBBCQZE1CyHWyY","_score":1.0,"_source":{"@timestamp":"2018-09-30T02:04:17.203Z","host":"localhost","@version":"1","port":36286,"message":"172.22.0.1 - - [30/Sep/2018:02:04:16 +0000] \"GET / HTTP/1.1\" 200 41 \"-\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36\""}},{"_index":"logstash-2018.09.30","_type":"doc","_id":"lkw4KGYBBCQZE1CyH2zP","_score":1.0,"_source":{"@timestamp":"2018-09-30T02:04:18.658Z","host":"localhost","@version":"1","port":36286,"message":"172.22.0.1 - - [30/Sep/2018:02:04:18 +0000] \"GET / HTTP/1.1\" 200 41 \"-\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36\""}},{"_index":"logstash-2018.09.30","_type":"doc","_id":"lEw4KGYBBCQZE1CyHWzg","_score":1.0,"_source":{"@timestamp":"2018-09-30T02:04:18.160Z","host":"localhost","@version":"1","port":36286,"message":"172.22.0.1 - - [30/Sep/2018:02:04:18 +0000] \"GET / HTTP/1.1\" 200 41 \"-\" \"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36\""}}]}}
26 | ```
27 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route('/')
7 | def hello_world():
8 | return 'Hey, we have Flask in a Docker container!'
9 |
10 |
11 | @app.route('/members')
12 | def members():
13 | return 'you can put anything after /members/'
14 |
15 |
16 | @app.route('/members/<string:name>/')
17 | def getMember(name):
18 | return name
19 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec logstash bash
4 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | service elasticsearch start
4 | service kibana start
5 | supervisord --nodaemon -c supervisord.conf
6 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | logstash:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '9200:9200' # elastic
12 | - '5601:5601' # kibana
13 | - '8000:8000' # flask
14 | - '9000:9000' # supervisor
15 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/gunicorn.conf:
--------------------------------------------------------------------------------
1 | [loggers]
2 | keys=root, logstash.error, logstash.access
3 |
4 | [handlers]
5 | keys=console, logstash
6 |
7 | [formatters]
8 | keys=generic, access, json
9 |
10 | [logger_root]
11 | level=INFO
12 | handlers=console
13 |
14 | [logger_logstash.error]
15 | level=INFO
16 | handlers=logstash
17 | propagate=1
18 | qualname=gunicorn.error
19 |
20 | [logger_logstash.access]
21 | level=INFO
22 | handlers=logstash
23 | propagate=0
24 | qualname=gunicorn.access
25 |
26 | [handler_console]
27 | class=StreamHandler
28 | formatter=generic
29 | args=(sys.stdout, )
30 |
31 | [handler_logstash]
32 | class=logstash.TCPLogstashHandler
33 | formatter=json
34 | args=('localhost',5959)
35 |
36 | [formatter_generic]
37 | format=%(asctime)s [%(process)d] [%(levelname)s] %(message)s
38 | datefmt=%Y-%m-%d %H:%M:%S
39 | class=logging.Formatter
40 |
41 | [formatter_access]
42 | format=%(message)s
43 | class=logging.Formatter
44 |
45 | [formatter_json]
46 | class=pythonjsonlogger.jsonlogger.JsonFormatter
47 |
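This config routes gunicorn's error and access loggers to `logstash.TCPLogstashHandler` on port 5959; `logstash.conf` listens on that TCP input and ships the JSON events to Elasticsearch. A minimal sketch of sending an application-level log through the same pipeline, assuming the `python3-logstash` package installed in the Dockerfile (the logger name and `extra` field here are illustrative):
```python
import logging
import logstash  # provided by the python3-logstash package

logger = logging.getLogger('flask-app')  # hypothetical logger name
logger.setLevel(logging.INFO)
# same TCP input that logstash.conf reads (json codec on port 5959)
logger.addHandler(logstash.TCPLogstashHandler('localhost', 5959, version = 1))
logger.info('request served', extra = {'endpoint': '/'})
```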
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/logstash.conf:
--------------------------------------------------------------------------------
1 | input {
2 | tcp {
3 | port => 5959
4 | codec => json
5 | }
6 | }
7 | output {
8 | elasticsearch {
9 | hosts => ["localhost:9200"]
10 | }
11 | }
12 |
--------------------------------------------------------------------------------
/basic-backend/8.flask-gunicorn-logstash/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:logstash]
4 | command = /usr/share/logstash/bin/logstash -f logstash.conf
5 | stopasgroup = true
6 | autorestart = true
7 |
8 | [program:gunicorn]
9 | command = gunicorn --bind 0.0.0.0:8000 --log-config gunicorn.conf app:app
10 | stopasgroup = true
11 | autorestart = true
12 |
13 | [inet_http_server]
14 | port = 0.0.0.0:9000
15 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/.htpasswd:
--------------------------------------------------------------------------------
1 | husein:$apr1$43sNIl7k$/Jtli3DNXgfYN.h01hKtq.
2 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip \
6 | python3-wheel \
7 | nginx
8 |
9 | ADD . /code
10 |
11 | WORKDIR /code
12 |
13 | RUN pip3 install mlflow
14 |
15 | RUN rm /etc/nginx/sites-enabled/default
16 |
17 | RUN cp mlflow-config.conf /etc/nginx/conf.d/
18 |
19 | ENV LC_ALL C.UTF-8
20 | ENV LANG C.UTF-8
21 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 
9 |
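2. Browse the UI at [http://localhost](http://localhost) behind nginx basic auth (user `husein`); the tracking server is also published directly on port 5000. A minimal sketch of logging a run against it, mirroring the quickstart artifacts under `mlruns/` (the tracking URI here is an assumption):
```python
import mlflow

# talk to the mlflow server directly, bypassing the nginx auth layer
mlflow.set_tracking_uri('http://localhost:5000')
with mlflow.start_run():
    mlflow.log_param('param1', 64)
    mlflow.log_metric('foo', 1.5)
```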
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec mlflow bash
4 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | service nginx start
4 | service nginx restart
5 | mlflow server --host 0.0.0.0 -w 1
6 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | mlflow:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # mlflow
12 | - '80:80' # nginx
13 | volumes:
14 | - ..:/code
15 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/mlflow-config.conf:
--------------------------------------------------------------------------------
1 | server {
2 | listen 80;
3 |
4 | location / {
5 | proxy_set_header Host $host;
6 | proxy_set_header X-Real-IP $remote_addr;
7 | proxy_pass http://localhost:5000;
8 | auth_basic "admin";
9 | auth_basic_user_file /code/.htpasswd;
10 |
11 | }
12 | }
13 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/mlflow.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/basic-backend/9.mlflow-nginx/mlflow.png
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/mlruns/0/97a8d84dedce44dbba3f3a60453f1d49/artifacts/test.txt:
--------------------------------------------------------------------------------
1 | hello world!
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/mlruns/0/97a8d84dedce44dbba3f3a60453f1d49/meta.yaml:
--------------------------------------------------------------------------------
1 | artifact_uri: /home/husein/mlruns/0/97a8d84dedce44dbba3f3a60453f1d49/artifacts
2 | end_time: 1538735823692
3 | entry_point_name: ''
4 | experiment_id: 0
5 | lifecycle_stage: active
6 | name: ''
7 | run_uuid: 97a8d84dedce44dbba3f3a60453f1d49
8 | source_name: mlflow/examples/quickstart/mlflow_tracking.py
9 | source_type: 4
10 | source_version: 259f049bad1a12c1ae830c7f68d568e74bd26c73
11 | start_time: 1538735823679
12 | status: 3
13 | tags: []
14 | user_id: husein
15 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/mlruns/0/97a8d84dedce44dbba3f3a60453f1d49/metrics/foo:
--------------------------------------------------------------------------------
1 | 1538735823 0.014539606209584055
2 | 1538735823 1.5455199487977334
3 | 1538735823 2.475175672054433
4 |
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/mlruns/0/97a8d84dedce44dbba3f3a60453f1d49/params/param1:
--------------------------------------------------------------------------------
1 | 64
--------------------------------------------------------------------------------
/basic-backend/9.mlflow-nginx/mlruns/0/meta.yaml:
--------------------------------------------------------------------------------
1 | artifact_location: /home/husein/mlruns/0
2 | experiment_id: 0
3 | lifecycle_stage: active
4 | name: Default
5 |
--------------------------------------------------------------------------------
/basic/1.autopep8/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | ```bash
4 | autopep8 --in-place --aggressive --recursive .
5 | ```
6 |
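As a quick illustration (hypothetical snippet, not from this repo), autopep8 normalises spacing and layout in place:
```python
# before
def f( x,y ) :
    return x+ y

# after `autopep8 --in-place --aggressive`
def f(x, y):
    return x + y
```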
--------------------------------------------------------------------------------
/basic/1.autopep8/malaya/__init__.py:
--------------------------------------------------------------------------------
1 | from .main import *
2 | from .tatabahasa import *
3 | from .num2word import to_cardinal, to_ordinal, to_ordinal_num, to_currency, to_year
4 | from .word2vec import *
5 | from .topic import train_lda, train_lsa, train_nmf
6 | from .sentiment import deep_sentiment
7 |
--------------------------------------------------------------------------------
/basic/1.autopep8/malaya/topic.py:
--------------------------------------------------------------------------------
1 | from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
2 | from sklearn.decomposition import TruncatedSVD, NMF, LatentDirichletAllocation
3 | from sklearn.pipeline import make_pipeline
4 | from sklearn.preprocessing import Normalizer
5 | import numpy as np
6 | import re
7 | from nltk.tokenize import word_tokenize
8 | from unidecode import unidecode
9 | import itertools
10 |
11 |
12 | class TOPIC:
13 | def __init__(self, features, comp):
14 | self.features = features
15 | self.comp = comp
16 |
17 | def get_topics(self, len_topic):
18 | results = []
19 | for no, topic in enumerate(self.comp.components_):
20 | results.append(
21 | (no, " ".join([self.features[i] for i in topic.argsort()[:-len_topic - 1:-1]])))
22 | return results
23 |
24 |
25 | def clearstring(string):
26 | string = unidecode(string)
27 | string = re.sub('[^A-Za-z ]+', '', string)
28 | string = word_tokenize(string)
29 | string = filter(None, string)
30 | string = [y.strip() for y in string]
31 | string = ' '.join(string).lower()
32 | return ''.join(''.join(s)[:2] for _, s in itertools.groupby(string))
33 |
34 |
35 | def train_lda(corpus, n_topics=10, max_df=0.95, min_df=2,
36 | cleaning=clearstring, stop_words='english'):
37 | if cleaning is not None:
38 | for i in range(len(corpus)):
39 | corpus[i] = cleaning(corpus[i])
40 | tf_vectorizer = CountVectorizer(
41 | max_df=max_df,
42 | min_df=min_df,
43 | stop_words=stop_words)
44 | tf = tf_vectorizer.fit_transform(corpus)
45 | tf_features = tf_vectorizer.get_feature_names()
46 | lda = LatentDirichletAllocation(
47 | n_topics=n_topics,
48 | max_iter=5,
49 | learning_method='online',
50 | learning_offset=50.,
51 | random_state=0).fit(tf)
52 | return TOPIC(tf_features, lda)
53 |
54 |
55 | def train_nmf(corpus, n_topics=10, max_df=0.95, min_df=2,
56 | cleaning=clearstring, stop_words='english'):
57 | if cleaning is not None:
58 | for i in range(len(corpus)):
59 | corpus[i] = cleaning(corpus[i])
60 | tfidf_vectorizer = TfidfVectorizer(
61 | max_df=max_df, min_df=min_df, stop_words=stop_words)
62 | tfidf = tfidf_vectorizer.fit_transform(corpus)
63 | tfidf_features = tfidf_vectorizer.get_feature_names()
64 | nmf = NMF(
65 | n_components=n_topics,
66 | random_state=1,
67 | alpha=.1,
68 | l1_ratio=.5,
69 | init='nndsvd').fit(tfidf)
70 | return TOPIC(tfidf_features, nmf)
71 |
72 |
73 | def train_lsa(corpus, n_topics, max_df=0.95, min_df=2,
74 | cleaning=clearstring, stop_words='english'):
75 | if cleaning is not None:
76 | for i in range(len(corpus)):
77 | corpus[i] = cleaning(corpus[i])
78 | tfidf_vectorizer = TfidfVectorizer(
79 | max_df=max_df, min_df=min_df, stop_words=stop_words)
80 | tfidf = tfidf_vectorizer.fit_transform(corpus)
81 | tfidf_features = tfidf_vectorizer.get_feature_names()
82 | tfidf = Normalizer().fit_transform(tfidf)
83 | lsa = TruncatedSVD(n_topics).fit(tfidf)
84 | return TOPIC(tfidf_features, lsa)
85 |
--------------------------------------------------------------------------------
/basic/1.autopep8/malaya/utils.py:
--------------------------------------------------------------------------------
1 | from tqdm import tqdm
2 | import requests
3 | chunk_size = 1024
4 |
5 |
6 | def download_file(url, filename):
7 | r = requests.get(url, stream=True)
8 | total_size = int(r.headers['content-length'])
9 | with open(filename, 'wb') as f:
10 | for data in tqdm(iterable=r.iter_content(
11 | chunk_size=chunk_size), total=total_size / chunk_size, unit='KB'):
12 | f.write(data)
13 |
--------------------------------------------------------------------------------
/basic/2.graph-dependencies/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | ```bash
4 | python3 pyan.py malaya/*.py --colored --annotate --grouped --dot > malaya.dot
5 | dot -Tsvg malaya.dot > malaya.svg
6 | ```
7 |
8 | 
9 |
10 | 
11 |
--------------------------------------------------------------------------------
/basic/2.graph-dependencies/malaya-graph.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/basic/2.graph-dependencies/malaya-graph.png
--------------------------------------------------------------------------------
/basic/2.graph-dependencies/malaya/__init__.py:
--------------------------------------------------------------------------------
1 | from .main import *
2 | from .tatabahasa import *
3 | from .num2word import to_cardinal,to_ordinal,to_ordinal_num,to_currency,to_year
4 | from .word2vec import *
5 | from .topic import train_lda, train_lsa, train_nmf
6 | from .sentiment import deep_sentiment
7 |
--------------------------------------------------------------------------------
/basic/2.graph-dependencies/malaya/tatabahasa.py:
--------------------------------------------------------------------------------
1 | tanya_list = ['kenapa','bila','siapa','mengapa','apa','bagaimana','berapa','mana']
2 | perintah_list = ['jangan','sila','tolong','harap','usah','jemput','minta']
3 | pangkal_list = ['maka','alkisah','arakian','syahdah','adapun','bermula','kalakian']
4 | bantu_list = ['akan','telah','boleh','mesti','belum','sudah','dapat','masih','harus','hendak']
5 | penguat_list = ['paling','agak','sungguh','amat','terlalu','nian','benar','paling']
6 | penegas_list = ['jua','juga','sahaja','hanya','memang','lagi','pun']
7 | nafi_list = ['bukan','tidak','tak','tiada','tidaklah','tidakkah']
8 | pemeri_list = ['ialah','adalah']
9 | sendi_list = ['akan','kepada','terhadap','bagi','untuk','dari','daripada','di','dengan','hingga','sampai',
10 | 'ke','kepada','oleh','pada','sejak','seperti','umpama','bak','tentang','laksanabagai',
11 | 'semenjak','dalam','antara']
12 | pembenar_list = ['ya','benar','betul']
13 | nombor_list = ['satu','dua','tiga','empat','lima','enam','tujuh','lapan','sembilan','kosong']
14 | suku_bilangan_list = ['per','suku','setengah','separuh','tiga suku']
15 | pisahan_list = ['setiap','tiap']
16 | keterangan_list = ['begitu','begini','demikian','perlahan','cepat','lena','akan','sedang','belum',
17 | 'telah','sekarang','sebentar','semalam','mungkin','agak','barangkali','pasti','tentu',
18 | 'sudah','selalu','kadang','acapkali','sesekali','yang']
19 | arah_list = ['atas','bawah','tepi','antara','hadapan','utara','sisi','luar']
20 | hubung_list = ['agar','apabila','atau','bahawa','dan','hingga','jika','jikalau','kecuali','kerana',
21 | 'lalu','manakala','sambil','serta','semenjak','sementara','sungguhpun','supaya','walaupun','tetapi','berkenan','berkenaan']
22 | gantinama_list = ['aku','saya','hamba','patik','beta','kami','kita','anda','awak','engkau','tuanku','kalian',
23 | 'kamu','baginda','beliau','mereka','ini','itu','sini','situ','sana','kini','dia']
24 |
25 | # pos permulaan[:-4]
26 | permulaan = ['bel','be','se','ter','men','memper','di','pe','me','ke','ber','pen','per']
27 | # pos hujung [:1]
28 | hujung = ['kan', 'kah','lah','tah','nya','an','wan','wati','ita']
29 | alphabet = 'qwertyuiopasdfghjklzxcvbnm'
30 |
31 | tatabahasa_dict = {'KT':tanya_list,'KP':perintah_list,'KPA':pangkal_list,'KB':bantu_list,'KPENGUAT':penguat_list,
32 | 'KPENEGAS':penegas_list,'NAFI':nafi_list, 'KPEMERI':pemeri_list,'KS':sendi_list,'KPEMBENAR':pembenar_list,
33 | 'NO':nombor_list,'SUKU':suku_bilangan_list,'PISAHAN':pisahan_list,'KETERANGAN':keterangan_list,
34 | 'ARAH':arah_list,'KH':hubung_list,'GN':gantinama_list}
35 |
--------------------------------------------------------------------------------
/basic/2.graph-dependencies/malaya/topic.py:
--------------------------------------------------------------------------------
1 | from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
2 | from sklearn.decomposition import TruncatedSVD, NMF, LatentDirichletAllocation
3 | from sklearn.pipeline import make_pipeline
4 | from sklearn.preprocessing import Normalizer
5 | import numpy as np
6 | import re
7 | from nltk.tokenize import word_tokenize
8 | from unidecode import unidecode
9 | import itertools
10 |
11 | class TOPIC:
12 | def __init__(self,features,comp):
13 | self.features = features
14 | self.comp = comp
15 | def get_topics(self, len_topic):
16 | results = []
17 | for no, topic in enumerate(self.comp.components_):
18 | results.append((no, " ".join([self.features[i] for i in topic.argsort()[:-len_topic -1:-1]])))
19 | return results
20 |
21 | def clearstring(string):
22 | string = unidecode(string)
23 | string = re.sub('[^A-Za-z ]+', '', string)
24 | string = word_tokenize(string)
25 | string = filter(None, string)
26 | string = [y.strip() for y in string]
27 | string = ' '.join(string).lower()
28 | return ''.join(''.join(s)[:2] for _, s in itertools.groupby(string))
29 |
30 | def train_lda(corpus,n_topics=10, max_df=0.95, min_df=2,cleaning=clearstring,stop_words='english'):
31 | if cleaning is not None:
32 | for i in range(len(corpus)): corpus[i] = cleaning(corpus[i])
33 | tf_vectorizer = CountVectorizer(max_df=max_df, min_df=min_df, stop_words=stop_words)
34 | tf = tf_vectorizer.fit_transform(corpus)
35 | tf_features = tf_vectorizer.get_feature_names()
36 | lda = LatentDirichletAllocation(n_topics=n_topics, max_iter = 5, learning_method = 'online', learning_offset=50., random_state=0).fit(tf)
37 | return TOPIC(tf_features,lda)
38 |
39 | def train_nmf(corpus,n_topics=10, max_df=0.95, min_df=2,cleaning=clearstring,stop_words='english'):
40 | if cleaning is not None:
41 | for i in range(len(corpus)): corpus[i] = cleaning(corpus[i])
42 | tfidf_vectorizer = TfidfVectorizer(max_df = max_df, min_df = min_df, stop_words = stop_words)
43 | tfidf = tfidf_vectorizer.fit_transform(corpus)
44 | tfidf_features = tfidf_vectorizer.get_feature_names()
45 | nmf = NMF(n_components=n_topics, random_state = 1, alpha =.1, l1_ratio=.5, init = 'nndsvd').fit(tfidf)
46 | return TOPIC(tfidf_features,nmf)
47 |
48 | def train_lsa(corpus,n_topics, max_df=0.95, min_df=2,cleaning=clearstring,stop_words='english'):
49 | if cleaning is not None:
50 | for i in range(len(corpus)): corpus[i] = cleaning(corpus[i])
51 | tfidf_vectorizer = TfidfVectorizer(max_df = max_df, min_df = min_df, stop_words = stop_words)
52 | tfidf = tfidf_vectorizer.fit_transform(corpus)
53 | tfidf_features = tfidf_vectorizer.get_feature_names()
54 | tfidf = Normalizer().fit_transform(tfidf)
55 | lsa = TruncatedSVD(n_topics).fit(tfidf)
56 | return TOPIC(tfidf_features,lsa)
57 |
--------------------------------------------------------------------------------
/basic/2.graph-dependencies/malaya/utils.py:
--------------------------------------------------------------------------------
1 | from tqdm import tqdm
2 | import requests
3 | chunk_size = 1024
4 |
5 | def download_file(url, filename):
6 | r = requests.get(url, stream = True)
7 | total_size = int(r.headers['content-length'])
8 | with open(filename, 'wb') as f:
9 | for data in tqdm(iterable = r.iter_content(chunk_size = chunk_size), total = total_size/chunk_size, unit = 'KB'):
10 | f.write(data)
11 |
--------------------------------------------------------------------------------
/basic/2.graph-dependencies/pyan.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | import re
5 | import sys
6 |
7 | from pyan import main
8 |
9 | if __name__ == '__main__':
10 | sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
11 | sys.exit(main())
12 |
--------------------------------------------------------------------------------
/basic/2.graph-dependencies/pyan/__init__.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 |
4 | from .main import main
5 |
6 | __version__ = '1.0.2'
7 |
--------------------------------------------------------------------------------
/misc/elasticsearch-cerebro/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | git \
5 | python3 \
6 | python3-pip \
7 | python3-wheel \
8 | openjdk-8-jdk-headless \
9 | wget
10 |
11 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
12 |
13 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
14 |
15 | RUN apt-get update && apt-get install -y elasticsearch
16 |
17 | RUN apt-get update && apt-get install -y kibana
18 |
19 | WORKDIR /app
20 |
21 | COPY . /app
22 |
23 | RUN cp elasticsearch.yml /etc/elasticsearch/
24 |
25 | RUN cp kibana.yml /etc/kibana/
26 |
27 | RUN wget https://github.com/lmenezes/cerebro/releases/download/v0.8.1/cerebro-0.8.1.tgz
28 |
29 | RUN tar -zxf cerebro-0.8.1.tgz
30 |
--------------------------------------------------------------------------------
/misc/elasticsearch-cerebro/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec cerebro bash
4 |
--------------------------------------------------------------------------------
/misc/elasticsearch-cerebro/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/misc/elasticsearch-cerebro/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | service elasticsearch start
4 | service kibana start
5 | cerebro-0.8.1/bin/cerebro -Dhttp.port=6000 -Dhttp.address=0.0.0.0
6 |
--------------------------------------------------------------------------------
/misc/elasticsearch-cerebro/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | cerebro:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '9200:9200' # elastic
12 | - '5601:5601' # kibana
13 | - '6000:6000' # cerebro
14 |
--------------------------------------------------------------------------------
/misc/elasticsearch-cerebro/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/misc/elasticsearch-kibana/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel \
9 | openjdk-8-jdk-headless \
10 | wget \
11 | apt-transport-https \
12 | supervisor
13 |
14 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
15 |
16 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
17 |
18 | RUN apt-get update && apt-get install -y elasticsearch
19 |
20 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
21 |
22 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
23 |
24 | RUN apt-get update && apt-get install -y kibana
25 |
26 | RUN pip3 install Flask
27 |
28 | WORKDIR /app
29 |
30 | COPY . /app
31 |
32 | RUN cp elasticsearch.yml /etc/elasticsearch/
33 |
34 | RUN cp kibana.yml /etc/kibana/
35 |
--------------------------------------------------------------------------------
/misc/elasticsearch-kibana/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route('/')
7 | def hello_world():
8 | return 'Hey, we have Flask in a Docker container!'
9 |
10 |
11 | @app.route('/members')
12 | def members():
13 | return 'you can put anything after /members/'
14 |
15 |
16 | @app.route('/members/<string:name>/')
17 | def getMember(name):
18 | return name
19 |
20 |
21 | if __name__ == '__main__':
22 | app.run(debug = True, host = '0.0.0.0', port = 5000)
23 |
--------------------------------------------------------------------------------
/misc/elasticsearch-kibana/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec els-kibana bash
4 |
--------------------------------------------------------------------------------
/misc/elasticsearch-kibana/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/misc/elasticsearch-kibana/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | service elasticsearch start
4 | service kibana start
5 | python3 app.py
6 |
--------------------------------------------------------------------------------
/misc/elasticsearch-kibana/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | els-kibana:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '9200:9200' # elastic
12 | - '5601:5601' # kibana
13 | volumes:
14 | - ..:/app
15 |
--------------------------------------------------------------------------------
/misc/elasticsearch-kibana/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/core-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <configuration>
3 |     <property>
4 |         <name>fs.defaultFS</name>
5 |         <value>hdfs://hadoop-master:9000/</value>
6 |     </property>
7 |     <property>
8 |         <name>hadoop.http.staticuser.user</name>
9 |         <value>husein</value>
10 |     </property>
11 | </configuration>
12 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | hadoop-master:
4 | build:
5 | context: master
6 | target: base
7 | networks:
8 | - hadoop
9 | command: bash start-all.sh
10 | environment:
11 | PYTHONPATH: .
12 | container_name: hadoop-master
13 | volumes:
14 | - ../:/app
15 | ports:
16 | - '8088:8088'
17 | - '50070:50070'
18 | - '8083:8082'
19 | - '9000:9000'
20 | - '9090:9090'
21 | - '9870:9870'
22 | - '9864:9864'
23 | - '19888:19888'
24 | - '8042:8042'
25 |
26 | hadoop-slave1:
27 | build:
28 | context: slave
29 | target: base
30 | networks:
31 | - hadoop
32 | command: bash start-all.sh
33 | container_name: hadoop-slave1
34 | environment:
35 | PYTHONPATH: .
36 |
37 | hadoop-slave2:
38 | build:
39 | context: slave
40 | target: base
41 | networks:
42 | - hadoop
43 | command: bash start-all.sh
44 | container_name: hadoop-slave2
45 | environment:
46 | PYTHONPATH: .
47 |
48 |
49 | networks:
50 | hadoop:
51 | external:
52 | name: hadoop_default
53 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/hdfs-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <configuration>
3 |     <property>
4 |         <name>dfs.replication</name>
5 |         <value>1</value>
6 |     </property>
7 |     <property>
8 |         <name>dfs.webhdfs.enabled</name>
9 |         <value>true</value>
10 |     </property>
11 |     <property>
12 |         <name>dfs.permissions</name>
13 |         <value>false</value>
14 |     </property>
15 | </configuration>
16 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/mapred-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <configuration>
3 |     <property>
4 |         <name>mapreduce.framework.name</name>
5 |         <value>yarn</value>
6 |     </property>
7 |     <property>
8 |         <name>yarn.app.mapreduce.am.env</name>
9 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
10 |     </property>
11 |     <property>
12 |         <name>mapreduce.map.env</name>
13 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
14 |     </property>
15 |     <property>
16 |         <name>mapreduce.reduce.env</name>
17 |         <value>HADOOP_MAPRED_HOME=${HADOOP_HOME}</value>
18 |     </property>
19 | </configuration>
20 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/master/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV HADOOP_HOME /opt/hadoop
4 | ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
5 |
6 | RUN apt-get update && apt-get install -y \
7 | python3 \
8 | python3-pip \
9 | openjdk-8-jdk \
10 | wget \
11 | openssh-server
12 |
13 | RUN wget http://www-eu.apache.org/dist/hadoop/common/hadoop-3.1.1/hadoop-3.1.1.tar.gz && \
14 | tar -xzf hadoop-3.1.1.tar.gz && \
15 | mv hadoop-3.1.1 $HADOOP_HOME
16 |
17 | RUN ssh-keygen -t rsa -f ~/.ssh/id_rsa -P '' && \
18 | cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
19 |
20 | ADD ../*xml $HADOOP_HOME/etc/hadoop/
21 | ADD ../slaves $HADOOP_HOME/etc/hadoop/
22 |
23 | ADD ../ssh_config /root/.ssh/config
24 |
25 | RUN pip3 install docutils
26 | RUN pip3 install luigi python-daemon jupyter
27 | RUN echo
28 |
29 | RUN jupyter notebook --generate-config
30 |
31 | RUN echo "" >> /root/.jupyter/jupyter_notebook_config.py
32 | RUN echo "c.NotebookApp.token = ''" >> /root/.jupyter/jupyter_notebook_config.py
33 |
34 | WORKDIR /app
35 |
36 | COPY . /app
37 |
38 | ENV LC_ALL C.UTF-8
39 | ENV LANG C.UTF-8
40 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/master/luigi.cfg:
--------------------------------------------------------------------------------
1 | [hadoop]
2 | python-executable=/usr/bin/python3
3 | command=/opt/hadoop/bin/hadoop
4 | streaming-jar=/opt/hadoop/share/hadoop/tools/lib/hadoop-streaming-3.1.1.jar
5 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/master/start-all.sh:
--------------------------------------------------------------------------------
1 | service ssh start
2 |
3 | $HADOOP_HOME/bin/hdfs namenode -format
4 |
5 | $HADOOP_HOME/sbin/start-yarn.sh
6 | $HADOOP_HOME/sbin/start-dfs.sh
7 |
8 | luigid --background
9 | jupyter notebook --ip=0.0.0.0 --port=9090 --allow-root
10 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/slave/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV HADOOP_HOME /opt/hadoop
4 | ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
5 |
6 | RUN apt-get update && apt-get install -y \
7 | openjdk-8-jdk \
8 | wget \
9 | openssh-server
10 |
11 | RUN wget http://www-eu.apache.org/dist/hadoop/common/hadoop-3.1.1/hadoop-3.1.1.tar.gz && \
12 | tar -xzf hadoop-3.1.1.tar.gz && \
13 | mv hadoop-3.1.1 $HADOOP_HOME
14 |
15 | RUN ssh-keygen -t rsa -f ~/.ssh/id_rsa -P '' && \
16 | cat ~/.ssh/id_rsa.pub >> ~/.ssh/authorized_keys
17 |
18 | ADD ../*xml $HADOOP_HOME/etc/hadoop/
19 | ADD ../slaves $HADOOP_HOME/etc/hadoop/
20 |
21 | ADD ../ssh_config /root/.ssh/config
22 |
23 | WORKDIR /app
24 |
25 | COPY start-all.sh /app
26 |
27 | ENV LC_ALL C.UTF-8
28 | ENV LANG C.UTF-8
29 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/slave/start-all.sh:
--------------------------------------------------------------------------------
1 | service ssh start
2 |
3 | $HADOOP_HOME/bin/hdfs namenode -format
4 |
5 | $HADOOP_HOME/sbin/start-yarn.sh
6 | $HADOOP_HOME/sbin/start-dfs.sh
7 |
8 | tail -f hadoop-root-resourcemanager-*.log | while read line; do echo $line; sleep 1; done
9 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/slaves:
--------------------------------------------------------------------------------
1 | hadoop-slave1
2 | hadoop-slave2
3 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/ssh_config:
--------------------------------------------------------------------------------
1 | Host localhost
2 | StrictHostKeyChecking no
3 |
4 | Host 0.0.0.0
5 | StrictHostKeyChecking no
6 |
7 | Host hadoop-*
8 | StrictHostKeyChecking no
9 | UserKnownHostsFile=/dev/null
10 |
--------------------------------------------------------------------------------
/misc/hadoop-cluster-luigi/yarn-site.xml:
--------------------------------------------------------------------------------
1 | <?xml version="1.0"?>
2 | <configuration>
3 |     <property>
4 |         <name>yarn.nodemanager.aux-services</name>
5 |         <value>mapreduce_shuffle</value>
6 |     </property>
7 |     <property>
8 |         <name>yarn.nodemanager.aux-services.mapreduce_shuffle.class</name>
9 |         <value>org.apache.hadoop.mapred.ShuffleHandler</value>
10 |     </property>
11 |     <property>
12 |         <name>yarn.resourcemanager.hostname</name>
13 |         <value>hadoop-master</value>
14 |     </property>
15 | </configuration>
16 |
--------------------------------------------------------------------------------
/misc/jupyter-notebook/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM debian:stretch-slim AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip
6 |
7 | RUN pip3 install scipy matplotlib seaborn numpy sklearn scikit-learn tensorflow jupyter
8 |
9 | ENV LC_ALL C.UTF-8
10 | ENV LANG C.UTF-8
11 |
12 | RUN jupyter notebook --generate-config
13 |
14 | RUN echo "" >> /root/.jupyter/jupyter_notebook_config.py
15 | RUN echo "c.NotebookApp.token = ''" >> /root/.jupyter/jupyter_notebook_config.py
16 |
17 | WORKDIR /app
18 |
--------------------------------------------------------------------------------
/misc/jupyter-notebook/README.md:
--------------------------------------------------------------------------------
1 | # Programming-Environment
2 |
3 | You only need Docker Compose to start the programming environment.
4 |
5 | ## How-to Start
6 | ```bash
7 | sudo compose/build
8 | ```
9 |
10 | The first run will be slow; subsequent runs are fast because the layers are already cached.
11 |
12 | You will see an output like,
13 | ```text
14 | ...:8080/?token=...
15 | ```
16 | Copy the entire line from :8080 to the end and paste it into your browser. Happy coding inside Jupyter :)
17 |
18 | ## How-to Bash
19 | ```bash
20 | # open a new terminal and run this while another terminal is running compose/build
21 | sudo compose/bash
22 | ```
23 |
24 | 
25 |
--------------------------------------------------------------------------------
/misc/jupyter-notebook/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec jupyter_notebook bash
4 |
--------------------------------------------------------------------------------
/misc/jupyter-notebook/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/misc/jupyter-notebook/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | jupyter_notebook:
4 | build:
5 | context: ..
6 | target: base
7 | command: jupyter notebook --ip=0.0.0.0 --port=8080 --allow-root
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '8080:8080'
12 | volumes:
13 | - ..:/app
14 |
--------------------------------------------------------------------------------
/misc/jupyter-notebook/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/misc/jupyter-notebook/jupyter.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/jupyter-notebook/jupyter.png
--------------------------------------------------------------------------------
/misc/jupyter-notebook/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | scipy
3 | tensorflow
4 | opencv-contrib-python
5 | jupyter
6 | matplotlib
7 | seaborn
8 | nteract_on_jupyter
9 |
--------------------------------------------------------------------------------
/misc/jupyterhub-github/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | jupyterhub:
5 | restart: always
6 | build:
7 | context: jupyterhub
8 | networks:
9 | - luigi-backend
10 | - backend
11 | command: bash start.sh
12 | ports:
13 | - '8000:8000'
14 | container_name: jupyterhub
15 | volumes:
16 | - './jupyter:/home'
17 |
--------------------------------------------------------------------------------
/misc/jupyterhub-github/jupyterhub/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:18.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | portaudio19-dev \
9 | libsm6 libxext6 libxrender-dev
10 |
11 | RUN curl -sL https://deb.nodesource.com/setup_8.x | bash -
12 | RUN apt-get install -y nodejs
13 |
14 | COPY requirements.txt ./
15 | RUN pip3 install -r requirements.txt
16 | RUN pip3 install tornado==5.1.1
17 |
18 | RUN npm install -g configurable-http-proxy
19 |
20 | ENV LC_ALL C.UTF-8
21 | ENV LANG C.UTF-8
22 |
23 | WORKDIR /app
24 |
25 | COPY . /app
26 |
--------------------------------------------------------------------------------
/misc/jupyterhub-github/jupyterhub/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | scipy
3 | tensorflow
4 | opencv-contrib-python
5 | jupyter
6 | matplotlib
7 | seaborn
8 | Flask
9 | flask-socketio
10 | flask-restful
11 | Pillow
12 | sklearn
13 | scikit-learn
14 | SpeechRecognition
15 | pyaudio
16 | gunicorn
17 | eventlet
18 | jupyterhub
19 | prophet
20 | oauthenticator
21 | elasticsearch
22 | elasticsearch_dsl
23 |
--------------------------------------------------------------------------------
/misc/jupyterhub-github/jupyterhub/start.sh:
--------------------------------------------------------------------------------
1 | # adduser --quiet --disabled-password --shell /bin/bash --home /home/jupyter --gecos "User" jupyter
2 | # echo "jupyter:jupyter123" | chpasswd
3 | #
4 | # adduser --quiet --disabled-password --shell /bin/bash --home /home/admin --gecos "User" admin
5 | # echo "admin:A5cwn9YVcqnekR6Y" | chpasswd
6 | #
7 | # mkdir /home/jupyter
8 | # mkdir /home/admin
9 | #
10 | # chown -R jupyter /home/jupyter
11 | # chown -R admin /home/admin
12 | export GITHUB_CLIENT_ID=
13 | export GITHUB_CLIENT_SECRET=
14 | export OAUTH_CALLBACK_URL=
15 | export CONFIGPROXY_AUTH_TOKEN=super-secret
16 | chmod 777 -R /home
17 | jupyterhub -f /app/jupyterhub_config_github.py
18 |
--------------------------------------------------------------------------------
/misc/jupyterhub/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel \
9 | portaudio19-dev \
10 | npm nodejs-legacy \
11 | libsm6 libxext6 libxrender-dev
12 |
13 | COPY requirements.txt ./
14 | RUN pip3 install -r requirements.txt
15 | RUN pip3 install tornado==5.1.1
16 |
17 | RUN npm install -g configurable-http-proxy
18 |
19 | ENV LC_ALL C.UTF-8
20 | ENV LANG C.UTF-8
21 |
22 | WORKDIR /app
23 |
24 | COPY . /app
25 |
--------------------------------------------------------------------------------
/misc/jupyterhub/README.md:
--------------------------------------------------------------------------------
1 | # Jupyterhub
2 |
3 | docker compose jupyterhub
4 |
5 | ## How-to Start
6 | ```bash
7 | sudo compose/build
8 | ```
9 |
10 | The first run will be slow; subsequent runs are fast because the layers are already cached.
11 |
12 | Open localhost:8000, log in with username jupyter and password jupyter123, and happy coding inside JupyterHub :)
13 |
14 | ## How-to Bash
15 | ```bash
16 | # open a new terminal and run this while another terminal is running compose/build
17 | sudo compose/bash
18 | ```
19 |
20 | 
21 |
--------------------------------------------------------------------------------
/misc/jupyterhub/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec jupyterhub bash
4 |
5 |
--------------------------------------------------------------------------------
/misc/jupyterhub/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/misc/jupyterhub/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | adduser --quiet --disabled-password --shell /bin/bash --home /home/jupyter --gecos "User" jupyter
4 | echo "jupyter:jupyter123" | chpasswd
5 | jupyterhub
6 |
--------------------------------------------------------------------------------
/misc/jupyterhub/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | jupyterhub:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '8000:8000'
12 | volumes:
13 | - ..:/app
14 |
--------------------------------------------------------------------------------
/misc/jupyterhub/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/misc/jupyterhub/jupyterhub.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/jupyterhub/jupyterhub.png
--------------------------------------------------------------------------------
/misc/jupyterhub/requirements.txt:
--------------------------------------------------------------------------------
1 | numpy
2 | scipy
3 | tensorflow
4 | opencv-contrib-python
5 | jupyter
6 | matplotlib
7 | seaborn
8 | Flask
9 | flask-socketio
10 | flask-restful
11 | Pillow
12 | sklearn
13 | scikit-learn
14 | SpeechRecognition
15 | pyaudio
16 | gunicorn
17 | eventlet
18 | jupyterhub
19 |
--------------------------------------------------------------------------------
/misc/kafka-cluster/.gitignore:
--------------------------------------------------------------------------------
1 | kafka-storage3
2 | kafka-storage2
3 | kafka-storage
4 |
--------------------------------------------------------------------------------
/misc/kafka-cluster/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
4 | ENV KM_VERSION=1.2.7
5 | ENV JMX_PORT=9999
6 |
7 | RUN apt-get update && apt-get install -y \
8 | python3 \
9 | python3-pip \
10 | python3-wheel \
11 | openjdk-8-jdk \
12 | wget \
13 | supervisor
14 |
15 | ADD . /code
16 |
17 | WORKDIR /code
18 |
19 | RUN tar -xvzf kafka_2.11-2.0.0.tgz
20 |
21 | RUN cp server.properties kafka_2.11-2.0.0/config/
22 | RUN cp server2.properties kafka_2.11-2.0.0/config/
23 | RUN cp server3.properties kafka_2.11-2.0.0/config/
24 |
25 | RUN echo
26 |
--------------------------------------------------------------------------------
/misc/kafka-cluster/README.md:
--------------------------------------------------------------------------------
1 | # Kafka-cluster
2 |
3 | docker compose kafka cluster
4 |
5 | ## How-to Start
6 |
7 | 1. Create `backend` network,
8 | ```bash
9 | docker network create backend_default
10 | ```
11 |
12 | 2. Build and run docker-compose,
13 | ```bash
14 | docker-compose up --build
15 | ```
16 |
17 | 3. Open Jupyter Notebook on [localhost:8080](http://localhost:8080)
18 |
19 | ## Test manual partition consumers
20 |
21 | Upload [jupyter/test-manual-partition-consumer.ipynb](jupyter/test-manual-partition-consumer.ipynb)
22 |
23 | ## Test dynamic group consumers
24 |
25 | Upload [jupyter/test-dynamic-group-consumer.ipynb](jupyter/test-dynamic-group-consumer.ipynb)
26 |
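27 | ## Minimal example outside the notebooks
28 |
29 | A minimal sketch of a manually assigned partition consumer with `kafka-python` (already installed in the jupyter image); the broker address below is an assumption, adjust it to the listeners in `server*.properties`:
30 |
31 | ```python
32 | from kafka import KafkaConsumer, KafkaProducer, TopicPartition
33 |
34 | BOOTSTRAP = ['localhost:9092']  # assumption: change to your broker listeners
35 |
36 | # push a few messages into the topic
37 | producer = KafkaProducer(bootstrap_servers=BOOTSTRAP)
38 | for i in range(3):
39 |     producer.send('test-topic', b'message-%d' % i)
40 | producer.flush()
41 |
42 | # assign partition 0 manually instead of joining a consumer group
43 | consumer = KafkaConsumer(bootstrap_servers=BOOTSTRAP,
44 |                          auto_offset_reset='earliest',
45 |                          consumer_timeout_ms=5000)
46 | consumer.assign([TopicPartition('test-topic', 0)])
47 | for message in consumer:
48 |     print(message.partition, message.offset, message.value)
49 | ```
50 |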
--------------------------------------------------------------------------------
/misc/kafka-cluster/jupyter/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM debian:stretch-slim AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip
6 |
7 | RUN pip3 install kafka-python jupyter confluent-kafka
8 | RUN pip3 install tornado==5.1.1
9 |
10 | ENV LC_ALL C.UTF-8
11 | ENV LANG C.UTF-8
12 |
13 | RUN jupyter notebook --generate-config
14 |
15 | RUN echo "" >> /root/.jupyter/jupyter_notebook_config.py
16 | RUN echo "c.NotebookApp.token = ''" >> /root/.jupyter/jupyter_notebook_config.py
17 |
18 | WORKDIR /app
19 |
--------------------------------------------------------------------------------
/misc/kafka-cluster/kafka_2.11-2.0.0.tgz:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/kafka-cluster/kafka_2.11-2.0.0.tgz
--------------------------------------------------------------------------------
/misc/storm/README.md:
--------------------------------------------------------------------------------
1 | # Apache storm
2 |
3 | docker compose storm cluster
4 |
5 | ## How-to Start
6 | Prune docker image first,
7 | ```bash
8 | docker image prune -f
9 | docker container prune -f
10 | ```
11 |
12 | ```bash
13 | docker-compose up --build --force-recreate
14 | ```
15 |
16 | Nimbus is pretty slow to start up; if you get an output like this,
17 | ```text
18 | streamparse | thriftpy.transport.TTransportException: TTransportException(type=1, message="Could not connect to ('nimbus', 6627)")
19 | ```
20 |
21 | So you need to wait for Nimbus to spawn (a minimal wait sketch is at the end of this readme),
22 | ```text
23 | Running: java -server -Ddaemon.name=nimbus -Dstorm.options= -Dstorm.home=/apache-storm-1.2.1 -Dstorm.log.dir=/logs -Djava.library.path=/usr/local/lib:/opt/local/lib:/usr/lib -Dstorm.conf.file= -cp /apache-storm-1.2.1/*:/apache-storm-1.2.1/lib/*:/apache-storm-1.2.1/extlib/*:/apache-storm-1.2.1/extlib-daemon/*:/conf -Xmx1024m -Djava.net.preferIPv4Stack=true -Dlogfile.name=nimbus.log -DLog4jContextSelector=org.apache.logging.log4j.core.async.AsyncLoggerContextSelector -Dlog4j.configurationFile=/apache-storm-1.2.1/log4j2/cluster.xml org.apache.storm.daemon.nimbus
24 | ```
25 |
26 | ## Storm UI
27 |
28 | Visit [localhost:8085](http://localhost:8085) for storm UI
29 |
30 | 
31 |
32 | 
33 |
34 | 
35 |
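36 | ## Waiting for Nimbus
37 |
38 | A minimal sketch of the wait mentioned above, assuming the Nimbus container is reachable as `nimbus` on Thrift port 6627 (the streamparse container does the same thing with a retry loop around `sparse submit` in `entrypoint.sh`):
39 |
40 | ```python
41 | import socket
42 | import time
43 |
44 | # poll the Nimbus thrift port until it accepts connections
45 | while True:
46 |     try:
47 |         socket.create_connection(('nimbus', 6627), timeout=2).close()
48 |         print('nimbus is up, safe to submit the topology')
49 |         break
50 |     except OSError as exc:
51 |         print('still waiting for nimbus:', exc)
52 |         time.sleep(5)
53 | ```
54 |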
--------------------------------------------------------------------------------
/misc/storm/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | zookeeper:
4 | image: zookeeper:3.4
5 | container_name: zookeeper
6 | restart: always
7 |
8 | nimbus:
9 | build: storm
10 | container_name: nimbus
11 | command: storm nimbus
12 | depends_on:
13 | - zookeeper
14 | links:
15 | - zookeeper
16 | restart: always
17 | ports:
18 | - 6627:6627
19 | volumes:
20 | - ./storm/conf:/conf
21 |
22 | supervisor:
23 | build: storm
24 | container_name: supervisor
25 | command: storm supervisor
26 | depends_on:
27 | - nimbus
28 | - zookeeper
29 | links:
30 | - nimbus
31 | - zookeeper
32 | restart: always
33 | volumes:
34 | - ./storm/conf:/conf
35 |
36 | sui:
37 | build: storm
38 | container_name: sui
39 | command: storm ui
40 | depends_on:
41 | - nimbus
42 | - zookeeper
43 | links:
44 | - nimbus
45 | - zookeeper
46 | restart: always
47 | ports:
48 | - "8085:8080"
49 | volumes:
50 | - ./storm/conf:/conf
51 |
52 | streamparse:
53 | build: streamparse
54 | container_name: streamparse
55 | command: /entrypoint.sh
56 | depends_on:
57 | - zookeeper
58 | - nimbus
59 | - supervisor
60 | - sui
61 | links:
62 | - zookeeper
63 | - nimbus
64 | - supervisor
65 | - sui
66 | restart: always
67 | volumes:
68 | - ./streamparse/tasks:/tasks
69 |
--------------------------------------------------------------------------------
/misc/storm/screenshot/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/storm/screenshot/1.png
--------------------------------------------------------------------------------
/misc/storm/screenshot/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/storm/screenshot/2.png
--------------------------------------------------------------------------------
/misc/storm/screenshot/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/storm/screenshot/3.png
--------------------------------------------------------------------------------
/misc/storm/storm/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 |
3 | RUN apt-get update && \
4 | apt-get upgrade -y && \
5 | apt-get install openjdk-8-jre-headless -y && \
6 | apt-get install locales -y && \
7 | update-locale LANG=C.UTF-8 LC_MESSAGES=POSIX && \
8 | locale-gen en_US.UTF-8 && \
9 | dpkg-reconfigure locales && \
10 | apt-get clean all
11 |
12 | RUN apt-get install -y python openssh-server gnupg
13 |
14 | ENV STORM_USER=storm \
15 | STORM_CONF_DIR=/conf \
16 | STORM_DATA_DIR=/data \
17 | STORM_LOG_DIR=/logs
18 |
19 | # Add a user and make dirs
20 | RUN set -x \
21 | && useradd "$STORM_USER" \
22 | && mkdir -p "$STORM_CONF_DIR" "$STORM_DATA_DIR" "$STORM_LOG_DIR" \
23 | && chown -R "$STORM_USER:$STORM_USER" "$STORM_CONF_DIR" "$STORM_DATA_DIR" "$STORM_LOG_DIR"
24 |
25 | ARG DISTRO_NAME=apache-storm-1.2.1
26 |
27 | # Download Apache Storm, untar it and fix ownership
28 | RUN set -x \
29 | && wget -q "http://www.apache.org/dist/storm/$DISTRO_NAME/$DISTRO_NAME.tar.gz" \
30 | && tar -xzf "$DISTRO_NAME.tar.gz" \
31 | && chown -R "$STORM_USER:$STORM_USER" "$DISTRO_NAME"
32 |
33 | WORKDIR $DISTRO_NAME
34 |
35 | ENV PATH $PATH:/$DISTRO_NAME/bin
36 |
37 | RUN wget https://github.com/javabean/su-exec/releases/download/v0.2/su-exec.amd64 -O /usr/bin/su-exec
38 | RUN chmod +x /usr/bin/su-exec
39 |
40 | #
41 | # FIXME: streamparse should be installed by virtualenv
42 | #
43 | RUN apt-get install -y python3-pip libffi-dev libssl-dev
44 | RUN pip3 install streamparse
45 |
46 | COPY docker-entrypoint.sh /
47 | RUN chmod +x /docker-entrypoint.sh
48 |
49 | WORKDIR /
50 |
51 | ENTRYPOINT ["/docker-entrypoint.sh"]
52 |
--------------------------------------------------------------------------------
/misc/storm/storm/conf/storm.yaml:
--------------------------------------------------------------------------------
1 | storm.zookeeper.servers:
2 | - "zookeeper"
3 |
4 | nimbus.seeds : ["nimbus"]
5 | nimbus.childopts: "-Xmx1024m -Djava.net.preferIPv4Stack=true"
6 |
7 | ui.childopts: "-Xmx768m -Djava.net.preferIPv4Stack=true"
8 |
9 | supervisor.childopts: "-Djava.net.preferIPv4Stack=true"
10 | worker.childopts: "-Xmx768m -Djava.net.preferIPv4Stack=true"
11 |
12 | storm.log.dir: "/logs"
13 | storm.local.dir: "/data"
14 |
--------------------------------------------------------------------------------
/misc/storm/storm/docker-entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | set -e
4 |
5 |
6 | # Allow the container to be started with `--user`
7 | if [ "$1" = 'storm' -a "$(id -u)" = '0' ]; then
8 | chown -R "$STORM_USER" "$STORM_CONF_DIR" "$STORM_DATA_DIR" "$STORM_LOG_DIR"
9 | exec su-exec "$STORM_USER" "$0" "$@"
10 | fi
11 |
12 | ## Generate the config only if it doesn't exist
13 | #CONFIG="$STORM_CONF_DIR/storm.yaml"
14 | #if [ ! -f "$CONFIG" ]; then
15 | # cat << EOF > "$CONFIG"
16 | #storm.zookeeper.servers: [zookeeper]
17 | #nimbus.seeds: [nimbus]
18 | #storm.log.dir: "$STORM_LOG_DIR"
19 | #storm.local.dir: "$STORM_DATA_DIR"
20 | #EOF
21 | #fi
22 |
23 | exec "$@"
24 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04
2 |
3 | RUN apt-get update && \
4 | apt-get upgrade -y && \
5 | apt-get install openjdk-8-jre-headless -y && \
6 | apt-get install locales -y && \
7 | update-locale LANG=C.UTF-8 LC_MESSAGES=POSIX && \
8 | locale-gen en_US.UTF-8 && \
9 | dpkg-reconfigure locales && \
10 | apt-get clean all
11 |
12 | RUN apt-get install -y curl python-dev build-essential
13 | RUN apt-get install -y python3-pip
14 | RUN apt-get install -y libssl-dev
15 | RUN apt-get install -y libffi-dev
16 | RUN apt-get install -y wget
17 |
18 |
19 | # download and install Leiningen
20 | ENV LEIN_ROOT=1
21 | RUN curl https://raw.githubusercontent.com/technomancy/leiningen/stable/bin/lein > ./lein
22 | RUN chmod a+x ./lein
23 | RUN mv ./lein /usr/bin/lein
24 | RUN lein version
25 |
26 | RUN pip3 install streamparse -U
27 |
28 | ENV STORM_USER=storm \
29 | STORM_CONF_DIR=/conf \
30 | STORM_DATA_DIR=/data \
31 | STORM_LOG_DIR=/logs
32 |
33 | WORKDIR /opt
34 |
35 | # Add a user and make dirs
36 | RUN set -x \
37 | && useradd "$STORM_USER" \
38 | && mkdir -p "$STORM_CONF_DIR" "$STORM_DATA_DIR" "$STORM_LOG_DIR" \
39 | && chown -R "$STORM_USER:$STORM_USER" "$STORM_CONF_DIR" "$STORM_DATA_DIR" "$STORM_LOG_DIR"
40 |
41 | ARG DISTRO_NAME=apache-storm-1.2.1
42 |
43 | # Download Apache Storm, untar it and fix ownership
44 | RUN set -x \
45 | && wget -q "http://www.apache.org/dist/storm/$DISTRO_NAME/$DISTRO_NAME.tar.gz" \
46 | && tar -xzf "$DISTRO_NAME.tar.gz" \
47 | && chown -R "$STORM_USER:$STORM_USER" "$DISTRO_NAME"
48 |
49 |
50 | ENV PATH /opt/"$DISTRO_NAME"/bin/:$PATH
51 |
52 | COPY entrypoint.sh /entrypoint.sh
53 | RUN chmod +x /entrypoint.sh
54 |
55 | RUN apt-get install -y inetutils-ping
56 | RUN update-ca-certificates -f
57 | WORKDIR /tasks/wordcount
58 |
59 | RUN pip3 install psutil
60 |
61 | #ENTRYPOINT ["/bin/bash"]
62 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/entrypoint.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | while true ; do
4 | if sparse submit --debug -e prod -n wordcount -w 4; then
5 | break
6 | fi
7 | sleep 2
8 | done
9 |
10 | export PYTHONUNBUFFERED=0
11 | python3 ps.py
12 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/.gitignore:
--------------------------------------------------------------------------------
1 | .DS_Store
2 | _build
3 | _resources
4 | logs
5 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/README.md:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/storm/streamparse/tasks/wordcount/README.md
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/config.json:
--------------------------------------------------------------------------------
1 | {
2 | "serializer": "json",
3 | "topology_specs": "topologies/",
4 | "virtualenv_specs": "virtualenvs/",
5 | "envs": {
6 | "prod": {
7 | "user": "",
8 | "ssh_password": "",
9 | "use_ssh_for_nimbus": false,
10 | "use_virtualenv": false,
11 | "nimbus": "nimbus",
12 | "workers": [],
13 | "log": {
14 | "path": "",
15 | "max_bytes": 1000000,
16 | "backup_count": 10,
17 | "level": "info"
18 | },
19 | "virtualenv_root": ""
20 | }
21 | }
22 | }
23 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/fabfile.py:
--------------------------------------------------------------------------------
1 | def pre_submit(topology_name, env_name, env_config, options):
2 | """Override this function to perform custom actions prior to topology
3 | submission. No SSH tunnels will be active when this function is called."""
4 | pass
5 |
6 |
7 | def post_submit(topo_name, env_name, env_config, options):
8 | """Override this function to perform custom actions after topology
9 | submission. Note that the SSH tunnel to Nimbus will still be active
10 | when this function is called."""
11 | pass
12 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/project.clj:
--------------------------------------------------------------------------------
1 | (defproject wordcount "0.0.1-SNAPSHOT"
2 | :resource-paths ["_resources"]
3 | :target-path "_build"
4 | :min-lein-version "2.0.0"
5 | :jvm-opts ["-client"]
6 | :dependencies [[org.apache.storm/storm-core "1.2.1"]
7 | [org.apache.storm/flux-core "1.2.1"]]
8 | :jar-exclusions [#"log4j\.properties" #"org\.apache\.storm\.(?!flux)" #"trident" #"META-INF" #"meta-inf" #"\.yaml"]
9 | :uberjar-exclusions [#"log4j\.properties" #"org\.apache\.storm\.(?!flux)" #"trident" #"META-INF" #"meta-inf" #"\.yaml"]
10 | )
11 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/ps.py:
--------------------------------------------------------------------------------
1 | from __future__ import print_function
2 | import psutil
3 |
4 |
5 | def pidmonitor():
6 | processes = ['streamparse.run', 'java']
7 | for pid in psutil.pids():
8 | proc = psutil.Process(pid)
9 | for process in processes:
10 | if process in proc.cmdline():
11 | cmdline = proc.cmdline()
12 | main_proc = cmdline[0]
13 | details = []
14 | if main_proc == 'java':
15 | details.append('[storm]')
16 | elif main_proc == 'python':
17 | details.extend(cmdline[2:4])
18 | for detail in details:
19 | if 'Spout' in detail:
20 | details.append('[spout]')
21 | if 'Bolt' in detail:
22 | details.append('[bolt]')
23 | print(main_proc, ' '.join(details))
24 | print('=> CPU% {}'.format(proc.cpu_percent(interval = 0.2)))
25 |
26 |
27 | while True:
28 | try:
29 | pidmonitor()
30 | except Exception as e:
31 | print(e)
32 | pass
33 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/src/bolts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/storm/streamparse/tasks/wordcount/src/bolts/__init__.py
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/src/bolts/wordcount.py:
--------------------------------------------------------------------------------
1 | import os
2 | from collections import Counter
3 |
4 | from streamparse import Bolt
5 |
6 |
7 | class WordCountBolt(Bolt):
8 | outputs = ['word', 'count']
9 |
10 | def initialize(self, conf, ctx):
11 | self.counter = Counter()
12 | self.pid = os.getpid()
13 | self.total = 0
14 |
15 | def _increment(self, word, inc_by):
16 | self.counter[word] += inc_by
17 | self.total += inc_by
18 |
19 | def process(self, tup):
20 | word = tup.values[0]
21 | self._increment(word, 10 if word == "dog" else 1)
22 | if self.total % 1000 == 0:
23 | self.logger.info("counted [{:,}] words [pid={}]".format(self.total,
24 | self.pid))
25 | self.emit([word, self.counter[word]])
26 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/src/spouts/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/misc/storm/streamparse/tasks/wordcount/src/spouts/__init__.py
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/src/spouts/words.py:
--------------------------------------------------------------------------------
1 | from itertools import cycle
2 |
3 | from streamparse import Spout
4 |
5 |
6 | class WordSpout(Spout):
7 | outputs = ['word']
8 |
9 | def initialize(self, stormconf, context):
10 | self.words = cycle(['dog', 'cat', 'zebra', 'elephant'])
11 |
12 | def next_tuple(self):
13 | word = next(self.words)
14 | self.emit([word])
15 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/topologies/wordcount.py:
--------------------------------------------------------------------------------
1 | """
2 | Word count topology
3 | """
4 |
5 | from streamparse import Grouping, Topology
6 |
7 | from bolts.wordcount import WordCountBolt
8 | from spouts.words import WordSpout
9 |
10 |
11 | class WordCount(Topology):
12 | word_spout = WordSpout.spec()
13 | count_bolt = WordCountBolt.spec(inputs={word_spout: Grouping.fields('word')},
14 | par=2)
15 |
--------------------------------------------------------------------------------
/misc/storm/streamparse/tasks/wordcount/virtualenvs/wordcount.txt:
--------------------------------------------------------------------------------
1 | streamparse # always required for streamparse projects
2 |
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel \
9 | openjdk-8-jdk-headless \
10 | wget \
11 | apt-transport-https \
12 | supervisor
13 |
14 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
15 |
16 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
17 |
18 | RUN apt-get update && apt-get install -y elasticsearch
19 |
20 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
21 |
22 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
23 |
24 | RUN apt-get update && apt-get install -y kibana
25 |
26 | RUN pip3 install scipy tweepy elasticsearch numpy sklearn scikit-learn Flask gunicorn eventlet unidecode flask_cors
27 |
28 | WORKDIR /app
29 |
30 | COPY . /app
31 |
32 | RUN cp elasticsearch.yml /etc/elasticsearch/
33 |
34 | RUN cp kibana.yml /etc/kibana/
35 |
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Insert the related keys,
4 |
5 | Make sure you have inserted the related API keys in `twitter-streaming.py`,
6 |
7 | ```python
8 | consumer_key=""
9 | consumer_secret=""
10 |
11 | access_token=""
12 | access_token_secret=""
13 | ```
14 |
15 | 2. Run Docker Compose,
16 | ```bash
17 | compose/build
18 | ```
19 |
20 | 
21 |
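22 | ## Minimal piping sketch
23 |
24 | A minimal sketch of what `twitter-streaming.py` does, assuming tweepy 3.x and the bundled Elasticsearch on `localhost:9200`; the tracked keyword is only an example, and the real script also runs the sentiment model before indexing:
25 |
26 | ```python
27 | import json
28 |
29 | from elasticsearch import Elasticsearch
30 | from tweepy import OAuthHandler, Stream
31 | from tweepy.streaming import StreamListener
32 |
33 | consumer_key = ''
34 | consumer_secret = ''
35 | access_token = ''
36 | access_token_secret = ''
37 |
38 | es = Elasticsearch(['localhost:9200'])
39 |
40 |
41 | class TweetListener(StreamListener):
42 |     def on_data(self, data):
43 |         tweet = json.loads(data)
44 |         # index the raw text; the real pipeline adds a sentiment field here
45 |         es.index(index='tweets', doc_type='tweet', body={'text': tweet.get('text')})
46 |         return True
47 |
48 |     def on_error(self, status):
49 |         print(status)
50 |         return True
51 |
52 |
53 | auth = OAuthHandler(consumer_key, consumer_secret)
54 | auth.set_access_token(access_token, access_token_secret)
55 | Stream(auth, TweetListener()).filter(track=['python'])  # example keyword only
56 | ```
57 |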
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | service elasticsearch start
4 | service kibana start
5 | supervisord --nodaemon -c supervisord.conf
6 |
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | sentiment-twitter-elasticsearch:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '9200:9200' # elastic
12 | - '5601:5601' # kibana
13 | - '9000:9000' # supervisord
14 | - '8095:8095' # sentiment
15 | volumes:
16 | - ..:/app
17 |
--------------------------------------------------------------------------------
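Once the container is up, the ports published above can be sanity-checked from the host with the same elasticsearch client the image installs; this is only a convenience snippet, and the index created by twitter-streaming.py is not assumed here:

```python
from elasticsearch import Elasticsearch

# 9200 is the port published by the compose file above
es = Elasticsearch(['http://localhost:9200'])
print(es.cluster.health())  # a single node usually reports 'yellow' or 'green'
print(es.cat.indices())     # lists whichever indices the streamer has created so far
```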
/piping/1.sentiment-tweetpy-elasticsearch/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/1.sentiment-tweetpy-elasticsearch/kibana.png
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/sentiment/bayes.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/1.sentiment-tweetpy-elasticsearch/sentiment/bayes.p
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/sentiment/bayes_bm.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/1.sentiment-tweetpy-elasticsearch/sentiment/bayes_bm.p
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/sentiment/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:8095
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid server:app
3 |
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/sentiment/tfidf.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/1.sentiment-tweetpy-elasticsearch/sentiment/tfidf.p
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/sentiment/tfidf_bm.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/1.sentiment-tweetpy-elasticsearch/sentiment/tfidf_bm.p
--------------------------------------------------------------------------------
/piping/1.sentiment-tweetpy-elasticsearch/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:sentiment]
4 | directory = sentiment
5 | command = bash eventlet.sh
6 | stopasgroup = true
7 | autorestart = true
8 |
9 | [program:twitterstreaming]
10 | command = python3 twitter-streaming.py
11 | stopasgroup = true
12 | autorestart = true
13 |
14 | [inet_http_server]
15 | port = 0.0.0.0:9000
16 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | git \
6 | python3 \
7 | python3-pip \
8 | python3-wheel \
9 | openjdk-8-jdk-headless \
10 | wget \
11 | apt-transport-https \
12 | supervisor
13 |
14 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
15 |
16 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
17 |
18 | RUN apt-get update && apt-get install -y elasticsearch
19 |
20 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
21 |
22 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
23 |
24 | RUN apt-get update && apt-get install -y kibana
25 |
26 | RUN pip3 install scipy tweepy elasticsearch numpy sklearn scikit-learn Flask gunicorn eventlet unidecode flask_cors
27 |
28 | WORKDIR /app
29 |
30 | COPY . /app
31 |
32 | RUN cp elasticsearch.yml /etc/elasticsearch/
33 |
34 | RUN cp kibana.yml /etc/kibana/
35 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | Task automation
9 |
10 | 
11 |
12 | localhost:8082
13 |
14 | 
15 |
16 | Kibana
17 |
18 | 
19 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route('/')
7 | def hello_world():
8 | return 'Hey, we have Flask in a Docker container!'
9 |
10 |
11 | @app.route('/members')
12 | def members():
13 | return 'you can put anything after /members/'
14 |
15 |
16 | @app.route('/members/<string:name>/')
17 | def getMember(name):
18 | return name
19 |
20 |
21 | if __name__ == '__main__':
22 | app.run(debug = True, host = '0.0.0.0', port = 5000)
23 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec crawler bash
4 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | service elasticsearch start
4 | service kibana start
5 | supervisord -c supervisord.conf
6 | python3 download_nltk.py
7 | PYTHONPATH='.' luigi --module crawling Save_to_Elastic --issue 'isu najib razak' --year-start 2010 --year-end 2019 --limit 1000 --batch-size 10 --index 'news'
8 | python3 app.py
9 |
--------------------------------------------------------------------------------
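The `crawling` module invoked above is not shown in this section. As a sketch only, a Luigi task accepting the same command-line parameters might look like the following; the crawler itself is a hypothetical placeholder and the document fields are assumptions:

```python
import json

import luigi
from elasticsearch import Elasticsearch


def crawl_news(issue, year_start, year_end, limit):
    # placeholder for the real crawler; returns dummy documents
    return [{'title': issue, 'year': year_start, 'text': '...'}][:limit]


class Save_to_Elastic(luigi.Task):
    # parameter names mirror the CLI flags used in compose/develop
    issue = luigi.Parameter()
    year_start = luigi.IntParameter()
    year_end = luigi.IntParameter()
    limit = luigi.IntParameter(default = 1000)
    batch_size = luigi.IntParameter(default = 10)
    index = luigi.Parameter(default = 'news')

    def output(self):
        # marker file so the Luigi scheduler knows the task already ran
        return luigi.LocalTarget('%s-saved.json' % self.index)

    def run(self):
        es = Elasticsearch()
        docs = crawl_news(self.issue, self.year_start, self.year_end, self.limit)
        for i, doc in enumerate(docs):
            es.index(index = self.index, doc_type = 'news', id = i, body = doc)
        with self.output().open('w') as f:
            json.dump({'total': len(docs)}, f)
```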
/piping/2.luigi-crawler-sentiment-elasticsearch/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | crawler:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '9200:9200' # elastic
12 | - '5601:5601' # kibana
13 | - '8082:8082' # luigi
14 | - '9000:9000' # supervisord
15 | - '5000:5000' # flask
16 | volumes:
17 | - ..:/app
18 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/dependency.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/2.luigi-crawler-sentiment-elasticsearch/dependency.png
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/download_nltk.py:
--------------------------------------------------------------------------------
1 | import nltk
2 |
3 | nltk.download('punkt')
4 |
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/kibana.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/2.luigi-crawler-sentiment-elasticsearch/kibana.png
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/luigi.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/2.luigi-crawler-sentiment-elasticsearch/luigi.png
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/sentiment/bayes.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/2.luigi-crawler-sentiment-elasticsearch/sentiment/bayes.p
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/sentiment/bayes_bm.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/2.luigi-crawler-sentiment-elasticsearch/sentiment/bayes_bm.p
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/sentiment/tfidf.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/2.luigi-crawler-sentiment-elasticsearch/sentiment/tfidf.p
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/sentiment/tfidf_bm.p:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/2.luigi-crawler-sentiment-elasticsearch/sentiment/tfidf_bm.p
--------------------------------------------------------------------------------
/piping/2.luigi-crawler-sentiment-elasticsearch/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:luigi]
4 | command = luigid
5 | stopasgroup = true
6 | autorestart = true
7 |
8 | [inet_http_server]
9 | port = 0.0.0.0:9000
10 |
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | curl \
5 | python3-pip \
6 | openjdk-8-jdk-headless \
7 | apt-transport-https \
8 | wget
9 |
10 | RUN wget -qO - https://artifacts.elastic.co/GPG-KEY-elasticsearch | apt-key add -
11 |
12 | RUN echo "deb https://artifacts.elastic.co/packages/6.x/apt stable main" | tee -a /etc/apt/sources.list.d/elastic-6.x.list
13 |
14 | RUN apt-get update && apt-get install -y elasticsearch
15 |
16 | ENV SLUGIFY_USES_TEXT_UNIDECODE yes
17 |
18 | RUN pip3 install Flask apache-airflow elasticsearch
19 |
20 | RUN pip3 install tensorflow
21 |
22 | RUN pip3 install elasticsearch-dsl
23 |
24 | WORKDIR /app
25 |
26 | COPY . /app
27 |
28 | RUN apt-get install supervisor -y
29 |
30 | RUN cp elasticsearch.yml /etc/elasticsearch/
31 |
32 | ENV AIRFLOW_HOME /app
33 |
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request, render_template, jsonify
2 | from elasticsearch import Elasticsearch
3 | from elasticsearch_dsl import Search
4 |
5 | app = Flask(__name__)
6 |
7 |
8 | def get_es(index = 'test_index'):
9 | es = Elasticsearch()
10 | return Search(using = es, index = index)
11 |
12 |
13 | def get_sentiment(sentiment):
14 | s = get_es()
15 | s = s.filter('terms', sentiment_label__keyword = [sentiment])
16 | return s.execute().to_dict()
17 |
18 |
19 | @app.route('/', methods = ['GET'])
20 | def hello():
21 | return 'HOLLA!'
22 |
23 |
24 | @app.route('/index/<sentiment>')
25 | def indexing(sentiment):
26 | if not sentiment:
27 | return jsonify({'error': 'insert sentiment'})
28 | return jsonify(get_sentiment(sentiment))
29 |
30 |
31 | if __name__ == '__main__':
32 | app.run(debug = True, host = '0.0.0.0', port = 5000)
33 |
--------------------------------------------------------------------------------
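The `/index/<sentiment>` route above only returns documents whose `sentiment_label` keyword matches, so the index has to contain such documents first. A quick manual check, assuming the `test_index` name from `get_es` and an illustrative document shape:

```python
from elasticsearch import Elasticsearch

es = Elasticsearch()
# minimal document carrying the field the route filters on
es.index(
    index = 'test_index',
    doc_type = '_doc',
    body = {'text': 'saya suka', 'sentiment_label': 'positive'},
)
es.indices.refresh(index = 'test_index')
# http://localhost:5000/index/positive should now return this hit
```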
/piping/3.airflow-elasticsearch/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec airflow-elasticsearch bash
4 |
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/dags/test_print.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime
2 | from airflow import DAG
3 | from airflow.operators.dummy_operator import DummyOperator
4 | from airflow.operators.python_operator import PythonOperator
5 | import logging
6 |
7 |
8 | def print_hello():
9 | logging.info('hello world!')
10 | return 'Hello world!'
11 |
12 |
13 | dag = DAG(
14 | 'hello_world',
15 | description = 'Simple tutorial DAG',
16 | schedule_interval = None,
17 | start_date = datetime(2017, 3, 20),
18 | catchup = False,
19 | )
20 |
21 | dummy_operator = DummyOperator(task_id = 'dummy_task', retries = 3, dag = dag)
22 |
23 | hello_operator = PythonOperator(
24 | task_id = 'hello_task', python_callable = print_hello, dag = dag
25 | )
26 |
27 | dummy_operator >> hello_operator
28 |
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/dags/test_xcom.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta
2 | from airflow.models import DAG
3 | from airflow.operators.python_operator import PythonOperator
4 | import logging
5 |
6 | DAG = DAG(
7 | dag_id = 'simple_xcom',
8 | start_date = datetime(2017, 10, 26),
9 | schedule_interval = None,
10 | )
11 |
12 |
13 | def push_function(**context):
14 | msg = 'the_message'
15 | logging.info("message to push: '%s'" % msg)
16 | print("message to push: '%s'" % msg)
17 | task_instance = context['task_instance']
18 | task_instance.xcom_push(key = 'the_message', value = msg)
19 |
20 |
21 | push_task = PythonOperator(
22 | task_id = 'push_task',
23 | python_callable = push_function,
24 | provide_context = True,
25 | dag = DAG,
26 | )
27 |
28 |
29 | def pull_function(**kwargs):
30 | ti = kwargs['ti']
31 | msg = ti.xcom_pull(task_ids = 'push_task', key = 'the_message')
32 | logging.info("received message: '%s'" % msg)
33 | print("received message: '%s'" % msg)
34 |
35 |
36 | pull_task = PythonOperator(
37 | task_id = 'pull_task',
38 | python_callable = pull_function,
39 | provide_context = True,
40 | dag = DAG,
41 | )
42 |
43 | push_task >> pull_task
44 |
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | airflow-elasticsearch:
4 | build:
5 | context: .
6 | target: base
7 | command: bash start.sh
8 | container_name: airflow-elasticsearch
9 | volumes:
10 | - .:/app
11 | ports:
12 | - '8080:8080'
13 | - '5000:5000'
14 | - '9200:9200'
15 | - '9001:9000'
16 |
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/screenshot/1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/3.airflow-elasticsearch/screenshot/1.png
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/screenshot/2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/3.airflow-elasticsearch/screenshot/2.png
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/screenshot/3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/3.airflow-elasticsearch/screenshot/3.png
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/screenshot/4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/3.airflow-elasticsearch/screenshot/4.png
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/screenshot/5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/3.airflow-elasticsearch/screenshot/5.png
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/screenshot/6.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/3.airflow-elasticsearch/screenshot/6.png
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/sentiment.pb:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/piping/3.airflow-elasticsearch/sentiment.pb
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/start.sh:
--------------------------------------------------------------------------------
1 | service elasticsearch start
2 | service elasticsearch status
3 | airflow initdb
4 | supervisord -c supervisord.conf
5 | python3 app.py
6 |
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:airflow-web]
4 | command = airflow webserver -p 8080
5 | stopasgroup = true
6 | autorestart = true
7 |
8 | [program:airflow-scheduler]
9 | command = airflow scheduler
10 | stopasgroup = true
11 | autorestart = true
12 |
13 | [inet_http_server]
14 | port = 0.0.0.0:9000
15 |
--------------------------------------------------------------------------------
/piping/3.airflow-elasticsearch/unittests.cfg:
--------------------------------------------------------------------------------
1 | [core]
2 | unit_test_mode = True
3 | airflow_home = /app
4 | dags_folder = /app/dags
5 | plugins_folder = /app/plugins
6 | base_log_folder = /app/logs
7 | logging_level = INFO
8 | fab_logging_level = WARN
9 | log_filename_template = {{ ti.dag_id }}/{{ ti.task_id }}/{{ ts }}/{{ try_number }}.log
10 | log_processor_filename_template = {{ filename }}.log
11 | dag_processor_manager_log_location = /app/logs/dag_processor_manager/dag_processor_manager.log
12 | executor = SequentialExecutor
13 | sql_alchemy_conn = sqlite:////app/unittests.db
14 | load_examples = True
15 | donot_pickle = False
16 | dag_concurrency = 16
17 | dags_are_paused_at_creation = False
18 | fernet_key =
19 | non_pooled_task_slot_count = 128
20 | enable_xcom_pickling = False
21 | killed_task_cleanup_time = 5
22 | secure_mode = False
23 | hostname_callable = socket:getfqdn
24 | worker_precheck = False
25 |
26 | [cli]
27 | api_client = airflow.api.client.local_client
28 | endpoint_url = http://localhost:8080
29 |
30 | [api]
31 | auth_backend = airflow.api.auth.backend.default
32 |
33 | [operators]
34 | default_owner = airflow
35 |
36 | [hive]
37 | default_hive_mapred_queue = airflow
38 |
39 | [webserver]
40 | base_url = http://localhost:8080
41 | web_server_host = 0.0.0.0
42 | web_server_port = 8080
43 | dag_orientation = LR
44 | dag_default_view = tree
45 | log_fetch_timeout_sec = 5
46 | hide_paused_dags_by_default = False
47 | page_size = 100
48 | rbac = False
49 |
50 | [email]
51 | email_backend = airflow.utils.email.send_email_smtp
52 |
53 | [smtp]
54 | smtp_host = localhost
55 | smtp_user = airflow
56 | smtp_port = 25
57 | smtp_password = airflow
58 | smtp_mail_from = airflow@example.com
59 |
60 | [celery]
61 | celery_app_name = airflow.executors.celery_executor
62 | worker_concurrency = 16
63 | worker_log_server_port = 8793
64 | broker_url = sqla+mysql://airflow:airflow@localhost:3306/airflow
65 | result_backend = db+mysql://airflow:airflow@localhost:3306/airflow
66 | flower_host = 0.0.0.0
67 | flower_port = 5555
68 | default_queue = default
69 |
70 | [mesos]
71 | master = localhost:5050
72 | framework_name = Airflow
73 | task_cpu = 1
74 | task_memory = 256
75 | checkpoint = False
76 | authenticate = False
77 | docker_image_slave = test/docker-airflow
78 |
79 | [scheduler]
80 | job_heartbeat_sec = 1
81 | scheduler_heartbeat_sec = 5
82 | scheduler_health_check_threshold = 30
83 | authenticate = true
84 | max_threads = 2
85 | catchup_by_default = True
86 | scheduler_zombie_task_threshold = 300
87 | dag_dir_list_interval = 0
88 | max_tis_per_query = 512
89 |
90 | [admin]
91 | hide_sensitive_variable_fields = True
92 |
93 | [elasticsearch]
94 | elasticsearch_host =
95 | elasticsearch_log_id_template = {dag_id}-{task_id}-{execution_date}-{try_number}
96 | elasticsearch_end_of_log_mark = end_of_log
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | ENV POSTGRES_USER root
4 | ENV POSTGRES_PASSWORD root
5 |
6 | RUN apt-get update && apt-get install -y \
7 | python3 \
8 | python3-pip \
9 | python3-wheel
10 |
11 | COPY requirements.txt ./
12 | RUN pip3 install -r requirements.txt
13 |
14 | ENV LC_ALL C.UTF-8
15 | ENV LANG C.UTF-8
16 |
17 | RUN apt-get install -y wget
18 |
19 | RUN wget http://download.redis.io/redis-stable.tar.gz && tar xvzf redis-stable.tar.gz
20 |
21 | RUN cd redis-stable && make install
22 |
23 | WORKDIR /app
24 |
25 | COPY . /app
26 |
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Run stress test,
9 | ```python
10 | # gunicorn with eventlet, 400 unique threads, 100 threads per second
11 | stress_test(400,100)
12 | ```
13 | ```text
14 | # index 0, total time taken 99.869447 s, average time taken 0.998694 s
15 | # index 100, total time taken 222.226329 s, average time taken 2.222263 s
16 | # index 200, total time taken 271.741829 s, average time taken 2.717418 s
17 | # index 300, total time taken 376.807925 s, average time taken 3.768079 s
18 | ```
19 |
--------------------------------------------------------------------------------
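The `stress_test` helper referenced above is not included in this folder. A single client that exercises the `/live` namespace can be sketched with the python-socketio client; this is an extra dependency that is not in requirements.txt, and its version has to speak the same Socket.IO protocol as the Flask-SocketIO server in this image:

```python
import socketio

sio = socketio.Client()


@sio.on('event', namespace = '/live')
def on_event(data):
    print('broadcast received:', data)


sio.connect('http://localhost:5000', namespaces = ['/live'])
sio.emit('event', {'id': 'client-1'}, namespace = '/live')
sio.sleep(1)
sio.disconnect()
```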
/scaling-backend/1.flask-socketio-scale-redis/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 | from flask_socketio import SocketIO, send, emit
3 | import time
4 | from flask import current_app
5 | import json
6 | import eventlet
7 |
8 | eventlet.monkey_patch()
9 |
10 | app = Flask(__name__)
11 | socketio = SocketIO(app, message_queue = 'redis://')
12 |
13 |
14 | @socketio.on('connect', namespace = '/live')
15 | def test_connect():
16 | emit('response', {'data': 'OK'}, broadcast = True)
17 |
18 |
19 | @socketio.on('disconnect', namespace = '/live')
20 | def test_disconnect():
21 | print('Client disconnected')
22 |
23 |
24 | @socketio.on('event', namespace = '/live')
25 | def test_message(message):
26 | print('incoming from %s' % (message['id']))
27 | emit('event', {'data': message['id']}, broadcast = True)
28 |
29 |
30 | application = app
31 |
--------------------------------------------------------------------------------
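Because the `SocketIO` instance above is attached to a Redis message queue, any other process on the same host can broadcast to connected clients without importing the Flask app; this is standard Flask-SocketIO usage and is what lets several gunicorn workers share one event bus:

```python
from flask_socketio import SocketIO

# connects only to the Redis queue; no Flask app is needed in this process
external = SocketIO(message_queue = 'redis://')
external.emit('event', {'data': 'pushed from outside the web worker'}, namespace = '/live')
```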
/scaling-backend/1.flask-socketio-scale-redis/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec socketio bash
4 |
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | redis-server --daemonize yes
4 | bash gunicorn-eventlet.sh
5 |
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | socketio:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask
12 |
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/gunicorn-eventlet.sh:
--------------------------------------------------------------------------------
1 | gunicorn -b 0.0.0.0:5000 -k eventlet app
2 |
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | eventlet
3 | gunicorn
4 | redis
5 | uwsgi
6 | flask-socketio
7 | psycopg2
8 | gevent
9 |
--------------------------------------------------------------------------------
/scaling-backend/1.flask-socketio-scale-redis/uwsgi-gevent.sh:
--------------------------------------------------------------------------------
1 | uwsgi --gevent 2 --http 0.0.0.0:5000 --wsgi-file app.py
2 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip \
6 | python3-wheel \
7 | nginx
8 |
9 | ADD . /code
10 |
11 | WORKDIR /code
12 |
13 | RUN pip3 install flask redis gunicorn eventlet
14 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | Port 80 will be load-balanced across 2 different servers, 5000 and 5001.
9 |
10 | ```text
11 | curl http://localhost:5000/ -X GET
12 | Hello World! I have been seen 19 times.
13 |
14 | curl http://localhost:5001/ -X GET
15 | Hello World! I have been seen 20 times.
16 |
17 | curl http://localhost/ -X GET
18 | Hello World! I have been seen 21 times.
19 | ```
20 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 | from redis import Redis
3 |
4 | app = Flask(__name__)
5 | redis = Redis(host = 'redis', port = 6379)
6 |
7 |
8 | @app.route('/')
9 | def hello():
10 | count = redis.incr('hits')
11 | return 'Hello World! I have been seen {} times.\n'.format(count)
12 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec flask bash
4 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | rm /etc/nginx/sites-enabled/default
4 | cp load-balancer.conf /etc/nginx/conf.d/
5 | service nginx start
6 | service nginx restart
7 | gunicorn --daemon --worker-class eventlet -b 0.0.0.0:5000 app:app
8 | gunicorn --worker-class eventlet -b 0.0.0.0:5001 app:app
9 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask1
12 | - '5001:5001' # flask2
13 | - '80:80' # nginx
14 |
15 | redis:
16 | image: redis:alpine
17 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/scaling-backend/2.flask-nginx-loadbalancer/load-balancer.conf:
--------------------------------------------------------------------------------
1 | upstream backend {
2 | server 127.0.0.1:5000;
3 | server 127.0.0.1:5001;
4 | }
5 |
6 | # This server accepts all traffic to port 80 and passes it to the upstream.
7 |
8 | server {
9 | listen 80;
10 |
11 | location / {
12 | proxy_pass http://backend;
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip \
6 | python3-wheel \
7 | nginx
8 |
9 | COPY requirements.txt ./
10 | RUN pip3 install -r requirements.txt
11 |
12 | WORKDIR /app
13 |
14 | COPY . /app
15 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Run stress test,
9 | ```text
10 | Port 80 will be load-balanced across 2 different servers, 5000 and 5001.
11 |
12 | stress_test(get_time_80, 50,10)
13 | index 0, total time taken 1.087309 s, average time taken 0.108731 s
14 | index 10, total time taken 1.203958 s, average time taken 0.120396 s
15 | index 20, total time taken 1.310126 s, average time taken 0.131013 s
16 | index 30, total time taken 1.595863 s, average time taken 0.159586 s
17 | index 40, total time taken 1.548332 s, average time taken 0.154833 s
18 | ```
19 |
--------------------------------------------------------------------------------
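The `stress_test(get_time_80, 50, 10)` helper shown above is not defined in this folder. One plausible shape, consistent with the printed output, is a thread pool that times batches of requests through the load balancer; the endpoint and the `EIO=3` handshake parameter are assumptions for this particular Socket.IO stack:

```python
import time
import urllib.request
from concurrent.futures import ThreadPoolExecutor


def get_time_80(_):
    # one polling handshake through nginx on port 80 (EIO=3 matches older engine.io servers)
    start = time.time()
    urllib.request.urlopen('http://localhost:80/socket.io/?EIO=3&transport=polling').read()
    return time.time() - start


def stress_test(fn, n_requests, concurrency):
    with ThreadPoolExecutor(max_workers = concurrency) as pool:
        timings = list(pool.map(fn, range(n_requests)))
    # report per batch of `concurrency` requests, like the sample output above
    for index in range(0, n_requests, concurrency):
        batch = timings[index:index + concurrency]
        print(
            'index %d, total time taken %f s, average time taken %f s'
            % (index, sum(batch), sum(batch) / len(batch))
        )


stress_test(get_time_80, 50, 10)
```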
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 | from flask_socketio import SocketIO, send, emit
3 | import time
4 | from flask import current_app
5 | import json
6 | import eventlet
7 |
8 | eventlet.monkey_patch()
9 | app = Flask(__name__)
10 | socketio = SocketIO(app, message_queue = 'redis://redis')
11 |
12 |
13 | @socketio.on('connect', namespace = '/live')
14 | def test_connect():
15 | emit('response', {'data': 'OK'}, broadcast = True)
16 |
17 |
18 | @socketio.on('disconnect', namespace = '/live')
19 | def test_disconnect():
20 | print('Client disconnected')
21 |
22 |
23 | @socketio.on('event', namespace = '/live')
24 | def test_message(message):
25 | print('incoming from %s' % (message['id']))
26 | emit('event', {'data': 'Hello World!'}, broadcast = True)
27 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec flask bash
4 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | rm /etc/nginx/sites-enabled/default
4 | cp load-balancer.conf /etc/nginx/conf.d/
5 | service nginx start
6 | service nginx restart
7 | gunicorn --daemon --worker-class eventlet -b 0.0.0.0:5000 app:app
8 | gunicorn --worker-class eventlet -b 0.0.0.0:5001 app:app
9 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | flask:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask1
12 | - '5001:5001' # flask2
13 | - '80:80' # nginx
14 |
15 | redis:
16 | image: redis:alpine
17 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/load-balancer.conf:
--------------------------------------------------------------------------------
1 | upstream socketio_nodes {
2 | ip_hash;
3 | server 127.0.0.1:5000;
4 | server 127.0.0.1:5001;
5 | }
6 |
7 | # This server accepts all traffic to port 80 and passes it to the upstream.
8 |
9 | server {
10 | listen 80;
11 | server_name _;
12 |
13 | location / {
14 | include proxy_params;
15 | proxy_pass http://127.0.0.1:5000;
16 | }
17 | location /socket.io {
18 | include proxy_params;
19 | proxy_http_version 1.1;
20 | proxy_buffering off;
21 | proxy_set_header Upgrade $http_upgrade;
22 | proxy_set_header Connection "Upgrade";
23 | proxy_pass http://socketio_nodes/socket.io;
24 | }
25 | }
26 |
--------------------------------------------------------------------------------
/scaling-backend/3.flask-socketio-redis-nginx-loadbalancer/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | eventlet
3 | gunicorn
4 | redis
5 | flask-socketio
6 | gevent
7 |
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip \
6 | wget \
7 | rabbitmq-server \
8 | supervisor
9 |
10 | RUN pip3 install Flask celery flower
11 |
12 | WORKDIR /app
13 |
14 | COPY . /app
15 |
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run [compose/build](compose/build),
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 2. Request some long task,
9 | ```bash
10 | curl localhost:5000/longtask
11 | ```
12 |
13 | Example output,
14 | ```text
15 | {
16 | "task_id": "659ee28d-2fab-4568-8c3b-1a760f4fd8c1"
17 | }
18 | ```
19 |
20 | 3. Check task id returned,
21 | ```bash
22 | curl localhost:5000/status/659ee28d-2fab-4568-8c3b-1a760f4fd8c1
23 | ```
24 |
25 | Example output,
26 | ```text
27 | {
28 | "current": 18,
29 | "state": "PROGRESS",
30 | "status": "Checking radiant orbiter...",
31 | "total": 34
32 | }
33 | ```
34 |
35 | ## Screenshots, visit [localhost:5555](http://localhost:5555)
36 |
37 | #### Front flower dashboard
38 |
39 | 
40 |
41 | #### Task description
42 |
43 | 
44 |
45 | #### List of tasks
46 |
47 | 
48 |
49 | #### Execution graphs
50 |
51 | 
52 |
--------------------------------------------------------------------------------
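The two curl calls above can also be wrapped into a small polling client against the endpoints defined in app.py; this is just a convenience sketch:

```python
import json
import time
import urllib.request


def get_json(url):
    with urllib.request.urlopen(url) as resp:
        return json.loads(resp.read().decode())


# kick off the background task, then poll its status endpoint until it finishes
task_id = get_json('http://localhost:5000/longtask')['task_id']
while True:
    status = get_json('http://localhost:5000/status/%s' % task_id)
    print(status.get('state'), status.get('current'), '/', status.get('total'), status.get('status'))
    if status.get('state') in ('SUCCESS', 'FAILURE'):
        break
    time.sleep(1)
```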
/scaling-backend/4.rabbitmq-multiple-celery-flask/app.py:
--------------------------------------------------------------------------------
1 | import os
2 | import random
3 | import time
4 | from flask import Flask, jsonify
5 | from celery import Celery
6 |
7 | app = Flask(__name__)
8 |
9 | celery = Celery(
10 | app.name, backend = 'rpc://', broker = 'amqp://guest:@localhost:5672//'
11 | )
12 | celery.conf.update(app.config)
13 |
14 |
15 | @celery.task(bind = True)
16 | def long_task(self):
17 | """Background task that runs a long function with progress reports."""
18 | verb = ['Starting up', 'Booting', 'Repairing', 'Loading', 'Checking']
19 | adjective = ['master', 'radiant', 'silent', 'harmonic', 'fast']
20 | noun = ['solar array', 'particle reshaper', 'cosmic ray', 'orbiter', 'bit']
21 | message = ''
22 | total = random.randint(10, 50)
23 | for i in range(total):
24 | if not message or random.random() < 0.25:
25 | message = '{0} {1} {2}...'.format(
26 | random.choice(verb),
27 | random.choice(adjective),
28 | random.choice(noun),
29 | )
30 | self.update_state(
31 | state = 'PROGRESS',
32 | meta = {'current': i, 'total': total, 'status': message},
33 | )
34 | time.sleep(1)
35 | return {
36 | 'current': 100,
37 | 'total': 100,
38 | 'status': 'Task completed!',
39 | 'result': 42,
40 | }
41 |
42 |
43 | @app.route('/', methods = ['GET'])
44 | def hello():
45 | return jsonify('HALLO')
46 |
47 |
48 | @app.route('/longtask', methods = ['GET'])
49 | def longtask():
50 | task = long_task.apply_async()
51 | return jsonify({'task_id': task.id})
52 |
53 |
54 | @app.route('/status/<task_id>')
55 | def taskstatus(task_id):
56 | task = long_task.AsyncResult(task_id)
57 | if task.state == 'PENDING':
58 | response = {
59 | 'state': task.state,
60 | 'current': 0,
61 | 'total': 1,
62 | 'status': 'Pending...',
63 | }
64 | elif task.state != 'FAILURE':
65 | response = {
66 | 'state': task.state,
67 | 'current': task.info.get('current', 0),
68 | 'total': task.info.get('total', 1),
69 | 'status': task.info.get('status', ''),
70 | }
71 | if 'result' in task.info:
72 | response['result'] = task.info['result']
73 | else:
74 | response = {
75 | 'state': task.state,
76 | 'current': 1,
77 | 'total': 1,
78 | 'status': str(task.info),
79 | }
80 | return jsonify(response)
81 |
82 |
83 | if __name__ == '__main__':
84 | app.run(debug = True, host = '0.0.0.0', port = 5000)
85 |
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/compose/bash:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml exec celery bash
4 |
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | service rabbitmq-server start
4 | supervisord -c supervisord.conf
5 | python3 app.py
6 |
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | celery:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask
12 | - '5555:5555'
13 |
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/screenshot/front.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/scaling-backend/4.rabbitmq-multiple-celery-flask/screenshot/front.png
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/screenshot/graphs.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/scaling-backend/4.rabbitmq-multiple-celery-flask/screenshot/graphs.png
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/screenshot/long-task.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/scaling-backend/4.rabbitmq-multiple-celery-flask/screenshot/long-task.png
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/screenshot/tasks.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/scaling-backend/4.rabbitmq-multiple-celery-flask/screenshot/tasks.png
--------------------------------------------------------------------------------
/scaling-backend/4.rabbitmq-multiple-celery-flask/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:flower]
4 | command = flower -A app.celery --port=5555 --address=0.0.0.0
5 | stopasgroup = true
6 | autorestart = true
7 |
8 | [program:celery1]
9 | command = celery worker -A app.celery --loglevel=debug -c 10 -n worker1@%%n
10 | stopasgroup = true
11 | autorestart = true
12 |
13 | [program:celery2]
14 | command = celery worker -A app.celery --loglevel=debug -c 10 -n worker2@%%n
15 | stopasgroup = true
16 | autorestart = true
17 |
18 | [inet_http_server]
19 | port = 0.0.0.0:9000
20 |
--------------------------------------------------------------------------------
/scaling-backend/5.flask-gunicorn-haproxy/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM python:3.6.1 AS base
2 |
3 | RUN pip3 install -U textblob ekphrasis
4 | RUN pip3 install flask gunicorn
5 | RUN python3 -m nltk.downloader punkt
6 | RUN python3 -m nltk.downloader wordnet
7 | RUN python3 -m nltk.downloader stopwords
8 | RUN python3 -m textblob.download_corpora
9 |
10 | WORKDIR /app
11 |
12 | COPY . /app
13 |
14 | RUN echo
15 |
16 | ENV LC_ALL C.UTF-8
17 | ENV LANG C.UTF-8
18 |
19 | EXPOSE 5000
20 |
--------------------------------------------------------------------------------
/scaling-backend/5.flask-gunicorn-haproxy/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `docker-compose`,
4 | ```bash
5 | docker-compose up --build
6 | ```
7 |
8 | 2. Scale `sentiment` to as many instances as you want,
9 | ```bash
10 | docker-compose scale sentiment=2
11 | ```
12 |
13 | You can see the output from HAProxy,
14 | ```text
15 | sentiment-haproxy | server 5flask-gunicorn-haproxy_sentiment_1 5flask-gunicorn-haproxy_sentiment_1:5000 check inter 2000 rise 2 fall 3
16 | sentiment-haproxy | server 5flask-gunicorn-haproxy_sentiment_2 5flask-gunicorn-haproxy_sentiment_2:5000 check inter 2000 rise 2 fall 3
17 | ```
18 |
19 | 3. Visit [localhost:5000/classify](http://localhost:5000/classify?text=%20i%20hate%20u%20man),
20 | ```text
21 | {"polarity":-0.8,"subjectivity":0.9}
22 | ```
23 |
--------------------------------------------------------------------------------
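The polarity and subjectivity in the `/classify` response come from TextBlob after the ekphrasis clean-up; as a sanity check, the raw TextBlob scores can be reproduced locally without the service (the exact numbers can differ slightly from the cleaned-text pipeline):

```python
from textblob import TextBlob

result = TextBlob('i hate u man')
print(result.sentiment.polarity, result.sentiment.subjectivity)
# roughly -0.8 and 0.9, in line with the /classify response above
```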
/scaling-backend/5.flask-gunicorn-haproxy/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask, request, jsonify
2 | from textblob import TextBlob
3 | from ekphrasis.classes.preprocessor import TextPreProcessor
4 | from ekphrasis.classes.tokenizer import SocialTokenizer
5 | from ekphrasis.dicts.emoticons import emoticons
6 | from ekphrasis.classes.spellcorrect import SpellCorrector
7 |
8 | text_processor = TextPreProcessor(
9 | normalize = [
10 | 'url',
11 | 'email',
12 | 'percent',
13 | 'money',
14 | 'phone',
15 | 'user',
16 | 'time',
17 | 'url',
18 | 'date',
19 | 'number',
20 | ],
21 | annotate = {
22 | 'hashtag',
23 | 'allcaps',
24 | 'elongated',
25 | 'repeated',
26 | 'emphasis',
27 | 'censored',
28 | },
29 | fix_html = True,
30 | segmenter = 'twitter',
31 | corrector = 'twitter',
32 | unpack_hashtags = True,
33 | unpack_contractions = True,
34 | spell_correct_elong = False,
35 | tokenizer = SocialTokenizer(lowercase = True).tokenize,
36 | dicts = [emoticons],
37 | )
38 |
39 | sp = SpellCorrector(corpus = 'english')
40 | app = Flask(__name__)
41 |
42 |
43 | def process_text(string):
44 | return ' '.join(
45 | [
46 | sp.correct(c)
47 | for c in text_processor.pre_process_doc(string)
48 | if '<' not in c
49 | and '>' not in c
50 | and c not in ',!;:{}\'"!@#$%^&*(01234567890?/|\\'
51 | ]
52 | )
53 |
54 |
55 | @app.route('/', methods = ['GET'])
56 | def hello():
57 | return 'Hello!'
58 |
59 |
60 | @app.route('/classify', methods = ['GET'])
61 | def classify():
62 | text = request.args.get('text')
63 | result = TextBlob(process_text(text))
64 | return jsonify(
65 | {
66 | 'polarity': result.sentiment.polarity,
67 | 'subjectivity': result.sentiment.subjectivity,
68 | }
69 | )
70 |
71 |
72 | application = app
73 |
--------------------------------------------------------------------------------
/scaling-backend/5.flask-gunicorn-haproxy/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '3'
2 |
3 | services:
4 | sentiment:
5 | restart: always
6 | build:
7 | context: .
8 | environment:
9 | SERVICE_PORTS: 5000
10 | command: bash start.sh 2
11 | lstm-haproxy:
12 | image: dockercloud/haproxy
13 | depends_on:
14 | - sentiment
15 | links:
16 | - sentiment
17 | ports:
18 | - '5000:80'
19 | container_name: sentiment-haproxy
20 | volumes:
21 | - /var/run/docker.sock:/var/run/docker.sock
22 |
--------------------------------------------------------------------------------
/scaling-backend/5.flask-gunicorn-haproxy/initial.py:
--------------------------------------------------------------------------------
1 | from ekphrasis.classes.preprocessor import TextPreProcessor
2 | from ekphrasis.classes.tokenizer import SocialTokenizer
3 | from ekphrasis.dicts.emoticons import emoticons
4 | from ekphrasis.classes.spellcorrect import SpellCorrector
5 |
6 | text_processor = TextPreProcessor(
7 | normalize = [
8 | 'url',
9 | 'email',
10 | 'percent',
11 | 'money',
12 | 'phone',
13 | 'user',
14 | 'time',
15 | 'url',
16 | 'date',
17 | 'number',
18 | ],
19 | annotate = {
20 | 'hashtag',
21 | 'allcaps',
22 | 'elongated',
23 | 'repeated',
24 | 'emphasis',
25 | 'censored',
26 | },
27 | fix_html = True,
28 | segmenter = 'twitter',
29 | corrector = 'twitter',
30 | unpack_hashtags = True,
31 | unpack_contractions = True,
32 | spell_correct_elong = False,
33 | tokenizer = SocialTokenizer(lowercase = True).tokenize,
34 | dicts = [emoticons],
35 | )
36 |
37 | sp = SpellCorrector(corpus = 'english')
38 |
--------------------------------------------------------------------------------
/scaling-backend/5.flask-gunicorn-haproxy/start.sh:
--------------------------------------------------------------------------------
1 | NUM_WORKER=$1
2 | BIND_ADDR=0.0.0.0:5000
3 | python3 initial.py
4 | gunicorn --graceful-timeout 30 --reload --max-requests 10 --timeout 180 -w $NUM_WORKER -b $BIND_ADDR -k sync app
5 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip \
6 | python3-wheel \
7 | supervisor
8 |
9 | COPY requirements.txt ./
10 | RUN pip3 install -r requirements.txt
11 |
12 | ENV LC_ALL C.UTF-8
13 | ENV LANG C.UTF-8
14 |
15 | WORKDIR /app
16 |
17 | COPY . /app
18 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run `Docker compose`,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | 
9 |
10 | 
11 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/app.py:
--------------------------------------------------------------------------------
1 | from flask import Flask
2 |
3 | app = Flask(__name__)
4 |
5 |
6 | @app.route('/')
7 | def hello_world():
8 | return 'Hey, we have Flask in a Docker container!'
9 |
10 |
11 | @app.route('/members')
12 | def members():
13 | return 'you can put anything after /members/'
14 |
15 |
16 | @app.route('/members/<string:name>/')
17 | def getMember(name):
18 | return name
19 |
20 |
21 | if __name__ == '__main__':
22 | app.run(debug = True, host = '0.0.0.0', port = 5000)
23 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/compose/develop:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | supervisord --nodaemon -c supervisord.conf
4 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | locust:
4 | build:
5 | context: ..
6 | target: base
7 | command: compose/develop
8 | environment:
9 | PYTHONPATH: .
10 | ports:
11 | - '5000:5000' # flask
12 | - '8089:8089' # locust
13 | - '9000:9000'
14 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/compose/down:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml down
4 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/eventlet.sh:
--------------------------------------------------------------------------------
1 | BIND_ADDR=0.0.0.0:5000
2 | gunicorn --worker-class eventlet -b $BIND_ADDR -p gunicorn.pid app:app
3 |
4 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/locust_file.py:
--------------------------------------------------------------------------------
1 | from locust import HttpLocust, TaskSet, task
2 |
3 |
4 | class UserBehavior(TaskSet):
5 | @task(1)
6 | def index(self):
7 | self.client.get('/')
8 |
9 |
10 | class WebsiteUser(HttpLocust):
11 | task_set = UserBehavior
12 | min_wait = 5000
13 | max_wait = 9000
14 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/requirements.txt:
--------------------------------------------------------------------------------
1 | Flask
2 | eventlet
3 | gunicorn
4 | locust
5 |
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/screenshot1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/stresstest/1.Locust-Stresstest/screenshot1.png
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/screenshot2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/stresstest/1.Locust-Stresstest/screenshot2.png
--------------------------------------------------------------------------------
/stresstest/1.Locust-Stresstest/supervisord.conf:
--------------------------------------------------------------------------------
1 | [supervisord]
2 |
3 | [program:flask]
4 | command = bash eventlet.sh
5 | stopasgroup = true
6 | autorestart = true
7 |
8 | [program:locust]
9 | command = locust -f locust_file.py --host http://localhost:5000
10 | stopasgroup = true
11 | autorestart = true
12 |
13 | [inet_http_server]
14 | port = 0.0.0.0:9000
15 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | source = web
3 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM ubuntu:16.04 AS base
2 |
3 | RUN apt-get update && apt-get install -y \
4 | python3 \
5 | python3-pip
6 |
7 | RUN pip3 install Flask pytest pytest-cov
8 |
9 | ENV LC_ALL C.UTF-8
10 | ENV LANG C.UTF-8
11 |
12 | RUN mkdir -p /home/flask
13 | WORKDIR /home/flask
14 |
15 | FROM base
16 |
17 | COPY . /home/flask
18 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/README.md:
--------------------------------------------------------------------------------
1 | ## How-to
2 |
3 | 1. Run Docker Compose via the build script,
4 | ```bash
5 | compose/build
6 | ```
7 |
8 | ```text
9 | pytest_1 | Name Stmts Miss Cover
10 | pytest_1 | ----------------------------------------
11 | pytest_1 | web/__init__.py 13 2 85%
12 | pytest_1 | web/calculation.py 6 1 83%
13 | pytest_1 | ----------------------------------------
14 | pytest_1 | TOTAL 19 3 84%
15 | pytest_1 | Coverage HTML written to dir htmlcov
16 | ```
17 |
18 | 2. Open `report/index.html`
19 |
20 | 
21 |
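The same suite can also be run outside Docker; a rough local equivalent of the `command` and `environment` defined in compose/docker-compose.yml (assuming Flask, pytest and pytest-cov are installed locally) is:

```bash
PYTHONPATH=. FLASK_APP=web pytest tests --cov --cov-report term --cov-report html
```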
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/calculation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/unit-test/1.pytest-flask/calculation.png
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/compose/build:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | docker-compose -f $(dirname "$0")/docker-compose.yml up --build
4 | rm -rf report && docker cp compose_pytest_1:/home/flask/htmlcov report
5 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/compose/docker-compose.yml:
--------------------------------------------------------------------------------
1 | version: '2.3'
2 | services:
3 | pytest:
4 | build:
5 | context: ..
6 | command: pytest tests --cov --cov-report term --cov-report html
7 | environment:
8 | PYTHONPATH: .
9 | FLASK_APP: web
10 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/coverage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/unit-test/1.pytest-flask/coverage.png
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/report/jquery.ba-throttle-debounce.min.js:
--------------------------------------------------------------------------------
1 | /*
2 | * jQuery throttle / debounce - v1.1 - 3/7/2010
3 | * http://benalman.com/projects/jquery-throttle-debounce-plugin/
4 | *
5 | * Copyright (c) 2010 "Cowboy" Ben Alman
6 | * Dual licensed under the MIT and GPL licenses.
7 | * http://benalman.com/about/license/
8 | */
9 | (function(b,c){var $=b.jQuery||b.Cowboy||(b.Cowboy={}),a;$.throttle=a=function(e,f,j,i){var h,d=0;if(typeof f!=="boolean"){i=j;j=f;f=c}function g(){var o=this,m=+new Date()-d,n=arguments;function l(){d=+new Date();j.apply(o,n)}function k(){h=c}if(i&&!h){l()}h&&clearTimeout(h);if(i===c&&m>e){l()}else{if(f!==true){h=setTimeout(i?k:l,i===c?e-m:e)}}}if($.guid){g.guid=j.guid=j.guid||$.guid++}return g};$.debounce=function(d,e,f){return f===c?a(d,e,false):a(d,f,e!==false)}})(this);
10 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/report/jquery.isonscreen.js:
--------------------------------------------------------------------------------
1 | /* Copyright (c) 2010
2 | * @author Laurence Wheway
3 | * Dual licensed under the MIT (http://www.opensource.org/licenses/mit-license.php)
4 | * and GPL (http://www.opensource.org/licenses/gpl-license.php) licenses.
5 | *
6 | * @version 1.2.0
7 | */
8 | (function($) {
9 | jQuery.extend({
10 | isOnScreen: function(box, container) {
11 | //ensure numbers come in as intgers (not strings) and remove 'px' is it's there
12 | for(var i in box){box[i] = parseFloat(box[i])};
13 | for(var i in container){container[i] = parseFloat(container[i])};
14 |
15 | if(!container){
16 | container = {
17 | left: $(window).scrollLeft(),
18 | top: $(window).scrollTop(),
19 | width: $(window).width(),
20 | height: $(window).height()
21 | }
22 | }
23 |
24 | if( box.left+box.width-container.left > 0 &&
25 | box.left < container.width+container.left &&
26 | box.top+box.height-container.top > 0 &&
27 | box.top < container.height+container.top
28 | ) return true;
29 | return false;
30 | }
31 | })
32 |
33 |
34 | jQuery.fn.isOnScreen = function (container) {
35 | for(var i in container){container[i] = parseFloat(container[i])};
36 |
37 | if(!container){
38 | container = {
39 | left: $(window).scrollLeft(),
40 | top: $(window).scrollTop(),
41 | width: $(window).width(),
42 | height: $(window).height()
43 | }
44 | }
45 |
46 | if( $(this).offset().left+$(this).width()-container.left > 0 &&
47 | $(this).offset().left < container.width+container.left &&
48 | $(this).offset().top+$(this).height()-container.top > 0 &&
49 | $(this).offset().top < container.height+container.top
50 | ) return true;
51 | return false;
52 | }
53 | })(jQuery);
54 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/report/keybd_closed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/unit-test/1.pytest-flask/report/keybd_closed.png
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/report/keybd_open.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/huseinzol05/Python-DevOps/a5d578fae7df369bd9e10175249db18fe2db989e/unit-test/1.pytest-flask/report/keybd_open.png
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/report/status.json:
--------------------------------------------------------------------------------
1 | {"format":1,"settings":"1439cd0ada669d6c6dae0d05e1e6d670","files":{"web_calculation_py":{"hash":"f44567a6a93e7a70a9474046f01850eb","index":{"html_filename":"web_calculation_py.html","nums":[1,6,0,1,0,0,0],"relative_filename":"web/calculation.py"}},"web___init___py":{"hash":"6dae37aaf714fe1df0cf27fe57f5a00d","index":{"html_filename":"web___init___py.html","nums":[1,13,0,2,0,0,0],"relative_filename":"web/__init__.py"}}},"version":"4.5.1"}
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # for pytest
2 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import pytest
2 |
3 |
4 | @pytest.fixture
5 | def client():
6 | from web import app
7 |
8 | app.config['TESTING'] = True
9 | yield app.test_client()
10 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/tests/test_api.py:
--------------------------------------------------------------------------------
1 | from urllib.parse import urlencode
2 | import json
3 |
4 |
5 | def call(client, path, params):
6 | url = path + '?' + urlencode(params)
7 | response = client.get(url)
8 | return json.loads(response.data.decode('utf-8'))
9 |
10 |
11 | def test_plus_one(client):
12 | result = call(client, '/plus_one', {'x': 2})
13 | assert result['x'] == 3
14 |
15 |
16 | def test_square(client):
17 | result = call(client, '/square', {'x': 2})
18 | assert result['x'] == 4
19 |
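The coverage table in the README reports one missed statement in `web/calculation.py` and two in `web/__init__.py`; the `/minus_one` route is the only handler without a test here. A minimal extra case following the same pattern (hypothetical, not in the repo) could be:

```python
def test_minus_one(client):
    result = call(client, '/minus_one', {'x': 2})
    assert result['x'] == 1
```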
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/web/__init__.py:
--------------------------------------------------------------------------------
1 | from .calculation import cal_step_by_one, cal_square, cal_minus_one
2 | from flask import request, Flask
3 | import json
4 |
5 | app = Flask(__name__)
6 |
7 | @app.route('/plus_one')
8 | def plus_one():
9 | x = int(request.args.get("x",1))
10 | return json.dumps({'x':cal_step_by_one(x)})
11 |
12 | @app.route('/square')
13 | def square():
14 | x = int(request.args.get("x",1))
15 | return json.dumps({'x':cal_square(x)})
16 |
17 | @app.route('/minus_one')
18 | def minus_one():
19 | x = int(request.args.get("x",1))
20 | return json.dumps({'x':cal_minus_one(x)})
21 |
--------------------------------------------------------------------------------
/unit-test/1.pytest-flask/web/calculation.py:
--------------------------------------------------------------------------------
1 | def cal_step_by_one(x):
2 | return x + 1
3 |
4 | def cal_square(x):
5 | return x * x
6 |
7 | def cal_minus_one(x):
8 | return x - 1
9 |
--------------------------------------------------------------------------------