├── api
│   ├── model
│   │   └── iris_svm_model.pkl
│   ├── requirements.txt
│   ├── Dockerfile
│   ├── iris_svm_train.py
│   └── app.py
├── nginx
│   ├── Dockerfile
│   └── nginx.conf
├── README.md
└── docker-compose.yml

/api/model/iris_svm_model.pkl:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Akxay/dockerized_scalable_ml/HEAD/api/model/iris_svm_model.pkl
--------------------------------------------------------------------------------

/nginx/Dockerfile:
--------------------------------------------------------------------------------
# Dockerfile for the nginx reverse proxy
FROM nginx:1.15.2

RUN rm /etc/nginx/nginx.conf
COPY nginx.conf /etc/nginx/
--------------------------------------------------------------------------------

/api/requirements.txt:
--------------------------------------------------------------------------------
Cython==0.25.2
Flask==0.12.2
Flask-Cors==3.0.2
numpy==1.15.2
numpydoc==0.6.0
pandas==0.20.1
requests==2.14.2
scipy==1.1.0
scikit-learn==0.19.2
gunicorn==19.9.0
--------------------------------------------------------------------------------

/README.md:
--------------------------------------------------------------------------------
Please follow the instructions in my article published on Data Driven Investor:

https://medium.com/datadriveninvestor/from-model-inception-to-deployment-adce1f5ed9d6


How to run:

cd into the project directory

docker-compose up

A sample request against the running stack is shown after the docker-compose.yml section below.
--------------------------------------------------------------------------------

/docker-compose.yml:
--------------------------------------------------------------------------------
version: '3'

services:

  api:
    container_name: api  # name can be anything
    restart: always
    build: ./api
    ports:
      - "8000:8000"
    command: gunicorn -w 1 -b :8000 app:app

  server:
    container_name: server_nginx
    restart: always
    build: ./nginx
    ports:
      - "8080:80"
    depends_on:
      - api
--------------------------------------------------------------------------------
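Sample request (a minimal sketch, not a file in the repository): once docker-compose up has both containers running, the /predict endpoint can be reached through the nginx proxy on host port 8080. The payload mirrors the four fields app.py reads from each record; the values below are ordinary, hypothetical iris measurements.

import requests

# A list of records, each carrying the four iris features app.py expects.
payload = [
    {"sepal_length": 5.1, "sepal_width": 3.5, "petal_length": 1.4, "petal_width": 0.2},
    {"sepal_length": 6.7, "sepal_width": 3.0, "petal_length": 5.2, "petal_width": 2.3},
]

# Host port 8080 is mapped to nginx (container port 80) in docker-compose.yml;
# port 8000 would hit the gunicorn/flask container directly.
resp = requests.post("http://localhost:8080/predict", json=payload)
print(resp.status_code)   # 200 on success
print(resp.json())        # e.g. {"predictions": {"prediction_0": 0, "prediction_1": 2}}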
/api/Dockerfile:
--------------------------------------------------------------------------------
FROM python:3.6

# make directories suited to your application
RUN mkdir -p /home/project/app
#RUN mkdir -p /home/project/app/model
WORKDIR /home/project/app

# copy and install the packages needed by the flask app
COPY requirements.txt /home/project/app
RUN pip install --no-cache-dir -r requirements.txt

# copy contents from your local machine into the docker container
COPY . /home/project/app
COPY ./model /home/project/app/model
--------------------------------------------------------------------------------

/nginx/nginx.conf:
--------------------------------------------------------------------------------
user nginx;

events {
    worker_connections 1024;
}

worker_processes 1;

http {

    keepalive_timeout 65;

    server {
        listen 80;
        server_name localhost;
        access_log /var/log/nginx/example.log;

        location / {
            # forward requests to the "api" service defined in docker-compose.yml
            # (localhost here would resolve to the nginx container itself)
            proxy_pass http://api:8000;

            proxy_set_header Host $host;
            # proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            # proxy_set_header X-Forward-Host $server_name;
        }
    }
}
--------------------------------------------------------------------------------

/api/iris_svm_train.py:
--------------------------------------------------------------------------------
# Load required packages
import pandas as pd
import numpy as np
from sklearn.externals import joblib
from sklearn import svm, datasets
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.svm import SVC

iris = datasets.load_iris()  # import data to play with
#print(iris.data.shape, iris.target.shape)  # shape of the features and the target variable
#print("Iris data set Description : ", iris['DESCR'])  # data size, variables and class distribution
X = iris.data    # features
y = iris.target  # target variable

# split the data into train and test sets
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=10)

# fit an SVM classifier
model = SVC(kernel='linear').fit(X_train, y_train)

# calculate test predictions and print the test accuracy
y_pred = model.predict(X_test)
print(model.score(X_test, y_test.ravel()))

# save the trained model
joblib.dump(model, 'model/iris_svm_model.pkl', compress=True)
--------------------------------------------------------------------------------
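A quick sanity check (a sketch, not a file in the repository): after running iris_svm_train.py from the api/ directory, the pickled model can be reloaded with the same joblib import that app.py uses, to confirm the artifact predicts before the API image is built.

from sklearn.externals import joblib
import numpy as np

# load the model saved by iris_svm_train.py (run from the api/ directory)
model = joblib.load('model/iris_svm_model.pkl')

# one record: sepal_length, sepal_width, petal_length, petal_width
sample = np.array([[5.1, 3.5, 1.4, 0.2]])
print(model.predict(sample))  # a single class label, e.g. [0]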
/api/app.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 8 20:27:39 2018

@author: akxay
"""
from sklearn.externals import joblib
import numpy as np
import pandas as pd
from flask import Flask, jsonify, request

app = Flask(__name__)


@app.route("/")
def hello():
    return "Hoilaaaaaaaaa!"


@app.route('/predict', methods=['POST'])
def apicall():
    """API call: expects a JSON list of records, each holding the four iris
    features, and returns the predicted class for every record."""
    try:
        test_json = request.get_json()
        val = []
        print(test_json)
        # build one feature row per input record
        for dic in test_json:
            row = []
            row.append(dic['sepal_length'])
            row.append(dic['sepal_width'])
            row.append(dic['petal_length'])
            row.append(dic['petal_width'])
            val.append(row)
        print(np.array(val))
        # load model and predict
        loaded_model = joblib.load('model/iris_svm_model.pkl')
        y_pred = loaded_model.predict(np.array(val))
        pred_dict = {}
        for i, pred in enumerate(y_pred):
            pred_dict['prediction_' + str(i)] = int(pred)
        responses = jsonify(predictions=pred_dict)
        responses.status_code = 200
    except Exception as e:
        responses = jsonify(predictions={'error': 'some error occurred, please try again later'})
        responses.status_code = 400
        print('error', e)
    return responses


#if __name__ == "__main__":
#    app.run(host='0.0.0.0')  # remove debug=True, or set to False
--------------------------------------------------------------------------------
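For a quick test without docker, nginx, or gunicorn, Flask's built-in test client can exercise the /predict route in-process (a sketch assuming the pickled model is present at model/iris_svm_model.pkl and the script is run from the api/ directory):

import json
from app import app

client = app.test_client()

payload = [{"sepal_length": 5.9, "sepal_width": 3.0,
            "petal_length": 4.2, "petal_width": 1.5}]

# data= plus content_type keeps this compatible with the pinned Flask 0.12.2,
# which predates the test client's json= argument
resp = client.post('/predict', data=json.dumps(payload),
                   content_type='application/json')
print(resp.status_code)        # 200 on success
print(json.loads(resp.data))   # {"predictions": {"prediction_0": ...}}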