├── Procfile ├── README.md ├── __pycache__ └── index.cpython-37.pyc ├── apinepse ├── index.py └── wsgi.py ├── index.py ├── requirements.txt └── runtime.txt /Procfile: -------------------------------------------------------------------------------- 1 | web: gunicorn apinepse.wsgi:application --log-file - 2 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # Nepal Stock Market (NEPSE) API 2 | API for Nepal Stock Market that scrapes NEPSE's website and gives data in JSON. 3 | 4 | This program scrapes the real-time data of companies listed on the Nepal Stock Market and serves it as a JSON object. 5 | 6 | The running instance of this program can be found at: 7 | 8 | http://apinepse.herokuapp.com/ (scraped from NEPSE's official website; the code for this is available here in this repo) 9 | 10 | http://nepstockapi.herokuapp.com/ (This one is faster and has more data, scraped from sharesansar; the code for this is not available in this repo, contact me individually if you need it) 11 | 12 | To fetch the data, you can just perform a GET request to either of the above-mentioned links. Eg: 13 | 14 | get(http://nepstockapi.herokuapp.com/) 15 | 16 | To run locally, install the following python packages on your local machine: 17 | 18 | a. flask 19 | 20 | b. pandas 21 | 22 | c. requests 23 | 24 | d. bs4 25 | 26 | e. lxml 27 | 28 | f. 
jsonify 29 | 30 | And finally into the folder do: 31 | 32 | python3 index.py 33 | 34 | For any queries or feedback, contact on: 35 | 36 | Facebook:https://www.facebook.com/nepali.zuckerberg 37 | 38 | Twitter: https://twitter.com/aabiseverywhere 39 | 40 | Quora: https://www.quora.com/profile/Aabishkar-Wagle 41 | 42 | Gmail: aabishkar2@gmail.com 43 | -------------------------------------------------------------------------------- /__pycache__/index.cpython-37.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/Aabishkar2/NepseAPI/a2d6e6484f3967e40e30d1ebbeda8fa908e6f156/__pycache__/index.cpython-37.pyc -------------------------------------------------------------------------------- /apinepse/index.py: -------------------------------------------------------------------------------- 1 | from flask import Flask, jsonify 2 | app = Flask(__name__) 3 | 4 | @app.route('/') 5 | def main(): 6 | import requests 7 | from bs4 import BeautifulSoup 8 | import pandas as pd 9 | import json 10 | 11 | table_Datas = [] 12 | for j in range(1,9): 13 | id = j 14 | url = "http://www.nepalstock.com/main/todays_price/index/%s" % id 15 | gethtml = requests.get(url) 16 | html = gethtml.text 17 | bs = BeautifulSoup(html, "lxml") 18 | table = bs.find('table',{'class':'table'}) 19 | tr = table.findAll('tr') 20 | for td in tr: 21 | t_d = td.findAll('td') 22 | for tD in t_d: 23 | table_Datas.append(td.text.strip()) 24 | 25 | level1 = pd.Series(table_Datas).drop_duplicates().tolist() 26 | 27 | level2 = [] 28 | for k in level1: 29 | if len(k)>35: 30 | level2.append(k) 31 | else: 32 | pass 33 | 34 | d = [] 35 | for l in range(1, len(level2)): 36 | c = level2[l].split("\n") 37 | d.append(c) 38 | 39 | df = pd.DataFrame(d[1:],columns=d[0]) 40 | df.rename(columns={'Traded Companies': 'company_name','No. 
from flask import Flask, jsonify

app = Flask(__name__)

# NEPSE's table headers mapped to JSON-friendly snake_case field names.
_COLUMN_MAP = {
    'Traded Companies': 'company_name',
    'No. Of Transaction': 'no_of_transaction',
    'Max Price': 'max_price',
    'Min Price': 'min_price',
    'Closing Price': 'closing_price',
    'Traded Shares': 'traded_shares',
    'Previous Closing': 'previous_closing',
    'Difference Rs.': 'difference_rs',
}


def _scrape_row_texts():
    """Fetch pages 1-8 of NEPSE's "today's price" listing and return the
    stripped text of every table row that contains data cells.

    Each returned string is one table row; BeautifulSoup's ``.text`` joins
    the cell values with newlines, which the parsing step splits on later.
    """
    import requests
    from bs4 import BeautifulSoup

    texts = []
    for page in range(1, 9):
        url = "http://www.nepalstock.com/main/todays_price/index/%s" % page
        # Timeout so one hung page can't stall the whole request forever.
        response = requests.get(url, timeout=30)
        soup = BeautifulSoup(response.text, "lxml")
        table = soup.find('table', {'class': 'table'})
        for row in table.findAll('tr'):
            # Keep only rows that actually carry data cells (<td>); header
            # and layout rows have none.  The original appended the row
            # text once PER CELL and relied on drop_duplicates() to clean
            # up -- appending once per row yields the same deduplicated
            # sequence without the redundant copies.
            if row.findAll('td'):
                texts.append(row.text.strip())
    return texts


def _records_from_row_texts(texts):
    """Turn raw row strings into a list of per-company dicts.

    Mirrors the original pipeline exactly: de-duplicate rows (pages repeat
    some), keep only substantial rows (``len > 35`` filters out stray
    fragments), skip the first kept row (a non-data artifact), treat the
    next one as the header row, and rename the columns via _COLUMN_MAP.
    """
    import pandas as pd

    unique_rows = pd.Series(texts).drop_duplicates().tolist()
    substantial = [row for row in unique_rows if len(row) > 35]
    # substantial[0] is skipped; substantial[1] holds the column headers,
    # the remainder are company records.
    split_rows = [row.split("\n") for row in substantial[1:]]
    df = pd.DataFrame(split_rows[1:], columns=split_rows[0])
    df.rename(columns=_COLUMN_MAP, inplace=True)
    # to_dict(orient='records') gives the same list-of-dicts the original
    # produced via json.loads(df.to_json(orient='records')), without the
    # serialize-then-reparse round trip.
    return df.to_dict(orient='records')


@app.route('/')
def main():
    """Serve today's NEPSE prices as a JSON array of per-company records."""
    return jsonify(_records_from_row_texts(_scrape_row_texts()))


if __name__ == '__main__':
    app.run(debug=True)