├── README.md
├── req.txt
└── yahooScreener.py

/README.md:
--------------------------------------------------------------------------------
# Python Stock Screener

This is a stock screener that harnesses the finviz module ([finviz - repo](https://github.com/mariostoev/finviz), [finviz - PyPI](https://pypi.org/project/finviz/)) to find candidate tickers, then uses Beautiful Soup to collect quote data from Yahoo Finance and export it to a CSV file.

## Installation & Usage

**Install**

`pip install -r req.txt`

**Usage**

Run `python yahooScreener.py` in your terminal. The script saves its results in the same directory, in a CSV file called `gappers.csv`.

Alternatively, you can use `--filename=` to change the filename, `--dir=` to change the directory path, and `--ext=` to change the file extension.

e.g. `python yahooScreener.py --filename=stocks --dir=~/Desktop/Stocks/ --ext=txt`

## Screener Options

*Coming Soon*

## Modules

- finviz
- BeautifulSoup
- urllib

## Future Goals

1. Modularize the script.
2. Expose the existing script through a backend API.
3. Create an Ionic app that uses the API and provides a quick screening menu, stock-data push notifications, and records.

--------------------------------------------------------------------------------
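Not part of the repository, but as a quick illustration of the output format: a minimal sketch of loading the screener's results back into Python with the standard library's `csv` module. It assumes the default `gappers.csv` in the working directory and the header row that `yahooScreener.py` writes on its first run; the column names (`ticker`, `current price`, `volume`) come from the script below.

```python
import csv

# Minimal sketch: read the default output file back in.
# Assumes 'gappers.csv' exists and begins with the header row the script writes.
with open('gappers.csv', newline='') as f:
    for row in csv.DictReader(f):
        print(row['ticker'], row['current price'], row['volume'])
```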
/req.txt:
--------------------------------------------------------------------------------
beautifulsoup4==4.7.1
finviz==1.2.3
urllib3==1.26.18

--------------------------------------------------------------------------------
/yahooScreener.py:
--------------------------------------------------------------------------------
import os, datetime, sys
from time import sleep
from finviz.screener import Screener
from urllib import request as r
from bs4 import BeautifulSoup


def check_args(args, flag):
    '''Cycle through the CLI args and return the matching flag's value, or None.'''
    value = None
    if len(args):
        for arg in args:
            if flag in arg:
                value = get_arg(arg)
    return value


def get_arg(arg):
    return arg.split('=')[1] if '=' in arg else ''


def check_defaults(value, flag):
    defaults = {
        'filename': 'gappers',
        'dir': os.getcwd(),
        'ext': 'csv'
    }
    return value if value is not None else defaults[flag]


# Resolve the output location from the optional --filename=, --dir= and --ext= flags.
filename = check_defaults(check_args(sys.argv, 'filename'), 'filename')
path = os.path.expanduser(check_defaults(check_args(sys.argv, 'dir'), 'dir'))
ext = check_defaults(check_args(sys.argv, 'ext'), 'ext')
target_path = os.path.join(path, f'{filename}.{ext}')

# Finviz filters for the stocks to track.
filters = ['sh_price_u5', 'ta_gap_d5', 'ta_rsi_os30', 'ft=3']
stocks = Screener(filters=filters, order="price")
tickers = [ticker['Ticker'] for ticker in stocks]


def group(lst, n):
    '''Yield tuples of n consecutive items (label/value pairs from the BS4-extracted data).'''
    for i in range(0, len(lst), n):
        val = lst[i:i + n]
        if len(val) == n:
            yield tuple(val)


def scan_columns(value):
    '''Verify that the current TD element's label is one of the columns we want.'''
    columns = ['Open', 'Close', 'Volume', 'Avg. Volume', 'Bid', 'Ask']
    return any(col in value for col in columns)


def getData(tickers, ctime):
    for ticker in tickers:
        html = r.urlopen(f"https://finance.yahoo.com/quote/{ticker}")
        soup = BeautifulSoup(html, 'html.parser')

        quoteHeader = soup.findAll('div', attrs={"id": "quote-header-info"})
        current_price = quoteHeader[0].findAll(attrs={"data-reactid": "14"})[0].get_text()  # specific element from Yahoo Finance's quote header.
        tds = list(group(soup.findAll(lambda tag: tag.name == 'td' and 'data-reactid' in tag.attrs), 2))
        texts = {t[0].get_text(): t[1].get_text() for t in tds if scan_columns(t[0].get_text())}

        data = {
            "current time": ctime.strftime("%Y %m %d - %H:%M:%S"),
            "ticker": ticker,
            "current price": current_price,
            "open": texts['Open'],
            "bid": texts['Bid'],
            "ask": texts['Ask'],
            "volume": texts['Volume'],
            "average volume": f"'{texts['Avg. Volume']}'",
            # Only record a closing price after the market has closed (17:00 or 19:00 local).
            "close": current_price if ctime.hour in (17, 19) else ''
        }

        if os.path.exists(path):
            with open(target_path, 'a+') as gappers:
                gappers.seek(0)
                # Write the header row only when the file is new/empty.
                if len(gappers.readlines()) == 0:
                    gappers.write(f"{','.join(data.keys())}\n")
                gappers.write(f"{','.join(data.values())}\n")
            print(f'recorded data for time {ctime}')
        else:
            raise OSError(f"Path: {path} doesn't exist...")


def recordData():
    check = True
    # Adjusted from EST to MST; daylight saving time not handled.
    while check:
        current_time = datetime.datetime.now()
        if current_time.hour >= 7 and current_time.hour <= 13:
            if (current_time.hour == 7 and current_time.minute >= 30) or current_time.hour >= 8:
                getData(tickers, current_time)
                sleep(60 * 15)  # check every 15 minutes
            else:
                sleep(60)  # before the 7:30 open: wait a minute and re-check
        else:
            check = False

    print('Finished recording for today...')


recordData()
--------------------------------------------------------------------------------
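`yahooScreener.py` resolves its `--filename=`, `--dir=`, and `--ext=` flags by scanning `sys.argv` by hand (`check_args` / `get_arg` / `check_defaults`). For comparison only, and not part of the repository, here is a self-contained sketch of the same three flags and defaults using the standard library's `argparse`:

```python
import argparse
import os

# Hypothetical alternative to the manual flag parsing in yahooScreener.py;
# the defaults mirror check_defaults(): 'gappers', the working directory, and 'csv'.
parser = argparse.ArgumentParser(description='Yahoo Finance gapper screener')
parser.add_argument('--filename', default='gappers', help='output file name, without extension')
parser.add_argument('--dir', default=os.getcwd(), help='directory to write the output file into')
parser.add_argument('--ext', default='csv', help='output file extension')
args = parser.parse_args()

target_path = os.path.join(os.path.expanduser(args.dir), f'{args.filename}.{args.ext}')
print(f'writing results to {target_path}')
```

argparse also rejects unknown flags and generates `--help` text automatically, which the hand-rolled scan does not.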