├── showDown ├── proxy.config ├── __pycache__ │ ├── download.cpython-35.pyc │ ├── showDown.cpython-35.pyc │ └── showDown_hd.cpython-35.pyc ├── episode.py ├── download.py ├── showDown_hd.py ├── showDown.py └── showDown.py~ ├── .travis.yml ├── CONTRIBUTING.md ├── CONTRIBUTORS.md ├── .gitattributes ├── README.md └── .gitignore /showDown/proxy.config: -------------------------------------------------------------------------------- 1 | 2 | http://edcguest:edcguest@172.31.100.85:5959 3 | 4 | http://heed:ravi@172.31.103.29:3128 5 | 6 | -------------------------------------------------------------------------------- /showDown/__pycache__/download.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MayankPratap/show-down/HEAD/showDown/__pycache__/download.cpython-35.pyc -------------------------------------------------------------------------------- /showDown/__pycache__/showDown.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MayankPratap/show-down/HEAD/showDown/__pycache__/showDown.cpython-35.pyc -------------------------------------------------------------------------------- /showDown/__pycache__/showDown_hd.cpython-35.pyc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/MayankPratap/show-down/HEAD/showDown/__pycache__/showDown_hd.cpython-35.pyc -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | - "3.4" 5 | 6 | # command to install dependencies 7 | install: "pip install beautifulsoup4" 8 | 9 | script: nosetests 10 | 11 | 12 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ####Do not push to master branch 2 | 3 | ######Push to dev branch, after it verifies all the tests, then we will merge it with the master branch. 4 | ######It will help in the case, someone accidently pushed a buggy code. 5 | ######To fix bugs or updating the code, pull from dev branch. 6 | -------------------------------------------------------------------------------- /CONTRIBUTORS.md: -------------------------------------------------------------------------------- 1 | # A list of contributors 2 | 3 | * [Mayank Pratap](https://github.com/MayankPratap) 4 | * [Narendra Pal](https://github.com/npcoder2k14) 5 | * [Prabhat Doongarwal](https://github.com/pdoongarwal) 6 | * Shashank Singh 7 | * [Sunil Sangwan](https://github.com/shark-S) 8 | * [Keshav Garg](https://github.com/mkeshavgarg) 9 | * [Kartikay Singh](https://github.com/alphaguy4) 10 | -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | 4 | # Custom for Visual Studio 5 | *.cs diff=csharp 6 | 7 | # Standard to msysgit 8 | *.doc diff=astextplain 9 | *.DOC diff=astextplain 10 | *.docx diff=astextplain 11 | *.DOCX diff=astextplain 12 | *.dot diff=astextplain 13 | *.DOT diff=astextplain 14 | *.pdf diff=astextplain 15 | *.PDF diff=astextplain 16 | *.rtf diff=astextplain 17 | *.RTF diff=astextplain 18 | -------------------------------------------------------------------------------- /showDown/episode.py: -------------------------------------------------------------------------------- 1 | from showDown import showDown 2 | 3 | from showDown_hd import showDown_hd 4 | import argparse 5 | 6 | user = showDown() 7 | user_hd = showDown_hd() 8 | 9 | parser = argparse.ArgumentParser(description='A Command line tool to download your favorie TV Shows') 10 | parser.add_argument('-dl','--downloadlatest', help='Download the latest episode of your favorite TV Show', required=False) 11 | parser.add_argument('-hdl','--highdefinition', help='Download the latest episode in HD', required=False) 12 | parser.add_argument('-p','--proxy', help='Set the proxy', required=False) 13 | parser.add_argument('-u','--unsetproxy', help='Unset the proxy', action='store_true') 14 | parser.add_argument('-l','--listshows', help='List available Shows', action='store_true') 15 | 16 | args = vars(parser.parse_args()) 17 | 18 | if args['proxy']: 19 | user.setproxy(args['proxy']) 20 | 21 | elif args['unsetproxy']: 22 | user.unsetproxy() 23 | 24 | elif args['highdefinition']: 25 | user_hd.downloadLatest(args['highdefinition']) 26 | 27 | elif args['downloadlatest']: 28 | user.downloadLatest(args['downloadlatest']) 29 | 30 | elif args['listshows']: 31 | user.listAvailableShows() 32 | 33 | 34 | 35 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # show-down 2 | 3 | [![Join the chat at https://gitter.im/MayankPratap/show-down](https://badges.gitter.im/MayankPratap/show-down.svg)](https://gitter.im/MayankPratap/show-down?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) 4 | 5 | A simple script to download latest episode of any TV show by simply typing a show name. 6 | 7 | Supported in Python3 only. 8 | 9 | - Dependencies 10 | - beautifulsoup4 module of python :- Install it using command ```pip install beautifulsoup4``` in terminal. 11 | 12 | 13 | - Download ZIP file of repository or Clone the repository. 14 | 15 | - If using for first time and your system is in proxy network then 16 | 17 | - Use ```python episode.py -p http://username:password@proxy_address:port``` 18 | 19 | - You can choose to change proxy anytime 20 | 21 | - You can unset proxy by command ```python episode.py -u``` 22 | 23 | - For Downloading 24 | 25 | - Use ```python episode.py -dl show_name``` 26 | 27 | - Download format is .mp4 28 | 29 | - To get list of all available shows in mp4 quality. 30 | 31 | - Use ```python episode.py -l``` 32 | 33 | - For Help 34 | 35 | - Type ```python episode.py -h``` 36 | 37 | - **Those who want to schedule their downloader to run weekly and check for latest episode of TV Series you entered :-** 38 | - *For Ubuntu and other Linux versions* 39 | 40 | - Open Terminal, type ```crontab -e``` 41 | 42 | - Add this at end of file :- ```@weekly DISPLAY=:0 xterm -e python2 /path/to/my/episode.py``` 43 | 44 | For example :- ```@weekly DISPLAY=:0 xterm -e python2 /opt/lampp/htdocs/show-down/episode.py``` 45 | 46 | - Save changes in crontab -e and exit. 47 | 48 | - Testing Travis CI 49 | -------------------------------------------------------------------------------- /showDown/download.py: -------------------------------------------------------------------------------- 1 | import sys 2 | import os 3 | import urllib.request as urllib2 4 | 5 | import time 6 | 7 | 8 | class Downloader(object): 9 | 10 | def __init__(self): 11 | None 12 | 13 | def download(self,url,filename): 14 | try: 15 | self.url = url 16 | self.header = { 'USER_AGENT' : 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:43.0) Gecko/20100101 Firefox/43.0'} 17 | self.proxy_file = open("proxy.config","r") 18 | self.http_proxy = self.proxy_file.read() 19 | self.proxyDict = { 20 | "http" : self.http_proxy 21 | } 22 | self.proxy_file.close() 23 | r = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)) 24 | print ('\rSend Get ... ') 25 | time.sleep(1) 26 | print ('\rDownload started .... ') 27 | time.sleep(1) 28 | content_length = r.headers['content-length'] 29 | print ('Filename : %s '%(filename)) 30 | print ('Filesize : %.2f MB'%(float(content_length)/(1024*1024))) 31 | downloaded = 0 32 | f = open(filename, "wb") 33 | data=r.read(10240) 34 | cur_speed = 0 35 | session_data=0 36 | start_time = lasttime = time.time() 37 | if sys.platform!='win32': 38 | os.system('setterm -cursor off') 39 | while data : 40 | f.write(data) 41 | session_data += len(data) 42 | if int(time.time()-lasttime) == 1: 43 | cur_speed = (session_data/1000)/(time.time()-lasttime) 44 | lasttime = time.time() 45 | session_data = 0 46 | downloaded += len(data) 47 | sys.stdout.write('\rDownloaded : %.2f %% Downloading @ %.2f kBps '%(float(downloaded*100)/float(content_length),cur_speed)) 48 | sys.stdout.flush() 49 | data=r.read(10240) 50 | f.close() 51 | if sys.platform!='win32': 52 | os.system('setterm -cursor on') 53 | sys.stdout.write("\n"+filename + " downloaded successfully !!!\n%.2f MB downloaded in %.2f s ."%(float(content_length)/(1024*1024),time.time() - start_time)) 54 | except KeyboardInterrupt : 55 | if sys.platform!='win32': 56 | os.system('setterm -cursor on') 57 | print (' Ctrl + C pressed') 58 | if os.path.isfile("./"+filename): 59 | r.close() 60 | f.close() 61 | os.remove("./"+filename) 62 | -------------------------------------------------------------------------------- /showDown/showDown_hd.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/python 2 | import bs4 3 | import os 4 | import time 5 | import sys 6 | import webbrowser 7 | import urllib.request as urllib2 8 | 9 | class showDown_hd: 10 | 11 | def __init__(self): 12 | self.url = 'http://dayt.se/tvseries/index.php?&page=' 13 | self.header = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.11 (KHTML, like Gecko) Chrome/23.0.1271.64 Safari/537.11', 14 | 'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8', 15 | 'Accept-Charset': 'ISO-8859-1,utf-8;q=0.7,*;q=0.3', 16 | 'Accept-Encoding': 'none', 17 | 'Accept-Language': 'en-US,en;q=0.8', 18 | 'Connection': 'keep-alive'} 19 | self.proxy_file = open("proxy.config","r") 20 | self.http_proxy = self.proxy_file.read() 21 | self.proxyDict = { 22 | "http" : self.http_proxy 23 | } 24 | self.proxy_file.close() 25 | 26 | def downloadLatest(self,show_name): 27 | print ('Sending request ... ') 28 | if self.http_proxy != '': 29 | proxy = urllib2.ProxyHandler(self.proxyDict) 30 | auth = urllib2.HTTPBasicAuthHandler() 31 | opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler) 32 | urllib2.install_opener(opener) 33 | page_num=1 34 | while True: 35 | try: 36 | if page_num>3: 37 | raise ValueError("Sorry, no match found...") 38 | found=False 39 | res = urllib2.urlopen(urllib2.Request(self.url+str(page_num), headers = self.header)).read() 40 | soup = bs4.BeautifulSoup(res, "html.parser") 41 | elems = soup.select('p[align="center"]') 42 | for i in range(len(elems)): 43 | if elems[i].getText().lower()==show_name.lower(): 44 | global show_num 45 | show_num=i 46 | found=True 47 | break 48 | if found: 49 | break 50 | else: 51 | page_num+=1 52 | except ValueError as e: 53 | print(e) 54 | exit() 55 | res = urllib2.urlopen(urllib2.Request(self.url+str(page_num), headers = self.header)).read() 56 | soup = bs4.BeautifulSoup(res, "html.parser") 57 | elems=soup.select('.topic_head a') 58 | self.url="http://dayt.se/tvseries/"+elems[show_num].get('href') 59 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 60 | soup = bs4.BeautifulSoup(res, "html.parser") 61 | elems=soup.select('.title') 62 | print("Enter choice:") 63 | for i in range(len(elems)): 64 | print(str(i+1)+". "+str(elems[i].getText())) 65 | choice=int(input()) 66 | self.url="http://dayt.se/forum/"+elems[choice-1].get('href') 67 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 68 | soup = bs4.BeautifulSoup(res, "html.parser") 69 | elems=soup.select('#dm1') 70 | self.url=elems[0].get('href') 71 | webbrowser.open(self.url) 72 | 73 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | ################# 2 | ## Eclipse 3 | ################# 4 | 5 | *.pydevproject 6 | .project 7 | .metadata 8 | bin/ 9 | tmp/ 10 | *.tmp 11 | *.bak 12 | *.swp 13 | *~.nib 14 | local.properties 15 | .classpath 16 | .settings/ 17 | .loadpath 18 | 19 | # External tool builders 20 | .externalToolBuilders/ 21 | 22 | # Locally stored "Eclipse launch configurations" 23 | *.launch 24 | 25 | # CDT-specific 26 | .cproject 27 | 28 | # PDT-specific 29 | .buildpath 30 | 31 | 32 | ################# 33 | ## Visual Studio 34 | ################# 35 | 36 | ## Ignore Visual Studio temporary files, build results, and 37 | ## files generated by popular Visual Studio add-ons. 38 | 39 | # User-specific files 40 | *.suo 41 | *.user 42 | *.sln.docstates 43 | 44 | # Build results 45 | 46 | [Dd]ebug/ 47 | [Rr]elease/ 48 | x64/ 49 | build/ 50 | [Bb]in/ 51 | [Oo]bj/ 52 | 53 | # MSTest test Results 54 | [Tt]est[Rr]esult*/ 55 | [Bb]uild[Ll]og.* 56 | 57 | *_i.c 58 | *_p.c 59 | *.ilk 60 | *.meta 61 | *.obj 62 | *.pch 63 | *.pdb 64 | *.pgc 65 | *.pgd 66 | *.rsp 67 | *.sbr 68 | *.tlb 69 | *.tli 70 | *.tlh 71 | *.tmp 72 | *.tmp_proj 73 | *.log 74 | *.vspscc 75 | *.vssscc 76 | .builds 77 | *.pidb 78 | *.log 79 | *.scc 80 | 81 | # Visual C++ cache files 82 | ipch/ 83 | *.aps 84 | *.ncb 85 | *.opensdf 86 | *.sdf 87 | *.cachefile 88 | 89 | # Visual Studio profiler 90 | *.psess 91 | *.vsp 92 | *.vspx 93 | 94 | # Guidance Automation Toolkit 95 | *.gpState 96 | 97 | # ReSharper is a .NET coding add-in 98 | _ReSharper*/ 99 | *.[Rr]e[Ss]harper 100 | 101 | # TeamCity is a build add-in 102 | _TeamCity* 103 | 104 | # DotCover is a Code Coverage Tool 105 | *.dotCover 106 | 107 | # NCrunch 108 | *.ncrunch* 109 | .*crunch*.local.xml 110 | 111 | # Installshield output folder 112 | [Ee]xpress/ 113 | 114 | # DocProject is a documentation generator add-in 115 | DocProject/buildhelp/ 116 | DocProject/Help/*.HxT 117 | DocProject/Help/*.HxC 118 | DocProject/Help/*.hhc 119 | DocProject/Help/*.hhk 120 | DocProject/Help/*.hhp 121 | DocProject/Help/Html2 122 | DocProject/Help/html 123 | 124 | # Click-Once directory 125 | publish/ 126 | 127 | # Publish Web Output 128 | *.Publish.xml 129 | *.pubxml 130 | *.publishproj 131 | 132 | # NuGet Packages Directory 133 | ## TODO: If you have NuGet Package Restore enabled, uncomment the next line 134 | #packages/ 135 | 136 | # Windows Azure Build Output 137 | csx 138 | *.build.csdef 139 | 140 | # Windows Store app package directory 141 | AppPackages/ 142 | 143 | # Others 144 | sql/ 145 | *.Cache 146 | ClientBin/ 147 | [Ss]tyle[Cc]op.* 148 | ~$* 149 | *~ 150 | *.dbmdl 151 | *.[Pp]ublish.xml 152 | *.pfx 153 | *.publishsettings 154 | 155 | # RIA/Silverlight projects 156 | Generated_Code/ 157 | 158 | # Backup & report files from converting an old project file to a newer 159 | # Visual Studio version. Backup files are not needed, because we have git ;-) 160 | _UpgradeReport_Files/ 161 | Backup*/ 162 | UpgradeLog*.XML 163 | UpgradeLog*.htm 164 | 165 | # SQL Server files 166 | App_Data/*.mdf 167 | App_Data/*.ldf 168 | 169 | ############# 170 | ## Windows detritus 171 | ############# 172 | 173 | # Windows image file caches 174 | Thumbs.db 175 | ehthumbs.db 176 | 177 | # Folder config file 178 | Desktop.ini 179 | 180 | # Recycle Bin used on file shares 181 | $RECYCLE.BIN/ 182 | 183 | # Mac crap 184 | .DS_Store 185 | 186 | 187 | ############# 188 | ## Python 189 | ############# 190 | 191 | *.py[cod] 192 | 193 | # Packages 194 | *.egg 195 | *.egg-info 196 | dist/ 197 | build/ 198 | eggs/ 199 | parts/ 200 | var/ 201 | sdist/ 202 | develop-eggs/ 203 | .installed.cfg 204 | 205 | # Installer logs 206 | pip-log.txt 207 | 208 | # Unit test / coverage reports 209 | .coverage 210 | .tox 211 | 212 | #Translations 213 | *.mo 214 | 215 | #Mr Developer 216 | .mr.developer.cfg 217 | -------------------------------------------------------------------------------- /showDown/showDown.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from download import Downloader 3 | import urllib.request as urllib2 4 | import bs4 5 | import os 6 | import time 7 | import sys 8 | temp = -1 9 | 10 | class showDown: 11 | 12 | def __init__(self): 13 | self.url = 'http://tvshows4mobile.com/search/list_all_tv_series' 14 | self.header = { 'USER_AGENT' : 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:43.0) Gecko/20100101 Firefox/43.0'} 15 | self.proxy_file = open("proxy.config","r") 16 | self.http_proxy = self.proxy_file.read() 17 | self.proxyDict = { 18 | "http" : self.http_proxy 19 | } 20 | self.proxy_file.close() 21 | 22 | def setproxy(self,proxy) : 23 | # Writes the given proxy settings to the proxy configuration file. 24 | 25 | # Arguments: 26 | # proxy - a string containing the proxy settings 27 | 28 | self.proxy_file = open("proxy.config","w") 29 | self.proxy_file.write(proxy) 30 | print ("Proxy updated") 31 | self.proxy_file.close() 32 | 33 | def unsetproxy(self) : 34 | # Removes proxy settings from the proxy configuration file. 35 | self.proxy_file = open("proxy.config","w") 36 | self.proxy_file.write("") 37 | print ("Proxy removed") 38 | self.proxy_file.close() 39 | 40 | def downloadLatest(self,show_name): 41 | print ('Sending request ... ') 42 | if self.http_proxy != '': 43 | proxy = urllib2.ProxyHandler(self.proxyDict) 44 | auth = urllib2.HTTPBasicAuthHandler() 45 | opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler) 46 | urllib2.install_opener(opener) 47 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 48 | soup = bs4.BeautifulSoup(res, "html.parser") 49 | elems = soup.select('.data a') 50 | for i in range(len(elems)): 51 | if show_name.lower() in str(elems[i]).lower(): 52 | global temp 53 | temp = i 54 | if temp != -1 : 55 | self.url = elems[temp].get('href') 56 | self.url = self.url[:-10] 57 | 58 | #print(self.url) 59 | self.currentSeason() 60 | #print(self.url) 61 | self.latestEpisode() 62 | #print(self.url) 63 | self.getVideoLink() 64 | 65 | 66 | self.currentSeason() 67 | self.latestEpisode() 68 | self.getVideoLink() 69 | 70 | print ("The file in latest available is : %s"%(self.filename)) 71 | 72 | response = input("Do you wanna download it (y or n): ") 73 | if response=='y' or response=='Y': 74 | video = Downloader() 75 | video.download(url = self.url ,filename = self.filename) 76 | 77 | else : 78 | if sys.platform!='win32': 79 | os.system("setterm -cursor on") 80 | return 81 | temp = -1 82 | else : 83 | print ('Sorry , no match found . ') 84 | 85 | def currentSeason(self): 86 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 87 | soup=bs4.BeautifulSoup(res, "html.parser") 88 | elems=soup.select('.data a') 89 | lastseason=elems[0].get('href') 90 | self.url = lastseason 91 | 92 | 93 | def latestEpisode(self): 94 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 95 | soup=bs4.BeautifulSoup(res, "html.parser") 96 | elems=soup.select('.data a') 97 | lastepisode=elems[0].get('href') 98 | self.url = lastepisode 99 | 100 | def getVideoLink(self): 101 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 102 | soup=bs4.BeautifulSoup(res, "html.parser") 103 | 104 | # Because .data span has only two occurences it is easy 105 | # to capture than .data a 106 | elem=soup.select('.data span')[0] 107 | elema=elem.parent 108 | elema=elema.find_all("a")[0] 109 | url=elema.get('href') 110 | self.filename=elema.get_text() 111 | #elema=elem.parent 112 | #print(elema.get_text()) 113 | #elema=soup.find_all('a')[0] 114 | 115 | #print(elema.get('href')) 116 | #self.filename = str(elems[-2]) 117 | #self.filename = self.filename[self.filename.find(">")+1:] 118 | #self.filename = self.filename[:self.filename.find("<")] 119 | self.url=url 120 | 121 | 122 | elems=soup.select('.data a') 123 | self.filename = str(elems[-2]) 124 | self.filename = self.filename[self.filename.find(">")+1:] 125 | self.filename = self.filename[:self.filename.find("<")] 126 | self.url=elems[-2].get('href') 127 | 128 | 129 | def listAvailableShows(self): 130 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 131 | soup=bs4.BeautifulSoup(res, "html.parser") 132 | elems=soup.select('.data a') 133 | print ('List of TV Shows: \n') 134 | for i in range(len(elems)): 135 | show_name = str(elems[i]) 136 | show_name = show_name[show_name.find(">")+1:] 137 | show_name = show_name[:show_name.find("<")] 138 | print ("\r " + show_name) 139 | 140 | 141 | -------------------------------------------------------------------------------- /showDown/showDown.py~: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from download import Downloader 3 | import urllib.request as urllib2 4 | import bs4 5 | import os 6 | import time 7 | import sys 8 | temp = -1 9 | 10 | class showDown: 11 | 12 | def __init__(self): 13 | self.url = 'http://tvshows4mobile.com/search/list_all_tv_series' 14 | self.header = { 'USER_AGENT' : 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:43.0) Gecko/20100101 Firefox/43.0'} 15 | self.proxy_file = open("proxy.config","r") 16 | self.http_proxy = self.proxy_file.read() 17 | self.proxyDict = { 18 | "http" : self.http_proxy 19 | } 20 | self.proxy_file.close() 21 | 22 | def setproxy(self,proxy) : 23 | # Writes the given proxy settings to the proxy configuration file. 24 | 25 | # Arguments: 26 | # proxy - a string containing the proxy settings 27 | 28 | self.proxy_file = open("proxy.config","w") 29 | self.proxy_file.write(proxy) 30 | print ("Proxy updated") 31 | self.proxy_file.close() 32 | 33 | def unsetproxy(self) : 34 | # Removes proxy settings from the proxy configuration file. 35 | self.proxy_file = open("proxy.config","w") 36 | self.proxy_file.write("") 37 | print ("Proxy removed") 38 | self.proxy_file.close() 39 | 40 | def downloadLatest(self,show_name): 41 | print ('Sending request ... ') 42 | if self.http_proxy != '': 43 | proxy = urllib2.ProxyHandler(self.proxyDict) 44 | auth = urllib2.HTTPBasicAuthHandler() 45 | opener = urllib2.build_opener(proxy, auth, urllib2.HTTPHandler) 46 | urllib2.install_opener(opener) 47 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 48 | soup = bs4.BeautifulSoup(res, "html.parser") 49 | elems = soup.select('.data a') 50 | for i in range(len(elems)): 51 | if show_name.lower() in str(elems[i]).lower(): 52 | global temp 53 | temp = i 54 | if temp != -1 : 55 | self.url = elems[temp].get('href') 56 | self.url = self.url[:-10] 57 | <<<<<<< HEAD 58 | #print(self.url) 59 | self.currentSeason() 60 | #print(self.url) 61 | self.latestEpisode() 62 | #print(self.url) 63 | self.getVideoLink() 64 | 65 | ======= 66 | self.currentSeason() 67 | self.latestEpisode() 68 | self.getVideoLink() 69 | >>>>>>> b5c2e2405088207b37f79964fe74ccc5fef674d4 70 | print ("The file in latest available is : %s"%(self.filename)) 71 | 72 | response = input("Do you wanna download it (y or n): ") 73 | if response=='y' or response=='Y': 74 | video = Downloader() 75 | video.download(url = self.url ,filename = self.filename) 76 | 77 | else : 78 | if sys.platform!='win32': 79 | os.system("setterm -cursor on") 80 | return 81 | temp = -1 82 | else : 83 | print ('Sorry , no match found . ') 84 | 85 | def currentSeason(self): 86 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 87 | soup=bs4.BeautifulSoup(res, "html.parser") 88 | elems=soup.select('.data a') 89 | lastseason=elems[0].get('href') 90 | self.url = lastseason 91 | 92 | 93 | def latestEpisode(self): 94 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 95 | soup=bs4.BeautifulSoup(res, "html.parser") 96 | elems=soup.select('.data a') 97 | lastepisode=elems[0].get('href') 98 | self.url = lastepisode 99 | 100 | def getVideoLink(self): 101 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 102 | soup=bs4.BeautifulSoup(res, "html.parser") 103 | <<<<<<< HEAD 104 | # Because .data span has only two occurences it is easy 105 | # to capture than .data a 106 | elem=soup.select('.data span')[0] 107 | elema=elem.parent 108 | elema=elema.find_all("a")[0] 109 | url=elema.get('href') 110 | self.filename=elema.get_text() 111 | #elema=elem.parent 112 | #print(elema.get_text()) 113 | #elema=soup.find_all('a')[0] 114 | 115 | #print(elema.get('href')) 116 | #self.filename = str(elems[-2]) 117 | #self.filename = self.filename[self.filename.find(">")+1:] 118 | #self.filename = self.filename[:self.filename.find("<")] 119 | self.url=url 120 | 121 | ======= 122 | elems=soup.select('.data a') 123 | self.filename = str(elems[-2]) 124 | self.filename = self.filename[self.filename.find(">")+1:] 125 | self.filename = self.filename[:self.filename.find("<")] 126 | self.url=elems[-2].get('href') 127 | >>>>>>> b5c2e2405088207b37f79964fe74ccc5fef674d4 128 | 129 | def listAvailableShows(self): 130 | res = urllib2.urlopen(urllib2.Request(self.url, headers = self.header)).read() 131 | soup=bs4.BeautifulSoup(res, "html.parser") 132 | elems=soup.select('.data a') 133 | print ('List of TV Shows: \n') 134 | for i in range(len(elems)): 135 | show_name = str(elems[i]) 136 | show_name = show_name[show_name.find(">")+1:] 137 | show_name = show_name[:show_name.find("<")] 138 | print ("\r " + show_name) 139 | 140 | 141 | --------------------------------------------------------------------------------