', methods=['GET'])
37 | def get_mallookup(region, malid):
38 | return loadResultsByMALID(malid, region)
39 |
40 | def loadResultsByQuery(query, region) :
41 | mydb = openConnection()
42 | mycursor = mydb.cursor(dictionary=True)
43 | sql = "SELECT t.title, s.sitename, r.regionname, l.url, t.mal_id from region AS r, links AS l, sites AS s, titles AS t WHERE l.titleid = t.titleid AND l.siteid = s.id AND l.regionid = r.id AND r.regionname = %s AND t.title LIKE %s"
44 | val = [region, query]
45 | mycursor.execute(sql,val)
46 | results = mycursor.fetchall()
47 | fresults = {}
48 | if len(results) > 0 :
49 | fresults = {"data" : results, "meta":{"query": query, "region":region, "count": len(results)}}
50 | return jsonify(fresults), 200
51 | else :
52 | fresults = {"data": None, "error" : "Not Found", "meta" : {"query" : query, "region" : region}}
53 | return jsonify(fresults), 404
54 |
55 | def loadResultsByMALID(malid, region) :
56 | mydb = openConnection()
57 | mycursor = mydb.cursor(dictionary=True)
58 | sql = "SELECT t.title, s.sitename, r.regionname, l.url, t.mal_id from region AS r, links AS l, sites AS s, titles AS t WHERE l.titleid = t.titleid AND l.siteid = s.id AND l.regionid = r.id AND r.regionname = %s AND t.mal_id = %s"
59 | val = [region, malid]
60 | mycursor.execute(sql,val)
61 | results = mycursor.fetchall()
62 | fresults = {}
63 | if len(results) > 0 :
64 | fresults = {"data" : results, "meta" : {"mal_id" : malid, "region" : region, "count" : len(results)}}
65 | return jsonify(fresults), 200
66 | else :
67 | fresults = {"data": None, "error" : "Not Found", "meta" : {"mal_id" : malid, "region" : region}}
68 | return jsonify(fresults), 404
69 |
70 | def openConnection():
71 | mydb = mysql.connector.connect(
72 | host=appconfig.db_host,
73 | user=appconfig.db_user,
74 | password=appconfig.db_user_password,
75 | database=appconfig.db_name,
76 | auth_plugin='mysql_native_password'
77 | )
78 | return mydb
79 |
80 | if __name__ == '__main__':
81 | app.run()
82 |
--------------------------------------------------------------------------------
/appconfig_sample.py:
--------------------------------------------------------------------------------
1 | """
2 | Stream Data Config File
3 | Specify Database Configuration Here
4 | """
5 | db_host = "localhost"
6 | db_name = ""
7 | db_user = ""
8 | db_user_password = ""
9 |
--------------------------------------------------------------------------------
/db.sql:
--------------------------------------------------------------------------------
1 | CREATE TABLE links (
2 | id int(10) NOT NULL auto_increment,
3 | titleid int(10) NOT NULL,
4 | url varchar(500),
5 | siteid int(10) NOT NULL,
6 | regionid int(10) NOT NULL,
7 | PRIMARY KEY (id)
8 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
9 |
10 | CREATE TABLE region (
11 | id int(10) NOT NULL auto_increment,
12 | regionname text(65535) NOT NULL,
13 | PRIMARY KEY (id)
14 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
15 |
16 | CREATE TABLE sites (
17 | id int(10) NOT NULL auto_increment,
18 | sitename text(65535) NOT NULL,
19 | PRIMARY KEY (id)
20 | ) ENGINE=InnoDB DEFAULT CHARSET=latin1;
21 |
22 | CREATE TABLE staging (
23 | id int(10) NOT NULL auto_increment,
24 | title varchar(500),
25 | streamsitetitle varchar(100),
26 | streamsiteurl varchar(500),
27 | region varchar(3),
28 | PRIMARY KEY (id)
29 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
30 |
31 | CREATE TABLE titles (
32 | titleid int(10) NOT NULL auto_increment,
33 | title varchar(500),
34 | mal_id int(10),
35 | PRIMARY KEY (titleid)
36 | ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;
--------------------------------------------------------------------------------
/load.py:
--------------------------------------------------------------------------------
1 | """
2 | load.py
3 | This script loads the tables for the StreamData RestAPI
4 | ======
5 |
6 | Copyright 2019 Moy IT Solutions
7 |
8 | Licensed under the Apache License, Version 2.0 (the "License");
9 | you may not use this file except in compliance with the License.
10 | You may obtain a copy of the License at
11 |
12 | http://www.apache.org/licenses/LICENSE-2.0
13 |
14 | Unless required by applicable law or agreed to in writing, software
15 | distributed under the License is distributed on an "AS IS" BASIS,
16 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
17 | See the License for the specific language governing permissions and
18 | limitations under the License.
19 | """
20 | import urllib.request
21 | import mysql.connector
22 | import json
23 | import ssl
24 | import appconfig
25 |
26 | mydb = mysql.connector.connect(
27 | host=appconfig.db_host,
28 | user=appconfig.db_user,
29 | password=appconfig.db_user_password,
30 | database=appconfig.db_name,
31 | auth_plugin='mysql_native_password'
32 | )
33 |
34 | def main():
35 | print('Clearing Staging Tables')
36 | truncateStagingTables()
37 | print('Downloading stream data...')
38 | regions = ['us', 'ca', 'uk', 'au']
39 | for region in regions:
40 | downloadStreamData(region)
41 | loadStagingTables(region)
42 | print('Downloading title database')
43 | downloadTitleIDs()
44 | loadRegions()
45 | loadSites()
46 | loadTitles()
47 | loadMALIds()
48 | loadLinks()
49 | cleanup()
50 | mydb.close()
51 | print("Done")
52 |
53 | def downloadStreamData(region):
54 | print('Downloading Stream Data: ' + region)
55 | ssl._create_default_https_context = ssl._create_unverified_context
56 | urllib.request.urlretrieve('https://bcmoe.blob.core.windows.net/assets/'+region+'.json', region + '.json')
57 |
58 | def downloadTitleIDs():
59 | print('Downloading Title Ids')
60 | ssl._create_default_https_context = ssl._create_unverified_context
61 | urllib.request.urlretrieve('https://raw.githubusercontent.com/manami-project/anime-offline-database/master/anime-offline-database.json', 'anime-offline-database.json')
62 |
63 | def loadMALIds():
64 | print('Loading MyAnimeList title ids')
65 | with open('anime-offline-database.json') as json_file:
66 | titledata = json.load(json_file)
67 | mycursor = mydb.cursor(dictionary=True)
68 | sql = "SELECT titleid, title, mal_id FROM titles WHERE mal_id IS NULL;"
69 | mycursor.execute(sql)
70 | results = mycursor.fetchall()
71 | if len(results) > 0:
72 | for title in results:
73 | for titlerecord in titledata['data']:
74 | comparetitle = title['title']
75 | ntitle = titlerecord['title']
76 | match = False
77 | if ntitle.upper() == comparetitle.upper():
78 | match = True
79 | else:
80 | for synonym in titlerecord['synonyms']:
81 | if synonym.upper() == comparetitle.upper():
82 | match = True
83 | break
84 | if match:
85 | for source in titlerecord['sources']:
86 | if "myanimelist.net" in source:
87 | tmpstr = source.replace("https://myanimelist.net/anime/", "")
88 | titleidint = int(tmpstr)
89 | updateAddMALID(title["titleid"], titleidint)
90 | break
91 | print('Loading MyAnimeList title ids done')
92 |
93 | def truncateStagingTables():
94 | print('Truncating staging tables')
95 | mycursor = mydb.cursor()
96 | sql = "TRUNCATE TABLE staging"
97 | mycursor.execute(sql)
98 | mydb.commit()
99 |
100 | def loadStagingTables(region):
101 | print('Loading ' + region + '.json')
102 | with open(region + '.json') as json_file:
103 | data = json.load(json_file)
104 | for show in data['shows']:
105 | mycursor = mydb.cursor()
106 | sql = "INSERT INTO staging (title, streamsitetitle, streamsiteurl, region) VALUES (%s, %s, %s, %s)"
107 | for streamingsite in show["sites"].keys():
108 | if "http://" in show["sites"][streamingsite] or "https://" in show["sites"][streamingsite]:
109 | val = [show["name"], streamingsite, show["sites"][streamingsite], region]
110 | mycursor.execute(sql, val)
111 | mydb.commit()
112 | print('Finished Loading ' + region + '.json')
113 |
114 | def loadRegions():
115 | print('Loading regions table')
116 | mycursor = mydb.cursor(dictionary=True)
117 | sql = "SELECT DISTINCT region FROM staging;"
118 | mycursor.execute(sql)
119 | results = mycursor.fetchall()
120 | for result in results:
121 | if checkRegion(result["region"]) == True:
122 | continue
123 | else:
124 | insertcursor = mydb.cursor()
125 | sql = "INSERT INTO region (regionname) VALUES (%s)"
126 | val = [result["region"]]
127 | insertcursor.execute(sql,val)
128 | mydb.commit()
129 | print('Loading regions table done')
130 |
131 | def checkRegion(region):
132 | mycursor = mydb.cursor(dictionary=True)
133 | sql = "SELECT id, regionname FROM region WHERE regionname = %s;"
134 | val = [region]
135 | mycursor.execute(sql,val)
136 | results = mycursor.fetchall()
137 | if len(results) > 0:
138 | return True
139 | return False
140 |
141 | def loadLinks():
142 | print('Loading links table')
143 | mycursor = mydb.cursor(dictionary=True)
144 | sql = "SELECT * FROM staging;"
145 | mycursor.execute(sql)
146 | results = mycursor.fetchall()
147 | for result in results:
148 | titleid = lookupTitle(result["title"])
149 | siteid = lookupSite(result["streamsitetitle"])
150 | regionid = lookupRegion(result["region"])
151 | url = result["streamsiteurl"]
152 | if titleid == -1 or siteid == -1 or regionid == -1:
153 | continue
154 | else:
155 | if (checkLink(titleid,regionid,siteid)):
156 | updatelink(titleid,url,siteid,regionid)
157 | else:
158 | insertLink(titleid,url,siteid,regionid)
159 | print('Loading links table done')
160 |
161 | def insertLink(titleid, url, siteid, regionid):
162 | insertcursor = mydb.cursor()
163 | sql = "INSERT INTO links (titleid, url, siteid, regionid) VALUES (%s, %s, %s, %s)"
164 | val = [titleid, url, siteid, regionid]
165 | insertcursor.execute(sql, val)
166 | mydb.commit()
167 |
168 | def updatelink(titleid, url, siteid, regionid):
169 | updatecursor = mydb.cursor()
170 | sql = "UPDATE links SET url = %s WHERE titleid = %s AND siteid = %s AND regionid = %s"
171 | val = [url, titleid, siteid, regionid]
172 | updatecursor.execute(sql,val)
173 | mydb.commit()
174 |
175 | def checkLink(titleid, regionid, siteid):
176 | mycursor = mydb.cursor(dictionary=True)
177 | sql = "SELECT id, titleid, regionid, siteid FROM links WHERE titleid = %s AND regionid = %s AND siteid = %s;"
178 | val = [titleid, regionid, siteid]
179 | mycursor.execute(sql,val)
180 | results = mycursor.fetchall()
181 | if len(results) > 0:
182 | return True
183 | return False
184 |
185 | def loadTitles():
186 | print('Loading titles table')
187 | mycursor = mydb.cursor(dictionary=True)
188 | sql = "SELECT DISTINCT title FROM staging;"
189 | mycursor.execute(sql)
190 | results = mycursor.fetchall()
191 | for result in results:
192 | if checkTitle(result["title"]) == True:
193 | continue
194 | else:
195 | insertcursor = mydb.cursor()
196 | sql = "INSERT INTO titles (title) VALUES (%s)"
197 | val = [result["title"]]
198 | insertcursor.execute(sql,val)
199 | mydb.commit()
200 | print('Loading titles table done')
201 |
202 | def updateAddMALID(id, malid):
203 | updatecursor = mydb.cursor()
204 | sql = "UPDATE titles SET mal_id = %s WHERE titleid = %s"
205 | val = [malid, id]
206 | updatecursor.execute(sql,val)
207 | mydb.commit()
208 |
209 | def checkTitle(title):
210 | mycursor = mydb.cursor(dictionary=True)
211 | sql = "SELECT titleid, title FROM titles WHERE title = %s;"
212 | val = [title]
213 | mycursor.execute(sql,val)
214 | results = mycursor.fetchall()
215 | if len(results) > 0:
216 | return True
217 | return False
218 |
219 | def loadSites():
220 | print('Loading sites table')
221 | mycursor = mydb.cursor(dictionary=True)
222 | sql = "SELECT DISTINCT streamsitetitle FROM staging;"
223 | mycursor.execute(sql)
224 | results = mycursor.fetchall()
225 | for result in results:
226 | if checkSite(result["streamsitetitle"]) == True:
227 | continue
228 | else:
229 | insertcursor = mydb.cursor()
230 | sql = "INSERT INTO sites (sitename) VALUES (%s)"
231 | val = [result["streamsitetitle"]]
232 | insertcursor.execute(sql,val)
233 | mydb.commit()
234 |
235 | def checkSite(sitename):
236 | mycursor = mydb.cursor(dictionary=True)
237 | sql = "SELECT id, sitename FROM sites WHERE sitename = %s;"
238 | val = [sitename]
239 | mycursor.execute(sql,val)
240 | results = mycursor.fetchall()
241 | if len(results) > 0:
242 | return True
243 | return False
244 |
245 | def lookupTitle(title):
246 | mycursor = mydb.cursor(dictionary=True)
247 | sql = "SELECT titleid, title FROM titles WHERE title = %s;"
248 | val = [title]
249 | mycursor.execute(sql,val)
250 | results = mycursor.fetchall()
251 | if len(results) > 0:
252 | return results[0]["titleid"]
253 | return -1
254 |
255 | def lookupSite(site):
256 | mycursor = mydb.cursor(dictionary=True)
257 | sql = "SELECT id, sitename FROM sites WHERE sitename = %s;"
258 | val = [site]
259 | mycursor.execute(sql,val)
260 | results = mycursor.fetchall()
261 | if len(results) > 0:
262 | return results[0]["id"]
263 | return -1
264 |
265 | def lookupRegion(region):
266 | mycursor = mydb.cursor(dictionary=True)
267 | sql = "SELECT id, regionname FROM region WHERE regionname = %s;"
268 | val = [region]
269 | mycursor.execute(sql,val)
270 | results = mycursor.fetchall()
271 | if len(results) > 0:
272 | return results[0]["id"]
273 | return -1
274 |
275 | def cleanup():
276 | print('Performing cleanup')
277 | mycursor = mydb.cursor(dictionary=True)
278 | sql = "SELECT l.id, t.title, s.sitename, r.regionname, l.url, t.mal_id from region AS r, links AS l, sites AS s, titles AS t WHERE l.titleid = t.titleid AND l.siteid = s.id AND l.regionid = r.id"
279 | mycursor.execute(sql)
280 | loadedresults = mycursor.fetchall()
281 | sql = "SELECT * FROM staging"
282 | mycursor.execute(sql)
283 | stagingresults = mycursor.fetchall()
284 | for loadedresult in loadedresults:
285 | found = False
286 | for stageresult in stagingresults:
287 | if loadedresult["url"] == stageresult["streamsiteurl"] and loadedresult["regionname"] == stageresult["region"]:
288 | found = True
289 | if not found:
290 | print("Stream no longer exists, deleting URL: " + loadedresult["url"])
291 | deleteLink(loadedresult["id"])
292 | print("Cleanup complete")
293 |
294 | def deleteLink(id):
295 | deletecursor = mydb.cursor()
296 | sql = "DELETE FROM links WHERE id = %s"
297 | val = [id]
298 | deletecursor.execute(sql, val)
299 | mydb.commit()
300 |
301 | if __name__== "__main__":
302 | main()
303 |
--------------------------------------------------------------------------------
/readme.md:
--------------------------------------------------------------------------------
1 | # StreamData
2 | Stream Data is a REST API that allows applications to retrieve a list of legal stream URLs for a given title. The database loading script and the Flask-based REST API server are written in Python and require Python 3 (Python 2.x is not supported).
3 |
4 | The app uses legal stream link data from [Because.moe](https://because.moe). The difference is that the whole stream data set does not need to be downloaded; you retrieve only what you need. You can use a title or a MyAnimeList ID to look up links for a given title.
5 |
6 | # Why StreamData?
7 | Providing stream links to your applications allows users to find where to watch a given show through legal streaming. Illegal/pirated streams hurt the creators and animation studios that make an anime production possible. StreamData only provides legal sources where you can watch a given title (e.g. Crunchyroll, Funimation, Amazon, HiDive) and not illegal sources.
8 |
9 | This API enables your app to obtain links where the user can legally watch a given title. StreamData only supports the following regions: United States, Canada, United Kingdom, and Australia.
10 |
11 | StreamData is a free service and you do not need to be an active Patron to use it. If you want to help us keep the service running, feel free to [become a Patron](https://www.patreon.com/join/malupdaterosx) for as little as $1 a month.
12 |
13 | # How does StreamData work?
14 | StreamData uses two different scripts. load.py is set to run as a cron job and refreshes the data in a MySQL/MariaDB database. It performs an ETL (Extract, Transform, Load) process: it downloads the stream data from the provider, loads it into a staging table, then transforms the data and loads it into the data model. The ERD diagram for the database backend is shown below:
15 |
16 | 
17 |
18 | The frontend provides the REST API, allowing users' apps to obtain stream data by title or MyAnimeList title ID.
19 |
20 | # Requirements
21 | * Apache with the wsgi module (libapache2-mod-wsgi-py3) installed and enabled. The Python 2 libapache2-mod-wsgi module will not work.
22 | * Python 3.x; the latest version is recommended.
23 | * The following packages installed: mysql-connector-python and flask (urllib is part of the Python standard library). You can install them using `pip`.
24 |
25 | # How to use the API
26 | ## API Endpoints
27 | ```
28 | GET http://streamdata.malupdaterosx.moe/search/(region)?q=(search term)
29 | ```
30 | #### Parameters
31 |
32 | | Parameter | Value | Required |
33 | |:---|:---|:---|
34 | | region| `us` or `ca` or `uk` or `au` | `true` |
35 | | q | Title (URL Encoded) | `true` |
36 |
37 | #### Example
38 | ```
39 | [GET] http://streamdata.malupdaterosx.moe/search/us?q=Kandagawa%20Jet%20Girls
40 | ```
41 |
42 | ##### Response
43 | ```
44 | {
45 | "data": [
46 | {
47 | "mal_id": 40196,
48 | "regionname": "us",
49 | "sitename": "vrv-hidive",
50 | "title": "Kandagawa Jet Girls",
51 | "url": "https://vrv.co/series/GYW4N30E6"
52 | },
53 | {
54 | "mal_id": 40196,
55 | "regionname": "us",
56 | "sitename": "hidive",
57 | "title": "Kandagawa Jet Girls",
58 | "url": "https://www.hidive.com/stream/kandagawa-jet-girls/s01e001"
59 | }
60 | ],
61 | "meta": {
62 | "count": 2,
63 | "query": "Kandagawa Jet Girls",
64 | "region": "us"
65 | }
66 | }
67 |
68 | ```
69 |
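For reference, a minimal Python client for the search endpoint could look like the sketch below. It is only an illustration: it assumes the public host shown above, uses the standard library `urllib`, and the function name is made up for the example.

```
import json
import urllib.parse
import urllib.request

def search_streams(region, title):
    # URL-encode the title and call the /search endpoint
    query = urllib.parse.quote(title)
    url = "http://streamdata.malupdaterosx.moe/search/" + region + "?q=" + query
    # A title with no matches returns an error status, which urlopen raises as HTTPError
    with urllib.request.urlopen(url) as response:
        return json.load(response)

results = search_streams("us", "Kandagawa Jet Girls")
for link in results["data"]:
    print(link["sitename"] + ": " + link["url"])
```
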
70 | ```
71 | GET http://streamdata.malupdaterosx.moe/lookup/(region)/(MAL ID)
72 | ```
73 | #### Parameters
74 |
75 | | Parameter | Value | Required |
76 | |:---|:---|:---|
77 | | region| `us` or `ca` or `uk` or `au` | `true` |
78 | | MAL ID| MyAnimeList Title ID | `true` |
79 |
80 | #### Example
81 | ```
82 | [GET] http://streamdata.malupdaterosx.moe/lookup/us/40196
83 | ```
84 |
85 | ##### Response
86 | ```
87 | {
88 | "data": [
89 | {
90 | "mal_id": 40196,
91 | "regionname": "us",
92 | "sitename": "vrv-hidive",
93 | "title": "Kandagawa Jet Girls",
94 | "url": "https://vrv.co/series/GYW4N30E6"
95 | },
96 | {
97 | "mal_id": 40196,
98 | "regionname": "us",
99 | "sitename": "hidive",
100 | "title": "Kandagawa Jet Girls",
101 | "url": "https://www.hidive.com/stream/kandagawa-jet-girls/s01e001"
102 | }
103 | ],
104 | "meta": {
105 | "count": 2,
106 | "query": "Kandagawa Jet Girls",
107 | "region": "us"
108 | }
109 | }
110 |
111 | ```
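The lookup endpoint can be called the same way; here is a minimal sketch under the same assumptions (public host shown above, illustrative function name):

```
import json
import urllib.request

def lookup_streams(region, mal_id):
    # Look up stream links by region and MyAnimeList title ID
    url = "http://streamdata.malupdaterosx.moe/lookup/" + region + "/" + str(mal_id)
    with urllib.request.urlopen(url) as response:
        return json.load(response)

print(lookup_streams("us", 40196))
```
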
112 | # How to set up the API server
113 | ## 1. Install the necessary Python packages
114 | ```
115 | pip3 install mysql-connector-python flask
116 | ```
117 |
118 | ## 2. Load the database schema by importing db.sql
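You can do this with the `mysql` command line client (e.g. `mysql -u <user> -p <database> < db.sql`) or, if you prefer Python, with a short one-off snippet like the sketch below. It is not part of the project itself; it assumes db.sql is in the current directory and reuses the settings from appconfig.py.

```
import mysql.connector
import appconfig

# Connect with the same settings the API uses (see appconfig_sample.py)
mydb = mysql.connector.connect(
    host=appconfig.db_host,
    user=appconfig.db_user,
    password=appconfig.db_user_password,
    database=appconfig.db_name,
    auth_plugin='mysql_native_password'
)
cursor = mydb.cursor()

# db.sql contains plain CREATE TABLE statements separated by semicolons
with open('db.sql') as schema_file:
    for statement in schema_file.read().split(';'):
        if statement.strip():
            cursor.execute(statement)

mydb.commit()
mydb.close()
```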
119 |
120 | ## 3. Database configuration
121 | Copy appconfig_sample.py and rename it to appconfig.py. Edit appconfig.py and specify the database host, database name, user, and password the app will use. For example:
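A filled-in appconfig.py would look something like this (the database name, user, and password below are placeholders; use your own values):

```
"""
Stream Data Config File
Specify Database Configuration Here
"""
db_host = "localhost"            # database server host
db_name = "streamdata"           # placeholder database name
db_user = "streamdata_user"      # placeholder database user
db_user_password = "changeme"    # placeholder password
```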
122 |
123 | ## 4. Adding the load.py script to the crontab
124 | A cron job needs to be created to run load.py, which refreshes the stream data in the database. Add the following to the crontab; you can edit the crontab by typing `crontab -e` into the terminal. Note that you need to replace "/path/to/StreamData" with the absolute path to the load.py script.
125 | ```
126 | 0 0 1 * * python3 /path/to/StreamData/load.py > load.log
127 | ```
128 |
129 | ## 5. Configure Apache
130 | Make sure the wsgi module is enabled. You can do this by running the following command:
131 | ```
132 | sudo a2enmod wsgi
133 | ```
134 |
135 | If it's not installed, run the following command:
136 | ```
137 | sudo apt-get install libapache2-mod-wsgi-py3
138 | ```
139 |
140 | In `/etc/apache2/sites-available`, create a site configuration file called streamdata.conf with the following (change the server name to the domain name where you will host the service):
141 | ```
142 | <VirtualHost *:80>
143 |     ServerName streamdata.your-domain-here.com
144 |     WSGIDaemonProcess flask_streamdata user=www-data group=www-data threads=2
145 |     WSGIScriptAlias / /path/to/StreamData/streamdata.wsgi
146 |     <Directory /path/to/StreamData>
147 |         Require all granted
148 |     </Directory>
149 |     Alias /static /path/to/StreamData/static
150 |     <Directory /path/to/StreamData/static>
151 |         Require all granted
152 |     </Directory>
153 |     ErrorLog ${APACHE_LOG_DIR}/error.log
154 |     LogLevel warn
155 |     CustomLog ${APACHE_LOG_DIR}/access.log combined
156 | </VirtualHost>
157 | ```
158 |
159 | In the StreamData directory, copy streamdata_sample.wsgi to streamdata.wsgi. Replace "/path/to" with the absolute path to StreamData.
160 |
161 | To enable the site, run the following
162 | ```
163 | sudo a2ensite streamdata.conf
164 | ```
165 |
166 | Go to your web browser and navigate to `http://(domain name)`, where the domain name is where you host the StreamData service. If you see the StreamData introduction page, the service is running correctly.
167 |
168 |
169 | ## 6. Securing StreamData (optional)
170 | It's recommended to use HTTPS for any requests between your application and StreamData. You can use the Let's Encrypt service to obtain a free SSL certificate. You can do this by following [these instructions](https://www.digitalocean.com/community/tutorials/how-to-secure-apache-with-let-s-encrypt-on-ubuntu-16-04).
171 |
172 | ## 7. Updating StreamData
173 | You can update StreamData easily by performing a `git pull`, as long as you cloned the repo using Git.
174 |
175 | In the directory containing the StreamData application, run the following in the terminal:
176 | ```
177 | git pull
178 | ```
179 | # License
180 | StreamData is open source and licensed under the Apache License 2.0.
181 |
--------------------------------------------------------------------------------
/streamdata_sample.wsgi:
--------------------------------------------------------------------------------
1 | #!/usr/bin/python
2 | import sys
3 | import logging
4 | logging.basicConfig(stream=sys.stderr)
5 | sys.path.insert(0,"/path/to/StreamData/")
6 |
7 | from app import app as application
8 | application.secret_key = 'Add your secret key'
9 |
--------------------------------------------------------------------------------
/templates/index.html:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | StreamData
8 |
9 |
10 | StreamData
11 | Stream Data is a REST API that allows applications to retrieve a list of legal stream URLs for a given title. The database loading script and the Flask-based REST API server are written in Python.
12 | The app uses legal stream link data from Because.moe. The difference is that the whole stream data set does not need to be downloaded; you retrieve only what you need. You can use a title or a MyAnimeList ID to look up links for a given title.
13 | StreamData only supports Python 3 and is open source.
14 | Why StreamData?
15 | Providing stream links to your applications allows users to find where to watch a given show through legal streaming. Illegal/pirated streams hurt the creators and animation studios that make an anime production possible. StreamData only provides legal sources where you can watch a given title (e.g. Crunchyroll, Funimation, Amazon, HiDive) and not illegal sources.
16 | This API enables you to obtain links where the user can watch a show legally through your app for a given title. StreamData only supports the following regions: United States, Canada, United Kingdom and Australia.
17 | StreamData is a free service and you do not need to be an active Patron to use it. If you want to help us keep the service running, feel free to become a Patron for as little as $1 a month.
18 | How does StreamData work?
19 | StreamData uses two different scripts. load.py is set to run as a cron job and refreshes the data in a MySQL/MariaDB database. It performs an ETL (Extract, Transform, Load) process: it downloads the stream data from the provider, loads it into a staging table, then transforms the data and loads it into the data model.
20 | The frontend provides the REST API, allowing users' apps to obtain stream data by title or MyAnimeList title ID.
21 | How to use the API
22 | API Endpoints
23 | GET http://streamdata.malupdaterosx.moe/search/(region)?q=(search term)
24 |
Parameters
25 |
26 |
27 |
28 | Parameter |
29 | Value |
30 | Required |
31 |
32 |
33 |
34 |
35 | region |
36 | us or ca or uk or au |
37 | true |
38 |
39 |
40 | q |
41 | Title (URL Encoded) |
42 | true |
43 |
44 |
45 |
46 | Example
47 | [GET] http://streamdata.malupdaterosx.moe/search/us?q=Kandagawa%20Jet%20Girls
48 |
Response
49 | {
50 | "data": [
51 | {
52 | "mal_id": 40196,
53 | "regionname": "us",
54 | "sitename": "vrv-hidive",
55 | "title": "Kandagawa Jet Girls",
56 | "url": "https://vrv.co/series/GYW4N30E6"
57 | },
58 | {
59 | "mal_id": 40196,
60 | "regionname": "us",
61 | "sitename": "hidive",
62 | "title": "Kandagawa Jet Girls",
63 | "url": "https://www.hidive.com/stream/kandagawa-jet-girls/s01e001"
64 | }
65 | ],
66 | "meta": {
67 | "count": 2,
68 | "query": "Kandagawa Jet Girls",
69 | "region": "us"
70 | }
71 | }
72 |
GET http://streamdata.malupdaterosx.moe/lookup/(region)/(MAL ID)
73 |
Parameters
74 |
75 |
76 |
77 | Parameter |
78 | Value |
79 | Required |
80 |
81 |
82 |
83 |
84 | region |
85 | us or ca or uk or au |
86 | true |
87 |
88 |
89 | MAL ID |
90 | MyAnimeList Title ID |
91 | true |
92 |
93 |
94 |
95 | Example
96 | [GET] http://streamdata.malupdaterosx.moe/lookup/us/40196
97 |
Response
98 | {
99 | "data": [
100 | {
101 | "mal_id": 40196,
102 | "regionname": "us",
103 | "sitename": "vrv-hidive",
104 | "title": "Kandagawa Jet Girls",
105 | "url": "https://vrv.co/series/GYW4N30E6"
106 | },
107 | {
108 | "mal_id": 40196,
109 | "regionname": "us",
110 | "sitename": "hidive",
111 | "title": "Kandagawa Jet Girls",
112 | "url": "https://www.hidive.com/stream/kandagawa-jet-girls/s01e001"
113 | }
114 | ],
115 | "meta": {
116 | "count": 2,
117 | "query": "Kandagawa Jet Girls",
118 | "region": "us"
119 | }
120 | }
121 |
License
122 | StreamData is open source and licensed under Apache License 2.0
123 |
124 |
--------------------------------------------------------------------------------