├── .editorconfig ├── .gitignore ├── LICENSE ├── README.md ├── eplus.py ├── nhltv.py ├── spwn.py └── zan.py /.editorconfig: -------------------------------------------------------------------------------- 1 | root = true 2 | 3 | [*] 4 | charset = utf-8 5 | end_of_line = lf 6 | indent_size = 4 7 | indent_style = space 8 | insert_final_newline = true 9 | max_line_length = 128 10 | trim_trailing_whitespace = true 11 | 12 | [*.{cfg,svg,yml}] 13 | indent_size = 2 14 | 15 | [*.{md,markdown}] 16 | trim_trailing_whitespace = false 17 | 18 | [docs/Makefile] 19 | indent_size = 8 20 | indent_style = tab 21 | 22 | [docs/_themes/**] 23 | indent_size = ignore 24 | indent_style = ignore 25 | trim_trailing_whitespace = ignore 26 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | 116 | # Spyder project settings 117 | .spyderproject 118 | .spyproject 119 | 120 | # Rope project settings 121 | .ropeproject 122 | 123 | # mkdocs documentation 124 | /site 125 | 126 | # mypy 127 | .mypy_cache/ 128 | .dmypy.json 129 | dmypy.json 130 | 131 | # Pyre type checker 132 | .pyre/ 133 | 134 | # pytype static type analyzer 135 | .pytype/ 136 | 137 | # Cython debug symbols 138 | cython_debug/ 139 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Copyright (c) 2019, Peter Rowlands 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright notice, this 8 | list of conditions and the following disclaimer. 9 | 2. Redistributions in binary form must reproduce the above copyright notice, 10 | this list of conditions and the following disclaimer in the documentation 11 | and/or other materials provided with the distribution. 12 | 13 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 14 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 15 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 16 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR 17 | ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES 18 | (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; 19 | LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND 20 | ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT 21 | (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS 22 | SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 23 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # streamlink-plugins 2 | 3 | Custom plugins for [Streamlink](https://github.com/streamlink/streamlink) 5.5.0 and newer versions with Python 3.8 and newer version. 4 | 5 | To use these plugins, clone this repo somewhere and run (or configure) streamlink with `--plugin-dir`. 6 | Alternatively, individual plugins can be symlinked or downloaded to `~/.config/streamlink/plugins` 7 | (`%APPDATA%\streamlink\plugins` on Windows). 8 | 9 | ## NHL.tv 10 | 11 | - Login required to view any live game or archived game (VOD). 12 | `--nhltv-email`, `--nhltv-password` and `--nhltv-purge-credentials` options can be used to specify login credentials/behavior. 13 | - Valid subscription is required to view most games, but accounts without a subscription can watch specially designated free games 14 | - `--nhltv-prefer-french` and `--nhltv-prefer-team=TEAM` options can be used to give priority to French language broadcasts or a specific team's home/away broadcasts when determining "best" quality stream. 15 | By default, priority is given in the following order: 16 | 17 | 1. National (English) 18 | 2. 
Home
3. Away
4. National (French)

## eplus (e+)

https://eplus.jp/ plugin.

- Supports `live.eplus.jp/` (local) and `live.eplus.jp/ex/player?ib=`
  (inbound) stream or VOD URLs.
- Login is required to view a live event or VOD on local eplus. The `--eplus-id`
  and `--eplus-password` options can be used to specify login credentials.
  Specifying the `ci_session` cookie via the `--http-cookie` option is another
  way to access restricted content.
- Streamlink will count as one (desktop browser) "device" against the e+ limit
  when viewing a stream or VOD. Set `--eplus-allow-relogin` to kick other
  "devices" during download.
- `--player-passthrough=hls` is incompatible with e+ since the video player
  will not have access to the authenticated HTTP session.
- DRM-protected content is NOT supported. If you have been notified that an
  event is only available for Microsoft Edge on Windows or Safari on macOS,
  it is DRM-protected.

## SPWN

https://spwn.jp/ plugin.

- Supports direct `spwn.jp/events/` (ticketed) stream or VOD URLs.
- Login and a valid event ticket are required to view any live event or VOD.
  The `--spwn-email` and `--spwn-password` options can be used to specify login
  credentials. Social account (Twitter/Facebook/Google) login requires
  specifying the OAuth refresh token (i.e., `refresh_token`) directly with
  `--spwn-token`.
- When the `--spwn-low-latency` option is set and a low-latency (LL) stream is
  available, it will be preferred over the default stream.
  (`--spwn-low-latency` has no effect for VOD URLs.)
- Streamlink will count as one (desktop browser) device against the SPWN limit
  when viewing a stream or VOD.

## Z-aN

https://www.zan-live.com/ plugin.

- Supports direct `zan-live.com/live/play` (ticketed) stream or VOD URLs.
- Login and a valid event ticket are required to view any live event or VOD.
  The `--zan-email` and `--zan-password` options can be used to specify login
  credentials.
- Streamlink will count as one (desktop browser) device against the Z-aN limit
  when viewing a stream or VOD.
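
## Python API

The plugins can also be loaded through Streamlink's Python API rather than the CLI. A minimal sketch, assuming Streamlink 6.0 or newer where `Streamlink().plugins.load_path()` is available (on older versions use the `--plugin-dir` CLI option instead); the clone path and event IDs below are placeholders:

```python
from pathlib import Path

from streamlink import Streamlink

session = Streamlink()

# Load the custom plugins from a local clone of this repo (placeholder path).
session.plugins.load_path(Path.home() / "src" / "streamlink-plugins")

# Resolve a URL against the loaded plugins; returns a dict of quality name -> Stream.
streams = session.streams("https://www.zan-live.com/live/play/1234/5678")
print(sorted(streams))
```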
--------------------------------------------------------------------------------
/eplus.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""eplus.jp streamlink plugin.

Requires stream/VOD URL.
"""

import logging
import re
import time
from threading import Thread, Event, Lock
from typing import List, Optional, ClassVar
from urllib.parse import urlencode

from streamlink.exceptions import NoStreamsError, PluginError
from streamlink.plugin import Plugin, pluginargument, pluginmatcher
from streamlink.plugin.api import validate, useragents
try:
    from streamlink.session.http import HTTPSession  # from 6.6.0
except ImportError:
    from streamlink.plugin.api import HTTPSession
from streamlink.stream.hls import HLSStream, HLSStreamReader, HLSStreamWorker

log = logging.getLogger(__name__)


def _get_eplus_data(session: HTTPSession, eplus_url: str):
    """
    Return video data for an eplus event/video page.
    """
    schema_data_json = validate.Schema(
        re.compile(r""),
        validate.none_or_all(
            validate.get("data_json"),
            validate.parse_json(),
            {
                "delivery_status": str,

                # For pass tickets, the "archive_mode" field exists but its value is null.
                validate.optional("archive_mode"): validate.any(str, None),
                validate.optional("is_pass_ticket"): validate.any(str, None),

                "app_id": str,
                "app_name": str,
                validate.optional("drm_mode"): validate.any(str, None),
            },
        ),
    )
    schema_m3u8_urls = validate.Schema(
        re.compile(r"var\s+listChannels\s*=\s*(?P<list_channels>\[.+?\]);"),
        validate.none_or_all(
            validate.get("list_channels"),
            validate.parse_json(),
            list,
        ),
    )
    schema_stream_session = validate.Schema(
        re.compile(r"var\s+streamSession\s*=\s*(['\"])(?P<stream_session>(?:(?!\1).)+)\1;"),
        validate.none_or_all(
            validate.get("stream_session"),
        ),
    )

    body = session.get(eplus_url).text

    data_json = schema_data_json.validate(body, "data_json")
    if not data_json:
        raise PluginError("Failed to get data_json")

    if data_json.get("is_pass_ticket") == "YES":
        raise PluginError("Pass ticket is not supported, please use a URL that points to the player page")

    if data_json.get("drm_mode") == "ON":
        raise PluginError("Stream is DRM-protected")

    delivery_status = data_json["delivery_status"]
    archive_mode = data_json["archive_mode"]
    log.debug(f"delivery_status = {delivery_status}, archive_mode = {archive_mode}")

    if delivery_status == "PREPARING":
        log.error("This event has not started yet")
        raise NoStreamsError(eplus_url)
    elif delivery_status == "STARTED":
        pass  # is live
    elif delivery_status == "STOPPED":
        if archive_mode == "ON":
            log.error("This event has ended, but the archive has not been generated yet")
        else:
            log.error("This event has ended and there is no archive for this event")
        raise NoStreamsError(eplus_url)
    elif delivery_status == "WAIT_CONFIRM_ARCHIVED":
        log.error("This event has ended, and the archive will be available shortly")
        raise NoStreamsError(eplus_url)
    elif delivery_status == "CONFIRMED_ARCHIVE":
        pass  # was live
    else:
        raise PluginError(f"Unknown delivery_status: {delivery_status}")

    m3u8_urls = schema_m3u8_urls.validate(body, "m3u8 urls") or []

    app_id = data_json["app_id"]

    stream_session = schema_stream_session.validate(body, "stream_session")
    if stream_session:
        session_update_url = f"https://live.eplus.jp/api/stream/{app_id}/status?sid={stream_session}"
    else:
        session_update_url = ""

    return {
        "app_id": app_id,
        "title": data_json["app_name"],
        "m3u8_urls": m3u8_urls,
        "session_update_url": session_update_url,
    }
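
# For reference, the validation schema above expects the page's embedded data to look
# roughly like the following. Field names are taken from the schema itself; the values
# are made up for illustration only:
#
#   {
#       "delivery_status": "STARTED",
#       "archive_mode": "ON",
#       "is_pass_ticket": "NO",
#       "app_id": "0000000001",
#       "app_name": "Example Event",
#       "drm_mode": "OFF"
#   }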

def _try_login(session: HTTPSession, eplus_url: str, login_id: str, password: str):
    log.info("Getting auth status")

    res = session.get(eplus_url)
    if res.url.startswith(eplus_url):
        # already logged in or no login required
        return

    auth_url = res.url

    cltft_token = res.headers.get("X-CLTFT-Token")
    if not cltft_token:
        raise PluginError("Unable to get X-CLTFT-Token for login")

    session.cookies.set("X-CLTFT-Token", cltft_token, domain="live.eplus.jp")

    if not all((login_id, password)):
        raise PluginError("Login credentials required")

    log.info("Sending pre-login info")

    login_res = session.post(
        "https://live.eplus.jp/member/api/v1/FTAuth/idpw", headers={
            "Content-Type": "application/json; charset=UTF-8",
            "Referer": auth_url,
            "X-Cltft-Token": cltft_token,
            "Accept": "*/*",
        }, json={
            "loginId": login_id,
            "loginPassword": password,
        })
    login_json = session.json(login_res, "login response")

    if not login_json.get("isSuccess"):
        raise PluginError("Login failed: Invalid id or password")

    log.info("Logging in via provided id and password")

    session.post(
        auth_url, data=urlencode({
            "loginId": login_id,
            "loginPassword": password,
            "Token.Default": cltft_token,
            "op": "nextPage",
        }), headers={
            "Content-Type": "application/x-www-form-urlencoded",
            "Referer": res.url,
        })


class EplusCtx:
    @property
    def app_id(self) -> str:
        return self._data["app_id"]

    @property
    def title(self) -> str:
        return self._data["title"]

    @property
    def m3u8_urls(self) -> List[str]:
        return self._data["m3u8_urls"]

    @property
    def session_update_url(self) -> str:
        return self._data["session_update_url"]

    @property
    def never_valid_session(self) -> bool:
        """
        Sometimes the previously obtained stream session is invalid, so we need to try again unconditionally.
        Being a class variable does not work since boolean is a primitive data type.
        """

        return self._never_valid_session

    @never_valid_session.setter
    def never_valid_session(self, value: bool):
        self._never_valid_session = value

    @property
    def http_session(self) -> HTTPSession:
        return self._session

    def __init__(self, session: HTTPSession, eplus_url: str, login_id: str, password: str, allow_relogin: bool):
        self._session = session
        self._eplus_url = eplus_url
        self._login_id = login_id
        self._password = password
        self._allow_relogin = allow_relogin

        self._never_valid_session = True

        self.login_and_refresh()

    def login_and_refresh(self):
        _try_login(self._session, self._eplus_url, self._login_id, self._password)
        self._data = _get_eplus_data(self._session, self._eplus_url)


class EplusSessionUpdater(Thread):
    """
    Cookies for Eplus expire after about 1 hour.
    To keep our live streaming going, we have to refresh them in time,
    otherwise we may get HTTP 403 and no new stream can be downloaded.
    """

    _eplus_ctx: ClassVar[EplusCtx]

    def __init__(self):
        super().__init__(name=self.__class__.__qualname__, daemon=True)

        self._session = self._eplus_ctx.http_session
        self._closed = Event()
        self._retries = 0
        self._last_expire_timestamp = time.time()
        self._log = logging.getLogger(f"{__name__}.{self.__class__.__qualname__}")

    def close(self):
        if self._closed.is_set():
            """
            "close(self)" will be called multiple times during the cleanup process of Streamlink.
238 | If Python is about to exit, logging something will raise an exception: 239 | > ImportError: sys.meta_path is None, Python is likely shutting down < 240 | """ 241 | return 242 | 243 | self._log.debug("Closing session updater...") 244 | self._closed.set() 245 | 246 | def run(self): 247 | self._log.debug("Starting session updater...") 248 | 249 | while not self._closed.is_set(): 250 | 251 | # Create a new session without cookies and send a request to Eplus url to obtain new cookies. 252 | self._log.debug(f"Refreshing cookies with url: {self._eplus_ctx.session_update_url}") 253 | try: 254 | fresh_response = self._session_duplicator().get(self._eplus_ctx.session_update_url) 255 | self._log.debug(f"Got new cookies: {fresh_response.cookies!r}") 256 | 257 | # Filter cookies. 258 | # For now, only the "ci_session" cookie is what we don't need, so ignore it. 259 | cookie = next( 260 | cookie for cookie in fresh_response.cookies 261 | if cookie.name != "ci_session" 262 | and cookie.expires > time.time() 263 | ) 264 | self._log.debug( 265 | "Found a valid cookie that will expire at " 266 | f"{time.strftime(r'%Y%m%d-%H%M%S%z', time.localtime(cookie.expires))}. " 267 | f"The cookie: {cookie!r}" 268 | ) 269 | 270 | # Update the global session with the new cookies. 271 | self._session.cookies.clear() 272 | self._session.cookies.update(fresh_response.cookies) 273 | 274 | self._retries = 0 275 | self._last_expire_timestamp = cookie.expires 276 | self._eplus_ctx.never_valid_session = False 277 | 278 | # Refresh cookies at most 5 minutes before expiration. 279 | wait_sec = (cookie.expires - 5 * 60) - time.time() 280 | # Don't be too close! Retry it right away. 281 | wait_sec = max(wait_sec, 0) 282 | # Eplus refreshes cookies every 15 minutes. 283 | wait_sec = min(15 * 60, wait_sec) 284 | 285 | self._log.debug( 286 | "Refreshed cookies. Next attempt will be at about " 287 | f"{time.strftime(r'%Y%m%d-%H%M%S%z', time.localtime(time.time() + wait_sec))}. " 288 | ) 289 | 290 | self._closed.wait(wait_sec) 291 | continue 292 | 293 | except StopIteration: 294 | # next() exhausted all cookies. 295 | self._log.error("No valid cookies found.") 296 | 297 | # Re-login may help 298 | if self._eplus_ctx.never_valid_session or self._eplus_ctx._allow_relogin: 299 | self._log.info("Trying to refresh the session. Any existing sessions will be kicked.") 300 | try: 301 | self._eplus_ctx.login_and_refresh() 302 | except Exception as e: 303 | self._log.error(f"Failed to refresh session: {e!r}") 304 | else: 305 | self._log.info("The session will not be refreshed since re-login is disabled.") 306 | 307 | except Exception as e: 308 | self._log.error(f"Failed to refresh cookies: {e!r}") 309 | 310 | self._retries += 1 311 | retry_delay_sec = 2 ** (self._retries - 1) 312 | 313 | if time.time() + retry_delay_sec > self._last_expire_timestamp + 1 * 60 * 60: 314 | self._log.error("We have not refreshed cookies in the past hour and will not try again.") 315 | 316 | self.close() 317 | return 318 | 319 | self._log.debug(f"We will retry in {retry_delay_sec}s.") 320 | 321 | self._closed.wait(retry_delay_sec) 322 | continue 323 | 324 | def _session_duplicator(self): 325 | """ 326 | Make a duplicate of the member "_session" except for cookies. 
327 | """ 328 | 329 | new_session = HTTPSession() 330 | 331 | new_session.proxies = self._session.proxies 332 | new_session.headers = self._session.headers 333 | new_session.trust_env = self._session.trust_env 334 | new_session.verify = self._session.verify 335 | new_session.cert = self._session.cert 336 | new_session.timeout = self._session.timeout 337 | 338 | return new_session 339 | 340 | # Prevent multiple updaters from being creating, running and stopping at the same time. 341 | _updater_mgmt_mutex: ClassVar[Lock] = Lock() 342 | _updater_num: ClassVar[int] = 0 343 | _updater: ClassVar[Optional["EplusSessionUpdater"]] = None 344 | 345 | @classmethod 346 | def start_one(cls): 347 | with cls._updater_mgmt_mutex: 348 | if not cls._eplus_ctx: 349 | raise PluginError("EplusCtx has not been set yet") 350 | 351 | # No session update required for free streams. 352 | if not cls._eplus_ctx.session_update_url: 353 | return 354 | 355 | if not cls._updater: 356 | cls._updater = cls() 357 | 358 | if cls._updater_num == 0: 359 | cls._updater.start() 360 | cls._updater_num += 1 361 | 362 | @classmethod 363 | def stop_one(cls): 364 | with cls._updater_mgmt_mutex: 365 | if not cls._updater: 366 | return 367 | 368 | if cls._updater_num > 0: 369 | cls._updater_num -= 1 370 | if cls._updater_num == 0: 371 | cls._updater.close() 372 | cls._updater.join() 373 | cls._updater = None 374 | 375 | 376 | class EplusHLSStreamWorker(HLSStreamWorker): 377 | def __init__(self, *args, **kwargs): 378 | super().__init__(*args, **kwargs) 379 | self._log = logging.getLogger(f"{__name__}.{self.__class__.__qualname__}") 380 | self._playlist_unchanged_timeout = 0.5 * self.session.options.get("stream-timeout") 381 | self._playlist_changed_timestamp = time.time() 382 | 383 | def reload_playlist(self): 384 | super().reload_playlist() 385 | """ 386 | For the live streaming of Eplus, there is no "#EXT-X-ENDLIST" tag in the playlists. It's OK because they are "Live 387 | Playlists" (rfc8216 § 6.2.2). However, when a live ends, the playlist still doesn't contain an "#EXT-X-ENDLIST" tag, 388 | and its content has not been changed since then. At the same time, the "worker" (self) keeps reloading the playlist 389 | but is not able to get any new stream. Since no new data is written to the buffer, after "stream-timeout" seconds, 390 | the "reader" will throw an exception and cause Streamlink to exit with a non-zero code. 391 | Thus, to gracefully shutdown Streamlink, we think: 392 | If the playlist remains unchanged for a while, the live has ended. 393 | """ 394 | if self.playlist_changed: 395 | self._playlist_changed_timestamp = time.time() 396 | elif (time.time() - self._playlist_changed_timestamp) > self._playlist_unchanged_timeout: 397 | self._log.debug( 398 | f"The {self._playlist_unchanged_timeout}-second timeout reached, " 399 | "this is the last playlist. 
" 400 | ) 401 | self.close() 402 | 403 | 404 | class EplusHLSStreamReader(HLSStreamReader): 405 | __worker__ = EplusHLSStreamWorker 406 | 407 | def open(self): 408 | super().open() 409 | EplusSessionUpdater.start_one() 410 | 411 | def close(self): 412 | super().close() 413 | EplusSessionUpdater.stop_one() 414 | 415 | 416 | class EplusHLSStream(HLSStream): 417 | __reader__ = EplusHLSStreamReader 418 | 419 | 420 | # Eplus inbound pages 421 | # - https://live.eplus.jp/ex/player?ib= 422 | # key is base64-encoded 64 byte unique key per ticket 423 | # - https://live.eplus.jp/ex/player?ib=&show_id= 424 | # there is an additional "show_id" field for pass tickets 425 | @pluginmatcher(re.compile( 426 | r"https://live\.eplus\.jp/ex/player\?ib=.+" 427 | )) 428 | # DRM test page and Eplus local pages 429 | @pluginmatcher(re.compile( 430 | r"https://live\.eplus\.jp/(?:sample|\d+)" 431 | )) 432 | @pluginargument( 433 | "id", 434 | metavar="ID", 435 | sensitive=True, 436 | help="The email address or mobile phone number associated with your Eplus account", 437 | ) 438 | @pluginargument( 439 | "password", 440 | metavar="PASSWORD", 441 | sensitive=True, 442 | help="The password of your Eplus account", 443 | ) 444 | @pluginargument( 445 | "allow-relogin", 446 | action="store_true", 447 | help="Allow to kick other sessions", 448 | ) 449 | class Eplus(Plugin): 450 | 451 | _ORIGIN = "https://live.eplus.jp" 452 | _REFERER = "https://live.eplus.jp/" 453 | 454 | def __init__(self, *args, **kwargs): 455 | super().__init__(*args, **kwargs) 456 | self.session.http.headers.update( 457 | { 458 | "Origin": self._ORIGIN, 459 | "Referer": self._REFERER, 460 | "User-Agent": useragents.SAFARI, 461 | } 462 | ) 463 | 464 | def _get_streams(self): 465 | eplus_ctx = EplusCtx( 466 | self.session.http, 467 | self.url, 468 | self.get_option("id"), 469 | self.get_option("password"), 470 | self.get_option("allow-relogin"), 471 | ) 472 | self.id = eplus_ctx.app_id 473 | self.title = eplus_ctx.title 474 | m3u8_urls = eplus_ctx.m3u8_urls 475 | 476 | EplusSessionUpdater._eplus_ctx = eplus_ctx 477 | 478 | # Multiple m3u8 playlists? I have never seen it. 479 | # For recent events of "Revue Starlight", a "multi-angle video" does not mean that there are 480 | # multiple playlists, but multiple cameras in one video. That's an edited video so viewers 481 | # cannot switch views. 
        for m3u8_url in m3u8_urls:
            yield from EplusHLSStream.parse_variant_playlist(self.session, m3u8_url).items()


__plugin__ = Eplus
--------------------------------------------------------------------------------
/nhltv.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

import base64
import logging
import re
import time

import requests.cookies

from streamlink.plugin import Plugin, PluginArgument, PluginArguments, PluginError, pluginmatcher
from streamlink.plugin.api import useragents
from streamlink.stream import HLSStream

log = logging.getLogger(__name__)


NHL_TEAMS = {
    "ANA": "Anaheim Ducks",
    "ARI": "Arizona Coyotes",
    "BOS": "Boston Bruins",
    "BUF": "Buffalo Sabres",
    "CAR": "Carolina Hurricanes",
    "CBJ": "Columbus Blue Jackets",
    "CGY": "Calgary Flames",
    "COL": "Colorado Avalanche",
    "CHI": "Chicago Blackhawks",
    "DAL": "Dallas Stars",
    "DET": "Detroit Red Wings",
    "EDM": "Edmonton Oilers",
    "FLA": "Florida Panthers",
    "LAK": "Los Angeles Kings",
    "MIN": "Minnesota Wild",
    "MTL": "Montreal Canadiens",
    "NJD": "New Jersey Devils",
    "NSH": "Nashville Predators",
    "NYI": "New York Islanders",
    "NYR": "New York Rangers",
    "OTT": "Ottawa Senators",
    "PHI": "Philadelphia Flyers",
    "PIT": "Pittsburgh Penguins",
    "SJS": "San Jose Sharks",
    "STL": "St Louis Blues",
    "TBL": "Tampa Bay Lightning",
    "TOR": "Toronto Maple Leafs",
    "VAN": "Vancouver Canucks",
    "VGK": "Vegas Golden Knights",
    "WPG": "Winnipeg Jets",
    "WSH": "Washington Capitals",
}


_STATS_API_URL = "https://statsapi.web.nhl.com/api/v1"
_MEDIA_API_URL = "https://mf.svc.nhl.com/ws/media/mf/v2.4"
_LOGIN_URL = "https://gateway.web.nhl.com/ws/subscription/flow/nhlPurchase.login"
_OAUTH_URL = "https://user.svc.nhl.com/oauth/token"
_NHL_WEBAPP_VER = b"web_nhl-v1.0.0:2d1d846ea3b194a18ef40ac9fbce97e3"


def now_ms():
    return int(round(time.time() * 1000))


@pluginmatcher(re.compile(
    r"https://www.nhl.com/tv/(?P<game_pk>\d+)"
))
class NHLTV(Plugin):

    NATIONAL_WEIGHT = 4
    HOME_WEIGHT = 3
    AWAY_WEIGHT = 2
    FRENCH_WEIGHT = 1

    arguments = PluginArguments(
        PluginArgument(
            "email",
            required=True,
            metavar="EMAIL",
            requires=["password"],
            help="The email associated with your NHL.tv (NHL.com) account.",
        ),
        PluginArgument(
            "password",
            sensitive=True,
            metavar="PASSWORD",
            help="An NHL.tv account password to use with --nhltv-email.",
        ),
        PluginArgument(
            "purge-credentials",
            action="store_true",
            help="Purge cached NHL.tv credentials to initiate a new session and reauthenticate.",
        ),
        PluginArgument(
            "prefer-french",
            action="store_true",
            help="""
            Prefer French language broadcasts. If this option is specified, the highest quality
            French language broadcast will be set as "best" quality whenever it is available.
            This option takes precedence over --nhltv-prefer-team.
            """,
        ),
        PluginArgument(
            "prefer-team",
            metavar="TEAM_ABBR",
            help="""
            3-letter abbreviation for your preferred NHL team.
If this option is specified, the 106 | highest quality home/away broadcast for the specified team will be set as "best" quality 107 | whenever it is available. 108 | """, 109 | ), 110 | ) 111 | 112 | def __init__(self, *args, **kwargs): 113 | super(NHLTV, self).__init__(*args, **kwargs) 114 | self.session.http.headers.update( 115 | { 116 | "Origin": "https://www.nhl.com", 117 | "Referer": self.url, 118 | "User-Agent": useragents.CHROME, 119 | } 120 | ) 121 | self.game_pk = self.match.group("game_pk") 122 | self.prefer_team = None 123 | 124 | @classmethod 125 | def stream_weight(cls, key): 126 | # NHL.tv may provide any combination of broadcasts depending on the game. 127 | # Prioritize national > home > away > french > multicam for best quality synonym 128 | try: 129 | (name, quality) = key.split("_") 130 | if quality == "audio": 131 | # radio feeds are all 48k audio 132 | weight = 48 133 | else: 134 | if quality.endswith("p60"): 135 | weight = int(quality[:-3]) + 10 136 | else: 137 | weight = int(quality.rstrip("p")) 138 | if key.startswith("national"): 139 | weight += cls.NATIONAL_WEIGHT 140 | elif key.startswith("home"): 141 | weight += cls.HOME_WEIGHT 142 | elif key.startswith("away"): 143 | weight += cls.AWAY_WEIGHT 144 | elif key.startswith("french"): 145 | weight += cls.FRENCH_WEIGHT 146 | return weight, "nhltv" 147 | except ValueError: 148 | pass 149 | 150 | return Plugin.stream_weight(key) 151 | 152 | @property 153 | def _authed(self): 154 | cookies = self.session.http.cookies 155 | return cookies.get("nhl_username") and cookies.get("Authorization") 156 | 157 | @property 158 | def _session_key(self): 159 | return self.cache.get("session_key") 160 | 161 | @property 162 | def _auth_token(self): 163 | return self.session.http.cookies.get("Authorization") 164 | 165 | @property 166 | def _media_auth(self): 167 | return self.session.http.cookies.get("mediaAuth_v2") 168 | 169 | def _login(self, email, password): 170 | auth = "Basic {}".format( 171 | base64.urlsafe_b64encode(_NHL_WEBAPP_VER).decode("ascii") 172 | ) 173 | headers = { 174 | "Referer": "https://www.nhl.com/login?forwardUrl=https://www.nhl.com/tv", 175 | "Authorization": auth, 176 | } 177 | params = {"grant_type": "client_credentials"} 178 | r = self.session.http.post(_OAUTH_URL, params=params, headers=headers) 179 | token = r.json().get("access_token") 180 | if not token: 181 | raise PluginError("Could not obtain oauth token") 182 | headers["Authorization"] = token 183 | data = {"nhlCredentials": {"email": email, "password": password}} 184 | self.session.http.post(_LOGIN_URL, headers=headers, json=data) 185 | log.info("Successfully logged in as {}".format(email)) 186 | self.save_cookies() 187 | self.cache.set("session_key", None) 188 | 189 | def _get_feeds(self): 190 | """Get list of broadcast feeds for this game from the NHL schedule API.""" 191 | url = "{}/schedule".format(_STATS_API_URL) 192 | params = { 193 | "gamePk": self.game_pk, 194 | "expand": ["schedule.game.content.media.epg", "schedule.teams"], 195 | } 196 | headers = {"Accept": "application/json"} 197 | json = self.session.http.get(url, params=params, headers=headers).json() 198 | feeds = [] 199 | for date in json.get("dates", []): 200 | for game in date.get("games", []): 201 | media = game.get("content", {}).get("media") 202 | for epg in media.get("epg", []): 203 | title = epg.get("title") 204 | if title in ("NHLTV", "Audio"): 205 | for item in epg.get("items", []): 206 | media_state = item.get("mediaState") 207 | if title == "Audio" and media_state != 
"MEDIA_ON": 208 | # Radio broadcast feeds are only available for live games. 209 | # We can skip them here for archived game VODs even though 210 | # they are still returned by the API 211 | continue 212 | call_letters = item.get("callLetters", "") 213 | feed_name = item.get("feedName", "") 214 | feed_type = item.get("mediaFeedType", "").lower() 215 | if feed_type in ("home", "away"): 216 | team = ( 217 | game.get("teams", {}) 218 | .get(feed_type, {}) 219 | .get("team", {}) 220 | ) 221 | abbr = team.get("abbreviation") 222 | feed_name = "-".join([abbr, call_letters]) 223 | if abbr == self.prefer_team: 224 | if feed_type == "home": 225 | self.__class__.HOME_WEIGHT = 5 226 | else: 227 | self.__class__.AWAY_WEIGHT = 5 228 | elif feed_type in ("national", "french"): 229 | feed_name = call_letters 230 | audio_only = False 231 | if title == "Audio": 232 | audio_only = True 233 | broadcast_type = "radio" 234 | elif title == "NHLTV": 235 | broadcast_type = "NHL.tv" 236 | else: 237 | broadcast_type = title 238 | log.info( 239 | "Found {} {} broadcast feed ({})".format( 240 | feed_type, broadcast_type, feed_name 241 | ) 242 | ) 243 | feeds.append((item, audio_only)) 244 | return feeds 245 | 246 | def _get_session_key(self, event_id): 247 | url = "{}/stream".format(_MEDIA_API_URL) 248 | params = { 249 | "eventId": event_id, 250 | "format": "json", 251 | "platform": "WEB_MEDIAPLAYER", 252 | "subject": "NHLTV", 253 | "_": now_ms(), 254 | } 255 | headers = { 256 | "Accept": "application/json", 257 | "Authorization": self._auth_token, 258 | } 259 | json = self.session.http.get(url, params=params, headers=headers).json() 260 | session_key = json.get("session_key") 261 | if not session_key: 262 | status = json.get("status_code") 263 | if status == -3500: 264 | log.debug( 265 | "Plugin is being rate-limited for making too many session key requests." 266 | ) 267 | raise PluginError( 268 | "Could not obtain session key: {}".format(json.get("status_message")) 269 | ) 270 | # This session key is normally supposed to last for a single browser session. 271 | # If we repeatedly request new session keys we will get rate limited by the NHL.tv backend, 272 | # so we cache session keys for 2.5 hours (roughly the length of an NHL hockey game) to 273 | # aproximate normal behavior. 
274 | self.cache.set("session_key", session_key, 9000) 275 | 276 | def _get_streams_for_feed(self, feed, audio_only=False): 277 | """Get HLS streams for the specified broadcast feed.""" 278 | event_id = feed.get("eventId") 279 | content_id = feed.get("mediaPlaybackId") 280 | feed_type = feed.get("mediaFeedType") 281 | streams = {} 282 | if not self._session_key: 283 | self._get_session_key(event_id) 284 | url = "{}/stream".format(_MEDIA_API_URL) 285 | if audio_only: 286 | scenario = "HTTP_CLOUD_AUDIO" 287 | else: 288 | scenario = "HTTP_CLOUD_WIRED_60_ADS" 289 | params = { 290 | "contentId": content_id, 291 | "playbackScenario": scenario, 292 | "sessionKey": self._session_key, 293 | "auth": "response", 294 | "format": "json", 295 | "platform": "WEB_MEDIAPLAYER", 296 | "subject": "NHLTV", 297 | "_": now_ms(), 298 | } 299 | headers = { 300 | "Accept": "application/json", 301 | "Authorization": self._auth_token, 302 | } 303 | json = self.session.http.get(url, params=params, headers=headers).json() 304 | if json.get("status_code") != 1: 305 | log.debug( 306 | "Could not get streams for {}/{}: {}".format( 307 | event_id, content_id, json.get("status_message") 308 | ) 309 | ) 310 | return streams 311 | for attr in json.get("session_info", {}).get("sessionAttributes", []): 312 | name = attr.get("attributeName") 313 | if name == "mediaAuth_v2": 314 | auth = attr.get("attributeValue", "") 315 | if self._media_auth != auth: 316 | cookie = requests.cookies.create_cookie( 317 | name, auth, domain=".nhl.com" 318 | ) 319 | self.session.http.cookies.set_cookie(cookie) 320 | for event in json.get("user_verified_event", []): 321 | for content in event.get("user_verified_content", []): 322 | for media in content.get("user_verified_media_item", {}): 323 | if media.get("auth_status") != "SuccessStatus": 324 | msg = ( 325 | "Your account is not authorized to view this content." 326 | " Accounts without an active NHL.tv subscription can only view" 327 | " designated free games. Please refer to NHL.com to see a schedule" 328 | " of upcoming free games." 329 | ) 330 | raise PluginError(msg) 331 | if ( 332 | media.get("blackout_status", {}).get("status") 333 | != "SuccessStatus" 334 | ): 335 | msg = ( 336 | "This content is unavailable in your region due to NHL blackout restrictions." 337 | " For more information visit: https://www.nhl.com/info/nhltv-blackout-detector" 338 | ) 339 | raise PluginError(msg) 340 | url = media.get("url") 341 | if url: 342 | prefix = "{}_".format(feed_type.lower()) 343 | if audio_only: 344 | name_fmt = "audio" 345 | else: 346 | name_fmt = None 347 | parsed = HLSStream.parse_variant_playlist( 348 | self.session, url, name_fmt=name_fmt, name_prefix=prefix 349 | ) 350 | for name, stream in parsed.items(): 351 | if name.endswith("_alt"): 352 | # 720p_alt is actually 720p60, not an alternate url 353 | name = "{}60".format(name[:-4]) 354 | streams[name] = stream 355 | return streams 356 | 357 | def _get_streams(self): 358 | streams = {} 359 | if self._authed and not self.options.get("purge_credentials"): 360 | log.info("Using cached credentials") 361 | if self._session_key: 362 | log.debug("Using cached session key") 363 | else: 364 | self.clear_cookies() 365 | self._login(self.options.get("email"), self.options.get("password")) 366 | 367 | if self.options.get("prefer_french"): 368 | log.info( 369 | "French language broadcast will be preferred when it is available." 
            )
            self.__class__.FRENCH_WEIGHT = 10
        prefer_team = self.options.get("prefer_team")
        if prefer_team:
            prefer_team = prefer_team.upper()
            if prefer_team in NHL_TEAMS:
                team = NHL_TEAMS[prefer_team]
                log.info(
                    "{} home/away broadcast will be preferred when it is available.".format(
                        team
                    )
                )
                self.prefer_team = prefer_team
            else:
                log.info(
                    "Unknown team {}. Valid choices for --nhltv-prefer-team are:".format(
                        prefer_team
                    )
                )
                log.info(", ".join(NHL_TEAMS.keys()))

        for feed, audio_only in self._get_feeds():
            streams.update(self._get_streams_for_feed(feed, audio_only))

        return streams


__plugin__ = NHLTV
--------------------------------------------------------------------------------
/spwn.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""spwn.jp streamlink plugin.

Requires valid SPWN account and event tickets.
"""

import logging
import re
from datetime import datetime, timedelta
from typing import Any, Dict, NamedTuple

import requests
from requests.exceptions import HTTPError
from streamlink.plugin import Plugin, PluginError, pluginargument, pluginmatcher
from streamlink.plugin.api import useragents, validate
from streamlink.stream.hls import HLSStream

log = logging.getLogger(__name__)


class VideoPart(NamedTuple):
    video_id: str
    name: str
    url: str
    cookie: Dict[str, Any]


class FBSession:
    """Google firebase auth session."""

    _IDENTITY_URL = "https://www.googleapis.com/identitytoolkit/v3/relyingparty"
    _TOKEN_URL = "https://securetoken.googleapis.com/v1"

    def __init__(self, session, api_key, auth_domain, project_id):
        self.session = session
        self.api_key = api_key
        self.auth_domain = auth_domain
        self.project_id = project_id
        self._id_token = None
        self.refresh_token = None
        self.expires = None

    def set_refresh_token(self, refresh_token):
        self.refresh_token = refresh_token
        self.expires = datetime.min

    def login(self, email, password):
        url = f"{self._IDENTITY_URL}/verifyPassword"
        headers = {
            "access-control-request-headers": "content-type,x-client-version",
            "access-control-request-method": "POST",
        }
        self.session.http.options(
            url,
            headers=headers,
            params={"key": self.api_key},
        )
        headers = {
            "x-client-version": "Chrome/JsCore/7.20.0/FirebaseCore-web",
        }
        result = self.session.http.post(
            url,
            headers=headers,
            params={"key": self.api_key},
            data={
                "email": email, "password": password, "returnSecureToken": True
            },
        )
        data = result.json()
        self._id_token = data["idToken"]
        self.expires = datetime.now() + timedelta(
            seconds=int(data["expiresIn"])
        )
        self.refresh_token = data["refreshToken"]
        log.info(f"Logged into SPWN as {data['email']}")

    @property
    def id_token(self):
        if self._id_token and self.expires and self.expires > datetime.now():
            return self._id_token
        if not self.refresh_token:
            raise ValueError("Not logged in, no refresh token")
        url = f"{self._TOKEN_URL}/token"
        headers = {
            "access-control-request-headers": "x-client-version",
            "access-control-request-method": "POST",
        }
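        # Mirror what the web player does in a browser: send the CORS preflight first,
        # then POST the refresh token to the Secure Token endpoint to obtain a fresh
        # Firebase ID token.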
        self.session.http.options(
            url,
            headers=headers,
            params={"key": self.api_key},
        )
        headers = {
            "x-client-version": "Chrome/JsCore/7.20.0/FirebaseCore-web",
        }
        result = self.session.http.post(
            url,
            headers=headers,
            params={"key": self.api_key},
            data={
                "grant_type": "refresh_token",
                "refreshToken": self.refresh_token,
            },
        )
        data = result.json()
        self.expires = datetime.now() + timedelta(
            seconds=int(data["expires_in"])
        )
        self._id_token = data["id_token"]
        return self._id_token


@pluginmatcher(re.compile(
    r"https://(virtual\.)?spwn\.jp/_?events/(?P<eid>[^/]+)"
))
@pluginargument(
    "email",
    metavar="EMAIL",
    requires=["password"],
    help="The email associated with your SPWN account.",
)
@pluginargument(
    "password",
    sensitive=True,
    metavar="PASSWORD",
    help="Account password to use with --spwn-email.",
)
@pluginargument(
    "token",
    sensitive=True,
    metavar="TOKEN",
    help="Account refresh token to use (instead of --spwn-email / --spwn-password).",
)
@pluginargument(
    "video-id",
    metavar="VIDEO-ID",
    help="The video ID to stream (if there are multiple in the event to choose from).",
)
@pluginargument(
    "low-latency",
    help="Prefer low latency (LL) live stream when available.",
    action="store_true",
)
class Spwn(Plugin):

    _BASE_URL = "https://spwn.jp"
    _BALUS_URL = "https://us-central1-spwn-balus.cloudfunctions.net"
    _PUBLIC_URL = "https://public.spwn.jp"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.session.http.headers.update(
            {
                "Origin": self._BASE_URL,
                "Referer": self._BASE_URL,
                "User-Agent": useragents.CHROME,
            }
        )
        self._fb = FBSession(
            self.session,
            self._fetch_fb_api_key(),
            "spwn.jp",
            "spwn-balus",
        )
        self._authed = False

    def _fetch_fb_api_key(self):
        # get firebase API key
        scripts = self.session.http.get(
            self._BASE_URL,
            schema=validate.Schema(
                validate.parse_html(),
                validate.xml_findall(".//script")
            )
        )

        for script in scripts:
            src = script.get("src", "")
            m = re.match(r"/static/js/main.*\.js", src)
            if m:
                break
        else:
            return None
        body = self.session.http.get(f"{self._BASE_URL}{src}").text
        m = re.search(
            r'REACT_APP_FB_API_KEY:\s*"(?P<key>[a-zA-Z0-9\-]+)"', body
        )
        if m:
            return m.group("key")
        return None

    def _login(self):
        if not self._authed:
            token = self.options.get("token")
            if token:
                self._fb.set_refresh_token(token)
            else:
                self._fb.login(
                    self.options.get("email"), self.options.get("password")
                )
            self._authed = True

    @classmethod
    def stream_weight(cls, stream):
        try:
            _, stream = stream.rsplit("_", 1)
        except ValueError:
            pass

        return super().stream_weight(stream)

    @staticmethod
    def _raise_ticket(stream_info):
        if not stream_info.get("hasTickets"):
            raise PluginError("You do not have a ticket for this event")

    def _get_streams(self):
        try:
            self._login()
        except Exception as e:
            raise PluginError("SPWN login failed") from e
        eid =
self.match.group("eid") 223 | event_info = self._get_event_info(eid) 224 | 225 | self.id = eid 226 | self.title = event_info.get("title") 227 | if not self.title: 228 | self.url = self.url.replace("/_events/", "/events/") 229 | res = self.session.http.get(self.url) 230 | res.encoding = res.apparent_encoding # override "ISO-8859-1" by "utf-8" 231 | self.title = validate.Schema( 232 | validate.parse_html(), 233 | validate.xml_xpath_string(".//head/meta[@property='og:title'][@content][1]/@content"), 234 | ).validate(res.text) 235 | 236 | log.info(f"Found SPWN event: {self.title}") 237 | stream_info = self._get_streaming_key(eid) 238 | if stream_info.get("isError"): 239 | self._raise_ticket(stream_info) 240 | raise PluginError("Error fetching stream info from SPWN API") 241 | cookies = stream_info.get("cookies") 242 | if not cookies: 243 | self._raise_ticket(stream_info) 244 | msg = stream_info.get("msg", "") 245 | log.info(f"No available stream for this event: {msg}") 246 | return 247 | playlist = {} 248 | for part in self._get_parts( 249 | event_info, stream_info, opt_id=self.options.get("video-id") 250 | ): 251 | cookies = self.session.http.cookies.copy() 252 | for k, v in part.cookie.items(): 253 | cookie = requests.cookies.create_cookie(k, v) 254 | cookies.set_cookie(cookie) 255 | name = part.name.replace(" ", "_").lower() 256 | playlist.update( 257 | HLSStream.parse_variant_playlist( 258 | self.session, 259 | part.url, 260 | name_prefix=f"{name}_" if name else "", 261 | cookies=cookies, 262 | ) 263 | ) 264 | return playlist 265 | 266 | def _get_streaming_key(self, eid): 267 | url = f"{self._BALUS_URL}/getStreamingKey/" 268 | headers = { 269 | "Authorization": f"Bearer {self._fb.id_token}", 270 | } 271 | result = self.session.http.post( 272 | url, headers=headers, json={"eid": eid} 273 | ) 274 | return result.json() 275 | 276 | def _get_event_info(self, eid): 277 | try: 278 | return self._get_event_data(eid) 279 | except (HTTPError, PluginError): 280 | pass 281 | return self._get_goods_data(eid) 282 | 283 | def _get_event_data(self, eid): 284 | url = f"{self._PUBLIC_URL}/event-pages/{eid}/data.json" 285 | result = self.session.http.get(url) 286 | return result.json().get("basic_data", {}) 287 | 288 | def _get_goods_data(self, eid): 289 | url = f"{self._BALUS_URL}/getSellingGoods/" 290 | result = self.session.http.get(url, params={"eventId": eid}) 291 | goods = result.json().get("data", []) 292 | title = goods[-1].get("eventTitle", eid) if goods else None 293 | return {"title": title, "parts": [{"name": ""}]} 294 | 295 | def _get_parts(self, event_info, stream_info, opt_id=None): 296 | if opt_id: 297 | log.info( 298 | "--spwn-video-id is deprecated, " 299 | "use quality name to select a stream instead" 300 | ) 301 | cookies = stream_info.get("cookies", {}) 302 | parts = event_info.get("parts", []) 303 | # NOTE: have observed videoIds being returned in random order, but 304 | # ID naming seems to follow Cv convention, where n is 305 | # incremented with part number, so sorting should give us the expected 306 | # result 307 | video_ids = sorted(stream_info.get("videoIds", [])) 308 | for i, video_id in enumerate(video_ids, start=1): 309 | url = None 310 | if self.options.get("low-latency"): 311 | ll_cookie = cookies.get(video_id, {}).get("LL", {}) 312 | url = ll_cookie.get("url") 313 | if url: 314 | log.info(f"Low-latency stream available for {video_id}") 315 | cookie = ll_cookie.get("cookie", {}) 316 | if not url: 317 | default_cookie = cookies.get(video_id, {}).get("default", {}) 318 | url 
= default_cookie.get("url")
                cookie = default_cookie.get("cookie", {})
            try:
                name = parts[i].get("name", f"part{i}")
            except IndexError:
                name = f"part{i}"
            if len(parts) > 1 or len(video_ids) > 1:
                log.info(f"Multi-part event: {name} ({video_id})")
            if not url or (opt_id and video_id != opt_id):
                continue
            yield VideoPart(
                video_id,
                name.replace(" ", "_").lower(),
                url,
                cookie,
            )


__plugin__ = Spwn
--------------------------------------------------------------------------------
/zan.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""zan-live.com streamlink plugin.

Requires valid Z-aN account and event tickets.
"""

import logging
import re

from streamlink.plugin import (
    Plugin,
    PluginError,
    pluginargument,
    pluginmatcher,
)
from streamlink.plugin.api import useragents, validate
from streamlink.stream.hls import HLSStream

log = logging.getLogger(__name__)


@pluginmatcher(
    re.compile(
        r"https://(www\.)?zan-live\.com/([^/]+/)?live/play/(?P<ticket_id>[^/]+)/(?P<live_id>[^/]+)"
    )
)
@pluginargument(
    "email",
    metavar="EMAIL",
    requires=["password"],
    help="The email associated with your Z-aN account.",
    required=True,
)
@pluginargument(
    "password",
    sensitive=True,
    metavar="PASSWORD",
    help="Account password to use with --zan-email.",
)
class Zan(Plugin):

    _BASE_URL = "https://www.zan-live.com"
    _LOGIN_URL = f"{_BASE_URL}/auth/login"
    _PLAY_URL = f"{_BASE_URL}/live/play/{{ticket_id}}/{{live_id}}"

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.session.http.headers.update(
            {
                "Origin": self._BASE_URL,
                "Referer": self._BASE_URL,
                "User-Agent": useragents.CHROME,
            }
        )
        self._authed = False

    def _login(self):
        if self._authed:
            return
        csrf = ""
        for input_tag in self.session.http.get(
            self._LOGIN_URL,
            schema=validate.Schema(
                validate.parse_html(),
                validate.xml_findall(".//input[@name='_csrf']")
            )
        ):
            csrf = input_tag.get("value", "")
        email = self.options.get("email")
        password = self.options.get("password")
        data = {
            "mailAddress": email,
            "password": password,
            "isPersistentLogin": "1",
            "__submit__": "Log In",
            "_csrf": csrf,
        }
        self.session.http.post(self._LOGIN_URL, data=data)
        if not self.session.http.cookies.get("Z-aN_sid"):
            raise PluginError("Z-aN login failed")
        log.info(f"Logged into Z-aN as {email}")
        self._authed = True

    def get_title(self):
        return self.title

    def _get_streams(self):
        try:
            self._login()
        except Exception as e:
            raise PluginError("Z-aN login failed") from e
        ticket_id = self.match.group("ticket_id")
        live_id = self.match.group("live_id")
        url = self._PLAY_URL.format(ticket_id=ticket_id, live_id=live_id)
        live_url = ""
        for meta_tag in self.session.http.get(
            url,
            schema=validate.Schema(
                validate.parse_html(),
                validate.xml_findall(".//meta[@name='live-url']"),
            ),
        ):
            live_url = meta_tag.get("content", "")
        playlist = {}
        if live_url:
            log.debug(f"Got live-url {live_url}")
            playlist.update(
HLSStream.parse_variant_playlist( 109 | self.session, 110 | live_url, 111 | ) 112 | ) 113 | return playlist 114 | 115 | 116 | __plugin__ = Zan 117 | --------------------------------------------------------------------------------