├── .gitattributes ├── .github └── workflows │ ├── codeql-analysis.yml │ └── pylint.yml ├── .gitignore ├── README.md ├── backend ├── .gitignore ├── .vscode │ └── launch.json ├── Dockerfile ├── extract_details │ ├── bitchute.py │ ├── lbry.py │ ├── rumble.py │ └── youtube.py ├── main.py ├── package.json ├── redis │ └── docker-compose.yml ├── requirements.txt ├── test.py ├── utils │ ├── __init__.py │ ├── optimize.py │ ├── spelling.py │ └── util.py └── yarn.lock ├── docker-compose.yml └── frontend ├── .gitignore ├── .yarnclean ├── Dockerfile ├── README.md ├── components ├── BadgeLive.jsx ├── BoxForContent.jsx ├── ChannelHeader.jsx ├── Content.jsx ├── JsonEdit.jsx ├── Navbar.jsx ├── Settings.jsx ├── Sidebar.jsx ├── Skeleton.jsx ├── SmallVideoBox.jsx ├── SubscribeButton.jsx ├── Subscriptions.jsx ├── Suggestion.jsx ├── VideoBoard.jsx ├── VideoPlayer.jsx ├── Watch.jsx ├── WatchedSymbol.jsx ├── data.jsx ├── footer.jsx └── platformTag.jsx ├── layouts └── retube.js ├── package.json ├── pages ├── _app.js ├── channel.js ├── index.js ├── json.js ├── popular.js ├── search.js ├── settings.js ├── subscriptions.js └── watch.js ├── postcss.config.js ├── public ├── favicon.ico └── vercel.svg ├── routes └── routes.js ├── tailwind.config.js ├── utils.js └── yarn.lock /.gitattributes: -------------------------------------------------------------------------------- 1 | # Auto detect text files and perform LF normalization 2 | * text=auto 3 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 
6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ main ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ main ] 20 | schedule: 21 | - cron: '29 5 * * 6' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'javascript', 'python' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ] 37 | # Learn more: 38 | # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed 39 | 40 | steps: 41 | - name: Checkout repository 42 | uses: actions/checkout@v2 43 | 44 | # Initializes the CodeQL tools for scanning. 45 | - name: Initialize CodeQL 46 | uses: github/codeql-action/init@v1 47 | with: 48 | languages: ${{ matrix.language }} 49 | # If you wish to specify custom queries, you can do so here or in a config file. 50 | # By default, queries listed here will override any specified in a config file. 51 | # Prefix the list here with "+" to use these queries and those in the config file. 52 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 53 | 54 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 55 | # If this step fails, then you should remove it and run the build manually (see below) 56 | - name: Autobuild 57 | uses: github/codeql-action/autobuild@v1 58 | 59 | # ℹ️ Command-line programs to run using the OS shell. 
60 | # 📚 https://git.io/JvXDl 61 | 62 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 63 | # and modify them (or add more) to build your code if your project 64 | # uses a compiled language 65 | 66 | #- run: | 67 | # make bootstrap 68 | # make release 69 | 70 | - name: Perform CodeQL Analysis 71 | uses: github/codeql-action/analyze@v1 72 | -------------------------------------------------------------------------------- /.github/workflows/pylint.yml: -------------------------------------------------------------------------------- 1 | name: Pylint 2 | 3 | on: [push] 4 | 5 | jobs: 6 | build: 7 | 8 | runs-on: ubuntu-latest 9 | 10 | steps: 11 | - uses: actions/checkout@v2 12 | - name: Set up Python 3.9 13 | uses: actions/setup-python@v2 14 | with: 15 | python-version: 3.9 16 | - name: Install dependencies 17 | run: | 18 | python -m pip install --upgrade pip 19 | pip install pylint 20 | cd backend 21 | pip install -r requirements.txt 22 | - name: Analysing the code with pylint 23 | run: | 24 | pylint `find . -type f|grep .py$|xargs` 25 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | redis-data/ 2 | .vscode 3 | *credentials* 4 | backend/testing.py 5 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # ReTube 2 | An alternative front-end to `YouTube`, `Odysee/Lbry`, `Bitchute`, `Rumble`, etc. 3 | 4 | Access all videos from one place. \ 5 | No account, no ads, no tracking. 
\ 6 | Subscriptions are saved locally in [`LocalStorage`](https://javascript.info/localstorage) (TODO: Optional `Sync`) 7 | 8 | Mirrored at Gitlab: https://gitlab.com/PrivOci/ReTube 9 | 10 | # DEMO 11 | [![DEMO](https://user-images.githubusercontent.com/74867724/112172240-736d0780-8bec-11eb-97a8-61b1a7e9eba4.png)](https://streamable.com/zx1cpu) 12 | 13 | 14 | # Development 15 | I'm not a web developer and this is my first project; if you like the idea please contribute, any kind of feedback is welcome. \ 16 | The frontend is built using [`NextJS`](https://nextjs.org) with [`Tailwind`](https://tailwindcss.com). \ 17 | The backend uses [`FastApi`](https://fastapi.tiangolo.com) and [`Redis`](https://redis.io). 18 | 19 | I chose them because they are very simple and easy to use. 20 | 21 | # Setup local instance: 22 | ## Docker 23 | `git clone https://github.com/PrivOci/ReTube` \ 24 | `cd ReTube` \ 25 | `docker-compose up -d` 26 | 27 | # Manual: 28 | # Backend 29 | ## Setup Redis for Caching 30 | 31 | `cd backend/redis` \ 32 | `docker-compose up -d` 33 | 34 | ## FastApi 35 | `cd backend` \ 36 | `pip3 install virtualenv` \ 37 | `python -m virtualenv venv` \ 38 | `venv\Scripts\activate` \ 39 | `pip install -r requirements.txt` 40 | 41 | `uvicorn main:app --reload` or `python ./main.py` \ 42 | API docs: `http://localhost:8000/docs` 43 | 44 | # Frontend 45 | `cd frontend` \ 46 | `yarn` \ 47 | `yarn run dev` 48 | -------------------------------------------------------------------------------- /backend/.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg
26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 98 | __pypackages__/ 99 | 100 | # Celery stuff 101 | celerybeat-schedule 102 | celerybeat.pid 103 | 104 | # SageMath parsed files 105 | *.sage.py 106 | 107 | # Environments 108 | .env 109 | .venv 110 | env/ 111 | venv/ 112 | ENV/ 113 | env.bak/ 114 | venv.bak/ 115 | .idea 116 | 117 | # Spyder project settings 118 | .spyderproject 119 | .spyproject 120 | 121 | # Rope project settings 122 | .ropeproject 123 | 124 | # mkdocs documentation 125 | /site 126 | 127 | # mypy 128 | .mypy_cache/ 129 | .dmypy.json 130 | dmypy.json 131 | 132 | # Pyre type checker 133 | .pyre/ 134 | 135 | # pytype static type analyzer 136 | .pytype/ 137 | 138 | # Cython debug symbols 139 | cython_debug/ 140 | 141 | # retube 142 | redis/redis-data -------------------------------------------------------------------------------- /backend/.vscode/launch.json: -------------------------------------------------------------------------------- 1 | { 2 | // Use IntelliSense to learn about possible attributes. 3 | // Hover to view descriptions of existing attributes. 
4 | // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387 5 | "version": "0.2.0", 6 | "configurations": [ 7 | { 8 | "name": "Python: Current File", 9 | "type": "python", 10 | "request": "launch", 11 | "program": "${file}", 12 | "console": "integratedTerminal" 13 | } 14 | ] 15 | } -------------------------------------------------------------------------------- /backend/Dockerfile: -------------------------------------------------------------------------------- 1 | # https://github.com/tiangolo/uvicorn-gunicorn-fastapi-docker 2 | FROM tiangolo/uvicorn-gunicorn:latest 3 | 4 | COPY ./ /app 5 | WORKDIR /app/ 6 | ENV PYTHONPATH=/app 7 | 8 | RUN pip install -r requirements.txt -------------------------------------------------------------------------------- /backend/extract_details/bitchute.py: -------------------------------------------------------------------------------- 1 | import re 2 | import urllib.parse 3 | from datetime import datetime 4 | 5 | import cloudscraper 6 | import dateparser 7 | from bs4 import BeautifulSoup 8 | from loguru import logger 9 | 10 | from utils.util import get_xml_stream_as_json, parsed_time_to_seconds 11 | 12 | 13 | class BitchuteProcessor: 14 | """Class to process Bitchute videos and channels.""" 15 | BITCHUTE = "bc" 16 | BITCHUTE_BASE = "https://www.bitchute.com" 17 | BITCHUTE_XML = f"{BITCHUTE_BASE}/feeds/rss/channel/" 18 | 19 | _headers = { 20 | 'authority': 'www.bitchute.com', 21 | 'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="90"', 22 | 'accept': '*/*', 23 | 'dnt': '1', 24 | 'x-requested-with': 'XMLHttpRequest', 25 | 'sec-ch-ua-mobile': '?0', 26 | 'user-agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.11 (KHTML, like Gecko) Ubuntu/14.04.6 ' 27 | 'Chrome/81.0.3990.0 Safari/537.36', 28 | 'content-type': 'application/x-www-form-urlencoded; charset=UTF-8', 29 | 'origin': 'https://www.bitchute.com', 30 | 'sec-fetch-site': 'same-origin', 31 | 'sec-fetch-mode': 'cors', 32 | 'sec-fetch-dest': 'empty', 33 
| 'accept-language': 'en-US,en-GB;q=0.9,en;q=0.8', 34 | } 35 | 36 | def __init__(self) -> None: 37 | # self.session = requests.Session() 38 | self.session = cloudscraper.create_scraper() # returns a CloudScraper instance 39 | self.cookies = {} 40 | # get csrftoken 41 | res = self.session.get( 42 | f'{self.BITCHUTE_BASE}/help-us-grow/', headers=self._headers) 43 | if res.ok: 44 | self.cookies = self.session.cookies.get_dict() 45 | if not self.cookies or not 'csrftoken' in self.cookies: 46 | self.cookies["csrftoken"] = "lTJ0imXW23ycznfCjFy8rwqJxbtYZMJPgCbm2WHYF3l1454XjvkXTjvUvTsdtPCt" 47 | self._headers["cookie"] = f'csrftoken={self.cookies["csrftoken"]}' 48 | 49 | def get_video_details(self, video_url) -> dict: 50 | req = self.session.get(video_url) 51 | if not req.ok: 52 | return {} 53 | soup = BeautifulSoup(req.text, 'html.parser') 54 | 55 | data = { 56 | 'csrfmiddlewaretoken': self.cookies["csrftoken"] 57 | } 58 | 59 | self._headers['referer'] = video_url 60 | count_req = self.session.post( 61 | f'{video_url.strip("/")}/counts/', data=data, headers=self._headers) 62 | if count_req.ok: 63 | count_json = count_req.json() 64 | else: 65 | count_json = None 66 | 67 | publish_date = soup.find( 68 | "div", {"class": "video-publish-date"}).text.strip() 69 | splited_date = publish_date.split(" on ")[1].split() 70 | date_result = f"{splited_date[0]} {re.sub('[^0-9]', '', splited_date[1])} {splited_date[2]}" 71 | publish_date = datetime.strptime(date_result, '%B %d %Y.').timestamp() 72 | 73 | video_details = { 74 | "id": video_url.split("/video/")[1].strip().strip('/'), 75 | "title": soup.find("h1", {"id": "video-title"}).text, 76 | "description": soup.find(id="video-description").text, 77 | "author": soup.find("p", {"class": "owner"}).a.text, 78 | "channelUrl": "https://bitchute.com" + 79 | soup.find("p", {"class": "name"}).a["href"], 80 | "duration": "", 81 | "views": count_json["view_count"] if count_json else None, 82 | "likeCount": count_json["like_count"] if 
count_json else None, 83 | "dislikeCount": count_json["dislike_count"] if count_json else None, 84 | "subscriberCount": count_json["subscriber_count"] if count_json else None, 85 | "thumbnailUrl": soup.find("video", {"id": "player"})["poster"], 86 | "createdAt": int(publish_date) * 1000, 87 | "streamUrl": soup.find("video", {"id": "player"}).source["src"], 88 | } 89 | return video_details 90 | 91 | def _parse_bitchute_details(self, entry) -> dict: 92 | video_entry = { 93 | "thumbnailUrl": entry["images"]["thumbnail"], 94 | "title": entry["name"], 95 | "author": entry["channel_name"], 96 | "views": entry["views"] 97 | } 98 | date_formatted = dateparser.parse(entry["published"]) 99 | video_entry["createdAt"] = date_formatted.timestamp() * 1000 100 | video_entry["videoUrl"] = f"https://bitchute.com{entry['path']}" 101 | video_entry["duration"] = entry["duration"] 102 | 103 | video_entry["platform"] = self.BITCHUTE 104 | return video_entry 105 | 106 | def search_video(self, search_query) -> dict: 107 | search_terms = search_query["query"] 108 | max_results = search_query["max"] 109 | encoded_query = urllib.parse.quote(search_terms) 110 | 111 | data_dict = { 112 | "platform": self.BITCHUTE, 113 | "ready": False 114 | } 115 | 116 | data = { 117 | 'csrfmiddlewaretoken': self.cookies['csrftoken'], 118 | 'query': encoded_query, 119 | 'kind': 'video', 120 | 'duration': '', 121 | 'sort': '', 122 | 'page': '0' 123 | } 124 | 125 | response = self.session.post( 126 | 'https://www.bitchute.com/api/search/list/', headers=self._headers, data=data) 127 | if not response.ok: 128 | return data_dict 129 | response_json = response.json() 130 | if "success" not in response_json or response_json["success"] != True: 131 | return data_dict 132 | 133 | video_entries = [] 134 | for entry in response_json["results"][:max_results]: 135 | video_entry = self._parse_bitchute_details(entry) 136 | if video_entry: 137 | video_entries.append(video_entry) 138 | 139 | data_dict["content"] = 
video_entries 140 | data_dict["ready"] = True 141 | 142 | return data_dict 143 | 144 | def channel_data(self, details: dict) -> dict: 145 | if details['id'] == "popular": 146 | return self.get_popular() 147 | data_dict = { 148 | "ready": False, 149 | "platform": self.BITCHUTE 150 | } 151 | channel_rss = f"{self.BITCHUTE_XML}{details['id']}" 152 | content = get_xml_stream_as_json(channel_rss, session=self.session) 153 | if not content: 154 | return data_dict 155 | 156 | channel_json = content["rss"]["channel"] 157 | data_dict["channel_meta"] = { 158 | "title": channel_json["title"], 159 | "channelUrl": channel_json["link"], 160 | "banner": None, 161 | "avatar": None, 162 | "subscriberCount": None, 163 | } 164 | 165 | video_entries = [] 166 | for entry in channel_json["item"]: 167 | video_entry = { 168 | "thumbnailUrl": entry["enclosure"]["@url"], 169 | "title": entry["title"], 170 | "author": details['id'], 171 | "views": "", 172 | "createdAt": int( 173 | dateparser.parse(entry["pubDate"]).timestamp()) * 1000, 174 | "videoUrl": f"https://www.bitchute.com/video/{entry['link'].split('/embed/')[1]}", 175 | "platform": self.BITCHUTE, 176 | "channelUrl": f"https://www.bitchute.com/channel/{details['id']}" 177 | } 178 | video_entries.append(video_entry) 179 | 180 | data_dict["ready"] = True 181 | data_dict["content"] = video_entries if len(video_entries) else None 182 | return data_dict 183 | 184 | # Parse Bitchute "listing-popular" section 185 | 186 | def get_popular(self) -> dict: 187 | data_dict = { 188 | "ready": False, 189 | "platform": self.BITCHUTE 190 | } 191 | res = self.session.get(self.BITCHUTE_BASE, headers=self._headers) 192 | if not res.ok: 193 | logger.debug( 194 | f"Failed to download bitchute popular\nReason: {res.reason}") 195 | return data_dict 196 | soup = BeautifulSoup(res.text, 'html.parser') 197 | content_section = soup.find("div", {"id": "listing-popular"}).div 198 | 199 | video_entries = [] 200 | for block in content_section: 201 | video_entry = 
{} 202 | if not hasattr(block, 'div'): 203 | continue 204 | 205 | video_entry["thumbnailUrl"] = block.find("img", src=True)[ 206 | "data-src"].strip() 207 | 208 | video_entry["title"] = block.find( 209 | "p", {"class": "video-card-title"}).a.text.strip() 210 | video_entry["author"] = block.find( 211 | "p", {"class": "video-card-channel"}).a.text.strip() 212 | video_entry["duration"] = parsed_time_to_seconds( 213 | block.find("span", {"class": "video-duration"}).text.strip()) 214 | channel_id = block.find( 215 | "p", {"class": "video-card-channel"}).a["href"] 216 | video_entry["channelUrl"] = f"{self.BITCHUTE_BASE}{channel_id}" 217 | video_entry["createdAt"] = dateparser.parse(block.find( 218 | "p", {"class": "video-card-published"}).text.strip()).timestamp() * 1000 219 | video_entry["videoUrl"] = f'{self.BITCHUTE_BASE}{block.find("a", href=True)["href"].strip()}' 220 | video_entry["platform"] = self.BITCHUTE 221 | video_entries.append(video_entry) 222 | 223 | data_dict["ready"] = True 224 | data_dict["content"] = video_entries 225 | 226 | return data_dict 227 | -------------------------------------------------------------------------------- /backend/extract_details/lbry.py: -------------------------------------------------------------------------------- 1 | import datetime 2 | import json 3 | import urllib.parse 4 | 5 | import requests 6 | 7 | 8 | class LbryProcessor: 9 | """Class to process Lbry/Odysee videos and channels.""" 10 | 11 | LBRY = "lb" 12 | _headers = { 13 | 'authority': 'api.lbry.tv', 14 | 'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537 (KHTML, like Gecko) Chrome/89 Safari/537', 15 | 'content-type': 'application/json-rpc', 16 | 'accept': '*/*', 17 | 'origin': 'https://odysee.com', 18 | 'sec-fetch-site': 'cross-site', 19 | 'sec-fetch-mode': 'cors', 20 | 'sec-fetch-dest': 'empty', 21 | 'referer': 'https://odysee.com/', 22 | 'accept-language': 'en-GB,en-US;q=0.9,en;q=0.8', 23 | } 24 | 25 | def __init__(self) -> None: 26 | self.session 
= requests.Session() 27 | 28 | def _get_details(self, lbry_urls): 29 | data = { 30 | "jsonrpc": "2.0", 31 | "method": "resolve", 32 | "params": { 33 | "urls": lbry_urls, 34 | "include_purchase_receipt": True, 35 | "include_is_my_output": True 36 | }, 37 | } 38 | 39 | response = self.session.post( 40 | 'https://api.lbry.tv/api/v1/proxy?m=resolve', headers=self._headers, data=json.dumps(data)) 41 | data = response.json() 42 | json_details = data["result"] 43 | return json_details 44 | 45 | def _get_video_url(self, lbry_url): 46 | # get video url 47 | data = { 48 | "jsonrpc": "2.0", 49 | "method": "get", 50 | "params": { 51 | "uri": lbry_url, 52 | "save_file": False, 53 | }, 54 | } 55 | response = self.session.post( 56 | 'https://api.lbry.tv/api/v1/proxy?m=get', headers=self._headers, data=json.dumps(data)) 57 | data = response.json() 58 | return data["result"]["streaming_url"] if "result" in data else None 59 | 60 | def _normal_to_lbry_url(self, normal_url): 61 | # lbry/odysee URL to lbry api accessible format 62 | protocol = "lbry://" 63 | channel_and_video = normal_url.split( 64 | "odysee.com/")[1].replace(":", "#") 65 | return f"{protocol}{channel_and_video}" 66 | 67 | def _lbry_to_normal_url(self, lbry_url): 68 | protocol = "https://odysee.com/" 69 | channel_and_video = lbry_url.split("lbry://")[1].replace("#", ":") 70 | return f"{protocol}{channel_and_video}" 71 | 72 | def search_video(self, search_query) -> dict: 73 | search_terms = search_query["query"] 74 | max_results = search_query["max"] 75 | encoded_query = urllib.parse.quote(search_terms) 76 | 77 | data_dict = { 78 | "platform": self.LBRY, 79 | "ready": False 80 | } 81 | headers = { 82 | 'Referer': 'https://odysee.com/', 83 | 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537 (KHTML, like Gecko) Chrome/89 ' 84 | 'Safari/537', 85 | } 86 | response = self.session.get( 87 | 
f'https://lighthouse.lbry.com/search?s={encoded_query}&mediaType=video&free_only=true&size={max_results}&from=0&nsfw=false', 88 | headers=headers) 89 | results_json = response.json() 90 | if not response.ok: 91 | # if no results 92 | return data_dict 93 | lbry_videos = [ 94 | f"lbry://{lbry_video['name']}#{lbry_video['claimId']}" for lbry_video in results_json] 95 | video_details = self._get_details(lbry_videos) 96 | video_entries = [] 97 | for entry in video_details: 98 | entry = video_details[entry] 99 | video_entry = self._parse_lbry_details(entry) 100 | if video_entry: 101 | video_entries.append(video_entry) 102 | 103 | data_dict["ready"] = True 104 | data_dict["content"] = video_entries 105 | return data_dict 106 | 107 | def _parse_lbry_details(self, entry) -> dict: 108 | video_entry = {} 109 | if 'value' not in entry: 110 | return {} 111 | if 'thumbnail' not in entry["value"]: 112 | return {} 113 | video_entry["platform"] = self.LBRY 114 | 115 | if 'url' in entry["value"]["thumbnail"]: 116 | video_entry["thumbnailUrl"] = entry["value"]["thumbnail"]["url"] 117 | else: 118 | video_entry[ 119 | "thumbnailUrl"] = "https://user-images.githubusercontent.com/74614193/112720980-68bab700-8ef9-11eb" \ 120 | "-9319-0e79508b6e7e.png " 121 | if "title" in entry["value"]: 122 | video_entry["title"] = entry["value"]["title"] 123 | else: 124 | video_entry["title"] = entry["name"] 125 | if entry["value_type"] == "channel": 126 | video_entry["isChannel"] = True 127 | video_entry["channelUrl"] = self._lbry_to_normal_url( 128 | entry["canonical_url"]) 129 | elif entry["value_type"] == "stream" and 'video' in entry["value"]: 130 | if 'value' not in entry["signing_channel"]: 131 | video_entry["author"] = "Anonymous" 132 | else: 133 | video_entry["author"] = entry["signing_channel"]["value"].get( 134 | "title", entry["signing_channel"]["name"]) 135 | video_entry["channelUrl"] = self._lbry_to_normal_url( 136 | entry["signing_channel"]["short_url"]) 137 | video_entry["duration"] = 
entry["value"]["video"]["duration"] 138 | video_entry["views"] = "" 139 | video_entry["createdAt"] = int(entry["timestamp"]) * 1000 140 | video_entry["videoUrl"] = self._lbry_to_normal_url( 141 | entry["canonical_url"]) 142 | else: 143 | return video_entry 144 | 145 | return video_entry 146 | 147 | def get_video_details(self, video_url): 148 | video_url = urllib.parse.unquote(video_url) 149 | lbry_url = self._normal_to_lbry_url(video_url) 150 | 151 | video_url = self._get_video_url(lbry_url) 152 | if not video_url: 153 | return None 154 | json_details = self._get_details([lbry_url])[lbry_url] 155 | claim_id = json_details["claim_id"] 156 | view_count = self._get_view_count(claim_id) 157 | 158 | video_details = self._parse_lbry_details(json_details) 159 | video_details["id"] = claim_id 160 | video_details["views"] = view_count 161 | video_details["streamUrl"] = video_url 162 | video_details["description"] = json_details["value"].get( 163 | "description", "") 164 | 165 | return video_details 166 | 167 | def _get_view_count(self, claim_id): 168 | # get auth_token cookie 169 | # session = requests.Session() 170 | # session.get('https://odysee.com/$/help') 171 | # cookies = session.cookies.get_dict() 172 | # TODO: remove the hardcoded value 173 | # cookies["auth_token"] 174 | auth_token = "5v4AcLe2fxSQ9Vxf1TV8bi4jKoxjj8Ut" 175 | 176 | response = self.session.get( 177 | f'https://api.lbry.com/file/view_count?auth_token={auth_token}&claim_id={claim_id}', headers=self._headers) 178 | data = response.json() 179 | if "success" in data and data["success"] == True: 180 | return data["data"][0] 181 | else: 182 | return 0 183 | 184 | def channel_data(self, channel_id) -> dict: 185 | channel_url = f"https://odysee.com/@{channel_id}" 186 | lbry_url = self._normal_to_lbry_url(channel_url) 187 | channel_details = self._get_details([lbry_url])[lbry_url] 188 | channel_id = channel_details["claim_id"] 189 | 190 | data = { 191 | "jsonrpc": "2.0", 192 | "method": "claim_search", 193 | 
"params": { 194 | "page_size": 20, 195 | "page": 1, 196 | "no_totals": True, 197 | "not_channel_ids": [], 198 | "not_tags": [], 199 | "order_by": [ 200 | "release_time" 201 | ], 202 | "fee_amount": ">=0", 203 | "channel_ids": [ 204 | channel_id 205 | ], 206 | "stream_types": [ 207 | "video" 208 | ], 209 | "include_purchase_receipt": True 210 | }, 211 | } 212 | 213 | response = self.session.post( 214 | 'https://api.lbry.tv/api/v1/proxy?m=claim_search', headers=self._headers, data=json.dumps(data)) 215 | 216 | data = response.json() 217 | 218 | data_dict = {"platform": self.LBRY} 219 | 220 | data_dict["channel_meta"] = { 221 | "title": channel_details["value"]["title"], 222 | "channelUrl": channel_url, 223 | "banner": channel_details["value"]["cover"]["url"], 224 | "avatar": channel_details["value"]["thumbnail"]["url"], 225 | "subscriberCount": None 226 | } 227 | 228 | video_entries = [] 229 | for entry in data["result"]["items"]: 230 | video_entry = self._parse_lbry_details(entry) 231 | if video_entry: 232 | video_entries.append(video_entry) 233 | 234 | data_dict["content"] = video_entries 235 | data_dict["ready"] = True 236 | return data_dict 237 | 238 | def get_popular(self) -> dict: 239 | week_ago_date = int( 240 | (datetime.datetime.now() - datetime.timedelta(days=7)).timestamp()) 241 | 242 | data = { 243 | "jsonrpc": "2.0", 244 | "method": "claim_search", 245 | "params": { 246 | "page_size": 20, 247 | "page": 1, 248 | "claim_type": [ 249 | "stream" 250 | ], 251 | "no_totals": True, 252 | "not_channel_ids": [], 253 | "not_tags": [], 254 | "order_by": [ 255 | "effective_amount" 256 | ], 257 | "limit_claims_per_channel": 1, 258 | "fee_amount": "<=0", 259 | "release_time": f">{week_ago_date}", 260 | "stream_types": [ 261 | "video" 262 | ], 263 | "include_purchase_receipt": True 264 | }, 265 | } 266 | 267 | response = self.session.post( 268 | 'https://api.lbry.tv/api/v1/proxy?m=claim_search', headers=self._headers, data=json.dumps(data)) 269 | if not response.ok: 
270 | return {} 271 | results_json = response.json() 272 | 273 | data_dict = {"platform": self.LBRY} 274 | video_entries = [] 275 | for entry in results_json["result"]["items"]: 276 | video_entry = self._parse_lbry_details(entry) 277 | if video_entry: 278 | video_entries.append(video_entry) 279 | 280 | data_dict["content"] = video_entries 281 | data_dict["ready"] = True 282 | return data_dict 283 | 284 | def search_for_channels(self, search_query): 285 | """Searches for channels in Lbry. 286 | 287 | Args: 288 | search_query (str): search query. 289 | """ 290 | channel_name = search_query["query"] 291 | max_results = search_query["max"] 292 | channel_name = channel_name.replace(" ", "").replace("+", "") 293 | channel_name = f"lbry://@{channel_name}" 294 | data = { 295 | "jsonrpc": "2.0", 296 | "method": "resolve", 297 | "params": { 298 | "urls": [ 299 | channel_name 300 | ], 301 | "include_purchase_receipt": True 302 | } 303 | } 304 | 305 | response = self.session.post( 306 | 'https://api.lbry.tv/api/v1/proxy?m=resolve', headers=self._headers, data=json.dumps(data)) 307 | data = response.json() 308 | 309 | data_dict = {"platform": self.LBRY} 310 | video_entries = [] 311 | for entry in data["result"]: 312 | current_entry = data["result"][entry] 313 | if "error" in current_entry: 314 | continue 315 | video_entry = self._parse_lbry_details(current_entry) 316 | if video_entry: 317 | video_entries.append(video_entry) 318 | 319 | data_dict["content"] = video_entries 320 | data_dict["ready"] = True 321 | return data_dict 322 | -------------------------------------------------------------------------------- /backend/extract_details/rumble.py: -------------------------------------------------------------------------------- 1 | import urllib.parse 2 | 3 | import requests 4 | from bs4 import BeautifulSoup 5 | from loguru import logger 6 | 7 | from utils.util import parsed_time_to_seconds, convert_str_to_number 8 | import dateutil.parser as dp 9 | 10 | # TODO: search for 
# channels


class RumbleProcessor:
    """Scrapes Rumble listing pages and video pages into plain dicts."""
    PLATFORM = "rb"
    RUMBLE_BASE = "https://rumble.com"

    def __init__(self) -> None:
        # One session so keep-alive connections are reused across requests.
        self.session = requests.Session()

    def _get_video_entries(self, target_url, parse_channel_meta=False) -> tuple:
        """Download ``target_url`` and parse its video listing.

        Args:
            target_url: a Rumble channel / listing URL.
            parse_channel_meta: also scrape channel title/banner/avatar.

        Returns:
            (video_entries, channel_meta): list of per-video dicts plus an
            optional channel-meta dict (None unless requested and available).
        """
        res = self.session.get(target_url)
        if not res.ok:
            logger.debug(
                f"Failed to download rumble channel meta\nReason: {res.reason}")
            # BUG FIX: both callers unpack two values; a bare `return []`
            # raised "not enough values to unpack" whenever the download
            # failed.
            return [], None
        soup = BeautifulSoup(res.text, 'html.parser')
        constrained_section = soup.find("div", "constrained")
        list_html = constrained_section.find("ol")
        video_list_html = list_html.find_all("li", "video-listing-entry")

        # channel meta (not present on the generic /videos listing)
        channel_meta = None
        if parse_channel_meta and "rumble.com/videos" not in target_url:
            title = constrained_section.find(
                "h1", {"class": "listing-header--title"}).text
            banner_src = constrained_section.find(
                "img", {"class": "listing-header--backsplash-img"})
            banner = banner_src["src"] if banner_src else None
            avatar_src = constrained_section.find(
                "img", {"class": "listing-header--thumb"})
            avatar = avatar_src["src"] if avatar_src else None
            # subscriber count
            subscriber_count = None
            subs_count_span = soup.find(
                "span", "subscribe-button-count")
            if subs_count_span:
                subs_count_str = subs_count_span.text.strip()
                subscriber_count = convert_str_to_number(subs_count_str)
            channel_meta = {
                "title": title,
                "channelUrl": target_url,
                "banner": banner,
                "avatar": avatar,
                "subscriberCount": subscriber_count
            }

        video_entries = []
        for block in video_list_html:
            video_entry = {}
            article = block.article

            # channel
            channel_id = article.find("a", {"rel": "author"})["href"]

            # duration ("12:44"); absent for live streams
            duration_span = article.find(
                "span", {"class": "video-item--duration"})
            duration_span_value = duration_span["data-value"].strip(
            ) if duration_span else None

            is_live = article.find(
                "span", {"class": "video-item--live"}) is not None

            # views
            views_count = None
            if "video-item--meta video-item--views" in article.text:
                views_span = article.find(
                    "span", {"class": "video-item--meta video-item--views"})["data-value"].strip()
                views_count = int(views_span.replace(",", ""))

            # upload date -> epoch milliseconds
            date_span = article.find(
                "time", {"class": "video-item--meta video-item--time"})["datetime"].strip()
            parsed_time = dp.parse(date_span)
            time_in_seconds = int(parsed_time.timestamp()) * 1000

            # TODO(me): use utc bytes of str
            video_entry["title"] = article.h3.text.strip()
            video_entry["thumbnailUrl"] = article.a.img["src"]
            video_entry["videoUrl"] = f"{self.RUMBLE_BASE}{article.a['href']}"
            video_entry["author"] = article.footer.a.text.strip()
            video_entry["duration"] = parsed_time_to_seconds(
                duration_span_value) if duration_span_value else None
            video_entry["views"] = views_count
            video_entry["platform"] = self.PLATFORM
            video_entry["createdAt"] = time_in_seconds
            video_entry["channelUrl"] = f"{self.RUMBLE_BASE}{channel_id}"
            video_entry["isLive"] = is_live
            video_entries.append(video_entry)
        return video_entries, channel_meta

    def channel_data(self, details: dict) -> dict:
        """Videos + meta for a channel id ("popular" maps to today's top)."""
        data_dict = {
            "ready": False,
            "platform": self.PLATFORM
        }
        # Validate the id before building any URL.
        if not details["id"]:
            return {}
        if details['id'] == "popular":
            channel_url = "https://rumble.com/videos?sort=views&date=today"
        else:
            channel_url = f"{self.RUMBLE_BASE}/{details['id']}"
        data_dict["ready"] = True
        data_dict["content"], data_dict["channel_meta"] = self._get_video_entries(
            channel_url, parse_channel_meta=True)
        return data_dict

    def search_for_videos(self, search_query) -> dict:
        """Search Rumble videos; ``search_query`` is {"query": str, "max": int}."""
        search_terms = search_query["query"]
        # max_results = search_query["max"]
        encoded_query = urllib.parse.quote(search_terms)

        data_dict = {
            "ready": False,
            "platform": self.PLATFORM
        }

        videos_url = f"{self.RUMBLE_BASE}/search/video?q={encoded_query}"
        data_dict["ready"] = True
        data_dict["content"], _ = self._get_video_entries(videos_url)
        return data_dict

    def get_video_details(self, video_url) -> dict:
        """Scrape a single Rumble video page plus its embed-JS metadata."""
        html_page = self.session.get(video_url).text
        video_id = html_page.split('"video":"')[1].split('","')[0]

        soup = BeautifulSoup(html_page, 'html.parser')
        # views
        heading_info_list = soup.find_all("span", "media-heading-info")
        views_count = None
        for info in heading_info_list:
            info_text = info.text.strip()
            if "Views" in info_text:
                # NOTE(review): kept as a string for backward compatibility;
                # the listing parser above uses int — confirm consumers.
                views_count = info_text.split(" ")[0].replace(",", "")

        # description
        media_description_text = soup.find(
            "div", "container content media-description").text.strip()

        # "Rumble — " if no description
        if media_description_text == "Rumble\n —":
            video_description = None
        else:
            # FIX: guard against pages without the " — " separator, which
            # used to raise IndexError.
            parts = media_description_text.split(" — ", 1)
            video_description = parts[1] if len(parts) > 1 else None

        # like count (rumbles count)
        count = int(soup.find("span", "rumbles-count").text.strip())

        # subscriber count
        subs_count = None
        subs_count_span = soup.find(
            "span", "subscribe-button-count")
        if subs_count_span:
            subs_count_str = subs_count_span.text.strip()
            subs_count = convert_str_to_number(subs_count_str)

        # get direct video source
        # https://rumble.com/embedJS/u3/?request=video&ver=2&v=video_id
        target_url = f"https://rumble.com/embedJS/u3/?request=video&ver=2&v={video_id}"
        res = self.session.get(target_url)
        if not res.ok:
            logger.debug(
                f"Failed to get rumble video source\nReason: {res.reason}")
            return {}
        meta_json = res.json()

        is_live = meta_json["live"] != 0

        parsed_time = dp.parse(meta_json["pubDate"])
        time_in_seconds = int(parsed_time.timestamp()) * 1000

        video_format = "hls" if is_live else "mp4"

        # FIX: str.strip('.html') removes any of the characters '.','h','t',
        # 'm','l' from *both ends* and could corrupt ids; remove the exact
        # suffix instead.
        video_id_slug = video_url.split(self.RUMBLE_BASE + "/")[1].strip()
        if video_id_slug.endswith(".html"):
            video_id_slug = video_id_slug[:-len(".html")]

        video_details = {
            "id": video_id_slug,
            "isLive": is_live,
            "title": meta_json["title"],
            "description": video_description,
            "author": meta_json["author"]["name"],
            "channelUrl": meta_json["author"]["url"],
            "duration": int(meta_json["duration"]),
            "views": views_count,
            "likeCount": count,
            # "dislikeCount": ,
            "subscriberCount": subs_count if subs_count else None,
            "thumbnailUrl": meta_json["i"],
            "createdAt": time_in_seconds,
            "streamUrl": meta_json["u"][video_format]["url"],
        }
        return video_details

# --------------------------------------------------------------------------
# /backend/extract_details/youtube.py
# --------------------------------------------------------------------------
import dateparser
import pytube
import requests
from youtubesearchpython import VideosSearch, ChannelsSearch
from loguru import logger

from utils.util import parsed_time_to_seconds, convert_str_to_number, is_connected


class YoutubeProcessor:
    """Class to process YouTube videos and channels."""
    YOUTUBE = "yt"

    def __init__(self) -> None:
        self.session = requests.Session()

    def get_video_details(self, video_url) -> dict:
        return self._get_video_details_pytube(video_url)

    def _get_video_details_pytube(self, video_url) -> dict:
        """Extract video meta using pytube."""
        # TODO: cache data until video url expires
        yt_object = pytube.YouTube(video_url)
is_live = False 29 | try: 30 | yt_object.check_availability() 31 | except pytube.exceptions.LiveStreamError as e: 32 | is_live = True 33 | 34 | if is_live: 35 | # TODO: live stream url 36 | video_url = None 37 | else: 38 | video_url_obj = yt_object.streams.filter( 39 | progressive=True, file_extension='mp4').order_by("resolution").desc().first() 40 | video_url = video_url_obj.url 41 | 42 | video_details = { 43 | "id": yt_object.video_id, 44 | "title": yt_object.title, 45 | "streamUrl": video_url, 46 | "description": yt_object.description, 47 | "author": yt_object.author, 48 | "duration": yt_object.length, 49 | "views": yt_object.views, 50 | "thumbnailUrl": yt_object.thumbnail_url, 51 | "channelUrl": yt_object.channel_url, 52 | "createdAt": yt_object.publish_date.timestamp() * 1000, 53 | } 54 | return video_details 55 | 56 | def search_video(self, search_query): 57 | search_words = search_query["query"] 58 | max_results = search_query["max"] 59 | 60 | results_json = VideosSearch(search_words, limit=max_results) 61 | data_dict = {"platform": self.YOUTUBE} 62 | 63 | video_entries = [] 64 | for video in results_json.result()["result"]: 65 | video_entry = {} 66 | video_entry["thumbnailUrl"] = video["thumbnails"][0]["url"].split("?sqp")[ 67 | 0] 68 | video_entry["title"] = video["title"] 69 | video_entry["author"] = video["channel"]["name"] 70 | video_entry["channelUrl"] = video["channel"]["link"] 71 | video_entry["views"] = "" # TODO: video["viewCount"]["text"] 72 | # date 73 | video_time = video["publishedTime"] 74 | if video_time: 75 | # None if still streaming 76 | if "Streamed" in video_time: 77 | video_time = video_time.split("Streamed ")[1] 78 | date_formatted = dateparser.parse(video_time) 79 | video_entry["createdAt"] = date_formatted.timestamp() * 1000 80 | # duration 81 | video_entry["duration"] = parsed_time_to_seconds(video["duration"]) 82 | video_entry["videoUrl"] = video["link"] 83 | video_entry["platform"] = self.YOUTUBE 84 | 
video_entries.append(video_entry) 85 | 86 | data_dict["content"] = video_entries 87 | data_dict["ready"] = True 88 | return data_dict 89 | 90 | def search_for_channels(self, search_query): 91 | """Searches for channels in YouTube. 92 | 93 | Args: 94 | search_query (str): search query. 95 | """ 96 | search_words = search_query["query"] 97 | max_results = search_query["max"] 98 | search_result = ChannelsSearch( 99 | search_words, limit=max_results).result() 100 | data_dict = {"platform": self.YOUTUBE} 101 | channel_entries = [] 102 | for channel in search_result["result"]: 103 | channel_entry = {} 104 | if channel["subscribers"]: 105 | sub_count_str = channel["subscribers"].split(" ")[0] 106 | channel_entry["subscriberCount"] = convert_str_to_number( 107 | sub_count_str) 108 | channel_entry["isChannel"] = True 109 | channel_entry["id"] = channel["id"] 110 | thumb = channel["thumbnails"][-1]["url"] 111 | if thumb.startswith("//"): 112 | thumb = f"https:{thumb}" 113 | channel_entry["thumbnailUrl"] = thumb 114 | channel_entry["title"] = channel["title"] 115 | channel_entry["author"] = channel["title"] 116 | channel_entry["channelUrl"] = channel["link"] 117 | channel_entry["videoCount"] = channel["videoCount"] 118 | channel_entry["platform"] = self.YOUTUBE 119 | channel_entries.append(channel_entry) 120 | 121 | data_dict["content"] = channel_entries 122 | data_dict["ready"] = True 123 | return data_dict 124 | 125 | def channel_data(self, details) -> dict: 126 | """ 127 | Extracts video list from a channel id and playlist id 128 | """ 129 | # access mobile version: https://m.youtube.com/?persist_app=1&app=m 130 | is_it_playlist = details.get("playlist") == True 131 | if is_it_playlist: 132 | channel_url = f"https://m.youtube.com/playlist?list={details['id']}" 133 | taget_url = f"{channel_url}&pbj=1" 134 | else: 135 | channel_url = f"https://m.youtube.com/channel/{details['id']}".strip( 136 | "/") 137 | taget_url = f'{channel_url}/videos?pbj=1' 138 | data_dict = {} 139 | 
data_dict["platform"] = self.YOUTUBE 140 | headers = { 141 | 'authority': 'm.youtube.com', 142 | 'x-youtube-sts': '18892', 143 | 'x-youtube-device': 'cbr=Edge+Chromium&cbrand=google&cbrver=93.0.961.52&ceng=WebKit&cengver=537.36&cmodel' 144 | '=pixel+2+xl&cos=Android&cosver=8.0.0&cplatform=MOBILE&cyear=2017', 145 | 'x-youtube-page-label': 'youtube.mobile.web.client_20210923_00_RC00', 146 | 'sec-ch-ua-arch': '', 147 | 'sec-ch-ua-platform-version': '"8.0.0"', 148 | 'x-youtube-page-cl': '398415020', 149 | 'x-spf-referer': channel_url, 150 | 'x-youtube-utc-offset': '60', 151 | 'sec-ch-ua-model': '"Pixel 2 XL"', 152 | 'x-youtube-time-zone': 'Europe/London', 153 | 'sec-ch-ua-platform': '"Android"', 154 | 'sec-ch-ua-mobile': '?1', 155 | 'user-agent': 'Mozilla/5.0 (Linux; Android 8.0.0; Pixel 2 XL Build/OPD1.170816.004) AppleWebKit/537.36 (' 156 | 'KHTML, like Gecko) Chrome/93.0.4577.82 Mobile Safari/537.36 Edg/93.0.961.52', 157 | 'sec-ch-ua-full-version': '"93.0.961.52"', 158 | 'x-youtube-client-name': '2', 159 | 'x-youtube-client-version': '2.20210923.00.00', 160 | 'sec-ch-ua': '"Microsoft Edge";v="93", " Not;A Brand";v="99", "Chromium";v="93"', 161 | 'accept': '*/*', 162 | 'sec-fetch-site': 'same-origin', 163 | 'sec-fetch-mode': 'cors', 164 | 'sec-fetch-dest': 'empty', 165 | 'referer': channel_url, 166 | 'accept-language': 'en-GB,en;q=0.9', 167 | } 168 | response = requests.get(taget_url, headers=headers) 169 | if not response.ok: 170 | data_dict["ready"] = False 171 | return data_dict 172 | resp_json = response.json() 173 | 174 | # header 175 | if not is_it_playlist: 176 | header = resp_json["response"]["header"]["c4TabbedHeaderRenderer"] 177 | subscriber_count = None 178 | if "subscriberCountText" in header: 179 | sub_count_str = header["subscriberCountText"]["runs"][0]["text"].split(" ")[ 180 | 0] 181 | subscriber_count = convert_str_to_number(sub_count_str) 182 | 183 | data_dict["channel_meta"] = { 184 | "title": header["title"], 185 | "channelUrl": 
f"https://youtube.com/channel/{header['channelId']}", 186 | "banner": header["banner"]["thumbnails"][0]["url"] if "banner" in header else None, 187 | "avatar": header["avatar"]["thumbnails"][0]["url"], 188 | "subscriberCount": subscriber_count 189 | } 190 | 191 | # tab 1 - is for videos 192 | videos_index = 0 if is_it_playlist else 1 193 | videos = resp_json["response"]["contents"]["singleColumnBrowseResultsRenderer"]["tabs"][videos_index] 194 | content = videos["tabRenderer"]["content"]["sectionListRenderer"]["contents"] 195 | video_meta_list = content[0]["itemSectionRenderer"]["contents"] 196 | 197 | if not is_it_playlist: 198 | channel_name = resp_json["response"]["metadata"]["channelMetadataRenderer"]["title"] 199 | channel_url = resp_json["response"]["metadata"]["channelMetadataRenderer"]["channelUrl"] 200 | else: 201 | channel_name = None 202 | 203 | renderer_key = "playlistVideoRenderer" if is_it_playlist else "compactVideoRenderer" 204 | if is_it_playlist: 205 | video_meta_list = video_meta_list[0]["playlistVideoListRenderer"]["contents"] 206 | 207 | video_entries = [] 208 | for entry in video_meta_list: 209 | video_entry = {} 210 | # the last item 211 | if "continuationItemRenderer" in entry: 212 | continue 213 | video_meta = entry[renderer_key] 214 | if is_it_playlist: 215 | channel_name = video_meta["shortBylineText"]["runs"][0]["text"] 216 | channel_id = video_meta["shortBylineText"]["runs"][0]["navigationEndpoint"]["browseEndpoint"][ 217 | "browseId"] 218 | channel_url = f"https://youtube.com/channel/{channel_id}" 219 | 220 | video_entry["title"] = video_meta["title"]["runs"][0]["text"] 221 | # there are 4 different sizes 222 | video_entry["thumbnailUrl"] = video_meta["thumbnail"]["thumbnails"][1]["url"] 223 | if not is_it_playlist: 224 | video_entry["channelThumbnail"] = video_meta["channelThumbnail"]["thumbnails"][0]["url"] 225 | if "publishedTimeText" in video_meta: 226 | date_str = video_meta["publishedTimeText"]["runs"][0]["text"].lower( 227 | ) 
228 | date_str = date_str.replace("streamed", "").strip() 229 | video_entry["createdAt"] = int( 230 | dateparser.parse(date_str).timestamp()) * 1000 231 | if "viewCountText" in video_meta: 232 | # 2,403 views 233 | views_str = video_meta["viewCountText"]["runs"][0]["text"].split(" ")[ 234 | 0] 235 | # No views 236 | if views_str == "No": 237 | views_str = "0" 238 | video_entry["views"] = int(views_str.replace(',', '')) 239 | video_entry["videoUrl"] = f"https://www.youtube.com/watch?v={video_meta['videoId']}" 240 | # 16:33 241 | if "lengthText" in video_meta: 242 | duration_str = video_meta["lengthText"]["runs"][0]["text"] 243 | video_entry["duration"] = parsed_time_to_seconds(duration_str) 244 | else: 245 | video_entry["duration"] = None 246 | 247 | video_entry["channelUrl"] = channel_url 248 | video_entry["author"] = channel_name 249 | 250 | video_entry["platform"] = self.YOUTUBE 251 | video_entries.append(video_entry) 252 | data_dict["content"] = video_entries 253 | data_dict["ready"] = True 254 | return data_dict 255 | -------------------------------------------------------------------------------- /backend/main.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime 2 | 3 | import uvicorn 4 | import sys 5 | from fastapi import FastAPI 6 | from fastapi.middleware.cors import CORSMiddleware 7 | from fastapi_utils.tasks import repeat_every 8 | from loguru import logger 9 | from pydantic import BaseModel 10 | 11 | import utils.optimize as optimize 12 | from extract_details.bitchute import BitchuteProcessor 13 | from extract_details.lbry import LbryProcessor 14 | from extract_details.rumble import RumbleProcessor 15 | from extract_details.youtube import YoutubeProcessor 16 | from utils.spelling import ginger_check_sentence 17 | 18 | app = FastAPI() 19 | bc_processor = BitchuteProcessor() 20 | yt_processor = YoutubeProcessor() 21 | lbry_processor = LbryProcessor() 22 | rb_processor = RumbleProcessor() 23 
| 24 | 25 | def debugger_is_active() -> bool: 26 | """Return if the debugger is currently active""" 27 | gettrace = getattr(sys, 'gettrace', lambda: None) 28 | return gettrace() is not None 29 | 30 | 31 | # Disable caching when debugged 32 | optimize.DISABLE_CACHE = debugger_is_active() 33 | 34 | ALLOWED_HOSTS = None 35 | if not ALLOWED_HOSTS: 36 | ALLOWED_HOSTS = ["*"] 37 | 38 | app.add_middleware( 39 | CORSMiddleware, 40 | allow_origins=ALLOWED_HOSTS, 41 | allow_credentials=True, 42 | allow_methods=["*"], 43 | allow_headers=["*"], 44 | ) 45 | 46 | YOUTUBE = "yt" 47 | LBRY = "lb" 48 | BITCHUTE = "bc" 49 | RUMBLE = "rb" 50 | 51 | 52 | class RequestDetails(BaseModel): 53 | platform: str 54 | id: str 55 | 56 | 57 | class SearchQuery(BaseModel): 58 | query: str 59 | max: int 60 | 61 | 62 | class JustString(BaseModel): 63 | query: str 64 | 65 | 66 | LB_VIDEO_URL = "https://odysee.com/" 67 | YT_VIDEO_URL = "https://www.youtube.com/watch?v=" 68 | BT_VIDEO_URL = "https://www.bitchute.com/video/" 69 | RB_VIDEO_URL = "https://rumble.com/" 70 | 71 | # global list of channel URLs to prefetch them each hour. 72 | # Only prefetched recently requested URLs 73 | global_yt_urls = {} 74 | global_lbry_urls = {} 75 | global_bc_urls = {} 76 | global_rb_urls = {} 77 | 78 | 79 | async def prefetch_channels(platform, channels, source_function) -> None: 80 | """ 81 | Prefetch channels requested within a day, remove rest. 
82 | """ 83 | details = {"platform": platform} 84 | now = datetime.utcnow() 85 | for (channel_id, req_date) in channels.items(): 86 | details["id"] = channel_id 87 | difference = now - req_date 88 | if difference.days != 0: 89 | del channels[channel_id] 90 | continue 91 | logger.debug( 92 | f"prefetch: {details['channel_id']} - {details['platform']}") 93 | await optimize.optimized_request(dict(details), source_function, 1, forced=True) 94 | 95 | 96 | @app.on_event("startup") 97 | @repeat_every(seconds=60 * 50) # 50 mins 98 | async def prefetch_yt_channels() -> None: 99 | await prefetch_channels(YOUTUBE, global_yt_urls, yt_processor.channel_data) 100 | 101 | 102 | @app.on_event("startup") 103 | @repeat_every(seconds=60 * 50) # 50 mins 104 | async def prefetch_lbry_channels() -> None: 105 | await prefetch_channels(LBRY, global_lbry_urls, get_lbry_channel_source) 106 | 107 | 108 | @app.on_event("startup") 109 | @repeat_every(seconds=60 * 50) # 50 mins 110 | async def prefetch_bc_channels() -> None: 111 | await prefetch_channels(BITCHUTE, global_bc_urls, bc_processor.channel_data) 112 | 113 | 114 | @app.on_event("startup") 115 | @repeat_every(seconds=60 * 50) # 50 mins 116 | async def prefetch_rb_channels() -> None: 117 | await prefetch_channels(RUMBLE, global_rb_urls, rb_processor.channel_data) 118 | 119 | 120 | @app.post("/api/check") 121 | async def check_sentence(just_string: JustString): 122 | return ginger_check_sentence(just_string.query) 123 | 124 | 125 | @app.post("/api/video/") 126 | async def get_video(details: RequestDetails) -> dict: 127 | details.id = details.id.strip().strip("/") 128 | # YT video link expires 129 | if details.platform == YOUTUBE: 130 | return get_video_from_source(dict(details)) 131 | return await optimize.optimized_request(dict(details), get_video_from_source, 72) 132 | 133 | 134 | def get_video_from_source(details: dict) -> dict: 135 | result = {'ready': False} 136 | 137 | # prepare video_url 138 | video_url = None 139 | if 
details["platform"] == LBRY: 140 | video_url = LB_VIDEO_URL + details["id"] 141 | elif details["platform"] == YOUTUBE: 142 | video_url = YT_VIDEO_URL + details["id"] 143 | elif details["platform"] == BITCHUTE: 144 | video_url = BT_VIDEO_URL + details["id"] 145 | elif details["platform"] == RUMBLE: 146 | video_url = RB_VIDEO_URL + details["id"] + ".html" 147 | else: 148 | return result 149 | 150 | # our extractors 151 | if details["platform"] == YOUTUBE: 152 | result["platform"] = YOUTUBE 153 | result["content"] = yt_processor.get_video_details(video_url) 154 | result['ready'] = result["content"] is not None 155 | return result 156 | elif details["platform"] == BITCHUTE: 157 | result["platform"] = BITCHUTE 158 | result["content"] = bc_processor.get_video_details(video_url) 159 | result['ready'] = result["content"] is not None 160 | return result 161 | elif details["platform"] == LBRY: 162 | result["platform"] = LBRY 163 | result["content"] = lbry_processor.get_video_details(video_url) 164 | result['ready'] = result["content"] is not None 165 | return result 166 | elif details["platform"] == RUMBLE: 167 | result["platform"] = RUMBLE 168 | result["content"] = rb_processor.get_video_details(video_url) 169 | result['ready'] = result["content"] is not None 170 | return result 171 | else: 172 | return result 173 | 174 | 175 | # YouTube channel to JSON 176 | @app.post("/api/youtube/c/") 177 | async def get_youtube_channel(details: RequestDetails) -> dict: 178 | details = dict(details) 179 | details["id"] = details["id"].strip().strip("/") 180 | details["channel"] = True 181 | global_yt_urls[details["id"]] = datetime.utcnow() 182 | return await optimize.optimized_request( 183 | dict(details), 184 | yt_processor.channel_data, 185 | 1) 186 | 187 | 188 | # YouTube playlist to JSON 189 | @app.post("/api/youtube/p/") 190 | async def get_youtube_playlist(details: RequestDetails) -> dict: 191 | details = dict(details) 192 | details["id"] = details["id"].strip().strip("/") 193 | if 
details["id"] == "popular": 194 | details["id"] = "PLrEnWoR732-BHrPp_Pm8_VleD68f9s14-" 195 | details["playlist"] = True 196 | return await optimize.optimized_request( 197 | dict(details), 198 | yt_processor.channel_data, 199 | 1) 200 | 201 | 202 | # search youtube videos 203 | @app.post("/api/youtube/search/") 204 | async def youtube_search_results(search_query: SearchQuery) -> dict: 205 | search_query = dict(search_query) 206 | search_query["platform"] = YOUTUBE 207 | result = await optimize.optimized_request( 208 | dict(search_query), 209 | yt_processor.search_video, 210 | 1) 211 | return result 212 | 213 | 214 | # search youtube channels 215 | @app.post("/api/youtube/channels/") 216 | async def youtube_search_channels(search_query: SearchQuery) -> dict: 217 | search_query = dict(search_query) 218 | search_query["platform"] = YOUTUBE 219 | search_query["max"] = 3 220 | result = await optimize.optimized_request( 221 | dict(search_query), 222 | yt_processor.search_for_channels, 223 | 1) 224 | return result 225 | 226 | 227 | # search Lbry channels 228 | @app.post("/api/lbry/channels/") 229 | async def lbry_search_channels(search_query: SearchQuery) -> dict: 230 | search_query = dict(search_query) 231 | search_query["platform"] = LBRY 232 | search_query["max"] = 3 233 | result = await optimize.optimized_request( 234 | dict(search_query), 235 | lbry_processor.search_for_channels, 236 | 1) 237 | return result 238 | 239 | 240 | # search bitchute videos 241 | @app.post("/api/bitchute/search/") 242 | async def bitchute_search_results(search_query: SearchQuery) -> dict: 243 | search_query = dict(search_query) 244 | search_query["platform"] = BITCHUTE 245 | result = await optimize.optimized_request( 246 | dict(search_query), 247 | bc_processor.search_video, 248 | 1) 249 | return result 250 | 251 | 252 | # search youtube videos 253 | @app.post("/api/lbry/search/") 254 | async def lbry_search_results(search_query: SearchQuery) -> dict: 255 | search_query = dict(search_query) 
256 | search_query["platform"] = LBRY 257 | result = await optimize.optimized_request( 258 | dict(search_query), 259 | lbry_processor.search_video, 260 | 1) 261 | return result 262 | 263 | 264 | # search rumble videos 265 | @app.post("/api/rumble/search/") 266 | async def rb_search_results(search_query: SearchQuery) -> dict: 267 | search_query = dict(search_query) 268 | search_query["platform"] = RUMBLE 269 | result = await optimize.optimized_request( 270 | dict(search_query), 271 | rb_processor.search_for_videos, 272 | 1) 273 | return result 274 | 275 | 276 | # Lbry/Odysee channel to JSON 277 | @app.post("/api/lbry/c/") 278 | async def get_lbry_channel(details: RequestDetails) -> dict: 279 | details = dict(details) 280 | details["id"] = details["id"].strip().strip("/") 281 | details["channel"] = True 282 | global_lbry_urls[details["id"]] = datetime.utcnow() 283 | return await optimize.optimized_request( 284 | dict(details), 285 | get_lbry_channel_source, 286 | 1) 287 | 288 | 289 | def get_lbry_channel_source(details: dict) -> dict: 290 | if details['id'] == "popular": 291 | return lbry_processor.get_popular() 292 | return lbry_processor.channel_data(details['id']) 293 | 294 | 295 | # Rumble channel to JSON 296 | @app.post("/api/rumble/c/") 297 | async def get_lbry_channel(details: RequestDetails) -> dict: 298 | details = dict(details) 299 | details["id"] = details["id"].strip().strip("/") 300 | global_rb_urls[details["id"]] = datetime.utcnow() 301 | return await optimize.optimized_request( 302 | dict(details), 303 | rb_processor.channel_data, 304 | 1) 305 | 306 | 307 | # BitChute channel to JSON 308 | @app.post("/api/bitchute/c/") 309 | async def get_bitchute_channel(details: RequestDetails): 310 | details = dict(details) 311 | details["id"] = details["id"].strip().strip("/") 312 | details["channel"] = True 313 | global_bc_urls[details["id"]] = datetime.utcnow() 314 | return await optimize.optimized_request( 315 | dict(details), 316 | bc_processor.channel_data, 
317 | 1) 318 | 319 | 320 | if __name__ == "__main__": 321 | uvicorn.run(app, host="0.0.0.0", port=8000) 322 | -------------------------------------------------------------------------------- /backend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "dependencies": { 3 | "@tailwindcss/jit": "^0.1.1", 4 | "autoprefixer": "^10.2.5", 5 | "humanize-duration": "^3.25.2", 6 | "postcss": "^8.2.8", 7 | "tailwindcss": "^2.0.3" 8 | } 9 | } 10 | -------------------------------------------------------------------------------- /backend/redis/docker-compose.yml: -------------------------------------------------------------------------------- 1 | # docker-compose.yml 2 | 3 | version: "3.2" 4 | services: 5 | redis: 6 | container_name: redis-cont 7 | image: "redis:alpine" 8 | environment: 9 | - REDIS_PASSWORD=retube 10 | - REDIS_REPLICATION_MODE=master 11 | ports: 12 | - "6379:6379" 13 | volumes: 14 | # save redisearch data to your current working directory 15 | - ./redis-data:/data 16 | command: 17 | # Save if 100 keys are added in every 10 seconds 18 | - "--save 10 100" 19 | # Set password 20 | - "--requirepass retube" 21 | 22 | volumes: 23 | redis-data: -------------------------------------------------------------------------------- /backend/requirements.txt: -------------------------------------------------------------------------------- 1 | beautifulsoup4 2 | fastapi 3 | fastapi_utils 4 | requests 5 | uvicorn 6 | xmltodict 7 | dateparser 8 | redis 9 | youtube-search-python 10 | pytube 11 | gingerit 12 | loguru 13 | cloudscraper 14 | pydantic -------------------------------------------------------------------------------- /backend/test.py: -------------------------------------------------------------------------------- 1 | from fastapi.testclient import TestClient 2 | 3 | from main import app 4 | 5 | client = TestClient(app) 6 | 7 | # test /api/video - request video details 8 | 9 | 10 | def test_get_video_yt(): 11 | """ 
Test /api/video for Youtube. """ 12 | response = client.post( 13 | "/api/video/", 14 | json={"id": "jNQXAC9IVRw", "platform": "yt"}, 15 | ) 16 | assert response.status_code == 200 17 | json_object = response.json() 18 | assert json_object["ready"] == True 19 | assert json_object["platform"] == 'yt' 20 | assert json_object["content"]["id"] == 'jNQXAC9IVRw' 21 | assert json_object["content"]["description"] == 'The first video on YouTube. While you wait for Part 2, listen to ' \ 22 | 'this great song: https://www.youtube.com/watch?v=zj82_v2R6ts ' 23 | assert json_object["content"]["author"] == 'jawed' 24 | assert json_object["content"]["channelUrl"] == 'https://www.youtube.com/channel/UC4QobU6STFB0P71PMvOGN5A' 25 | assert json_object["content"]["title"] == 'Me at the zoo' 26 | assert "ytimg.com/" in json_object["content"]["thumbnailUrl"] 27 | assert "googlevideo.com/" in json_object["content"]["streamUrl"] 28 | 29 | 30 | def test_get_video_lbry(): 31 | """ Test /api/video for Lbry/Odysee. """ 32 | response = client.post( 33 | "/api/video/", 34 | json={"id": "@lbry:3f/odysee:7", "platform": "lb"}, 35 | ) 36 | assert response.status_code == 200 37 | json_object = response.json() 38 | assert json_object["ready"] == True 39 | assert json_object["platform"] == 'lb' 40 | assert json_object["content"]["id"] == '7a416c44a6888d94fe045241bbac055c726332aa' 41 | assert json_object["content"]["description"] == 'Big thanks to @MH for this ❤️' 42 | assert json_object["content"]["author"] == 'LBRY' 43 | assert json_object["content"]["channelUrl"] == 'https://odysee.com/@lbry:3f' 44 | assert json_object["content"]["title"] == 'Introducing Odysee: A Short Video' 45 | assert "spee.ch/" in json_object["content"]["thumbnailUrl"] 46 | assert "lbryplayer.xyz/" in json_object["content"]["streamUrl"] 47 | 48 | 49 | def test_get_video_bc(): 50 | """ Test /api/video for Bitchute. 
""" 51 | response = client.post( 52 | "/api/video/", 53 | json={"id": "UGlrF9o9b-Q", "platform": "bc"}, 54 | ) 55 | assert response.status_code == 200 56 | json_object = response.json() 57 | assert json_object["ready"] == True 58 | assert json_object["platform"] == 'bc' 59 | assert json_object["content"]["id"] == 'UGlrF9o9b-Q' 60 | assert json_object["content"]["author"] == 'BitChute' 61 | assert json_object["content"]["channelUrl"] == 'https://bitchute.com/channel/bitchute/' 62 | assert json_object["content"]["title"] == 'This is the first video on #BitChute !' 63 | assert "bitchute.com/" in json_object["content"]["thumbnailUrl"] 64 | assert "/UGlrF9o9b-Q.mp4" in json_object["content"]["streamUrl"] 65 | -------------------------------------------------------------------------------- /backend/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PrivOci/ReTube/36606b58c5d8a3a73f10a271625c1099ecce9730/backend/utils/__init__.py -------------------------------------------------------------------------------- /backend/utils/optimize.py: -------------------------------------------------------------------------------- 1 | import json 2 | import socket 3 | import sys 4 | from datetime import timedelta 5 | 6 | import redis 7 | 8 | DISABLE_CACHE = False 9 | 10 | 11 | def is_open(ip, port): 12 | s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 13 | try: 14 | s.connect((ip, int(port))) 15 | s.shutdown(2) 16 | return True 17 | except: 18 | return False 19 | 20 | 21 | def redis_connect() -> redis.client.Redis: 22 | host = "localhost" if is_open("localhost", 6379) else "redis" 23 | try: 24 | redis_client = redis.Redis( 25 | host=host, 26 | port=6379, 27 | password="retube", 28 | db=0, 29 | socket_timeout=5, 30 | ) 31 | ping = redis_client.ping() 32 | if ping is True: 33 | return redis_client 34 | except redis.AuthenticationError: 35 | print("AuthenticationError") 36 | sys.exit(1) 37 | 38 | 39 | 
client = redis_connect() 40 | 41 | 42 | def set_cache(key: str, value: str, hours=24) -> bool: 43 | """Data to redis - 1 hour""" 44 | state = client.setex(key, timedelta(hours=hours), value=value, ) 45 | return state 46 | 47 | 48 | async def get_from_cache(key: str) -> str: 49 | """Data from redis.""" 50 | val = client.get(key) 51 | return val 52 | 53 | 54 | async def optimized_request(details, get_from_source, hours=24, forced=False): 55 | key_for_redis = json.dumps(details) 56 | # First it looks for the data in redis cache 57 | data = await get_from_cache(key_for_redis) if not DISABLE_CACHE else None 58 | 59 | # If cache is found then serves the data from cache 60 | if data is not None and forced is False: 61 | data = json.loads(data) 62 | data["cache"] = True 63 | return data 64 | 65 | else: 66 | # If cache is not found then sends request to the MapBox API 67 | data = get_from_source(details) 68 | if not data: 69 | return { 70 | "ready": "False" 71 | } 72 | 73 | # This block sets saves the respose to redis and serves it directly 74 | if data.get("ready"): 75 | data["cache"] = False 76 | state = set_cache(key_for_redis, json.dumps(data), hours) 77 | 78 | if state is True: 79 | return data 80 | return data 81 | -------------------------------------------------------------------------------- /backend/utils/spelling.py: -------------------------------------------------------------------------------- 1 | import urllib.parse 2 | 3 | from gingerit.gingerit import GingerIt 4 | 5 | parser = GingerIt() 6 | 7 | 8 | def ginger_check_sentence(sentence): 9 | """Corrects spelling and grammar mistakes based on the context of complete sentences. 
import urllib.parse

from gingerit.gingerit import GingerIt

parser = GingerIt()


def ginger_check_sentence(sentence):
    """Corrects spelling and grammar mistakes based on the context of complete sentences.

    Args:
        sentence ([string]): Sentence with a possible mistake

    Returns:
        [(bool, string)]: True/False and corrected sentence
    """
    sentence = urllib.parse.unquote_plus(sentence)
    result = parser.parse(sentence)
    result_change = result["result"]
    # Only report a change when Ginger produced corrections AND the text
    # actually differs (ignoring case).
    changed = bool(len(result["corrections"])) and (
        sentence.lower() != result_change.lower())
    return {"need_change": changed, "result": result_change}


from itertools import count

import requests
import socket
import xmltodict
from loguru import logger


def get_xml_stream_as_json(xml_url, session=None):
    """Download an XML feed and return it parsed into a dict.

    Args:
        xml_url: URL of the XML feed.
        session: Optional requests.Session; defaults to the requests module.

    Returns:
        The xmltodict-parsed document, or None on any download failure.
    """
    if not session:
        session = requests
    try:
        req = session.get(xml_url)
    except requests.ConnectionError as e:
        logger.error(f"URL: {xml_url}\nerror:\n{e}")
        return None
    except requests.Timeout as e:
        logger.error(f"URL: {xml_url}\nerror:\n{e}")
        return None
    # Bug fix: the original tested `req and not req.ok`, but
    # requests.Response.__bool__ *is* Response.ok, so that branch could never
    # fire and error statuses were parsed anyway.
    if not req.ok:
        logger.debug(f"Failed to download: {xml_url}")
        return None
    return xmltodict.parse(req.text)


def parsed_time_to_seconds(human_time):
    """Convert a "HH:MM:SS" / "MM:SS" style string to total seconds.

    # 12:44 => number

    Returns None for empty/None input.
    """
    if not human_time:
        return None
    time_parts = human_time.split(":")

    # Weight each part by its position from the right: 60**0, 60**1, ...
    def part_to_seconds(part, order): return int(part) * pow(60, order)

    return sum(map(part_to_seconds, reversed(time_parts), count()))


def convert_str_to_number(x):
    """Parse a human-readable count like "1.2K" / "3M" / "1,5B" into an int.

    Unknown suffixes fall back to a multiplier of 1; unparseable one-character
    non-digit input yields 0.
    """
    total_stars = 0
    num_map = {'K': 1000, 'M': 1000000, 'B': 1000000000}
    # A comma is treated as a decimal separator ("1,2K" -> "1.2K").
    x = x.replace(",", ".")
    if x.isdigit():
        total_stars = int(x)
    elif len(x) > 1:
        total_stars = float(x[:-1]) * num_map.get(x[-1].upper(), 1)
    return int(total_stars)


def is_connected():
    """Best-effort internet connectivity probe against Cloudflare DNS.

    Returns:
        True when one.one.one.one resolves and accepts a TCP connection on
        port 80 within 2 seconds; False otherwise.
    """
    REMOTE_SERVER = "one.one.one.one"
    try:
        # see if we can resolve the host name -- tells us if there is
        # a DNS listening
        host = socket.gethostbyname(REMOTE_SERVER)
        # connect to the host -- tells us if the host is actually
        # reachable
        s = socket.create_connection((host, 80), 2)
        s.close()
        return True
    except OSError:
        # Narrowed from a bare `except:`; return an explicit False instead of
        # the original's implicit None (both falsy, so callers are unaffected).
        return False
"@nodelib/fs.scandir" "2.1.4" 31 | fastq "^1.6.0" 32 | 33 | "@tailwindcss/jit@^0.1.1": 34 | version "0.1.18" 35 | resolved "https://registry.yarnpkg.com/@tailwindcss/jit/-/jit-0.1.18.tgz#f44ac25b347ad1b4056af4fbda69399070206825" 36 | integrity sha512-WNSEiwbggtO9n6+ok2fFdYmhqY20oqLmB82H23nY8P5WzijZbIshojoY3s/OvPD7cmvzkweZ6LLKGWuDS1/vLA== 37 | dependencies: 38 | chokidar "^3.5.1" 39 | dlv "^1.1.3" 40 | fast-glob "^3.2.5" 41 | lodash.topath "^4.5.2" 42 | normalize-path "^3.0.0" 43 | object-hash "^2.1.1" 44 | parse-glob "^3.0.4" 45 | postcss-selector-parser "^6.0.4" 46 | quick-lru "^5.1.1" 47 | 48 | acorn-node@^1.6.1: 49 | version "1.8.2" 50 | resolved "https://registry.yarnpkg.com/acorn-node/-/acorn-node-1.8.2.tgz#114c95d64539e53dede23de8b9d96df7c7ae2af8" 51 | integrity sha512-8mt+fslDufLYntIoPAaIMUe/lrbrehIiwmR3t2k9LljIzoigEPF27eLk2hy8zSGzmR/ogr7zbRKINMo1u0yh5A== 52 | dependencies: 53 | acorn "^7.0.0" 54 | acorn-walk "^7.0.0" 55 | xtend "^4.0.2" 56 | 57 | acorn-walk@^7.0.0: 58 | version "7.2.0" 59 | resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-7.2.0.tgz#0de889a601203909b0fbe07b8938dc21d2e967bc" 60 | integrity sha512-OPdCF6GsMIP+Az+aWfAAOEt2/+iVDKE7oy6lJ098aoe59oAmK76qV6Gw60SbZ8jHuG2wH058GF4pLFbYamYrVA== 61 | 62 | acorn@^7.0.0: 63 | version "7.4.1" 64 | resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.1.tgz#feaed255973d2e77555b83dbc08851a6c63520fa" 65 | integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== 66 | 67 | ansi-styles@^3.2.1: 68 | version "3.2.1" 69 | resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" 70 | integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== 71 | dependencies: 72 | color-convert "^1.9.0" 73 | 74 | ansi-styles@^4.1.0: 75 | version "4.3.0" 76 | resolved 
"https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937" 77 | integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg== 78 | dependencies: 79 | color-convert "^2.0.1" 80 | 81 | anymatch@~3.1.1: 82 | version "3.1.2" 83 | resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.2.tgz#c0557c096af32f106198f4f4e2a383537e378716" 84 | integrity sha512-P43ePfOAIupkguHUycrc4qJ9kz8ZiuOUijaETwX7THt0Y/GNK7v0aa8rY816xWjZ7rJdA5XdMcpVFTKMq+RvWg== 85 | dependencies: 86 | normalize-path "^3.0.0" 87 | picomatch "^2.0.4" 88 | 89 | at-least-node@^1.0.0: 90 | version "1.0.0" 91 | resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2" 92 | integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg== 93 | 94 | autoprefixer@^10.2.5: 95 | version "10.2.5" 96 | resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-10.2.5.tgz#096a0337dbc96c0873526d7fef5de4428d05382d" 97 | integrity sha512-7H4AJZXvSsn62SqZyJCP+1AWwOuoYpUfK6ot9vm0e87XD6mT8lDywc9D9OTJPMULyGcvmIxzTAMeG2Cc+YX+fA== 98 | dependencies: 99 | browserslist "^4.16.3" 100 | caniuse-lite "^1.0.30001196" 101 | colorette "^1.2.2" 102 | fraction.js "^4.0.13" 103 | normalize-range "^0.1.2" 104 | postcss-value-parser "^4.1.0" 105 | 106 | balanced-match@^1.0.0: 107 | version "1.0.2" 108 | resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.2.tgz#e83e3a7e3f300b34cb9d87f615fa0cbf357690ee" 109 | integrity sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw== 110 | 111 | binary-extensions@^2.0.0: 112 | version "2.2.0" 113 | resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-2.2.0.tgz#75f502eeaf9ffde42fc98829645be4ea76bd9e2d" 114 | integrity sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA== 
115 | 116 | brace-expansion@^1.1.7: 117 | version "1.1.11" 118 | resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" 119 | integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== 120 | dependencies: 121 | balanced-match "^1.0.0" 122 | concat-map "0.0.1" 123 | 124 | braces@^3.0.1, braces@~3.0.2: 125 | version "3.0.2" 126 | resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" 127 | integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== 128 | dependencies: 129 | fill-range "^7.0.1" 130 | 131 | browserslist@^4.16.3: 132 | version "4.16.5" 133 | resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.16.5.tgz#952825440bca8913c62d0021334cbe928ef062ae" 134 | integrity sha512-C2HAjrM1AI/djrpAUU/tr4pml1DqLIzJKSLDBXBrNErl9ZCCTXdhwxdJjYc16953+mBWf7Lw+uUJgpgb8cN71A== 135 | dependencies: 136 | caniuse-lite "^1.0.30001214" 137 | colorette "^1.2.2" 138 | electron-to-chromium "^1.3.719" 139 | escalade "^3.1.1" 140 | node-releases "^1.1.71" 141 | 142 | bytes@^3.0.0: 143 | version "3.1.0" 144 | resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" 145 | integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== 146 | 147 | camelcase-css@^2.0.1: 148 | version "2.0.1" 149 | resolved "https://registry.yarnpkg.com/camelcase-css/-/camelcase-css-2.0.1.tgz#ee978f6947914cc30c6b44741b6ed1df7f043fd5" 150 | integrity sha512-QOSvevhslijgYwRx6Rv7zKdMF8lbRmx+uQGx2+vDc+KI/eBnsy9kit5aj23AgGu3pa4t9AgwbnXWqS+iOY+2aA== 151 | 152 | caniuse-lite@^1.0.30001196, caniuse-lite@^1.0.30001214: 153 | version "1.0.30001214" 154 | resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001214.tgz#70f153c78223515c6d37a9fde6cd69250da9d872" 155 | integrity 
sha512-O2/SCpuaU3eASWVaesQirZv1MSjUNOvmugaD8zNSJqw6Vv5SGwoOpA9LJs3pNPfM745nxqPvfZY3MQKY4AKHYg== 156 | 157 | chalk@^2.4.1: 158 | version "2.4.2" 159 | resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" 160 | integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== 161 | dependencies: 162 | ansi-styles "^3.2.1" 163 | escape-string-regexp "^1.0.5" 164 | supports-color "^5.3.0" 165 | 166 | chalk@^4.1.0: 167 | version "4.1.1" 168 | resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.1.tgz#c80b3fab28bf6371e6863325eee67e618b77e6ad" 169 | integrity sha512-diHzdDKxcU+bAsUboHLPEDQiw0qEe0qd7SYUn3HgcFlWgbDcfLGswOHYeGrHKzG9z6UYf01d9VFMfZxPM1xZSg== 170 | dependencies: 171 | ansi-styles "^4.1.0" 172 | supports-color "^7.1.0" 173 | 174 | chokidar@^3.5.1: 175 | version "3.5.1" 176 | resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-3.5.1.tgz#ee9ce7bbebd2b79f49f304799d5468e31e14e68a" 177 | integrity sha512-9+s+Od+W0VJJzawDma/gvBNQqkTiqYTWLuZoyAsivsI4AaWTCzHG06/TMjsf1cYe9Cb97UCEhjz7HvnPk2p/tw== 178 | dependencies: 179 | anymatch "~3.1.1" 180 | braces "~3.0.2" 181 | glob-parent "~5.1.0" 182 | is-binary-path "~2.1.0" 183 | is-glob "~4.0.1" 184 | normalize-path "~3.0.0" 185 | readdirp "~3.5.0" 186 | optionalDependencies: 187 | fsevents "~2.3.1" 188 | 189 | color-convert@^1.9.0, color-convert@^1.9.1: 190 | version "1.9.3" 191 | resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" 192 | integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== 193 | dependencies: 194 | color-name "1.1.3" 195 | 196 | color-convert@^2.0.1: 197 | version "2.0.1" 198 | resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" 199 | integrity 
sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== 200 | dependencies: 201 | color-name "~1.1.4" 202 | 203 | color-name@1.1.3: 204 | version "1.1.3" 205 | resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" 206 | integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= 207 | 208 | color-name@^1.0.0, color-name@~1.1.4: 209 | version "1.1.4" 210 | resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" 211 | integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== 212 | 213 | color-string@^1.5.4: 214 | version "1.5.5" 215 | resolved "https://registry.yarnpkg.com/color-string/-/color-string-1.5.5.tgz#65474a8f0e7439625f3d27a6a19d89fc45223014" 216 | integrity sha512-jgIoum0OfQfq9Whcfc2z/VhCNcmQjWbey6qBX0vqt7YICflUmBCh9E9CiQD5GSJ+Uehixm3NUwHVhqUAWRivZg== 217 | dependencies: 218 | color-name "^1.0.0" 219 | simple-swizzle "^0.2.2" 220 | 221 | color@^3.1.3: 222 | version "3.1.3" 223 | resolved "https://registry.yarnpkg.com/color/-/color-3.1.3.tgz#ca67fb4e7b97d611dcde39eceed422067d91596e" 224 | integrity sha512-xgXAcTHa2HeFCGLE9Xs/R82hujGtu9Jd9x4NW3T34+OMs7VoPsjwzRczKHvTAHeJwWFwX5j15+MgAppE8ztObQ== 225 | dependencies: 226 | color-convert "^1.9.1" 227 | color-string "^1.5.4" 228 | 229 | colorette@^1.2.2: 230 | version "1.2.2" 231 | resolved "https://registry.yarnpkg.com/colorette/-/colorette-1.2.2.tgz#cbcc79d5e99caea2dbf10eb3a26fd8b3e6acfa94" 232 | integrity sha512-MKGMzyfeuutC/ZJ1cba9NqcNpfeqMUcYmyF1ZFY6/Cn7CNSAKx6a+s48sqLqyAiZuaP2TcqMhoo+dlwFnVxT9w== 233 | 234 | commander@^6.0.0: 235 | version "6.2.1" 236 | resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" 237 | integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== 238 | 239 | concat-map@0.0.1: 240 | version 
"0.0.1" 241 | resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" 242 | integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= 243 | 244 | css-unit-converter@^1.1.1: 245 | version "1.1.2" 246 | resolved "https://registry.yarnpkg.com/css-unit-converter/-/css-unit-converter-1.1.2.tgz#4c77f5a1954e6dbff60695ecb214e3270436ab21" 247 | integrity sha512-IiJwMC8rdZE0+xiEZHeru6YoONC4rfPMqGm2W85jMIbkFvv5nFTwJVFHam2eFrN6txmoUYFAFXiv8ICVeTO0MA== 248 | 249 | cssesc@^3.0.0: 250 | version "3.0.0" 251 | resolved "https://registry.yarnpkg.com/cssesc/-/cssesc-3.0.0.tgz#37741919903b868565e1c09ea747445cd18983ee" 252 | integrity sha512-/Tb/JcjK111nNScGob5MNtsntNM1aCNUDipB/TkwZFhyDrrE47SOx/18wF2bbjgc3ZzCSKW1T5nt5EbFoAz/Vg== 253 | 254 | defined@^1.0.0: 255 | version "1.0.0" 256 | resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" 257 | integrity sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM= 258 | 259 | detective@^5.2.0: 260 | version "5.2.0" 261 | resolved "https://registry.yarnpkg.com/detective/-/detective-5.2.0.tgz#feb2a77e85b904ecdea459ad897cc90a99bd2a7b" 262 | integrity sha512-6SsIx+nUUbuK0EthKjv0zrdnajCCXVYGmbYYiYjFVpzcjwEs/JMDZ8tPRG29J/HhN56t3GJp2cGSWDRjjot8Pg== 263 | dependencies: 264 | acorn-node "^1.6.1" 265 | defined "^1.0.0" 266 | minimist "^1.1.1" 267 | 268 | didyoumean@^1.2.1: 269 | version "1.2.1" 270 | resolved "https://registry.yarnpkg.com/didyoumean/-/didyoumean-1.2.1.tgz#e92edfdada6537d484d73c0172fd1eba0c4976ff" 271 | integrity sha1-6S7f2tplN9SE1zwBcv0eugxJdv8= 272 | 273 | dlv@^1.1.3: 274 | version "1.1.3" 275 | resolved "https://registry.yarnpkg.com/dlv/-/dlv-1.1.3.tgz#5c198a8a11453596e751494d49874bc7732f2e79" 276 | integrity sha512-+HlytyjlPKnIG8XuRG8WvmBP8xs8P71y+SKKS6ZXWoEgLuePxtDoUEiH7WkdePWrQ5JBpE6aoVqfZfJUQkjXwA== 277 | 278 | electron-to-chromium@^1.3.719: 279 | version "1.3.720" 280 | resolved 
"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.720.tgz#f5d66df8754d993006b7b2ded15ff7738c58bd94" 281 | integrity sha512-B6zLTxxaOFP4WZm6DrvgRk8kLFYWNhQ5TrHMC0l5WtkMXhU5UbnvWoTfeEwqOruUSlNMhVLfYak7REX6oC5Yfw== 282 | 283 | escalade@^3.1.1: 284 | version "3.1.1" 285 | resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.1.1.tgz#d8cfdc7000965c5a0174b4a82eaa5c0552742e40" 286 | integrity sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw== 287 | 288 | escape-string-regexp@^1.0.5: 289 | version "1.0.5" 290 | resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" 291 | integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= 292 | 293 | fast-glob@^3.2.5: 294 | version "3.2.5" 295 | resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.5.tgz#7939af2a656de79a4f1901903ee8adcaa7cb9661" 296 | integrity sha512-2DtFcgT68wiTTiwZ2hNdJfcHNke9XOfnwmBRWXhmeKM8rF0TGwmC/Qto3S7RoZKp5cilZbxzO5iTNTQsJ+EeDg== 297 | dependencies: 298 | "@nodelib/fs.stat" "^2.0.2" 299 | "@nodelib/fs.walk" "^1.2.3" 300 | glob-parent "^5.1.0" 301 | merge2 "^1.3.0" 302 | micromatch "^4.0.2" 303 | picomatch "^2.2.1" 304 | 305 | fastq@^1.6.0: 306 | version "1.11.0" 307 | resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.11.0.tgz#bb9fb955a07130a918eb63c1f5161cc32a5d0858" 308 | integrity sha512-7Eczs8gIPDrVzT+EksYBcupqMyxSHXXrHOLRRxU2/DicV8789MRBRR8+Hc2uWzUupOs4YS4JzBmBxjjCVBxD/g== 309 | dependencies: 310 | reusify "^1.0.4" 311 | 312 | fill-range@^7.0.1: 313 | version "7.0.1" 314 | resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" 315 | integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== 316 | dependencies: 317 | to-regex-range "^5.0.1" 318 | 319 | fraction.js@^4.0.13: 320 | version "4.0.13" 321 | resolved 
"https://registry.yarnpkg.com/fraction.js/-/fraction.js-4.0.13.tgz#3c1c315fa16b35c85fffa95725a36fa729c69dfe" 322 | integrity sha512-E1fz2Xs9ltlUp+qbiyx9wmt2n9dRzPsS11Jtdb8D2o+cC7wr9xkkKsVKJuBX0ST+LVS+LhLO+SbLJNtfWcJvXA== 323 | 324 | fs-extra@^9.1.0: 325 | version "9.1.0" 326 | resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" 327 | integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== 328 | dependencies: 329 | at-least-node "^1.0.0" 330 | graceful-fs "^4.2.0" 331 | jsonfile "^6.0.1" 332 | universalify "^2.0.0" 333 | 334 | fs.realpath@^1.0.0: 335 | version "1.0.0" 336 | resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" 337 | integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= 338 | 339 | fsevents@~2.3.1: 340 | version "2.3.2" 341 | resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.2.tgz#8a526f78b8fdf4623b709e0b975c52c24c02fd1a" 342 | integrity sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA== 343 | 344 | function-bind@^1.1.1: 345 | version "1.1.1" 346 | resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" 347 | integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== 348 | 349 | glob-base@^0.3.0: 350 | version "0.3.0" 351 | resolved "https://registry.yarnpkg.com/glob-base/-/glob-base-0.3.0.tgz#dbb164f6221b1c0b1ccf82aea328b497df0ea3c4" 352 | integrity sha1-27Fk9iIbHAscz4Kuoyi0l98Oo8Q= 353 | dependencies: 354 | glob-parent "^2.0.0" 355 | is-glob "^2.0.0" 356 | 357 | glob-parent@^2.0.0: 358 | version "2.0.0" 359 | resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-2.0.0.tgz#81383d72db054fcccf5336daa902f182f6edbb28" 360 | integrity sha1-gTg9ctsFT8zPUzbaqQLxgvbtuyg= 361 | dependencies: 362 | is-glob "^2.0.0" 363 | 
364 | glob-parent@^5.1.0, glob-parent@~5.1.0: 365 | version "5.1.2" 366 | resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.2.tgz#869832c58034fe68a4093c17dc15e8340d8401c4" 367 | integrity sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow== 368 | dependencies: 369 | is-glob "^4.0.1" 370 | 371 | glob@^7.0.0, glob@^7.1.2: 372 | version "7.1.6" 373 | resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" 374 | integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== 375 | dependencies: 376 | fs.realpath "^1.0.0" 377 | inflight "^1.0.4" 378 | inherits "2" 379 | minimatch "^3.0.4" 380 | once "^1.3.0" 381 | path-is-absolute "^1.0.0" 382 | 383 | graceful-fs@^4.1.6, graceful-fs@^4.2.0: 384 | version "4.2.6" 385 | resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.6.tgz#ff040b2b0853b23c3d31027523706f1885d76bee" 386 | integrity sha512-nTnJ528pbqxYanhpDYsi4Rd8MAeaBA67+RZ10CM1m3bTAVFEDcd5AuA4a6W5YkGZ1iNXHzZz8T6TBKLeBuNriQ== 387 | 388 | has-flag@^3.0.0: 389 | version "3.0.0" 390 | resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" 391 | integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= 392 | 393 | has-flag@^4.0.0: 394 | version "4.0.0" 395 | resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" 396 | integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== 397 | 398 | has@^1.0.3: 399 | version "1.0.3" 400 | resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" 401 | integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== 402 | dependencies: 403 | function-bind "^1.1.1" 404 | 405 | html-tags@^3.1.0: 406 | version "3.1.0" 407 | resolved 
"https://registry.yarnpkg.com/html-tags/-/html-tags-3.1.0.tgz#7b5e6f7e665e9fb41f30007ed9e0d41e97fb2140" 408 | integrity sha512-1qYz89hW3lFDEazhjW0yVAV87lw8lVkrJocr72XmBkMKsoSVJCQx3W8BXsC7hO2qAt8BoVjYjtAcZ9perqGnNg== 409 | 410 | humanize-duration@^3.25.2: 411 | version "3.25.2" 412 | resolved "https://registry.yarnpkg.com/humanize-duration/-/humanize-duration-3.25.2.tgz#5259585b749ecc5ad5a60fb37121aee0e9ab0c5e" 413 | integrity sha512-zSerjahuzBazDaE8skjMI7Xmrt/EirvW5cDsXgysx8tYIjcgCMnI5Y5985y3LxYeLah9L5cQY3WEw1k7GRWbfg== 414 | 415 | inflight@^1.0.4: 416 | version "1.0.6" 417 | resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" 418 | integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= 419 | dependencies: 420 | once "^1.3.0" 421 | wrappy "1" 422 | 423 | inherits@2: 424 | version "2.0.4" 425 | resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" 426 | integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== 427 | 428 | is-arrayish@^0.3.1: 429 | version "0.3.2" 430 | resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.3.2.tgz#4574a2ae56f7ab206896fb431eaeed066fdf8f03" 431 | integrity sha512-eVRqCvVlZbuw3GrM63ovNSNAeA1K16kaR/LRY/92w0zxQ5/1YzwblUX652i4Xs9RwAGjW9d9y6X88t8OaAJfWQ== 432 | 433 | is-binary-path@~2.1.0: 434 | version "2.1.0" 435 | resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-2.1.0.tgz#ea1f7f3b80f064236e83470f86c09c254fb45b09" 436 | integrity sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw== 437 | dependencies: 438 | binary-extensions "^2.0.0" 439 | 440 | is-core-module@^2.2.0: 441 | version "2.2.0" 442 | resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.2.0.tgz#97037ef3d52224d85163f5597b2b63d9afed981a" 443 | integrity 
sha512-XRAfAdyyY5F5cOXn7hYQDqh2Xmii+DEfIcQGxK/uNwMHhIkPWO0g8msXcbzLe+MpGoR951MlqM/2iIlU4vKDdQ== 444 | dependencies: 445 | has "^1.0.3" 446 | 447 | is-dotfile@^1.0.0: 448 | version "1.0.3" 449 | resolved "https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" 450 | integrity sha1-pqLzL/0t+wT1yiXs0Pa4PPeYoeE= 451 | 452 | is-extglob@^1.0.0: 453 | version "1.0.0" 454 | resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" 455 | integrity sha1-rEaBd8SUNAWgkvyPKXYMb/xiBsA= 456 | 457 | is-extglob@^2.1.1: 458 | version "2.1.1" 459 | resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" 460 | integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= 461 | 462 | is-glob@^2.0.0: 463 | version "2.0.1" 464 | resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-2.0.1.tgz#d096f926a3ded5600f3fdfd91198cb0888c2d863" 465 | integrity sha1-0Jb5JqPe1WAPP9/ZEZjLCIjC2GM= 466 | dependencies: 467 | is-extglob "^1.0.0" 468 | 469 | is-glob@^4.0.1, is-glob@~4.0.1: 470 | version "4.0.1" 471 | resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" 472 | integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== 473 | dependencies: 474 | is-extglob "^2.1.1" 475 | 476 | is-number@^7.0.0: 477 | version "7.0.0" 478 | resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" 479 | integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== 480 | 481 | jsonfile@^6.0.1: 482 | version "6.1.0" 483 | resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.1.0.tgz#bc55b2634793c679ec6403094eb13698a6ec0aae" 484 | integrity sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ== 485 | dependencies: 486 | 
universalify "^2.0.0" 487 | optionalDependencies: 488 | graceful-fs "^4.1.6" 489 | 490 | lodash.toarray@^4.4.0: 491 | version "4.4.0" 492 | resolved "https://registry.yarnpkg.com/lodash.toarray/-/lodash.toarray-4.4.0.tgz#24c4bfcd6b2fba38bfd0594db1179d8e9b656561" 493 | integrity sha1-JMS/zWsvuji/0FlNsRedjptlZWE= 494 | 495 | lodash.topath@^4.5.2: 496 | version "4.5.2" 497 | resolved "https://registry.yarnpkg.com/lodash.topath/-/lodash.topath-4.5.2.tgz#3616351f3bba61994a0931989660bd03254fd009" 498 | integrity sha1-NhY1Hzu6YZlKCTGYlmC9AyVP0Ak= 499 | 500 | lodash@^4.17.21: 501 | version "4.17.21" 502 | resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.21.tgz#679591c564c3bffaae8454cf0b3df370c3d6911c" 503 | integrity sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg== 504 | 505 | merge2@^1.3.0: 506 | version "1.4.1" 507 | resolved "https://registry.yarnpkg.com/merge2/-/merge2-1.4.1.tgz#4368892f885e907455a6fd7dc55c0c9d404990ae" 508 | integrity sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg== 509 | 510 | micromatch@^4.0.2: 511 | version "4.0.4" 512 | resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.4.tgz#896d519dfe9db25fce94ceb7a500919bf881ebf9" 513 | integrity sha512-pRmzw/XUcwXGpD9aI9q/0XOwLNygjETJ8y0ao0wdqprrzDa4YnxLcz7fQRZr8voh8V10kGhABbNcHVk5wHgWwg== 514 | dependencies: 515 | braces "^3.0.1" 516 | picomatch "^2.2.3" 517 | 518 | minimatch@^3.0.4: 519 | version "3.0.4" 520 | resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" 521 | integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== 522 | dependencies: 523 | brace-expansion "^1.1.7" 524 | 525 | minimist@^1.1.1: 526 | version "1.2.5" 527 | resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" 528 | integrity 
sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== 529 | 530 | modern-normalize@^1.0.0: 531 | version "1.0.0" 532 | resolved "https://registry.yarnpkg.com/modern-normalize/-/modern-normalize-1.0.0.tgz#539d84a1e141338b01b346f3e27396d0ed17601e" 533 | integrity sha512-1lM+BMLGuDfsdwf3rsgBSrxJwAZHFIrQ8YR61xIqdHo0uNKI9M52wNpHSrliZATJp51On6JD0AfRxd4YGSU0lw== 534 | 535 | nanoid@^3.1.22: 536 | version "3.1.22" 537 | resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.1.22.tgz#b35f8fb7d151990a8aebd5aa5015c03cf726f844" 538 | integrity sha512-/2ZUaJX2ANuLtTvqTlgqBQNJoQO398KyJgZloL0PZkC0dpysjncRUPsFe3DUPzz/y3h+u7C46np8RMuvF3jsSQ== 539 | 540 | node-emoji@^1.8.1: 541 | version "1.10.0" 542 | resolved "https://registry.yarnpkg.com/node-emoji/-/node-emoji-1.10.0.tgz#8886abd25d9c7bb61802a658523d1f8d2a89b2da" 543 | integrity sha512-Yt3384If5H6BYGVHiHwTL+99OzJKHhgp82S8/dktEK73T26BazdgZ4JZh92xSVtGNJvz9UbXdNAc5hcrXV42vw== 544 | dependencies: 545 | lodash.toarray "^4.4.0" 546 | 547 | node-releases@^1.1.71: 548 | version "1.1.71" 549 | resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.71.tgz#cb1334b179896b1c89ecfdd4b725fb7bbdfc7dbb" 550 | integrity sha512-zR6HoT6LrLCRBwukmrVbHv0EpEQjksO6GmFcZQQuCAy139BEsoVKPYnf3jongYW83fAa1torLGYwxxky/p28sg== 551 | 552 | normalize-path@^3.0.0, normalize-path@~3.0.0: 553 | version "3.0.0" 554 | resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" 555 | integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== 556 | 557 | normalize-range@^0.1.2: 558 | version "0.1.2" 559 | resolved "https://registry.yarnpkg.com/normalize-range/-/normalize-range-0.1.2.tgz#2d10c06bdfd312ea9777695a4d28439456b75942" 560 | integrity sha1-LRDAa9/TEuqXd2laTShDlFa3WUI= 561 | 562 | object-assign@^4.1.1: 563 | version "4.1.1" 564 | resolved 
"https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" 565 | integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= 566 | 567 | object-hash@^2.1.1: 568 | version "2.1.1" 569 | resolved "https://registry.yarnpkg.com/object-hash/-/object-hash-2.1.1.tgz#9447d0279b4fcf80cff3259bf66a1dc73afabe09" 570 | integrity sha512-VOJmgmS+7wvXf8CjbQmimtCnEx3IAoLxI3fp2fbWehxrWBcAQFbk+vcwb6vzR0VZv/eNCJ/27j151ZTwqW/JeQ== 571 | 572 | once@^1.3.0: 573 | version "1.4.0" 574 | resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" 575 | integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= 576 | dependencies: 577 | wrappy "1" 578 | 579 | parse-glob@^3.0.4: 580 | version "3.0.4" 581 | resolved "https://registry.yarnpkg.com/parse-glob/-/parse-glob-3.0.4.tgz#b2c376cfb11f35513badd173ef0bb6e3a388391c" 582 | integrity sha1-ssN2z7EfNVE7rdFz7wu246OIORw= 583 | dependencies: 584 | glob-base "^0.3.0" 585 | is-dotfile "^1.0.0" 586 | is-extglob "^1.0.0" 587 | is-glob "^2.0.0" 588 | 589 | path-is-absolute@^1.0.0: 590 | version "1.0.1" 591 | resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" 592 | integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= 593 | 594 | path-parse@^1.0.6: 595 | version "1.0.6" 596 | resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" 597 | integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== 598 | 599 | picomatch@^2.0.4, picomatch@^2.2.1, picomatch@^2.2.3: 600 | version "2.2.3" 601 | resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.3.tgz#465547f359ccc206d3c48e46a1bcb89bf7ee619d" 602 | integrity sha512-KpELjfwcCDUb9PeigTs2mBJzXUPzAuP2oPcA989He8Rte0+YUAjw1JVedDhuTKPkHjSYzMN3npC9luThGYEKdg== 603 | 604 | postcss-functions@^3: 605 | version "3.0.0" 606 | resolved 
"https://registry.yarnpkg.com/postcss-functions/-/postcss-functions-3.0.0.tgz#0e94d01444700a481de20de4d55fb2640564250e" 607 | integrity sha1-DpTQFERwCkgd4g3k1V+yZAVkJQ4= 608 | dependencies: 609 | glob "^7.1.2" 610 | object-assign "^4.1.1" 611 | postcss "^6.0.9" 612 | postcss-value-parser "^3.3.0" 613 | 614 | postcss-js@^3.0.3: 615 | version "3.0.3" 616 | resolved "https://registry.yarnpkg.com/postcss-js/-/postcss-js-3.0.3.tgz#2f0bd370a2e8599d45439f6970403b5873abda33" 617 | integrity sha512-gWnoWQXKFw65Hk/mi2+WTQTHdPD5UJdDXZmX073EY/B3BWnYjO4F4t0VneTCnCGQ5E5GsCdMkzPaTXwl3r5dJw== 618 | dependencies: 619 | camelcase-css "^2.0.1" 620 | postcss "^8.1.6" 621 | 622 | postcss-nested@5.0.5: 623 | version "5.0.5" 624 | resolved "https://registry.yarnpkg.com/postcss-nested/-/postcss-nested-5.0.5.tgz#f0a107d33a9fab11d7637205f5321e27223e3603" 625 | integrity sha512-GSRXYz5bccobpTzLQZXOnSOfKl6TwVr5CyAQJUPub4nuRJSOECK5AqurxVgmtxP48p0Kc/ndY/YyS1yqldX0Ew== 626 | dependencies: 627 | postcss-selector-parser "^6.0.4" 628 | 629 | postcss-selector-parser@^6.0.2, postcss-selector-parser@^6.0.4: 630 | version "6.0.5" 631 | resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-6.0.5.tgz#042d74e137db83e6f294712096cb413f5aa612c4" 632 | integrity sha512-aFYPoYmXbZ1V6HZaSvat08M97A8HqO6Pjz+PiNpw/DhuRrC72XWAdp3hL6wusDCN31sSmcZyMGa2hZEuX+Xfhg== 633 | dependencies: 634 | cssesc "^3.0.0" 635 | util-deprecate "^1.0.2" 636 | 637 | postcss-value-parser@^3.3.0: 638 | version "3.3.1" 639 | resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281" 640 | integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ== 641 | 642 | postcss-value-parser@^4.1.0: 643 | version "4.1.0" 644 | resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.1.0.tgz#443f6a20ced6481a2bda4fa8532a6e55d789a2cb" 645 | integrity 
sha512-97DXOFbQJhk71ne5/Mt6cOu6yxsSfM0QGQyl0L25Gca4yGWEGJaig7l7gbCX623VqTBNGLRLaVUCnNkcedlRSQ== 646 | 647 | postcss@^6.0.9: 648 | version "6.0.23" 649 | resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.23.tgz#61c82cc328ac60e677645f979054eb98bc0e3324" 650 | integrity sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag== 651 | dependencies: 652 | chalk "^2.4.1" 653 | source-map "^0.6.1" 654 | supports-color "^5.4.0" 655 | 656 | postcss@^8.1.6, postcss@^8.2.1, postcss@^8.2.8: 657 | version "8.2.12" 658 | resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.2.12.tgz#81248a1a87e0f575cc594a99a08207fd1c4addc4" 659 | integrity sha512-BJnGT5+0q2tzvs6oQfnY2NpEJ7rIXNfBnZtQOKCIsweeWXBXeDd5k31UgTdS3d/c02ouspufn37mTaHWkJyzMQ== 660 | dependencies: 661 | colorette "^1.2.2" 662 | nanoid "^3.1.22" 663 | source-map "^0.6.1" 664 | 665 | pretty-hrtime@^1.0.3: 666 | version "1.0.3" 667 | resolved "https://registry.yarnpkg.com/pretty-hrtime/-/pretty-hrtime-1.0.3.tgz#b7e3ea42435a4c9b2759d99e0f201eb195802ee1" 668 | integrity sha1-t+PqQkNaTJsnWdmeDyAesZWALuE= 669 | 670 | purgecss@^3.1.3: 671 | version "3.1.3" 672 | resolved "https://registry.yarnpkg.com/purgecss/-/purgecss-3.1.3.tgz#26987ec09d12eeadc318e22f6e5a9eb0be094f41" 673 | integrity sha512-hRSLN9mguJ2lzlIQtW4qmPS2kh6oMnA9RxdIYK8sz18QYqd6ePp4GNDl18oWHA1f2v2NEQIh51CO8s/E3YGckQ== 674 | dependencies: 675 | commander "^6.0.0" 676 | glob "^7.0.0" 677 | postcss "^8.2.1" 678 | postcss-selector-parser "^6.0.2" 679 | 680 | queue-microtask@^1.2.2: 681 | version "1.2.3" 682 | resolved "https://registry.yarnpkg.com/queue-microtask/-/queue-microtask-1.2.3.tgz#4929228bbc724dfac43e0efb058caf7b6cfb6243" 683 | integrity sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A== 684 | 685 | quick-lru@^5.1.1: 686 | version "5.1.1" 687 | resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" 688 | 
integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== 689 | 690 | readdirp@~3.5.0: 691 | version "3.5.0" 692 | resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-3.5.0.tgz#9ba74c019b15d365278d2e91bb8c48d7b4d42c9e" 693 | integrity sha512-cMhu7c/8rdhkHXWsY+osBhfSy0JikwpHK/5+imo+LpeasTF8ouErHrlYkwT0++njiyuDvc7OFY5T3ukvZ8qmFQ== 694 | dependencies: 695 | picomatch "^2.2.1" 696 | 697 | reduce-css-calc@^2.1.8: 698 | version "2.1.8" 699 | resolved "https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-2.1.8.tgz#7ef8761a28d614980dc0c982f772c93f7a99de03" 700 | integrity sha512-8liAVezDmUcH+tdzoEGrhfbGcP7nOV4NkGE3a74+qqvE7nt9i4sKLGBuZNOnpI4WiGksiNPklZxva80061QiPg== 701 | dependencies: 702 | css-unit-converter "^1.1.1" 703 | postcss-value-parser "^3.3.0" 704 | 705 | resolve@^1.20.0: 706 | version "1.20.0" 707 | resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.20.0.tgz#629a013fb3f70755d6f0b7935cc1c2c5378b1975" 708 | integrity sha512-wENBPt4ySzg4ybFQW2TT1zMQucPK95HSh/nq2CFTZVOGut2+pQvSsgtda4d26YrYcr067wjbmzOG8byDPBX63A== 709 | dependencies: 710 | is-core-module "^2.2.0" 711 | path-parse "^1.0.6" 712 | 713 | reusify@^1.0.4: 714 | version "1.0.4" 715 | resolved "https://registry.yarnpkg.com/reusify/-/reusify-1.0.4.tgz#90da382b1e126efc02146e90845a88db12925d76" 716 | integrity sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw== 717 | 718 | run-parallel@^1.1.9: 719 | version "1.2.0" 720 | resolved "https://registry.yarnpkg.com/run-parallel/-/run-parallel-1.2.0.tgz#66d1368da7bdf921eb9d95bd1a9229e7f21a43ee" 721 | integrity sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA== 722 | dependencies: 723 | queue-microtask "^1.2.2" 724 | 725 | simple-swizzle@^0.2.2: 726 | version "0.2.2" 727 | resolved "https://registry.yarnpkg.com/simple-swizzle/-/simple-swizzle-0.2.2.tgz#a4da6b635ffcccca33f70d17cb92592de95e557a" 728 | integrity 
sha1-pNprY1/8zMoz9w0Xy5JZLeleVXo= 729 | dependencies: 730 | is-arrayish "^0.3.1" 731 | 732 | source-map@^0.6.1: 733 | version "0.6.1" 734 | resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" 735 | integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== 736 | 737 | supports-color@^5.3.0, supports-color@^5.4.0: 738 | version "5.5.0" 739 | resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" 740 | integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== 741 | dependencies: 742 | has-flag "^3.0.0" 743 | 744 | supports-color@^7.1.0: 745 | version "7.2.0" 746 | resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" 747 | integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== 748 | dependencies: 749 | has-flag "^4.0.0" 750 | 751 | tailwindcss@^2.0.3: 752 | version "2.1.2" 753 | resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-2.1.2.tgz#29402bf73a445faedd03df6d3b177e7b52b7c4a1" 754 | integrity sha512-T5t+wwd+/hsOyRw2HJuFuv0LTUm3MUdHm2DJ94GPVgzqwPPFa9XxX0KlwLWupUuiOUj6uiKURCzYPHFcuPch/w== 755 | dependencies: 756 | "@fullhuman/postcss-purgecss" "^3.1.3" 757 | bytes "^3.0.0" 758 | chalk "^4.1.0" 759 | chokidar "^3.5.1" 760 | color "^3.1.3" 761 | detective "^5.2.0" 762 | didyoumean "^1.2.1" 763 | dlv "^1.1.3" 764 | fast-glob "^3.2.5" 765 | fs-extra "^9.1.0" 766 | html-tags "^3.1.0" 767 | lodash "^4.17.21" 768 | lodash.topath "^4.5.2" 769 | modern-normalize "^1.0.0" 770 | node-emoji "^1.8.1" 771 | normalize-path "^3.0.0" 772 | object-hash "^2.1.1" 773 | parse-glob "^3.0.4" 774 | postcss-functions "^3" 775 | postcss-js "^3.0.3" 776 | postcss-nested "5.0.5" 777 | postcss-selector-parser "^6.0.4" 778 | 
postcss-value-parser "^4.1.0" 779 | pretty-hrtime "^1.0.3" 780 | quick-lru "^5.1.1" 781 | reduce-css-calc "^2.1.8" 782 | resolve "^1.20.0" 783 | 784 | to-regex-range@^5.0.1: 785 | version "5.0.1" 786 | resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" 787 | integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== 788 | dependencies: 789 | is-number "^7.0.0" 790 | 791 | universalify@^2.0.0: 792 | version "2.0.0" 793 | resolved "https://registry.yarnpkg.com/universalify/-/universalify-2.0.0.tgz#75a4984efedc4b08975c5aeb73f530d02df25717" 794 | integrity sha512-hAZsKq7Yy11Zu1DE0OzWjw7nnLZmJZYTDZZyEFHZdUhV8FkH5MCfoU1XMaxXovpyW5nq5scPqq0ZDP9Zyl04oQ== 795 | 796 | util-deprecate@^1.0.2: 797 | version "1.0.2" 798 | resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" 799 | integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= 800 | 801 | wrappy@1: 802 | version "1.0.2" 803 | resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" 804 | integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= 805 | 806 | xtend@^4.0.2: 807 | version "4.0.2" 808 | resolved "https://registry.yarnpkg.com/xtend/-/xtend-4.0.2.tgz#bb72779f5fa465186b1f438f674fa347fdb5db54" 809 | integrity sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ== 810 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | version: "3.3" 2 | services: 3 | redis: 4 | container_name: redis 5 | image: "redis:alpine" 6 | environment: 7 | - REDIS_PASSWORD=retube 8 | - REDIS_REPLICATION_MODE=master 9 | volumes: 10 | # save redisearch data to your current working directory 11 | - ./redis-data:/data 12 | command: 13 | # Save if 100 keys are 
added in every 10 seconds 14 | - "--save 10 100" 15 | # Set password 16 | - "--requirepass retube" 17 | restart: unless-stopped 18 | backend: 19 | build: ./backend 20 | ports: 21 | - "8000:80" 22 | depends_on: 23 | - redis 24 | restart: unless-stopped 25 | frontend: 26 | build: ./frontend 27 | ports: 28 | - "3033:3000" 29 | depends_on: 30 | - backend 31 | restart: unless-stopped 32 | 33 | volumes: 34 | redis-data: 35 | -------------------------------------------------------------------------------- /frontend/.gitignore: -------------------------------------------------------------------------------- 1 | # See https://help.github.com/articles/ignoring-files/ for more about ignoring files. 2 | 3 | # dependencies 4 | /node_modules 5 | /.pnp 6 | .pnp.js 7 | 8 | # testing 9 | /coverage 10 | 11 | # next.js 12 | /.next/ 13 | /out/ 14 | 15 | # production 16 | /build 17 | 18 | # misc 19 | .DS_Store 20 | *.pem 21 | 22 | # debug 23 | npm-debug.log* 24 | yarn-debug.log* 25 | yarn-error.log* 26 | 27 | # local env files 28 | .env.local 29 | .env.development.local 30 | .env.test.local 31 | .env.production.local 32 | 33 | # vercel 34 | .vercel 35 | 36 | package-lock.json 37 | yarn.lock -------------------------------------------------------------------------------- /frontend/.yarnclean: -------------------------------------------------------------------------------- 1 | # test directories 2 | __tests__ 3 | test 4 | tests 5 | powered-test 6 | 7 | # asset directories 8 | docs 9 | doc 10 | website 11 | images 12 | assets 13 | 14 | # examples 15 | example 16 | examples 17 | 18 | # code coverage directories 19 | coverage 20 | .nyc_output 21 | 22 | # build scripts 23 | Makefile 24 | Gulpfile.js 25 | Gruntfile.js 26 | 27 | # configs 28 | appveyor.yml 29 | circle.yml 30 | codeship-services.yml 31 | codeship-steps.yml 32 | wercker.yml 33 | .tern-project 34 | .gitattributes 35 | .editorconfig 36 | .*ignore 37 | .eslintrc 38 | .jshintrc 39 | .flowconfig 40 | .documentup.json 41 | 
.yarn-metadata.json 42 | .travis.yml 43 | 44 | # misc 45 | *.md 46 | -------------------------------------------------------------------------------- /frontend/Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:alpine AS deps 2 | # Check https://github.com/nodejs/docker-node/tree/b4117f9333da4138b03a546ec926ef50a31506c3#nodealpine to understand why libc6-compat might be needed. 3 | RUN apk add --no-cache libc6-compat 4 | WORKDIR /app 5 | COPY package.json yarn.lock ./ 6 | RUN yarn install --frozen-lockfile 7 | 8 | # Rebuild the source code only when needed 9 | FROM node:alpine AS builder 10 | WORKDIR /app 11 | COPY . . 12 | COPY --from=deps /app/node_modules ./node_modules 13 | RUN yarn build && yarn install --production --ignore-scripts --prefer-offline 14 | 15 | # Production image, copy all the files and run next 16 | FROM node:alpine AS runner 17 | WORKDIR /app 18 | 19 | ENV NODE_ENV production 20 | 21 | RUN addgroup -g 1001 -S nodejs 22 | RUN adduser -S nextjs -u 1001 23 | 24 | 25 | # You only need to copy next.config.js if you are NOT using the default configuration 26 | # COPY --from=builder /app/next.config.js ./ 27 | COPY --from=builder /app/public ./public 28 | COPY --from=builder --chown=nextjs:nodejs /app/.next ./.next 29 | COPY --from=builder /app/node_modules ./node_modules 30 | COPY --from=builder /app/package.json ./package.json 31 | 32 | 33 | USER nextjs 34 | 35 | EXPOSE 3000 36 | 37 | # Next.js collects completely anonymous telemetry data about general usage. 38 | # Learn more here: https://nextjs.org/telemetry 39 | # Uncomment the following line in case you want to disable telemetry. 
40 | ENV NEXT_TELEMETRY_DISABLED 1 41 | 42 | CMD ["yarn", "start"] -------------------------------------------------------------------------------- /frontend/README.md: -------------------------------------------------------------------------------- 1 | This is a [Next.js](https://nextjs.org/) project bootstrapped with [`create-next-app`](https://github.com/vercel/next.js/tree/canary/packages/create-next-app). 2 | 3 | ## Getting Started 4 | 5 | First, run the development server: 6 | 7 | ```bash 8 | npm run dev 9 | # or 10 | yarn dev 11 | ``` 12 | 13 | Open [http://localhost:3000](http://localhost:3000) with your browser to see the result. 14 | 15 | You can start editing the page by modifying `pages/index.js`. The page auto-updates as you edit the file. 16 | 17 | [API routes](https://nextjs.org/docs/api-routes/introduction) can be accessed on [http://localhost:3000/api/hello](http://localhost:3000/api/hello). This endpoint can be edited in `pages/api/hello.js`. 18 | 19 | The `pages/api` directory is mapped to `/api/*`. Files in this directory are treated as [API routes](https://nextjs.org/docs/api-routes/introduction) instead of React pages. 20 | 21 | ## Learn More 22 | 23 | To learn more about Next.js, take a look at the following resources: 24 | 25 | - [Next.js Documentation](https://nextjs.org/docs) - learn about Next.js features and API. 26 | - [Learn Next.js](https://nextjs.org/learn) - an interactive Next.js tutorial. 27 | 28 | You can check out [the Next.js GitHub repository](https://github.com/vercel/next.js/) - your feedback and contributions are welcome! 29 | 30 | ## Deploy on Vercel 31 | 32 | The easiest way to deploy your Next.js app is to use the [Vercel Platform](https://vercel.com/new?utm_medium=default-template&filter=next.js&utm_source=create-next-app&utm_campaign=create-next-app-readme) from the creators of Next.js. 33 | 34 | Check out our [Next.js deployment documentation](https://nextjs.org/docs/deployment) for more details. 
35 | -------------------------------------------------------------------------------- /frontend/components/BadgeLive.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const ButtonLive = () => { 4 | return ( 5 | 11 | ); 12 | }; 13 | 14 | export default ButtonLive; 15 | -------------------------------------------------------------------------------- /frontend/components/BoxForContent.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const BoxForContent = ({children}) => { 4 | return ( 5 |
6 | {children} 7 |
8 | ); 9 | }; 10 | 11 | export default BoxForContent; 12 | -------------------------------------------------------------------------------- /frontend/components/ChannelHeader.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | import dynamic from "next/dynamic"; 4 | import { data } from "autoprefixer"; 5 | const SubscribeButton = dynamic(() => import("./SubscribeButton"), { 6 | ssr: false, 7 | }); 8 | 9 | const ChannelHeader = ({ data }) => { 10 | return ( 11 |
12 | {/* show banner if there is one */} 13 | {data.channel_meta.banner ? ( 14 |
15 | 16 |
17 | ) : ( 18 | "" 19 | )} 20 |
21 |
22 |
23 |
24 | {/* show number of avatar if its is available */} 25 | {data.channel_meta.avatar ? ( 26 | channel_logo 31 | ) : ( 32 | "" 33 | )} 34 |
35 |
36 | 37 | {data.channel_meta.title} 38 | 39 |
40 | {/* show number of subscribers if the number is available */} 41 | {data.channel_meta.subscriberCount ? ( 42 |

43 | {data.channel_meta.subscriberCount} subscribers 44 |

45 | ) : ( 46 | "" 47 | )} 48 |
49 |
50 | 51 |
52 |
53 |
54 |
55 | ); 56 | }; 57 | 58 | export default ChannelHeader; 59 | -------------------------------------------------------------------------------- /frontend/components/Content.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const Content = ({ children }) => { 4 | return ( 5 |
6 |
{children}
7 |
8 | ); 9 | }; 10 | 11 | export default Content; 12 | -------------------------------------------------------------------------------- /frontend/components/JsonEdit.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import ReactJson from "react-json-view"; 3 | import { useSnapshot } from "valtio"; 4 | import {subscriptions} from "./data"; 5 | 6 | import { removeFromList } from "../utils"; 7 | 8 | const JsonEdit = () => { 9 | const subsReadOnly = useSnapshot(subscriptions); 10 | 11 | return ( 12 |
13 | { 18 | removeFromList( 19 | subscriptions[del["namespace"][0]], 20 | del.existing_value 21 | ); 22 | }} 23 | onAdd={() => {}} 24 | onEdit={(edit) => { 25 | if (typeof edit.new_value !== String) { 26 | return false; 27 | } 28 | if ( 29 | edit.existing_value && 30 | edit.new_value && 31 | edit.existing_value !== edit.new_value 32 | ) { 33 | removeFromList( 34 | subscriptions[edit["namespace"][0]], 35 | edit.existing_value 36 | ); 37 | subscriptions[edit["namespace"][0]].push(edit.new_value); 38 | return true; 39 | } else if ( 40 | edit.existing_value === null && 41 | edit.new_value && 42 | edit.new_value !== "null" && 43 | !subscriptions[edit["namespace"][0]].includes(edit.new_value) 44 | ) { 45 | subscriptions[edit["namespace"][0]].push(edit.new_value); 46 | return true; 47 | } 48 | 49 | return false; 50 | }} 51 | /> 52 |
53 | ); 54 | }; 55 | 56 | export default JsonEdit; 57 | -------------------------------------------------------------------------------- /frontend/components/Navbar.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const Navbar = () => { 4 | return ( 5 |
6 |
7 | 8 |

ReTube Logo (todo)

9 |
10 |
11 |
12 |
13 |
14 | 21 | 38 |
39 |
40 |
41 |
42 | ); 43 | }; 44 | 45 | export default Navbar; 46 | -------------------------------------------------------------------------------- /frontend/components/Settings.jsx: -------------------------------------------------------------------------------- 1 | import React, { useState } from "react"; 2 | import BoxForContent from "./BoxForContent"; 3 | import Link from "next/link"; 4 | import { subscriptions } from "./data"; 5 | import { useSnapshot } from "valtio"; 6 | import { useToasts } from "react-toast-notifications"; 7 | import _ from "lodash"; 8 | 9 | const Settings = () => { 10 | const subsReadOnly = useSnapshot(subscriptions); 11 | 12 | // file upload - YouTube 13 | const [selectedFileYT, setSelectedFileYT] = useState(); 14 | const [isFilePickedYT, setIsFilePickedYT] = useState(false); 15 | 16 | const changeHandlerYouTube = (event) => { 17 | setSelectedFileYT(event.target.files.item(0)); 18 | setIsFilePickedYT(true); 19 | }; 20 | 21 | const { addToast } = useToasts(); 22 | const handleSubmissionYouTube = async () => { 23 | const fileContent = await selectedFileYT.text(); 24 | const jsonContent = JSON.parse(fileContent); 25 | // prepare list 26 | jsonContent.forEach((element) => { 27 | subscriptions.youtube.push(element["snippet"]["resourceId"]["channelId"]); 28 | }); 29 | subscriptions.youtube = [...new Set(subscriptions.youtube)]; 30 | localStorage.setItem("subscriptions", JSON.stringify(subscriptions)); 31 | addToast("YouTube subscriptions updated", { 32 | appearance: "success", 33 | autoDismiss: true, 34 | }); 35 | setIsFilePickedYT(false); 36 | }; 37 | 38 | // file upload ReTube 39 | const [selectedFileRT, setSelectedFileRT] = useState(); 40 | const [isFilePickedRT, setIsFilePickedRT] = useState(false); 41 | 42 | const changeHandlerRT = (event) => { 43 | setSelectedFileRT(event.target.files.item(0)); 44 | setIsFilePickedRT(true); 45 | }; 46 | 47 | const handleSubmissionRT = async () => { 48 | const fileContent = await selectedFileRT.text(); 49 | 50 | const 
jsonContent = JSON.parse(fileContent); 51 | const ytJson = jsonContent["youtube"]; 52 | const bcJson = jsonContent["bitchute"]; 53 | const lbJson = jsonContent["lbry"]; 54 | 55 | subscriptions.youtube = _.merge(subscriptions.youtube, ytJson); 56 | subscriptions.bitchute = _.merge(subscriptions.bitchute, bcJson); 57 | subscriptions.lbry = _.merge(subscriptions.lbry, lbJson); 58 | 59 | localStorage.setItem("subscriptions", JSON.stringify(subscriptions)); 60 | addToast("ReTube subscriptions updated", { 61 | appearance: "success", 62 | autoDismiss: true, 63 | }); 64 | 65 | setIsFilePickedRT(false); 66 | }; 67 | 68 | // file upload Bitchute 69 | const [selectedFileBT, setSelectedFileBT] = useState(); 70 | const [isFilePickedBT, setIsFilePickedBT] = useState(false); 71 | 72 | const changeHandlerBT = (event) => { 73 | setSelectedFileBT(event.target.files.item(0)); 74 | setIsFilePickedBT(true); 75 | }; 76 | 77 | const handleSubmissionBT = async () => { 78 | // https://www.bitchute.com/subscriptions/ 79 | const fileContent = await selectedFileBT.text(); 80 | 81 | const parser = new DOMParser(); 82 | const htmlDoc = parser.parseFromString(fileContent, "text/html"); 83 | const subsBox = htmlDoc.getElementById("page-detail"); 84 | const subsBoxList = subsBox.getElementsByClassName( 85 | "subscription-container" 86 | ); 87 | [...subsBoxList].forEach((element) => { 88 | // /channel/channelId/ => channelId 89 | let channelId = element.getElementsByTagName("a")[0].getAttribute("href"); 90 | channelId = channelId.split("channel/")[1].slice(0, -1); 91 | subscriptions.bitchute.push(channelId); 92 | }); 93 | 94 | subscriptions.bitchute = [...new Set(subscriptions.bitchute)]; 95 | localStorage.setItem("subscriptions", JSON.stringify(subscriptions)); 96 | addToast("Bitchute subscriptions updated", { 97 | appearance: "success", 98 | autoDismiss: true, 99 | }); 100 | setIsFilePickedLbry(false); 101 | }; 102 | 103 | return ( 104 |
105 | 106 |
107 | 108 | Import YouTube subscriptions: 109 | 110 |
111 | 130 | 136 |
137 | 138 | 139 |
140 | 141 | Import BitChute subscriptions: 142 | 143 |
144 | 159 | 165 |
166 | 167 | 168 | 169 |
170 | 178 | Export 179 | 180 | 181 | 182 |
183 | 184 | Import ReTube subscriptions: 185 | 186 |
187 | 206 | 212 |
213 |
214 |
215 |
216 | ); 217 | }; 218 | 219 | export default Settings; 220 | -------------------------------------------------------------------------------- /frontend/components/Sidebar.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import Link from "next/link"; 3 | import { useRouter } from "next/router"; 4 | 5 | import routes from "../routes/routes"; 6 | 7 | import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; 8 | 9 | const Sidebar = () => { 10 | const router = useRouter(); 11 | 12 | return ( 13 |
14 |
15 |
16 |
17 | 28 |
29 |
30 |
31 |
32 | ); 33 | }; 34 | 35 | let LinkItem = ({ linkRoute, currentPath }) => ( 36 |
  • 37 |
    45 | 46 | 47 |
    48 | 49 | {linkRoute.name} 50 |
    51 |
    52 | 53 |
    54 |
  • 55 | ); 56 | 57 | export default Sidebar; 58 | -------------------------------------------------------------------------------- /frontend/components/Skeleton.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const Skeleton = () => { 4 | return ( 5 |
    6 |
    7 |
    8 |
    9 |
    10 |
    11 |
    12 |
    13 |
    14 |
    15 |
    16 | ); 17 | }; 18 | 19 | export default Skeleton; 20 | -------------------------------------------------------------------------------- /frontend/components/SmallVideoBox.jsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from "react"; 2 | import { 3 | platforms, 4 | humanizeDurationSec, 5 | timeSince, 6 | getIdFromVideo, 7 | } from "../utils"; 8 | import Skeleton from "./Skeleton"; 9 | import WatchedSymbol from "./WatchedSymbol"; 10 | import { snapshot } from "valtio"; 11 | import dynamic from "next/dynamic"; 12 | import ButtonLive from "./BadgeLive"; 13 | const SubscribeButton = dynamic(() => import("./SubscribeButton"), { 14 | ssr: false, 15 | }); 16 | 17 | const VideoThumbnail = ({ item }) => { 18 | const platform = platforms[item.platform]; 19 | const [isWatched, setIsWatched] = useState(false); 20 | 21 | useEffect(() => { 22 | const proxy_data = require("./data"); 23 | const dbWatched = snapshot(proxy_data.dbWatched); 24 | if (item.videoUrl) { 25 | const watchUrl = getIdFromVideo(item.videoUrl); 26 | if (dbWatched.links.includes(watchUrl)) { 27 | setIsWatched(true); 28 | } 29 | } 30 | }, []); 31 | 32 | return ( 33 |
    34 | 35 |
    36 | thumbnail 42 | 43 | {item.createdAt ? `${timeSince(item.createdAt)} ago` : ""} 44 | 45 | {item.isLive ? ( 46 | 47 | 48 | 49 | ) : ( 50 | 51 | {humanizeDurationSec(item.duration)} 52 | 53 | )} 54 | {isWatched ? ( 55 | 56 | 57 | 58 | ) : ( 59 | 60 | )} 61 |
    62 |

    63 | {platform} 64 |

    65 |

    {item.title}

    66 |
    67 | 68 |

    69 | {item.author} 70 |

    71 |
    72 |
    73 | ); 74 | }; 75 | 76 | const ChannelThumbnail = ({ item }) => { 77 | const platform = platforms[item.platform]; 78 | 79 | return ( 80 |
    81 | 82 | thumbnail 87 |

    88 | {platform} 89 |

    90 |
    91 | 95 | 96 |

    {item.title}

    97 |
    98 |

    99 | {item.creator} 100 |

    101 |
    102 | ); 103 | }; 104 | 105 | const videoBoxes = (item, index) => { 106 | return ( 107 |
    108 | {item ? ( 109 | item.isChannel ? ( 110 | 111 | ) : ( 112 | 113 | ) 114 | ) : ( 115 | 116 | )} 117 |
    118 | ); 119 | }; 120 | 121 | export default videoBoxes; 122 | -------------------------------------------------------------------------------- /frontend/components/SubscribeButton.jsx: -------------------------------------------------------------------------------- 1 | import { conforms } from "lodash"; 2 | import React, { useState, useEffect } from "react"; 3 | import { useSnapshot } from "valtio"; 4 | import { channelUrlDetails, removeFromList } from "../utils"; 5 | 6 | import { subscriptions } from "./data"; 7 | 8 | // TODO: shared 9 | const YOUTUBE = "yt"; 10 | const LBRY = "lb"; 11 | const BITCHUTE = "bc"; 12 | const RUMBLE = "rb"; 13 | 14 | const IsSubscribed = (subsReadOnly, channel_url) => { 15 | let [platform, id] = channelUrlDetails(channel_url); 16 | 17 | switch (platform) { 18 | case YOUTUBE: 19 | return subsReadOnly.youtube.includes(id); 20 | case BITCHUTE: 21 | return subsReadOnly.bitchute.includes(id); 22 | case LBRY: 23 | return subsReadOnly.lbry.includes(id); 24 | case RUMBLE: 25 | if ("rumble" in subsReadOnly) return subsReadOnly.rumble.includes(id); 26 | else return false; 27 | default: 28 | return false; 29 | } 30 | }; 31 | 32 | const SubscribeButton = ({ channel_url, count }) => { 33 | const subsReadOnly = useSnapshot(subscriptions); 34 | const [isSubscribed, setIsSubscribed] = useState(false); 35 | useEffect(() => { 36 | setIsSubscribed(IsSubscribed(subsReadOnly, channel_url)); 37 | 38 | return () => {}; 39 | }, [subsReadOnly, channel_url]); 40 | 41 | return ( 42 | 91 | ); 92 | }; 93 | 94 | export default SubscribeButton; 95 | -------------------------------------------------------------------------------- /frontend/components/Subscriptions.jsx: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from "react"; 2 | import { fetchData } from "../utils"; 3 | import { snapshot } from "valtio"; 4 | import VideoBoard from "./VideoBoard"; 5 | 6 | const isFromToday = 
(videoEntry) => { 7 | const oneDay = 24 * 60 * 60 * 1000; 8 | const now = new Date(); 9 | const videoDate = new Date(videoEntry.createdAt); 10 | const isMoreThanADay = now - videoDate > oneDay; 11 | if (!isMoreThanADay) { 12 | return true; 13 | } 14 | return false; 15 | }; 16 | 17 | const fetchSubsVideos = async (subsStore, setVideoDataState) => { 18 | let allSubsWait = []; 19 | let allSubs = {}; 20 | allSubs.platform = "subscriptions"; 21 | allSubs.ready = false; 22 | allSubs.content = []; 23 | // youtube 24 | subsStore.youtube.forEach((item, index) => { 25 | const ytUrl = `https://www.youtube.com/channel/${item}`; 26 | allSubsWait.push(fetchData(ytUrl)); 27 | }); 28 | // Lbry 29 | subsStore.lbry.forEach((item, index) => { 30 | const lbUrl = `https://odysee.com/@${item}`; 31 | allSubsWait.push(fetchData(lbUrl)); 32 | }); 33 | // bitchute 34 | subsStore.bitchute.forEach((item, index) => { 35 | const bcUrl = `https://www.bitchute.com/channel/${item}`; 36 | allSubsWait.push(fetchData(bcUrl)); 37 | }); 38 | // rumble 39 | if ("rumble" in subsStore) { 40 | subsStore.rumble.forEach((item, index) => { 41 | const rbUrl = `https://rumble.com/${item}`; 42 | allSubsWait.push(fetchData(rbUrl)); 43 | }); 44 | } 45 | 46 | for (const waitSub of allSubsWait) { 47 | if (waitSub === null) { 48 | continue; 49 | } 50 | const result = await waitSub; 51 | if (result === null) { 52 | continue; 53 | } 54 | if (result.ready === "False") { 55 | continue; 56 | } 57 | let videoEntries = result.content; 58 | if (!videoEntries) continue; 59 | videoEntries = videoEntries.filter(isFromToday); 60 | allSubs.content = allSubs.content.concat(videoEntries); 61 | 62 | setVideoDataState(); 63 | setVideoDataState(allSubs); 64 | } 65 | 66 | allSubs.content = allSubs.content.sort((a, b) => { 67 | return b.createdAt - a.createdAt; 68 | }); 69 | allSubs.ready = true; 70 | setVideoDataState(); 71 | setVideoDataState(allSubs); 72 | }; 73 | 74 | const Subscriptions = () => { 75 | let [videoData, 
setVideoDataState] = useState(); 76 | 77 | useEffect(() => { 78 | const proxy_data = require("./data"); 79 | let subsStore = snapshot(proxy_data.subscriptions); 80 | localStorage.setItem( 81 | "subscriptions", 82 | JSON.stringify(proxy_data.subscriptions) 83 | ); 84 | 85 | fetchSubsVideos(subsStore, setVideoDataState); 86 | 87 | return () => { 88 | setVideoDataState([]); 89 | }; 90 | }, []); 91 | 92 | return ; 93 | }; 94 | 95 | export default Subscriptions; 96 | -------------------------------------------------------------------------------- /frontend/components/Suggestion.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const Suggestion = ({ data }) => { 4 | return ( 5 |
    6 | {data && "suggestion" in data ? ( 7 |

    8 | {`Do you mean: `} 9 | 10 | 15 | {data.suggestion} 16 | 17 | 18 |

    19 | ) : ( 20 | 21 | )} 22 |
    23 | ); 24 | }; 25 | 26 | export default Suggestion; 27 | -------------------------------------------------------------------------------- /frontend/components/VideoBoard.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import videoBoxes from "./SmallVideoBox"; 3 | import Suggestion from "./Suggestion"; 4 | import ChannelHeader from "./ChannelHeader"; 5 | function VideoBoard({ data }) { 6 | return ( 7 |
    8 | {data && data.channel_meta ? : ""} 9 | 10 |
    11 |
    12 | {(!data || !data.content || !data.content.length 13 | ? Array.from(new Array(3)) 14 | : data.ready 15 | ? data.content 16 | : [null, ...data.content] 17 | ).map((item, index) => videoBoxes(item, index))} 18 |
    19 |
    20 |
    21 | ); 22 | } 23 | 24 | export default VideoBoard; 25 | -------------------------------------------------------------------------------- /frontend/components/VideoPlayer.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import ReactPlayer from "react-player"; 3 | import { platforms, timeSince, fetchDataSWR, getIdFromVideo } from "../utils"; 4 | import useSWR from "swr"; 5 | 6 | import { dbWatched } from "./data"; 7 | import { useSnapshot } from "valtio"; 8 | import WatchedSymbol from "./WatchedSymbol"; 9 | import dynamic from "next/dynamic"; 10 | import ButtonLive from "./BadgeLive"; 11 | const SubscribeButton = dynamic(() => import("./SubscribeButton"), { 12 | ssr: false, 13 | }); 14 | 15 | const descriptionBox = (description) => { 16 | if (description) { 17 | return ( 18 |
    19 |
    {description}
    20 |
    21 | ); 22 | } else { 23 | return
    ; 24 | } 25 | }; 26 | 27 | const VideoPlayer = ({ videoProps, details, platform, originalUrl }) => { 28 | if (!originalUrl.startsWith("http")) { 29 | originalUrl = "https://" + originalUrl; 30 | } 31 | const platformName = platforms[platform]; 32 | const watchedProxy = useSnapshot(dbWatched); 33 | const playerOnProgress = ({ played }) => { 34 | if (played > 0.9) { 35 | dbWatched.links.push(getIdFromVideo(originalUrl)); 36 | dbWatched.links = [...new Set(dbWatched.links)]; 37 | localStorage.setItem("watched", JSON.stringify(dbWatched)); 38 | } 39 | }; 40 | 41 | // prefetch channel since next/link not working here 42 | useSWR([details.channelUrl, undefined], fetchDataSWR); 43 | return ( 44 |
    45 |
    46 |
    47 | 54 |
    55 |
    56 |

    57 |
    58 | {details.title} 59 | {details.isLive ? : ""} 60 |
    61 |

    62 |

    63 | {details.author} 64 |

    65 |
    66 | 74 | 75 | 76 | 77 | {`${Number(details.views).toLocaleString()} views • ${timeSince( 78 | details.createdAt 79 | )} ago`} 80 | 81 | {watchedProxy.links.includes(getIdFromVideo(originalUrl)) ? ( 82 | 83 | ) : ( 84 | 85 | )} 86 |
    87 |
    88 | {details.channelUrl ? ( 89 | 93 | ) : ( 94 | 95 | )} 96 | 102 |
    103 |
    104 |
    105 | {descriptionBox(details.description)} 106 |
    107 | ); 108 | }; 109 | 110 | export default VideoPlayer; 111 | -------------------------------------------------------------------------------- /frontend/components/Watch.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | import { useRouter } from "next/router"; 4 | import useSWR from "swr"; 5 | 6 | import Skeleton from "./Skeleton"; 7 | import VideoPlayer from "./VideoPlayer"; 8 | 9 | import { videoUrlDetails } from "../utils"; 10 | 11 | const VIDEO_API = "api/video/"; 12 | // TODO: extract 13 | const BACKEND_ADDR = "http://localhost:8000"; 14 | 15 | const fetchVideoMetaSWR = async (platform_id) => { 16 | const { platform, id } = JSON.parse(platform_id); 17 | 18 | const requestOptions = { 19 | method: "POST", 20 | headers: { "Content-Type": "application/json" }, 21 | body: JSON.stringify({ 22 | platform: platform, 23 | id: id, 24 | }), 25 | }; 26 | return fetch(`${BACKEND_ADDR}/${VIDEO_API}`, requestOptions) 27 | .then((response) => response.json()) 28 | .then((data) => { 29 | if (data.ready === false) { 30 | console.log("failed to get video details"); 31 | return {}; 32 | } 33 | return data.content; 34 | }); 35 | }; 36 | 37 | const Watch = () => { 38 | const router = useRouter(); 39 | 40 | // TODO: standartize original URL (targetUrl) 41 | let targetUrl = router.asPath.split("url=")[1]; 42 | let [platform, id] = videoUrlDetails(targetUrl); 43 | 44 | console.log(`watch: p:${platform} id:${id}`); 45 | const { data } = useSWR(JSON.stringify({ platform, id }), fetchVideoMetaSWR, { 46 | revalidateOnFocus: false, 47 | refreshWhenHidden: false, 48 | refreshInterval: 0, 49 | }); 50 | 51 | const videoProps = { 52 | controls: true, 53 | url: data ? data.streamUrl : null, 54 | // light: data ? (data.thumbnailUrl ? data.thumbnailUrl : null) : null, 55 | // https://github.com/CookPete/react-player#props 56 | }; 57 | return ( 58 |
    59 | {!data || !videoProps.url ? ( 60 |
    61 | 62 |
    63 | ) : ( 64 |
    65 | 71 |
    72 | )} 73 |
    74 | ); 75 | }; 76 | 77 | export default Watch; 78 | -------------------------------------------------------------------------------- /frontend/components/WatchedSymbol.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import { FontAwesomeIcon } from "@fortawesome/react-fontawesome"; 3 | import { faCheckSquare } from "@fortawesome/free-solid-svg-icons"; 4 | 5 | const WatchedSymbol = ({ withDot }) => { 6 | return ( 7 |
    8 | {withDot ? " • " : } 9 | 10 |
    11 | ); 12 | }; 13 | 14 | export default WatchedSymbol; 15 | -------------------------------------------------------------------------------- /frontend/components/data.jsx: -------------------------------------------------------------------------------- 1 | import { proxy } from "valtio"; 2 | 3 | export const subscriptions = proxy( 4 | JSON.parse(localStorage.getItem("subscriptions")) ?? { 5 | youtube: [], 6 | lbry: [], 7 | bitchute: [], 8 | rumble: [], 9 | } 10 | ); 11 | 12 | export const config = proxy( 13 | JSON.parse(localStorage.getItem("config")) ?? { 14 | spell_checker: false, 15 | } 16 | ); 17 | 18 | export const dbWatched = proxy( 19 | JSON.parse(localStorage.getItem("watched")) ?? { 20 | links: [], 21 | } 22 | ); 23 | -------------------------------------------------------------------------------- /frontend/components/footer.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const Footer = () => { 4 | return ( 5 | 35 | ); 36 | }; 37 | 38 | export default Footer; 39 | -------------------------------------------------------------------------------- /frontend/components/platformTag.jsx: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | 3 | const PlatformTag = ({ platform }) => { 4 | return ( 5 |
    8 | platform 9 |
    10 | ); 11 | }; 12 | 13 | export default PlatformTag; 14 | -------------------------------------------------------------------------------- /frontend/layouts/retube.js: -------------------------------------------------------------------------------- 1 | import Head from "next/head"; 2 | import Navbar from "../components/Navbar"; 3 | import Sidebar from "../components/Sidebar"; 4 | import Content from "../components/Content"; 5 | // import Footer from "../components/Footer"; 6 | 7 | export default function Layout({ children }) { 8 | return ( 9 |
    10 | 11 | ReTube - Reimagine Tubing 12 | 13 | 14 | 15 | 16 |
    17 | 18 | {children} 19 |
    20 | {/*
    */} 21 |
    22 | ); 23 | } 24 | -------------------------------------------------------------------------------- /frontend/package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "frontend_next", 3 | "version": "0.1.0", 4 | "private": true, 5 | "scripts": { 6 | "dev": "next dev", 7 | "build": "next build", 8 | "start": "next start" 9 | }, 10 | "dependencies": { 11 | "@fortawesome/fontawesome-svg-core": "^1.2.36", 12 | "@fortawesome/free-solid-svg-icons": "^5.15.4", 13 | "@fortawesome/react-fontawesome": "^0.1.15", 14 | "@tailwindcss/aspect-ratio": "^0.3.0", 15 | "humanize-duration": "^3.27.0", 16 | "lodash": "^4.17.21", 17 | "next": "^11.1.2", 18 | "react": "^17.0.2", 19 | "react-dom": "^17.0.2", 20 | "react-json-view": "^1.21.3", 21 | "react-player": "^2.9.0", 22 | "react-toast-notifications": "^2.5.1", 23 | "swr": "^1.0.1", 24 | "valtio": "^1.2.4", 25 | "yarn-upgrade-all": "^0.5.4" 26 | }, 27 | "devDependencies": { 28 | "autoprefixer": "^10.3.7", 29 | "postcss": "^8.3.9", 30 | "tailwindcss": "^2.2.17" 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /frontend/pages/_app.js: -------------------------------------------------------------------------------- 1 | import "tailwindcss/tailwind.css"; 2 | import Layout from "../layouts/retube"; 3 | 4 | function MyApp({ Component, pageProps }) { 5 | return ( 6 | 7 | 8 | 9 | ); 10 | } 11 | 12 | export default MyApp; 13 | -------------------------------------------------------------------------------- /frontend/pages/channel.js: -------------------------------------------------------------------------------- 1 | import { fetchData } from "../utils"; 2 | import { useRouter } from "next/router"; 3 | import VideoBoard from "../components/VideoBoard"; 4 | import useSWR from "swr"; 5 | export default function channel() { 6 | const router = useRouter(); 7 | const targetUrl = router.asPath; 8 | const { data } = useSWR(targetUrl, 
fetchData); 9 | 10 | return ; 11 | } 12 | -------------------------------------------------------------------------------- /frontend/pages/index.js: -------------------------------------------------------------------------------- 1 | import React from "react"; 2 | import Router from "next/router"; 3 | 4 | export default function Home() { 5 | React.useEffect(() => { 6 | Router.push("/popular"); 7 | }); 8 | 9 | return
    ; 10 | } 11 | -------------------------------------------------------------------------------- /frontend/pages/json.js: -------------------------------------------------------------------------------- 1 | import dynamic from "next/dynamic"; 2 | const JsonEdit = dynamic( 3 | () => import("../components/JsonEdit"), 4 | { ssr: false } 5 | ); 6 | export default function json() { 7 | return ( 8 |
    9 | 10 |
    11 | ); 12 | } 13 | -------------------------------------------------------------------------------- /frontend/pages/popular.js: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from "react"; 2 | import { fetchData } from "../utils"; 3 | import VideoBoard from "../components/VideoBoard"; 4 | export default function popular() { 5 | let [videoData, setVideoDataState] = useState(); 6 | 7 | const fetchPopularVideos = async (setVideoDataState) => { 8 | console.log("fetching popular videos"); 9 | const ytPromise = fetchData("yt_popular"); 10 | const lbPromise = fetchData("lbry_popular"); 11 | const bcPromise = fetchData("bitchute_popular"); 12 | const rbPromise = fetchData("rb_popular"); 13 | 14 | let allPopular = {}; 15 | allPopular.platform = "all"; 16 | allPopular.ready = false; 17 | allPopular.content = []; 18 | 19 | const ytResults = await ytPromise; 20 | if (ytResults) { 21 | allPopular.content = ytResults.content.slice(1, 10); 22 | setVideoDataState(allPopular); 23 | } 24 | 25 | const lbryResults = await lbPromise; 26 | if (lbryResults) { 27 | allPopular.content = allPopular.content.concat( 28 | lbryResults.content.slice(1, 10) 29 | ); 30 | setVideoDataState(); 31 | setVideoDataState(allPopular); 32 | } 33 | 34 | const bcResults = await bcPromise; 35 | if (bcResults) { 36 | allPopular.content = allPopular.content.concat( 37 | bcResults.content.slice(1, 10) 38 | ); 39 | setVideoDataState(); 40 | setVideoDataState(allPopular); 41 | } 42 | 43 | const rbResults = await rbPromise; 44 | if (rbResults) { 45 | allPopular.content = allPopular.content.concat( 46 | rbResults.content.slice(1, 10) 47 | ); 48 | setVideoDataState(); 49 | setVideoDataState(allPopular); 50 | } 51 | 52 | allPopular.ready = true; 53 | setVideoDataState(); 54 | setVideoDataState(allPopular); 55 | }; 56 | 57 | useEffect(() => { 58 | fetchPopularVideos(setVideoDataState); 59 | 60 | return () => { 61 | setVideoDataState([]); 
62 | }; 63 | }, []); 64 | 65 | return ( 66 |
    67 | {/* TODO */} 68 | 69 |
    70 | ); 71 | } 72 | -------------------------------------------------------------------------------- /frontend/pages/search.js: -------------------------------------------------------------------------------- 1 | import React, { useEffect, useState } from "react"; 2 | import { useRouter } from "next/router"; 3 | import { 4 | checkSentence, 5 | fetchSearchAPi, 6 | YOUTUBE_SEARCH, 7 | YOUTUBE_SEARCH_CHANNELS, 8 | LBRY_SEARCH, 9 | LBRY_SEARCH_CHANNELS, 10 | BITCHUTE_SEARCH, 11 | RUMBLE_SEARCH, 12 | } from "../utils"; 13 | import VideoBoard from "../components/VideoBoard"; 14 | export default function search() { 15 | const router = useRouter(); 16 | let [videoData, setVideoDataState] = useState(); 17 | 18 | const fetchSearchVideos = async (searchQuery, setVideoDataState) => { 19 | console.log(`searching: ${searchQuery}`); 20 | const checkQuery = checkSentence(searchQuery); 21 | 22 | let allWait = []; 23 | allWait.push(fetchSearchAPi(YOUTUBE_SEARCH_CHANNELS, searchQuery)); 24 | allWait.push(fetchSearchAPi(LBRY_SEARCH_CHANNELS, searchQuery)); 25 | allWait.push(fetchSearchAPi(YOUTUBE_SEARCH, searchQuery)); 26 | allWait.push(fetchSearchAPi(LBRY_SEARCH, searchQuery)); 27 | allWait.push(fetchSearchAPi(BITCHUTE_SEARCH, searchQuery)); 28 | allWait.push(fetchSearchAPi(RUMBLE_SEARCH, searchQuery)); 29 | let allSearch = {}; 30 | allSearch.platform = "search"; 31 | allSearch.ready = false; 32 | allSearch.content = []; 33 | for (const waitSub of allWait) { 34 | const result = await waitSub; 35 | if (!result || result.ready === false) { 36 | continue; 37 | } 38 | allSearch.content = allSearch.content.concat(result.content); 39 | setVideoDataState(); 40 | setVideoDataState(allSearch); 41 | } 42 | allSearch.ready = true; 43 | 44 | const checkResult = await checkQuery; 45 | if (checkResult.need_change) { 46 | allSearch.suggestion = checkResult.result; 47 | } 48 | setVideoDataState(); 49 | setVideoDataState(allSearch); 50 | }; 51 | 52 | useEffect(() => { 53 | const targetUrl = 
router.asPath; 54 | if (!targetUrl.includes("search=")) { 55 | router.push("/popular"); 56 | } 57 | const searchQuery = targetUrl.split("search=")[1]; 58 | 59 | fetchSearchVideos(searchQuery, setVideoDataState); 60 | 61 | return () => { 62 | setVideoDataState([]); 63 | }; 64 | }, []); 65 | 66 | return ; 67 | } 68 | -------------------------------------------------------------------------------- /frontend/pages/settings.js: -------------------------------------------------------------------------------- 1 | import { ToastProvider } from "react-toast-notifications"; 2 | import dynamic from "next/dynamic"; 3 | const Settings = dynamic(() => import("../components/Settings"), { 4 | ssr: false, 5 | }); 6 | 7 | export default function settings() { 8 | return ( 9 | 10 | 11 | 12 | ); 13 | } 14 | -------------------------------------------------------------------------------- /frontend/pages/subscriptions.js: -------------------------------------------------------------------------------- 1 | import Subscriptions from "../components/Subscriptions"; 2 | export default function subscriptions() { 3 | return ( 4 | 5 | ); 6 | } 7 | -------------------------------------------------------------------------------- /frontend/pages/watch.js: -------------------------------------------------------------------------------- 1 | import dynamic from "next/dynamic"; 2 | const Watch = dynamic( 3 | () => import("../components/Watch"), 4 | { ssr: false } 5 | ); 6 | export default function watch() { 7 | return ( 8 |
    9 | 10 |
    11 | ); 12 | } 13 | -------------------------------------------------------------------------------- /frontend/postcss.config.js: -------------------------------------------------------------------------------- 1 | // postcss.config.js 2 | module.exports = { 3 | plugins: { 4 | tailwindcss: {}, 5 | autoprefixer: {}, 6 | }, 7 | }; 8 | -------------------------------------------------------------------------------- /frontend/public/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/PrivOci/ReTube/36606b58c5d8a3a73f10a271625c1099ecce9730/frontend/public/favicon.ico -------------------------------------------------------------------------------- /frontend/public/vercel.svg: -------------------------------------------------------------------------------- 1 | 3 | 4 | -------------------------------------------------------------------------------- /frontend/routes/routes.js: -------------------------------------------------------------------------------- 1 | import { faFire, faNewspaper, faEdit, faUserCog } from "@fortawesome/free-solid-svg-icons"; 2 | 3 | const routes = [ 4 | { 5 | name: "Popular", 6 | faIcon: faFire, 7 | path: "/popular", 8 | }, 9 | { 10 | name: "Subscriptions", 11 | faIcon: faNewspaper, 12 | path: "/subscriptions", 13 | }, 14 | { 15 | name: "Settings", 16 | faIcon: faUserCog, 17 | path: "/settings", 18 | }, 19 | { 20 | name: "JSON View", 21 | faIcon: faEdit, 22 | path: "/json", 23 | }, 24 | ]; 25 | 26 | export default routes; 27 | -------------------------------------------------------------------------------- /frontend/tailwind.config.js: -------------------------------------------------------------------------------- 1 | module.exports = { 2 | mode: 'jit', 3 | purge: ['./pages/**/*.{js,ts,jsx,tsx}', './components/**/*.{js,ts,jsx,tsx}'], 4 | darkMode: 'media', // or 'media' or 'class' 5 | i18n: { 6 | locales: ["en-US"], 7 | defaultLocale: "en-US", 8 | }, 9 | theme: { 10 | 
extend: {}, 11 | }, 12 | variants: { 13 | extend: { 14 | backgroundColor: ["checked"], 15 | borderColor: ["checked"], 16 | zIndex: ["hover", "active"], 17 | }, 18 | }, 19 | plugins: [ 20 | require('@tailwindcss/aspect-ratio'), 21 | ], 22 | }; 23 | -------------------------------------------------------------------------------- /frontend/utils.js: -------------------------------------------------------------------------------- 1 | import _ from "lodash"; 2 | import humanizeDuration from "humanize-duration"; 3 | 4 | const BACKEND_URL = "http://localhost:8000"; 5 | 6 | const YT_API = `${BACKEND_URL}/api/youtube`; 7 | const LBRY_API = `${BACKEND_URL}/api/lbry`; 8 | const BITCHUTE_API = `${BACKEND_URL}/api/bitchute`; 9 | const RUBMLE_API = `${BACKEND_URL}/api/rumble`; 10 | 11 | export const YOUTUBE_SEARCH = `${BACKEND_URL}/api/youtube/search/`; 12 | export const LBRY_SEARCH = `${BACKEND_URL}/api/lbry/search/`; 13 | export const BITCHUTE_SEARCH = `${BACKEND_URL}/api/bitchute/search/`; 14 | export const RUMBLE_SEARCH = `${BACKEND_URL}/api/rumble/search/`; 15 | 16 | export const YOUTUBE_SEARCH_CHANNELS = `${BACKEND_URL}/api/youtube/channels`; 17 | export const LBRY_SEARCH_CHANNELS = `${BACKEND_URL}/api/lbry/channels`; 18 | export const RUMBLE_SEARCH_CHANNELS = `${BACKEND_URL}/api/rumble/channels`; 19 | 20 | const CHECK_GRAMMAR_API = `${BACKEND_URL}/api/check`; 21 | 22 | // shared 23 | const YOUTUBE = "yt"; 24 | const LBRY = "lb"; 25 | const BITCHUTE = "bc"; 26 | const RUBMLE = "rb"; 27 | 28 | const shortEnglishHumanizer = humanizeDuration.humanizer({ 29 | language: "shortEn", 30 | languages: { 31 | shortEn: { 32 | d: () => "", 33 | h: () => "", 34 | m: () => "", 35 | s: () => "", 36 | }, 37 | }, 38 | delimiter: ":", 39 | spacer: "", 40 | round: true, 41 | }); 42 | 43 | export const platforms = { 44 | yt: "YouTube", 45 | lb: "Lbry", 46 | bc: "BitChute", 47 | rb: "Rubmle", 48 | }; 49 | 50 | export const removeFromList = (myList, item) => { 51 | _.remove(myList, (i) => { 52 | 
return i === item; 53 | }); 54 | }; 55 | 56 | export const humanizeDurationSec = (sec) => { 57 | const msec = sec * 1000; 58 | return shortEnglishHumanizer(msec); 59 | }; 60 | 61 | /** 62 | * Used to save watched video URLs; 63 | * @param {string} url video url 64 | */ 65 | export const getIdFromVideo = (url) => { 66 | const details = videoUrlDetails(url); 67 | switch (details[0]) { 68 | case YOUTUBE: 69 | return `yt:${details[1]}`; 70 | case LBRY: 71 | return `lbry:${details[1]}`; 72 | case BITCHUTE: 73 | return `bt:${details[1]}`; 74 | case RUBMLE: 75 | return `rb:${details[1]}`; 76 | default: 77 | return null; 78 | } 79 | }; 80 | 81 | export const videoUrlDetails = (url) => { 82 | url = _.trim(url, "/"); 83 | const parsedUrl = new URL(url); 84 | const href_parsed = new URL(parsedUrl.href); 85 | 86 | let details = []; 87 | if (parsedUrl.hostname.toLowerCase().includes("youtube.com")) { 88 | const ytId = href_parsed.searchParams.get("v"); 89 | details[0] = YOUTUBE; 90 | details[1] = ytId; 91 | } else if (parsedUrl.hostname.toLowerCase().includes("odysee.com")) { 92 | const lbryId = _.trim(href_parsed.pathname, "/"); 93 | details[0] = LBRY; 94 | details[1] = lbryId; 95 | } else if (parsedUrl.hostname.toLowerCase().includes("bitchute.com")) { 96 | const bcId = _.trim(href_parsed.pathname, "/video/"); 97 | details[0] = BITCHUTE; 98 | details[1] = bcId; 99 | } else if (parsedUrl.hostname.toLowerCase().includes("rumble.com")) { 100 | const rbId_pre = _.trim(href_parsed.pathname, "/"); 101 | const rbId = _.trim(rbId_pre, ".hmtl"); 102 | details[0] = RUBMLE; 103 | details[1] = rbId; 104 | } 105 | 106 | return details; 107 | }; 108 | 109 | export const fetchDataSWR = async (url, search) => { 110 | if (url === "/popular") { 111 | return fetchPopularVideos(); 112 | } else if (search) { 113 | return fetchSearchResults(decodeURI(search)); 114 | } else { 115 | return fetchData(url); 116 | } 117 | }; 118 | 119 | export const isAnonymousChannel = (channel) => {}; 120 | 121 | 
/**
 * Resolve a channel URL (or a "*_popular" pseudo-channel alias) into
 * [platformCode, channelId, backendApiEndpoint].
 * @param {string} url - channel URL or one of yt_popular / lbry_popular /
 *   bitchute_popular / rb_popular
 * @returns {Array} [platform, id, api_url]; empty for unrecognized input
 */
export const channelUrlDetails = (url) => {
  let details = [];
  if (url === "yt_popular") {
    details[0] = YOUTUBE;
    details[1] = "popular";
    details[2] = `${YT_API}/p`;
    return details;
  } else if (url === "lbry_popular") {
    details[0] = LBRY;
    details[1] = "popular";
    details[2] = `${LBRY_API}/c`;
    return details;
  } else if (url === "bitchute_popular") {
    details[0] = BITCHUTE;
    details[1] = "popular";
    details[2] = `${BITCHUTE_API}/c`;
    return details;
  } else if (url === "rb_popular") {
    details[0] = RUBMLE;
    details[1] = "popular";
    details[2] = `${RUBMLE_API}/c`;
    return details;
  }

  // TODO(me): fix this, the function name is misleading
  // channels
  if (url.includes("youtube.com/")) {
    details[0] = YOUTUBE;
    details[1] = url.split("/channel/")[1];
    details[2] = `${YT_API}/c`;
  } else if (url.includes("lbry.tv/@")) {
    details[0] = LBRY;
    details[1] = url.split("lbry.tv/@")[1];
    details[2] = `${LBRY_API}/c`;
  } else if (url.includes("odysee.com/@")) {
    details[0] = LBRY;
    details[1] = url.split("odysee.com/@")[1];
    details[2] = `${LBRY_API}/c`;
  } else if (url.includes("bitchute.com/")) {
    details[0] = BITCHUTE;
    details[1] = _.trim(url.split("/channel/")[1], "/");
    details[2] = `${BITCHUTE_API}/c`;
  } else if (url.includes("rumble.com/")) {
    details[0] = RUBMLE;
    details[1] = _.trim(url.split("rumble.com/")[1], "/");
    details[2] = `${RUBMLE_API}/c`;
  }
  return details;
};

/**
 * Fetch JSON from the backend, normalizing the URL to end with "/"
 * (the FastAPI routes are declared with trailing slashes).
 * @param {string} target_url - backend endpoint
 * @param {Object} requestOptions - options passed straight to fetch()
 * @returns {Promise<Object>} parsed JSON body
 */
export const fetchJson = async (target_url, requestOptions) => {
  // make sure there is / at the end
  if (!target_url.endsWith("/")) {
    target_url = target_url.concat("/");
  }
  // Plain await instead of the old mixed `await fetch(...).then(...)`.
  const response = await fetch(target_url, requestOptions);
  return response.json();
};
/**
 * Fetch the content listing for a channel (or "*_popular" alias) from the
 * backend.
 * @param {string} url - channel URL or popular alias (see channelUrlDetails)
 * @returns {Promise<?Object>} payload with .content, or null when the
 *   backend reports it is not ready / failed
 */
export const fetchData = async (url) => {
  const [platform, id, api_url] = channelUrlDetails(url);

  const requestOptions = {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      platform: platform,
      id: id,
    }),
  };
  const data = await fetchJson(api_url, requestOptions);

  if (data.ready === false) {
    console.log(`failed to get videos: ${url}`);
    return null;
  }

  return data;
};

/**
 * Aggregate popular videos from YouTube, LBRY and BitChute in parallel.
 * Platforms whose fetch failed (fetchData resolved to null) are skipped
 * instead of crashing the whole feed — the previous code dereferenced
 * `.content` on a possibly-null result.
 * @returns {Promise<Object>} { platform: "all", ready: true, content: [...] }
 */
export const fetchPopularVideos = async () => {
  console.log("fetchPopularVideos");
  const results = await Promise.all([
    fetchData("yt_popular"),
    fetchData("lbry_popular"),
    fetchData("bitchute_popular"),
  ]);

  const allPopular = {
    platform: "all",
    ready: false,
    content: [],
  };

  for (const result of results) {
    // slice(1, 10) keeps entries 1..9 — presumably entry 0 is a
    // channel/meta record; TODO confirm against the backend payload.
    allPopular.content = allPopular.content.concat(
      (result?.content ?? []).slice(1, 10)
    );
  }

  allPopular.ready = true;
  return allPopular;
};

/**
 * POST a search query to one backend search endpoint.
 * @param {string} search_api_url - one of the *_SEARCH* endpoints above
 * @param {string} search_query - user query
 * @returns {Promise<Object>} parsed JSON result (max 15 items requested)
 */
export const fetchSearchAPi = async (search_api_url, search_query) => {
  const requestOptions = {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      query: search_query,
      max: 15,
    }),
  };

  return fetchJson(search_api_url, requestOptions);
};

// Read the spell-checker flag from the persisted config.
// Defaults to false when no config has been stored yet — the previous
// code threw a TypeError (JSON.parse(null) -> null, then null["..."]).
const is_spell_checker_enabled = () => {
  const config = JSON.parse(localStorage.getItem("config"));
  return config?.spell_checker ?? false;
};

/**
 * Ask the backend grammar/spell checker about a query string.
 * @param {string} str - sentence/query to check
 * @returns {Promise<Object>} { need_change, result }; a no-op object when
 *   the checker is disabled in settings
 */
export const checkSentence = async (str) => {
  if (!is_spell_checker_enabled()) {
    return { need_change: false, result: "" };
  }

  const requestOptions = {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({
      query: str,
    }),
  };

  return fetchJson(CHECK_GRAMMAR_API, requestOptions);
};

/**
 * Search all supported platforms and merge their results; attaches a
 * spelling suggestion when the checker proposes one.
 * @param {string} search_query - user query
 * @returns {Promise<Object>} { platform: "search", ready, content, suggestion? }
 */
export const fetchSearchResults = async (search_query) => {
  console.log(`searching: ${search_query}`);
  // Kick off the grammar check and every platform search up-front so they
  // run concurrently; results are folded in as each promise settles.
  const check_query = checkSentence(search_query);

  const allWait = [
    fetchSearchAPi(YOUTUBE_SEARCH_CHANNELS, search_query),
    fetchSearchAPi(LBRY_SEARCH_CHANNELS, search_query),
    fetchSearchAPi(YOUTUBE_SEARCH, search_query),
    fetchSearchAPi(LBRY_SEARCH, search_query),
    fetchSearchAPi(BITCHUTE_SEARCH, search_query),
  ];

  const allSearch = {
    platform: "search",
    ready: false,
    content: [],
  };

  for (const waitSub of allWait) {
    const result = await waitSub;
    if (!result || result.ready === false) {
      continue; // skip platforms that failed or are not ready
    }

    allSearch.content = allSearch.content.concat(result.content);
  }
  allSearch.ready = true;

  const check_result = await check_query;
  if (check_result.need_change) {
    allSearch.suggestion = check_result.result;
  }

  return allSearch;
};

/**
 * Human-readable "time ago" for a timestamp.
 * NOTE: every bucket only triggers for interval > 1, so e.g. 90 seconds
 * reports "90 seconds" rather than "1 minutes" — original behavior kept
 * because callers format the result directly into the UI.
 * @param {string|number|Date} timestamp - anything `new Date()` accepts
 * @returns {string} e.g. "3 days"
 */
export const timeSince = (timestamp) => {
  const seconds = Math.floor((new Date() - new Date(timestamp)) / 1000);

  let interval = Math.floor(seconds / 31536000);
  if (interval > 1) {
    return interval + " years";
  }

  interval = Math.floor(seconds / 2592000);
  if (interval > 1) {
    return interval + " months";
  }

  interval = Math.floor(seconds / 86400);
  if (interval > 1) {
    return interval + " days";
  }

  interval = Math.floor(seconds / 3600);
  if (interval > 1) {
    return interval + " hours";
  }

  interval = Math.floor(seconds / 60);
  if (interval > 1) {
    return interval + " minutes";
  }

  return Math.floor(seconds) + " seconds";
};