├── .dockerignore ├── .github └── workflows │ ├── codeql-analysis.yml │ ├── docker-build.yml │ └── lint.yml ├── .gitignore ├── Dockerfile ├── LICENSE ├── Makefile ├── README.md ├── cliff.toml ├── docker-compose.autofc2.yml ├── fc2_live_dl ├── FC2LiveDL.py ├── __init__.py ├── __main__.py ├── autofc2.py ├── fc2.py ├── ffmpeg.py ├── hls.py └── util.py ├── pyproject.toml ├── requirements.txt ├── scripts └── release.sh └── setup.cfg /.dockerignore: -------------------------------------------------------------------------------- 1 | *.ws 2 | *.ts 3 | *.mp4 4 | *.m4a 5 | *.json 6 | __pycache__/ 7 | dist/ 8 | build/ 9 | *.egg-info 10 | .install-cache/ 11 | .vim/ 12 | .github/ 13 | Dockerfile 14 | -------------------------------------------------------------------------------- /.github/workflows/codeql-analysis.yml: -------------------------------------------------------------------------------- 1 | # For most projects, this workflow file will not need changing; you simply need 2 | # to commit it to your repository. 3 | # 4 | # You may wish to alter this file to override the set of languages analyzed, 5 | # or to provide custom queries or build logic. 6 | # 7 | # ******** NOTE ******** 8 | # We have attempted to detect the languages in your repository. Please check 9 | # the `language` matrix defined below to confirm you have the correct set of 10 | # supported CodeQL languages. 
11 | # 12 | name: "CodeQL" 13 | 14 | on: 15 | push: 16 | branches: [ main ] 17 | pull_request: 18 | # The branches below must be a subset of the branches above 19 | branches: [ main ] 20 | schedule: 21 | - cron: '0 0 * * 0' 22 | 23 | jobs: 24 | analyze: 25 | name: Analyze 26 | runs-on: ubuntu-latest 27 | permissions: 28 | actions: read 29 | contents: read 30 | security-events: write 31 | 32 | strategy: 33 | fail-fast: false 34 | matrix: 35 | language: [ 'python' ] 36 | # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ] 37 | # Learn more about CodeQL language support at https://git.io/codeql-language-support 38 | 39 | steps: 40 | - name: Checkout repository 41 | uses: actions/checkout@v3 42 | 43 | # Initializes the CodeQL tools for scanning. 44 | - name: Initialize CodeQL 45 | uses: github/codeql-action/init@v2 46 | with: 47 | languages: ${{ matrix.language }} 48 | # If you wish to specify custom queries, you can do so here or in a config file. 49 | # By default, queries listed here will override any specified in a config file. 50 | # Prefix the list here with "+" to use these queries and those in the config file. 51 | # queries: ./path/to/local/query, your-org/your-repo/queries@main 52 | 53 | # Autobuild attempts to build any compiled languages (C/C++, C#, or Java). 54 | # If this step fails, then you should remove it and run the build manually (see below) 55 | - name: Autobuild 56 | uses: github/codeql-action/autobuild@v2 57 | 58 | # ℹ️ Command-line programs to run using the OS shell. 
59 | # 📚 https://git.io/JvXDl 60 | 61 | # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines 62 | # and modify them (or add more) to build your code if your project 63 | # uses a compiled language 64 | 65 | #- run: | 66 | # make bootstrap 67 | # make release 68 | 69 | - name: Perform CodeQL Analysis 70 | uses: github/codeql-action/analyze@v2 71 | -------------------------------------------------------------------------------- /.github/workflows/docker-build.yml: -------------------------------------------------------------------------------- 1 | name: docker-build 2 | 3 | on: 4 | push: 5 | branches: [ main ] 6 | pull_request: 7 | branches: [ main ] 8 | 9 | jobs: 10 | docker: 11 | runs-on: ubuntu-latest 12 | steps: 13 | - name: Checkout 14 | uses: actions/checkout@v2 15 | 16 | - name: Docker meta 17 | id: meta 18 | uses: docker/metadata-action@v3 19 | with: 20 | images: | 21 | ghcr.io/holoarchivists/fc2-live-dl 22 | tags: | 23 | type=raw,value=latest,enable=${{ github.ref == format('refs/heads/{0}', github.event.repository.default_branch) }} 24 | type=ref,event=branch 25 | type=ref,event=pr 26 | type=sha 27 | type=semver,pattern={{version}} 28 | type=semver,pattern={{major}}.{{minor}} 29 | type=semver,pattern={{major}} 30 | 31 | - name: Set up Docker Buildx 32 | uses: docker/setup-buildx-action@v1 33 | 34 | - name: Login to ghcr 35 | uses: docker/login-action@v1 36 | with: 37 | registry: ghcr.io 38 | username: ${{ github.actor }} 39 | password: ${{ secrets.GITHUB_TOKEN }} 40 | 41 | - name: Build and push 42 | uses: docker/build-push-action@v2 43 | with: 44 | context: . 
45 | platforms: linux/amd64,linux/arm64 46 | push: ${{ github.event_name != 'pull_request' }} 47 | tags: ${{ steps.meta.outputs.tags }} 48 | labels: ${{ steps.meta.outputs.labels }} 49 | cache-from: type=gha 50 | cache-to: type=gha,mode=max 51 | -------------------------------------------------------------------------------- /.github/workflows/lint.yml: -------------------------------------------------------------------------------- 1 | name: Lint 2 | 3 | on: 4 | push: {} 5 | pull_request: 6 | branches: [ main ] 7 | 8 | jobs: 9 | build: 10 | runs-on: ubuntu-latest 11 | strategy: 12 | matrix: 13 | python-version: ["3.8", "3.9", "3.10"] 14 | steps: 15 | - name: Checkout 16 | uses: actions/checkout@v2 17 | 18 | - name: Set up Python ${{ matrix.python-version }} 19 | uses: actions/setup-python@v2 20 | with: 21 | python-version: ${{ matrix.python-version }} 22 | 23 | - name: Install dependencies 24 | run: | 25 | pip install --upgrade pip 26 | pip install -r requirements.txt 27 | pip install mypy 28 | 29 | - name: Analysing the code with mypy 30 | run: | 31 | mypy . 
32 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.ws 2 | *.ts 3 | *.mp4 4 | *.m4a 5 | *.json 6 | *.png 7 | __pycache__/ 8 | dist/ 9 | build/ 10 | *.egg-info 11 | .install-cache/ 12 | .vim/ 13 | venv/ 14 | .venv/ -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM python:3.10-alpine3.15 AS base 2 | 3 | ENV PATH /app/.local/bin:$PATH 4 | RUN adduser -DSh /app -u 1000 app 5 | WORKDIR /app 6 | 7 | COPY --chown=1000:1000 setup.cfg pyproject.toml requirements.txt LICENSE ./ 8 | COPY --chown=1000:1000 fc2_live_dl fc2_live_dl 9 | 10 | RUN set -eux; \ 11 | apk add --no-cache ffmpeg; \ 12 | apk add --no-cache --virtual .build-deps \ 13 | gcc g++ make libffi-dev; \ 14 | su app -s /bin/sh -c 'pip install --no-cache --user .'; \ 15 | apk del --purge .build-deps; \ 16 | rm -rf /var/cache/apk/*; 17 | 18 | USER app 19 | CMD ["/app/.local/bin/fc2-live-dl"] 20 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2021 hizkifw 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY: clean clean-build clean-pyc release docker 2 | 3 | tag_hash=$(shell git rev-parse --short HEAD) 4 | tag_version=$(shell grep 'version' setup.cfg | sed 's/version = //') 5 | 6 | dist: clean 7 | python -m build 8 | 9 | release: clean 10 | ./scripts/release.sh 11 | 12 | publish: dist release 13 | git push --follow-tags origin main 14 | python -m twine upload dist/* 15 | 16 | clean: clean-build clean-pyc 17 | 18 | clean-build: 19 | rm -rf build dist *.egg-info 20 | 21 | clean-pyc: 22 | find . -name '*.pyc' \ 23 | -o -name '*.pyo' \ 24 | -o -name '*~' \ 25 | -o -name '__pycache__' \ 26 | -exec rm -fr {} + 27 | 28 | docker: 29 | docker build \ 30 | -t ghcr.io/holoarchivists/fc2-live-dl:latest \ 31 | -t ghcr.io/holoarchivists/fc2-live-dl:$(tag_hash) \ 32 | -t ghcr.io/holoarchivists/fc2-live-dl:$(tag_version) \ 33 | . 
34 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # fc2-live-dl 2 | 3 | > Tool to download FC2 live streams 4 | 5 | [![PyPI](https://img.shields.io/pypi/v/fc2-live-dl)](https://pypi.org/project/fc2-live-dl/ "PyPI") 6 | 7 | ## Requirements 8 | 9 | - Python 3.8 10 | - ffmpeg 11 | 12 | ## Features 13 | 14 | - Wait for a stream to start and automatically start recording 15 | - Save comment/chat logs 16 | - Authenticate with cookies (Netscape format, same one used with youtube-dl) 17 | - Remux recordings to .mp4/.m4a after it's done 18 | - Continuously monitor multiple streams in parallel and automatically start 19 | downloading when any of them goes online 20 | - Get notifications when streams come online via 21 | [Apprise](https://github.com/caronc/apprise) 22 | - Prometheus-compatible metrics 23 | 24 | ## Installation 25 | 26 | ### Using pip 27 | 28 | To install the latest stable version: 29 | 30 | ``` 31 | pip install --upgrade fc2-live-dl 32 | ``` 33 | 34 | To install the latest development version: 35 | 36 | ``` 37 | pip install --upgrade git+https://github.com/HoloArchivists/fc2-live-dl.git#egg=fc2-live-dl 38 | ``` 39 | 40 | ### Using docker 41 | 42 | ``` 43 | docker pull ghcr.io/holoarchivists/fc2-live-dl:latest 44 | ``` 45 | 46 | ## Usage 47 | 48 | ``` 49 | fc2-live-dl https://live.fc2.com/<...> 50 | ``` 51 | 52 | ``` 53 | usage: fc2-live-dl [-h] [-v] 54 | [--quality {150Kbps,400Kbps,1.2Mbps,2Mbps,3Mbps,sound}] 55 | [--latency {low,high,mid}] [--threads THREADS] [-o OUTPUT] 56 | [--no-remux] [-k] [-x] [--cookies COOKIES] [--write-chat] 57 | [--write-info-json] [--write-thumbnail] [--wait] 58 | [--wait-for-quality-timeout WAIT_FOR_QUALITY_TIMEOUT] 59 | [--poll-interval POLL_INTERVAL] 60 | [--log-level {silent,error,warn,info,debug,trace}] 61 | [--trust-env-proxy] [--dump-websocket] 62 | url 63 | 64 | positional arguments: 65 | url 
A live.fc2.com URL. 66 | 67 | options: 68 | -h, --help show this help message and exit 69 | -v, --version show program's version number and exit 70 | --quality {150Kbps,400Kbps,1.2Mbps,2Mbps,3Mbps,sound} 71 | Quality of the stream to download. Default is 3Mbps. 72 | --latency {low,high,mid} 73 | Stream latency. Select a higher latency if 74 | experiencing stability issues. Default is mid. 75 | --threads THREADS The size of the thread pool used to download segments. 76 | Default is 1. 77 | -o OUTPUT, --output OUTPUT 78 | Set the output filename format. Supports formatting 79 | options similar to youtube-dl. Default is '%(date)s 80 | %(title)s (%(channel_name)s).%(ext)s' 81 | 82 | Available format options: 83 | channel_id (string): ID of the broadcast 84 | channel_name (string): broadcaster's profile name 85 | date (string): local date YYYY-MM-DD 86 | time (string): local time HHMMSS 87 | ext (string): file extension 88 | title (string): title of the live broadcast 89 | --no-remux Do not remux recordings into mp4/m4a after it is 90 | finished. 91 | -k, --keep-intermediates 92 | Keep the raw .ts recordings after it has been remuxed. 93 | -x, --extract-audio Generate an audio-only copy of the stream. 94 | --cookies COOKIES Path to a cookies file. 95 | --write-chat Save live chat into a json file. 96 | --write-info-json Dump output stream information into a json file. 97 | --write-thumbnail Download thumbnail into a file 98 | --wait Wait until the broadcast goes live, then start 99 | recording. 100 | --wait-for-quality-timeout WAIT_FOR_QUALITY_TIMEOUT 101 | If the requested quality is not available, keep 102 | retrying up to this many seconds before falling back 103 | to the next best quality. Default is 15 seconds. 104 | --poll-interval POLL_INTERVAL 105 | How many seconds between checks to see if broadcast is 106 | live. Default is 5. 107 | --log-level {silent,error,warn,info,debug,trace} 108 | Log level verbosity. Default is info. 
109 | --trust-env-proxy Trust environment variables for proxy settings. 110 | --dump-websocket Dump all websocket communication to a file for 111 | debugging 112 | 113 | ``` 114 | 115 | ### Using proxies 116 | 117 | To use a HTTP proxy, pass the `--trust-env-proxy` flag and set your proxy 118 | settings in the `HTTP_PROXY`, `HTTPS_PROXY`, `WS_PROXY` or `WSS_PROXY` 119 | environment variables. If not present, proxy settings are taken from the 120 | [`~/.netrc` file](https://www.gnu.org/software/inetutils/manual/html_node/The-_002enetrc-file.html). 121 | 122 | For more information, check 123 | [aiohttp's documentation](https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support). 124 | 125 | ## autofc2 126 | 127 | > Monitor multiple channels at the same time, and automatically start 128 | > downloading when any of them goes online 129 | 130 | ``` 131 | autofc2 --config autofc2.json 132 | ``` 133 | 134 | Where the `autofc2.json` file looks like this: 135 | 136 | ```json 137 | { 138 | "autofc2": { 139 | "log_level": "info", 140 | "debounce_time": 300, 141 | "metrics": { 142 | "host": "0.0.0.0", 143 | "port": 9090, 144 | "path": "/metrics" 145 | } 146 | }, 147 | "default_params": { 148 | "quality": "3Mbps", 149 | "latency": "mid", 150 | "threads": 4, 151 | "outtmpl": "%(channel_name)s %(_en_name)s/%(date)s %(title)s.%(ext)s", 152 | "write_chat": false, 153 | "write_info_json": false, 154 | "write_thumbnail": false, 155 | "wait_for_live": true, 156 | "wait_for_quality_timeout": 15, 157 | "wait_poll_interval": 5, 158 | "cookies_file": null, 159 | "remux": true, 160 | "keep_intermediates": false, 161 | "extract_audio": true, 162 | "trust_env_proxy": false 163 | }, 164 | "notifications": [ 165 | { 166 | "url": "discord://{WebhookID}/{WebhookToken}", 167 | "message": "%(channel_name)s is live!\nhttps://live.fc2.com/%(channel_id)s" 168 | } 169 | ], 170 | "channels": { 171 | "91544481": { 172 | "_en_name": "Necoma Karin", 173 | "quality": "sound", 174 | 
"write_thumbnail": true 175 | }, 176 | "72364867": { "_en_name": "Uno Sakura" }, 177 | "40740626": { "_en_name": "Komae Nadeshiko" }, 178 | "81840800": { "_en_name": "Ronomiya Hinagiku" } 179 | } 180 | } 181 | ``` 182 | 183 | The `default_params` object will be the parameters applied to all of the 184 | channels. Check the usage section above for more information on each parameter. 185 | Note that `wait_for_live` needs to be set to `true` for the script to work 186 | properly. You can also override the parameters per-channel. 187 | 188 | Arbitrary parameters can be specified by prefixing them with `_`, and will be 189 | accessible in `outtmpl`. This is useful for specifying custom filenames just 190 | like in the example above. In the example I'm using `_en_name`, but you can use 191 | anything as long as it starts with `_`. 192 | 193 | For notifications, the URL follows the 194 | [Apprise syntax](https://github.com/caronc/apprise#supported-notifications). For 195 | example, if you want to use Discord webhooks, use the `discord://` like so: 196 | 197 | - Original URL: `https://discord.com/api/webhooks/12341234/abcdabcd` 198 | - Turns into: `discord://12341234/abcdabcd` 199 | 200 | You can find out more about the different types of notifiers and how to 201 | configure them on 202 | [Apprise's GitHub](https://github.com/caronc/apprise#supported-notifications). 203 | 204 | The `message` of the notifications follow the same syntax as `outtmpl`. 205 | 206 | Prometheus-compatible metrics is optionally configurable with `autofc2.metrics`. 207 | If you don't want a metrics webserver, remove the `autofc2.metrics` key. 208 | 209 | **NOTE Windows users**: When specifying a file path (e.g. for cookies) in the 210 | json, double up your backslashes, for example: 211 | `"cookies_file": "C:\\Documents\\cookies.txt"`. 
212 | 213 | Once configured, you can run the script: 214 | 215 | ``` 216 | autofc2 --config autofc2.json 217 | ``` 218 | 219 | If you need to change the config json, feel free to change it while the script 220 | is running. It will reload the file if it detects any changes. Note that 221 | parameters will not be updated for ongoing streams (i.e. if the script is 222 | recording a stream and you change its settings, it will continue recording with 223 | the old settings and will only apply the new configuration to future 224 | recordings). 225 | 226 | ## Running autofc2 with Docker 227 | 228 | You can run autofc2 using the Docker image by mounting your config json and your 229 | output directory, as well as overriding the default `cmd` with `autofc2` like 230 | so: 231 | 232 | ```bash 233 | # The following mounts `./autofc2.json` into the correct location in the docker 234 | # container, as well as a `/recordings` folder for the recordings. You'll need to 235 | # set the `outtmpl` to something like `/recordings/%(channel_name)s ...` 236 | docker run --rm \ 237 | -v $(pwd)/autofc2.json:/app/autofc2.json:ro \ 238 | -v $(pwd)/recordings:/recordings \ 239 | -e TZ=Asia/Tokyo \ 240 | ghcr.io/holoarchivists/fc2-live-dl:latest \ 241 | autofc2 --config /app/autofc2.json 242 | ``` 243 | 244 | The above command runs the container in the foreground. If you want it to keep 245 | running in the background, you can replace the `--rm` flag with `-d`. The `TZ` 246 | environment variable can be set to your local timezone, and will affect the timestamps in 247 | the logs. 248 | 249 | **⚠️ IMPORTANT NOTE**: Make sure you set your `outtmpl` properly to match the 250 | bind mounts (`-v`), and test that the files are properly saved to your computer. 251 | **You will lose your recordings** if you don't configure this properly!
252 | 253 | You can also use docker-compose to keep your config in a single file: 254 | 255 | - Download the 256 | [`docker-compose.autofc2.yml`](https://raw.githubusercontent.com/HoloArchivists/fc2-live-dl/main/docker-compose.autofc2.yml) 257 | file into some folder, and name it `docker-compose.yml`. 258 | - Place your `autofc2.json` in the same folder and modify the `outtmpl` so it 259 | starts with `/recordings/`: 260 | 261 | ``` 262 | "outtmpl": "/recordings/%(channel_name)s %(_en_name)s/%(date)s %(title)s.%(ext)s" 263 | ``` 264 | 265 | - Run it! 266 | 267 | ```bash 268 | # Prepare the recordings directory with the right permissions 269 | mkdir ./recordings && chown 1000:1000 ./recordings 270 | 271 | # Run the thing 272 | docker-compose up -d 273 | 274 | # Check the logs 275 | docker-compose logs -f 276 | 277 | # If you wanna kill it 278 | docker-compose down 279 | ``` 280 | 281 | ## Notes 282 | 283 | - FC2 does not allow multiple connections to the same stream, so you can't watch 284 | in the browser while downloading. You can instead preview the file being 285 | downloaded using `mpv` or `vlc`. Alternatively, log in with an account on your 286 | browser. 287 | - Recording only starts from when you start the tool. This tool cannot "seek 288 | back" and record streams from the start. 289 | - If you can't run `fc2-live-dl` or `autofc2`, try uninstalling and reinstalling 290 | with `pip uninstall fc2-live-dl`. 291 | 292 | ## Known issues 293 | 294 | - Tested to work under Linux. It should work on Windows, but no guarantees. If 295 | you're facing any issues on Windows, please 296 | [file an issue](https://github.com/HoloArchivists/fc2-live-dl/issues/new). 297 | - autofc2 will freak out over a private/paid streams. 298 | - `--wait` doesn't work sometimes because FC2 would announce that the stream is 299 | live before the playlist is available. Use `autofc2` if you want to make sure 300 | streams get saved. 
301 | - When monitoring many channels with `autofc2`, if you face any 5xx errors, try 302 | increasing the `wait_poll_interval` to something higher. 303 | -------------------------------------------------------------------------------- /cliff.toml: -------------------------------------------------------------------------------- 1 | # configuration file for git-cliff (0.1.0) 2 | 3 | [changelog] 4 | # changelog header 5 | header = """ 6 | # Changelog\n 7 | """ 8 | # template for the changelog body 9 | # https://tera.netlify.app/docs/#introduction 10 | body = """ 11 | {% if version %}\ 12 | ## [{{ version | trim_start_matches(pat="v") }}] - {{ timestamp | date(format="%Y-%m-%d") }} 13 | {% else %}\ 14 | ## [unreleased] 15 | {% endif %}\ 16 | {% for group, commits in commits | group_by(attribute="group") %} 17 | ### {{ group | upper_first }} 18 | {% for commit in commits %} 19 | - {% if commit.breaking %}[**breaking**] {% endif %}{{ commit.message | upper_first }}\ 20 | {% endfor %} 21 | {% endfor %}\n 22 | """ 23 | # remove the leading and trailing whitespaces from the template 24 | trim = true 25 | # changelog footer 26 | footer = """ 27 | 28 | """ 29 | 30 | [git] 31 | # parse the commits based on https://www.conventionalcommits.org 32 | conventional_commits = true 33 | # filter out the commits that are not conventional 34 | filter_unconventional = true 35 | # regex for parsing and grouping commits 36 | commit_parsers = [ 37 | { message = "^feat", group = "Features"}, 38 | { message = "^fix", group = "Bug Fixes"}, 39 | { message = "^doc", group = "Documentation"}, 40 | { message = "^perf", group = "Performance"}, 41 | { message = "^refactor", group = "Refactor"}, 42 | { message = "^style", group = "Styling"}, 43 | { message = "^test", group = "Testing"}, 44 | { message = "^chore\\(release\\): prepare for", skip = true}, 45 | { message = "^chore", group = "Miscellaneous Tasks"}, 46 | { body = ".*security", group = "Security"}, 47 | ] 48 | # filter out the commits that 
are not matched by commit parsers 49 | filter_commits = false 50 | # glob pattern for matching git tags 51 | tag_pattern = "v[0-9]*" 52 | # regex for skipping tags 53 | skip_tags = "v0.1.0-beta.1" 54 | # regex for ignoring tags 55 | ignore_tags = "" 56 | # sort the tags topologically 57 | topo_order = false 58 | # sort the commits inside sections by oldest/newest order 59 | sort_commits = "oldest" 60 | -------------------------------------------------------------------------------- /docker-compose.autofc2.yml: -------------------------------------------------------------------------------- 1 | # Sample docker-compose.yml file for autofc2 2 | # Check the README for more information 3 | version: '3' 4 | services: 5 | autofc2: 6 | image: ghcr.io/holoarchivists/fc2-live-dl:latest 7 | command: autofc2 8 | volumes: 9 | - ./autofc2.json:/app/autofc2.json:ro 10 | - ./recordings:/recordings 11 | restart: unless-stopped 12 | environment: 13 | # Feel free to update this to your local timezone. This setting influences 14 | # the timestamps on the program's logs. 
15 | - TZ=Asia/Tokyo 16 | -------------------------------------------------------------------------------- /fc2_live_dl/FC2LiveDL.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import asyncio 4 | import http.cookies 5 | import traceback 6 | import inspect 7 | import json 8 | import os 9 | import pathlib 10 | import time 11 | from datetime import datetime 12 | from enum import Enum 13 | 14 | import aiohttp 15 | 16 | from .fc2 import FC2LiveStream, FC2WebSocket 17 | from .ffmpeg import FFMpeg 18 | from .hls import HLSDownloader 19 | from .util import Logger, sanitize_filename 20 | 21 | 22 | class CallbackEvent: 23 | class Type(Enum): 24 | WAITING_FOR_ONLINE = 1 25 | STREAM_ONLINE = 2 26 | WAITING_FOR_TARGET_QUALITY = 3 27 | GOT_HLS_URL = 4 28 | FRAGMENT_PROGRESS = 5 29 | MUXING = 6 30 | 31 | def __str__(self): 32 | return self.name 33 | 34 | def __init__(self, instance, channel_id, type: Type, data=None): 35 | self.instance = instance 36 | self.channel_id = channel_id 37 | self.type = type 38 | self.data = data 39 | 40 | def __repr__(self): 41 | return f"CallbackEvent({self.channel_id}, {self.type}, {self.data})" 42 | 43 | 44 | class FC2LiveDL: 45 | # Constants 46 | STREAM_QUALITY = { 47 | "150Kbps": 10, 48 | "400Kbps": 20, 49 | "1.2Mbps": 30, 50 | "2Mbps": 40, 51 | "3Mbps": 50, 52 | "sound": 90, 53 | } 54 | STREAM_LATENCY = { 55 | "low": 0, 56 | "high": 1, 57 | "mid": 2, 58 | } 59 | DEFAULT_PARAMS = { 60 | "quality": "3Mbps", 61 | "latency": "mid", 62 | "threads": 1, 63 | "outtmpl": "%(date)s %(title)s (%(channel_name)s).%(ext)s", 64 | "write_chat": False, 65 | "write_info_json": False, 66 | "write_thumbnail": False, 67 | "wait_for_live": False, 68 | "wait_for_quality_timeout": 15, 69 | "wait_poll_interval": 5, 70 | "cookies_file": None, 71 | "remux": True, 72 | "keep_intermediates": False, 73 | "extract_audio": False, 74 | "trust_env_proxy": False, 75 | "dump_websocket": False, 76 | } 77 | 78 
| def __init__(self, params={}, callback=None): 79 | self._logger = Logger("fc2") 80 | self._session = None 81 | self._background_tasks = [] 82 | 83 | self._callback = callback if callback is not None else lambda event: None 84 | self._callback_is_coroutine = inspect.iscoroutinefunction(self._callback) 85 | 86 | self.params = json.loads(json.dumps(self.DEFAULT_PARAMS)) 87 | self.params.update(params) 88 | # Validate outtmpl 89 | self._format_outtmpl() 90 | 91 | # Parse cookies 92 | self._cookie_jar = aiohttp.CookieJar() 93 | cookies_file = self.params["cookies_file"] 94 | if cookies_file is not None: 95 | self._logger.info("Loading cookies from", cookies_file) 96 | cookies = self._parse_cookies_file(cookies_file) 97 | self._cookie_jar.update_cookies(cookies) 98 | 99 | async def __aenter__(self): 100 | self._session = aiohttp.ClientSession( 101 | cookie_jar=self._cookie_jar, 102 | trust_env=self.params["trust_env_proxy"], 103 | ) 104 | self._loop = asyncio.get_running_loop() 105 | return self 106 | 107 | async def __aexit__(self, *err): 108 | self._logger.trace("exit", err) 109 | await self._session.close() 110 | # Sleep for 250ms to allow SSL connections to close. 
111 | # See: https://github.com/aio-libs/aiohttp/issues/1925 112 | # See: https://github.com/aio-libs/aiohttp/issues/4324 113 | await asyncio.sleep(0.250) 114 | self._session = None 115 | 116 | def _callback_handler( 117 | self, 118 | instance, 119 | channel_id, 120 | type: CallbackEvent.Type, 121 | data=None, 122 | ): 123 | event = CallbackEvent(instance, channel_id, type, data) 124 | loop = asyncio.get_running_loop() 125 | 126 | if self._callback_is_coroutine: 127 | loop.create_task(self._callback(event)) 128 | else: 129 | loop.run_in_executor(None, self._callback, event) 130 | 131 | async def download(self, channel_id): 132 | # Check ffmpeg 133 | if not await FFMpeg.is_available(): 134 | if self.params["remux"]: 135 | self._logger.error( 136 | "ffmpeg not found in PATH, remuxing is not available" 137 | ) 138 | self._logger.error( 139 | "please install ffmpeg or disable remuxing with --no-remux" 140 | ) 141 | raise FileNotFoundError(FFMpeg.FFMPEG_BIN) 142 | 143 | # Initialize 144 | self._logger = Logger("fc2 " + channel_id) 145 | tasks = [] 146 | fname_stream = None 147 | try: 148 | live = FC2LiveStream(self._session, channel_id) 149 | 150 | self._logger.info("Fetching stream info") 151 | 152 | is_online = await live.is_online() 153 | if not is_online: 154 | if not self.params["wait_for_live"]: 155 | raise FC2LiveStream.NotOnlineException() 156 | self._callback_handler( 157 | self, 158 | channel_id, 159 | CallbackEvent.Type.WAITING_FOR_ONLINE, 160 | ) 161 | await live.wait_for_online(self.params["wait_poll_interval"]) 162 | 163 | meta = await live.get_meta(refetch=False) 164 | self._callback_handler( 165 | self, 166 | channel_id, 167 | CallbackEvent.Type.STREAM_ONLINE, 168 | meta, 169 | ) 170 | 171 | fname_info = self._prepare_file(meta, "info.json") 172 | fname_thumb = self._prepare_file(meta, "png") 173 | fname_stream = self._prepare_file(meta, "ts") 174 | fname_chat = self._prepare_file(meta, "fc2chat.json") 175 | fname_muxed = self._prepare_file( 176 | meta, 
"m4a" if self.params["quality"] == "sound" else "mp4" 177 | ) 178 | fname_audio = self._prepare_file(meta, "m4a") 179 | fname_websocket = ( 180 | self._prepare_file(meta, "ws") 181 | if self.params["dump_websocket"] 182 | else None 183 | ) 184 | 185 | if self.params["write_info_json"]: 186 | self._logger.info("Writing info json to", fname_info) 187 | with open(fname_info, "w") as f: 188 | f.write(json.dumps(meta)) 189 | 190 | if self.params["write_thumbnail"]: 191 | self._logger.info("Writing thumbnail to", fname_thumb) 192 | try: 193 | thumb_url = meta["channel_data"]["image"] 194 | async with self._session.get(thumb_url) as resp: 195 | with open(fname_thumb, "wb") as f: 196 | async for data in resp.content.iter_chunked(1024): 197 | f.write(data) 198 | except Exception as e: 199 | self._logger.error("Failed to download thumbnail", e) 200 | 201 | ws_url = await live.get_websocket_url() 202 | self._logger.info("Found websocket url") 203 | async with FC2WebSocket( 204 | self._session, ws_url, output_file=fname_websocket 205 | ) as ws: 206 | started = time.time() 207 | mode = self._get_mode() 208 | got_mode = None 209 | hls_url = None 210 | 211 | # Wait for the selected quality to be available 212 | while ( 213 | time.time() - started < self.params["wait_for_quality_timeout"] 214 | and got_mode != mode 215 | ): 216 | hls_info = await ws.get_hls_information() 217 | hls_url, got_mode = self._get_hls_url(hls_info, mode) 218 | 219 | # Log a warning if the requested mode is not available 220 | if got_mode != mode: 221 | self._logger.warn( 222 | "Requested quality", 223 | self._format_mode(mode), 224 | "is not available, waiting ({}/{}s)".format( 225 | round(time.time() - started), 226 | self.params["wait_for_quality_timeout"], 227 | ), 228 | ) 229 | self._callback_handler( 230 | self, 231 | channel_id, 232 | CallbackEvent.Type.WAITING_FOR_TARGET_QUALITY, 233 | { 234 | "requested": self._format_mode(mode), 235 | "available": self._format_mode(got_mode), 236 | "hls_info": 
hls_info, 237 | }, 238 | ) 239 | await asyncio.sleep(1) 240 | 241 | if got_mode != mode: 242 | self._logger.warn( 243 | "Timeout reached, falling back to next best quality", 244 | self._format_mode(got_mode), 245 | ) 246 | 247 | self._callback_handler( 248 | self, 249 | channel_id, 250 | CallbackEvent.Type.GOT_HLS_URL, 251 | { 252 | "requested": self._format_mode(mode), 253 | "available": self._format_mode(got_mode), 254 | "hls_url": hls_url, 255 | "meta": meta, 256 | }, 257 | ) 258 | 259 | self._logger.info("Received HLS info") 260 | 261 | coros = [] 262 | 263 | coros.append(ws.wait_disconnection()) 264 | 265 | self._logger.info("Writing stream to", fname_stream) 266 | coros.append(self._download_stream(channel_id, hls_url, fname_stream)) 267 | 268 | if self.params["write_chat"]: 269 | self._logger.info("Writing chat to", fname_chat) 270 | coros.append(self._download_chat(ws, fname_chat)) 271 | 272 | tasks = [asyncio.create_task(coro) for coro in coros] 273 | 274 | self._logger.debug("Starting", len(tasks), "tasks") 275 | _exited, _pending = await asyncio.wait( 276 | tasks, return_when=asyncio.FIRST_COMPLETED 277 | ) 278 | self._logger.debug("Tasks exited") 279 | 280 | while len(_pending) > 0: 281 | pending_task = _pending.pop() 282 | self._logger.debug("Cancelling pending task", pending_task) 283 | pending_task.cancel() 284 | 285 | exited = _exited.pop() 286 | self._logger.debug("Exited task was", exited) 287 | if exited.exception() is not None: 288 | raise exited.exception() 289 | except asyncio.CancelledError: 290 | self._logger.error("Interrupted by user") 291 | except FC2WebSocket.ServerDisconnection: 292 | self._logger.error("Server disconnection") 293 | self._logger.error(traceback.format_exc()) 294 | except FC2WebSocket.StreamEnded: 295 | self._logger.info("Stream ended") 296 | finally: 297 | self._logger.debug("Cancelling tasks") 298 | for task in tasks: 299 | if not task.done(): 300 | self._logger.debug("Cancelling", task) 301 | task.cancel() 302 | await 
task 303 | 304 | if ( 305 | fname_stream is not None 306 | and self.params["remux"] 307 | and os.path.isfile(fname_stream) 308 | ): 309 | self._logger.info("Remuxing stream to", fname_muxed) 310 | await self._remux_stream(channel_id, fname_stream, fname_muxed) 311 | self._logger.debug("Finished remuxing stream", fname_muxed) 312 | 313 | if self.params["extract_audio"]: 314 | self._logger.info("Extracting audio to", fname_audio) 315 | await self._remux_stream( 316 | channel_id, fname_stream, fname_audio, extra_flags=["-vn"] 317 | ) 318 | self._logger.debug("Finished remuxing stream", fname_muxed) 319 | 320 | if not self.params["keep_intermediates"] and os.path.isfile(fname_muxed): 321 | self._logger.info("Removing intermediate files") 322 | os.remove(fname_stream) 323 | else: 324 | self._logger.debug("Not removing intermediates") 325 | else: 326 | self._logger.debug("Not remuxing stream") 327 | 328 | self._logger.info("Done") 329 | 330 | async def _download_stream(self, channel_id, hls_url, fname): 331 | def sizeof_fmt(num, suffix="B"): 332 | for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]: 333 | if abs(num) < 1024.0: 334 | return f"{num:3.1f}{unit}{suffix}" 335 | num /= 1024.0 336 | return f"{num:.1f}Yi{suffix}" 337 | 338 | try: 339 | async with HLSDownloader( 340 | self._session, hls_url, self.params["threads"] 341 | ) as hls: 342 | with open(fname, "wb") as out: 343 | n_frags = 0 344 | total_size = 0 345 | async for frag in hls.read(): 346 | n_frags += 1 347 | total_size += len(frag) 348 | out.write(frag) 349 | self._logger.info( 350 | "Downloaded", 351 | n_frags, 352 | "fragments,", 353 | sizeof_fmt(total_size), 354 | inline=True, 355 | ) 356 | self._callback_handler( 357 | self, 358 | channel_id, 359 | CallbackEvent.Type.FRAGMENT_PROGRESS, 360 | { 361 | "fragments_downloaded": n_frags, 362 | "total_size": total_size, 363 | }, 364 | ) 365 | except asyncio.CancelledError: 366 | self._logger.debug("_download_stream cancelled") 367 | except Exception as 
ex: 368 | self._logger.error(ex) 369 | 370 | async def _remux_stream(self, channel_id, ifname, ofname, *, extra_flags=[]): 371 | mux_flags = [ 372 | "-y", 373 | "-hide_banner", 374 | "-loglevel", 375 | "fatal", 376 | "-stats", 377 | "-i", 378 | ifname, 379 | *extra_flags, 380 | "-c", 381 | "copy", 382 | "-movflags", 383 | "faststart", 384 | ofname, 385 | ] 386 | async with FFMpeg(mux_flags) as mux: 387 | self._logger.info("Remuxing stream", inline=True) 388 | self._callback_handler(self, channel_id, CallbackEvent.Type.MUXING) 389 | while await mux.print_status(): 390 | pass 391 | 392 | async def _download_chat(self, ws, fname): 393 | with open(fname, "w") as f: 394 | while True: 395 | comment = await ws.comments.get() 396 | f.write(json.dumps(comment)) 397 | f.write("\n") 398 | 399 | def _get_hls_url(self, hls_info, mode): 400 | p_merged = self._merge_playlists(hls_info) 401 | p_sorted = self._sort_playlists(p_merged) 402 | playlist = self._get_playlist_or_best(p_sorted, mode) 403 | return playlist["url"], playlist["mode"] 404 | 405 | def _get_playlist_or_best(self, sorted_playlists, mode): 406 | playlist = None 407 | 408 | if len(sorted_playlists) == 0: 409 | raise FC2WebSocket.EmptyPlaylistException() 410 | 411 | # Find the playlist with matching (quality, latency) mode 412 | for p in sorted_playlists: 413 | if p["mode"] == mode: 414 | playlist = p 415 | 416 | # If no playlist matches, ignore the quality and find the best 417 | # one matching the latency 418 | if playlist is None: 419 | for p in sorted_playlists: 420 | _, p_latency = self._format_mode(p["mode"]) 421 | _, r_latency = self._format_mode(mode) 422 | if p_latency == r_latency: 423 | playlist = p 424 | break 425 | 426 | # If no playlist matches, return the first one 427 | if playlist is None: 428 | playlist = sorted_playlists[0] 429 | 430 | return playlist 431 | 432 | def _sort_playlists(self, merged_playlists): 433 | def key_map(playlist): 434 | mode = playlist["mode"] 435 | if mode >= 90: 436 | 
return mode - 90 437 | return mode 438 | 439 | return sorted(merged_playlists, reverse=True, key=key_map) 440 | 441 | def _merge_playlists(self, hls_info): 442 | playlists = [] 443 | for name in ["playlists", "playlists_high_latency", "playlists_middle_latency"]: 444 | if name in hls_info: 445 | playlists.extend(hls_info[name]) 446 | return playlists 447 | 448 | def _get_mode(self): 449 | mode = 0 450 | mode += self.STREAM_QUALITY[self.params["quality"]] 451 | mode += self.STREAM_LATENCY[self.params["latency"]] 452 | return mode 453 | 454 | def _format_mode(self, mode): 455 | def dict_search(haystack, needle): 456 | return list(haystack.keys())[list(haystack.values()).index(needle)] 457 | 458 | latency = dict_search(self.STREAM_LATENCY, mode % 10) 459 | quality = dict_search(self.STREAM_QUALITY, mode // 10 * 10) 460 | return quality, latency 461 | 462 | def _prepare_file(self, meta=None, ext=""): 463 | def get_unique_name(meta, ext): 464 | n = 0 465 | while True: 466 | extn = ext if n == 0 else "{}.{}".format(n, ext) 467 | fname = self._format_outtmpl(meta, {"ext": extn}) 468 | n += 1 469 | if not os.path.exists(fname): 470 | return fname 471 | 472 | fname = get_unique_name(meta, ext) 473 | fpath = pathlib.Path(fname) 474 | fpath.parent.mkdir(parents=True, exist_ok=True) 475 | return fname 476 | 477 | @classmethod 478 | def get_format_info(cls, *, meta=None, params={}, sanitize=False): 479 | finfo = { 480 | "channel_id": "", 481 | "channel_name": "", 482 | "date": datetime.now().strftime("%F"), 483 | "time": datetime.now().strftime("%H%M%S"), 484 | "title": "", 485 | "ext": "", 486 | } 487 | 488 | sanitizer = sanitize_filename if sanitize else lambda x: x 489 | 490 | if meta is not None: 491 | finfo["channel_id"] = sanitizer(meta["channel_data"]["channelid"]) 492 | finfo["channel_name"] = sanitizer(meta["profile_data"]["name"]) 493 | finfo["title"] = sanitizer(meta["channel_data"]["title"]) 494 | 495 | for key in params: 496 | if key.startswith("_"): 497 | 
finfo[key] = params[key] 498 | 499 | return finfo 500 | 501 | def _format_outtmpl(self, meta=None, overrides={}): 502 | finfo = FC2LiveDL.get_format_info( 503 | meta=meta, 504 | params=self.params, 505 | sanitize=True, 506 | ) 507 | finfo.update(overrides) 508 | 509 | formatted = self.params["outtmpl"] % finfo 510 | if formatted.startswith("-"): 511 | formatted = "_" + formatted 512 | 513 | return formatted 514 | 515 | def _parse_cookies_file(self, cookies_file): 516 | cookies = http.cookies.SimpleCookie() 517 | with open(cookies_file, "r") as cf: 518 | for line in cf: 519 | try: 520 | domain, _flag, path, secure, _expiration, name, value = [ 521 | t.strip() for t in line.split("\t") 522 | ] 523 | cookies[name] = value 524 | cookies[name]["domain"] = domain.replace("#HttpOnly_", "") 525 | cookies[name]["path"] = path 526 | cookies[name]["secure"] = secure 527 | cookies[name]["httponly"] = domain.startswith("#HttpOnly_") 528 | except Exception as ex: 529 | self._logger.trace(line, repr(ex), str(ex)) 530 | return cookies 531 | -------------------------------------------------------------------------------- /fc2_live_dl/__init__.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import asyncio 3 | import json 4 | import sys 5 | from importlib.metadata import version 6 | 7 | from .FC2LiveDL import FC2LiveDL 8 | from .util import Logger, SmartFormatter 9 | 10 | try: 11 | __version__ = version(__name__) 12 | except: 13 | __version__ = "unknown" 14 | 15 | ABOUT = { 16 | "name": "fc2-live-dl", 17 | "version": __version__, 18 | "date": "2022-01-12", 19 | "description": "Download fc2 livestreams", 20 | "author": "hizkifw", 21 | "license": "MIT", 22 | "url": "https://github.com/HoloArchivists/fc2-live-dl", 23 | } 24 | 25 | 26 | async def _main(args): 27 | version = "%(name)s v%(version)s" % ABOUT 28 | parser = argparse.ArgumentParser(formatter_class=SmartFormatter) 29 | parser.add_argument("url", help="A live.fc2.com 
URL.") 30 | 31 | parser.add_argument("-v", "--version", action="version", version=version) 32 | parser.add_argument( 33 | "--quality", 34 | choices=FC2LiveDL.STREAM_QUALITY.keys(), 35 | default=FC2LiveDL.DEFAULT_PARAMS["quality"], 36 | help="Quality of the stream to download. Default is {}.".format( 37 | FC2LiveDL.DEFAULT_PARAMS["quality"] 38 | ), 39 | ) 40 | parser.add_argument( 41 | "--latency", 42 | choices=FC2LiveDL.STREAM_LATENCY.keys(), 43 | default=FC2LiveDL.DEFAULT_PARAMS["latency"], 44 | help="Stream latency. Select a higher latency if experiencing stability issues. Default is {}.".format( 45 | FC2LiveDL.DEFAULT_PARAMS["latency"] 46 | ), 47 | ) 48 | parser.add_argument( 49 | "--threads", 50 | type=int, 51 | default=1, 52 | help="The size of the thread pool used to download segments. Default is 1.", 53 | ) 54 | parser.add_argument( 55 | "-o", 56 | "--output", 57 | default=FC2LiveDL.DEFAULT_PARAMS["outtmpl"], 58 | help="""A|Set the output filename format. Supports formatting options similar to youtube-dl. 
Default is '{}' 59 | 60 | Available format options: 61 | channel_id (string): ID of the broadcast 62 | channel_name (string): broadcaster's profile name 63 | date (string): local date YYYY-MM-DD 64 | time (string): local time HHMMSS 65 | ext (string): file extension 66 | title (string): title of the live broadcast""".format( 67 | FC2LiveDL.DEFAULT_PARAMS["outtmpl"].replace("%", "%%") 68 | ), 69 | ) 70 | 71 | parser.add_argument( 72 | "--no-remux", 73 | action="store_true", 74 | help="Do not remux recordings into mp4/m4a after it is finished.", 75 | ) 76 | parser.add_argument( 77 | "-k", 78 | "--keep-intermediates", 79 | action="store_true", 80 | help="Keep the raw .ts recordings after it has been remuxed.", 81 | ) 82 | parser.add_argument( 83 | "-x", 84 | "--extract-audio", 85 | action="store_true", 86 | help="Generate an audio-only copy of the stream.", 87 | ) 88 | 89 | parser.add_argument("--cookies", help="Path to a cookies file.") 90 | 91 | parser.add_argument( 92 | "--write-chat", action="store_true", help="Save live chat into a json file." 93 | ) 94 | parser.add_argument( 95 | "--write-info-json", 96 | action="store_true", 97 | help="Dump output stream information into a json file.", 98 | ) 99 | parser.add_argument( 100 | "--write-thumbnail", action="store_true", help="Download thumbnail into a file" 101 | ) 102 | parser.add_argument( 103 | "--wait", 104 | action="store_true", 105 | help="Wait until the broadcast goes live, then start recording.", 106 | ) 107 | parser.add_argument( 108 | "--wait-for-quality-timeout", 109 | type=float, 110 | default=FC2LiveDL.DEFAULT_PARAMS["wait_for_quality_timeout"], 111 | help="If the requested quality is not available, keep retrying up to this many seconds before falling back to the next best quality. 
Default is {} seconds.".format( 112 | FC2LiveDL.DEFAULT_PARAMS["wait_for_quality_timeout"] 113 | ), 114 | ) 115 | parser.add_argument( 116 | "--poll-interval", 117 | type=float, 118 | default=FC2LiveDL.DEFAULT_PARAMS["wait_poll_interval"], 119 | help="How many seconds between checks to see if broadcast is live. Default is {}.".format( 120 | FC2LiveDL.DEFAULT_PARAMS["wait_poll_interval"] 121 | ), 122 | ) 123 | parser.add_argument( 124 | "--log-level", 125 | default="info", 126 | choices=Logger.LOGLEVELS.keys(), 127 | help="Log level verbosity. Default is info.", 128 | ) 129 | parser.add_argument( 130 | "--trust-env-proxy", 131 | action="store_true", 132 | help="Trust environment variables for proxy settings.", 133 | ) 134 | 135 | # Debug flags 136 | parser.add_argument( 137 | "--dump-websocket", 138 | action="store_true", 139 | help="Dump all websocket communication to a file for debugging", 140 | ) 141 | 142 | # Init fc2-live-dl 143 | args = parser.parse_args(args[1:]) 144 | Logger.loglevel = Logger.LOGLEVELS[args.log_level] 145 | params = { 146 | "quality": args.quality, 147 | "latency": args.latency, 148 | "threads": args.threads, 149 | "outtmpl": args.output, 150 | "write_chat": args.write_chat, 151 | "write_info_json": args.write_info_json, 152 | "write_thumbnail": args.write_thumbnail, 153 | "wait_for_live": args.wait, 154 | "wait_for_quality_timeout": args.wait_for_quality_timeout, 155 | "wait_poll_interval": args.poll_interval, 156 | "cookies_file": args.cookies, 157 | "remux": not args.no_remux, 158 | "keep_intermediates": args.keep_intermediates, 159 | "extract_audio": args.extract_audio, 160 | "trust_env_proxy": args.trust_env_proxy, 161 | "dump_websocket": args.dump_websocket, 162 | } 163 | 164 | logger = Logger("main") 165 | 166 | channel_id = None 167 | try: 168 | channel_id = ( 169 | args.url.replace("http:", "https:") 170 | .split("https://live.fc2.com")[1] 171 | .split("/")[1] 172 | ) 173 | except: 174 | logger.error("Error parsing URL: please 
def main():
    """Console entry point: run the async `_main` with the process argv.

    A KeyboardInterrupt (Ctrl-C) is swallowed so the program exits quietly
    instead of printing a traceback.
    """
    try:
        asyncio.run(_main(sys.argv))
    except KeyboardInterrupt:
        pass
class ChannelState:
    """Tracks the last startup time of a channel's download task so that
    rapid restarts can be debounced."""

    def __init__(self):
        # Epoch seconds of the last recorded startup; 0 means "never".
        self._last_startup_time = 0

    async def wait_for_debounce(self, duration):
        """Sleep until at least `duration` seconds have passed since the
        previous recorded startup, then record now as the new startup time.
        """
        elapsed = time.time() - self._last_startup_time
        remaining = duration - elapsed
        if remaining > 0:
            await asyncio.sleep(remaining)
        self._last_startup_time = time.time()
105 | with open(self.args["config"], "r", encoding="utf8") as f: 106 | self.last_valid_config = json.load(f) 107 | except Exception as ex: 108 | if self.last_valid_config is None: 109 | self.logger.error("Error reading config file") 110 | raise ex 111 | else: 112 | self.logger.warn("Warning: unable to load config, using last valid one") 113 | self.logger.warn(ex) 114 | return self.last_valid_config 115 | 116 | def clone(self, obj): 117 | return json.loads(json.dumps(obj)) 118 | 119 | def get_channels(self): 120 | config = self.get_config() 121 | return config["channels"].keys() 122 | 123 | def get_channel_params(self, channel_id): 124 | config = self.get_config() 125 | params = self.clone(config["default_params"]) 126 | params.update(self.clone(config["channels"][channel_id])) 127 | return params 128 | 129 | def reload_channels_list(self, tasks): 130 | async def noop(): 131 | pass 132 | 133 | channels = self.get_channels() 134 | for channel_id in channels: 135 | if channel_id not in tasks: 136 | tasks[channel_id] = asyncio.create_task(noop()) 137 | 138 | for channel_id in tasks.keys(): 139 | if channel_id not in channels: 140 | tasks[channel_id].cancel() 141 | 142 | async def debounce_channel(self, channel_id): 143 | config = self.get_config() 144 | debounce_time = 0 145 | if "autofc2" in config and "debounce_time" in config["autofc2"]: 146 | debounce_time = config["autofc2"]["debounce_time"] 147 | 148 | if channel_id not in self.channel_state: 149 | self.channel_state[channel_id] = ChannelState() 150 | 151 | if debounce_time > 0: 152 | await self.channel_state[channel_id].wait_for_debounce(debounce_time) 153 | 154 | async def config_watcher(self): 155 | last_log_level = Logger.loglevel 156 | 157 | while True: 158 | await asyncio.sleep(1) 159 | 160 | config = self.get_config() 161 | 162 | if "autofc2" not in config: 163 | continue 164 | 165 | log_level = config["autofc2"]["log_level"] 166 | if log_level == last_log_level: 167 | continue 168 | 169 | last_log_level = 
log_level 170 | 171 | if log_level not in Logger.LOGLEVELS: 172 | self.logger.error(f"Invalid log level {log_level}") 173 | continue 174 | 175 | Logger.loglevel = Logger.LOGLEVELS[log_level] 176 | self.logger.info(f"Setting log level to {log_level}") 177 | 178 | async def handle_event(self, event): 179 | try: 180 | await self.metrics.update(event) 181 | 182 | if event.type != CallbackEvent.Type.GOT_HLS_URL: 183 | return 184 | 185 | config = self.get_config() 186 | finfo = FC2LiveDL.get_format_info( 187 | meta=event.data["meta"], 188 | params=event.instance.params, 189 | sanitize=False, 190 | ) 191 | 192 | if "notifications" not in config: 193 | return 194 | 195 | for cfg in config["notifications"]: 196 | notifier = apprise.Apprise() 197 | notifier.add(cfg["url"]) 198 | await notifier.async_notify(body=cfg["message"] % finfo) 199 | 200 | except: 201 | self.logger.error("Error handling event") 202 | self.logger.error(traceback.format_exc()) 203 | return 204 | 205 | async def handle_channel(self, channel_id): 206 | params = self.get_channel_params(channel_id) 207 | async with FC2LiveDL(params, self.handle_event) as fc2: 208 | await self.debounce_channel(channel_id) 209 | await self.metrics.reset(channel_id) 210 | await fc2.download(channel_id) 211 | 212 | async def metrics_webserver(self): 213 | config = self.get_config() 214 | if "autofc2" not in config or "metrics" not in config["autofc2"]: 215 | # Stall forever 216 | return await asyncio.Future() 217 | 218 | metrics_cfg = config["autofc2"]["metrics"] 219 | 220 | self.logger.info( 221 | f"Metrics available at http://{metrics_cfg['host']}:{metrics_cfg['port']}{metrics_cfg['path']}" 222 | ) 223 | return await self.metrics.http_server( 224 | metrics_cfg["host"], 225 | metrics_cfg["port"], 226 | metrics_cfg["path"], 227 | ) 228 | 229 | async def _main(self): 230 | tasks = {} 231 | sleep_task = None 232 | config_task = asyncio.create_task(self.config_watcher()) 233 | metrics_task = 
def main():
    """CLI entry point for autofc2: parse arguments and run the watcher."""
    arg_parser = argparse.ArgumentParser(
        description="Automatically download FC2 live streams"
    )
    arg_parser.add_argument(
        "--config",
        "-c",
        default="autofc2.json",
        help="config file to use",
    )
    opts = arg_parser.parse_args()

    app = AutoFC2({"config": opts.config})
    app.main()
None: 25 | self._logger.info("Writing websocket to", output_file) 26 | self._output_file = open(output_file, "w") 27 | 28 | def __del__(self): 29 | if self._output_file is not None: 30 | self._logger.debug("Closing file") 31 | self._output_file.close() 32 | 33 | async def __aenter__(self): 34 | self._loop = asyncio.get_running_loop() 35 | self._ws = await self._session.ws_connect(self._url) 36 | self._logger.trace(self._ws) 37 | self._logger.debug("connected") 38 | self._task = asyncio.create_task(self._main_loop(), name="main_loop") 39 | return self 40 | 41 | async def __aexit__(self, *err): 42 | self._logger.trace("exit", err) 43 | if not self._task.done(): 44 | self._task.cancel() 45 | await self._ws.close() 46 | self._logger.debug("closed") 47 | 48 | async def wait_disconnection(self): 49 | res = await self._task 50 | if res.exception() is not None: 51 | raise res.exception() 52 | 53 | async def get_hls_information(self): 54 | msg = None 55 | tries = 0 56 | max_tries = 5 57 | 58 | while msg is None and tries < max_tries: 59 | msg = await self._send_message_and_wait("get_hls_information", timeout=5) 60 | 61 | backoff_delay = 2**tries 62 | tries += 1 63 | 64 | if msg is None: 65 | self._logger.warn( 66 | "Timeout reached waiting for HLS information, retrying in", 67 | backoff_delay, 68 | "seconds", 69 | ) 70 | await asyncio.sleep(backoff_delay) 71 | elif "playlists" not in msg["arguments"]: 72 | msg = None 73 | self._logger.warn( 74 | "Received empty playlist, retrying in", backoff_delay, "seconds" 75 | ) 76 | await asyncio.sleep(backoff_delay) 77 | 78 | if tries == max_tries: 79 | self._logger.error("Gave up after", tries, "tries") 80 | raise self.EmptyPlaylistException() 81 | 82 | return msg["arguments"] 83 | 84 | async def _main_loop(self): 85 | while True: 86 | try: 87 | msg = await asyncio.wait_for( 88 | self._ws.receive_json(), self.heartbeat_interval 89 | ) 90 | except asyncio.TimeoutError: 91 | self._logger.debug( 92 | f"Got no messages for 
{self.heartbeat_interval} seconds, sending heartbeat" 93 | ) 94 | await self._try_heartbeat() 95 | continue 96 | 97 | self._logger.trace("<", json.dumps(msg)[:100]) 98 | if self._output_file is not None: 99 | self._output_file.write("< ") 100 | self._output_file.write(json.dumps(msg)) 101 | self._output_file.write("\n") 102 | 103 | if msg["name"] == "connect_complete": 104 | self._is_ready = True 105 | elif msg["name"] == "_response_": 106 | await self._msg_responses.put(msg["id"], msg) 107 | elif msg["name"] == "control_disconnection": 108 | code = msg["arguments"]["code"] 109 | if code == 4101: 110 | raise self.PaidProgramDisconnection() 111 | elif code == 4507: 112 | raise self.LoginRequiredError() 113 | elif code == 4512: 114 | raise self.MultipleConnectionError() 115 | else: 116 | raise self.ServerDisconnection(code) 117 | elif msg["name"] == "publish_stop": 118 | raise self.StreamEnded() 119 | elif msg["name"] == "comment": 120 | for comment in msg["arguments"]["comments"]: 121 | await self.comments.put(comment) 122 | 123 | await self._try_heartbeat() 124 | 125 | async def _try_heartbeat(self): 126 | if time.time() - self._last_heartbeat < self.heartbeat_interval: 127 | return 128 | self._logger.debug("heartbeat") 129 | await self._send_message("heartbeat") 130 | self._last_heartbeat = time.time() 131 | 132 | async def _send_message_and_wait(self, name, arguments={}, *, timeout=0): 133 | msg_id = await self._send_message(name, arguments) 134 | if msg_id is None: 135 | return None 136 | 137 | msg_wait_task = asyncio.create_task(self._msg_responses.pop(msg_id)) 138 | tasks = [msg_wait_task, self._task] 139 | 140 | if timeout > 0: 141 | tasks.append(asyncio.create_task(asyncio.sleep(timeout), name="timeout")) 142 | 143 | _done, _pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED) 144 | done = _done.pop() 145 | if done.get_name() == "main_loop": 146 | _pending.pop().cancel() 147 | raise done.exception() 148 | elif done.get_name() == 
"timeout": 149 | return None 150 | return done.result() 151 | 152 | async def _send_message(self, name, arguments={}): 153 | self._msg_id += 1 154 | msg = {"name": name, "arguments": arguments, "id": self._msg_id} 155 | 156 | self._logger.trace(">", name, arguments) 157 | if self._output_file is not None: 158 | self._output_file.write("> ") 159 | self._output_file.write(json.dumps(msg)) 160 | self._output_file.write("\n") 161 | 162 | try: 163 | await self._ws.send_json(msg) 164 | except asyncio.TimeoutError as e: 165 | self._logger.debug("_send_message: send_json timeout", e) 166 | return None 167 | return self._msg_id 168 | 169 | class ServerDisconnection(Exception): 170 | """Raised when the server sends a `control_disconnection` message""" 171 | 172 | def __init__(self, code=None, reason=None): 173 | self.code = code 174 | self.reason = reason 175 | 176 | def __str__(self): 177 | if self.reason is not None: 178 | return "Server disconnected: {} ({})".format(self.code, self.reason) 179 | return "Server disconnected: {}".format(self.code) 180 | 181 | class PaidProgramDisconnection(ServerDisconnection): 182 | """Raised when the streamer switches the broadcast to a paid program""" 183 | 184 | def __init__(self): 185 | super().__init__(code=4101, reason="Paid program") 186 | 187 | class LoginRequiredError(ServerDisconnection): 188 | """Raised when the stream requires a login""" 189 | 190 | def __init__(self): 191 | super().__init__(code=4507, reason="Login required") 192 | 193 | class MultipleConnectionError(ServerDisconnection): 194 | """Raised when the server detects multiple connections to the same live stream""" 195 | 196 | def __init__(self): 197 | super().__init__(code=4512, reason="Multiple connections") 198 | 199 | class StreamEnded(Exception): 200 | def __str__(self): 201 | return "Stream has ended" 202 | 203 | class EmptyPlaylistException(Exception): 204 | """Raised when the server did not return a valid playlist""" 205 | 206 | def __str__(self): 207 | 
    async def wait_for_online(self, interval):
        """Poll the member API until the channel reports that it is live.

        interval: base number of seconds between checks. When a check
        raises, the wait doubles each time (capped at
        MAX_LIVE_CHECK_INTERVAL); the base interval is restored after the
        next successful check.
        """
        current_interval = interval
        while True:
            try:
                if await self.is_online():
                    break
            except Exception as e:
                # Network/API failure: back off exponentially so we don't
                # hammer the endpoint while it is unreachable.
                description = f"{e.__class__.__name__}: {e}"
                self._logger.warn(
                    f"Error when checking if stream is live: {description}"
                )
                current_interval = min(
                    current_interval * 2, self.MAX_LIVE_CHECK_INTERVAL
                )
                self._logger.debug(f"Next check in {current_interval} seconds")
            else:
                if current_interval != interval:
                    self._logger.debug(
                        f"Successfully fetched live status, restoring check interval of {interval} seconds"
                    )
                current_interval = interval

            # Sleep one second at a time so the inline spinner keeps moving.
            for _ in range(current_interval):
                self._logger.info("Waiting for stream", inline=True, spin=True)
                await asyncio.sleep(1)
    def __init__(self, session, url, threads):
        """Set up the HLS downloader.

        session: HTTP client session (aiohttp-style; used via
            `async with session.get(...)` — TODO confirm against caller).
        url: URL of the HLS playlist to poll for new fragments.
        threads: number of concurrent download workers.
        """
        self._session = session
        self._url = url
        self._threads = threads
        self._logger = Logger("hls")
        # Bounded priority queues (maxsize 100) keep fragment ordering and
        # cap memory use when downloading lags behind the live playlist.
        self._frag_urls = asyncio.PriorityQueue(100)
        self._frag_data = asyncio.PriorityQueue(100)
        self._download_task = None
= asyncio.get_running_loop() 20 | self._logger.debug("init") 21 | return self 22 | 23 | async def __aexit__(self, *err): 24 | self._logger.trace("exit", err) 25 | if self._download_task is not None: 26 | self._download_task.cancel() 27 | await self._download_task 28 | 29 | async def _get_fragment_urls(self): 30 | async with self._session.get(self._url) as resp: 31 | if resp.status == 403: 32 | raise FC2WebSocket.StreamEnded() 33 | elif resp.status == 404: 34 | return [] 35 | playlist = await resp.text() 36 | return [ 37 | line.strip() 38 | for line in playlist.split("\n") 39 | if len(line) > 0 and not line[0] == "#" 40 | ] 41 | 42 | @staticmethod 43 | def _get_fragment_id(fragment_url): 44 | """Take url and return filename part of it""" 45 | if not fragment_url: 46 | return fragment_url 47 | return fragment_url.split("?")[0].split("/")[-1] 48 | 49 | async def _fill_queue(self): 50 | last_fragment_timestamp = time.time() 51 | last_fragment = None 52 | frag_idx = 0 53 | while True: 54 | try: 55 | frags = await self._get_fragment_urls() 56 | frags_numbers = [self._get_fragment_id(url) for url in frags] 57 | 58 | try: 59 | new_idx = 1 + frags_numbers.index( 60 | self._get_fragment_id(last_fragment) 61 | ) 62 | except ValueError: 63 | new_idx = 0 64 | 65 | n_new = len(frags) - new_idx 66 | if n_new > 0: 67 | last_fragment_timestamp = time.time() 68 | self._logger.debug("Found", n_new, "new fragments") 69 | 70 | for frag in frags[new_idx:]: 71 | last_fragment = frag 72 | await self._frag_urls.put((frag_idx, (frag, 0))) 73 | frag_idx += 1 74 | 75 | if time.time() - last_fragment_timestamp > 30: 76 | self._logger.debug("Timeout receiving new segments") 77 | return 78 | 79 | await asyncio.sleep(1) 80 | except Exception as ex: 81 | self._logger.error("Error fetching new segments:", ex) 82 | return 83 | 84 | async def _download_worker(self, wid): 85 | try: 86 | while True: 87 | i, (url, tries) = await self._frag_urls.get() 88 | self._logger.debug(wid, "Downloading fragment", 
i) 89 | try: 90 | async with self._session.get(url) as resp: 91 | if resp.status > 299: 92 | self._logger.error( 93 | wid, "Fragment", i, "errored:", resp.status 94 | ) 95 | if tries < 5: 96 | self._logger.debug(wid, "Retrying fragment", i) 97 | await self._frag_urls.put((i, (url, tries + 1))) 98 | else: 99 | self._logger.error( 100 | wid, 101 | "Gave up on fragment", 102 | i, 103 | "after", 104 | tries, 105 | "tries", 106 | ) 107 | await self._frag_data.put((i, b"")) 108 | else: 109 | await self._frag_data.put((i, await resp.read())) 110 | except Exception as ex: 111 | self._logger.error(wid, "Unhandled exception:", ex) 112 | except asyncio.CancelledError: 113 | self._logger.debug("worker", wid, "cancelled") 114 | 115 | async def _download(self): 116 | tasks = [] 117 | try: 118 | if self._threads > 1: 119 | self._logger.info("Downloading with", self._threads, "threads") 120 | 121 | if self._threads > 8: 122 | self._logger.warn("Using more than 8 threads is not recommended") 123 | 124 | tasks = [ 125 | asyncio.create_task(self._download_worker(i)) 126 | for i in range(self._threads) 127 | ] 128 | 129 | self._logger.debug("Starting queue worker") 130 | await self._fill_queue() 131 | self._logger.debug("Queue finished") 132 | 133 | for task in tasks: 134 | task.cancel() 135 | await task 136 | self._logger.debug("Workers quit") 137 | except asyncio.CancelledError: 138 | self._logger.debug("_download cancelled") 139 | for task in tasks: 140 | task.cancel() 141 | await task 142 | 143 | async def _read(self, index): 144 | while True: 145 | p, frag = await self._frag_data.get() 146 | if p == index: 147 | return frag 148 | await self._frag_data.put((p, frag)) 149 | await asyncio.sleep(0.1) 150 | 151 | async def read(self): 152 | try: 153 | if self._download_task is None: 154 | self._download_task = asyncio.create_task(self._download()) 155 | 156 | index = 0 157 | while True: 158 | yield await self._read(index) 159 | index += 1 160 | except asyncio.CancelledError: 161 | 
self._logger.debug("read cancelled") 162 | if self._download_task is not None: 163 | self._download_task.cancel() 164 | await self._download_task 165 | -------------------------------------------------------------------------------- /fc2_live_dl/util.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import asyncio 3 | import re 4 | import sys 5 | from datetime import datetime 6 | 7 | 8 | class Logger: 9 | LOGLEVELS = { 10 | "silent": 0, 11 | "error": 1, 12 | "warn": 2, 13 | "info": 3, 14 | "debug": 4, 15 | "trace": 5, 16 | } 17 | 18 | loglevel = LOGLEVELS["info"] 19 | print_inline = True 20 | print_colors = True 21 | 22 | ansi_purple = "\033[35m" 23 | ansi_cyan = "\033[36m" 24 | ansi_yellow = "\033[33m" 25 | ansi_red = "\033[31m" 26 | ansi_reset = "\033[0m" 27 | ansi_delete_line = "\033[K" 28 | 29 | def __init__(self, module): 30 | self._module = module 31 | self._loadspin_n = 0 32 | 33 | if not sys.stdout.isatty(): 34 | self.print_inline = False 35 | self.print_colors = False 36 | 37 | def trace(self, *args, **kwargs): 38 | if self.loglevel >= self.LOGLEVELS["trace"]: 39 | self._print(self.ansi_purple, *args, **kwargs) 40 | 41 | def debug(self, *args, **kwargs): 42 | if self.loglevel >= self.LOGLEVELS["debug"]: 43 | self._print(self.ansi_cyan, *args, **kwargs) 44 | 45 | def info(self, *args, **kwargs): 46 | if self.loglevel >= self.LOGLEVELS["info"]: 47 | self._print("", *args, **kwargs) 48 | 49 | def warn(self, *args, **kwargs): 50 | if self.loglevel >= self.LOGLEVELS["warn"]: 51 | self._print(self.ansi_yellow, *args, **kwargs) 52 | 53 | def error(self, *args, **kwargs): 54 | if self.loglevel >= self.LOGLEVELS["error"]: 55 | self._print(self.ansi_red, *args, **kwargs) 56 | 57 | def _spin(self): 58 | chars = "⡆⠇⠋⠙⠸⢰⣠⣄" 59 | self._loadspin_n = (self._loadspin_n + 1) % len(chars) 60 | return chars[self._loadspin_n] 61 | 62 | def _print(self, color, *args, inline=False, spin=False): 63 | if inline and not 
self.print_inline: 64 | return 65 | 66 | args = list(args) 67 | 68 | if self.print_colors: 69 | args.append(self.ansi_reset) 70 | else: 71 | color = "" 72 | 73 | if spin: 74 | args.insert(0, self._spin()) 75 | 76 | end = self.ansi_delete_line if self.print_inline else "" 77 | end = end + ("\r" if inline else "\n") 78 | 79 | timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S") 80 | 81 | print( 82 | "{} {}[{}]".format(timestamp, color, self._module), 83 | *args, 84 | end=end, 85 | flush=True 86 | ) 87 | 88 | 89 | class AsyncMap: 90 | def __init__(self): 91 | self._map = {} 92 | self._cond = asyncio.Condition() 93 | 94 | async def put(self, key, value): 95 | async with self._cond: 96 | self._map[key] = value 97 | self._cond.notify_all() 98 | 99 | async def pop(self, key): 100 | while True: 101 | async with self._cond: 102 | await self._cond.wait() 103 | if key in self._map: 104 | return self._map.pop(key) 105 | 106 | 107 | class SmartFormatter(argparse.HelpFormatter): 108 | def flatten(self, input_array): 109 | result_array = [] 110 | for element in input_array: 111 | if isinstance(element, str): 112 | result_array.append(element) 113 | elif isinstance(element, list): 114 | result_array += self.flatten(element) 115 | return result_array 116 | 117 | def _split_lines(self, text, width): 118 | if text.startswith("R|"): 119 | return text[2:].splitlines() 120 | elif text.startswith("A|"): 121 | return self.flatten( 122 | [ 123 | ( 124 | argparse.HelpFormatter._split_lines(self, x, width) 125 | if len(x) >= width 126 | else x 127 | ) 128 | for x in text[2:].splitlines() 129 | ] 130 | ) 131 | return argparse.HelpFormatter._split_lines(self, text, width) 132 | 133 | 134 | def sanitize_filename(fname): 135 | # https://stackoverflow.com/a/31976060 136 | fname = str(fname) 137 | 138 | # replace windows and linux forbidden characters 139 | fname = re.sub(r"[\\/:*?\"<>|]+", "_", fname) 140 | 141 | # remove ascii control characters 142 | fname = re.sub(r"[\x00-\x1f\x7f]", "", 
fname) 143 | 144 | # remove leading and trailing whitespace 145 | fname = fname.strip() 146 | 147 | # remove leading and trailing dots 148 | fname = fname.strip(".") 149 | 150 | # check windows reserved names 151 | badnames = """ 152 | CON PRN AUX NUL 153 | COM1 COM2 COM3 COM4 COM5 COM6 COM7 COM8 COM9 154 | LPT1 LPT2 LPT3 LPT4 LPT5 LPT6 LPT7 LPT8 LPT9 155 | """.split() 156 | 157 | fup = fname.upper() 158 | for badname in badnames: 159 | if fup == badname or fup.startswith(badname + "."): 160 | fname = "_" + fname 161 | 162 | return fname 163 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = [ 3 | "setuptools>=45", 4 | "wheel" 5 | ] 6 | build-backend = "setuptools.build_meta" 7 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | aiohttp>=3.7.4.post0 2 | apprise>=1.4.5 3 | -------------------------------------------------------------------------------- /scripts/release.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | # Exit on error 4 | set -e 5 | 6 | # Get versions 7 | py_version=$(grep 'version' setup.cfg | sed 's/version = //') 8 | latest_tag=$(git tag --sort=-v:refname | head -n 1 | sed 's/v//') 9 | 10 | # Skip if latest tag is the same as current version 11 | if [ "$py_version" == "$latest_tag" ]; then 12 | echo "Latest version is already tagged." 13 | exit 0 14 | fi 15 | 16 | # Make sure we're in the right branch 17 | if [ "$(git rev-parse --abbrev-ref HEAD)" != "main" ]; then 18 | echo "You must be in the main branch to run this script" 19 | exit 1 20 | fi 21 | 22 | # Make sure working tree is clean 23 | if [ -n "$(git status --porcelain)" ]; then 24 | echo "Working tree is not clean. 
Please commit or stash changes before running this script." 25 | exit 1 26 | fi 27 | 28 | # Create a new tag 29 | git tag -a "v$py_version" -em "$(git-cliff -ut $py_version -s all)" 30 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = fc2-live-dl 3 | version = 2.2.0 4 | author = Hizkia Felix 5 | author_email = felix@hizkifw.me 6 | description = Download live streams from FC2 7 | long_description = file: README.md 8 | long_description_content_type = text/markdown 9 | url = https://github.com/HoloArchivists/fc2-live-dl 10 | project_urls = 11 | Bug Tracker = https://github.com/HoloArchivists/fc2-live-dl/issues 12 | classifiers = 13 | Development Status :: 5 - Production/Stable 14 | Environment :: Console 15 | License :: OSI Approved :: MIT License 16 | Natural Language :: English 17 | Operating System :: OS Independent 18 | Programming Language :: Python :: 3 :: Only 19 | Programming Language :: Python :: 3.8 20 | 21 | [options] 22 | package_dir = 23 | = . 24 | python_requires = >=3.8 25 | packages = find: 26 | install_requires = 27 | aiohttp >= 3.8.1 28 | aiodns >= 3.0.0 29 | apprise >= 1.4.5 30 | 31 | [options.entry_points] 32 | console_scripts = 33 | fc2-live-dl = fc2_live_dl:main 34 | autofc2 = fc2_live_dl.autofc2:main 35 | 36 | [options.packages.find] 37 | where = . 38 | --------------------------------------------------------------------------------