├── streamer
├── cloud
│ ├── __init__.py
│ ├── base.py
│ ├── uploader.py
│ ├── gcs.py
│ ├── s3.py
│ └── pool.py
├── __init__.py
├── min_versions.py
├── util.py
├── external_command_node.py
├── subprocessWindowsPatch.py
├── pipe.py
├── output_stream.py
├── autodetect.py
├── node_base.py
├── proxy_node.py
├── periodconcat_node.py
└── packager_node.py
├── .release-please-manifest.json
├── optional_requirements.txt
├── docs
├── source
│ ├── shaka-streamer-logo.png
│ ├── module_api.rst
│ ├── index.rst
│ ├── autolink.js
│ ├── hardware_encoding.rst
│ ├── cloud_storage.rst
│ ├── configuration_fields.rst
│ ├── prerequisites.rst
│ ├── overview.rst
│ └── conf.py
└── build.sh
├── requirements.txt
├── .gitignore
├── package.json
├── .release-please-config.json
├── .github
└── workflows
│ ├── validate-pr-title.yaml
│ ├── update-issues.yaml
│ ├── sync-labels.yaml
│ ├── publish-docs.yaml
│ ├── settings.yaml
│ ├── release-please.yaml
│ └── build-and-test.yaml
├── binaries
├── README.md
├── streamer_binaries
│ └── __init__.py
├── setup.py
└── build_wheels.py
├── README.md
├── CONTRIBUTING.md
├── config_files
├── input_webcam_config.yaml
├── input_looped_file_config.yaml
├── input_microphone_config.yaml
├── pipeline_live_hardware_config.yaml
├── pipeline_vod_config.yaml
├── pipeline_live_config.yaml
├── bitrate_config.yaml
├── pipeline_low_latency_dash_config.yaml
├── input_external_command.yaml
├── pipeline_live_encrypted_config.yaml
├── bitrate_hls_config.yaml
├── input_vod_config.yaml
├── pipeline_vod_encrypted_config.yaml
├── pipeline_vod_encrypted_raw_config.yaml
└── input_multiperiod.yaml
├── tests
└── karma.conf.js
├── SECURITY.md
├── setup.py
├── CODE_OF_CONDUCT.md
├── shaka-streamer
└── LICENSE
/streamer/cloud/__init__.py:
--------------------------------------------------------------------------------
1 | from . import *
2 |
--------------------------------------------------------------------------------
/.release-please-manifest.json:
--------------------------------------------------------------------------------
1 | {
2 | ".": "1.3.0"
3 | }
4 |
--------------------------------------------------------------------------------
/streamer/__init__.py:
--------------------------------------------------------------------------------
1 | __version__ = '1.3.0' # x-release-please-version
2 |
3 | from . import controller_node
4 |
--------------------------------------------------------------------------------
/optional_requirements.txt:
--------------------------------------------------------------------------------
1 | flask>=3,<4
2 | mypy>=1,<2
3 | setuptools>=75
4 | sphinx>=7,<8
5 | twine>=5,<6
6 | wheel>=0.44,<1
7 |
--------------------------------------------------------------------------------
/docs/source/shaka-streamer-logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/shaka-project/shaka-streamer/HEAD/docs/source/shaka-streamer-logo.png
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | distro>=1.9,<2; platform_system == "Linux"
2 | pywin32>=308; platform_system == "Windows"
3 | pyyaml>=6,<7
4 | setproctitle>=1,<2
5 | typing_extensions>=4,<5
6 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | output_files/
3 | test_assets/
4 | node_modules/
5 | Sintel.*
6 | docs/build/
7 | build/
8 | dist/
9 | shaka_streamer.egg-info/
10 | shaka_streamer_binaries.egg-info/
11 | .idea/
12 | venv/
13 | dev/
14 | .vscode/
15 | ffmpeg*
16 | ffprobe*
17 | packager*
18 | .mypy_cache/
19 |
--------------------------------------------------------------------------------
/package.json:
--------------------------------------------------------------------------------
1 | {
2 | "scripts": {
3 | "lint": "python3 -m mypy streamer/ shaka-streamer",
4 | "test": "python3 run_end_to_end_tests.py"
5 | },
6 | "devDependencies": {
7 | "karma": "^6.3.16",
8 | "karma-chrome-launcher": "^3.1.0",
9 | "karma-jasmine": "^4.0.1",
10 | "karma-junit-reporter": "^2.0.1",
11 | "karma-spec-reporter": "^0.0.33",
12 | "shaka-player": "^4.11.7"
13 | }
14 | }
15 |
--------------------------------------------------------------------------------
/.release-please-config.json:
--------------------------------------------------------------------------------
1 | {
2 | "last-release-sha": "a86da1b96c0134ea1e8c2f4f1d33520efe7aa73c",
3 | "packages": {
4 | ".": {
5 | "include-component-in-tag": false,
6 | "include-v-in-tag": true,
7 | "component": "",
8 | "extra-files": [
9 | "streamer/__init__.py",
10 | "binaries/streamer_binaries/__init__.py"
11 | ],
12 | "release-type": "python"
13 | }
14 | }
15 | }
16 |
--------------------------------------------------------------------------------
/docs/build.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | # Copyright 2019 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # https://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | set -e
18 | set -x
19 |
20 | cd $(dirname $0)
21 |
22 | sphinx-build -b html ./source ./build
23 |
--------------------------------------------------------------------------------
/streamer/min_versions.py:
--------------------------------------------------------------------------------
1 | # Copyright 2024 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Minimum versions of tools we depend on."""
16 |
17 | # These are minimum semantic versions expressed as tuples of ints.
18 | FFMPEG = (7, 1)
19 | PACKAGER = (3, 4, 0)
20 |
--------------------------------------------------------------------------------
/.github/workflows/validate-pr-title.yaml:
--------------------------------------------------------------------------------
1 | name: Validate PR Title
2 |
3 | on:
4 | # NOTE: The automated PRs from release-please-action do not seem to trigger
5 | # any of the default PR triggers (opened, synchronize, reopened). So we need
6 | # additional types. This is a good set that makes it easy to trigger the
7 | # workflow manually if needed. This is not necessary if your release-please
8 | # workflow uses a personal access token (PAT) from Shaka Bot.
9 | pull_request_target:
10 | types:
11 | - opened
12 | - reopened
13 | - edited
14 | - synchronize
15 | - assigned
16 | - labeled
17 | - ready_for_review
18 | - review_requested
19 |
20 | jobs:
21 | main:
22 | name: Validate PR Title
23 | runs-on: ubuntu-latest
24 | steps:
25 | - uses: amannn/action-semantic-pull-request@v5
26 | env:
27 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
28 |
--------------------------------------------------------------------------------
/docs/source/module_api.rst:
--------------------------------------------------------------------------------
1 | ..
2 | Copyright 2019 Google LLC
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | https://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
16 |
17 | Module API
18 | ==========
19 |
20 | .. automodule:: streamer.controller_node
21 | .. autoclass:: streamer.node_base.ProcessStatus
22 | .. automodule:: streamer.configuration
23 | :exclude-members: Base, Field, ValidatingType, RuntimeMapType
24 | :no-inherited-members:
25 |
--------------------------------------------------------------------------------
/.github/workflows/update-issues.yaml:
--------------------------------------------------------------------------------
1 | # Install this in .github/workflows/ to automate issue maintenance.
2 | name: Update Issues
3 |
4 | on:
5 | workflow_dispatch:
6 | # Allows for manual triggering.
7 | schedule:
8 | # Run every 30 minutes
9 | - cron: '*/30 * * * *'
10 |
11 | jobs:
12 | update-issues:
13 | runs-on: ubuntu-latest
14 |
15 | permissions:
16 | # "Write" to Issues to add labels, milestones, comments, etc.
17 | issues: write
18 | # "Write" to Pull Requests for the same.
19 | pull-requests: write
20 |
21 | steps:
22 | - name: Checkout code
23 | uses: actions/checkout@v4
24 | with:
25 | repository: shaka-project/shaka-github-tools
26 | persist-credentials: false
27 |
28 | - name: Update Issues
29 | env:
30 | # Use SHAKA_BOT_TOKEN if found, otherwise the default GITHUB_TOKEN.
31 | GITHUB_TOKEN: ${{ secrets.SHAKA_BOT_TOKEN || secrets.GITHUB_TOKEN }}
32 | run: |
33 | cd update-issues
34 | npm ci
35 | node main.js
36 |
--------------------------------------------------------------------------------
/binaries/README.md:
--------------------------------------------------------------------------------
1 | # Shaka Streamer Binaries
2 |
3 | Shaka Streamer Binaries is a companion package to [Shaka Streamer][] that
4 | provides platform-specific binaries for Streamer's dependencies: [FFmpeg][] and
5 | [Shaka Packager][].
6 |
7 | FFmpeg binaries are built from open, verifiable, automated workflows at
8 | https://github.com/shaka-project/static-ffmpeg-binaries
9 |
10 | Shaka Packager binaries are official releases from
11 | https://github.com/shaka-project/shaka-packager
12 |
13 | Install or upgrade Shaka Streamer and its binaries through `pip3` with:
14 |
15 | ```sh
16 | # To install globally (drop the "sudo" for Windows):
17 | sudo pip3 install --upgrade shaka-streamer shaka-streamer-binaries
18 |
19 | # To install per-user:
20 | pip3 install --user --upgrade shaka-streamer shaka-streamer-binaries
21 | ```
22 |
23 | [FFmpeg]: https://ffmpeg.org/
24 | [Shaka Packager]: https://github.com/shaka-project/shaka-packager
25 | [Shaka Streamer]: https://pypi.org/project/shaka-streamer/
26 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Shaka Streamer
2 |
3 | Shaka Streamer offers a simple config-file based approach to preparing streaming
4 | media. It greatly simplifies the process of using FFmpeg and Shaka Packager for
5 | both VOD and live content.
6 |
7 | Live documentation can be found at
8 | https://shaka-project.github.io/shaka-streamer/ and is generated from the
9 | `docs/source/` folder, as well as the source code itself.
10 |
11 | Sample configs can be found in the [`config_files/`] folder in the repo.
12 |
13 | [`config_files/`]: https://github.com/shaka-project/shaka-streamer/tree/main/config_files
14 |
15 | Install or upgrade Shaka Streamer and its [binaries][] through `pip3` with:
16 |
17 | ```sh
18 | # To install globally (drop the "sudo" for Windows):
19 | sudo pip3 install --upgrade shaka-streamer shaka-streamer-binaries
20 |
21 | # To install per-user:
22 | pip3 install --user --upgrade shaka-streamer shaka-streamer-binaries
23 | ```
24 |
25 | [binaries]: https://pypi.org/project/shaka-streamer-binaries/
26 |
--------------------------------------------------------------------------------
/streamer/util.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Utility functions used by multiple modules."""
16 |
17 | import urllib.parse
18 |
def is_url(output_location: str) -> bool:
  """Returns True if the output location is a URL."""
  # Anything with an explicit scheme (e.g. "gs://", "s3://", "http://")
  # counts as a URL; plain filesystem paths parse with an empty scheme.
  parsed = urllib.parse.urlparse(output_location)
  return bool(parsed.scheme)
22 |
def is_http_url(output_location: str) -> bool:
  """Returns True if the output location is an HTTP/HTTPS URL."""
  # Only the two web schemes qualify; other URL schemes (gs://, s3://, ...)
  # and plain paths do not.
  return urllib.parse.urlparse(output_location).scheme in ('http', 'https')
27 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # How to Contribute
2 |
3 | We'd love to accept your patches and contributions to this project. There are
4 | just a few small guidelines you need to follow.
5 |
6 | ## Contributor License Agreement
7 |
8 | Contributions to this project must be accompanied by a Contributor License
9 | Agreement. You (or your employer) retain the copyright to your contribution;
10 | this simply gives us permission to use and redistribute your contributions as
11 | part of the project. Head over to <https://cla.developers.google.com/> to see
12 | your current agreements on file or to sign a new one.
13 |
14 | You generally only need to submit a CLA once, so if you've already submitted one
15 | (even if it was for a different project), you probably don't need to do it
16 | again.
17 |
18 | ## Code reviews
19 |
20 | All submissions, including submissions by project members, require review. We
21 | use GitHub pull requests for this purpose. Consult
22 | [GitHub Help](https://help.github.com/articles/about-pull-requests/) for more
23 | information on using pull requests.
24 |
25 | ## Community Guidelines
26 |
27 | This project follows [Google's Open Source Community
28 | Guidelines](https://opensource.google.com/conduct/).
29 |
--------------------------------------------------------------------------------
/config_files/input_webcam_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample input configuration file for Shaka Streamer for webcam input.
16 |
17 | # List of inputs.
18 | inputs:
19 | # The type of input.
20 | - input_type: webcam
21 | # Name of the input device. On Linux, this is a device node path.
22 | # On macOS, this is a device name, typically "default".
23 | # On Windows, this is the name of the directshow capture source, which
24 | # differs based on the hardware. On a laptop this will typically
25 | # be "video=Integrated Camera".
26 | name: /dev/video0
27 | # The media type.
28 | media_type: video
29 |
--------------------------------------------------------------------------------
/tests/karma.conf.js:
--------------------------------------------------------------------------------
1 | // Copyright 2019 Google LLC
2 | //
3 | // Licensed under the Apache License, Version 2.0 (the "License");
4 | // you may not use this file except in compliance with the License.
5 | // You may obtain a copy of the License at
6 | //
7 | // https://www.apache.org/licenses/LICENSE-2.0
8 | //
9 | // Unless required by applicable law or agreed to in writing, software
10 | // distributed under the License is distributed on an "AS IS" BASIS,
11 | // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | // See the License for the specific language governing permissions and
13 | // limitations under the License.
14 |
15 | /** @param {!Object} config */
16 | module.exports = function(config) {
17 | config.set({
18 | basePath: __dirname,
19 | browserNoActivityTimeout: 5 * 60 * 1000, // Disconnect after 5m silence
20 | client: {
21 | captureConsole: true,
22 | filter: config.filter,
23 | seed: config.seed,
24 | debug: !!config.debug,
25 | testWidevine: !!config.testWidevine,
26 | },
27 | frameworks: ['jasmine'],
28 | files: [
29 | // Shaka Player
30 | '../node_modules/shaka-player/dist/shaka-player.compiled.js',
31 |
32 | // End to end tests
33 | 'tests.js',
34 | ],
35 | });
36 | };
37 |
--------------------------------------------------------------------------------
/.github/workflows/sync-labels.yaml:
--------------------------------------------------------------------------------
1 | # Install this in .github/workflows/ to automate label updates
2 | name: Sync Labels
3 |
4 | on:
5 | workflow_dispatch:
6 | # Allows for manual triggering.
7 | inputs:
8 | dry_run:
9 | description: "If true, don't make any actual changes"
10 | required: false
11 | default: false
12 | schedule:
13 | # Run every week on Sunday at 5:42 AM.
14 | - cron: '42 5 * * 0'
15 |
16 | jobs:
17 | sync-labels:
18 | runs-on: ubuntu-latest
19 |
20 | permissions:
21 | # "Write" to Issues to manage labels for the repo
22 | issues: write
23 |
24 | steps:
25 | - name: Checkout code
26 | uses: actions/checkout@v4
27 | with:
28 | repository: shaka-project/shaka-github-tools
29 | persist-credentials: false
30 |
31 | # TODO: revert to micnncim and new release after landing
32 | # https://github.com/micnncim/action-label-syncer/pull/68
33 | - uses: joeyparrish/action-label-syncer@v1.8.0
34 | with:
35 | dry_run: ${{ github.event.inputs.dry_run || false }}
36 | prune: true
37 | manifest: sync-labels/configs/${{ github.repository }}.yaml
38 | repository: ${{ github.repository }}
39 | token: ${{ github.token }}
40 |
--------------------------------------------------------------------------------
/config_files/input_looped_file_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample input configuration file for Shaka Streamer for a looped
16 | # file input.
17 |
18 | # List of inputs.
19 | inputs:
20 | # The type of input.
21 | - input_type: looped_file
22 | # Name of the input file.
23 | # This example can be downloaded from https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.720p.mkv
24 | name: Sintel.2010.720p.mkv
25 | # The media type of the input. Can be audio or video.
26 | media_type: video
27 |
28 | # The type of input.
29 | - input_type: looped_file
30 | # A second track (audio) from the same input file.
31 | name: Sintel.2010.720p.mkv
32 | media_type: audio
33 |
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | ..
2 | Copyright 2019 Google LLC
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | https://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
16 | .. image:: shaka-streamer-logo.png
17 | :align: center
18 | :alt: Shaka Streamer
19 |
20 | Shaka Streamer offers a simple config-file based approach to preparing
21 | streaming media. It greatly simplifies the process of using FFmpeg and Shaka
22 | Packager for both VOD and live content.
23 |
24 |
25 | Shaka Streamer documentation
26 | ============================
27 |
28 | .. toctree::
29 | :maxdepth: 2
30 | :caption: Contents:
31 |
32 | overview
33 | prerequisites
34 | cloud_storage
35 | hardware_encoding
36 | configuration_fields
37 | module_api
38 |
39 | Indices and tables
40 | ==================
41 |
42 | * :ref:`genindex`
43 | * :ref:`modindex`
44 | * :ref:`search`
45 |
--------------------------------------------------------------------------------
/config_files/input_microphone_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample input configuration file for Shaka Streamer for microphone input.
16 |
17 | # List of inputs.
18 | inputs:
19 | # The type of input.
20 | - input_type: microphone
21 | # Name of the input device.
22 | # On Linux, this is the audio device name, typically "default", or
23 | # the index of the audio device("0","1","2",etc...).
24 | # On macOS, this is the audio device name, typically ":default", or
25 | # the index of the audio device after a colon(":0",":1",":2",etc...).
26 | # On Windows, this is the name of the directshow capture source, which
27 | # differs based on the hardware. E.g. "audio=Microphone Array".
28 | name: "0"
29 | # The media type.
30 | media_type: audio
31 |
--------------------------------------------------------------------------------
/config_files/pipeline_live_hardware_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample pipeline configuration file for Shaka Streamer in live mode.
16 | # Here you configure resolutions, manifest formats, segment size, and more.
17 |
18 | # Streaming mode. Can be live or vod.
19 | streaming_mode: live
20 |
21 | # A list of resolutions to encode.
22 | resolutions:
23 | - 720p
24 | - 480p
25 |
26 | # A list of channel layouts to encode.
27 | channel_layouts:
28 | - stereo
29 |
30 | # The codecs to encode with.
31 | audio_codecs:
32 | - aac
33 | - opus
34 | video_codecs:
35 | - h264
36 | - hw:vp9 # Use hardware encoding for VP9.
37 |
38 | # Manifest format (dash, hls, or both)
39 | manifest_format:
40 | - dash
41 | - hls
42 |
43 | # Length of each segment in seconds.
44 | segment_size: 4
45 |
--------------------------------------------------------------------------------
/SECURITY.md:
--------------------------------------------------------------------------------
1 | # Security Policy
2 |
3 | ## Supported Versions
4 |
5 | * This repository does not currently maintain release branches. **Only the latest release is supported.**
6 |
7 | * If a security issue is identified in a current release, the fix will trigger a new release from `main`.
8 |
9 | * If a security issue is identified in any release, we will disclose the issue and advise everyone to upgrade to the latest release.
10 |
11 |
12 | ## Reporting a Vulnerability
13 |
14 | Per Google policy, please use https://g.co/vulnz to report security vulnerabilities. Google uses this for intake and triage. For valid issues, we will do coordination and disclosure here on GitHub (including using a GitHub Security Advisory when necessary).
15 |
16 | The Google Security Team will process your report within a day, and respond within a week (although it will depend on the severity of your report).
17 |
18 |
19 | ## Remediation Actions
20 |
21 | * A GitHub issue will be created with the `type: vulnerability` label to coordinate a response. After remediation, we will also use this issue to disclose any details we withheld between receiving the private report and resolving the issue.
22 |
23 | * A GitHub Security Advisory may be created, if appropriate. For example, this would be done if the issue impacts users or dependent projects. This might be skipped for other issues, such as CI workflow vulnerabilities.
24 |
25 | * Vulnerabilities in NPM modules will be reported to NPM so that they show up in `npm audit`.
26 |
27 |
28 |
--------------------------------------------------------------------------------
/config_files/pipeline_vod_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample pipeline configuration file for Shaka Streamer in VOD mode.
16 | # Here you configure resolutions, manifest formats, segment size, and more.
17 |
18 | # Streaming mode. Can be live or vod.
19 | streaming_mode: vod
20 |
21 | # A list of resolutions to encode.
22 | # For VOD, you can specify many more resolutions than you would with live,
23 | # since the encoding does not need to be done in real time.
24 | resolutions:
25 | - 4k
26 | - 1080p
27 | - 720p
28 | - 480p
29 | - 360p
30 |
31 | # A list of channel layouts to encode.
32 | channel_layouts:
33 | - stereo
34 | - surround
35 |
36 | # The codecs to encode with.
37 | audio_codecs:
38 | - aac
39 | - opus
40 | video_codecs:
41 | - h264
42 | - vp9
43 |
44 | # Manifest format (dash, hls or both)
45 | manifest_format:
46 | - dash
47 | - hls
48 |
49 | # Length of each segment in seconds.
50 | segment_size: 10
51 |
52 | # Forces the use of SegmentTemplate in DASH.
53 | segment_per_file: True
54 |
--------------------------------------------------------------------------------
/streamer/cloud/base.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Upload to cloud storage providers.
16 |
17 | Base class definition."""
18 |
19 | import abc
20 |
21 |
class CloudUploaderBase(abc.ABC):
  """Abstract interface for uploading output files to cloud storage.

  Concrete provider-specific subclasses (see gcs.py and s3.py in this
  package) implement each method.

  Inheriting from abc.ABC makes @abc.abstractmethod actually effective:
  with a plain `object` base, the decorators were inert, so this class and
  incomplete subclasses could be instantiated silently.  Now instantiating
  this class, or a subclass that fails to override every abstract method,
  raises TypeError at construction time.
  """

  @abc.abstractmethod
  def write_non_chunked(self, path: str, data: bytes) -> None:
    """Write the non-chunked data to the destination."""
    pass

  @abc.abstractmethod
  def start_chunked(self, path: str) -> None:
    """Set up for a chunked transfer to the destination."""
    pass

  @abc.abstractmethod
  def write_chunk(self, data: bytes) -> None:
    """Handle a single chunk of data."""
    pass

  @abc.abstractmethod
  def end_chunked(self) -> None:
    """End the chunked transfer."""
    pass

  @abc.abstractmethod
  def delete(self, path: str) -> None:
    """Delete the file from cloud storage."""
    pass

  @abc.abstractmethod
  def reset(self) -> None:
    """Reset any chunked output state."""
    pass
52 |
--------------------------------------------------------------------------------
/config_files/pipeline_live_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample pipeline configuration file for Shaka Streamer in live mode.
16 | # Here you configure resolutions, manifest formats, segment size, and more.
17 |
18 | # Streaming mode. Can be live or vod.
19 | streaming_mode: live
20 |
21 | # A list of resolutions to encode.
22 | resolutions:
23 | - 720p
24 | - 480p
25 |
26 | # A list of channel layouts to encode.
27 | channel_layouts:
28 | - stereo
29 |
30 | # The codecs to encode with.
31 | audio_codecs:
32 | - aac
33 | video_codecs:
34 | - h264
35 |
36 | # Manifest format (dash, hls, or both)
37 | manifest_format:
38 | - dash
39 | - hls
40 |
41 | # Length of each segment in seconds.
42 | segment_size: 4
43 |
44 | # Availability window, or the number of seconds a segment remains available.
45 | availability_window: 300
46 |
# Presentation delay, or how far back (in seconds) from the live edge the player should be.
48 | presentation_delay: 30
49 |
50 | # Update period, or how often the player should fetch a new manifest.
51 | update_period: 8
52 |
--------------------------------------------------------------------------------
/binaries/streamer_binaries/__init__.py:
--------------------------------------------------------------------------------
"""Locates the bundled FFmpeg, FFprobe, and Shaka Packager binaries.

The module-level variables ``ffmpeg``, ``ffprobe``, and ``packager`` hold
absolute paths to the binaries that match the current OS and CPU.
"""

import os
import platform

# Package version; the trailing marker comment is used by release automation.
__version__ = '1.3.0' # x-release-please-version


# Get the directory path where this __init__.py file resides.
_dir_path: str = os.path.abspath(os.path.dirname(__file__))

# Compute the part of the file name that indicates the OS.
# An unsupported OS raises KeyError here, at import time.
_os: str = {
    'Linux': 'linux',
    'Windows': 'win',
    'Darwin': 'osx',
}[platform.system()]

# Compute the part of the file name that indicates the CPU architecture.
# An unsupported architecture raises KeyError here, at import time.
_cpu: str = {
    'x86_64': 'x64',  # Linux/Mac report this key
    'AMD64': 'x64',  # Windows reports this key
    'aarch64': 'arm64',  # Linux reports this key
    'arm64': 'arm64',  # Mac reports this key
}[platform.machine()]

# Specific versions of Ubuntu with special builds for hardware-encoding.
_ubuntu_versions_with_hw_encoders = (
    '22.04',
    '24.04',
)

# Module level variables.
ffmpeg = os.path.join(_dir_path, 'ffmpeg-{}-{}'.format(_os, _cpu))
"""The path to the installed FFmpeg binary."""

ffprobe = os.path.join(_dir_path, 'ffprobe-{}-{}'.format(_os, _cpu))
"""The path to the installed FFprobe binary."""

packager = os.path.join(_dir_path, 'packager-{}-{}'.format(_os, _cpu))
"""The path to the installed Shaka Packager binary."""

# Special overrides for Ubuntu builds with hardware encoding support.
# These are not static binaries, and so they must be matched to the distro.
# Note that only ffmpeg gets a distro-specific suffix here; ffprobe and
# packager keep the generic paths computed above.
if _os == 'linux':
  # Imported lazily: 'distro' is a Linux-only dependency (see setup.py).
  import distro

  if distro.id() == 'ubuntu':
    if distro.version() in _ubuntu_versions_with_hw_encoders:
      suffix = '-ubuntu-' + distro.version()
      ffmpeg += suffix
50 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
import setuptools

import streamer

# Read the long description from the top-level README.  Decode as UTF-8
# explicitly so the build does not depend on the platform's locale encoding
# (the default on Windows may not be UTF-8).
# NOTE: the previously unused "import base64" was removed.
with open('README.md', 'r', encoding='utf-8') as f:
  long_description = f.read()

setuptools.setup(
    name='shaka-streamer',
    version=streamer.__version__,
    author='Google',
    description='A simple config-file based approach to streaming media.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/shaka-project/shaka-streamer',
    packages=setuptools.find_packages(),
    install_requires=[
        'pywin32>=308; platform_system == "Windows"',
        'pyyaml>=6,<7',
        'setproctitle>=1,<2',
        'typing_extensions>=4,<5',
    ],
    scripts=['shaka-streamer'],
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
    ],
    # Python 3.9+ tested in GitHub Actions CI
    python_requires='>=3.9',
)
49 |
--------------------------------------------------------------------------------
/config_files/bitrate_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample configuration file for Shaka Streamer to set custom
16 | # bitrates and resolutions.
17 |
18 | audio_channel_layouts:
19 | mono:
20 | max_channels: 1
21 | bitrates:
22 | aac: '64k'
23 | opus: '32k'
24 | stereo:
25 | max_channels: 2
26 | bitrates:
27 | aac: '128k'
28 | opus: '64k'
29 | surround:
30 | max_channels: 6
31 | bitrates:
32 | aac: '192k'
33 | opus: '96k'
34 |
35 | video_resolutions:
36 | wee:
37 | max_width: 250
38 | max_height: 100
39 | bitrates:
40 | h264: '108k'
41 | vp9: '95k'
42 | small:
43 | max_width: 750
44 | max_height: 300
45 | bitrates:
46 | h264: '400k'
47 | vp9: '276k'
48 | PAL-TV:
49 | max_width: 1024
50 | max_height: 576
51 | bitrates:
52 | h264: '2.5M'
53 | vp9: '1M'
54 | so-very-big:
55 | max_width: 3840
56 | max_height: 2160
57 | max_frame_rate: 30
58 | bitrates:
59 | h264: '17M'
60 | vp9: '12M'
61 | so-very-big-high-frame-rate:
62 | max_width: 3840
63 | max_height: 2160
64 | max_frame_rate: .inf
65 | bitrates:
66 | h264: '25M'
67 | vp9: '18M'
68 |
69 |
--------------------------------------------------------------------------------
/config_files/pipeline_low_latency_dash_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample pipeline configuration file for Shaka Streamer in live mode.
16 | # Here you configure resolutions, manifest formats, segment size, and more.
17 |
18 | # Streaming mode. Can be live or vod.
19 | streaming_mode: live
20 |
21 | # A list of resolutions to encode.
22 | resolutions:
23 | - 720p
24 | - 480p
25 |
26 | # A list of channel layouts to encode.
27 | channel_layouts:
28 | - stereo
29 |
30 | # The codecs to encode with.
31 | audio_codecs:
32 | - aac
33 | video_codecs:
34 | - h264
35 |
36 | # Manifest format must be DASH for LL-DASH streaming
37 | manifest_format:
38 | - dash
39 |
40 | # Length of each segment in seconds.
41 | segment_size: 2
42 |
43 | # Availability window, or the number of seconds a segment remains available.
44 | availability_window: 300
45 |
# Presentation delay, or how far back (in seconds) from the live edge the player should be.
47 | presentation_delay: 0
48 |
49 | # Update period, or how often the player should fetch a new manifest.
50 | update_period: 8
51 |
52 | # Stream in low latency dash mode, or chunked
53 | low_latency_dash_mode: True
54 |
55 | # UTC timing values, or the global timing source used for segment time stamps.
56 | utc_timings:
57 | - scheme_id_uri: urn:mpeg:dash:utc:http-xsdate:2014
    value: https://time.akamai.com/?iso
--------------------------------------------------------------------------------
/streamer/cloud/uploader.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Upload to cloud storage providers."""
16 |
17 | from streamer.cloud.base import CloudUploaderBase
18 |
19 |
# Supported protocols.  Built based on which optional modules are available
# for cloud storage providers.
SUPPORTED_PROTOCOLS: list[str] = []


# All supported protocols.  Used to provide more useful error messages.
ALL_SUPPORTED_PROTOCOLS: list[str] = ['gs', 's3']


# Try to load the GCS (Google Cloud Storage) uploader.  If we can, the user
# has the libraries needed for GCS support.
try:
  from streamer.cloud.gcs import GCSUploader
  SUPPORTED_PROTOCOLS.append('gs')
except ImportError:
  # Only a missing or broken optional dependency should disable a protocol.
  # A bare "except:" here would also swallow KeyboardInterrupt/SystemExit.
  pass


# Try to load the S3 (Amazon Cloud Storage) uploader.  If we can, the user
# has the libraries needed for S3 support.
try:
  from streamer.cloud.s3 import S3Uploader
  SUPPORTED_PROTOCOLS.append('s3')
except ImportError:
  pass
45 |
46 |
def create(upload_location: str) -> CloudUploaderBase:
  """Create an uploader appropriate to the upload location URL.

  Args:
    upload_location: A cloud storage URL, e.g. "gs://bucket/path" or
      "s3://bucket/path".

  Returns:
    A CloudUploaderBase instance matching the URL's protocol.

  Raises:
    RuntimeError: If the protocol is not recognized, or is recognized but the
      optional modules needed for it are not installed.
  """
  protocol = upload_location.split('://')[0]

  if protocol not in ALL_SUPPORTED_PROTOCOLS:
    raise RuntimeError("Protocol of {} isn't supported".format(upload_location))

  # Distinguish "unknown protocol" from "known protocol whose optional modules
  # failed to import".  Without this check, the calls below would crash with a
  # confusing NameError, since the uploader class was never imported.
  if protocol not in SUPPORTED_PROTOCOLS:
    raise RuntimeError(
        'Protocol "{}://" requires optional modules that are not '
        'installed'.format(protocol))

  if protocol == 'gs':
    return GCSUploader(upload_location)
  else:
    return S3Uploader(upload_location)
56 |
--------------------------------------------------------------------------------
/.github/workflows/publish-docs.yaml:
--------------------------------------------------------------------------------
1 | # A workflow to publish the docs to GitHub Pages.
2 | name: Publish Docs
3 |
4 | # Runs on push to main.
5 | # Can also be run manually for debugging purposes.
6 | on:
7 | push:
8 | branches:
9 | - main
10 | # For manual debugging:
11 | workflow_dispatch:
12 | inputs:
13 | ref:
14 | description: "The ref to build docs from."
15 | required: false
16 |
17 | defaults:
18 | run:
19 | shell: bash
20 |
21 | # If another instance of this workflow is started, cancel the old one.
22 | concurrency:
23 | group: ${{ github.workflow }}
24 | cancel-in-progress: true
25 |
26 | jobs:
27 | build_docs:
28 | name: Build docs
29 | runs-on: ubuntu-latest
30 | steps:
31 | - name: Checkout code
32 | uses: actions/checkout@v4
33 | with:
34 | ref: ${{ inputs.ref || github.ref }}
35 | persist-credentials: false
36 |
37 | - name: Set Python version
38 | uses: actions/setup-python@v5
39 | with:
40 | python-version: 3.13
41 |
42 | - name: Install Python deps
43 | run: |
44 | python3 -m pip install -r requirements.txt
45 | python3 -m pip install -r optional_requirements.txt
46 |
47 | - name: Build docs
48 | run: ./docs/build.sh
49 |
50 | - name: Upload docs artifacts
51 | uses: actions/upload-pages-artifact@v3
52 | with:
53 | path: docs/build/
54 |
55 | publish_docs:
56 | name: Publish updated docs
57 | needs: build_docs
58 | runs-on: ubuntu-latest
59 |
60 | # Grant GITHUB_TOKEN the permissions required to deploy to Pages
61 | permissions:
62 | pages: write
63 | id-token: write
64 |
65 | # Deploy to the github-pages environment
66 | environment:
67 | name: github-pages
68 | url: ${{ steps.deployment.outputs.page_url }}
69 |
70 | steps:
71 | - name: Deploy to GitHub Pages
72 | id: deployment
73 | uses: actions/deploy-pages@v4
74 |
--------------------------------------------------------------------------------
/binaries/setup.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 |
import sys
import setuptools  # type: ignore

import streamer_binaries

# Everything after a literal "--" on the command line names the
# platform-specific binaries to bundle into this wheel.  Strip those
# arguments off so setuptools never sees them.
separator_index = sys.argv.index('--')
platform_binaries = sys.argv[separator_index + 1:]
sys.argv = sys.argv[:separator_index]

# Read the long description from the README.  Decode as UTF-8 explicitly so
# the build does not depend on the platform's locale encoding.
with open('README.md', 'r', encoding='utf-8') as f:
  long_description = f.read()

setuptools.setup(
    name='shaka-streamer-binaries',
    version=streamer_binaries.__version__,
    author='Google',
    description='A package containing FFmpeg, FFprobe, and Shaka Packager static builds.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    url='https://github.com/shaka-project/shaka-streamer/tree/main/binaries',
    packages=[streamer_binaries.__name__,],
    classifiers=[
        'Programming Language :: Python :: 3',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: POSIX :: Linux',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows',
    ],
    package_data={
        # Only add the corresponding platform specific binaries to the wheel.
        streamer_binaries.__name__: platform_binaries,
    },
    install_requires=[
        # This is only used for Linux, and only supports Linux.
        'distro>=1.9,<2; platform_system == "Linux"',
    ],
)
53 |
--------------------------------------------------------------------------------
/config_files/input_external_command.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample input configuration file for Shaka Streamer for using an
16 | # external command as input. This particular example only runs on Linux or
17 | # macOS, and requires a tool called Gource (https://gource.io/). You can
18 | # adapt this to any other external command that generates video or audio.
19 |
20 | # List of inputs.
21 | inputs:
22 | # The type of input.
23 | - input_type: external_command
24 | # The command to run. Here $SHAKA_STREAMER_EXTERNAL_COMMAND_OUTPUT
25 | # (or %SHAKA_STREAMER_EXTERNAL_COMMAND_OUTPUT% on Windows) is replaced
26 | # with the output pipe.
27 | name: >
28 | gource
29 | -1240x680
30 | --output-framerate 30
31 | --loop
32 | --disable-input
33 | -o $SHAKA_STREAMER_EXTERNAL_COMMAND_OUTPUT
34 | # Extra input arguments for ffmpeg to understand the command's output.
35 | extra_input_args: '-f image2pipe -framerate 25 -re'
    # Extra filters to be applied to the input. In this case, add 20 pixels of
    # black padding to the input on all sides to pad it up to 720p. This helps
38 | # account for overscan on TVs.
39 | filters:
40 | - pad=1280:720:20:20
41 | # The media type of the input. Can be audio or video.
42 | media_type: video
    # Frame rate in frames per second.
44 | frame_rate: 25
45 | # Resolution of the input.
46 | resolution: 720p
47 | # Channel layout of the input.
48 | channel_layout: stereo
49 |
--------------------------------------------------------------------------------
/.github/workflows/settings.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2022 Google LLC
2 | #
3 | # Use of this source code is governed by a BSD-style
4 | # license that can be found in the LICENSE file or at
5 | # https://developers.google.com/open-source/licenses/bsd
6 |
7 | # A reusable workflow to extract settings from a repository.
8 | # To enable a setting, create a "GitHub Environment" with the same name.
9 | #
10 | # This enables per-repo settings that aren't copied to a fork. This is better
11 | # than "vars" or "secrets", since those would require the use of
12 | # `pull_request_target` instead of `pull_request` triggers, which come with
13 | # additional risks such as the bypassing of "require approval" rules for
14 | # workflows.
15 | #
16 | # Without a setting for flags like "self_hosted", test workflows for a fork
17 | # would time out waiting for self-hosted runners that the fork doesn't have.
18 | name: Settings
19 |
20 | # Runs when called from another workflow.
21 | on:
22 | workflow_call:
23 | outputs:
24 | self_hosted:
25 | description: "Enable jobs requiring a self-hosted runner."
26 | value: ${{ jobs.settings.outputs.self_hosted }}
27 | debug:
28 | description: "Enable SSH debugging when a workflow fails."
29 | value: ${{ jobs.settings.outputs.debug }}
30 |
31 | jobs:
32 | settings:
33 | runs-on: ubuntu-latest
34 | outputs:
35 | self_hosted: ${{ steps.settings.outputs.self_hosted }}
36 | debug: ${{ steps.settings.outputs.debug }}
37 | env:
38 | GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
39 | steps:
40 | - id: settings
41 | run: |
42 | environments=$(gh api /repos/${{ github.repository }}/environments)
43 | for name in self_hosted debug; do
44 | exists=$(echo $environments | jq ".environments[] | select(.name == \"$name\")")
45 | if [[ "$exists" != "" ]]; then
46 | echo "$name=true" >> $GITHUB_OUTPUT
47 | echo "\"$name\" enabled."
48 | else
49 | echo "$name=" >> $GITHUB_OUTPUT
50 | echo "\"$name\" disabled."
51 | fi
52 | done
53 |
--------------------------------------------------------------------------------
/config_files/pipeline_live_encrypted_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample pipeline configuration file for Shaka Streamer in live mode.
16 | # Here you configure resolutions, manifest formats, segment size, and more.
17 |
18 | # Streaming mode. Can be live or vod.
19 | streaming_mode: live
20 |
21 | # A list of resolutions to encode.
22 | resolutions:
23 | - 720p
24 | - 480p
25 |
26 | # A list of channel layouts to encode.
27 | channel_layouts:
28 | - stereo
29 |
30 | # Manifest format (dash, hls, or both)
31 | manifest_format:
32 | - dash
33 | - hls
34 |
35 | encryption:
36 | # Enables encryption.
37 | # If disabled, the following settings are ignored.
38 | enable: True
39 | # Content identifier that identifies which encryption key to use.
40 | # This will default to a random content ID, so this is optional.
41 | content_id: '1234'
42 | # Key server url. An encryption key is generated from this server.
43 | key_server_url: https://license.uat.widevine.com/cenc/getcontentkey/widevine_test
44 | # The name of the signer.
45 | signer: widevine_test
46 | # AES signing key in hex string.
47 | signing_key: 1ae8ccd0e7985cc0b6203a55855a1034afc252980e970ca90e5202689f947ab9
48 | # AES signing iv in hex string.
49 | signing_iv: d58ce954203b7c9a9a9d467f59839249
50 | # Protection scheme (cenc or cbcs)
51 | # These are different methods of using a block cipher to encrypt media.
52 | protection_scheme: cenc
53 | # Seconds of unencrypted media at the beginning of the stream.
54 | clear_lead: 10
55 |
--------------------------------------------------------------------------------
/docs/source/autolink.js:
--------------------------------------------------------------------------------
1 | /**
2 | * Copyright 2019 Google LLC
3 | *
4 | * Licensed under the Apache License, Version 2.0 (the "License");
5 | * you may not use this file except in compliance with the License.
6 | * You may obtain a copy of the License at
7 | *
8 | * https://www.apache.org/licenses/LICENSE-2.0
9 | *
10 | * Unless required by applicable law or agreed to in writing, software
11 | * distributed under the License is distributed on an "AS IS" BASIS,
12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | * See the License for the specific language governing permissions and
14 | * limitations under the License.
15 | */
16 |
/**
 * Wrap each known type name appearing in a property's type annotation in a
 * link to that type's definition anchor on the same page.
 */
function autolink() {
  // Every type defined in these docs has a hash that can be linked to to see
  // its fields or enum values. But Sphinx isn't linking to them automatically.
  // This may be caused by a missing Sphinx plugin, but since we heavily
  // post-process the generated type info for our config docs (see
  // docs/source/conf.py), it is easiest to add these links here.
  const linkMap = new Map();

  for (const element of document.querySelectorAll('.sig-name')) {
    const previousElement = element.previousElementSibling;
    if (previousElement && previousElement.classList.contains('sig-prename')) {
      const shortName = element.textContent;
      const longName = previousElement.textContent + element.textContent;
      // FIX: the original built a "link" with no <a> markup at all (longName
      // was computed but unused), so the replace() below was a no-op.  Link
      // the short name to the fully-qualified name's page anchor.
      const link = `<a href="#${longName}">` +
          `${shortName}</a>`;
      linkMap.set(shortName, link);
    }
  }

  const propertyElements = document.querySelectorAll('.property');
  for (const [shortName, link] of linkMap) {
    // A regex that matches the name with word boundaries, so "Input" doesn't
    // match "InputType".
    const regex = new RegExp(`\\b${shortName}\\b`);

    for (const element of propertyElements) {
      if (regex.exec(element.textContent)) {
        element.innerHTML = element.innerHTML.replace(regex, link);
      }
    }
  }
}

document.addEventListener('DOMContentLoaded', autolink);
51 |
--------------------------------------------------------------------------------
/config_files/bitrate_hls_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2021 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample configuration file for Shaka Streamer to set custom
16 | # bitrates following Apple's HLS recommended values.
17 |
18 | audio_channel_layouts:
19 | mono:
20 | max_channels: 1
21 | bitrates:
22 | aac: '64k'
23 | ac3: '92k'
24 | eac3: '64k'
25 | stereo:
26 | max_channels: 2
27 | bitrates:
28 | aac: '160k'
29 | ac3: '192k'
30 | eac3: '160k'
31 | surround:
32 | max_channels: 6
33 | bitrates:
34 | aac: '320k'
35 | ac3: '384k'
36 | eac3: '192k'
37 |
38 | video_resolutions:
39 | ninth-hd:
40 | max_width: 416
41 | max_height: 234
42 | max_frame_rate: 30
43 | bitrates:
44 | h264: '145k'
45 | hevc: '100k'
46 | fourth-hd:
47 | max_width: 640
48 | max_height: 360
49 | max_frame_rate: 30
50 | bitrates:
51 | h264: '365k'
52 | hevc: '145k'
53 | third-hd:
54 | max_width: 768
55 | max_height: 432
56 | max_frame_rate: 30
57 | bitrates:
58 | h264: '1.1M'
59 | hevc: '300k'
60 | quarter-fhd:
61 | max_width: 960
62 | max_height: 540
63 | bitrates:
64 | h264: '2M'
65 | hevc: '1.6M'
66 | hd:
67 | max_width: 1280
68 | max_height: 720
69 | bitrates:
70 | h264: '4.5M'
71 | hevc: '3.4M'
72 | full-hd:
73 | max_width: 1920
74 | max_height: 1080
75 | bitrates:
76 | h264: '7.8M'
77 | hevc: '5.8M'
78 | quad-hd:
79 | max_width: 2560
80 | max_height: 1440
81 | bitrates:
82 | h264: '16M'
83 | hevc: '8.1M'
84 | ultra-hd:
85 | max_width: 3840
86 | max_height: 2160
87 | bitrates:
88 | h264: '34M'
89 | hevc: '16.8M'
90 |
91 |
--------------------------------------------------------------------------------
/config_files/input_vod_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample input configuration file for Shaka Streamer for VOD.
16 |
17 | # List of inputs.
18 | inputs:
19 | # Name of the input file.
20 | # This example can be downloaded from https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.4k.mkv
21 | - name: Sintel.2010.4k.mkv
22 | # The media type of the input. Can be audio or video.
23 | media_type: video
24 |
25 | # A second track (audio) from the same input file.
26 | - name: Sintel.2010.4k.mkv
27 | media_type: audio
28 |
29 | # Several text tracks of different languages.
30 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Arabic.vtt
31 | - name: Sintel.2010.Arabic.vtt
32 | media_type: text
33 | language: ar
34 |
35 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.English.vtt
36 | - name: Sintel.2010.English.vtt
37 | media_type: text
38 | language: en
39 |
40 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Esperanto.vtt
41 | - name: Sintel.2010.Esperanto.vtt
42 | media_type: text
43 | language: eo
44 |
45 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Spanish.vtt
46 | - name: Sintel.2010.Spanish.vtt
47 | media_type: text
48 | language: es
49 |
50 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.French.vtt
51 | - name: Sintel.2010.French.vtt
52 | media_type: text
53 | language: fr
54 |
55 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Chinese.vtt
56 | - name: Sintel.2010.Chinese.vtt
57 | media_type: text
58 | language: zh
59 |
--------------------------------------------------------------------------------
/config_files/pipeline_vod_encrypted_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample pipeline configuration file for Shaka Streamer in VOD mode.
16 | # Here you configure resolutions, manifest formats, segment size, and more.
17 |
18 | # Streaming mode. Can be live or vod.
19 | streaming_mode: vod
20 |
21 | # A list of resolutions to encode.
22 | # For VOD, you can specify many more resolutions than you would with live,
23 | # since the encoding does not need to be done in real time.
24 | resolutions:
25 | - 4k
26 | - 1080p
27 | - 720p
28 | - 480p
29 | - 360p
30 |
31 | # A list of channel layouts to encode.
32 | channel_layouts:
33 | - stereo
34 | - surround
35 |
36 | # Manifest format (dash, hls or both)
37 | manifest_format:
38 | - dash
39 | - hls
40 |
41 | # Length of each segment in seconds.
42 | segment_size: 10
43 |
44 | # Forces the use of SegmentTemplate in DASH.
45 | segment_per_file: True
46 |
47 | encryption:
48 | # Enables encryption.
49 | # If disabled, the following settings are ignored.
50 | enable: True
51 | # Content identifier that identifies which encryption key to use.
52 | # This will default to a random content ID, so this is optional.
53 | content_id: '1234'
54 | # Key server url. An encryption key is generated from this server.
55 | key_server_url: https://license.uat.widevine.com/cenc/getcontentkey/widevine_test
56 | # The name of the signer.
57 | signer: widevine_test
58 | # AES signing key in hex string.
59 | signing_key: 1ae8ccd0e7985cc0b6203a55855a1034afc252980e970ca90e5202689f947ab9
60 | # AES signing iv in hex string.
61 | signing_iv: d58ce954203b7c9a9a9d467f59839249
62 | # Protection scheme (cenc or cbcs)
63 | # These are different methods of using a block cipher to encrypt media.
64 | protection_scheme: cenc
65 | # Seconds of unencrypted media at the beginning of the stream.
66 | clear_lead: 10
67 |
--------------------------------------------------------------------------------
/streamer/external_command_node.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """A module that runs an external command to generate media."""
16 |
17 | import os
18 | import signal
19 | import subprocess
20 | from . import node_base
21 |
class ExternalCommandNode(node_base.NodeBase):
  """Runs an arbitrary external shell command that generates media.

  The command writes its output to the path named by the
  SHAKA_STREAMER_EXTERNAL_COMMAND_OUTPUT environment variable.
  """

  def __init__(self, command: str, output_path: str) -> None:
    """
    Args:
      command: The shell command to run; may reference
        $SHAKA_STREAMER_EXTERNAL_COMMAND_OUTPUT.
      output_path: The path (typically a pipe) the command should write to.
    """
    super().__init__()
    self._command = command
    self._output_path = output_path

  def start(self) -> None:
    # This environment/shell variable must be used by the external command as
    # the place it sends its generated output.  Since the command is executed
    # with shell=True, the command can include
    # $SHAKA_STREAMER_EXTERNAL_COMMAND_OUTPUT at any point.
    # FIX: inherit the parent environment instead of replacing it entirely;
    # previously the child received only this one variable, losing PATH,
    # HOME, SystemRoot, etc., which breaks most external commands.
    env = os.environ.copy()
    env['SHAKA_STREAMER_EXTERNAL_COMMAND_OUTPUT'] = self._output_path
    # The yaml file may contain a multi-line string, which seems to cause
    # subprocess to execute each line as a command when shell=True.  So
    # convert newlines into spaces.
    command = self._command.replace('\n', ' ')
    # Create a new group for the spawned shell to easily shut it down.
    # FIX: default to no special flags so that an OS that is neither POSIX
    # nor Windows no longer raises NameError below.
    new_group_flag: dict = {}
    if os.name == 'posix':
      # A POSIX only argument.
      new_group_flag = {'start_new_session': True}
    elif os.name == 'nt':
      # A Windows only argument.
      new_group_flag = {'creationflags': subprocess.CREATE_NEW_PROCESS_GROUP}
    self._process = self._create_process(command, shell=True,
                                         env=env, **new_group_flag)

  def stop(self, status) -> None:
    # Since we created the external shell process in a new group, sending
    # a SIGTERM to the group will terminate the shell and its children.
    if self.check_status() == node_base.ProcessStatus.Running:
      if os.name == 'posix':
        os.killpg(os.getpgid(self._process.pid), signal.SIGTERM)
      elif os.name == 'nt':
        os.kill(self._process.pid, signal.CTRL_BREAK_EVENT)
59 |
--------------------------------------------------------------------------------
/config_files/pipeline_vod_encrypted_raw_config.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample pipeline configuration file for Shaka Streamer in VOD mode.
16 | # Here you configure resolutions, manifest formats, segment size, and more.
17 |
18 | # Streaming mode. Can be live or vod.
19 | streaming_mode: vod
20 |
21 | # A list of resolutions to encode.
22 | # For VOD, you can specify many more resolutions than you would with live,
23 | # since the encoding does not need to be done in real time.
24 | resolutions:
25 | - 4k
26 | - 1080p
27 | - 720p
28 | - 480p
29 | - 360p
30 |
31 | # A list of channel layouts to encode.
32 | channel_layouts:
33 | - stereo
34 | - surround
35 |
36 | # Manifest format (dash, hls or both)
37 | manifest_format:
38 | - dash
39 | - hls
40 |
41 | # Length of each segment in seconds.
42 | segment_size: 10
43 |
44 | # Forces the use of SegmentTemplate in DASH.
45 | segment_per_file: True
46 |
47 | encryption:
48 | # Enables encryption.
49 | # If disabled, the following settings are ignored.
50 | enable: True
51 | # Set to 'raw' to use the Raw Key Encryption mode. Default is widevine.
52 | encryption_mode: raw
53 | # List of keys. Key and key id are 32 digit hex strings
54 | # Optionally 'label' can be specified. If no label is specified, it
55 | # is assumed to be the default key.
56 | keys:
57 | - key_id: 8858d6731bee84d3b6e3d12f3c767a26
58 | key: 1ae8ccd0e7985cc0b6203a55855a1034
59 | # Optional IV. If not specified one will be randomly created
60 | # Must be either 16 digit or 32 digit hex
61 | iv: 8858d6731bee84d3b6e3d12f3c767a26
62 | # One or more pssh boxes in hex string format.
63 | pssh: "000000317073736800000000EDEF8BA979D64ACEA3C827DCD\
64 | 51D21ED00000011220F7465737420636F6E74656E74206964"
65 | # Optional protection systems to be generated
66 | protection_systems:
67 | - Widevine
68 | - FairPlay
69 | - PlayReady
70 | - Marlin
71 | - CommonSystem
72 | # Protection scheme (cenc or cbcs)
73 | # These are different methods of using a block cipher to encrypt media.
74 | protection_scheme: cenc
75 | # Seconds of unencrypted media at the beginning of the stream.
76 | clear_lead: 10
77 |
--------------------------------------------------------------------------------
/docs/source/hardware_encoding.rst:
--------------------------------------------------------------------------------
1 | ..
2 | Copyright 2019 Google LLC
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | https://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
16 | Hardware Encoding
17 | =================
18 |
19 | Setup on Linux (Intel)
20 | ----------------------
21 |
22 | By default, hardware encoding on Linux uses FFmpeg’s VAAPI support, which
23 | supports Intel devices.
24 |
25 | To use VAAPI, you must also install the appropriate VAAPI driver for your
26 | device. For example, on Ubuntu, you can install all available VAAPI drivers
27 | with:
28 |
29 | .. code:: sh
30 |
31 | sudo apt -y install va-driver-all
32 |
33 | VAAPI support is enabled by default in Debian & Ubuntu packages for FFmpeg.
34 |
35 | Setup on Linux (Nvidia)
36 | -----------------------
37 |
38 | You may also use FFmpeg's NVENC support on Linux, which supports Nvidia devices.
39 |
40 | For this, set ``hwaccel_api`` in the pipeline config to ``'nvenc'``.
41 |
42 | The underlying driver and special FFmpeg headers can be installed with:
43 |
44 | .. code:: sh
45 |
46 | sudo apt -y install libnvidia-encode1
47 | git clone https://git.videolan.org/git/ffmpeg/nv-codec-headers.git
48 |       (cd nv-codec-headers && make && sudo make install)
49 |
50 | NVENC support is **not** enabled by default in Debian & Ubuntu packages for
51 | FFmpeg. To use it, you may need to build FFmpeg from source and pass
52 | ``--enable-nvenc`` to configure. See instructions in :doc:`prerequisites` for
53 | details on building FFmpeg from source.
54 |
55 | Setup on macOS
56 | --------------
57 |
58 | Hardware encoding on macOS uses Apple's VideoToolbox API. No setup is required.
59 |
60 | Setup on Windows
61 | ----------------
62 |
63 | Hardware encoding for Windows is not yet supported, but we are accepting PRs if
64 | you’d like to contribute additional platform support. This doc may be a useful
65 | reference for hardware-related options in FFmpeg:
66 | https://trac.ffmpeg.org/wiki/HWAccelIntro
67 |
68 | Configuration
69 | -------------
70 |
71 | To activate hardware encoding for any video codec, simply prefix the codec name
72 | with ``hw:`` in the pipeline config file.
73 |
74 | For example, see this snippet from
75 | `config_files/pipeline_live_hardware_config.yaml`:
76 |
77 | .. code:: yaml
78 |
79 | audio_codecs:
80 | - aac
81 | - opus
82 | video_codecs:
83 | - h264
84 | - hw:vp9
85 |
86 | Note that not all codecs are supported by all devices or APIs. For a list of
87 | supported hardware codecs, see: https://trac.ffmpeg.org/wiki/HWAccelIntro
88 |
--------------------------------------------------------------------------------
/.github/workflows/release-please.yaml:
--------------------------------------------------------------------------------
1 | name: Release
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | release:
10 | runs-on: ubuntu-latest
11 | outputs:
12 | release_created: ${{ steps.release.outputs.release_created }}
13 | tag_name: ${{ steps.release.outputs.tag_name }}
14 | patch: ${{ steps.release.outputs.patch }}
15 |
16 | steps:
17 | # Create/update release PR
18 | - uses: googleapis/release-please-action@v4
19 | id: release
20 | with:
21 | # Make sure we create the PR against the correct branch.
22 | target-branch: ${{ github.ref_name }}
23 | # Use a special shaka-bot access token for releases.
24 | token: ${{ secrets.RELEASE_PLEASE_TOKEN }}
25 | # See also settings in these files:
26 | manifest-file: .release-please-manifest.json
27 | config-file: .release-please-config.json
28 |
29 | # The jobs below are all conditional on a release having been created by
30 | # someone merging the release PR. They all run in parallel.
31 |
32 | pypi:
33 | runs-on: ubuntu-latest
34 | needs: release
35 | if: needs.release.outputs.release_created
36 | permissions:
37 | id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
38 | steps:
39 | - uses: actions/checkout@v4
40 | with:
41 | ref: refs/tags/${{ needs.release.outputs.tag_name }}
42 | fetch-depth: 0
43 | persist-credentials: false
44 |
45 | - name: Set Python version
46 | uses: actions/setup-python@v5
47 | with:
48 | python-version: 3.13
49 |
50 | - name: Install Python deps
51 | run: |
52 | python3 -m pip install -r requirements.txt
53 | python3 -m pip install -r optional_requirements.txt
54 |
55 | - name: Build modules
56 | run: |
57 | # Clean everything. Doesn't matter in a workflow, but if you're
58 | # running this locally...
59 | rm -rf build dist binaries/dist
60 | rm -rf *.egg-info binaries/*.egg-info
61 | rm -f binaries/streamer_binaries/{ffmpeg-*,ffprobe-*,packager-*}
62 | # Build everything.
63 | python3 setup.py sdist bdist_wheel
64 | python3 binaries/build_wheels.py
65 | # Put all the build outputs into one folder for upload together.
66 | cp binaries/dist/* dist/
67 |
68 | - name: Check modules
69 | run: |
70 | python3 -m twine check --strict dist/*
71 |
72 | - name: Attach modules to the release
73 | env:
74 | GH_TOKEN: ${{ secrets.RELEASE_PLEASE_TOKEN }}
75 | run: |
76 | gh release upload --clobber "${{ needs.release.outputs.tag_name }}" dist/*
77 |
78 | # This uses PyPi's trusted publishing config. It can see and verify that
79 | # the publication request comes from this repo and this exact workflow,
80 | # and this repo and workflow are allow-listed for publication without a
81 | # token.
82 | - name: Publish modules
83 | uses: pypa/gh-action-pypi-publish@release/v1
84 | with:
85 | verbose: true
86 | attestations: true
87 |
--------------------------------------------------------------------------------
/docs/source/cloud_storage.rst:
--------------------------------------------------------------------------------
1 | ..
2 | Copyright 2024 Google LLC
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | https://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
16 | Cloud Storage
17 | =============
18 | Shaka Streamer can output to an HTTP/HTTPS server or to cloud storage.
19 |
20 | HTTP or HTTPS URLs will be passed directly to Shaka Packager, which will make
21 | PUT requests to the HTTP/HTTPS server to write output files. The URL you pass
22 | will be a base for the URLs Packager writes to. For example, if you pass
23 | https://localhost:8080/foo/bar/, Packager would make a PUT request to
24 | https://localhost:8080/foo/bar/dash.mpd to write the manifest (with default
25 | settings).
26 |
27 | Cloud storage URLs can be either Google Cloud Storage URLs (beginning with
28 | gs://) or Amazon S3 URLs (beginning with s3://). Like the HTTP support
29 | described above, these are a base URL. If you ask for output to gs://foo/bar/,
30 | Streamer will write to gs://foo/bar/dash.mpd (with default settings).
31 |
32 | Cloud storage output uses the storage provider's Python libraries. Find more
33 | details on setup and authentication below.
34 |
35 |
36 | Google Cloud Storage Setup
37 | ~~~~~~~~~~~~~~~~~~~~~~~~~~
38 |
39 | Install the Python module if you haven't yet:
40 |
41 | .. code:: sh
42 |
43 | python3 -m pip install google-cloud-storage
44 |
45 | To use the default authentication, you will need default application
46 | credentials installed. On Linux, these live in
47 | ``~/.config/gcloud/application_default_credentials.json``.
48 |
49 | The easiest way to install default credentials is through the Google Cloud SDK.
50 | See https://cloud.google.com/sdk/docs/install-sdk to install the SDK. Then run:
51 |
52 | .. code:: sh
53 |
54 | gcloud init
55 | gcloud auth application-default login
56 |
57 | Follow the instructions given to you by gcloud to initialize the environment
58 | and login.
59 |
60 | Example command-line for live streaming to Google Cloud Storage:
61 |
62 | .. code:: sh
63 |
64 | python3 shaka-streamer \
65 | -i config_files/input_looped_file_config.yaml \
66 | -p config_files/pipeline_live_config.yaml \
67 | -o gs://my_gcs_bucket/folder/
68 |
69 |
70 | Amazon S3 Setup
71 | ~~~~~~~~~~~~~~~
72 |
73 | Install the Python module if you haven't yet:
74 |
75 | .. code:: sh
76 |
77 | python3 -m pip install boto3
78 |
79 | To authenticate to Amazon S3, you can either add credentials to your `boto
80 | config file`_ or login interactively using the `AWS CLI`_.
81 |
82 | .. code:: sh
83 |
84 | aws configure
85 |
86 | Example command-line for live streaming to Amazon S3:
87 |
88 | .. code:: sh
89 |
90 | python3 shaka-streamer \
91 | -i config_files/input_looped_file_config.yaml \
92 | -p config_files/pipeline_live_config.yaml \
93 | -o s3://my_s3_bucket/folder/
94 |
95 |
96 | .. _boto config file: http://boto.cloudhackers.com/en/latest/boto_config_tut.html
97 | .. _AWS CLI: https://boto3.amazonaws.com/v1/documentation/api/latest/guide/configuration.html
98 |
--------------------------------------------------------------------------------
/streamer/cloud/gcs.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Upload to Google Cloud Storage."""
16 |
17 | import urllib.parse
18 |
19 | from typing import BinaryIO, Optional
20 |
21 | import google.cloud.storage # type: ignore
22 | import google.api_core.exceptions # type: ignore
23 |
24 | from streamer.cloud.base import CloudUploaderBase
25 |
26 |
class GCSUploader(CloudUploaderBase):
  """See base class for interface docs."""

  def __init__(self, upload_location: str) -> None:
    # Parse the upload location (URL).
    url = urllib.parse.urlparse(upload_location)

    self._client = google.cloud.storage.Client()
    # If upload_location is "gs://foo/bar", url.netloc is "foo", which is the
    # bucket name.
    self._bucket = self._client.bucket(url.netloc)

    # Strip both left and right slashes. Otherwise, we get a blank folder name.
    self._base_path = url.path.strip('/')

    # A file-like object from the Google Cloud Storage module that we write to
    # during a chunked upload.
    self._chunked_output: Optional[BinaryIO] = None

  def _full_path(self, path: str) -> str:
    """Join |path| onto the base path, stripping slashes on both sides.

    Leading slashes would otherwise produce a blank folder name in GCS.
    """
    return (self._base_path + path).strip('/')

  def _blob_for_write(self, path: str):
    """Return a destination blob for |path| with caching disabled."""
    blob = self._bucket.blob(self._full_path(path))
    blob.cache_control = 'no-cache'
    return blob

  def write_non_chunked(self, path: str, data: bytes) -> None:
    blob = self._blob_for_write(path)
    # A file-like interface to that blob; the context manager guarantees the
    # handle is closed even if the write raises.
    with blob.open('wb',
                   retry=google.cloud.storage.retry.DEFAULT_RETRY) as output:
      output.write(data)

  def start_chunked(self, path: str) -> None:
    blob = self._blob_for_write(path)
    # A file-like interface to that blob, held open until end_chunked() or
    # reset() closes it.
    self._chunked_output = blob.open(
        'wb', retry=google.cloud.storage.retry.DEFAULT_RETRY)

  def write_chunk(self, data: bytes) -> None:
    assert self._chunked_output is not None
    self._chunked_output.write(data)

  def end_chunked(self) -> None:
    self.reset()

  def delete(self, path: str) -> None:
    blob = self._bucket.blob(self._full_path(path))
    try:
      blob.delete(retry=google.cloud.storage.retry.DEFAULT_RETRY)
    except google.api_core.exceptions.NotFound:
      # Some delete calls seem to throw "not found", but the files still get
      # deleted. So ignore these and don't fail the request.
      pass

  def reset(self) -> None:
    # Close any in-progress chunked upload, which finalizes the blob.
    if self._chunked_output:
      self._chunked_output.close()
      self._chunked_output = None
--------------------------------------------------------------------------------
/docs/source/configuration_fields.rst:
--------------------------------------------------------------------------------
1 | ..
2 | Copyright 2019 Google LLC
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | https://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
16 |
17 | Configuration Field Reference
18 | =============================
19 |
20 | There are two config files required by Shaka Streamer: one to describe the
21 | inputs, and one to describe the encoding pipeline. Through the module API,
22 | these are taken as dictionaries. Through the command-line front-end, these are
23 | parsed as `YAML files `_.
24 |
25 | *(If you aren't familiar with YAML, it fills many of the same roles as JSON,
26 | except that it's more readable and can contain comments.)*
27 |
28 | If you are just getting started with Shaka Streamer, you should probably look in
29 | the `config_files/`_ folder and browse through some examples. If you are trying
30 | to customize one of those examples or get more details on supported options,
31 | this document is for you.
32 |
33 | .. _config_files/: https://github.com/shaka-project/shaka-streamer/tree/main/config_files
34 |
35 |
36 | Input Configs
37 | -------------
38 |
39 | The input config describes the inputs. In general, each needs to have an input
40 | type (such as a looped file), a media type (such as video), and a name (such as
41 | a file path). Other fields may be required for certain types.
42 |
43 | An input config is generally composed of multiple inputs, such as one high-res
44 | video, one audio input per language, and possibly some subtitle or caption
45 | files.
46 |
47 | ..
48 | Sphinx wants to sort these, but we should put the top-level config structures
49 | first, then the others.
50 | .. autoclass:: streamer.input_configuration.InputConfig
51 | .. autoclass:: streamer.input_configuration.Input
52 | .. automodule:: streamer.input_configuration
53 | :exclude-members: InputConfig, Input
54 |
55 |
56 | Pipeline Configs
57 | ----------------
58 |
59 | The pipeline config describes the encoding pipeline. The only required
60 | parameters are the streaming mode (live or VOD) and the resolutions.
61 | Everything else has default values, but you may want to customize the codecs,
62 | resolutions, availability window, and/or encryption settings.
63 |
64 | ..
65 | Sphinx wants to sort these, but we should put the top-level config structure
66 | first, then the others.
67 | .. autoclass:: streamer.pipeline_configuration.PipelineConfig
68 | .. automodule:: streamer.pipeline_configuration
69 | :exclude-members: PipelineConfig
70 |
71 |
72 | Custom Bitrate and Resolution Configs
73 | -------------------------------------
74 |
75 | To customize bitrates or resolution, you may provide a third config file
76 | defining these. If this config is given, it replaces the default definitions.
77 |
78 | ..
79 | Sphinx wants to sort these, but we should put the top-level config structure
80 | first, then the others.
81 | .. autoclass:: streamer.bitrate_configuration.BitrateConfig
82 | .. automodule:: streamer.bitrate_configuration
83 | :exclude-members: BitrateConfig, BitrateString, VideoResolutionName,
84 | get_value, keys, set_map, sorted_values
85 |
--------------------------------------------------------------------------------
/streamer/subprocessWindowsPatch.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Monkeypatch subprocess on Windows to find .CMD scripts.
16 |
17 | Without this patch, subprocess fails to find .CMD scripts on Windows, even
18 | though these are executables and should be treated as such according to the
19 | PATHEXT environment variable.
20 |
21 | Many people "work around" this issue on Windows with subprocess's shell=True
22 | argument, but this comes with a risk of shell injection vulnerabilities.
23 |
24 | Another solution is to explicitly add ".CMD" to the end of some commands on
25 | Windows, but this breaks portability and requires "if windows" to be scattered
26 | around a codebase.
27 |
28 | This monkeypatch allows the caller of subprocess to stop worrying about Windows
29 | nuances and to go back to the security best practice of shell=False. Any .CMD
30 | script that would be found by the Windows shell will now be found by
31 | subprocess. And because we're using the standard Windows PATHEXT environment
32 | variable, this can be extended to other types of executable scripts, as well.
33 | """
34 |
35 | import os
36 | import subprocess
37 | import sys
38 |
39 |
40 | # NOTE: All of the higher-level methods eventually delegate to Popen, so we
41 | # only patch that one method.
42 | # run => Popen
43 | # call => Popen
44 | # check_call => call => Popen
45 | # check_output => run => Popen
46 |
# These environment variables should almost certainly exist, but these are
# defaults in case they are missing.  PATHEXT lists the file extensions
# Windows treats as executable; PATH lists the directories searched for
# commands.
DEFAULT_PATHEXT = '.COM;.EXE;.BAT;.CMD'
DEFAULT_PATH = r'C:\WINDOWS\system32;C:\WINDOWS'
51 |
52 |
53 | def resolve(exe):
54 | """Resolve a command name into a full path to the executable."""
55 |
56 | if '/' in exe or '\\' in exe or ':' in exe:
57 | # This is a path, so don't modify it.
58 | return exe
59 |
60 | if '.' in exe:
61 | # This has an extension already. Don't search for an extension.
62 | exe_names = [exe]
63 | else:
64 | # This is a command name without an extension, so check for every extension
65 | # in PATHEXT.
66 | extensions = os.environ.get('PATHEXT', DEFAULT_PATHEXT).split(';')
67 | exe_names = [exe + ext for ext in extensions]
68 |
69 | exe_paths = os.environ.get('PATH', DEFAULT_PATH).split(';')
70 |
71 | for path in exe_paths:
72 | for name in exe_names:
73 | candidate = os.path.join(path, name)
74 | if os.access(candidate, os.X_OK): # If executable
75 | return candidate
76 |
77 | # Failed to resolve, so return the original name and let Popen fail with a
78 | # natural-looking error complaining that this command cannot be found.
79 | return exe
80 |
81 |
# Keep a reference to the original Popen so the patch below can delegate to
# it (and so its docstring can be copied onto the patch).
real_Popen = subprocess.Popen
83 |
84 |
def Popen(args, *more_args, **kwargs):
  """A patch to install over subprocess.Popen.

  If |args| is a list, its first item (the command name) is resolved into a
  full executable path before delegating to the real Popen.  All other
  arguments pass through untouched.
  """
  # If the first argument is a list, resolve the command name, which is the
  # first item in the list.  Work on a copy so the caller's list is not
  # mutated as a side effect of spawning a process.
  if isinstance(args, list):
    args = [resolve(args[0])] + args[1:]

  # Delegate to the real Popen implementation.
  return real_Popen(args, *more_args, **kwargs)
95 |
96 |
# Only patch win32, but not cygwin. Cygwin's subprocess already finds .CMD
# scripts correctly, so it needs no patch.
if sys.platform == 'win32':
  # Patch over Popen.
  subprocess.Popen = Popen  # type: ignore
  # Copy the docstring from the real Popen into the patch, so that
  # help(subprocess.Popen) is still relatively sane with this patch installed.
  Popen.__doc__ = real_Popen.__doc__
104 |
--------------------------------------------------------------------------------
/config_files/input_multiperiod.yaml:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | # This is a sample input configuration file for Shaka Streamer for a multi-period stream.
16 |
17 | multiperiod_inputs_list:
18 |
19 | # List of inputs, this will be the first period in the final multi-period manifest.
20 | - inputs:
21 | # Name of the input file.
22 | # This example can be downloaded from https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.4k.mkv
23 | - name: Sintel.2010.4k.mkv
24 | # The media type of the input. Can be audio or video.
25 | media_type: video
26 |
27 | # A second track (audio) from the same input file.
28 | - name: Sintel.2010.4k.mkv
29 | media_type: audio
30 |
31 | # Several text tracks of different languages.
32 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Arabic.vtt
33 | - name: Sintel.2010.Arabic.vtt
34 | media_type: text
35 | language: ar
36 |
37 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.English.vtt
38 | - name: Sintel.2010.English.vtt
39 | media_type: text
40 | language: en
41 |
42 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Esperanto.vtt
43 | - name: Sintel.2010.Esperanto.vtt
44 | media_type: text
45 | language: eo
46 |
47 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Spanish.vtt
48 | - name: Sintel.2010.Spanish.vtt
49 | media_type: text
50 | language: es
51 |
52 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.French.vtt
53 | - name: Sintel.2010.French.vtt
54 | media_type: text
55 | language: fr
56 |
57 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Chinese.vtt
58 | - name: Sintel.2010.Chinese.vtt
59 | media_type: text
60 | language: zh
61 |
62 | # List of inputs, this will be the second period in the final multi-period manifest.
63 | - inputs:
64 | # Name of the input file.
65 | # This example can be downloaded from https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.4k.mkv
66 | - name: Sintel.2010.4k.mkv
67 | # The media type of the input. Can be audio or video.
68 | media_type: video
69 |
70 | # A second track (audio) from the same input file.
71 | - name: Sintel.2010.4k.mkv
72 | media_type: audio
73 |
74 | # Several text tracks of different languages.
75 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Arabic.vtt
76 | - name: Sintel.2010.Arabic.vtt
77 | media_type: text
78 | language: ar
79 |
80 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.English.vtt
81 | - name: Sintel.2010.English.vtt
82 | media_type: text
83 | language: en
84 |
85 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Esperanto.vtt
86 | - name: Sintel.2010.Esperanto.vtt
87 | media_type: text
88 | language: eo
89 |
90 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Spanish.vtt
91 | - name: Sintel.2010.Spanish.vtt
92 | media_type: text
93 | language: es
94 |
95 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.French.vtt
96 | - name: Sintel.2010.French.vtt
97 | media_type: text
98 | language: fr
99 |
100 | # https://storage.googleapis.com/shaka-streamer-assets/sample-inputs/Sintel.2010.Chinese.vtt
101 | - name: Sintel.2010.Chinese.vtt
102 | media_type: text
103 | language: zh
104 |
105 |
--------------------------------------------------------------------------------
/streamer/cloud/s3.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Upload to Amazon S3."""
16 |
17 | import urllib.parse
18 |
19 | from typing import Any, Optional
20 |
21 | import boto3 # type: ignore
22 | import botocore.config # type: ignore
23 |
24 | from streamer.cloud.base import CloudUploaderBase
25 |
26 |
# S3 has a minimum chunk size for multipart uploads.
MIN_S3_CHUNK_SIZE = (5 << 20) # 5MB


class S3Uploader(CloudUploaderBase):
  """Uploads output to an Amazon S3 bucket.

  See base class for interface docs.
  """

  def __init__(self, upload_location: str) -> None:
    """
    Args:
      upload_location: An "s3://bucket/path" URL naming the destination.
    """
    # Parse the upload location (URL).
    url = urllib.parse.urlparse(upload_location)

    # "standard" retry mode makes botocore retry transient failures for us.
    config = botocore.config.Config(retries = {'mode': 'standard'})
    self._client = boto3.client('s3', config=config)

    # If upload_location is "s3://foo/bar", url.netloc is "foo", which is the
    # bucket name.
    self._bucket_name = url.netloc

    # Strip both left and right slashes. Otherwise, we get a blank folder name.
    self._base_path = url.path.strip('/')

    # State for chunked uploads:
    self._upload_id: Optional[str] = None
    self._upload_path: Optional[str] = None
    self._next_part_number: int = 0
    self._part_info: list[dict[str, Any]] = []
    self._data: bytes = b''

  def _full_key(self, path: str) -> str:
    """Maps |path| onto an object key under the configured base path.

    Strips leading slashes; otherwise, we get a blank folder name in S3.
    """
    return (self._base_path + path).strip('/')

  def write_non_chunked(self, path: str, data: bytes) -> None:
    """Writes a complete object in a single request."""
    full_path = self._full_key(path)

    # Write the whole object at once.
    # NOTE: The low-level put_object API takes CacheControl directly, the
    # same way create_multipart_upload does below.  ExtraArgs exists only on
    # the high-level transfer APIs (upload_file/upload_fileobj); passing it
    # here raised a parameter validation error.
    self._client.put_object(Body=data, Bucket=self._bucket_name, Key=full_path,
                            CacheControl='no-cache')

  def start_chunked(self, path: str) -> None:
    """Begins a multipart upload to |path|."""
    self._upload_path = self._full_key(path)

    # Ask the client to start a multi-part upload.
    response = self._client.create_multipart_upload(
        Bucket=self._bucket_name, Key=self._upload_path,
        CacheControl='no-cache')

    # This ID is sent to subsequent calls into the S3 client.
    self._upload_id = response['UploadId']

    # We must accumulate metadata about each part to complete the file at the
    # end of the chunked transfer.
    self._part_info = []
    # We must also number the parts, starting from 1 per the S3 API.
    self._next_part_number = 1
    # Multi-part uploads for S3 can't have chunks smaller than 5MB.
    # We accumulate data for chunks here.
    self._data = b''

  def write_chunk(self, data: bytes, force: bool = False) -> None:
    """Buffers |data| and uploads one part when the S3 minimum is reached.

    With force=True, flushes any buffered data even if it is below the
    minimum part size (only valid for the final part of an upload).
    """
    # Collect data until we hit the minimum chunk size.
    self._data += data

    data_len = len(self._data)
    if data_len >= MIN_S3_CHUNK_SIZE or (data_len and force):
      # Upload one "part", which may be comprised of multiple HTTP chunks from
      # Packager.
      response = self._client.upload_part(
          Bucket=self._bucket_name, Key=self._upload_path,
          PartNumber=self._next_part_number, UploadId=self._upload_id,
          Body=self._data)

      # We have to collect this data, in this format, to finish the multipart
      # upload later.
      self._part_info.append({
        'PartNumber': self._next_part_number,
        'ETag': response['ETag'],
      })
      self._next_part_number += 1
      self._data = b''

  def end_chunked(self) -> None:
    """Flushes any buffered data and completes the multipart upload."""
    # Flush the buffer.
    self.write_chunk(b'', force=True)

    # Complete the multipart upload.
    upload_info = { 'Parts': self._part_info }
    self._client.complete_multipart_upload(
        Bucket=self._bucket_name, Key=self._upload_path,
        UploadId=self._upload_id, MultipartUpload=upload_info)
    self.reset()

  def delete(self, path: str) -> None:
    """Deletes the object at |path|."""
    # Delete the key derived from |path|.  This previously used
    # self._upload_path (chunked-upload state, possibly None), silently
    # ignoring the |path| argument.
    self._client.delete_object(
        Bucket=self._bucket_name, Key=self._full_key(path))

  def reset(self) -> None:
    """Clears all state related to an in-progress chunked upload."""
    self._upload_id = None
    self._upload_path = None
    self._next_part_number = 0
    self._part_info = []
    self._data = b''
127 |
--------------------------------------------------------------------------------
/docs/source/prerequisites.rst:
--------------------------------------------------------------------------------
1 | ..
2 | Copyright 2019 Google LLC
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | https://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
16 | Installing Prerequisites
17 | ========================
18 |
19 | TL;DR
20 | -----
21 |
22 | If you installed Shaka Streamer via pip, you already have the necessary Python
23 | dependencies. If you don't want to use your own FFmpeg and Shaka Packager,
24 | install our prebuilt binaries with:
25 |
26 | .. code:: sh
27 |
28 | pip3 install shaka-streamer-binaries
29 |
30 | The rest of this document only matters for development setup if you plan to
31 | make changes to Shaka Streamer.
32 |
33 |
34 | Required runtime modules
35 | ------------------------
36 |
37 | To install required modules via Ubuntu or Debian packages:
38 |
39 | .. code:: sh
40 |
41 | sudo apt -y install python3-yaml python3-distro
42 |
43 |
44 | For any platform, you can install them via pip:
45 |
46 | .. code:: sh
47 |
48 | pip3 install -r requirements.txt
49 |
50 |
51 | Development modules
52 | -------------------
53 |
54 | To install development modules via Ubuntu or Debian packages:
55 |
56 | .. code:: sh
57 |
58 | sudo apt -y install \
59 | python3-flask python3-mypy python3-setuptools \
60 | python3-sphinx python3-wheel
61 |
62 |
63 | For any platform, you can install them via pip:
64 |
65 | .. code:: sh
66 |
67 | pip3 install -r optional_requirements.txt
68 |
69 |
70 |
71 | Shaka Streamer Binaries package (recommended)
72 | ---------------------------------------------
73 |
74 | Shaka Streamer requires `Shaka Packager`_ and `FFmpeg`_ as it uses them
75 | internally.
76 |
77 | These binaries can be installed for your platform easily with the
78 | ``shaka-streamer-binaries`` package:
79 |
80 | .. code:: sh
81 |
82 | pip3 install shaka-streamer-binaries
83 |
84 | The static FFmpeg builds are pulled from here:
85 | https://github.com/shaka-project/static-ffmpeg-binaries
86 |
87 | The static Shaka Packager builds are pulled from here:
88 | https://github.com/shaka-project/shaka-packager
89 |
90 | FFmpeg builds for Ubuntu require you to install vaapi packages:
91 |
92 | .. code:: sh
93 |
94 | sudo apt -y install libva2 libva-drm2
95 |
96 |
97 | Shaka Packager (manual installation, not recommended)
98 | -----------------------------------------------------
99 |
100 | Pre-built Shaka Packager binaries can be downloaded from github here:
101 | https://github.com/shaka-project/shaka-packager/releases
102 |
103 | To install a Shaka Packager binary on Linux:
104 |
105 | .. code:: sh
106 |
107 | sudo install -m 755 ~/Downloads/packager-linux \
108 | /usr/local/bin/packager
109 |
110 | To build Shaka Packager from source, follow instructions here:
111 | https://shaka-project.github.io/shaka-packager/html/build_instructions.html
112 |
113 |
114 | FFmpeg (manual installation, not recommended)
115 | ---------------------------------------------
116 |
117 | If your Linux distribution has FFmpeg v4.1+, you can just install the package.
118 | For example, this will work in Ubuntu 19.04+:
119 |
120 | .. code:: sh
121 |
122 | sudo apt -y install ffmpeg
123 |
124 | For older versions of Ubuntu or any other Linux distro which does not have a
125 | new enough version of FFmpeg, you can build it from source. For example:
126 |
127 | .. code:: sh
128 |
129 | sudo apt -y install \
130 | libx264-dev libvpx-dev libopus-dev libfreetype6-dev \
131 | libfontconfig1-dev libsdl2-dev yasm \
132 | va-driver-all libnvidia-encode1
133 |
134 | git clone https://github.com/FFmpeg/FFmpeg ffmpeg
135 | cd ffmpeg
136 | git checkout n4.1.3
137 | ./configure \
138 | --enable-libx264 --enable-libvpx --enable-libopus \
139 | --enable-gpl --enable-libfreetype --enable-libfontconfig
140 | make
141 | sudo make install
142 |
143 | For macOS, you can either build FFmpeg from source or you can use `Homebrew`_
144 | to install it:
145 |
146 | .. code:: sh
147 |
148 | brew install ffmpeg
149 |
150 |
151 | Cloud Storage (optional)
152 | ------------------------
153 |
154 | Shaka Streamer can push content directly to a Google Cloud Storage or Amazon S3
155 | bucket. To use this feature, additional Python modules are required.
156 |
157 | See :doc:`cloud_storage` for details.
158 |
159 |
160 | Test Dependencies (optional)
161 | ----------------------------
162 |
163 | To run the end-to-end tests, you must also install nodejs and NPM.
164 |
165 | To install these via Ubuntu or Debian packages:
166 |
167 | .. code:: sh
168 |
169 | sudo apt -y install nodejs npm
170 |
171 | To install Node.js and NPM on any other platform, you can try one of these:
172 |
173 | * https://github.com/nodesource/distributions
174 | * https://nodejs.org/en/download/
175 |
176 |
177 | .. _Shaka Packager: https://github.com/shaka-project/shaka-packager
178 | .. _FFmpeg: https://ffmpeg.org/
179 | .. _Homebrew: https://brew.sh/
180 |
--------------------------------------------------------------------------------
/streamer/pipe.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """A module that encapsulates all the platform-specific logic related to creating
16 | named pipes."""
17 |
18 | import os
19 | import sys
20 | import uuid
21 | from threading import Thread
22 | from typing import Optional
23 |
class Pipe:
  """A class that represents a pipe.

  A Pipe connects one writer process to one reader process.  Use the static
  factory methods `create_ipc_pipe` or `create_file_pipe` to obtain a
  functioning instance.
  """

  def __init__(self) -> None:
    """Initializes a non-functioning pipe."""

    # Path handed to the writer process (see write_end()).
    self._read_pipe_name = ''
    # Path handed to the reader process (see read_end()).
    self._write_pipe_name = ''
    # Background transfer thread; only used on Windows.
    self._thread: Optional[Thread] = None

  @staticmethod
  def create_ipc_pipe(temp_dir: str, suffix: str = '') -> 'Pipe':
    """A static method used to create a pipe between two processes.

    On POSIX systems, it creates a named pipe using `os.mkfifo`.

    On Windows platforms, it starts a background thread that transfers data
    from the writer to the reader process it is connected to.

    Args:
      temp_dir: Directory in which to create the FIFO (POSIX only).
      suffix: Optional suffix appended to the unique pipe name.

    Raises:
      RuntimeError: If the platform is neither Windows nor mkfifo-capable.
    """

    unique_name = str(uuid.uuid4()) + suffix
    pipe = Pipe()

    if sys.platform == 'win32':
      import win32pipe # type: ignore
      pipe_name = '-nt-shaka-' + unique_name
      # The read pipe is connected to a writer process.
      pipe._read_pipe_name = r'\\.\pipe\W' + pipe_name
      # The write pipe is connected to a reader process.
      pipe._write_pipe_name = r'\\.\pipe\R' + pipe_name
      buf_size = 64 * 1024

      # Inbound side: the external writer connects here.
      read_side = win32pipe.CreateNamedPipe(
          pipe._read_pipe_name,
          win32pipe.PIPE_ACCESS_INBOUND,
          win32pipe.PIPE_WAIT | win32pipe.PIPE_TYPE_BYTE | win32pipe.PIPE_READMODE_BYTE,
          1,
          buf_size,
          buf_size,
          0,
          None)

      # Outbound side: the external reader connects here.
      write_side = win32pipe.CreateNamedPipe(
          pipe._write_pipe_name,
          win32pipe.PIPE_ACCESS_OUTBOUND,
          win32pipe.PIPE_WAIT | win32pipe.PIPE_TYPE_BYTE | win32pipe.PIPE_READMODE_BYTE,
          1,
          buf_size,
          buf_size,
          0,
          None)

      # The daemon thread shuttles bytes from read_side to write_side and
      # will not keep the interpreter alive on shutdown.
      pipe._thread = Thread(
          target=Pipe._win_thread_fn,
          args=(read_side, write_side, buf_size),
          daemon=True)
      # Start the thread.
      pipe._thread.start()
    elif hasattr(os, 'mkfifo'):
      # POSIX: a single FIFO serves as both ends of the pipe.
      pipe_name = os.path.join(temp_dir, unique_name)
      pipe._read_pipe_name = pipe_name
      pipe._write_pipe_name = pipe_name
      readable_by_owner_only = 0o600 # Unix permission bits
      os.mkfifo(pipe_name, mode=readable_by_owner_only)
    else:
      raise RuntimeError('Platform not supported.')
    return pipe

  @staticmethod
  def create_file_pipe(path: str, mode: str) -> 'Pipe':
    """Returns a Pipe object whose read or write end is a path to a file.

    Args:
      path: The file path to use as one end of the pipe.
      mode: 'w' if a process will write to |path|, 'r' if it will read.

    Raises:
      RuntimeError: If |mode| is neither 'w' nor 'r'.
    """

    pipe = Pipe()
    # A process will write on the read pipe(file).
    if mode == 'w':
      pipe._read_pipe_name = path
    # A process will read from the write pipe(file).
    elif mode == 'r':
      pipe._write_pipe_name = path
    else:
      raise RuntimeError("'{}' is not a valid mode for a Pipe.".format(mode))
    return pipe

  @staticmethod
  def _win_thread_fn(read_side, write_side, buf_size):
    """This method serves as a server that connects a writer client
    to a reader client.

    This method will run as a thread, and will only be called on Windows
    platforms.
    """

    import win32pipe, win32file, pywintypes # type: ignore
    try:
      # Connect to both ends of the pipe before starting the transfer.
      # This function is blocking. If no process is connected yet, it will wait
      # indefinitely.
      win32pipe.ConnectNamedPipe(read_side)
      win32pipe.ConnectNamedPipe(write_side)
      while True:
        # Writer -> read_side -> write_side -> Reader
        _, data = win32file.ReadFile(read_side, buf_size)
        win32file.WriteFile(write_side, data)
    except Exception as ex:
      # Remove the pipes from the system.
      win32file.CloseHandle(read_side)
      win32file.CloseHandle(write_side)
      # If the error was due to one of the processes shutting down, just exit
      # normally.  109 is ERROR_BROKEN_PIPE and 232 is ERROR_NO_DATA (pipe
      # being closed) in the Win32 API.
      if isinstance(ex, pywintypes.error) and ex.args[0] in [109, 232]:
        return 0
      # Otherwise, raise that error.
      raise ex

  def read_end(self) -> str:
    """Returns a pipe/file path that a reader process can read from."""
    assert self._write_pipe_name
    return self._write_pipe_name

  def write_end(self) -> str:
    """Returns a pipe/file path that a writer process can write to."""
    assert self._read_pipe_name
    return self._read_pipe_name
145 |
--------------------------------------------------------------------------------
/docs/source/overview.rst:
--------------------------------------------------------------------------------
1 | ..
2 | Copyright 2019 Google LLC
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | https://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 |
16 | Overview
17 | ========
18 |
19 | Why Shaka Streamer?
20 | -------------------
21 |
22 | Shaka Streamer is packaging and streaming made easy.
23 |
24 | * Simple, config-file-based application
25 |
26 | * No complicated command-lines
27 | * Sane defaults
28 | * Reusable configs
29 |
30 | * Runs on Linux, macOS, and Windows
31 | * Supports almost any input FFmpeg can ingest
32 | * Can push output automatically to Google Cloud Storage or Amazon S3
33 | * FFmpeg and Shaka Packager binaries provided
34 |
35 | See also the more detailed list of :ref:`Features` below.
36 |
37 |
38 | Getting started
39 | ---------------
40 |
41 | Shaka Streamer requires `Python 3.9+`_. Release versions of Shaka Streamer can
42 | be installed or upgraded through ``pip3`` with:
43 |
44 | .. code:: sh
45 |
46 | # To install/upgrade globally (drop the "sudo" for Windows):
47 | sudo pip3 install --upgrade shaka-streamer shaka-streamer-binaries
48 |
49 | # To install/upgrade per-user:
50 | pip3 install --user --upgrade shaka-streamer shaka-streamer-binaries
51 |
52 |
53 | The ``shaka-streamer-binaries`` package contains `Shaka Packager`_ and `FFmpeg`_
54 | binaries, for your convenience. You may also choose to install these
55 | dependencies separately and use ``shaka-streamer --use-system-binaries`` instead
56 | of the binary package.
57 |
58 | To use Shaka Streamer, you need two YAML config files: one to describe the
59 | input, and one to describe the encoding pipeline. Sample configs can be found
60 | in the `config_files/`_ folder. Sample inputs referenced there can be
61 | downloaded individually over HTTPS or all at once through gsutil:
62 |
63 | .. code:: sh
64 |
65 | gsutil -m cp gs://shaka-streamer-assets/sample-inputs/* .
66 |
67 |
68 |
69 | Features
70 | --------
71 |
72 | * Supports:
73 |
74 | * VOD or live content
75 | * DASH and HLS output (or both at once)
76 | * VOD multi-period DASH (and equivalent HLS output)
77 | * Clear or encrypted output
78 | * Hardware encoding (if available from the platform)
79 | * Output to HTTP/HTTPS server or cloud storage provider (see
80 | :doc:`cloud_storage`)
81 |
82 | * Lots of options for input
83 |
84 | * Transcode and package static input for VOD
85 | * Loop a file for simulated live streaming
86 | * Grab video from a webcam
87 | * Generate input from an arbitrary external command
88 |
89 | * Gives you control over details if you want it
90 |
91 | * Control DASH live stream attributes
92 | * Control output folders and file names
93 | * Add arbitrary FFmpeg filters for input or output
94 |
95 |
96 | Known issues
97 | ~~~~~~~~~~~~
98 | We do support subtitles/captions (``media_type`` set to ``text``) for VOD
99 | content. But please note that at this time, we have no way to pipeline text
100 | for live streams, loop a single text input with ``input_type`` of
101 | ``looped_file``, transform text streams from one format to another, or cut a
102 | snippet of text using the ``start_time`` and ``end_time`` fields of the input
103 | config.
104 |
105 | Multiple VAAPI devices are not yet supported on Linux. See `issue #17`_.
106 |
107 |
108 | Development
109 | -----------
110 | See :doc:`prerequisites` for detailed instructions on installing prerequisites
111 | and optional dependencies.
112 |
113 |
114 | Running tests
115 | ~~~~~~~~~~~~~
116 |
117 | We have end-to-end tests that will start streams and check them from a headless
118 | browser using Shaka Player. End-to-end tests can be run like so:
119 |
120 | .. code:: sh
121 |
122 | python3 run_end_to_end_tests.py
123 |
124 |
125 | Technical details
126 | ~~~~~~~~~~~~~~~~~
127 |
128 | Shaka Streamer connects FFmpeg and Shaka Packager in a pipeline, such that
129 | output from FFmpeg is piped directly into the packager, and packaging and
130 | transcoding of all resolutions, bitrates, and languages occur in parallel.
131 |
132 | The overall pipeline is composed of several nodes. At a minimum, these are
133 | ``TranscoderNode`` (which runs FFmpeg) and ``PackagerNode`` (which runs Shaka
134 | Packager). They communicate via named pipes on Linux and macOS.
135 |
136 | All input types are read directly by ``TranscoderNode``. If the input type is
137 | ``looped_file``, then ``TranscoderNode`` will add additional FFmpeg options to
138 | loop that input file indefinitely.
139 |
140 | If the ``-o`` option is given with a Google Cloud Storage URL, then an
141 | additional node called ``ProxyNode`` is added after ``PackagerNode``. It runs a
142 | local webserver which takes the output of packager and pushes to cloud storage.
143 |
144 | The pipeline and the nodes in it are constructed by ``ControllerNode`` based on
145 | your config files. If you want to write your own front-end or interface
146 | directly to the pipeline, you can create a ``ControllerNode`` and call the
147 | ``start()``, ``stop()``, and ``is_running()`` methods on it. You can use
148 | the ``shaka-streamer`` script as an example of how to do this. See also
149 | :doc:`module_api`.
150 |
151 |
152 | .. _config_files/: https://github.com/shaka-project/shaka-streamer/tree/main/config_files
153 | .. _issue #17: https://github.com/shaka-project/shaka-streamer/issues/17
154 | .. _Python 3.9+: https://www.python.org/downloads/
155 | .. _Shaka Packager: https://github.com/shaka-project/shaka-packager
156 | .. _FFmpeg: https://ffmpeg.org/
157 |
--------------------------------------------------------------------------------
/binaries/build_wheels.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """A script that downloads ffmpeg, ffprobe, and packager static builds for all
16 | the platforms we build for and then builds distribution wheels for them.
17 | """
18 |
19 | import os
20 | import shutil
21 | import subprocess
22 | import urllib.request
23 |
24 | import streamer_binaries
25 |
26 |
# Version constants.
# Change to download different versions.
FFMPEG_VERSION = 'n7.1-2'
PACKAGER_VERSION = 'v3.4.0'

# A map of suffixes that will be combined with the binary download links
# to achieve a full download link. Different suffix for each platform.
# Extend this dictionary to add more platforms.
PLATFORM_SUFFIXES = {
    # Linux x64
    'manylinux2014_x86_64': '-linux-x64',
    # Linux arm64
    'manylinux2014_aarch64': '-linux-arm64',
    # macOS x64 with 10.9 SDK
    'macosx_10_9_x86_64': '-osx-x64',
    # macOS arm64 with 11.0 SDK
    'macosx_11_0_arm64': '-osx-arm64',
    # Windows x64
    'win_amd64': '-win-x64.exe',
}

FFMPEG_DL_PREFIX = 'https://github.com/shaka-project/static-ffmpeg-binaries/releases/download/' + FFMPEG_VERSION
PACKAGER_DL_PREFIX = 'https://github.com/shaka-project/shaka-packager/releases/download/' + PACKAGER_VERSION

# The download links to each binary. These download links aren't complete.
# They are missing the platform-specific suffix and optional distro-specific
# suffix (Linux only).
DISTRO_BINARIES_DL = [
    FFMPEG_DL_PREFIX + '/ffmpeg',
]
# These don't have distro-specific suffixes on Linux.
NON_DISTRO_BINARIES_DL = [
    FFMPEG_DL_PREFIX + '/ffprobe',
    PACKAGER_DL_PREFIX + '/packager',
]
# Important: wrap map() in list(), because map returns an iterator, and we need
# a real list.
UBUNTU_SUFFIXES = list(map(
    lambda version: '-ubuntu-{}'.format(version),
    streamer_binaries._ubuntu_versions_with_hw_encoders))

# Absolute path of the folder containing this script (and setup.py).
BINARIES_ROOT_DIR = os.path.abspath(os.path.dirname(__file__))
69 |
70 |
def build_bdist_wheel(platform_name, platform_binaries):
  """Runs 'setup.py bdist_wheel' to produce a wheel for |platform_name|.

  The files named in |platform_binaries| are forwarded to setup.py (after
  '--') so that they are bundled into the wheel as package data.
  """

  command = [
      'python3', 'setup.py',
      # Build the package as a binary wheel.
      'bdist_wheel',
      # Platform tag embedded in the generated filenames.
      '--plat-name', platform_name,
      # Temporary directory used while assembling the distribution.
      '--bdist-dir', platform_name,
      # Python tag embedded in the generated filenames.
      '--python-tag', 'py3',
      # Run quietly.
      '--quiet',
      # Everything after '--' is the list of platform-specific binaries to
      # include in this wheel.
      '--',
  ] + list(platform_binaries)

  subprocess.check_call(command, cwd=BINARIES_ROOT_DIR)

  # Delete the build directory so a subsequent 'setup.py' run can't reuse it.
  shutil.rmtree(os.path.join(BINARIES_ROOT_DIR, 'build'))
97 |
def download_binary(download_url: str, download_dir: str) -> str:
  """Downloads |download_url| into |download_dir| and marks it executable.

  Returns the name (basename) of the downloaded file.
  """

  binary_name = download_url.rsplit('/', 1)[-1]
  binary_path = os.path.join(download_dir, binary_name)

  print('downloading', binary_name, flush=True, end=' ')
  urllib.request.urlretrieve(download_url, binary_path)
  print('(finished)')

  # The downloaded binaries must be executable to be usable
  # (rwxr-xr-x permissions).
  os.chmod(binary_path, 0o755)

  return binary_name
114 |
115 |
def main():
  """Downloads binaries and builds one wheel per supported platform."""

  # Binaries are placed inside the package directory so that setup.py picks
  # them up as package data.
  download_dir = os.path.join(BINARIES_ROOT_DIR, streamer_binaries.__name__)

  # For each platform (OS + CPU), download its binaries and create a binary
  # wheel distribution containing exactly those executables.
  for platform_name, suffix in PLATFORM_SUFFIXES.items():
    wheel_contents = []

    def fetch(full_url):
      # Download one binary and record its name for inclusion in the wheel.
      wheel_contents.append(
          download_binary(download_url=full_url, download_dir=download_dir))

    # Binaries that have a single build per platform.  The platform-specific
    # suffix completes each download link.
    for base_url in NON_DISTRO_BINARIES_DL:
      fetch(base_url + suffix)

    # FFmpeg binaries have extra variants for Ubuntu Linux to support
    # hardware encoding.
    for base_url in DISTRO_BINARIES_DL:
      fetch(base_url + suffix)

      if 'linux' in suffix:
        for ubuntu_suffix in UBUNTU_SUFFIXES:
          fetch(base_url + suffix + ubuntu_suffix)

    # Build a wheel distribution for this platform and include the binaries
    # we have just downloaded.
    build_bdist_wheel(platform_name, wheel_contents)


if __name__ == '__main__':
  main()
--------------------------------------------------------------------------------
/CODE_OF_CONDUCT.md:
--------------------------------------------------------------------------------
1 | # Contributor Covenant Code of Conduct
2 |
3 | ## Our Pledge
4 |
5 | We as members, contributors, and leaders pledge to make participation in our
6 | community a harassment-free experience for everyone, regardless of age, body
7 | size, visible or invisible disability, ethnicity, sex characteristics, gender
8 | identity and expression, level of experience, education, socio-economic status,
9 | nationality, personal appearance, race, caste, color, religion, or sexual
10 | identity and orientation.
11 |
12 | We pledge to act and interact in ways that contribute to an open, welcoming,
13 | diverse, inclusive, and healthy community.
14 |
15 | ## Our Standards
16 |
17 | Examples of behavior that contributes to a positive environment for our
18 | community include:
19 |
20 | * Demonstrating empathy and kindness toward other people
21 | * Being respectful of differing opinions, viewpoints, and experiences
22 | * Giving and gracefully accepting constructive feedback
23 | * Accepting responsibility and apologizing to those affected by our mistakes,
24 | and learning from the experience
25 | * Focusing on what is best not just for us as individuals, but for the overall
26 | community
27 |
28 | Examples of unacceptable behavior include:
29 |
30 | * The use of sexualized language or imagery, and sexual attention or advances of
31 | any kind
32 | * Trolling, insulting or derogatory comments, and personal or political attacks
33 | * Public or private harassment
34 | * Publishing others' private information, such as a physical or email address,
35 | without their explicit permission
36 | * Other conduct which could reasonably be considered inappropriate in a
37 | professional setting
38 |
39 | ## Enforcement Responsibilities
40 |
41 | Community leaders are responsible for clarifying and enforcing our standards of
42 | acceptable behavior and will take appropriate and fair corrective action in
43 | response to any behavior that they deem inappropriate, threatening, offensive,
44 | or harmful.
45 |
46 | Community leaders have the right and responsibility to remove, edit, or reject
47 | comments, commits, code, wiki edits, issues, and other contributions that are
48 | not aligned to this Code of Conduct, and will communicate reasons for moderation
49 | decisions when appropriate.
50 |
51 | ## Scope
52 |
53 | This Code of Conduct applies within all community spaces, and also applies when
54 | an individual is officially representing the community in public spaces.
55 | Examples of representing our community include using an official email address,
56 | posting via an official social media account, or acting as an appointed
57 | representative at an online or offline event.
58 |
59 | ## Enforcement
60 |
61 | Instances of abusive, harassing, or otherwise unacceptable behavior may be
62 | reported to the community leaders responsible for enforcement at
63 | *shaka-player-maintainers@googlegroups.com*. If for any reason, you are
64 | uncomfortable reaching out to the community leaders, please email
65 | *opensource@google.com*.
66 | All complaints will be reviewed and investigated promptly and fairly.
67 |
68 | All community leaders are obligated to respect the privacy and security of the
69 | reporter of any incident.
70 |
71 | ## Enforcement Guidelines
72 |
73 | Community leaders will follow these Community Impact Guidelines in determining
74 | the consequences for any action they deem in violation of this Code of Conduct:
75 |
76 | ### 1. Correction
77 |
78 | **Community Impact**: Use of inappropriate language or other behavior deemed
79 | unprofessional or unwelcome in the community.
80 |
81 | **Consequence**: A written warning from community leaders, providing
82 | clarity around the nature of the violation and an explanation of why the
83 | behavior was inappropriate. A public apology may be requested.
84 |
85 | ### 2. Warning
86 |
87 | **Community Impact**: A violation through a single incident or series of
88 | actions.
89 |
90 | **Consequence**: A warning with consequences for continued behavior. No
91 | interaction with the people involved, including unsolicited interaction with
92 | those enforcing the Code of Conduct, for a specified period of time. This
93 | includes avoiding interactions in community spaces as well as external channels
94 | like social media. Violating these terms may lead to a temporary or permanent
95 | ban.
96 |
97 | ### 3. Temporary Ban
98 |
99 | **Community Impact**: A serious violation of community standards, including
100 | sustained inappropriate behavior.
101 |
102 | **Consequence**: A temporary ban from any sort of interaction or public
103 | communication with the community for a specified period of time. No public or
104 | private interaction with the people involved, including unsolicited interaction
105 | with those enforcing the Code of Conduct, is allowed during this period.
106 | Violating these terms may lead to a permanent ban.
107 |
108 | ### 4. Permanent Ban
109 |
110 | **Community Impact**: Demonstrating a pattern of violation of community
111 | standards, including sustained inappropriate behavior, harassment of an
112 | individual, or aggression toward or disparagement of classes of individuals.
113 |
114 | **Consequence**: A permanent ban from any sort of public interaction within the
115 | community.
116 |
117 | ## Attribution
118 |
119 | This Code of Conduct is adapted from the [Contributor Covenant][homepage],
120 | version 2.1, available at
121 | [https://www.contributor-covenant.org/version/2/1/code_of_conduct.html][v2.1].
122 |
123 | Community Impact Guidelines were inspired by
124 | [Mozilla's code of conduct enforcement ladder][Mozilla CoC].
125 |
126 | For answers to common questions about this code of conduct, see the FAQ at
127 | [https://www.contributor-covenant.org/faq][FAQ]. Translations are available at
128 | [https://www.contributor-covenant.org/translations][translations].
129 |
130 | [homepage]: https://www.contributor-covenant.org
131 | [v2.1]: https://www.contributor-covenant.org/version/2/1/code_of_conduct.html
132 | [Mozilla CoC]: https://github.com/mozilla/diversity
133 | [FAQ]: https://www.contributor-covenant.org/faq
134 | [translations]: https://www.contributor-covenant.org/translations
135 |
--------------------------------------------------------------------------------
/shaka-streamer:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | #
3 | # Copyright 2019 Google LLC
4 | #
5 | # Licensed under the Apache License, Version 2.0 (the "License");
6 | # you may not use this file except in compliance with the License.
7 | # You may obtain a copy of the License at
8 | #
9 | # https://www.apache.org/licenses/LICENSE-2.0
10 | #
11 | # Unless required by applicable law or agreed to in writing, software
12 | # distributed under the License is distributed on an "AS IS" BASIS,
13 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14 | # See the License for the specific language governing permissions and
15 | # limitations under the License.
16 |
17 | """
18 | Shaka Streamer v{version}
19 |
20 | Shaka Streamer offers a simple config-file based approach to preparing streaming
21 | media. It greatly simplifies the process of using FFmpeg and Shaka Packager for
22 | both VOD and live content.
23 |
24 | Full documentation can be found at
25 | https://shaka-project.github.io/shaka-streamer/
26 | """
27 |
28 | import argparse
29 | import json
30 | import sys
31 | import time
32 | import yaml # type: ignore
33 |
34 | import streamer
35 |
36 |
class CustomArgParseFormatter(
    argparse.ArgumentDefaultsHelpFormatter,
    argparse.RawDescriptionHelpFormatter):
  """Help formatter that combines two argparse formatter behaviors.

  This gives us defaults for each argument in the help text, plus it preserves
  whitespace in the description field.
  """
46 |
47 |
def main():
  """Entry point for the shaka-streamer command-line tool.

  Parses arguments, loads the YAML config files, then runs the controller
  until the pipeline finishes or fails.

  Returns:
    An integer process exit code: 0 on success, 1 on failure.
  """
  description = __doc__.format(version=streamer.__version__)

  parser = argparse.ArgumentParser(description=description,
                                   formatter_class=CustomArgParseFormatter,
                                   epilog="""
The output location can be a local filesystem folder. It will be created if it
does not exist. It can also be an HTTP or HTTPS URL, or a cloud storage URL.
See docs: https://shaka-project.github.io/shaka-streamer/cloud_storage.html
""")

  parser.add_argument('-i', '--input-config',
                      required=True,
                      help='The path to the input config file (required).')
  parser.add_argument('-p', '--pipeline-config',
                      required=True,
                      help='The path to the pipeline config file (required).')
  parser.add_argument('-b', '--bitrate-config',
                      help='The path to a config file which defines custom ' +
                           'bitrates and resolutions for transcoding. ' +
                           '(optional, see example in ' +
                           'config_files/bitrate_config.yaml)')
  parser.add_argument('-c', '--cloud-url',
                      default=None,
                      help='The Google Cloud Storage or Amazon S3 URL to ' +
                           'upload to. (Starts with gs:// or s3://) (DEPRECATED, use -o)')
  parser.add_argument('-o', '--output',
                      default='output_files',
                      help='The output folder or URL to write files to. See ' +
                           'below for details.')
  parser.add_argument('--skip-deps-check',
                      action='store_true',
                      help='Skip checks for dependencies and their versions. ' +
                           'This can be useful for testing pre-release ' +
                           'versions of FFmpeg or Shaka Packager.')
  parser.add_argument('--use-system-binaries',
                      action='store_true',
                      help='Use FFmpeg, FFprobe and Shaka Packager binaries ' +
                           'found in PATH instead of the ones offered by ' +
                           'Shaka Streamer.')
  parser.add_argument('--log-configs',
                      action='store_true',
                      help='Log simplified versions of the config files to ' +
                           'stderr. May be useful in services, to look at ' +
                           'logs later to understand when configs changed, ' +
                           'and how they may have impacted performance.')

  args = parser.parse_args()


  controller = streamer.controller_node.ControllerNode()

  # Load the required input and pipeline configs.
  with open(args.input_config) as f:
    input_config_dict = yaml.safe_load(f)
  with open(args.pipeline_config) as f:
    pipeline_config_dict = yaml.safe_load(f)

  # The bitrate config is optional; default to an empty dict.
  bitrate_config_dict = {}
  if args.bitrate_config:
    with open(args.bitrate_config) as f:
      bitrate_config_dict = yaml.safe_load(f)

  if args.log_configs:
    configs = {
      'input': input_config_dict,
      'pipeline': pipeline_config_dict,
      'bitrate': bitrate_config_dict,
    }
    print('Configs: {}'.format(json.dumps(configs)), file=sys.stderr)

  try:
    if args.cloud_url:
      print('Warning: -c/--cloud-url is deprecated; use -o/--output instead',
            file=sys.stderr)
      args.output = args.cloud_url

    with controller.start(args.output, input_config_dict, pipeline_config_dict,
                          bitrate_config_dict,
                          not args.skip_deps_check,
                          not args.use_system_binaries):
      # Sleep so long as the pipeline is still running.
      while True:
        status = controller.check_status()
        if status != streamer.node_base.ProcessStatus.Running:
          return 0 if status == streamer.node_base.ProcessStatus.Finished else 1

        time.sleep(1)
  except (streamer.controller_node.VersionError,
          streamer.configuration.ConfigError) as e:
    # These are common errors meant to give the user specific, helpful
    # information. Format these errors in a relatively friendly way, with no
    # backtrace or other Python-specific information.
    print('Fatal error:')
    print('  ' + str(e))
    # BUG FIX: previously this path fell through and returned None, which
    # sys.exit() treats as success (exit code 0).  Return a nonzero code so
    # scripts and services can detect the failure.
    return 1
142 |
# Run main() and propagate its return value as the process exit code.
if __name__ == '__main__':
  sys.exit(main())
145 |
--------------------------------------------------------------------------------
/streamer/output_stream.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Contains information about each output stream."""
16 |
17 | from streamer.bitrate_configuration import AudioCodec, AudioChannelLayout, VideoCodec, VideoResolution
18 | from streamer.input_configuration import Input, MediaType
19 | from streamer.pipe import Pipe
20 | from typing import Dict, Union
21 |
22 |
class OutputStream(object):
  """Base class for output streams."""

  def __init__(self,
               type: MediaType,
               input: Input,
               codec: Union[AudioCodec, VideoCodec, None],
               pipe_dir: str,
               skip_transcoding: bool = False,
               pipe_suffix: str = '') -> None:

    self.type: MediaType = type
    self.skip_transcoding = skip_transcoding
    self.input: Input = input
    # Filled in by subclasses; feeds the filename templates below.
    self.features: Dict[str, str] = {}
    self.codec: Union[AudioCodec, VideoCodec, None] = codec

    if self.skip_transcoding:
      # With no transcoding step, let the Packager read straight from the
      # original file instead of from an IPC pipe.
      self.ipc_pipe = Pipe.create_file_pipe(self.input.name, mode='r')
    else:
      self.ipc_pipe = Pipe.create_ipc_pipe(pipe_dir, pipe_suffix)

  def is_hardware_accelerated(self) -> bool:
    """Returns True if this output stream uses hardware acceleration."""
    return self.codec.is_hardware_accelerated() if self.codec else False

  def get_ffmpeg_codec_string(self, hwaccel_api: str) -> str:
    """Returns a codec string accepted by FFmpeg for this stream's codec."""
    assert self.codec is not None
    return self.codec.get_ffmpeg_codec_string(hwaccel_api)

  def is_dash_only(self) -> bool:
    """Returns True if the output format is restricted to DASH protocol"""
    return self.codec is not None and self.codec.get_output_format() == 'webm'

  def get_init_seg_file(self) -> Pipe:
    """Returns a writable file Pipe for this stream's init segment."""
    templates = {
      MediaType.AUDIO: 'audio_{language}_{channels}c_{bitrate}_{codec}_init.{format}',
      MediaType.VIDEO: 'video_{resolution_name}_{bitrate}_{codec}_init.{format}',
      MediaType.TEXT: 'text_{language}_init.{format}',
    }
    init_path = templates[self.type].format(**self.features)
    return Pipe.create_file_pipe(init_path, mode='w')

  def get_media_seg_file(self) -> Pipe:
    """Returns a writable file Pipe template for numbered media segments."""
    templates = {
      MediaType.AUDIO: 'audio_{language}_{channels}c_{bitrate}_{codec}_$Number$.{format}',
      MediaType.VIDEO: 'video_{resolution_name}_{bitrate}_{codec}_$Number$.{format}',
      MediaType.TEXT: 'text_{language}_$Number$.{format}',
    }
    media_path = templates[self.type].format(**self.features)
    return Pipe.create_file_pipe(media_path, mode='w')

  def get_single_seg_file(self) -> Pipe:
    """Returns a writable file Pipe for single-file (non-segmented) output."""
    templates = {
      MediaType.AUDIO: 'audio_{language}_{channels}c_{bitrate}_{codec}.{format}',
      MediaType.VIDEO: 'video_{resolution_name}_{bitrate}_{codec}.{format}',
      MediaType.TEXT: 'text_{language}.{format}',
    }
    single_path = templates[self.type].format(**self.features)
    return Pipe.create_file_pipe(single_path, mode='w')

  def get_identification(self) -> str:
    """Returns an identification string built from this stream's features."""
    templates = {
      MediaType.AUDIO: '{language}_{channels}c_{bitrate}_{codec}_{format}',
      MediaType.VIDEO: '{resolution_name}_{bitrate}_{codec}_{format}',
      MediaType.TEXT: '{language}_{format}',
    }
    return templates[self.type].format(**self.features)
98 |
99 |
class AudioOutputStream(OutputStream):
  """An output stream for one transcoded audio rendition."""

  def __init__(self,
               input: Input,
               pipe_dir: str,
               codec: AudioCodec,
               channel_layout: AudioChannelLayout) -> None:

    super().__init__(MediaType.AUDIO, input, codec, pipe_dir)
    # Narrow the codec attribute's type: in this subclass it is always an
    # audio codec.
    self.codec: AudioCodec = codec
    self.layout = channel_layout

    # These values fill in the filename templates in the base class.
    self.features = {
      'language': input.language,
      'channels': str(self.layout.max_channels),
      'bitrate': self.get_bitrate(),
      'format': self.codec.get_output_format(),
      'codec': self.codec.value,
    }

  def get_bitrate(self) -> str:
    """Returns the bitrate for this stream."""
    return self.layout.bitrates[self.codec]
125 |
126 |
class VideoOutputStream(OutputStream):
  """An output stream for one transcoded video rendition."""

  def __init__(self,
               input: Input,
               pipe_dir: str,
               codec: VideoCodec,
               resolution: VideoResolution) -> None:
    super().__init__(MediaType.VIDEO, input, codec, pipe_dir)
    # Override the codec type and specify that it's a video codec
    self.codec: VideoCodec = codec
    self.resolution = resolution

    # The features that will be used to generate the output filename.
    self.features = {
      'resolution_name': self.resolution.get_key(),
      'bitrate': self.get_bitrate(),
      'format': self.codec.get_output_format(),
      'codec': self.codec.value,
    }

  def get_bitrate(self) -> str:
    """Returns the bitrate for this stream."""
    return self.resolution.bitrates[self.codec]
150 |
151 |
class TextOutputStream(OutputStream):
  """An output stream for one text (subtitle/caption) track."""

  def __init__(self,
               input: Input,
               pipe_dir: str,
               skip_transcoding: bool):
    # There is no codec per se for text, but downstream code generically
    # processes OutputStream objects and expects the codec attribute to
    # exist, so pass None for it.
    super().__init__(MediaType.TEXT, input, codec=None, pipe_dir=pipe_dir,
                     skip_transcoding=skip_transcoding, pipe_suffix='.vtt')

    # These values fill in the filename templates in the base class.
    self.features = {
      'language': input.language,
      'format': 'mp4',
    }
171 |
--------------------------------------------------------------------------------
/streamer/autodetect.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """A module to contain auto-detection logic; based on ffprobe."""
16 |
17 | import shlex
18 | import subprocess
19 | import time
20 |
21 | from streamer.bitrate_configuration import (AudioChannelLayout, AudioChannelLayoutName,
22 | VideoResolution, VideoResolutionName)
23 | from streamer.input_configuration import Input, InputType
24 | from typing import Optional, List
25 |
# Input types that ffprobe cannot inspect; _probe() short-circuits for these.
TYPES_WE_CANT_PROBE = [
  InputType.EXTERNAL_COMMAND,
]

# This module level variable might be set by the controller node
# if the user chooses to use the shaka streamer bundled binaries.
# When None, the 'ffprobe' found in PATH is used instead.
hermetic_ffprobe: Optional[str] = None
34 |
def _probe(input: Input, field: str) -> Optional[str]:
  """Autodetect some feature of the input, if possible, using ffprobe.

  Args:
    input (Input): An input object from input_configuration.
    field (str): A field to pass to ffprobe's -show_entries option.

  Returns:
    The requested field from ffprobe as a string, or None if this fails.
  """

  # Some input types can't be inspected by ffprobe at all.
  if input.input_type in TYPES_WE_CANT_PROBE:
    return None

  command: List[str] = [
    # Probe this input file
    hermetic_ffprobe or 'ffprobe',
    input.name,
  ]

  # Add any required input arguments for this input type
  command.extend(input.get_input_args())

  command.extend([
    # Specifically, this stream
    '-select_streams', input.get_stream_specifier(),
    # Show the needed metadata only
    '-show_entries', field,
    # Print the metadata in a compact form, which is easier to parse
    '-of', 'compact=p=0:nk=1',
  ])

  print('+ ' + shlex.join(command))

  raw_output: bytes = subprocess.check_output(command,
                                              stderr=subprocess.DEVNULL)
  # The output is either some probe information or just a blank line.
  # With certain container formats, ffprobe returns a duplicate output and
  # some empty lines in between (issue #119), so keep only the first line.
  first_line: str = raw_output.decode('utf-8').strip().split('\n')[0]
  # Fall back to None if the result is empty.
  result: Optional[str] = first_line or None

  # Webcams on Linux seem to behave badly if the device is rapidly opened and
  # closed. Therefore, sleep for 1 second after a webcam probe.
  if input.input_type == InputType.WEBCAM:
    time.sleep(1)

  return result
85 |
def is_present(input: Input) -> bool:
  """Returns true if the stream for this input is indeed found.

  If we can't probe this input type, assume it is present."""

  if input.input_type in TYPES_WE_CANT_PROBE:
    return True
  return _probe(input, 'stream=index') is not None
93 |
def get_language(input: Input) -> Optional[str]:
  """Returns the autodetected language of the input, or None if unknown."""
  language = _probe(input, 'stream_tags=language')
  return language
97 |
def get_interlaced(input: Input) -> bool:
  """Returns True if we detect that the input is interlaced."""

  # These constants represent the order of the fields (2 fields per frame) of
  # different types of interlaced video. They can be found in
  # https://www.ffmpeg.org/ffmpeg-codecs.html under the description of the
  # field_order option. Anything else (including None) should be considered
  # progressive (non-interlaced) video.
  INTERLACED_FIELD_ORDERS = ('tt', 'bb', 'tb', 'bt')

  return _probe(input, 'stream=field_order') in INTERLACED_FIELD_ORDERS
113 |
def get_frame_rate(input: Input) -> Optional[float]:
  """Returns the autodetected frame rate of the input, or None if unknown."""

  rate_text = _probe(input, 'stream=avg_frame_rate')
  if rate_text is None:
    return None

  # ffprobe reports the rate as a fraction, such as '24/1' or '30000/1001'.
  # Occasionally a pipe trails the value, such as '32700/1091|'.  Strip the
  # pipe, split the fraction, and divide to get a float.
  parts = rate_text.rstrip('|').split('/')
  numerator = float(parts[0])
  if len(parts) == 1:
    frame_rate = numerator
  else:
    frame_rate = numerator / float(parts[1])

  # The detected frame rate for interlaced content is twice what it should be.
  # It's actually the field rate, where it takes two interlaced fields to make
  # a frame. Because we have to know if it's interlaced already, we must
  # assert that is_interlaced has been set before now.
  assert input.is_interlaced is not None
  if input.is_interlaced:
    frame_rate /= 2.0

  return frame_rate
140 |
def get_resolution(input: Input) -> Optional[VideoResolutionName]:
  """Returns the autodetected resolution of the input, or None if unknown."""

  probe_text = _probe(input, 'stream=width,height')
  if probe_text is None:
    return None

  # The probe output is 'WIDTH|HEIGHT' (e.g. '1920|1080'), occasionally with
  # a trailing pipe such as '1920|1080|'.  Parse the two integers, then match
  # them to a named resolution.
  width_text, height_text = probe_text.rstrip('|').split('|')
  width = int(width_text)
  height = int(height_text)

  # Return the first (smallest) bucket that can contain this input's
  # dimensions and frame rate.
  for candidate in VideoResolution.sorted_values():
    if (width <= candidate.max_width and
        height <= candidate.max_height and
        input.frame_rate <= candidate.max_frame_rate):
      return candidate.get_key()

  return None
162 |
def get_channel_layout(input: Input) -> Optional[AudioChannelLayoutName]:
  """Returns the autodetected channel layout of the input, or None."""

  count_text = _probe(input, 'stream=channels')
  if count_text is None:
    return None

  # Return the first (smallest) named layout that can hold this many channels.
  channel_count = int(count_text)
  for candidate in AudioChannelLayout.sorted_values():
    if channel_count <= candidate.max_channels:
      return candidate.get_key()

  return None
176 |
def get_forced_subttitle(input: Input) -> bool:
  """Returns True if the input stream is flagged as a forced subtitle track.

  (The misspelling in this function's name is kept for backward compatibility
  with existing callers.)
  """

  forced_subttitle_string = _probe(input, 'disposition=forced')

  if forced_subttitle_string is None:
    return False

  # BUG FIX: ffprobe prints the disposition flag as '0' or '1'.  The previous
  # bool() conversion was wrong because any non-empty string is truthy
  # (bool('0') == True), which reported every probed stream as forced.
  # Compare the probed value against '1' instead.
  return forced_subttitle_string == '1'
186 |
--------------------------------------------------------------------------------
/streamer/node_base.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Base classes for nodes."""
16 |
17 | import abc
18 | import enum
19 | import os
20 | import shlex
21 | import subprocess
22 | import sys
23 | import threading
24 | import time
25 | import traceback
26 |
27 | from . import node_base
28 | from typing import Any, Dict, IO, List, Optional, Union
29 |
class ProcessStatus(enum.Enum):
  """The lifecycle status of a node's underlying process or thread."""

  # Use number values so we can sort based on value.

  Finished = 0
  """The node has completed its task and shut down."""

  Running = 1
  """The node is still running."""

  Errored = 2
  """The node has failed."""
42 |
class NodeBase(object):
  """A base class for nodes that run a single subprocess."""

  @abc.abstractmethod
  def __init__(self) -> None:
    # Subclasses assign the result of _create_process here in start().
    self._process: Optional[subprocess.Popen] = None

  def __del__(self) -> None:
    # If the process isn't stopped by now, stop it here. It is preferable to
    # explicitly call stop().
    self.stop(None)

  @abc.abstractmethod
  def start(self):
    """Start the subprocess.

    Should be overridden by the subclass to construct a command line, call
    self._create_process, and assign the result to self._process.
    """
    pass

  def _create_process(self,
                      args: Union[str, List[str]],
                      env: Optional[Dict[str, str]] = None,
                      merge_env: bool = True,
                      stdout: Union[int, IO[Any], None] = None,
                      stderr: Union[int, IO[Any], None] = None,
                      shell: bool = False, **kwargs) -> subprocess.Popen:
    """A central point to create subprocesses, so that we can debug the
    command-line arguments.

    Args:
      args: An array of strings if shell is False, or a single string if shell
        is True; the command line of the subprocess.
      env: A dictionary of environment variables to pass to the subprocess.
      merge_env: If true, merge env with the parent process environment.
      shell: If true, args must be a single string, which will be executed as a
        shell command.
    Returns:
      The Popen object of the subprocess.
    """
    # BUG FIX: env previously defaulted to a mutable {} literal, a classic
    # Python pitfall in which all calls share one dict.  Default to None and
    # substitute a fresh empty dict per call instead.
    if env is None:
      env = {}

    if merge_env:
      child_env = os.environ.copy()
      child_env.update(env)
    else:
      child_env = env

    # Print arguments formatted as output from bash -x would be.
    # This makes it easy to see the arguments and easy to copy/paste them for
    # debugging in a shell.
    if shell:
      assert isinstance(args, str)
      print('+ ' + args)
    else:
      assert isinstance(args, list)
      print('+ ' + ' '.join([shlex.quote(arg) for arg in args]))


    return subprocess.Popen(args,
                            env=child_env,
                            stdin=subprocess.DEVNULL,
                            stdout=stdout, stderr=stderr,
                            shell=shell, **kwargs)

  def check_status(self) -> ProcessStatus:
    """Returns the current ProcessStatus of the node."""
    if not self._process:
      raise ValueError('Must have a process to check')

    self._process.poll()
    if self._process.returncode is None:
      return ProcessStatus.Running

    if self._process.returncode == 0:
      return ProcessStatus.Finished
    else:
      return ProcessStatus.Errored

  def stop(self, status: Optional[ProcessStatus]) -> None:
    """Stop the subprocess if it's still running."""
    if self._process:
      # Slightly more polite than kill. Try this first.
      self._process.terminate()

      if self.check_status() == ProcessStatus.Running:
        # If it's not dead yet, wait 1 second.
        time.sleep(1)

      if self.check_status() == ProcessStatus.Running:
        # If it's still not dead, use kill.
        self._process.kill()
        # Wait for the process to die and read its exit code. There is no way
        # to ignore a kill signal, so this will happen quickly. If we don't do
        # this, it can create a zombie process.
        self._process.wait()
138 |
class PolitelyWaitOnFinish(node_base.NodeBase):
  """A mixin that makes stop() wait for the subprocess if status is Finished.

  This is as opposed to the base class behavior, in which stop() forces
  the subprocesses of a node to terminate.
  """

  def stop(self, status: Optional[ProcessStatus]) -> None:
    should_wait = self._process and status == ProcessStatus.Finished
    if should_wait:
      try:
        print('Waiting for', self.__class__.__name__)
        self._process.wait(timeout=300)  # 5m timeout
      except subprocess.TimeoutExpired:
        # Print the exception, then fall through to the forceful stop below.
        traceback.print_exc()

    super().stop(status)
156 |
class ThreadedNodeBase(NodeBase):
  """A base class for nodes that run a thread.

  The thread repeats some callback in a background thread.
  """

  _thread: Optional[threading.Thread]

  def __init__(self, thread_name: str, continue_on_exception: bool, sleep_time: float):
    """
    Args:
      thread_name: The name of the background thread, for debugging.
      continue_on_exception: If True, exceptions from _thread_single_pass are
        logged and the loop continues; if False, the node becomes Errored.
      sleep_time: Seconds to wait between passes of the thread loop.
    """
    super().__init__()
    self._status = ProcessStatus.Finished
    self._thread_name = thread_name
    self._thread = None
    self._continue_on_exception = continue_on_exception
    self._sleep_time = sleep_time
    # Set by stop() to interrupt the sleep between passes.
    self._sleep_waker_event = threading.Event()

  def _thread_main(self) -> None:
    while self._status == ProcessStatus.Running:
      try:
        self._thread_single_pass()
      except Exception:
        # BUG FIX: this was a bare "except:", which would also swallow
        # SystemExit and KeyboardInterrupt.  Only application-level errors
        # should be handled here.
        print('Exception in', self._thread_name, '-', sys.exc_info())

        if self._continue_on_exception:
          print(self.__class__.__name__+": 'Continuing.'")
        else:
          print(self.__class__.__name__+": 'Quitting.'")
          self._status = ProcessStatus.Errored
          return

      # Wait a little bit before performing the next pass.
      self._sleep_waker_event.wait(self._sleep_time)

  @abc.abstractmethod
  def _thread_single_pass(self) -> None:
    """Runs a single step of the thread loop.

    This is implemented by subclasses to do whatever it is they do. It will be
    called repeatedly by the base class from the node's background thread. If
    this method raises an exception, the behavior depends on the
    continue_on_exception argument in the constructor. If
    continue_on_exception is true, the thread will continue. Otherwise, an
    exception will stop the thread and therefore the node.
    """
    pass

  def start(self) -> None:
    self._status = ProcessStatus.Running
    # BUG FIX: clear the waker event in case this node was stopped and then
    # restarted.  A still-set event would make every wait() in the loop return
    # immediately, turning the thread into a busy loop.
    self._sleep_waker_event.clear()
    self._thread = threading.Thread(target=self._thread_main, name=self._thread_name)
    self._thread.start()

  def stop(self, status: Optional[ProcessStatus]) -> None:
    """Signal the thread loop to exit and wait for it to finish."""
    self._status = ProcessStatus.Finished
    # If the thread was sleeping, wake it up.
    self._sleep_waker_event.set()
    if self._thread:
      self._thread.join()

  def check_status(self) -> ProcessStatus:
    return self._status
218 |
--------------------------------------------------------------------------------
/streamer/cloud/pool.py:
--------------------------------------------------------------------------------
1 | # Copyright 2025 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """A pool of worker processes to upload to cloud storage."""
16 |
17 | import abc
18 | import enum
19 | from setproctitle import setproctitle # type: ignore
20 | from queue import Queue
21 |
22 | from typing import Optional
23 | from typing_extensions import Self
24 |
25 | import multiprocessing
26 | # On Windows, we get multiprocessing.connection.PipeConnection.
27 | # On Linux/macOS, we get multiprocessing.connection.Connection.
28 | # Both inherit from multiprocessing.connection._ConnectionBase.
29 | from multiprocessing.connection import _ConnectionBase
30 |
31 | from streamer.cloud.base import CloudUploaderBase
32 | import streamer.cloud.uploader as Uploader
33 |
34 |
class MessageType(enum.Enum):
  """Message type constants used for IPC from the main process to the pool."""

  # Write a complete file in one message.
  WRITE_NON_CHUNKED = 'write_non_chunked'
  # Begin a chunked transfer for one file.
  START_CHUNKED = 'start_chunked'
  # Append one chunk of data to the current chunked transfer.
  WRITE_CHUNK = 'write_chunk'
  # Finish the current chunked transfer.
  END_CHUNKED = 'end_chunked'
  # Delete a file from cloud storage.
  DELETE = 'delete'
  # Reset uploader state when a worker is released back to the pool.
  RESET = 'reset'
44 |
45 |
class Message(object):
  """Message objects used for IPC from the main process to the pool."""

  def __init__(self, type: MessageType, path: str = "",
               data: bytes = b'') -> None:
    # The command to execute; see MessageType.
    self.type: MessageType = type
    # The cloud path this command applies to, where relevant.
    self.path: str = path
    # The payload bytes, for write commands.
    self.data: bytes = data

  @staticmethod
  def write_non_chunked(path: str, data: bytes) -> 'Message':
    """A request to write non-chunked data (all at once)."""
    return Message(MessageType.WRITE_NON_CHUNKED, path=path, data=data)

  @staticmethod
  def start_chunked(path: str) -> 'Message':
    """A request to start a chunked data transfer."""
    return Message(MessageType.START_CHUNKED, path=path)

  @staticmethod
  def write_chunk(data: bytes) -> 'Message':
    """A request to write a single chunk of data."""
    return Message(MessageType.WRITE_CHUNK, data=data)

  @staticmethod
  def end_chunked() -> 'Message':
    """A request to end a chunked data transfer."""
    return Message(MessageType.END_CHUNKED)

  @staticmethod
  def delete(path: str) -> 'Message':
    """A request to delete a file."""
    return Message(MessageType.DELETE, path=path)

  @staticmethod
  def reset() -> 'Message':
    """A request to reset state when releasing a worker."""
    return Message(MessageType.RESET)
83 |
84 |
def worker_target(upload_location: str, reader: _ConnectionBase):
  """Target for multiprocessing.Process.

  This is the entry point for every worker subprocess.

  Reads messages from IPC and talks to cloud storage."""

  # Set the title of the process as it appears in "ps" under Linux.
  setproctitle('shaka-streamer cloud upload worker')

  # Create an uploader using whatever vendor-specific module is necessary for
  # this upload location URL. (Google Cloud Storage, Amazon S3, etc.)
  uploader = Uploader.create(upload_location)

  # Map each message type onto the uploader call that services it.
  handlers = {
    MessageType.WRITE_NON_CHUNKED:
        lambda m: uploader.write_non_chunked(m.path, m.data),
    MessageType.START_CHUNKED: lambda m: uploader.start_chunked(m.path),
    MessageType.WRITE_CHUNK: lambda m: uploader.write_chunk(m.data),
    MessageType.END_CHUNKED: lambda m: uploader.end_chunked(),
    MessageType.DELETE: lambda m: uploader.delete(m.path),
    MessageType.RESET: lambda m: uploader.reset(),
  }

  # Wait for command messages from the main process, proxying each command to
  # the uploader.
  while True:
    try:
      message: Message = reader.recv()
      handler = handlers.get(message.type)
      if handler:
        handler(message)
    except EOFError:
      # Quit the process when the other end of the pipe is closed.
      return
120 |
121 |
class WorkerProcess(object):
  """Bundles a worker subprocess together with the write end of its pipe."""

  def __init__(self, process: multiprocessing.Process,
               writer: _ConnectionBase) -> None:
    # The write end of the pipe, used to send Message objects to the worker.
    self.writer = writer
    # The subprocess itself.
    self.process = process
129 |
130 |
class AbstractPool(object):
  """An interface for a WorkerHandle (below) to talk to Pool (which references
  WorkerHandle). Created to break a circular dependency for static typing."""

  @abc.abstractmethod
  def _release(self, process: WorkerProcess) -> None:
    """Add a process back into the pool.

    Called by WorkerHandle.__exit__ once the caller is done with a worker."""
    pass
139 |
140 |
class WorkerHandle(CloudUploaderBase):
  """Proxy implementing the cloud uploader interface on top of a worker.

  Each call is serialized as a Message and sent over the worker's pipe.
  Also a context manager: leaving a "with" block releases the worker back
  to the pool automatically."""

  def __init__(self, pool: AbstractPool, process: WorkerProcess) -> None:
    self._pool = pool
    self._process = process

  def _send(self, message: 'Message') -> None:
    # Every command reaches the subprocess through this single pipe write.
    self._process.writer.send(message)

  def __enter__(self) -> Self:
    # Nothing to acquire; the worker was already reserved by the pool.
    return self

  def __exit__(self, *args, **kwargs) -> None:
    """Reset the worker's uploader state, then hand it back to the pool."""
    self._send(Message.reset())
    self._pool._release(self._process)

  def write_non_chunked(self, path: str, data: bytes) -> None:
    self._send(Message.write_non_chunked(path, data))

  def start_chunked(self, path: str) -> None:
    self._send(Message.start_chunked(path))

  def write_chunk(self, data: bytes) -> None:
    self._send(Message.write_chunk(data))

  def end_chunked(self) -> None:
    self._send(Message.end_chunked())

  def delete(self, path: str) -> None:
    self._send(Message.delete(path))

  def reset(self) -> None:
    # Required by the uploader interface, but resets are issued from
    # __exit__ rather than called explicitly.
    pass
180 |
181 |
class Pool(AbstractPool):
  """A pool of worker subprocesses that handle cloud upload actions.

  Spawns |size| subprocesses at construction time, each running
  worker_target and connected to this process by a one-way pipe."""

  def __init__(self, upload_location: str, size: int) -> None:
    # Every process ever created, kept for cleanup in close().
    self._all_processes: list[WorkerProcess] = []
    # Processes not currently checked out via get_worker().
    self._available_processes: Queue[WorkerProcess] = Queue()

    for _ in range(size):
      # One-way pipe: this process writes commands, the worker reads them.
      reader, writer = multiprocessing.Pipe(duplex=False)
      process = multiprocessing.Process(target=worker_target,
                                        args=(upload_location, reader))
      process.start()
      worker_process = WorkerProcess(process, writer)
      self._available_processes.put(worker_process)
      self._all_processes.append(worker_process)

  def _release(self, worker_process: WorkerProcess) -> None:
    """Called by worker handles to release the worker back to the pool."""

    self._available_processes.put(worker_process)

  def get_worker(self) -> WorkerHandle:
    """Get an available worker. Blocks until one is available.

    Returns a WorkerHandle meant to be used as a context manager (with "with"
    statements) so that it will be automatically released."""

    worker_process = self._available_processes.get(block=True)
    return WorkerHandle(self, worker_process)

  def close(self) -> None:
    """Close all worker pipes and wait for each subprocess to exit."""

    for process in self._all_processes:
      # Closing the write end makes the worker's recv() raise EOFError,
      # which ends its loop; then we can join the process.
      process.writer.close()
      process.process.join()
218 |
--------------------------------------------------------------------------------
/.github/workflows/build-and-test.yaml:
--------------------------------------------------------------------------------
1 | name: Build and Test
2 |
3 | # Builds and tests on all combinations of OS and python version.
4 | # Also builds the docs.
5 | #
6 | # Runs when a pull request is opened or updated.
7 | #
8 | # Can also be run manually for debugging purposes.
9 | on:
10 | pull_request:
11 | types: [opened, synchronize, reopened]
12 | workflow_dispatch:
13 | inputs:
14 | ref:
15 | description: "The ref to build and test."
16 | required: false
17 | schedule:
18 | # Run every night at midnight PST / 8am UTC, testing against the main branch.
19 | - cron: '0 8 * * *'
20 |
21 | defaults:
22 | run:
23 | shell: bash
24 |
25 | # If another instance of this workflow is started for the same PR, cancel the
26 | # old one. If a PR is updated and a new test run is started, the old test run
27 | # will be cancelled automatically to conserve resources.
28 | concurrency:
29 | group: ${{ github.workflow }}-${{ github.event.number || inputs.ref }}
30 | cancel-in-progress: true
31 |
32 | jobs:
33 | settings:
34 | name: Settings
35 | uses: ./.github/workflows/settings.yaml
36 |
37 | # Configure the build matrix based on inputs. The list of objects in the
38 | # build matrix contents can't be changed by conditionals, but it can be
39 | # computed by another job and deserialized.
40 | matrix_config:
41 | needs: settings
42 | runs-on: ubuntu-latest
43 | outputs:
44 | MATRIX: ${{ steps.configure.outputs.MATRIX }}
45 | steps:
46 | - uses: actions/checkout@v4
47 | with:
48 | ref: ${{ inputs.ref || (github.event.number && format('refs/pull/{0}/merge', github.event.number)) }}
49 | persist-credentials: false
50 |
51 | - name: Configure Build Matrix
52 | id: configure
53 | shell: node {0}
54 | run: |
55 | const fs = require('fs');
56 |           const enableDebug = '${{ needs.settings.outputs.debug }}' != '';
57 |           const enableSelfHosted = '${{ needs.settings.outputs.self_hosted }}' != '';
58 |
59 | // Use enableSelfHosted to decide what the build matrix below should
60 | // include.
61 | const buildMatrix = JSON.parse(fs.readFileSync("${{ github.workspace }}/build-matrix.json"));
62 | const {hosted, selfHosted, pythonVersions} = buildMatrix;
63 | const devices = enableSelfHosted ? hosted.concat(selfHosted) : hosted;
64 |
65 | const matrix = [];
66 | for (const device of devices) {
67 | for (const version of pythonVersions) {
68 | // Clone device, add "python" field, push onto the matrix.
69 | matrix.push(Object.assign({}, device, {python_version: version}));
70 | }
71 | }
72 |
73 | // Output a JSON object consumed by the build matrix below.
74 | fs.appendFileSync(
75 | process.env['GITHUB_OUTPUT'],
76 | `MATRIX=${ JSON.stringify(matrix) }\n`);
77 |
78 | // Log the outputs, for the sake of debugging this script.
79 | console.log({enableDebug, enableSelfHosted, matrix});
80 |
81 | build_and_test:
82 | needs: [settings, matrix_config]
83 | strategy:
84 | # Let other matrix entries complete, so we have all results on failure
85 | # instead of just the first failure.
86 | fail-fast: false
87 | matrix:
88 | include: ${{ fromJSON(needs.matrix_config.outputs.MATRIX) }}
89 |
90 | name: Build and test ${{ matrix.os_name }} ${{ matrix.target_arch }} Python ${{ matrix.python_version }}
91 | runs-on: ${{ matrix.os }}
92 |
93 | steps:
94 | - name: Checkout code
95 | uses: actions/checkout@v4
96 | with:
97 | ref: ${{ inputs.ref || (github.event.number && format('refs/pull/{0}/merge', github.event.number)) }}
98 | persist-credentials: false
99 |
100 | - name: Set Python version
101 | uses: actions/setup-python@v5
102 | with:
103 | python-version: ${{ matrix.python_version }}
104 |
105 | - name: Debug Python version
106 | run: python3 --version
107 |
108 | - name: Install Linux deps
109 | if: runner.os == 'Linux'
110 | run: |
111 | sudo apt -y update
112 | sudo apt -y install \
113 | libva2 libva-drm2 \
114 | nodejs npm xvfb
115 |
116 | - name: Install Chromium (non-Snap, arm64 Linux only)
117 | if: runner.os == 'Linux' && matrix.target_arch == 'arm64'
118 | run: |
119 | sudo add-apt-repository ppa:xtradeb/apps -y
120 | sudo apt update
121 | sudo apt -y install chromium
122 |
123 | # Running inside a Docker container, we need to kill the sandbox.
124 | # We also need to set these XDG environment variables, or else we get
125 | # errors like "chrome_crashpad_handler: --database is required".
126 | # See https://github.com/hardkoded/puppeteer-sharp/issues/2633
127 | # Heredocs interpolate variables, so escape the dollar sign below.
128 |           cat >/usr/local/bin/chromium <<EOF
129 |           #!/bin/bash
130 |           export XDG_CONFIG_HOME=/tmp/.chromium
131 |           export XDG_CACHE_HOME=/tmp/.chromium
132 |           /usr/bin/chromium --no-sandbox \$@
133 |           EOF
134 |           chmod 755 /usr/local/bin/chromium
135 |
136 |           echo "CHROME_BIN=/usr/local/bin/chromium" >> $GITHUB_ENV
137 |
138 | - name: Install Python deps
139 | run: |
140 | python3 -m pip install -r requirements.txt
141 | python3 -m pip install -r optional_requirements.txt
142 |
143 | - name: Download and install binaries
144 | run: |
145 | # Fetch binaries locally instead of installing the release version of
146 | # the binary package. This lets us test changes to the binary package
147 | # before it is released.
148 | # In case of network flake, try it three times. This is arbitrary.
149 | python3 binaries/build_wheels.py || python3 binaries/build_wheels.py || python3 binaries/build_wheels.py
150 |
151 | # Make sure the locally-created binary package for each platform can
152 | # be locally installed, so we know they are correctly formatted/named.
153 | # This also makes these binaries available for the test run.
154 | if [[ '${{ runner.os }}' == 'Windows' ]]; then
155 | python3 -m pip install binaries/dist/shaka_streamer_binaries*win*amd64.whl
156 | elif [[ '${{ runner.os }}' == 'Linux' ]]; then
157 | if [[ '${{ matrix.target_arch }}' == 'x64' ]]; then
158 | python3 -m pip install binaries/dist/shaka_streamer_binaries*linux*x86_64.whl
159 | elif [[ '${{ matrix.target_arch }}' == 'arm64' ]]; then
160 | python3 -m pip install binaries/dist/shaka_streamer_binaries*linux*aarch64.whl
161 | fi
162 | elif [[ '${{ runner.os }}' == 'macOS' ]]; then
163 | if [[ '${{ matrix.target_arch }}' == 'x64' ]]; then
164 | python3 -m pip install binaries/dist/shaka_streamer_binaries*mac*x86_64.whl
165 | elif [[ '${{ matrix.target_arch }}' == 'arm64' ]]; then
166 | python3 -m pip install binaries/dist/shaka_streamer_binaries*mac*arm64.whl
167 | fi
168 | fi
169 |
170 | - name: Build docs (Linux only)
171 | if: runner.os == 'Linux'
172 | run: bash docs/build.sh
173 |
174 | - name: Run tests
175 | run: |
176 | if [[ '${{ runner.os }}' == 'Linux' ]]; then
177 | # Run without X11 on Linux by using xvfb.
178 | WRAPPER="xvfb-run -a"
179 | else
180 | WRAPPER=""
181 | fi
182 |
183 | if [[ '${{ runner.os }}' == 'Linux' && '${{ matrix.target_arch }}' == 'arm64' ]]; then
184 | # There is no Widevine CDM for Linux arm64 at this time.
185 | # By setting this here instead of probing during the test, we can
186 | # be sure to notice failures if Widevine disappears from our
187 | # testing environment on platforms where this would not be
188 | # expected.
189 | EXTRA_ARGS="--no-test-widevine"
190 | else
191 | EXTRA_ARGS=""
192 | fi
193 |
194 | # Use the "spec" reporter for clearer logs in GitHub Actions
195 | $WRAPPER python3 run_end_to_end_tests.py --reporters spec $EXTRA_ARGS
196 |
197 | - name: Debug on failure
198 | uses: mxschmitt/action-tmate@v3.6
199 | with:
200 | limit-access-to-actor: true
201 |         if: failure() && needs.settings.outputs.debug != ''
202 |
--------------------------------------------------------------------------------
/streamer/proxy_node.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """A simple proxy server to upload to cloud storage providers."""
16 |
17 | import time
18 | import traceback
19 | import urllib.parse
20 |
21 | from http.server import ThreadingHTTPServer, BaseHTTPRequestHandler
22 | from typing import Optional, Union
23 |
24 | from streamer.node_base import ProcessStatus, ThreadedNodeBase
25 |
26 | from streamer.cloud.pool import Pool
27 | from streamer.cloud.uploader import ALL_SUPPORTED_PROTOCOLS, SUPPORTED_PROTOCOLS
28 |
29 |
30 | # HTTP status codes
31 | HTTP_STATUS_CREATED = 201
32 | HTTP_STATUS_ACCEPTED = 202
33 | HTTP_STATUS_NO_CONTENT = 204
34 | HTTP_STATUS_FAILED = 500
35 |
36 |
37 | # Don't write the same file more than once per rate limiter period.
38 | # For live streams, this avoids HTTP 429 "Too many requests" errors.
39 | RATE_LIMITER_PERIOD_IN_SECONDS = 2
40 |
41 |
class RateLimiter(object):
  """Tracks recently-written paths so repeat uploads can be skipped.

  Within a single rate limiter period, each path is uploaded at most once;
  later writes to the same path are suppressed until the period rolls
  over."""

  def __init__(self) -> None:
    self._reset(time.time())

  def suppress(self, path) -> bool:
    """Returns true if you should skip this upload."""

    now = time.time()

    # Roll over to a fresh period if the current one has expired.
    if now > self._last_check + RATE_LIMITER_PERIOD_IN_SECONDS:
      self._reset(now)

    if path in self._recent_files:
      # Already written during this period; skip it.
      return True

    # First write this period: record it and allow the upload.
    self._recent_files.add(path)
    return False

  def _reset(self, now: float) -> None:
    # Paths written during the current period; cleared on rollover.
    self._recent_files: set[str] = set()

    # Start time of the current rate limiter period.
    self._last_check: float = now
68 |
69 |
class RequestHandler(BaseHTTPRequestHandler):
  """A request handler that processes requests coming from Shaka Packager and
  relays them to the destination.

  PUT requests upload files (chunked or non-chunked) and DELETE requests
  remove them.  The actual cloud interaction is delegated to workers
  borrowed from a Pool.
  """

  def __init__(self, rate_limiter: RateLimiter, pool: Pool,
               *args, **kwargs) -> None:
    self._rate_limiter: RateLimiter = rate_limiter
    self._pool: Pool = pool

    # The HTTP server passes *args and *kwargs that we need to pass along, but
    # don't otherwise care about.  This must happen last, or somehow our
    # members never get set.
    super().__init__(*args, **kwargs)

  # NOTE: The default values here for log_request are taken from the base
  # class, and not a design decision of ours.
  def log_request(self, code: Union[int, str] = '-', size: Union[int, str] = '-') -> None:
    """Override the request logging feature of the Python HTTP server."""
    try:
      code_int = int(code)
    except (ValueError, TypeError):
      # Non-numeric codes (such as the base class default '-') can't be
      # parsed; treat them as "not successful" so they still get logged.
      code_int = 0

    if code_int >= 200 and code_int <= 299:
      # Stub out log_request to avoid creating noise from the HTTP server when
      # requests are successful.
      return

    return super().log_request(code, size)

  def _parse_chunked_transfer(self, suppress: bool) -> None:
    # Here we parse the chunked transfer encoding and delegate to the
    # worker's start/chunk/end methods.  If |suppress|, we parse the input
    # but don't do anything with it.
    with self._pool.get_worker() as worker:
      if not suppress:
        worker.start_chunked(self.path)

      while True:
        # Parse the chunk size, a hex number on its own line.
        chunk_size_line = self.rfile.readline().strip()
        chunk_size = int(chunk_size_line, 16)

        # Read the chunk and process it.  A size of zero marks the end.
        if chunk_size != 0:
          data = self.rfile.read(chunk_size)
          if not suppress:
            worker.write_chunk(data)

        self.rfile.readline()  # Read the trailer

        if chunk_size == 0:
          break  # EOF

      # All done.
      if not suppress:
        worker.end_chunked()

  def _parse_non_chunked_transfer(self, suppress: bool) -> None:
    # We have the whole file at once, with a known length.
    content_length = int(self.headers['Content-Length'])

    if suppress:
      # If |suppress|, we read the input but don't do anything with it.
      self.rfile.read(content_length)
    else:
      with self._pool.get_worker() as worker:
        worker.write_non_chunked(self.path, self.rfile.read(content_length))

  def do_PUT(self) -> None:
    """Handle PUT requests coming from Shaka Packager."""
    suppress = self._rate_limiter.suppress(self.path)

    try:
      if self.headers.get('Transfer-Encoding', '').lower() == 'chunked':
        self._parse_chunked_transfer(suppress)
      else:
        self._parse_non_chunked_transfer(suppress)

      # Close the input and respond.
      self.rfile.close()
      self.send_response(HTTP_STATUS_ACCEPTED if suppress else HTTP_STATUS_CREATED)
    except Exception as ex:
      print('Upload failure: ' + str(ex))
      traceback.print_exc()
      self.send_response(HTTP_STATUS_FAILED)

    # If we don't call this at the end of the handler, Packager says we
    # "returned nothing".
    self.end_headers()

  def do_DELETE(self) -> None:
    """Handle DELETE requests coming from Shaka Packager."""
    try:
      with self._pool.get_worker() as worker:
        worker.delete(self.path)
      self.send_response(HTTP_STATUS_NO_CONTENT)
    except Exception as ex:
      print('Upload failure: ' + str(ex))
      traceback.print_exc()
      self.send_response(HTTP_STATUS_FAILED)

    # If we don't call this at the end of the handler, Packager says we
    # "returned nothing".
    self.end_headers()
176 |
177 |
class ProxyNode(ThreadedNodeBase):
  """Runs a local HTTP server at `self.server_location` that relays uploads
  to cloud storage.

  Subclasses handle upload to specific cloud storage providers.

  The local HTTP server at `self.server_location` can only ingest PUT
  requests.
  """
  SUPPORTED_PROTOCOLS = SUPPORTED_PROTOCOLS
  ALL_SUPPORTED_PROTOCOLS = ALL_SUPPORTED_PROTOCOLS

  server_location: str = ''

  def __init__(self, upload_location: str, pool_size: int) -> None:
    super().__init__(thread_name=self.__class__.__name__,
                     continue_on_exception=True,
                     sleep_time=3)
    if not ProxyNode.is_supported(upload_location):
      raise RuntimeError("Protocol of {} isn't supported".format(upload_location))

    self._upload_location = upload_location
    self._pool_size: int = pool_size
    self._rate_limiter = RateLimiter()
    # Both of these are created lazily in start().
    self._server: Optional[ThreadingHTTPServer] = None
    self._pool: Optional[Pool] = None

  def create_handler(self, *args, **kwargs) -> BaseHTTPRequestHandler:
    """Build a RequestHandler wired up to this node's rate limiter and pool."""
    assert self._pool is not None
    return RequestHandler(self._rate_limiter, self._pool, *args, **kwargs)

  def start(self) -> None:
    # start() may be called early just to learn the server location, so it
    # must tolerate being called more than once.
    if self._server is not None:
      return

    self._pool = Pool(self._upload_location, self._pool_size)

    # Port 0 asks the OS to pick any unused port for the server.
    self._server = ThreadingHTTPServer(
        ('localhost', 0),
        lambda *args, **kwargs: self.create_handler(*args, **kwargs))
    self.server_location = (
        'http://' + self._server.server_name +
        ':' + str(self._server.server_port))

    return super().start()

  def stop(self, status: Optional[ProcessStatus]) -> None:
    """Shut down the HTTP server and the upload worker pool."""
    if self._server:
      self._server.shutdown()
      self._server = None
    if self._pool:
      self._pool.close()
      self._pool = None
    return super().stop(status)

  def check_status(self) -> ProcessStatus:
    # Always report Finished so this node never blocks a system-wide
    # shutdown; ControllerNode stops it explicitly during teardown.
    return ProcessStatus.Finished

  def _thread_single_pass(self) -> None:
    assert self._server is not None
    # Blocks until stop() calls server.shutdown().
    self._server.serve_forever()

  @staticmethod
  def is_understood(upload_location: str) -> bool:
    """Is the URL understood, independent of libraries available?"""
    scheme = urllib.parse.urlparse(upload_location).scheme
    return scheme in ALL_SUPPORTED_PROTOCOLS

  @staticmethod
  def is_supported(upload_location: str) -> bool:
    """Is the URL supported with the libraries available?"""
    scheme = urllib.parse.urlparse(upload_location).scheme
    return scheme in SUPPORTED_PROTOCOLS
256 |
--------------------------------------------------------------------------------
/streamer/periodconcat_node.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """Concatenates inputs into periods by creating a master DASH/HLS file."""
16 |
17 | import os
18 | import re
19 | import time
20 | from typing import List
21 | from xml.etree import ElementTree
22 | from streamer import __version__
23 | from streamer.node_base import ProcessStatus, ThreadedNodeBase
24 | from streamer.packager_node import PackagerNode
25 | from streamer.pipeline_configuration import PipelineConfig, ManifestFormat
26 | from streamer.output_stream import AudioOutputStream, VideoOutputStream
27 | from streamer.m3u8_concater import HLSConcater
28 |
29 |
class PeriodConcatNode(ThreadedNodeBase):
  """A node that concatenates multiple DASH manifests and/or HLS playlists
  when the input is a multiperiod_inputs_list and the output is to the local
  file system.
  """

  def __init__(self,
               pipeline_config: PipelineConfig,
               packager_nodes: List[PackagerNode],
               output_location: str) -> None:
    """Stores all relevant information needed for the period concatenation."""
    super().__init__(thread_name='periodconcat', continue_on_exception=False, sleep_time=3)
    self._pipeline_config = pipeline_config
    self._output_location = output_location
    self._packager_nodes: List[PackagerNode] = packager_nodes
    self._concat_will_fail = False

    # Determine whether the first period has video and audio or not.
    fp_has_vid, fp_has_aud = False, False
    for output_stream in packager_nodes[0].output_streams:
      if isinstance(output_stream, VideoOutputStream):
        fp_has_vid = True
      elif isinstance(output_stream, AudioOutputStream):
        fp_has_aud = True

    # Every other period must match the first one in terms of having video
    # and audio, otherwise concatenation will fail; flag it early and warn.
    for i, packager_node in enumerate(self._packager_nodes):
      has_vid, has_aud = False, False
      for output_stream in packager_node.output_streams:
        if isinstance(output_stream, VideoOutputStream):
          has_vid = True
        elif isinstance(output_stream, AudioOutputStream):
          has_aud = True
      if has_vid != fp_has_vid or has_aud != fp_has_aud:
        self._concat_will_fail = True
        print("\nWARNING: Stopping period concatenation.")
        print("Period#{} has {}video and has {}audio while Period#1 "
              "has {}video and has {}audio.".format(i + 1,
                                                    "" if has_vid else "no ",
                                                    "" if has_aud else "no ",
                                                    "" if fp_has_vid else "no ",
                                                    "" if fp_has_aud else "no "))
        print("\nHINT:\n\tBe sure that either all the periods have video or all do not,\n"
              "\tand all the periods have audio or all do not, i.e. don't mix videoless\n"
              "\tperiods with other periods that have video.\n"
              "\tThis is necessary for the concatenation to be performed successfully.\n")
        time.sleep(5)
        break

  def _thread_single_pass(self) -> None:
    """Watches all the PackagerNode(s), if at least one of them is running it
    skips this _thread_single_pass, if all of them are finished, it starts
    period concatenation, if one of them is errored, it raises a RuntimeError.
    """

    for i, packager_node in enumerate(self._packager_nodes):
      status = packager_node.check_status()
      if status == ProcessStatus.Running:
        return
      elif status == ProcessStatus.Errored:
        raise RuntimeError(
          'Concatenation is stopped due '
          'to an error in PackagerNode#{}.'.format(i + 1))

    if self._concat_will_fail:
      raise RuntimeError('Unable to concatenate the inputs.')

    if ManifestFormat.DASH in self._pipeline_config.manifest_format:
      self._dash_concat()

    if ManifestFormat.HLS in self._pipeline_config.manifest_format:
      self._hls_concat()

    self._status = ProcessStatus.Finished

  def _dash_concat(self) -> None:
    """Concatenates multiple single-period DASH manifests into one multi-period DASH manifest."""

    def find(elem: ElementTree.Element, *args: str) -> ElementTree.Element:
      """A better interface for the Element.find() method.
      Use it only if it is guaranteed that the element we are searching for is inside,
      Otherwise it will raise an AssertionError."""

      full_path = '/'.join(['shaka-live:' + tag for tag in args])
      child_elem = elem.find(full_path, {'shaka-live': default_dash_namespace})

      # elem.find() returns either an ElementTree.Element or None.
      assert child_elem is not None, 'Unable to find: {} using the namespace: {}'.format(
        full_path, default_dash_namespace)
      return child_elem

    # Periods that are going to be collected from different MPD files.
    periods: List[ElementTree.Element] = []

    # Get the root of an MPD file that we will concatenate periods into.
    concat_mpd = ElementTree.ElementTree(file=os.path.join(
      self._packager_nodes[0].output_location,
      self._pipeline_config.dash_output)).getroot()
    assert concat_mpd is not None, 'Failed to parse MPD'

    # Get the default namespace.
    namespace_matches = re.search(r'\{([^}]*)\}', concat_mpd.tag)
    assert namespace_matches is not None, 'Unable to find the default namespace.'
    default_dash_namespace = namespace_matches.group(1)

    # Remove the 'mediaPresentationDuration' attribute, which does not apply
    # to a multi-period presentation; each Period carries its own duration.
    concat_mpd.attrib.pop('mediaPresentationDuration')
    # Remove the Period element in that MPD element.
    concat_mpd.remove(find(concat_mpd, 'Period'))

    for packager_node in self._packager_nodes:
      mpd = ElementTree.ElementTree(file=os.path.join(
        packager_node.output_location,
        self._pipeline_config.dash_output)).getroot()
      assert mpd is not None, 'Failed to parse subsequent MPD'
      period = find(mpd, 'Period')
      period.attrib['duration'] = mpd.attrib['mediaPresentationDuration']

      # A BaseURL that will have the relative path to media file.
      base_url = ElementTree.Element('{{{}}}BaseURL'.format(default_dash_namespace))
      base_url.text = os.path.relpath(packager_node.output_location, self._output_location) + '/'
      period.insert(0, base_url)

      periods.append(period)

    # Add the periods collected from all the files.
    concat_mpd.extend(periods)

    # Write the period concat to the output_location.
    with open(os.path.join(
        self._output_location,
        self._pipeline_config.dash_output), 'w') as master_dash:

      contents = "<?xml version='1.0' encoding='UTF-8'?>\n"
      # TODO: Add Shaka-Packager version to this xml comment.
      contents += "<!--Generated with https://github.com/shaka-project/shaka-streamer -->\n"
      contents += "<!--Shaka-Streamer version {} -->\n".format(__version__)

      # xml.ElementTree replaces the default namespace with 'ns0'.
      # Register the DASH namespace back as the default namespace before converting to string.
      ElementTree.register_namespace('', default_dash_namespace)

      # xml.etree.ElementTree already has an ElementTree().write() method,
      # but it won't allow putting comments at the begining of the file.
      contents += ElementTree.tostring(element=concat_mpd, encoding='unicode')
      master_dash.write(contents)

  def _hls_concat(self) -> None:
    """Concatenates multiple HLS playlists using #EXT-X-DISCONTINUITY."""

    # Initialize the HLS concater with a sample Master HLS playlist and
    # the output location of the concatenated playlists.
    first_hls_playlist = os.path.join(self._packager_nodes[0].output_location,
                                      self._pipeline_config.hls_output)
    # NOTE: Media files' segments location will be relative to this
    # self._output_location we pass to the constructor.
    hls_concater = HLSConcater(first_hls_playlist, self._output_location)

    for packager_node in self._packager_nodes:
      hls_playlist = os.path.join(packager_node.output_location,
                                  self._pipeline_config.hls_output)
      hls_concater.add(hls_playlist, packager_node)

    # Start the period concatenation and write the output in the output location
    # passed to the HLSConcater at the construction time.
    hls_concater.concat_and_write(
      self._pipeline_config.hls_output,
      'Concatenated with https://github.com/shaka-project/shaka-streamer'
      ' version {}'.format(__version__),
    )
198 |
--------------------------------------------------------------------------------
/streamer/packager_node.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | """A module that feeds information from two named pipes into shaka-packager."""
16 |
17 | import os
18 | import subprocess
19 |
20 | from . import input_configuration
21 | from . import node_base
22 | from . import pipeline_configuration
23 |
24 | from streamer.bitrate_configuration import AudioCodec, VideoCodec
25 | from streamer.input_configuration import MediaType
26 | from streamer.output_stream import OutputStream
27 | from streamer.pipeline_configuration import EncryptionMode, PipelineConfig
28 | from streamer.util import is_url
29 | from typing import List, Optional, Union, cast
30 |
31 | # Alias a few classes to avoid repeating namespaces later.
32 | ManifestFormat = pipeline_configuration.ManifestFormat
33 | StreamingMode = pipeline_configuration.StreamingMode
34 |
class SegmentError(Exception):
  """Raised when a segment setting is incompatible with the output format."""
  pass
38 |
def build_path(output_location, sub_path):
  """Handle annoying edge cases with paths for cloud upload.
  If a path has two slashes, GCS will create an intermediate directory named "".
  So we have to be careful in how we construct paths to avoid this.
  """
  # ControllerNode should have already stripped trailing slashes from the output
  # location.

  if sub_path:
    if is_url(output_location):
      # URLs require forward slashes, and os.path.join would use backslashes
      # on Windows, so join by hand here.
      return output_location + '/' + sub_path
    return os.path.join(output_location, sub_path)

  # Sometimes the segment dir is empty; hand the location back untouched.
  return output_location
57 |
58 |
class PackagerNode(node_base.PolitelyWaitOnFinish):
  """A node that runs Shaka Packager to package (and optionally encrypt) the
  output streams and write segments and manifests to |output_location|."""

  def __init__(self,
               pipeline_config: PipelineConfig,
               output_location: str,
               output_streams: List[OutputStream],
               index: int,
               hermetic_packager: Optional[str]) -> None:
    super().__init__()
    self._pipeline_config: PipelineConfig = pipeline_config
    self.output_location: str = output_location
    # Segments land in a subfolder of the output location, if one was
    # configured; otherwise they go directly into the output location.
    self._segment_dir: str = build_path(
        output_location, pipeline_config.segment_folder)
    self.output_streams: List[OutputStream] = output_streams
    # Used to give each PackagerNode's debug log file a unique name.
    self._index = index
    # If a hermetic packager is passed, use it.  Otherwise fall back to
    # whatever "packager" resolves to in the environment.
    self._packager = hermetic_packager or 'packager'

  def start(self) -> None:
    """Build the Shaka Packager command line and launch the subprocess."""
    args = [
        self._packager,
    ]

    # One stream descriptor argument per output stream.
    args += [self._setup_stream(stream) for stream in self.output_streams]

    if self._pipeline_config.quiet:
      args += [
          '--quiet',  # Only output error logs
      ]

    args += [
        # Segment duration given in seconds.
        '--segment_duration', str(self._pipeline_config.segment_size),
        # Signal DolbyVision using the modern supplemental codecs approach
        '--use_dovi_supplemental_codecs',
    ]

    if self._pipeline_config.streaming_mode == StreamingMode.LIVE:
      args += [
          # Number of seconds the user can rewind through backwards.
          '--time_shift_buffer_depth',
          str(self._pipeline_config.availability_window),
          # Number of segments preserved outside the current live window.
          # NOTE: This must not be set below 3, or the first segment in an HLS
          # playlist may become unavailable before the playlist is updated.
          '--preserved_segments_outside_live_window', '3',
          # Number of seconds of content encoded/packaged that is ahead of the
          # live edge.
          '--suggested_presentation_delay',
          str(self._pipeline_config.presentation_delay),
          # Number of seconds between manifest updates.
          '--minimum_update_period',
          str(self._pipeline_config.update_period),
          # Ignore HTTP output failures so that the pipeline doesn't stop if we
          # fail to upload one segment.  Only enable this for live streams,
          # since for VOD, we really should signal that failure to the user.
          '--ignore_http_output_failures',
      ]

    args += self._setup_manifest_format()

    if self._pipeline_config.encryption.enable:
      args += self._setup_encryption()

    stdout = None
    if self._pipeline_config.debug_logs:
      # Log by writing all Packager output to a file.  Unlike the logging
      # system in ffmpeg, this will stop any Packager output from getting to
      # the screen.  The handle is inherited by the subprocess, which writes
      # to it for the rest of its lifetime.
      packager_log_file = 'PackagerNode-' + str(self._index) + '.log'
      stdout = open(packager_log_file, 'w')

    self._process: subprocess.Popen = self._create_process(
        args,
        stderr=subprocess.STDOUT,
        stdout=stdout)

  def _setup_stream(self, stream: OutputStream) -> str:
    """Return a Shaka Packager stream descriptor string for |stream|.

    The descriptor is a single string of key=value pairs separated by commas.
    """
    # Named "descriptor" rather than "dict" so the builtin is not shadowed.
    descriptor = {
        'in': stream.ipc_pipe.read_end(),
        'stream': stream.type.value,
    }

    if stream.input.skip_encryption:
      descriptor['skip_encryption'] = str(stream.input.skip_encryption)

    if stream.type == MediaType.AUDIO:
      descriptor['hls_group_id'] = str(cast(AudioCodec, stream.codec).value)

    if stream.type == MediaType.VIDEO and self._pipeline_config.generate_iframe_playlist:
      descriptor['iframe_playlist_name'] = 'iframe_' + stream.get_identification() + '.m3u8'

    if stream.input.drm_label:
      descriptor['drm_label'] = stream.input.drm_label

    if stream.input.forced_subtitle:
      descriptor['forced_subtitle'] = '1'

    # Note: Shaka Packager will not accept 'und' as a language, but Shaka
    # Player will fill that in if the language metadata is missing from the
    # manifest/playlist.
    if stream.input.language and stream.input.language != 'und':
      descriptor['language'] = stream.input.language

    if self._pipeline_config.segment_per_file:
      descriptor['init_segment'] = build_path(
          self._segment_dir,
          stream.get_init_seg_file().write_end())
      descriptor['segment_template'] = build_path(
          self._segment_dir,
          stream.get_media_seg_file().write_end())
    else:
      descriptor['output'] = build_path(
          self._segment_dir,
          stream.get_single_seg_file().write_end())

    if stream.is_dash_only():
      descriptor['dash_only'] = '1'

    # The format of this argument to Shaka Packager is a single string of
    # key=value pairs separated by commas.
    return ','.join(key + '=' + value for key, value in descriptor.items())

  def _setup_manifest_format(self) -> List[str]:
    """Return Packager arguments for DASH and/or HLS manifest output."""
    args: List[str] = []

    if ManifestFormat.DASH in self._pipeline_config.manifest_format:
      if self._pipeline_config.utc_timings:
        args += [
            '--utc_timings',
            ','.join(timing.scheme_id_uri + '=' +
                     timing.value for timing in self._pipeline_config.utc_timings)
        ]

      if self._pipeline_config.low_latency_dash_mode:
        args += [
            '--low_latency_dash_mode=true',
        ]

      if self._pipeline_config.streaming_mode == StreamingMode.VOD:
        args += [
            '--generate_static_live_mpd',
        ]

      args += [
          # Generate DASH manifest file.
          '--mpd_output',
          build_path(self.output_location, self._pipeline_config.dash_output),
      ]

    if ManifestFormat.HLS in self._pipeline_config.manifest_format:
      if self._pipeline_config.streaming_mode == StreamingMode.LIVE:
        args += [
            '--hls_playlist_type', 'LIVE',
        ]
      else:
        args += [
            '--hls_playlist_type', 'VOD',
        ]

      args += [
          # Generate HLS playlist file(s).
          '--hls_master_playlist_output',
          build_path(self.output_location, self._pipeline_config.hls_output),
      ]

    return args

  def _setup_encryption_keys(self) -> List[str]:
    """Return "label=...:key_id=...:key=..." strings for raw encryption mode."""
    keys: List[str] = []
    for key in self._pipeline_config.encryption.keys:
      key_str = ''
      # The label prefix is optional; omit it entirely when not configured.
      if key.label:
        key_str = 'label=' + key.label + ':'
      key_str += 'key_id=' + key.key_id + ':key=' + key.key
      keys.append(key_str)
    return keys

  def _setup_encryption(self) -> List[str]:
    """Return Packager arguments that set up encryption of the content."""
    encryption = self._pipeline_config.encryption

    args: List[str] = []

    if encryption.encryption_mode == EncryptionMode.WIDEVINE:
      args = [
          '--enable_widevine_encryption',
          '--key_server_url', encryption.key_server_url,
          '--content_id', encryption.content_id,
          '--signer', encryption.signer,
          '--aes_signing_key', encryption.signing_key,
          '--aes_signing_iv', encryption.signing_iv,
      ]
    elif encryption.encryption_mode == EncryptionMode.RAW:
      # raw key encryption mode
      args = [
          '--enable_raw_key_encryption',
          '--keys',
          ','.join(self._setup_encryption_keys()),
      ]
      # IV, PSSH, and the HLS key URI are each optional in raw mode.
      if encryption.iv:
        args.extend(['--iv', encryption.iv])
      if encryption.pssh:
        args.extend(['--pssh', encryption.pssh])
      if encryption.hls_key_uri:
        args.extend(['--hls_key_uri', encryption.hls_key_uri])

    # Common arguments
    args.extend([
        '--protection_scheme',
        encryption.protection_scheme.value,
        '--clear_lead', str(encryption.clear_lead),
        '--create_session_keys',
    ])

    if encryption.protection_systems:
      args.extend([
          '--protection_systems', ','.join(
              [p.value for p in encryption.protection_systems]
          )
      ])

    return args
284 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 |
2 | Apache License
3 | Version 2.0, January 2004
4 | http://www.apache.org/licenses/
5 |
6 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
7 |
8 | 1. Definitions.
9 |
10 | "License" shall mean the terms and conditions for use, reproduction,
11 | and distribution as defined by Sections 1 through 9 of this document.
12 |
13 | "Licensor" shall mean the copyright owner or entity authorized by
14 | the copyright owner that is granting the License.
15 |
16 | "Legal Entity" shall mean the union of the acting entity and all
17 | other entities that control, are controlled by, or are under common
18 | control with that entity. For the purposes of this definition,
19 | "control" means (i) the power, direct or indirect, to cause the
20 | direction or management of such entity, whether by contract or
21 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
22 | outstanding shares, or (iii) beneficial ownership of such entity.
23 |
24 | "You" (or "Your") shall mean an individual or Legal Entity
25 | exercising permissions granted by this License.
26 |
27 | "Source" form shall mean the preferred form for making modifications,
28 | including but not limited to software source code, documentation
29 | source, and configuration files.
30 |
31 | "Object" form shall mean any form resulting from mechanical
32 | transformation or translation of a Source form, including but
33 | not limited to compiled object code, generated documentation,
34 | and conversions to other media types.
35 |
36 | "Work" shall mean the work of authorship, whether in Source or
37 | Object form, made available under the License, as indicated by a
38 | copyright notice that is included in or attached to the work
39 | (an example is provided in the Appendix below).
40 |
41 | "Derivative Works" shall mean any work, whether in Source or Object
42 | form, that is based on (or derived from) the Work and for which the
43 | editorial revisions, annotations, elaborations, or other modifications
44 | represent, as a whole, an original work of authorship. For the purposes
45 | of this License, Derivative Works shall not include works that remain
46 | separable from, or merely link (or bind by name) to the interfaces of,
47 | the Work and Derivative Works thereof.
48 |
49 | "Contribution" shall mean any work of authorship, including
50 | the original version of the Work and any modifications or additions
51 | to that Work or Derivative Works thereof, that is intentionally
52 | submitted to Licensor for inclusion in the Work by the copyright owner
53 | or by an individual or Legal Entity authorized to submit on behalf of
54 | the copyright owner. For the purposes of this definition, "submitted"
55 | means any form of electronic, verbal, or written communication sent
56 | to the Licensor or its representatives, including but not limited to
57 | communication on electronic mailing lists, source code control systems,
58 | and issue tracking systems that are managed by, or on behalf of, the
59 | Licensor for the purpose of discussing and improving the Work, but
60 | excluding communication that is conspicuously marked or otherwise
61 | designated in writing by the copyright owner as "Not a Contribution."
62 |
63 | "Contributor" shall mean Licensor and any individual or Legal Entity
64 | on behalf of whom a Contribution has been received by Licensor and
65 | subsequently incorporated within the Work.
66 |
67 | 2. Grant of Copyright License. Subject to the terms and conditions of
68 | this License, each Contributor hereby grants to You a perpetual,
69 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
70 | copyright license to reproduce, prepare Derivative Works of,
71 | publicly display, publicly perform, sublicense, and distribute the
72 | Work and such Derivative Works in Source or Object form.
73 |
74 | 3. Grant of Patent License. Subject to the terms and conditions of
75 | this License, each Contributor hereby grants to You a perpetual,
76 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
77 | (except as stated in this section) patent license to make, have made,
78 | use, offer to sell, sell, import, and otherwise transfer the Work,
79 | where such license applies only to those patent claims licensable
80 | by such Contributor that are necessarily infringed by their
81 | Contribution(s) alone or by combination of their Contribution(s)
82 | with the Work to which such Contribution(s) was submitted. If You
83 | institute patent litigation against any entity (including a
84 | cross-claim or counterclaim in a lawsuit) alleging that the Work
85 | or a Contribution incorporated within the Work constitutes direct
86 | or contributory patent infringement, then any patent licenses
87 | granted to You under this License for that Work shall terminate
88 | as of the date such litigation is filed.
89 |
90 | 4. Redistribution. You may reproduce and distribute copies of the
91 | Work or Derivative Works thereof in any medium, with or without
92 | modifications, and in Source or Object form, provided that You
93 | meet the following conditions:
94 |
95 | (a) You must give any other recipients of the Work or
96 | Derivative Works a copy of this License; and
97 |
98 | (b) You must cause any modified files to carry prominent notices
99 | stating that You changed the files; and
100 |
101 | (c) You must retain, in the Source form of any Derivative Works
102 | that You distribute, all copyright, patent, trademark, and
103 | attribution notices from the Source form of the Work,
104 | excluding those notices that do not pertain to any part of
105 | the Derivative Works; and
106 |
107 | (d) If the Work includes a "NOTICE" text file as part of its
108 | distribution, then any Derivative Works that You distribute must
109 | include a readable copy of the attribution notices contained
110 | within such NOTICE file, excluding those notices that do not
111 | pertain to any part of the Derivative Works, in at least one
112 | of the following places: within a NOTICE text file distributed
113 | as part of the Derivative Works; within the Source form or
114 | documentation, if provided along with the Derivative Works; or,
115 | within a display generated by the Derivative Works, if and
116 | wherever such third-party notices normally appear. The contents
117 | of the NOTICE file are for informational purposes only and
118 | do not modify the License. You may add Your own attribution
119 | notices within Derivative Works that You distribute, alongside
120 | or as an addendum to the NOTICE text from the Work, provided
121 | that such additional attribution notices cannot be construed
122 | as modifying the License.
123 |
124 | You may add Your own copyright statement to Your modifications and
125 | may provide additional or different license terms and conditions
126 | for use, reproduction, or distribution of Your modifications, or
127 | for any such Derivative Works as a whole, provided Your use,
128 | reproduction, and distribution of the Work otherwise complies with
129 | the conditions stated in this License.
130 |
131 | 5. Submission of Contributions. Unless You explicitly state otherwise,
132 | any Contribution intentionally submitted for inclusion in the Work
133 | by You to the Licensor shall be under the terms and conditions of
134 | this License, without any additional terms or conditions.
135 | Notwithstanding the above, nothing herein shall supersede or modify
136 | the terms of any separate license agreement you may have executed
137 | with Licensor regarding such Contributions.
138 |
139 | 6. Trademarks. This License does not grant permission to use the trade
140 | names, trademarks, service marks, or product names of the Licensor,
141 | except as required for reasonable and customary use in describing the
142 | origin of the Work and reproducing the content of the NOTICE file.
143 |
144 | 7. Disclaimer of Warranty. Unless required by applicable law or
145 | agreed to in writing, Licensor provides the Work (and each
146 | Contributor provides its Contributions) on an "AS IS" BASIS,
147 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
148 | implied, including, without limitation, any warranties or conditions
149 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
150 | PARTICULAR PURPOSE. You are solely responsible for determining the
151 | appropriateness of using or redistributing the Work and assume any
152 | risks associated with Your exercise of permissions under this License.
153 |
154 | 8. Limitation of Liability. In no event and under no legal theory,
155 | whether in tort (including negligence), contract, or otherwise,
156 | unless required by applicable law (such as deliberate and grossly
157 | negligent acts) or agreed to in writing, shall any Contributor be
158 | liable to You for damages, including any direct, indirect, special,
159 | incidental, or consequential damages of any character arising as a
160 | result of this License or out of the use or inability to use the
161 | Work (including but not limited to damages for loss of goodwill,
162 | work stoppage, computer failure or malfunction, or any and all
163 | other commercial damages or losses), even if such Contributor
164 | has been advised of the possibility of such damages.
165 |
166 | 9. Accepting Warranty or Additional Liability. While redistributing
167 | the Work or Derivative Works thereof, You may choose to offer,
168 | and charge a fee for, acceptance of support, warranty, indemnity,
169 | or other liability obligations and/or rights consistent with this
170 | License. However, in accepting such obligations, You may act only
171 | on Your own behalf and on Your sole responsibility, not on behalf
172 | of any other Contributor, and only if You agree to indemnify,
173 | defend, and hold each Contributor harmless for any liability
174 | incurred by, or claims asserted against, such Contributor by reason
175 | of your accepting any such warranty or additional liability.
176 |
177 | END OF TERMS AND CONDITIONS
178 |
179 | APPENDIX: How to apply the Apache License to your work.
180 |
181 | To apply the Apache License to your work, attach the following
182 | boilerplate notice, with the fields enclosed by brackets "[]"
183 | replaced with your own identifying information. (Don't include
184 | the brackets!) The text should be enclosed in the appropriate
185 | comment syntax for the file format. We also recommend that a
186 | file or class name and description of purpose be included on the
187 | same "printed page" as the copyright notice for easier
188 | identification within third-party archives.
189 |
190 | Copyright [yyyy] [name of copyright owner]
191 |
192 | Licensed under the Apache License, Version 2.0 (the "License");
193 | you may not use this file except in compliance with the License.
194 | You may obtain a copy of the License at
195 |
196 | http://www.apache.org/licenses/LICENSE-2.0
197 |
198 | Unless required by applicable law or agreed to in writing, software
199 | distributed under the License is distributed on an "AS IS" BASIS,
200 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
201 | See the License for the specific language governing permissions and
202 | limitations under the License.
203 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | # Copyright 2019 Google LLC
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # https://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 |
16 | # Configuration file for the Sphinx documentation builder. Generated by
17 | # sphinx-quickstart and heavily customized.
18 | #
19 | # For full documentation on Sphinx, see:
20 | # http://www.sphinx-doc.org/en/master/config
21 | #
22 | # For full documentation on the "autodoc" extension, which does most of the
23 | # heavy lifting for us, and which needs the most customization, see:
24 | # http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
25 |
26 |
27 | # -- Path setup --------------------------------------------------------------
28 |
29 | # If extensions (or modules to document with autodoc) are in another directory,
30 | # add these directories to sys.path here. If the directory is relative to the
31 | # documentation root, use os.path.abspath to make it absolute, like shown here.
32 |
33 | # This adds the path to the project root, so that "streamer" can be found by
34 | # the doc generator.
35 | import os
36 | import sys
# The project root is two levels up from docs/source/.
ROOT = os.path.join(os.path.dirname(__file__), '..', '..')
sys.path.insert(0, os.path.abspath(ROOT))
39 |
40 | # This imports certain types we will use directly in the customization at the
41 | # bottom of the config file.
42 | import docutils.nodes
43 | import sphinx.addnodes
44 | import streamer
45 | from streamer import bitrate_configuration
46 | import types
47 |
48 |
# -- Project information -----------------------------------------------------

project = 'Shaka Streamer'
copyright = '2019, Google'
author = 'Google'

# The short X.Y version
version = '.'.join(streamer.__version__.split('.')[0:2])
# The full version, including alpha/beta/rc tags
release = streamer.__version__


# -- General configuration ---------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.autodoc',
    'sphinx.ext.autosectionlabel',
    'sphinx.ext.viewcode',
]

# Add any paths that contain templates here, relative to this directory.
templates_path = []

# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The master toctree document.
master_doc = 'index'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
# NOTE(review): newer Sphinx versions warn on language=None and expect a
# string such as 'en' — confirm against the Sphinx version pinned for docs.
language = None

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = []

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = None


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'nature'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#
html_theme_options = {
  'globaltoc_collapse': False,
}

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = [
  'autolink.js',
]

# Add these files as script tags to the generated HTML.
html_js_files = [
  'autolink.js',
]

# Custom sidebar templates, must be a dictionary that maps document names
# to template names.
#
# The default sidebars (for documents that don't match any pattern) are
# defined by theme itself. Builtin themes are using these templates by
# default: ``['localtoc.html', 'relations.html', 'sourcelink.html',
# 'searchbox.html']``.
#
html_sidebars = {
  '*': ['globaltoc.html', 'searchbox.html'],
}


# -- Options for HTMLHelp output ---------------------------------------------

# Output file base name for HTML help builder.
htmlhelp_basename = 'ShakaStreamerDoc'


# -- Options for LaTeX output ------------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'ShakaStreamer.tex', 'Shaka Streamer Documentation',
     'Google', 'manual'),
]


# -- Options for manual page output ------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'shakastreamer', 'Shaka Streamer Documentation',
     [author], 1)
]


# -- Options for Texinfo output ----------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'ShakaStreamer', 'Shaka Streamer Documentation',
     author, 'ShakaStreamer', 'One line description of project.',
     'Miscellaneous'),
]


# -- Options for Epub output -------------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project

# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''

# A unique identification for the text.
#
# epub_uid = ''

# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']


# -- Extension configuration -------------------------------------------------

# Default settings for autodoc.
autodoc_default_options = {
  'members': True, # Document members,
  'undoc-members': True, # including those with no docstring,
  'inherited-members': True, # and inherited members,
  'member-order': 'bysource', # in source-code order,
  'show-inheritance': True, # and show details on inheritance.
}

# By default, put everything in the docs in the same order it appears in the
# source.
autodoc_member_order = 'bysource'


# Initialize the default resolutions and channel layouts so that their names can
# be shown in the generated docs.
bitrate_config = bitrate_configuration.BitrateConfig({})
bitrate_configuration.VideoResolution.set_map(bitrate_config.video_resolutions)
bitrate_configuration.AudioChannelLayout.set_map(
    bitrate_config.audio_channel_layouts)

# A map from fully-qualified field names to the Field object that represents
# what type it accepts.  Filled in by process_signature during the build.
name_to_type_map = {}
245 |
def process_signature(app, _, name, obj, *other_ignored_args):
  """A callback for each signature in the docs.

  Records every config Field object under its fully-qualified name so the
  documentation for its type can be rewritten later in process_doc_nodes.

  For full documentation on this callback, see
  http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#docstring-preprocessing
  """

  # Only Field objects are interesting; everything else is left alone.
  if not isinstance(obj, streamer.configuration.Field):
    return
  name_to_type_map[name] = obj
259 |
def get_first_child(node, type):
  """Return the first child of |node| that has type |type|."""

  # first_child_matching_class gives us the index; subscript to get the child.
  return node[node.first_child_matching_class(type)]
265 |
def process_doc_nodes(app, doctree, fromdocname):
  """A callback invoked when the documentation is built.

  We use this opportunity to override the docs for config Field objects to
  indicate a human-readable type instead of just "Field".

  For full documentation on this callback, see
  http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#docstring-preprocessing
  """

  # Walk every signature node in the resolved document tree.
  for sig in doctree.traverse(sphinx.addnodes.desc_signature):
    # Only signatures that refer to "Field" objects need rewriting.
    if 'streamer.configuration.Field' not in str(sig):
      continue

    # The first ID on the signature is the name of the thing.
    name = sig['ids'][0]

    # Locate the annotation node and the text it contains.
    annotation = get_first_child(sig, sphinx.addnodes.desc_annotation)
    text = get_first_child(annotation, docutils.nodes.Text)

    # Swap "Field" out for a descriptive type name from the Field object.
    field_object = name_to_type_map[name]
    replacement_text = ': ' + field_object.get_type_name()
    annotation.replace(text, docutils.nodes.Text(
        data=replacement_text, rawsource=replacement_text))
292 |
def skip_member(app, what, name, obj, skip, options):
  """A callback invoked on each member to decide if it should be skipped.

  Returns True to skip a member in the docs.  Any member which is skipped will
  not appear in the generated documentation.

  For full documentation on this callback, see
  http://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html#skipping-members
  """

  # The configuration objects have a few methods which are "public" in the
  # sense that they are used by other classes, but internal to the project.
  # Since the configuration objects serve as documentation for the config file
  # structure, we should skip those methods.

  # In Sphinx 1.8, the "what" parameter for these is "class", though it seems
  # like it should be "method" according to the Sphinx docs.  Though this may
  # have been fixed in a later version of Sphinx, we can ignore "what" and
  # check the type of "obj" instead.  For our case, this is a reference to the
  # actual method.
  if type(obj) is not types.FunctionType:
    return None  # Defer to autodoc's default behavior.
  if 'configuration' in obj.__module__:
    return True
  return None  # Defer to autodoc's default behavior.
315 |
316 |
def setup(app):
  """Called by Sphinx on startup.

  Allows us to install callbacks for certain events and customize the docs.
  """

  # Event-name-to-handler pairs, registered in one pass.
  handlers = (
      ('autodoc-process-signature', process_signature),
      ('doctree-resolved', process_doc_nodes),
      ('autodoc-skip-member', skip_member),
  )
  for event, callback in handlers:
    app.connect(event, callback)
326 |
327 |
--------------------------------------------------------------------------------