├── .github
├── CODEOWNERS
└── workflows
│ ├── broken-links.yml
│ └── sdk_generation.yaml
├── .gitignore
├── .prettierrc.yaml
├── .speakeasy
├── workflow.lock
└── workflow.yaml
├── Dockerfile
├── LICENSE
├── Makefile
├── README.md
├── ai
├── api-reference
│ ├── audio-to-text.mdx
│ ├── gateway.openapi.yaml
│ ├── image-to-image.mdx
│ ├── image-to-text.mdx
│ ├── image-to-video.mdx
│ ├── llm.mdx
│ ├── overview.mdx
│ ├── segment-anything-2.mdx
│ ├── text-to-image.mdx
│ ├── text-to-speech.mdx
│ └── upscale.mdx
├── builders
│ ├── gateways.mdx
│ ├── get-started.mdx
│ └── showcase.mdx
├── contributors
│ ├── coming-soon.mdx
│ ├── developers.mdx
│ ├── get-started.mdx
│ └── guides
│ │ ├── add-model.mdx
│ │ └── add-pipeline.mdx
├── gateways
│ ├── get-started.mdx
│ ├── onchain.mdx
│ └── start-gateway.mdx
├── introduction.mdx
├── orchestrators
│ ├── ai-worker.mdx
│ ├── benchmarking.mdx
│ ├── get-started.mdx
│ ├── models-config.mdx
│ ├── models-download.mdx
│ ├── onchain.mdx
│ └── start-orchestrator.mdx
├── pipelines
│ ├── audio-to-text.mdx
│ ├── image-to-image.mdx
│ ├── image-to-text.mdx
│ ├── image-to-video.mdx
│ ├── llm.mdx
│ ├── overview.mdx
│ ├── segment-anything-2.mdx
│ ├── text-to-image.mdx
│ ├── text-to-speech.mdx
│ └── upscale.mdx
├── sdks
│ ├── go.mdx
│ ├── javascript.mdx
│ ├── overview.mdx
│ └── python.mdx
└── whats-new.mdx
├── api-reference
├── asset
│ ├── delete.mdx
│ ├── get-all.mdx
│ ├── get.mdx
│ ├── overview.mdx
│ ├── update.mdx
│ ├── upload-via-url.mdx
│ └── upload.mdx
├── generate
│ ├── audio-to-text.mdx
│ ├── image-to-image.mdx
│ ├── image-to-text.mdx
│ ├── image-to-video.mdx
│ ├── llm.mdx
│ ├── overview.mdx
│ ├── segment-anything-2.mdx
│ ├── text-to-image.mdx
│ ├── text-to-speech.mdx
│ └── upscale.mdx
├── multistream
│ ├── create.mdx
│ ├── delete.mdx
│ ├── get-all.mdx
│ ├── get.mdx
│ ├── overview.mdx
│ └── update.mdx
├── overview
│ ├── authentication.mdx
│ └── introduction.mdx
├── playback
│ ├── get.mdx
│ └── overview.mdx
├── room
│ ├── create-user.mdx
│ ├── create.mdx
│ ├── delete.mdx
│ ├── get-user.mdx
│ ├── get.mdx
│ ├── remove-user.mdx
│ ├── start-egress.mdx
│ ├── stop-egress.mdx
│ ├── update-user.mdx
│ └── update.mdx
├── session
│ ├── get-all.mdx
│ ├── get-clip.mdx
│ ├── get-recording.mdx
│ ├── get.mdx
│ └── overview.mdx
├── signing-key
│ ├── create.mdx
│ ├── delete.mdx
│ ├── get-all.mdx
│ ├── get.mdx
│ ├── overview.mdx
│ └── update.mdx
├── stream
│ ├── add-multistream-target.mdx
│ ├── create-clip.mdx
│ ├── create.mdx
│ ├── delete-multistream-target.mdx
│ ├── delete.mdx
│ ├── get-all.mdx
│ ├── get-clip.mdx
│ ├── get.mdx
│ ├── overview.mdx
│ ├── terminate.mdx
│ └── update.mdx
├── task
│ ├── get-all.mdx
│ ├── get.mdx
│ └── overview.mdx
├── transcode
│ ├── create.mdx
│ └── overview.mdx
├── viewership
│ ├── get-creators-metrics.mdx
│ ├── get-public-total-views.mdx
│ ├── get-realtime-viewership.mdx
│ ├── get-usage-metrics.mdx
│ └── get-viewership-metrics.mdx
└── webhook
│ ├── create.mdx
│ ├── delete.mdx
│ ├── get-all.mdx
│ ├── get.mdx
│ ├── overview.mdx
│ └── update.mdx
├── delegators
└── guides
│ ├── bridge-lpt-to-arbitrum.mdx
│ ├── migrate-stake-to-arbitrum.mdx
│ └── yield-calculation.mdx
├── developers
├── core-concepts
│ ├── core-api
│ │ ├── access-control.mdx
│ │ ├── asset.mdx
│ │ ├── multistream.mdx
│ │ └── stream.mdx
│ ├── livepeer-network
│ │ ├── delegators.mdx
│ │ ├── gateways.mdx
│ │ └── orchestrators.mdx
│ ├── player
│ │ └── overview.mdx
│ └── studio
│ │ ├── in-browser-broadcast.mdx
│ │ ├── stream-health.mdx
│ │ └── webhooks.mdx
├── guides
│ ├── access-control-jwt.mdx
│ ├── access-control-webhooks.mdx
│ ├── clip-a-livestream.mdx
│ ├── create-livestream.mdx
│ ├── encrypted-asset.mdx
│ ├── get-engagement-analytics-via-api.mdx
│ ├── get-engagement-analytics-via-grafana.mdx
│ ├── get-engagement-analytics-via-timeplus.mdx
│ ├── listen-to-asset-events.mdx
│ ├── listen-to-stream-events.mdx
│ ├── livestream-from-browser.mdx
│ ├── managing-projects.mdx
│ ├── monitor-stream-health.mdx
│ ├── multistream.mdx
│ ├── optimize-latency-of-a-livestream.mdx
│ ├── overview.mdx
│ ├── playback-a-livestream.mdx
│ ├── playback-an-asset.mdx
│ ├── setup-and-listen-to-webhooks.mdx
│ ├── stream-via-obs.mdx
│ ├── thumbnails-live.mdx
│ ├── thumbnails-vod.mdx
│ ├── transcode-video-storj.mdx
│ ├── transcode-video-w3s.mdx
│ └── upload-video-asset.mdx
├── introduction.mdx
├── livepeer-studio-cli.mdx
├── quick-start.mdx
└── tutorials
│ ├── decentralized-app-with-fvm.mdx
│ ├── token-gate-videos-with-lit.mdx
│ ├── upload-playback-videos-4everland.mdx
│ ├── upload-playback-videos-on-arweave.mdx
│ └── upload-playback-videos-on-ipfs.mdx
├── favicon.png
├── gateways
└── guides
│ ├── docker-install.mdx
│ ├── fund-gateway.mdx
│ ├── gateway-overview.mdx
│ ├── linux-install.mdx
│ ├── playback-content.mdx
│ ├── publish-content.mdx
│ ├── transcoding-options.mdx
│ └── windows-install.mdx
├── images
├── ai
│ ├── ai-serviceregistry-explorer-page.png
│ ├── ai-serviceregistry-setserviceuri.png
│ ├── cool-cat-hat-moving.gif
│ ├── cool-cat-hat.png
│ ├── cool-cat-low-res.png
│ ├── cool-cat.png
│ ├── showcase
│ │ ├── dream.png
│ │ ├── inference_stronk_rocks.png
│ │ ├── letsgenerate_ai.png
│ │ └── tsunameme_ai.png
│ └── swagger_ui.png
├── asset-page.png
├── background.png
├── blender-poster-2.png
├── blender-poster.png
├── codepen-player.png
├── create-api-key.png
├── delegating-guides
│ ├── arbitrum-oog.png
│ ├── arbitrum-retry-ui.png
│ ├── claim-d.png
│ ├── confirm-d.png
│ ├── connect-wallet-d.png
│ ├── connect-wallet-d2.png
│ └── migrate-d1.png
├── engagement
│ ├── Dashboard-1.png
│ ├── Dashboard-10.png
│ └── Dashboard-4.png
├── obs
│ ├── LVS4.png
│ ├── LVS5.png
│ ├── OBS1.png
│ ├── OBS2.png
│ ├── OBS3.png
│ └── OBS4.png
├── orchestrating-guides
│ ├── begin-migration.png
│ ├── connect-wallet.png
│ ├── connect-wallet2.png
│ ├── sign-cli.png
│ ├── sign-cli2.png
│ ├── sign-cli3.png
│ ├── sign-cli4.png
│ ├── sign-web.png
│ └── stake-info.png
├── poll.png
├── project-creation.png
├── quickstart
│ ├── api-keys.png
│ ├── cors.png
│ ├── create-an-account.png
│ ├── create-an-api-key.png
│ └── create-key.png
├── stream-health.png
├── stream-page.png
├── studio-in-browser-stream.png
├── titan-node.png
├── tutorials
│ ├── authors
│ │ ├── evan.jpeg
│ │ └── suhail.jpeg
│ ├── grafana-connections.jpg
│ ├── grafana-import-dashboard.png
│ ├── guildxyz-app-homepage.png
│ ├── lit-app-homepage.png
│ ├── livepeer_dashboard.png
│ ├── studio-add-webhook.png
│ ├── studio-create-api.png
│ ├── timeplus.png
│ └── vod-diagram.png
├── vote-livepeer-cli-instructions.png
├── vote-livepeer-cli.png
├── waterfalls-poster.png
├── webhooks.png
└── webrtmp.png
├── logo
├── dark.svg
└── light.svg
├── mint.json
├── openapi.yaml
├── orchestrators
└── guides
│ ├── assess-capabilities.mdx
│ ├── benchmark-transcoding.mdx
│ ├── configure-reward-calling.mdx
│ ├── connect-to-arbitrum.mdx
│ ├── dual-mine.mdx
│ ├── gateway-introspection.mdx
│ ├── get-started.mdx
│ ├── install-go-livepeer.mdx
│ ├── migrate-from-contract-wallet.mdx
│ ├── migrate-to-arbitrum.mdx
│ ├── monitor-metrics.mdx
│ ├── o-t-split.mdx
│ ├── set-pricing.mdx
│ ├── set-session-limits.mdx
│ ├── troubleshoot.mdx
│ └── vote.mdx
├── references
├── api-support-matrix.mdx
├── awesome-livepeer.mdx
├── contract-addresses.mdx
├── example-applications.mdx
├── go-livepeer
│ ├── bandwidth-requirements.mdx
│ ├── cli-reference.mdx
│ ├── gpu-support.mdx
│ ├── hardware-requirements.mdx
│ └── prometheus-metrics.mdx
├── knowledge-base
│ ├── livestream.mdx
│ ├── playback.mdx
│ └── vod.mdx
└── subgraph.mdx
├── sdks
├── go.mdx
├── introduction.mdx
├── javascript.mdx
├── python.mdx
└── react
│ ├── Broadcast.mdx
│ ├── Player.mdx
│ ├── broadcast
│ ├── Audio.mdx
│ ├── Camera.mdx
│ ├── Container.mdx
│ ├── Controls.mdx
│ ├── Enabled.mdx
│ ├── Error.mdx
│ ├── Fullscreen.mdx
│ ├── Loading.mdx
│ ├── PictureInPicture.mdx
│ ├── Portal.mdx
│ ├── Root.mdx
│ ├── Screenshare.mdx
│ ├── Source.mdx
│ ├── Status.mdx
│ ├── Video.mdx
│ ├── get-ingest.mdx
│ └── useBroadcastContext.mdx
│ ├── getting-started.mdx
│ ├── migration
│ ├── 3.x
│ │ ├── Broadcast.mdx
│ │ ├── LivepeerConfig.mdx
│ │ ├── Player.mdx
│ │ ├── asset
│ │ │ ├── useAsset.mdx
│ │ │ ├── useAssetMetrics.mdx
│ │ │ ├── useCreateAsset.mdx
│ │ │ └── useUpdateAsset.mdx
│ │ ├── client.mdx
│ │ ├── constants
│ │ │ ├── abis.mdx
│ │ │ └── contract-addresses.mdx
│ │ ├── getting-started.mdx
│ │ ├── playback
│ │ │ └── usePlaybackInfo.mdx
│ │ ├── providers
│ │ │ └── studio.mdx
│ │ └── stream
│ │ │ ├── useCreateStream.mdx
│ │ │ ├── useStream.mdx
│ │ │ ├── useStreamSession.mdx
│ │ │ ├── useStreamSessions.mdx
│ │ │ └── useUpdateStream.mdx
│ └── migration-4.x.mdx
│ └── player
│ ├── Clip.mdx
│ ├── Container.mdx
│ ├── Controls.mdx
│ ├── Error.mdx
│ ├── Fullscreen.mdx
│ ├── Live.mdx
│ ├── Loading.mdx
│ ├── PictureInPicture.mdx
│ ├── Play.mdx
│ ├── Portal.mdx
│ ├── Poster.mdx
│ ├── RateSelect.mdx
│ ├── Root.mdx
│ ├── Seek.mdx
│ ├── Time.mdx
│ ├── Video.mdx
│ ├── VideoQualitySelect.mdx
│ ├── Volume.mdx
│ ├── get-src.mdx
│ └── useMediaContext.mdx
├── self-hosting
├── deploying.mdx
├── how-to-contribute.mdx
├── overview.mdx
└── self-hosting-with-docker.mdx
└── style.css
/.github/CODEOWNERS:
--------------------------------------------------------------------------------
1 | # Default reviewers for the AI documentation.
2 | ai/ @rickstaa
3 | * @livepeer/studio-team
4 |
--------------------------------------------------------------------------------
/.github/workflows/broken-links.yml:
--------------------------------------------------------------------------------
1 | name: Check Broken Links
2 |
3 | on:
4 | pull_request:
5 | branches:
6 | - main
7 |
8 | jobs:
9 | broken-links:
10 | runs-on: ubuntu-latest
11 |
12 | steps:
13 | - name: Checkout repository
14 | uses: actions/checkout@v4
15 |
16 | - name: Set up Node.js
17 | uses: actions/setup-node@v4
18 | with:
19 | node-version: '22'
20 |
21 | - name: Install Mintlify globally
22 | run: npm install -g mintlify
23 |
24 | - name: Run broken-links check
25 | run: npx mintlify broken-links
26 |
--------------------------------------------------------------------------------
/.github/workflows/sdk_generation.yaml:
--------------------------------------------------------------------------------
1 | name: Generate
2 | permissions:
3 | checks: write
4 | contents: write
5 | pull-requests: write
6 | statuses: write
7 | "on":
8 | workflow_dispatch:
9 | inputs:
10 | force:
11 | description: Force generation of SDKs
12 | type: boolean
13 | default: false
14 | schedule:
15 | - cron: 0 0 * * *
16 | jobs:
17 | generate:
18 | uses: speakeasy-api/sdk-generation-action/.github/workflows/workflow-executor.yaml@v15
19 | with:
20 | force: ${{ github.event.inputs.force }}
21 | mode: pr
22 | speakeasy_version: latest
23 | secrets:
24 | github_access_token: ${{ secrets.GITHUB_TOKEN }}
25 | speakeasy_api_key: ${{ secrets.SPEAKEASY_API_KEY }}
26 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | node_modules
2 | .DS_Store
3 |
4 | # IDE
5 | .vscode
6 | *.code-workspace
7 |
--------------------------------------------------------------------------------
/.prettierrc.yaml:
--------------------------------------------------------------------------------
1 | proseWrap: always
2 |
--------------------------------------------------------------------------------
/.speakeasy/workflow.lock:
--------------------------------------------------------------------------------
1 | speakeasyVersion: 1.543.4
2 | sources:
3 | livepeer-studio-api:
4 | sourceNamespace: livepeer-studio-api
5 | sourceRevisionDigest: sha256:2a39aaefa6d4f52498dc9ee5aec6e20911a09bd474bb9eb0c2f8f67de8d149de
6 | sourceBlobDigest: sha256:50f137973ae47156ff6dc3811e285de7ae10db626653de49f76cdbfbc82f7e44
7 | tags:
8 | - latest
9 | - speakeasy-sdk-regen-1745712246
10 | - 1.0.0
11 | targets: {}
12 | workflow:
13 | workflowVersion: 1.0.0
14 | speakeasyVersion: latest
15 | sources:
16 | livepeer-ai-api:
17 | inputs:
18 | - location: https://raw.githubusercontent.com/livepeer/ai-worker/main/runner/gateway.openapi.yaml
19 | overlays:
20 | - location: https://raw.githubusercontent.com/livepeer/livepeer-ai-js/main/codeSamples.yaml
21 | - location: https://raw.githubusercontent.com/livepeer/livepeer-ai-go/main/codeSamples.yaml
22 | - location: https://raw.githubusercontent.com/livepeer/livepeer-ai-python/main/codeSamples.yaml
23 | output: ai/api-reference/gateway.openapi.yaml
24 | registry:
25 | location: registry.speakeasyapi.dev/livepeer/livepeer-ai/livepeer-ai-oas
26 | livepeer-studio-api:
27 | inputs:
28 | - location: https://raw.githubusercontent.com/livepeer/studio/master/packages/api/src/schema/api-schema.yaml
29 | - location: https://raw.githubusercontent.com/livepeer/studio/master/packages/api/src/schema/ai-api-schema.yaml
30 | overlays:
31 | - location: https://raw.githubusercontent.com/livepeer/livepeer-js/main/codeSamples.yaml
32 | - location: https://raw.githubusercontent.com/livepeer/livepeer-go/main/codeSamples.yaml
33 | - location: https://raw.githubusercontent.com/livepeer/livepeer-python/main/codeSamples.yaml
34 | output: openapi.yaml
35 | registry:
36 | location: registry.speakeasyapi.dev/livepeer/livepeer-studio/livepeer-studio-api
37 | targets: {}
38 |
--------------------------------------------------------------------------------
/.speakeasy/workflow.yaml:
--------------------------------------------------------------------------------
1 | workflowVersion: 1.0.0
2 | speakeasyVersion: latest
3 | sources:
4 | livepeer-ai-api:
5 | inputs:
6 | - location: https://raw.githubusercontent.com/livepeer/ai-worker/main/runner/gateway.openapi.yaml
7 | overlays:
8 | - location: https://raw.githubusercontent.com/livepeer/livepeer-ai-js/main/codeSamples.yaml
9 | - location: https://raw.githubusercontent.com/livepeer/livepeer-ai-go/main/codeSamples.yaml
10 | - location: https://raw.githubusercontent.com/livepeer/livepeer-ai-python/main/codeSamples.yaml
11 | output: ai/api-reference/gateway.openapi.yaml
12 | registry:
13 | location: registry.speakeasyapi.dev/livepeer/livepeer-ai/livepeer-ai-oas
14 | livepeer-studio-api:
15 | inputs:
16 | - location: https://raw.githubusercontent.com/livepeer/studio/master/packages/api/src/schema/api-schema.yaml
17 | - location: https://raw.githubusercontent.com/livepeer/studio/master/packages/api/src/schema/ai-api-schema.yaml
18 | overlays:
19 | - location: https://raw.githubusercontent.com/livepeer/livepeer-js/main/codeSamples.yaml
20 | - location: https://raw.githubusercontent.com/livepeer/livepeer-go/main/codeSamples.yaml
21 | - location: https://raw.githubusercontent.com/livepeer/livepeer-python/main/codeSamples.yaml
22 | output: openapi.yaml
23 | registry:
24 | location: registry.speakeasyapi.dev/livepeer/livepeer-studio/livepeer-studio-api
25 | targets: {}
26 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM node:current
2 |
3 | RUN npm install -g mintlify
4 |
5 | WORKDIR /app
6 |
7 | ADD . .
8 |
9 | CMD ["mintlify", "dev"]
10 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2023 Livepeer Inc
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: all
2 | all:
3 | docker buildx build --platform linux/amd64 --load -t livepeer/docs .
4 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # Livepeer Docs
2 |
3 | ### 👩‍💻 Development
4 |
5 | Install the [Mintlify CLI](https://www.npmjs.com/package/mintlify) to preview
6 | the documentation changes locally. To install it, run the following command:
7 |
8 | ```bash
9 | npm i -g mintlify
10 | ```
11 |
12 | Run the following command at the root of the repository (where `mint.json` is located):
13 |
14 | ```bash
15 | mintlify dev
16 | ```
17 |
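18 | Alternatively, the repository's Dockerfile and Makefile can be used to preview
19 | the docs in a container. A minimal sketch, assuming `mintlify dev` serves on its
20 | default port 3000:
21 |
22 | ```bash
23 | make                                          # build the livepeer/docs image (see Makefile)
24 | docker run --rm -p 3000:3000 livepeer/docs    # run `mintlify dev` inside the container
25 | ```
26 |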
--------------------------------------------------------------------------------
/ai/api-reference/audio-to-text.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /audio-to-text
3 | ---
4 |
5 |
6 | The default Gateway used in this guide is the public
7 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
8 | not intended for production-ready applications. For production-ready
9 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
10 | Gateway, which requires an API token. Alternatively, you can set up your own
11 | Gateway node or partner with one via the `ai-video` channel on
12 | [Discord](https://discord.gg/livepeer).
13 |
14 |
15 |
16 | Please note that the exact parameters, default values, and responses may vary
17 | between models. For more information on model-specific parameters, please
18 | refer to the respective model documentation available in the [audio-to-text
19 | pipeline](/ai/pipelines/audio-to-text). Not all parameters might be available
20 | for a given model.
21 |
22 |
--------------------------------------------------------------------------------
/ai/api-reference/image-to-image.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /image-to-image
3 | ---
4 |
5 |
6 | The default Gateway used in this guide is the public
7 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
8 | not intended for production-ready applications. For production-ready
9 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
10 | Gateway, which requires an API token. Alternatively, you can set up your own
11 | Gateway node or partner with one via the `ai-video` channel on
12 | [Discord](https://discord.gg/livepeer).
13 |
14 |
15 |
16 | Please note that the exact parameters, default values, and responses may vary
17 | between models. For more information on model-specific parameters, please
18 | refer to the respective model documentation available in the [image-to-image
19 | pipeline](/ai/pipelines/image-to-image). Not all parameters might be available
20 | for a given model.
21 |
22 |
--------------------------------------------------------------------------------
/ai/api-reference/image-to-text.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /image-to-text
3 | ---
4 |
5 |
6 | The default Gateway used in this guide is the public
7 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
8 | not intended for production-ready applications. For production-ready
9 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
10 | Gateway, which requires an API token. Alternatively, you can set up your own
11 | Gateway node or partner with one via the `ai-video` channel on
12 | [Discord](https://discord.gg/livepeer).
13 |
14 |
15 |
16 | Please note that the exact parameters, default values, and responses may vary
17 | between models. For more information on model-specific parameters, please
18 | refer to the respective model documentation available in the [image-to-text
19 | pipeline](/ai/pipelines/image-to-text). Not all parameters might be available
20 | for a given model.
21 |
22 |
--------------------------------------------------------------------------------
/ai/api-reference/image-to-video.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /image-to-video
3 | ---
4 |
5 |
6 | The default Gateway used in this guide is the public
7 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
8 | not intended for production-ready applications. For production-ready
9 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
10 | Gateway, which requires an API token. Alternatively, you can set up your own
11 | Gateway node or partner with one via the `ai-video` channel on
12 | [Discord](https://discord.gg/livepeer).
13 |
14 |
15 |
16 | Please note that the exact parameters, default values, and responses may vary
17 | between models. For more information on model-specific parameters, please
18 | refer to the respective model documentation available in the
19 | [image-to-video pipeline](/ai/pipelines/image-to-video). Not all
20 | parameters might be available for a given model.
21 |
22 |
--------------------------------------------------------------------------------
/ai/api-reference/llm.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /llm
3 | ---
4 |
5 |
6 | The LLM pipeline is OpenAI API-compatible but does **not** implement all features of the OpenAI API.
7 |
8 |
9 |
10 | The default Gateway used in this guide is the public
11 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
12 | not intended for production-ready applications. For production-ready
13 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
14 | Gateway, which requires an API token. Alternatively, you can set up your own
15 | Gateway node or partner with one via the `ai-video` channel on
16 | [Discord](https://discord.gg/livepeer).
17 |
18 |
19 | ### Streaming Responses
20 |
21 |
22 | Ensure your client supports SSE and processes each `data:` line as it arrives.
23 |
24 |
25 | By default, the `/llm` endpoint returns a single JSON response in the OpenAI
26 | [chat/completions](https://platform.openai.com/docs/api-reference/chat/object)
27 | format, as shown in the sidebar.
28 |
29 | To receive responses token-by-token, set `"stream": true` in the request body. The server will then use **Server-Sent Events (SSE)** to stream output in real time.
30 |
31 |
32 | Each streamed chunk will look like:
33 |
34 | ```json
35 | data: {
36 | "choices": [
37 | {
38 | "delta": {
39 | "content": "...token...",
40 | "role": "assistant"
41 | },
42 | "finish_reason": null
43 | }
44 | ]
45 | }
46 | ```
47 |
48 | The final chunk will have empty content and `"finish_reason": "stop"`:
49 |
50 | ```json
51 | data: {
52 | "choices": [
53 | {
54 | "delta": {
55 | "content": "",
56 | "role": "assistant"
57 | },
58 | "finish_reason": "stop"
59 | }
60 | ]
61 | }
62 | ```
63 |
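64 | For example, a streaming request can be issued with `curl` as sketched below;
65 | the Gateway URL, API token, and model identifier are placeholders:
66 |
67 | ```bash
68 | curl -N "https://<GATEWAY_URL>/llm" \
69 |   -H "Content-Type: application/json" \
70 |   -H "Authorization: Bearer $API_TOKEN" \
71 |   -d '{
72 |     "model": "<model-id>",
73 |     "messages": [{"role": "user", "content": "Tell me a joke."}],
74 |     "stream": true
75 |   }'
76 | ```
77 |
78 | The `-N` flag disables curl's output buffering so each `data:` line is printed
79 | as soon as it arrives.
80 |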
--------------------------------------------------------------------------------
/ai/api-reference/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Introduction
3 | description: "Learn more about Livepeer's AI API."
4 | ---
5 |
6 | Welcome to the Livepeer AI API reference docs! Here you'll find all the
7 | endpoints exposed on the standard Livepeer AI API, learn how to use them and
8 | what they return.
9 |
10 | The Livepeer AI API is organized around REST, has predictable resource-oriented
11 | URLs, accepts JSON request bodies, returns JSON-encoded responses, and uses
12 | standard HTTP response codes, authentication, and verbs.
13 |
14 | There are multiple gateway providers active in the ecosystem that can be used to interact with the Livepeer AI network. For more details, visit the [AI Gateways](/ai/builders/gateways) page.
15 |
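16 | For illustration, a request to an AI Gateway looks like the sketch below. The
17 | Gateway URL and API token are placeholders, and the token is only needed for
18 | Gateways that require one:
19 |
20 | ```bash
21 | curl -X POST "https://<GATEWAY_URL>/text-to-image" \
22 |   -H "Content-Type: application/json" \
23 |   -H "Authorization: Bearer $API_TOKEN" \
24 |   -d '{"prompt": "A cat wearing a hat"}'
25 | ```
26 |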
--------------------------------------------------------------------------------
/ai/api-reference/segment-anything-2.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /segment-anything-2
3 | ---
4 |
5 |
6 | The default Gateway used in this guide is the public
7 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
8 | not intended for production-ready applications. For production-ready
9 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
10 | Gateway, which requires an API token. Alternatively, you can set up your own
11 | Gateway node or partner with one via the `ai-video` channel on
12 | [Discord](https://discord.gg/livepeer).
13 |
14 |
15 |
16 | Please note that the exact parameters, default values, and responses may vary
17 | between models. For more information on model-specific parameters, please
18 | refer to the respective model documentation available in the
19 | [segment-anything-2 pipeline](/ai/pipelines/segment-anything-2). Not all
20 | parameters might be available for a given model.
21 |
22 |
--------------------------------------------------------------------------------
/ai/api-reference/text-to-image.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /text-to-image
3 | ---
4 |
5 |
6 | The default Gateway used in this guide is the public
7 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
8 | not intended for production-ready applications. For production-ready
9 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
10 | Gateway, which requires an API token. Alternatively, you can set up your own
11 | Gateway node or partner with one via the `ai-video` channel on
12 | [Discord](https://discord.gg/livepeer).
13 |
14 |
15 |
16 | Please note that the exact parameters, default values, and responses may vary
17 | between models. For more information on model-specific parameters, please
18 | refer to the respective model documentation available in the [text-to-image
19 | pipeline](/ai/pipelines/text-to-image). Not all parameters might be available
20 | for a given model.
21 |
22 |
--------------------------------------------------------------------------------
/ai/api-reference/text-to-speech.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /text-to-speech
3 | ---
4 |
5 |
6 | The default Gateway used in this guide is the public
7 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
8 | not intended for production-ready applications. For production-ready
9 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
10 | Gateway, which requires an API token. Alternatively, you can set up your own
11 | Gateway node or partner with one via the `ai-video` channel on
12 | [Discord](https://discord.gg/livepeer).
13 |
14 |
15 |
16 | Please note that the exact parameters, default values, and responses may vary
17 | between models. For more information on model-specific parameters, please
18 | refer to the respective model documentation available in the [text-to-speech
19 | pipeline](/ai/pipelines/text-to-speech). Not all parameters might be available
20 | for a given model.
21 |
22 |
--------------------------------------------------------------------------------
/ai/api-reference/upscale.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | openapi: post /upscale
3 | ---
4 |
5 |
6 | The default Gateway used in this guide is the public
7 | [Livepeer.cloud](https://www.livepeer.cloud/) Gateway. It is free to use but
8 | not intended for production-ready applications. For production-ready
9 | applications, consider using the [Livepeer Studio](https://livepeer.studio/)
10 | Gateway, which requires an API token. Alternatively, you can set up your own
11 | Gateway node or partner with one via the `ai-video` channel on
12 | [Discord](https://discord.gg/livepeer).
13 |
14 |
15 |
16 | Please note that the exact parameters, default values, and responses may vary
17 | between models. For more information on model-specific parameters, please
18 | refer to the respective model documentation available in the [upscaling
19 | pipeline](/ai/pipelines/upscale). Not all parameters might be available for a
20 | given model.
21 |
22 |
--------------------------------------------------------------------------------
/ai/builders/get-started.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Building on Livepeer AI
3 | ---
4 |
5 | Livepeer AI, now in its **Beta** phase, is fully operational and already
6 | powering a diverse range of cutting-edge [applications](/ai/builders/showcase).
7 | Developers are actively building on the platform, with a rapidly growing
8 | community. While the network continues to evolve with frequent updates, its
9 | stable foundation offers an exciting space for innovation. With a variety of
10 | [easy-to-use client SDKs](/ai/sdks/overview), developers can dive in and start
11 | shaping the future of AI-powered solutions.
12 |
13 |
14 |
20 | Discover the available AI pipelines.
21 |
22 |
28 | Explore developer SDKs for interacting with the Livepeer AI API.
29 |
30 |
36 | Choose an AI Gateway to interact with the AI Subnet.
37 |
38 |
44 | Explore and interact with the Livepeer AI API.
45 |
46 |
47 | Explore projects built on Livepeer AI.
48 |
49 |
50 |
--------------------------------------------------------------------------------
/ai/builders/showcase.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Showcase
3 | ---
4 |
5 |
6 | If you would like your project to be featured, please contact us at
7 | [ai@livepeer.org](mailto:ai@livepeer.org).
8 |
9 |
10 | Several
11 | [startups](https://mirror.xyz/livepeer.eth/XcpTZyAkc40kdQMGHZ7prAO7gnB2S1y45iacMtdzrII),
12 | applications, and projects have been developed using Livepeer AI. Below are a
13 | few that have agreed to be featured.
14 |
15 |
16 |
17 |
22 | A mobile app that enables anyone to create **stunning gif expressions** with
23 | generative AI, powered by the Livepeer network.
24 |
25 |
29 |
30 | An open-source platform, built on **Stability-AI/StableStudio**, designed for
31 | experimenting with all pipelines on the AI Subnet.
32 |
33 |
34 |
39 | Create images and videos for **free** on the Livepeer network with this generator.
40 |
41 |
42 |
47 | A robust open-source tool for testing and comparing the complete feature set
48 | of Livepeer's AI pipelines, including custom-built **experimental features**.
49 |
50 |
51 |
--------------------------------------------------------------------------------
/ai/contributors/coming-soon.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Get Started
3 | ---
4 |
5 | Amazing that you decided to join our open-source community and help us
6 | democratize AI! 🎉
7 |
8 | All our software is open-source, and anyone can permissionlessly add their AI
9 | compute job to the Livepeer network. We have a strong developer community with a
10 | [core developer group](https://github.com/livepeer/project-management) and tens
11 | of open-source developers. The best way to get in touch with this community and
12 | start developing is by joining our [Discord](https://discord.gg/livepeer).
13 |
14 | We also offer multiple opportunities for developers to get rewarded for their
15 | contributions, such as our
16 | [grants program](https://livepeer.notion.site/Livepeer-Grants-Program-f91071b5030d4c31ad4dd08e7c026526),
17 | [software bounty program](https://github.com/livepeer/bounties), and
18 | [bug bounty program](https://immunefi.com/bug-bounty/livepeer/information/) for
19 | security researchers.
20 |
21 | Let's build something amazing together! 🚀
22 |
23 |
24 | Contribution guidelines are coming soon. In the meantime, join the `ai-video`
25 | channel on the [Livepeer Discord](https://discord.gg/livepeer) for any questions
26 | or assistance.
27 |
28 |
--------------------------------------------------------------------------------
/ai/contributors/developers.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Develop the AI Subnet
3 | ---
4 |
5 | It's great that you've decided to contribute to the Livepeer AI Subnet and help
6 | achieve the mission of **building a decentralized AI ecosystem**. Here are the
7 | steps to get started:
8 |
9 | 1. **Get familiar with the Developers Portal**: Visit
10 | https://livepeer.org/developers and read through the general documentation to
11 | get familiar with the Livepeer ecosystem.
12 | 2. **Join the Livepeer Discord**: Join the Livepeer Discord at
13 | https://discord.gg/livepeer and introduce yourself in the `ai-video` channel.
14 | Share your background and what you are interested in building.
15 | 3. **Get familiar with the AI Subnet**: Read through the
16 | [AI Subnet documentation](/ai/introduction) to understand the architecture
17 | and how it works.
18 | 4. **Follow the Developer Guides**: Read through the developer guides below to
19 | get started with building on the AI Subnet.
20 |
21 | ## Developer Guides
22 |
23 |
24 |
30 | Add support for a new diffusion model.
31 |
32 |
38 | Add a new pipeline to the AI Subnet.
39 |
40 |
41 |
--------------------------------------------------------------------------------
/ai/contributors/get-started.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Get Started
3 | ---
4 |
5 | It's fantastic to see your interest in contributing to the Livepeer AI Subnet!
6 | There are many ways to contribute, and we're excited to build a decentralized AI
7 | ecosystem together ❤️. Here are a few ways to get started:
8 |
9 | - **Orchestrators**: If you have GPUs with at least 16GB of VRAM, you can
10 | contribute by setting up an AI Orchestrator node and processing AI tasks on the
11 | AI Subnet. Check out the
12 | [Orchestrator Setup Guide](/ai/orchestrators/get-started) page to learn more.
13 | - **Gateways**: If you have server hardware, you can connect customers to the AI
14 | Subnet by setting up an AI Gateway node. Check out the
15 | [Set up AI Gateway](/ai/gateways/get-started) page to learn more.
16 | - **Founders**: Founders can contribute by building AI applications on the AI
17 | Subnet. Check out the [Build on the AI Subnet](/ai/builders) page to learn
18 | more.
19 | - **Developers**: Open-source developers can contribute by improving the AI
20 | Subnet codebase. Check out the guide on
21 | [Contributing to the AI Subnet](/ai/contributors/coming-soon) to learn more.
22 |
23 | If you have any questions or need help, reach out in the `ai-video` channel of
24 | the [Livepeer Discord](https://discord.gg/livepeer). Support is available to
25 | help you get started and along the way. Let's build together! 🚀
26 |
--------------------------------------------------------------------------------
/ai/contributors/guides/add-model.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Add new Model
3 | ---
4 |
5 | Adding support for a new model to the AI Subnet is a great way to get started
6 | with contributing to the Livepeer AI Subnet. This guide will walk you through
7 | the steps to add a new model to the AI Subnet.
8 |
9 | ## Video Guide
10 |
11 | In this video, one of the Livepeer ecosystem's core Developers will walk you
12 | through the steps to add a new model to the AI Subnet.
13 |
14 |
23 |
24 | ## Step-by-Step Guide
25 |
26 | Adding a new model to the AI Subnet is relatively straightforward, since each
27 | supported [pipeline](/ai/contributors/guides/add-pipeline) is designed to work
28 | with multiple diffusion models. To add a new model, you
29 | will need to follow these steps:
30 |
31 | 1. **Fork the AI Worker Repository**: Fork the
32 | [Livepeer AI Worker](https://github.com/livepeer/ai-worker) repository to
33 | your GitHub account.
34 | 2. **Clone the Repository**: Clone the forked repository to your local machine.
35 | 3. **Find a Model**: Find a diffusion model from
36 | [Hugging Face](https://huggingface.co/models?pipeline_tag=image-to-video)
37 | that you would like to add to the AI Subnet. Replace the `image-to-video`
38 | pipeline tag in the URL with the pipeline tag of your choice.
39 | 4. **Download the Model**: Update the
40 | [runner/dl_checkpoints.sh](https://github.com/livepeer/ai-worker/blob/main/runner/dl_checkpoints.sh#L73)
41 | script to download the model weights from Hugging Face.
42 | 5. **Set the MODEL_ID env variable**: Set the `MODEL_ID` environment variable to
43 | the Hugging Face identifier of the model you want to add.
44 | 6. **Run the AI Worker**: Run the AI Worker with the updated
45 | `runner/dl_checkpoints.sh` script to download the model weights.
46 | 7. **Test the Model**: Test the model by starting the AI Worker (see the
47 | [runner README](https://github.com/livepeer/ai-worker/tree/main/runner#readme)).
48 | 8. **Make the necessary changes**: If a model does not work natively with the AI
49 | worker, you may need to make changes to the respective pipeline the model is
50 | in.
51 |
52 |
53 | The best way to develop the AI Worker is by using the [VS Code Dev
54 | Container](https://code.visualstudio.com/docs/devcontainers/containers)
55 | provided in the repository. This will allow you to debug and test your changes
56 | in a consistent environment. For more information, see [the runner dev
57 | documentation](https://github.com/livepeer/ai-worker/blob/main/runner/dev/README.md).
58 |
59 |
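60 | As a rough illustration of steps 4 and 5, the snippet below downloads a
61 | hypothetical model with the Hugging Face CLI and advertises it to the worker via
62 | the `MODEL_ID` environment variable. The exact flags and paths used in
63 | `runner/dl_checkpoints.sh` may differ:
64 |
65 | ```bash
66 | # Download the weights of the new model (identifier is a placeholder).
67 | huggingface-cli download <org>/<model-name> --include "*.safetensors" "*.json"
68 |
69 | # Tell the AI Worker which model to load.
70 | export MODEL_ID=<org>/<model-name>
71 | ```
72 |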
--------------------------------------------------------------------------------
/ai/contributors/guides/add-pipeline.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Add new Pipeline
3 | ---
4 |
5 | Adding support for a new pipeline to the AI Subnet offers the opportunity to
6 | expand its capabilities and provide more options for developers to build on top
7 | of the Livepeer network. This guide will walk you through the steps to add a new
8 | pipeline to the AI Subnet.
9 |
10 | ## Video Guide
11 |
12 | In this video, one of the Livepeer ecosystem's core Developers will walk you
13 | through the steps to add a new pipeline to the AI Subnet.
14 |
15 |
24 |
25 | ## Step-by-Step Guide
26 |
27 | Adding a new pipeline to the AI Subnet is more involved than
28 | [adding a new model](/ai/contributors/guides/add-model), but it is still relatively
29 | straightforward. To add a new pipeline, you will need to follow these steps:
30 |
31 | 1. **Fork the AI Worker Repository**: Fork the [Livepeer AI Worker](https://github.com/livepeer/ai-worker) repository
32 | to your GitHub account.
33 | 2. **Clone the Repository**: Clone the forked repository to your local machine.
34 | 3. **Create a New Pipeline**: Create a new pipeline in the `runner/pipelines`
35 | directory. You can use the existing pipelines as a reference. To get started,
36 | you can use the template files in the `runner/examples` directory.
37 | 4. **Create a New Route**: Create a new route in the `runner/routes` directory
38 | to handle the API requests for the new pipeline. To get started, you can use
39 | the template files in the `runner/examples` directory.
40 |
41 |
42 | The best way to develop the AI Worker is by using the [VS Code Dev
43 | Container](https://code.visualstudio.com/docs/devcontainers/containers)
44 | provided in the repository. This will allow you to debug and test your changes
45 | in a consistent environment. For more information, see [the runner dev
46 | documentation](https://github.com/livepeer/ai-worker/blob/main/runner/dev/README.md).
47 |
48 |
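49 | As a rough illustration of steps 3 and 4, a new pipeline and its route are
50 | created in the directories mentioned above. The template filenames below are
51 | hypothetical; check `runner/examples` for the actual ones:
52 |
53 | ```bash
54 | # Scaffold a new pipeline and its API route from the example templates.
55 | cp runner/examples/<pipeline-template>.py runner/pipelines/my_pipeline.py
56 | cp runner/examples/<route-template>.py runner/routes/my_pipeline.py
57 | ```
58 |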
--------------------------------------------------------------------------------
/ai/gateways/get-started.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Get Started
3 | ---
4 |
5 | If you're interested in joining Livepeer AI as a **Gateway** (formerly known as
6 | a _Broadcaster_) to perform **AI inference task routing** and provide inference
7 | services to customers, this guide is for you. It builds on the
8 | [Gateway Setup Guide](/gateways/guides/gateway-overview) for the Mainnet
9 | Transcoding Network, with additional steps specific to AI operations.
10 |
11 | Dive into the **subpages** for a step-by-step walkthrough of the AI-specific
12 | setup. For foundational knowledge on general Gateway operations, refer to the
13 | [Gateway Setup Guide](/gateways/guides/gateway-overview). This guide extends it
14 | with the steps needed for AI task routing.
15 |
16 |
21 | Visit the Gateway Setup Guide for detailed instructions on setting up a
22 | Gateway node on the Mainnet Transcoding Network.
23 |
24 |
25 | ## Prerequisites
26 |
27 | Before you begin setting up your AI Gateway node, ensure you have:
28 |
29 | - A [Linux](https://ubuntu.com/download/desktop) system (Support for Windows and
30 | macOS coming soon)
31 | - Root user access to the system
32 |
--------------------------------------------------------------------------------
/ai/orchestrators/get-started.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Get Started
3 | ---
4 |
5 | If you're interested in joining Livepeer AI as an **Orchestrator** to perform
6 | **AI inference** on the Livepeer AI Network and earn fees, this guide is for
7 | you. It is tailored to help you set up an AI Orchestrator node, building
8 | upon the [Orchestrator Setup Guide](/orchestrators/guides/get-started) for the
9 | Mainnet Transcoding Network, but with additional steps for AI operations.
10 |
11 | For a step-by-step walkthrough, refer to the **subpages** of this guide. For a
12 | more general understanding of Orchestrator operations, you can consult the
13 | [Orchestrator Setup Guide](/orchestrators/guides/get-started) for the Mainnet
14 | Transcoding Network, as the AI guide extends upon the foundational knowledge
15 | provided there.
16 |
17 |
22 | Visit the Orchestrator Setup Guide for detailed instructions on setting up an
23 | Orchestrator node on the Mainnet Transcoding Network.
24 |
25 |
26 | ## Prerequisites
27 |
28 | Before setting up your AI Orchestrator node, ensure you meet the following
29 | requirements:
30 |
31 | - You are operating a Top 100
32 | [Mainnet Orchestrator](/orchestrators/guides/get-started) on the Mainnet
33 | Transcoding Network
34 | - **High VRAM GPUs Required**: Livepeer AI requires GPUs with at least 16GB of VRAM for most tasks. For optimal performance and higher job selection chances, 30/40 series GPUs or comparable models are recommended. Exact requirements are in the [AI Pipelines](/ai/pipelines) documentation.
35 | - **[Docker](https://docs.docker.com/engine/install)** is installed on your
36 | machine
37 | - **[CUDA 12.4](https://developer.nvidia.com/cuda-12-4-0-download-archive)** is
38 | installed on your machine
39 | - **[Nvidia Container Toolkit](https://docs.nvidia.com/datacenter/cloud-native/container-toolkit/install-guide.html)**
40 | is installed on your machine
41 | - You are using a **[Linux](https://ubuntu.com/download/desktop)** system
42 | (Support for Windows and macOS is coming soon)
43 | - You have
44 | **[Python 3.10](https://www.python.org/downloads/release/python-3100/)** or
45 | higher installed (for downloading and managing AI models)
46 |
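47 | A quick way to sanity-check the GPU-related prerequisites is sketched below;
48 | the CUDA image tag is only an example:
49 |
50 | ```bash
51 | # Confirm the NVIDIA driver and CUDA version on the host.
52 | nvidia-smi
53 |
54 | # Confirm Docker can reach the GPU through the Nvidia Container Toolkit.
55 | docker run --rm --gpus all nvidia/cuda:12.4.0-base-ubuntu22.04 nvidia-smi
56 |
57 | # Confirm the Python version.
58 | python3 --version
59 | ```
60 |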
--------------------------------------------------------------------------------
/ai/orchestrators/models-download.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Download AI Models
3 | ---
4 |
5 | AI Orchestrators on the Livepeer AI network can choose from various AI models to
6 | advertise on the network. A comprehensive list of currently supported models for
7 | each Livepeer AI pipeline is available on the [Pipelines](/ai/pipelines) page.
8 | Before specifying which models to advertise on the Livepeer AI network, you must
9 | first download the models to your machine. The following steps will guide you
10 | through downloading the **recommended** models for Livepeer AI.
11 |
12 |
13 |
14 | Use the following command to install the HuggingFace CLI:
15 |
16 | ```bash
17 | pip install "huggingface_hub[cli,hf_transfer]"
18 | ```
19 |
20 |
21 | Follow the instructions on the [Hugging Face website](https://huggingface.co/docs/hub/en/security-tokens) to generate an access token with **read** permissions. Then, use the Hugging Face CLI to log in with the token:
22 |
23 | ```bash
24 | huggingface-cli login
25 | ```
26 | You will be prompted to paste the access token you created on the Hugging Face website.
27 |
28 | If you have trouble locating `huggingface-cli` in your path, it may be necessary to add `/home/$USER/.local/bin/` to your local path.
29 |
30 |
31 | The currently recommended models include one **gated** model. Therefore, you must accept the terms of the [SVD1.1 model](https://huggingface.co/stabilityai/stable-video-diffusion-img2vid-xt-1-1) on its model page before downloading.
32 |
33 |
34 | Use the following command to download the **recommended** models for Livepeer AI:
35 |
36 | ```bash
37 | cd ~/.lpData
38 | curl -s https://raw.githubusercontent.com/livepeer/ai-worker/main/runner/dl_checkpoints.sh | bash -s -- --beta
39 | ```
40 |
41 | This command downloads the recommended models for Livepeer AI and stores them in your machine's `~/.lpData/models` directory. To obtain a complete set of models, omit the `--beta` flag. This will require additional disk space.
42 |
43 |
44 |
45 |
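46 | Once the script finishes, you can confirm the download by listing the models
47 | directory:
48 |
49 | ```bash
50 | ls -lh ~/.lpData/models
51 | ```
52 |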
--------------------------------------------------------------------------------
/ai/sdks/go.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Go
3 | description: "Learn how to run your first AI inference job using the Livepeer AI Go SDK."
4 | icon: golang
5 | ---
6 |
7 |
8 |
9 | To get the most out of this guide, you’ll need to:
10 |
11 | - [Choose an AI Gateway](/ai/builders/gateways)
12 | - **Optional**: Get an API key (required for some gateways).
13 |
14 |
15 |
16 |
17 | Get the Livepeer AI Golang SDK.
18 |
19 | ```bash
20 | go get github.com/livepeer/livepeer-ai-go
21 | ```
22 |
23 |
24 |
25 |
26 | The first step is to initialize the SDK (with your API key if required).
27 |
28 | ```go
29 | package main
30 |
31 | import (
32 | 	livepeeraigo "github.com/livepeer/livepeer-ai-go"
33 | )
34 |
35 | func main() {
36 | 	// Initialize the SDK client; pass your API key if your Gateway requires one.
37 | 	s := livepeeraigo.New(
38 | 		livepeeraigo.WithSecurity(""),
39 | 	)
40 |
41 | 	_ = s // the client is used to make requests in the next step
42 | }
43 | ```
44 |
45 |
46 |
47 | Now that you have the SDK installed and initialized, you can use it to request one of the [available AI services](/ai/pipelines/overview).
48 |
49 | ```go
50 | package main
51 |
52 | import (
53 | "context"
54 | livepeeraigo "github.com/livepeer/livepeer-ai-go"
55 | "github.com/livepeer/livepeer-ai-go/models/components"
56 | "log"
57 | )
58 |
59 | func main() {
60 | s := livepeeraigo.New(
61 | livepeeraigo.WithSecurity(""),
62 | )
63 |
64 | ctx := context.Background()
65 | res, err := s.Generate.TextToImage(ctx, components.TextToImageParams{
66 | Prompt: "",
67 | })
68 | if err != nil {
69 | log.Fatal(err)
70 | }
71 | if res.ImageResponse != nil {
72 | // handle response
73 | }
74 | }
75 | ```
76 |
77 |
78 |
79 |
80 |
85 | See the examples on GitHub.
86 |
87 |
88 |
89 |
90 | ## Next steps
91 |
92 | Check out the Livepeer AI [API Reference](/ai/api-reference) to learn more about the
93 | Livepeer AI API and the Golang SDK.
94 |
--------------------------------------------------------------------------------
/ai/sdks/javascript.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: JavaScript
3 | description:
4 | "Learn how to run your first AI inference job using the Livepeer AI JS SDK."
5 | icon: js
6 | ---
7 |
8 |
9 |
10 | To get the most out of this guide, you’ll need to:
11 |
12 | - [Choose an AI Gateway](/ai/builders/gateways)
13 | - **Optional**: Get an API key (required for some gateways).
14 |
15 |
16 |
17 |
18 | Get the Livepeer AI JavaScript SDK.
19 |
20 | ```bash
21 | npm add @livepeer/ai
22 | ```
23 |
24 |
25 |
26 |
27 | The first step is to initialize the SDK (with your API key if required).
28 |
29 | ```js
30 | import { Livepeer } from "@livepeer/ai";
31 |
32 | const livepeer = new Livepeer({
33 | httpBearer: "",
34 | });
35 | ```
36 |
37 |
38 |
39 | Now that you have the SDK installed and initialized, you can use it to request one of the [available AI services](/ai/pipelines/overview).
40 |
41 | ```js
42 | import { Livepeer } from "@livepeer/ai";
43 |
44 | const livepeer = new Livepeer({
45 | httpBearer: "",
46 | });
47 |
48 | async function run() {
49 | const result = await livepeer.generate.textToImage({
50 | prompt: "",
51 | });
52 |
53 | // Handle the result
54 | console.log(result);
55 | }
56 |
57 | run();
58 | ```
59 |
60 |
61 |
62 |
63 |
68 | See the examples on GitHub.
69 |
70 |
71 |
72 |
73 | ## Next steps
74 |
75 | Check out the Livepeer AI [API Reference](/ai/api-reference) to learn more about
76 | the Livepeer AI API and the JavaScript SDK.
77 |
--------------------------------------------------------------------------------
/ai/sdks/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "AI SDKs"
3 | description: "Explore developer SDKs for interacting with the Livepeer AI API."
4 | sidebarTitle: "Overview"
5 | ---
6 |
7 | ## Client-side SDKs
8 |
9 | Client-side SDKs simplify the process of using the Livepeer AI API. Just install
10 | a language-specific SDK into your application, choose an
11 | [AI Gateway](/ai/builders/gateways), and get started with only 2 lines of
12 | code. For some gateways, initialization with an API key is optional.
13 |
14 |
15 |
16 |
22 |
23 |
24 |
--------------------------------------------------------------------------------
/ai/sdks/python.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Python
3 | description:
4 | "Learn how to run your first AI inference job using the Livepeer AI Python
5 | SDK."
6 | icon: python
7 | ---
8 |
9 |
10 |
11 | To get the most out of this guide, you’ll need to:
12 |
13 | - [Choose an AI Gateway](/ai/builders/gateways)
14 | - **Optional**: Get an API key (required for some gateways).
15 |
16 |
17 |
18 |
19 | Get the Livepeer AI Python SDK.
20 |
21 | ```bash
22 | pip install livepeer-ai
23 | ```
24 |
25 |
26 |
27 |
28 | The first step is to initialize the SDK (with your API key if required).
29 |
30 | ```python
31 | from livepeer_ai import Livepeer
32 |
33 | s = Livepeer(
34 | http_bearer="",
35 | )
36 | ```
37 |
38 |
39 |
40 | Now that you have the SDK installed and initialized, you can use it to request one of the [available AI services](/ai/pipelines/overview).
41 |
42 | ```python
43 | from livepeer_ai import Livepeer
44 |
45 | s = Livepeer(
46 | http_bearer="",
47 | )
48 |
49 | res = s.generate.text_to_image(request={
50 | "prompt": "",
51 | })
52 |
53 | if res.image_response is not None:
54 | # handle response
55 | pass
56 | ```
57 |
58 |
59 |
60 |
61 |
66 | See the examples on GitHub.
67 |
68 |
69 |
70 |
71 | ## Next steps
72 |
73 | Check out the Livepeer AI [API Reference](/ai/api-reference) to learn more about the
74 | Livepeer AI API and the Python SDK.
75 |
--------------------------------------------------------------------------------
/ai/whats-new.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "What's New"
3 | icon: "rocket"
4 | url: "https://livepeer-ai.productlane.com/changelog"
5 | ---
6 |
--------------------------------------------------------------------------------
/api-reference/asset/delete.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Delete an asset"
3 | openapi: "DELETE /asset/{assetId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/asset/get-all.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve assets"
3 | openapi: "GET /asset"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/asset/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve an asset"
3 | openapi: "GET /asset/{assetId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/asset/update.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Update an asset"
3 | openapi: "PATCH /asset/{assetId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/asset/upload-via-url.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Upload asset via URL"
3 | openapi: "POST /asset/upload/url"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/asset/upload.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Upload an asset"
3 | openapi: "POST /asset/request-upload"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/generate/audio-to-text.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Audio To Text"
3 | openapi: "POST /api/beta/generate/audio-to-text"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/generate/image-to-image.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Image To Image"
3 | openapi: "POST /api/beta/generate/image-to-image"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/generate/image-to-text.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Image To Text"
3 | openapi: "POST /api/beta/generate/image-to-text"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/generate/image-to-video.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Image To Video"
3 | openapi: "POST /api/beta/generate/image-to-video"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/generate/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | description:
3 | "The Generate API is used to run generative AI models."
4 | ---
5 |
6 | These APIs implement the [Livepeer AI Gateway API spec](/ai/api-reference) and
7 | are served under the Studio platform suite. They are prefixed with
8 | `/api/beta/generate` and are used to run AI models to generate content.
9 |
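10 | For example, a text-to-image generation request can be made as sketched below,
11 | assuming the Studio API base URL `https://livepeer.studio/api` and a valid API
12 | key:
13 |
14 | ```bash
15 | curl -X POST "https://livepeer.studio/api/beta/generate/text-to-image" \
16 |   -H "Authorization: Bearer $LIVEPEER_API_KEY" \
17 |   -H "Content-Type: application/json" \
18 |   -d '{"prompt": "A cat wearing a hat"}'
19 | ```
20 |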
--------------------------------------------------------------------------------
/api-reference/generate/segment-anything-2.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Segment Anything 2"
3 | openapi: "POST /api/beta/generate/segment-anything-2"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/generate/text-to-image.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Text To Image"
3 | openapi: "POST /api/beta/generate/text-to-image"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/generate/text-to-speech.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Text to Speech"
3 | openapi: "POST /api/beta/generate/text-to-speech"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/generate/upscale.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Upscale"
3 | openapi: "POST /api/beta/generate/upscale"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/multistream/create.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Create a multistream"
3 | openapi: "POST /multistream/target"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/multistream/delete.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Delete a multistream"
3 | openapi: "DELETE /multistream/target/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/multistream/get-all.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve all multistreams"
3 | openapi: "GET /multistream/target"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/multistream/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve a multistream"
3 | openapi: "GET /multistream/target/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/multistream/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | description:
3 | "The Multistream target API is used to create, retrieve, update, delete
4 | multi-stream targets object from pipeline."
5 | ---
6 |
7 | ### Multistream target object
8 |
9 |
10 | Unique identifier for the multistream target.
11 |
12 |
13 | Name of the multistream target.
14 |
15 |
16 | Livepeer-compatible multistream target URL (RTMP(S) or SRT). This URL is used
17 | for streaming to the target platform.
18 |
19 |
20 | Indicates if this multistream target is disabled. If true, it will not be used
21 | for pushing even if configured in a stream object.
22 |
23 |
24 | Timestamp (in milliseconds) at which the multistream target object was
25 | created.
26 |
27 |
28 |
29 | ```json Response
30 | {
31 | "id": "09F8B46C-61A0-4254-9875-F71F4C605BC7",
32 | "name": "My Multistream Target",
33 | "url": "rtmps://live.my-service.tv/channel/secretKey",
34 | "disabled": false,
35 | "createdAt": 1587667174725
36 | }
37 | ```
38 |
39 |
40 |
--------------------------------------------------------------------------------
/api-reference/multistream/update.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Update a multistream"
3 | openapi: "PATCH /multistream/target/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/overview/authentication.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Authentication
3 | description: "Learn more about Livepeer's API."
4 | icon: "lock"
5 | ---
6 |
7 | Livepeer API uses API keys to verify and authorize requests. You can manage and
8 | review your API keys through Livepeer Studio. You need to pass your API key in
9 | the `Authorization` header with a `Bearer` prefix while sending a request.
10 |
11 | ```
12 | Bearer YOUR_API_KEY
13 | ```
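
For example, here is a minimal sketch of an authenticated request made from a backend, assuming the `https://livepeer.studio/api` base URL and an API key stored in a `LIVEPEER_API_KEY` environment variable:

```ts
// Fetch all livestreams with a Bearer-authenticated request (Node 18+ has a global fetch).
const apiKey = process.env.LIVEPEER_API_KEY;

const res = await fetch("https://livepeer.studio/api/stream", {
  headers: {
    // The API key is passed in the Authorization header with a Bearer prefix.
    Authorization: `Bearer ${apiKey}`,
  },
});

if (!res.ok) {
  throw new Error(`Request failed: ${res.status}`);
}

const streams = await res.json();
console.log(streams);
```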
14 |
15 | It's important to note that your API keys come with significant privileges, so
16 | it's essential to keep them safe and secure! Refrain from sharing your secret
17 | API keys on GitHub or in other publicly accessible places.
18 |
19 | By default, API keys can only be used from a backend server. This is to ensure
20 | maximum security and to prevent you from accidentally exposing your account by
21 | including the secret API key in a public web page.
22 |
23 | ### CORS-Enabled Keys
24 |
25 |
26 | Please read the documentation below in its entirety before using CORS-enabled
27 | API keys. **There is a different security model for CORS keys.**
28 |
29 |
30 | Studio supports the creation of CORS-enabled API keys. This is a special option
31 | when generating an API key which allows a webpage to make requests **directly**
32 | to Studio, as opposed to coming from your backend.
33 |
34 | #### Security with CORS Keys
35 |
36 | **The security model is different for CORS-enabled API keys.** Since any user
37 | has access to these keys, the IDs of assets and streams **must** be kept secret
38 | from anyone who should not have admin control over them. For instance, a viewer
39 | should only have access to the playback ID, since knowing the asset ID (together
40 | with the CORS-enabled API key, which is embedded in the webpage) allows them to
41 | make changes to the asset.
42 |
43 | This is the same for streams - if a user has access to a stream ID alongside the
44 | CORS API key, they can modify the stream or view the stream key. If a viewer had
45 | access to the stream ID + CORS API key, they could hijack the stream. **A
46 | `playbackId` should be exposed to the viewer only.**
47 |
48 | 
49 |
--------------------------------------------------------------------------------
/api-reference/overview/introduction.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Introduction
3 | description: "Learn more about Livepeer's API."
4 | icon: "hand-wave"
5 | ---
6 |
7 | Welcome to the Livepeer API reference docs! Here you'll find all the endpoints
8 | exposed on the standard Livepeer API, learn how to use them and what they
9 | return.
10 |
11 | The Livepeer API is organized around REST, has predictable resource-oriented
12 | URLs, accepts JSON request bodies, returns JSON-encoded responses, and uses
13 | standard HTTP response codes, authentication, and verbs.
14 |
--------------------------------------------------------------------------------
/api-reference/playback/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve Playback Info"
3 | openapi: "GET /playback/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/create-user.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Create a user"
3 | openapi: "POST /room/{id}/user"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/create.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Create a room"
3 | openapi: "POST /room"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/delete.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Delete a room"
3 | openapi: "DELETE /room/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/get-user.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve a user"
3 | openapi: "GET /room/{id}/user/{userId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve a room"
3 | openapi: "GET /room/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/remove-user.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Remove a user"
3 | openapi: "DELETE /room/{id}/user/{userId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/start-egress.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Start RTMP egress"
3 | openapi: "POST /room/{id}/egress"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/stop-egress.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Stop RTMP egress"
3 | openapi: "DELETE /room/{id}/egress"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/update-user.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Update a user"
3 | openapi: "PUT /room/{id}/user/{userId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/room/update.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Update a stream"
3 | openapi: "PATCH /stream/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/session/get-all.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve all sessions"
3 | openapi: "GET /session"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/session/get-clip.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve clips of a session"
3 | openapi: "GET /session/{id}/clips"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/session/get-recording.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve recorded sessions"
3 | openapi: "GET /stream/{parentId}/sessions"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/session/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve a session"
3 | openapi: "GET /session/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/signing-key/create.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Create a signing key"
3 | openapi: "POST /access-control/signing-key"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/signing-key/delete.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Delete a signing key"
3 | openapi: "DELETE /access-control/signing-key/{keyId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/signing-key/get-all.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve signing keys"
3 | openapi: "GET /access-control/signing-key"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/signing-key/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve a signing key"
3 | openapi: "GET /access-control/signing-key/{keyId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/signing-key/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | description:
3 | "The Access control API is used to create, retrieve, update, delete signing
4 | keys object from pipeline."
5 | ---
6 |
7 | ### Signing key Object
8 |
9 |
10 | Unique identifier for the signing key.
11 |
12 |
13 | Name of the signing key.
14 |
15 |
16 | Timestamp (in milliseconds) at which the signing key was created.
17 |
18 |
19 | Timestamp of the last activity with the signing key.
20 |
21 |
22 | The public key portion of the signing key.
23 |
24 |
25 | Indicates whether the signing key is disabled.
26 |
27 |
28 | The private key portion of the signing key.
29 |
30 |
31 |
32 | ```json Response
33 | {
34 | "id": "78df0075-b5f3-4683-a618-1086faca35dc",
35 | "name": "My signing key",
36 | "createdAt": 1587667174725,
37 | "lastSeen": 1587667174725,
38 | "publicKey": "LS0tLS1CRUdJTiBQUklWQVRFIBtFWS0tLS0tCk1JR0hBZ0VBTUJNR0J5cUdTTTQ5QWdFR0NDcUdTTTQ5QXdFSEJHMHdhd0lCQVFRZ1RDRzhRWDZKdkR0eC95ZDMKdlpkUHJKR25LcjhiWHRsdXNIL2FOYW5XdHEraFJBTkNBQVE0QnZ6ODI2L2lDaXV1U0NiZVkwc3FmOXljYWh0OApDRFYyUFF2bDFVM1FLSVRBcWRpaktLa0FSUFVkcWRrYWZzR21PMzBDeElPaDBLNWJSQW5XQzd4KwotLS0tLUVORCBQUklWQVRFIEtFWS0tLS0tCg==",
39 | "disabled": false,
40 | "privateKey": "LS0tLS1CRUdJTiBQUklWQVRFIBtFWS0tLS0tCk1JR0hBZ0VBTUJNR0J5cUdTTTQ5QWdFR0NDcUdTTTQ5QXdFSEJHMHdhd0lCQVFRZ1RDRzhRWDZKdkR0eC95ZDMKdlpkUHJKR25LcjhiWHRsdXNIL2FOYW5XdHEraFJBTkNBQVE0QnZ6ODI2L2lDaXV1U0NiZVkwc3FmOXljYWh0OApDRFYyUFF2bDFVM1FLSVRBcWRpaktLa0FSUFVkcWRrYWZzR21PMzBDeElPaDBLNWJSQW5XQzd4KwotLS0tLUVORCBQUklWQVRFIEtFWS0tLS0tCg=="
41 | }
42 | ```
43 |
44 |
45 |
--------------------------------------------------------------------------------
/api-reference/signing-key/update.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Update a signing key"
3 | openapi: "PATCH /access-control/signing-key/{keyId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/add-multistream-target.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Add a multistream target"
3 | openapi: "POST /stream/{id}/create-multistream-target"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/create-clip.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Create a clip"
3 | openapi: "POST /clip"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/create.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Create a livestream"
3 | openapi: "POST /stream"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/delete-multistream-target.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Remove a multistream target"
3 | openapi: "DELETE /stream/{id}/multistream/{targetId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/delete.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Delete a livestream"
3 | openapi: "DELETE /stream/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/get-all.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve all livestreams"
3 | openapi: "GET /stream"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/get-clip.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve clips of a livestream"
3 | openapi: "GET /stream/{id}/clips"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve a livestream"
3 | openapi: "GET /stream/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/terminate.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Terminates a livestream"
3 | openapi: "DELETE /stream/{id}/terminate"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/stream/update.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Update a livestream"
3 | openapi: "PATCH /stream/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/task/get-all.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve tasks"
3 | openapi: "GET /task"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/task/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve a task"
3 | openapi: "GET /task/{taskId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/transcode/create.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Transcode a video"
3 | openapi: "POST /transcode"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/viewership/get-creators-metrics.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Query creator viewership metrics"
3 | openapi: "GET /data/views/query/creator"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/viewership/get-public-total-views.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Query public total views metrics"
3 | openapi: "GET /data/views/query/total/{playbackId}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/viewership/get-realtime-viewership.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Query realtime viewership"
3 | openapi: "GET /data/views/now"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/viewership/get-usage-metrics.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Query usage metrics"
3 | openapi: "GET /data/usage/query"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/viewership/get-viewership-metrics.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Query viewership metrics"
3 | openapi: "GET /data/views/query"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/webhook/create.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Create a webhook"
3 | openapi: "POST /webhook"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/webhook/delete.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Delete a webhook"
3 | openapi: "DELETE /webhook/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/webhook/get-all.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve all webhooks"
3 | openapi: "GET /webhook"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/webhook/get.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Retrieve a webhook"
3 | openapi: "GET /webhook/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/api-reference/webhook/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | description:
3 | "The Webhooks API is used to create, retrieve, update, delete webhooks object
4 | from pipeline."
5 | ---
6 |
7 | ### Webhook Object
8 |
9 |
10 | Unique identifier for the webhook.
11 |
12 |
13 | Name of the webhook.
14 |
15 |
16 | Timestamp (in milliseconds) at which the webhook object was created.
17 |
18 |
19 | List of events that the webhook subscribes to. Possible events include
20 | stream.started, stream.detection, recording.ready, etc.
21 |
22 |
23 | URL of the webhook endpoint.
24 |
25 |
26 | Shared secret used to sign the webhook payload.
27 |
28 |
29 | StreamId of the stream to which the webhook is applied.
30 |
31 |
32 | Status of the webhook, including last failure and last triggered timestamp.
33 |
34 | Details about the last failure of the webhook, including timestamp, error
35 | message, response, and status code.
36 |
37 |
38 | Timestamp (in milliseconds) at which the webhook was last triggered.
39 |
40 |
41 |
42 |
43 | ```json Response
44 | {
45 | "id": "de7818e7-610a-4057-8f6f-b785dc1e6f88",
46 | "name": "My webhook",
47 | "createdAt": 1587667174725,
48 | "events": [
49 | "stream.started",
50 | "recording.ready"
51 | ],
52 | "url": "https://webhook.example.com",
53 | "sharedSecret": "mySharedSecret",
54 | "streamId": "de7818e7-610a-4057-8f6f-b785dc1e6f88",
55 | "status": {
56 | "lastFailure": {
57 | "timestamp": 1587667174725,
58 | "error": "Error message",
59 | "response": "Response body",
60 | "statusCode": 500
61 | },
62 | "lastTriggeredAt": 1587667174725
63 | }
64 | }
65 | ```
66 |
67 |
68 |
--------------------------------------------------------------------------------
/api-reference/webhook/update.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Update a webhook"
3 | openapi: "PUT /webhook/{id}"
4 | ---
5 |
--------------------------------------------------------------------------------
/developers/core-concepts/core-api/access-control.mdx:
--------------------------------------------------------------------------------
1 | Livepeer supports access control on streams and assets, which lets developers
2 | restrict who can view media on their platform, depending on their application
3 | needs. The API supports both webhook and JWT-based access control.
4 |
5 | #### Webhook
6 |
7 | The recommended option for application access control is to use webhooks. It
8 | has the simplest integration path and is familiar to developers.
9 |
10 | A user can mark a stream or asset as "gated", which means that a viewer cannot
11 | access the stream without a token.
12 |
13 | When stream playback is attempted, Studio sends a `playback.accessControl`
14 | webhook event, and applications can register a webhook handler to respond to
15 | this event.
16 |
17 | If the webhook responds with a `2XX` HTTP response code, Livepeer will enable
18 | playback. If it is non-2XX, playback will be rejected. The access control check
19 | happens at the playback layer, which means that even if a user gets access to
20 | the underlying `playbackId`, they will not be able to play the media without
21 | valid credentials.
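
As a rough illustration, the webhook handler only needs to answer with the right status code. The sketch below assumes an Express server and a hypothetical `isViewerAllowed` check supplied by your application:

```ts
import express from "express";

const app = express();
app.use(express.json());

// Hypothetical application-specific check (e.g. token gating, subscriptions).
async function isViewerAllowed(payload: unknown): Promise<boolean> {
  // ... your own logic here ...
  return true;
}

// Endpoint registered in Studio for the `playback.accessControl` webhook event.
app.post("/livepeer/access-control", async (req, res) => {
  const allowed = await isViewerAllowed(req.body);

  // A 2XX response enables playback; any non-2XX response rejects it.
  if (allowed) {
    res.status(200).end();
  } else {
    res.status(403).end();
  }
});

app.listen(3000);
```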
22 |
23 | Using this framework, a blockchain-agnostic Token Gating feature can be easily
24 | implemented. Rich access control providers, like [Lit](https://litprotocol.com/), can be
25 | integrated using webhook access control.
26 |
27 | #### JWT
28 |
29 | JSON Web Tokens (JWTs) offer another secure mechanism for developers to
30 | implement access control on their platform's media content.
31 |
32 | To use this method, a content piece — a stream or an asset — can be marked with
33 | a JWT-specific playback policy. This means viewers will require a valid JWT to
34 | access the content.
35 |
36 | Upon a playback request, Livepeer checks the provided JWT for validity. The
37 | validation process involves ensuring the JWT was signed with the correct
38 | `signingKey` and that it hasn't expired. If the JWT is valid, access to the
39 | content is granted; otherwise, it's denied.
40 |
41 | JWTs provide the flexibility of defining various claims or data within the token
42 | itself, which can be used to carry additional metadata or access control logic.
43 | For instance, specific viewer information or expiration details can be embedded
44 | directly in the JWT.
45 |
46 | To implement this feature, developers can utilize the SDK clients to create
47 | JWT-gated content. Upon content creation, developers can then use custom API
48 | routes to sign and issue JWTs for authorized viewers, ensuring that only those
49 | with the correct tokens can view the content.
50 |
51 | Depending on the player used, the JWT can either be passed as a specific prop or
52 | appended to the playback URL as a query parameter to authenticate the viewer.
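
For instance, with the hosted player the signed JWT is commonly appended to the embed URL as a query parameter. The exact prop or parameter name depends on the player you use, so treat the snippet below as a sketch that assumes a `jwt` query parameter:

```ts
// Build an embed URL for JWT-gated playback, assuming the hosted player
// accepts the token via a `jwt` query parameter.
function buildGatedEmbedUrl(playbackId: string, jwt: string): string {
  const url = new URL("https://lvpr.tv");
  url.searchParams.set("v", playbackId);
  url.searchParams.set("jwt", jwt);
  return url.toString();
}
```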
53 |
--------------------------------------------------------------------------------
/developers/core-concepts/core-api/multistream.mdx:
--------------------------------------------------------------------------------
1 | A multistream facilitates the simultaneous broadcast of a source stream, along
2 | with its transcoded renditions, to various RTMP/RTMPS destinations like Twitch,
3 | Facebook Live, and YouTube Live. This feature ensures that users can maximize
4 | their audience reach across multiple platforms with a single streaming session.
5 |
6 | When setting up a multistream, users can use the Livepeer Studio Dashboard,
7 | interface directly with the Livepeer Studio API, or rely on the Livepeer
8 | Studio SDKs. These offer comprehensive management tools to configure, modify, or
9 | delete specific multistream targets linked with individual streams. It's
10 | important to note that configurations are stream-specific; hence, they don't
11 | automatically transfer when generating new streams.
12 |
13 | The essential parameters for setting up a target include a name, the ingest URL,
14 | and, if relevant, a stream key. Users can also specify which rendition
15 | profile they intend to dispatch to the target.
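
As a sketch of the API path (assuming the `https://livepeer.studio/api` base URL and the `POST /multistream/target` endpoint from the API reference), creating a standalone target might look like this:

```ts
// Create a multistream target; the stream key is typically embedded in the target URL.
const res = await fetch("https://livepeer.studio/api/multistream/target", {
  method: "POST",
  headers: {
    Authorization: `Bearer ${process.env.LIVEPEER_API_KEY}`,
    "Content-Type": "application/json",
  },
  body: JSON.stringify({
    name: "My Multistream Target",
    url: "rtmps://live.my-service.tv/channel/secretKey",
  }),
});

const target = await res.json();
console.log(target.id);
```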
16 |
17 | After creating a multistream target, it can be toggled as active/inactive. Also,
18 | both the dashboard and API provide options for editing or deleting these
19 | targets. **To initiate multistreaming, the multistream target needs to be linked
20 | to a stream.**
21 |
22 | #### Monitoring
23 |
24 | For performance monitoring and understanding stream health, Studio offers
25 | real-time indicators through its dashboard and API, showing whether a
26 | multistream target is currently active or offline. The status updates might
27 | experience a slight delay before the stream's live status gets mirrored on the
28 | destination platform. In Studio's "Health" tab, users can obtain a detailed view
29 | of the ingest rate for the active source stream.
30 |
31 | #### Webhooks
32 |
33 | Additionally, Livepeer has three webhooks to monitor multistream targets:
34 |
35 | 1. `multistream.connected`: Indicates a successful connection to the multistream
36 | target and confirms the stream's live status on the corresponding service.
37 | 2. `multistream.error`: Flags issues during the connection process, suggesting
38 | potential configuration errors or problems with the destination platform.
39 | 3. `multistream.disconnected`: Notifies users when a stream concludes and
40 | multistreaming to a particular target has also ended.
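
A webhook consumer might branch on these three event names. The payload shape below is only a hypothetical sketch; consult your actual webhook payloads for the real fields:

```ts
// Hypothetical minimal payload shape for multistream webhook events.
type MultistreamWebhookEvent = {
  event:
    | "multistream.connected"
    | "multistream.error"
    | "multistream.disconnected";
};

function handleMultistreamEvent({ event }: MultistreamWebhookEvent): void {
  switch (event) {
    case "multistream.connected":
      console.log("Target connected; stream is live on the destination.");
      break;
    case "multistream.error":
      console.log("Connection failed; check the target configuration.");
      break;
    case "multistream.disconnected":
      console.log("Stream ended; multistreaming to this target stopped.");
      break;
  }
}
```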
41 |
--------------------------------------------------------------------------------
/developers/core-concepts/livepeer-network/delegators.mdx:
--------------------------------------------------------------------------------
1 | Delegators are a crucial group within the Livepeer protocol, playing a vital but
2 | passive role in the system's operations and security. They are individuals or
3 | entities who hold Livepeer tokens and, rather than actively processing or
4 | broadcasting video content themselves, opt to back and support other active
5 | participants - Orchestrators. By "staking" their tokens, which can be equated to
6 | placing a deposit, Delegators essentially lock up their tokens for a designated
7 | period, signaling their trust and support for chosen
8 | [Orchestrators](/developers/core-concepts/livepeer-network/orchestrators).
9 |
10 | The act of staking helps in multiple ways. Firstly, it reinforces the network's
11 | overall security by ensuring participants have a vested interest in the system's
12 | proper functioning. The locked tokens can serve as a form of collateral,
13 | ensuring the system remains resistant to malicious intentions or attacks.
14 |
15 |
16 | Check out the [Livepeer primer](https://livepeer.org/primer) to learn more about
17 | the Livepeer network.
18 |
19 |
20 | #### Livepeer Token (LPT)
21 |
22 | Delegators use the Livepeer token (LPT), which is ERC-20 compliant. It can be
23 | purchased through various channels, including trading platforms like
24 | [Uniswap](https://app.uniswap.org). The distribution of these tokens was
25 | initially executed via a "Merkle Mine" technique during the network's inception
26 | phase. The token's value is subject to inflation based on an algorithmic
27 | issuance model over time.
28 |
29 | #### Governance
30 |
31 | Apart from security, LPT staking also empowers Delegators with votes in the
32 | network's governance. They can weigh in on protocol proposals, thus making
33 | collaborative decisions on the network's future direction. Additionally, the
34 | amount of staked and delegated tokens determines how tasks or jobs are
35 | distributed within the network, thereby acting as a mechanism for work
36 | coordination.
37 |
38 | For those interested in exploring or managing their staking activities, the
39 | [Livepeer Explorer](https://explorer.livepeer.org) offers a comprehensive
40 | interface, ensuring transparency and ease of operations in the staking process.
41 |
--------------------------------------------------------------------------------
/developers/core-concepts/livepeer-network/gateways.mdx:
--------------------------------------------------------------------------------
1 | A Gateway (formerly known as a Broadcaster) on the Livepeer network
2 | is a node that uses the network for video streaming or generative AI inference.
3 | Running a gateway is simple, and it exposes an API that allows you to build
4 | your video application on top of Livepeer.
5 | Under the hood, gateways are responsible for routing transcoding or
6 | AI inference tasks to the appropriate
7 | [Orchestrator](/developers/core-concepts/livepeer-network/orchestrators)
8 | nodes for processing. By running the
9 | [`go-livepeer` client](https://github.com/livepeer/go-livepeer), individuals can
10 | join the Livepeer network as Gateways.
11 |
12 |
13 | Check out the [Livepeer primer](https://livepeer.org/primer) to learn more
14 | about the Livepeer network.
15 |
16 |
17 | #### Configuration
18 |
19 | Gateways do not need to stake
20 | [LPT](/developers/core-concepts/livepeer-network/delegators#livepeer-token-lpt)
21 | to participate in the network. They only
22 | [require enough ETH](/gateways/guides/fund-gateway) to cover the cost of
23 | transcoding and AI inference jobs. Unlike Orchestrators, Gateways do not need a
24 | GPU to participate. Any machine with a decent CPU and sufficient bandwidth can
25 | operate as a Gateway.
26 |
27 | Gateways are essential components of the Livepeer ecosystem, serving as the
28 | bridge between the Orchestrators performing the work and the clients requesting
29 | the work. Many entities may run gateway nodes and make them available to others
30 | as a hosted service. They often provide additional services such as content delivery
31 | networks (CDNs) and base currency subscriptions, ensuring that jobs are
32 | delivered swiftly to the right client and that clients don't need to set up
33 | crypto wallets themselves.
34 |
--------------------------------------------------------------------------------
/developers/core-concepts/livepeer-network/orchestrators.mdx:
--------------------------------------------------------------------------------
1 | An Orchestrator on the Livepeer network is an entity or node that facilitates
2 | the video processing tasks such as transcoding. Orchestrators provide their
3 | computational resources to assist gateways and developers in
4 | transcoding/delivering videos. By running the
5 | [`go-livepeer` client](https://github.com/livepeer/go-livepeer), individuals can
6 | join the Livepeer network as Orchestrators.
7 |
8 |
9 | Check out the [Livepeer primer](https://livepeer.org/primer) to learn more about
10 | the Livepeer network.
11 |
12 |
13 | 
14 |
15 | #### Micropayments
16 |
17 | Orchestrators play a pivotal role in transcoding and distributing video streams
18 | on the Livepeer network. They advertise the price they charge for their video
19 | processing services, and when they receive a transcode job, they perform
20 | transcoding. In return, they receive payments in the form of a
21 | [probabilistic micropayment](https://medium.com/livepeer-blog/a-primer-on-livepeers-probabilistic-micropayments-e16788b29331).
22 |
23 | In order to become an active Orchestrator and be eligible for payments, they
24 | need to stake a certain amount of LPT. The active set of Orchestrators comprises
25 | the top 100 Orchestrators with the highest stake on the network. Activation,
26 | management of rates, and other settings can be done through tools like
27 | `livepeer_cli`.
28 |
29 | #### Configuration
30 |
31 | Orchestrators leverage different parts of their GPUs (typically using
32 | [Nvidia's NVENC/NVDEC](https://developer.nvidia.com/blog/nvidia-ffmpeg-transcoding-guide/))
33 | to handle video encoding tasks without significantly interrupting other tasks,
34 | like cryptocurrency mining. This setup allows for optimal utilization of the
35 | Orchestrator's resources.
36 |
37 | For ease of operation and to ensure they are accessible for jobs, Orchestrators
38 | are required to set up specific parameters and ensure they have the necessary
39 | prerequisites, including proper networking configurations, accessibility to
40 | Arbitrum nodes, sufficient bandwidth, and more. They can also manage their ETH
41 | accounts for transactions and rewards either manually or by letting the Livepeer system
42 | handle it automatically.
43 |
44 | Orchestrators are essential components of the Livepeer ecosystem, offering their
45 | computational resources for video processing while earning rewards for their
46 | contributions.
47 |
--------------------------------------------------------------------------------
/developers/core-concepts/player/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Player
3 | ---
4 |
5 | The Livepeer Player is a drop-in React component that you can put in your web or
6 | native mobile application (with React Native) to play assets/livestreams. It
7 | provides a responsive UI based on video player dimensions and playback type,
8 | poster image support, and modern video player capabilities.
9 |
10 | An embeddable version is also hosted at `https://lvpr.tv`, with many of the
11 | props exposed via query params, so that you can easily embed it using an iframe.
12 |
13 | #### Playback
14 |
15 | The Player is optimized for low latency playback:
16 |
17 | - [Livestream playback](/developers/guides/playback-a-livestream) prioritizes
18 | low latency WebRTC using WHEP (by default)
19 | - [Asset playback](/developers/guides/playback-an-asset) prioritizes
20 | [MP4 renditions](/sdks/react/player/Root#mp4-playback-urls), which are cached
21 | with a CDN for rapid delivery
22 |
23 | If any of these options fail, it will fall back to HLS playback, using
24 | [HLS.js](https://github.com/video-dev/hls.js). This ensures a smooth viewing
25 | experience for your users.
26 |
27 | There is also [custom retry logic](/sdks/react/player/Root#technical-details) in
28 | the Player to make sure that if a livestream is offline or there are any
29 | transient network issues, playback will be retried.
30 |
31 | #### Embed
32 |
33 | To embed the Player, a URL can be added to an iframe similar to:
34 |
35 | ```bash
36 | https://lvpr.tv?v={playbackId}&lowLatency=true
37 | ```
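
In a React app, the embeddable player could be dropped in with an iframe along these lines (the `allow` and sizing attributes are only illustrative defaults):

```tsx
// Embed the hosted player for a given playback ID.
export function EmbeddedPlayer({ playbackId }: { playbackId: string }) {
  return (
    <iframe
      src={`https://lvpr.tv?v=${playbackId}&lowLatency=true`}
      allow="autoplay; encrypted-media; picture-in-picture; fullscreen"
      style={{ width: "100%", aspectRatio: "16 / 9", border: 0 }}
    />
  );
}
```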
38 |
39 | To see all of the available query parameters, see the
40 | [embeddable player](/developers/guides/playback-an-asset#embeddable-player)
41 | section.
42 |
43 | #### Metrics
44 |
45 | The Livepeer Player reports engagement metrics without any extra configuration.
46 | These metrics can then be queried using the
47 | [viewership API](/developers/guides/get-engagement-analytics-via-api) and shown
48 | alongside the Player, for display of view count and other valuable viewership
49 | data.
50 |
--------------------------------------------------------------------------------
/developers/core-concepts/studio/in-browser-broadcast.mdx:
--------------------------------------------------------------------------------
1 | An in-browser broadcast refers to the process of transmitting live video content
2 | directly from a user's web browser to online audiences in real-time, facilitated
3 | by Livepeer's low latency WebRTC technology. This method eliminates the need for
4 | specialized broadcasting equipment or external software such as OBS, allowing
5 | content creators to engage with their viewers effortlessly.
6 |
7 | 
8 |
9 | 1. **Initialization:**
10 |
11 | - A stream is created through Livepeer's API.
12 | - A WebRTC connection is created within a web application using the stream key.
13 | [SDP negotiation using WHIP](https://datatracker.ietf.org/doc/html/draft-ietf-wish-whip)
14 | creates a low-latency WebRTC connection to Livepeer's infrastructure.
15 | - Optionally, a developer can use the React Broadcast component to do this
16 | automatically, given a stream key.
17 |
18 | 2. **WebRTC Broadcast:**
19 |
20 | - The user's webcam and microphone capture live video and audio content directly
21 | from the browser. This can also include screen capture, using browser APIs.
22 |
23 | 3. **Stream Presentation:**
24 |
25 | - The live video content is made accessible to viewers through an embedded
26 | iframe or player.
27 | - The iframe references a specific URL associated with the stream playback ID,
28 | allowing viewers to watch the broadcast.
29 |
30 | 4. **STUN/TURN Server Utilization:**
31 |
32 | - STUN (Session Traversal Utilities for NAT) and TURN (Traversal Using Relays
33 | around NAT) servers are utilized to enhance network connectivity, ensuring
34 | minimal disruptions due to firewalls or network challenges. **STUN/TURN are
35 | required for broadcasting.**
36 |
37 | In-browser broadcasting empowers content creators to effortlessly share live
38 | video with their audiences, allowing real-time engagement and interaction
39 | directly from the browser.
40 |
--------------------------------------------------------------------------------
/developers/guides/get-engagement-analytics-via-grafana.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Engagement via Grafana"
3 | description: "Learn how to visualize your engagement metrics with Grafana"
4 | ---
5 |
6 | In May 2023, we released powerful engagement metrics offering detailed
7 | information on viewer behavior and playback quality on your platform. The API
8 | includes engagement metrics such as view counts and watch time, as well as
9 | performance metrics such as error rate, time to first frame, rebuffer ratio, and
10 | exit-before-starts across a variety of dimensions. For more details, check out
11 | [the docs here][1].
12 |
13 | In this tutorial we will introduce a simple, free, and customizable method to
14 | quickly visualize the core metrics and dimensions of the API.
15 |
16 | ## Prerequisites
17 |
18 | Before you begin this tutorial, make sure you have:
19 |
20 | 1. Integrated the necessary components to capture viewership data. Details
21 | [here][2].
22 | 2. Viewed videos via (1) to collect data.
23 | 3. Created a [CORS-enabled API key][3].
24 |    The key must allow CORS access from your Grafana origin, or from all (`*`).
25 | 4. Set up a Grafana account and workspace with permissions to add a new
26 |    dashboard (and datasource/connection, if necessary). A free account is
27 |    sufficient and available at [grafana.com][4].
28 | 5. Installed the [`JSON API`][marcusolsson-json-datasource] plugin for Grafana.
29 |
30 | With that we are ready to set up our dashboard!
31 |
32 | ## Setting up the Engagement Dashboard
33 |
34 | - Login to Grafana
35 | - Click "Connections" > "Connect Data"
36 |
37 |
38 |
43 |
44 |
45 | - Set up your JSON API:
46 | 1. Name: e.g. "Livepeer Engagement Data"
47 | 2. URL:
48 | [`https://livepeer.studio/api/data/views/query`][5]
49 | 3. Authentication methods: Forward OAuth Identity
50 | 4. TLS Settings: Skip TLS certificate validation
51 | 5. Custom HTTP Headers
52 | 1. Header: "Authorization"
53 | 2. Value: `Bearer YOUR_API_KEY` (using your Full Access API key)
54 | - Save and Test
55 | - ["Import" the dashboard][6] from the official [Livepeer Studio Viewership
56 | Engagement dashboard][dashboard], using the JSON API datasource created
57 | above.
58 |
59 |
60 |
64 |
65 |
66 | - Rename the dashboard if you'd prefer
67 |
68 |
69 | [1]: /developers/guides/get-engagement-analytics-via-api
70 | [2]: /developers/guides/get-engagement-analytics-via-api#registering-views
71 | [3]: /api-reference/overview/authentication
72 | [4]: https://grafana.com/ "Grafana"
73 | [5]: /api-reference/viewership/get-viewership-metrics
74 | [6]: https://grafana.com/docs/grafana/latest/dashboards/build-dashboards/import-dashboards/#import-a-dashboard
75 | [dashboard]: https://grafana.com/grafana/dashboards/20511-livepeer-studio-user-engagement/
76 | [marcusolsson-json-datasource]: https://grafana.com/grafana/plugins/marcusolsson-json-datasource/ "JSON API Grafana Plugin"
77 |
--------------------------------------------------------------------------------
/developers/guides/listen-to-asset-events.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Listen to asset events"
3 | description: "Learn how to listen to asset events using Studio webhooks."
4 | ---
5 |
6 | Livepeer Studio uses webhooks to communicate with your application
7 | asynchronously when events for your asset occur. For example, you may want to
8 | know when an asset has been `uploaded` or is `ready`, so that you can surface
9 | this information to viewers.
10 |
11 | When these events happen, you can configure Studio to make a `POST` request to a
12 | webhook URL that you specify.
13 |
14 | ### Type of asset events
15 |
16 | | | |
17 | | --------------- | ------------------------------------------------------------------------------------------------------------------------ |
18 | | `asset.created` | This fires when an On Demand asset is created. |
19 | | `asset.updated` | This fires when an On Demand asset is updated. The asset payload will contain a playback URL when playback is available. |
20 | | `asset.ready` | This fires when an On Demand asset is ready. Playback will be available with all transcoded renditions. |
21 | | `asset.failed` | This fires when an On Demand asset fails during the upload or during processing. |
22 | | `asset.deleted` | This fires when an On Demand asset is deleted. |
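
As a loose sketch, a webhook endpoint could route these asset events by their `event` field (the payload shape below is simplified and assumed):

```ts
// Simplified payload: real webhook payloads carry more fields (ids, asset data, etc.).
type AssetWebhookPayload = {
  event:
    | "asset.created"
    | "asset.updated"
    | "asset.ready"
    | "asset.failed"
    | "asset.deleted";
  payload?: Record<string, unknown>;
};

function handleAssetEvent({ event, payload }: AssetWebhookPayload): void {
  switch (event) {
    case "asset.ready":
      // Playback is available with all transcoded renditions; surface it to viewers.
      console.log("Asset ready", payload);
      break;
    case "asset.failed":
      console.log("Upload or processing failed", payload);
      break;
    default:
      console.log(`Received ${event}`, payload);
  }
}
```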
23 |
24 | ### Set up a webhook endpoint
25 |
26 | The first step is to set up a webhook endpoint in your application. This is the
27 | URL that Livepeer Studio will send the event to - learn more about
28 | [setting up a webhook endpoint](/developers/guides/setup-and-listen-to-webhooks).
29 |
30 | ### Add a webhook URL to Livepeer Studio
31 |
32 | Log in to [Livepeer Studio](https://livepeer.studio/) and navigate to the
33 | [Developers/Webhooks](https://livepeer.studio/dashboard/developers/webhooks)
34 | page.
35 |
36 | 
37 |
38 | Click the "Create Webhook" button and enter the URL of the webhook endpoint. Select any
39 | asset event (with an `asset` prefix) and click "Create Webhook".
40 |
--------------------------------------------------------------------------------
/developers/guides/managing-projects.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Managing Projects"
3 | description:
4 | "Explore best practices for managing multiple environments in Livepeer Studio"
5 | ---
6 |
7 | 
8 |
9 | ## Overview
10 |
11 | Projects in Livepeer Studio allow for the organization of streams, assets, API
12 | keys, and usage within dedicated environments. This feature is helpful for
13 | separating staging and production environments, managing multiple applications,
14 | and ensuring efficient workflow within a single account.
15 |
16 | ## Why Use Projects?
17 |
18 | - **Separation of Environments**: Keep your staging and production environments
19 | separate to avoid conflicts and ensure reliable testing.
20 | - **Centralized Management for Multiple Applications**: Instead of juggling
21 | multiple accounts, you can build and manage separate applications from within
22 | the same account. This is ideal for developers and companies that operate
23 | multiple apps or brands and wish to streamline their management in one place.
24 |
25 | ## Getting Started with Projects
26 |
27 | ### Creating a New Project
28 |
29 | To start building with separate environments or applications, you'll need to
30 | create a new project. Here's how:
31 |
32 | 1. In the sidebar, click on the project dropdown at the top of the menu.
33 | 2. Choose **+ New project** from the dropdown list.
34 | 3. Enter a name for your new project when prompted.
35 | 4. Confirm the creation to set up your new environment.
36 |
37 |
38 | ### Renaming a Project
39 |
40 | 1. Within a project, navigate to the **Settings** section in the sidebar.
41 | 2. Find the project name field, make your changes, and save.
42 |
43 |
44 | ### Deleting a Project
45 |
46 | In the current version of Livepeer Studio, you **cannot** delete a project, but we are working on adding this feature soon.
47 |
48 | {/* #### Recommended Pre-Deletion Steps:
49 |
50 | 1. **Backup**: Confirm that you have secured backups of all essential data.
51 | 2. **Verification:** Ensure that the project is no longer in use and all
52 | necessary migrations have been completed.
53 |
54 | #### Deletion Steps:
55 |
56 | 1. Navigate to the **Settings** option in the project's sidebar to access the
57 | detailed settings view.
58 | 2. Scroll down to find the Delete Project section.
59 | 3. Click on the **Delete** button.
60 | 4. As a safety measure, you will be prompted to type in the name of the project
61 | to confirm your intent to delete.
62 | 5. After typing the name, proceed by confirming the deletion. */}
63 |
64 | ## Conclusion
65 |
66 | With the introduction of Projects, Livepeer Studio provides you with a powerful
67 | way to manage your application's live and on-demand streams. By leveraging the
68 | ability to create separate projects for staging and production, you can
69 | streamline your workflows and ensure a clean separation of your streaming
70 | environments.
71 |
--------------------------------------------------------------------------------
/developers/guides/thumbnails-live.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Get a livestream thumbnail"
3 | description: "Learn how to retrieve thumbnails for a live stream"
4 | ---
5 |
6 | For live streams, we provide a single updating thumbnail URL - it will return
7 | the **first frame of the most recent segment of video**.
8 |
9 | ## Create a stream
10 |
11 | Follow our previous guide on
12 | [creating a stream](/developers/guides/create-livestream) to get a stream key. A
13 | creator can then start a stream.
14 |
15 | ## Fetch playback info
16 |
17 | Within a minute of stream ingest starting, a thumbnail URL should be
18 | returned from the [playback info API endpoint](/api-reference/playback/get).
19 |
20 | Example response:
21 |
22 | ```json
23 | {
24 | "type": "live",
25 | "meta": {
26 | "live": 0,
27 | "source": [
28 | {
29 | "hrn": "HLS (TS)",
30 | "type": "html5/application/vnd.apple.mpegurl",
31 | "url": "https://livepeercdn.studio/hls/{playbackId}/index.m3u8"
32 | },
33 | {
34 | "hrn": "WebRTC (H264)",
35 | "type": "html5/video/h264",
36 | "url": "https://livepeercdn.studio/webrtc/{playbackId}"
37 | },
38 | {
39 | "hrn": "Thumbnail (PNG)",
40 | "type": "image/png",
41 | "url": "https://recordings-cdn-s.lp-playback.studio/hls/{playbackId}/{ID}/source/latest.png"
42 | }
43 | ]
44 | }
45 | }
46 | ```
47 |
48 | You should see an entry in the `source` array with the type `image/png`. This
49 | URL will always return the **latest thumbnail of your stream**.
50 |
51 |
52 | The thumbnail will always have a `hrn` of `Thumbnail (PNG)` and `type` of
53 | `image/png`.
54 |
55 |
56 | This thumbnail is also used in the Player - it is automatically parsed in
57 | [`getSrc`](/sdks/react/player/get-src) and passed as the video `poster`
58 | attribute, and updated every few seconds.
59 |
60 | ## Examples
61 |
62 | A common use for this feature is to show a live preview URL which constantly
63 | updates with the latest frame from the stream.
64 |
65 | ```tsx
66 | function PreviewComponent() {
67 | // Update to your thumbnail URL
68 | const thumbnailUrl =
69 | "https://recordings-cdn-s.lp-playback.studio/hls/61482gtjzi49cyvb/6cf39a0f-8b68-4ff8-8c7b-b105d6a6a9ed/source/latest.png";
70 |
71 | const [randomValue, setRandomValue] = useState(Date.now());
72 |
73 | // We will append a random value to the URL to force the browser to fetch the latest image (at least every 5 seconds)
74 | useEffect(() => {
75 | const interval = setInterval(() => {
76 | setRandomValue(Date.now()); // Update the randomValue every 5 seconds
77 | }, 5000);
78 |
79 | return () => clearInterval(interval); // Clean up the interval on unmount
80 | }, []);
81 |
82 | return <img src={`${thumbnailUrl}?${randomValue}`} alt="Live stream thumbnail" />;
83 | }
84 | ```
85 |
--------------------------------------------------------------------------------
/developers/introduction.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Introduction
3 | description: "Explore APIs, guides, and examples"
4 | icon: "hand-wave"
5 | ---
6 |
7 |
8 | If you're looking for documentation on Livepeer Pipelines, please navigate [here](https://pipelines.livepeer.org/docs)
9 |
10 |
11 |
12 | Learn how to add live and on-demand video experience to your app using Livepeer
13 | Studio. Effortlessly manage livestreams, video uploads, API keys, network usage,
14 | billing, and more.
15 |
16 |
17 |
18 | Get started with Livepeer Studio in less than 5 minutes.
19 |
20 |
21 |
22 | Learn how to add live or on-demand video experiences to your app.
23 |
24 |
25 |
26 | Explore the Livepeer Studio API
27 |
28 |
29 |
30 | Get up and running with SDKs and pre-built UI components
31 |
32 |
33 |
34 | ## Explore the Livepeer Studio SDKs
35 |
36 | Explore developer SDKs, pre-built UI components, and tools for interacting with
37 | the Livepeer Studio API.
38 |
39 | ### Server-side SDKs
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 | ### React Components
50 |
51 |
52 |
53 | Fully customizable video player component for seamless playback
54 |
55 |
56 |
57 | Full-featured broadcast component with controls, settings, and device
58 | selection
59 |
60 |
61 |
62 | [View all developer tools](/sdks/introduction)
--------------------------------------------------------------------------------
/developers/livepeer-studio-cli.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: CLI
3 | description: "Generate a new Livepeer app."
4 | icon: "rectangle-terminal"
5 | ---
6 |
7 | The Livepeer Studio CLI is a command line tool that helps you generate a new
8 | Livepeer app in just a few seconds.
9 |
10 | ## Getting Started
11 |
12 | First, create a Livepeer API key
13 | [here](https://livepeer.studio/dashboard/developers/api-keys). Next, use the CLI
14 | to generate a new project.
15 |
16 | ```sh
17 | npx @livepeer/create
18 | ```
19 |
20 | When prompted, enter your Livepeer **API key** and **Stream ID**.
21 |
22 | Once the app has been created, `cd` into the new directory and run the start
23 | command:
24 |
25 | ```sh
26 | npm run dev
27 | ```
28 |
--------------------------------------------------------------------------------
/favicon.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/favicon.png
--------------------------------------------------------------------------------
/gateways/guides/fund-gateway.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Fund The Livepeer Gateway
3 | icon: ethereum
4 | description:
5 | The following steps will walk you through adding funds to the newly created
6 | ETH account. This includes funding the ETH account on Ethereum Mainnet,
7 | bridging the funds to Arbitrum's L2 Network, and finally using the Livepeer
8 | CLI to allocate the proper deposit and reserve amounts.
9 | ---
10 |
11 | # Add Funds to Gateway Wallet
12 |
13 | In order to use the Gateway, you need to send ETH to your Gateway address on
14 | Ethereum Mainnet and then bridge it to Arbitrum's L2 Network.
15 |
16 |
17 | _If you have ETH on the Arbitrum L2 Network, you can simply transfer the funds
18 | to the newly created Gateway ETH Account._
19 |
20 |
21 | _Livepeer runs on Arbitrum's L2 Network and requires the funds to be
22 | bridged._
23 |
24 |
25 | # Bridge Funds to Arbitrum
26 |
27 | If you need to bridge ETH, you can use the official bridge at
28 | https://bridge.arbitrum.io/ or use an exchange that supports L2 transfers. For
29 | additional information on bridging, see the
30 | [Livepeer bridging guide](/delegators/guides/bridge-lpt-to-arbitrum).
31 |
32 | Once you have ETH on the Arbitrum network, transfer it to your newly created
33 | Gateway address.
34 |
35 | # Deposit Gateway Funds via Livepeer CLI
36 |
37 | We now need to divide the Gateway funds into a **Deposit** and a **Reserve**.
38 |
39 | In this guide we are using a total of 0.1 ETH. This is the minimum recommended
40 | amount and is best suited for testing.
41 |
42 | To calculate the price your Gateway will pay for transcoding, divide the
43 | _Reserve_ amount by 100. In our example, each payment will be 0.0003 ETH (0.03
44 | / 100).
45 |
46 | As you pay for transcoding the amount paid is subtracted from your _Deposit_, so
47 | make sure to monitor your _Deposit_ balance and top it off to keep your Gateway
48 | transcoding.
49 |
50 | ## Open the Livepeer CLI
51 |
52 | Open the Livepeer CLI by following the instructions for your platform.
53 |
54 | Choose **Option 11. Invoke "deposit broadcasting funds" (ETH)**
55 |
56 | - Enter 0.065 for the **Deposit** and 0.03 for the **Reserve** amounts when
57 | prompted.
58 |
59 | Choose **Option 1. Get node status** and confirm that the correct amounts are
60 | visible in the **BROADCASTER STATS** section.
61 |
--------------------------------------------------------------------------------
/gateways/guides/gateway-overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Overview
3 | description:
4 | This guide will walk you through the Livepeer Gateway installation and setup.
5 | Steps to install for Ubuntu Linux, Docker, and Windows are provided. Choose
6 | the environment you want, follow install instructions, then continue to the
7 | configuration section.
8 | icon: Rocket
9 | ---
10 |
11 |
12 | The Livepeer Gateway was previously called the Livepeer Broadcaster, so you
13 | will see some commands and labels that still use the Broadcaster name and
14 | haven't been updated in the code.
15 |
16 |
17 | # Quick Links
18 |
19 |
20 |
25 | Install & Configure Docker
26 |
27 |
32 | Install & Configure Linux Binary
33 |
34 |
39 | Install & Configure Windows Binary
40 |
41 |
46 | Specify the resolution and bitrate for your encoding ladder
47 |
48 |
53 | Add Funds to Gateway Wallet
54 |
55 |
60 | Publish and consume content to the Livepeer Gateway.
61 |
62 |
67 | Playback using VLC Media Player
68 |
69 |
70 |
71 | ## Prerequisites
72 |
73 | Working knowledge of system administration tasks for your target platform is
74 | required. This guide provides directions for Linux, Windows, and Docker
75 | platforms. Familiarity with the Livepeer protocol is beneficial. For more
76 | information, view the go-livepeer
77 | [installation guide](/orchestrators/guides/install-go-livepeer).
78 |
79 | This guide was developed using:
80 |
81 | - Ubuntu Linux 22.04
82 | - Docker 20.10.14
83 | - Windows
84 | - Livepeer 0.7.2
85 | - root user access (sudo is ok)
86 |
87 | You will also need access to an Arbitrum RPC URL, which is required to run
88 | Livepeer. Popular services include [Infura](https://www.infura.io/) and
89 | [Alchemy](https://www.alchemy.com/). Be aware that these services have their own
90 | pricing plans. That being said, the latest versions of `livepeer` should be able
91 | to stay within the request limits of these providers' free tiers, at least for a
92 | single node. As an alternative, you can self-host your own Arbitrum node; see
93 | the
94 | [instructions from Offchain Labs](https://docs.arbitrum.io/node-running/how-tos/running-a-full-node).
95 |
--------------------------------------------------------------------------------
/gateways/guides/linux-install.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Linux Install
3 | icon: download
4 | ---
5 |
6 | # Download the Livepeer binary
7 |
8 | ```
9 | sudo wget https://github.com/livepeer/go-livepeer/releases/download//livepeer-linux-amd64.tar.gz
10 | ```
11 |
12 | Unpack and remove the compressed file
13 |
14 | ```
15 | sudo tar -zxvf livepeer-linux-amd64.tar.gz
16 | sudo rm livepeer-linux-amd64.tar.gz
17 | sudo mv livepeer-linux-amd64/* /usr/local/bin/
18 | ```
19 |
20 | # Generate a new keystore file
21 |
22 | ```
23 | /usr/local/bin/livepeer -network arbitrum-one-mainnet -ethUrl -gateway
24 | exit
25 | ```
26 |
27 |
28 | When generating a new keystore file, the program will prompt you for a
29 | password. This password is used to decrypt the keystore file and access the
30 | private key. Make sure never to share or lose access to either the password or
31 | the keystore file.
32 |
33 |
34 | # Create a file containing your Gateway Ethereum password
35 |
36 | ```
37 | sudo mkdir /usr/local/bin/lptConfig
38 | sudo nano /usr/local/bin/lptConfig/node.txt
39 | ```
40 |
41 | Enter your password and save the file
42 |
43 | # Create a system service
44 |
45 | ```
46 | sudo nano /etc/systemd/system/livepeer.service
47 | ```
48 |
49 | Paste and update the following startup script with your personal info:
50 |
51 | ```
52 | [Unit]
53 | Description=Livepeer
54 |
55 | [Service]
56 | Type=simple
57 | User=root
58 | Restart=always
59 | RestartSec=4
60 | ExecStart=/usr/local/bin/livepeer -network arbitrum-one-mainnet \
61 | -ethUrl= \
62 | -cliAddr=127.0.0.1:5935 \
63 | -ethPassword=/usr/local/bin/lptConfig/node.txt \
64 | -maxPricePerUnit=300 \
65 | -broadcaster=true \
66 | -serviceAddr=:8935 \
67 | -transcodingOptions=/usr/local/bin/lptConfig/transcodingOptions.json \
68 | -rtmpAddr=0.0.0.0:1935 \
69 | -httpAddr=0.0.0.0:8935 \
70 | -monitor=true \
71 | -v 6
72 |
73 | [Install]
74 | WantedBy=default.target
75 | ```
76 |
77 | Start the system service
78 |
79 | ```
80 | sudo systemctl daemon-reload
81 | sudo systemctl enable --now livepeer
82 | ```
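
To confirm the service started cleanly, you can check its status and follow the logs (standard systemd tooling, shown as an optional sanity check):

```bash
sudo systemctl status livepeer
sudo journalctl -u livepeer -f
```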
83 |
84 | Open the Livepeer CLI
85 |
86 | ```
87 | livepeer_cli -host 127.0.0.1 -http 5935
88 | ```
89 |
90 | Jump to [Configure Transcoding Options](/gateways/guides/transcoding-options) to
91 | finish configuring the Gateway.
92 |
--------------------------------------------------------------------------------
/gateways/guides/playback-content.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Playback Content
3 | icon: circle-play
4 | ---
5 |
6 | # Playback using VLC Media Player
7 |
8 | This section explains how to view content from the Livepeer Gateway. We will be
9 | using [VLC Media Player](https://videolan.org/).
10 |
11 | 1. Download and install
12 | [VLC Media Player](https://www.videolan.org/vlc/index.html).
13 | 2. Launch VLC Media Player.
14 | 3. Select **Media > Open Network Stream...** (Ctrl-N).
15 | 4. Enter `http://:8935/stream/.m3u8` as the network URL.
16 | 5. Click **Play** and view the content from the `obs-studio` stream.
17 |
--------------------------------------------------------------------------------
/gateways/guides/publish-content.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Publish Content
3 | icon: upload
4 | ---
5 |
6 | This section explains how to publish and consume content to the Livepeer
7 | Gateway.
8 |
9 | This can be done via a command line interface using FFmpeg, or from a graphical
10 | user interface using OBS Studio and VLC Media Player.
11 |
12 | # Command Line Interface
13 |
14 | This section explains how to publish content to and from Livepeer Gateway using
15 | a command line interface (CLI).
16 |
17 | ## Install `FFmpeg`
18 |
19 | Install `FFmpeg` for your platform following the instructions on the
20 | [FFmpeg website](https://ffmpeg.org/download.html).
21 |
22 | ## Run the following command to send an RTMP test stream to the Gateway:
23 |
24 | ```
25 | ffmpeg -re -f lavfi -i \
26 | testsrc=size=1280x720:rate=30,format=yuv420p \
27 | -f lavfi -i sine -c:v libx264 -b:v 3000k \
28 | -x264-params keyint=60 -c:a aac -f flv \
29 | rtmp://:1935/test_source
30 | ```
31 |
32 | - `test_source` is the "stream key" for this publication.
33 | - `size=1280x720` defines the dimensions of the test video source in pixels
34 | - `rate=30` defines the frame rate of the test video in frames per second
35 | - `3000k` defines the video bitrate for the stream
36 | - `keyint=60` defines the keyframe interval in frames
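
While the test stream is running, you can optionally confirm that the Gateway is serving an HLS playlist for it. The URL pattern follows the playback guide in these docs; `<GATEWAY_IP>` below is a placeholder for your Gateway's address, not a value from this guide.

```bash
# <GATEWAY_IP> is a placeholder; test_source is the stream key used above
curl -s http://<GATEWAY_IP>:8935/stream/test_source.m3u8
```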
37 |
38 | ## Run the following command to send a recorded video file to the Gateway:
39 |
40 | ```
41 | ffmpeg \
42 | -re \
43 | -i video.mov \
44 | -codec copy \
45 | -f flv rtmp://:1935/video_file
46 | ```
47 |
48 | - `video_file` is the "stream key" for this stream.
49 |
50 | # Graphical User Interface
51 |
52 | This section explains how to publish media to the Livepeer Gateway using a
53 | graphical user interface (GUI).
54 |
55 | ## Publish content using OBS Studio
56 |
57 | OBS Studio can be used to publish streaming media to the Livepeer Gateway:
58 |
59 | 1. Install [OBS Studio](https://obsproject.com/).
60 | 2. Open OBS Studio and go to **File > Settings > Stream**.
61 | 3. Enter the following details:
62 |
63 | ```txt
64 | Service: Custom
65 | Server: rtmp://:1935
66 | Stream Key: stream-key
67 | ```
68 |
69 | 4. Go to the **Output** tab and set **Output Mode** to **Advanced**.
70 | 5. Set the **Keyframe Interval** to `1`.
71 | 6. Click **OK** and then **Start Streaming** in the main window.
72 |
--------------------------------------------------------------------------------
/gateways/guides/transcoding-options.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Configure Transcoding Options
3 | icon: gear
4 | description:
5 | To better control your encoding profiles, it is recommended to use a JSON
6 | file to specify the resolution and bitrate for your encoding ladder.
7 | ---
8 |
9 | # Create the JSON file
10 |
11 | Use the following as a template for your JSON file:
12 |
13 | ```
14 | [
15 | {
16 | "name": "480p0",
17 | "fps": 0,
18 | "bitrate": 1600000,
19 | "width": 854,
20 | "height": 480,
21 | "profile": "h264constrainedhigh",
22 | "gop": "1"
23 | },
24 | {
25 | "name": "720p0",
26 | "fps": 0,
27 | "bitrate": 3000000,
28 | "width": 1280,
29 | "height": 720,
30 | "profile": "h264constrainedhigh",
31 | "gop": "1"
32 | },
33 | {
34 | "name": "1080p0",
35 | "fps": 0,
36 | "bitrate": 6500000,
37 | "width": 1920,
38 | "height": 1080,
39 | "profile": "h264constrainedhigh",
40 | "gop": "1"
41 | }
42 | ]
43 | ```
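
Once you have created the file in the platform-specific steps below, you can optionally confirm that it parses as valid JSON (assuming `jq` is installed; the path shown matches the Linux layout used later in this guide, so adjust it for your platform):

```bash
jq . /usr/local/bin/lptConfig/transcodingOptions.json
```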
44 |
45 | ## Modify Docker Config
46 |
47 | Create the transcodingOptions.json file using the above template.
48 |
49 | ```
50 | nano -p /var/lib/docker/volumes/gateway-lpData/_data/transcodingOptions.json
51 | ```
52 |
53 | Modify the docker-compose.yml file from the root user's home directory _/root/_
54 | and add the following below `-pixelsPerUnit=1`
55 |
56 | ```
57 | -transcodingOptions=/root/.lpData/transcodingOptions.json
58 | ```
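
For the new flag to take effect, the Gateway container needs to be recreated or restarted. The exact command depends on how you run the stack; a `docker compose` invocation is shown here as an assumption:

```bash
# Assumes docker-compose.yml lives in /root, as referenced above
cd /root && docker compose up -d
```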
59 |
60 | ## Modify Linux Config
61 |
62 | Create the transcodingOptions.json file using the above template.
63 |
64 | ```
65 | sudo nano /usr/local/bin/lptConfig/transcodingOptions.json
66 | ```
67 |
68 | Modify the Linux Service file /etc/systemd/system/livepeer.service and add the
69 | following below `-pixelsPerUnit=1`
70 |
71 | ```
72 | -transcodingOptions=/usr/local/bin/lptConfig/transcodingOptions.json \
73 | ```
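
Because the systemd unit file changed, reload systemd and restart the service so the new flag is picked up:

```bash
sudo systemctl daemon-reload
sudo systemctl restart livepeer
```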
74 |
75 | ## Modify Windows Config
76 |
77 | Create the transcodingOptions.json file using the above template.
78 |
79 | Open Notepad (or your text editor of choice), paste the template above, and
80 | save the transcodingOptions.json file in the following location.
81 |
82 | **Note:** Replace **YOUR_USER_NAME** with your actual user name
83 |
84 | ```
85 | C:\Users\YOUR_USER_NAME\.lpData\transcodingOptions.json
86 | ```
87 |
88 | Modify Windows bat file to include the following command after
89 | `-pixelsPerUnit=1`
90 |
91 | ```
92 | -transcodingOptions=C:\Users\YOUR_USER_NAME\.lpData\transcodingOptions.json
93 | ```
94 |
--------------------------------------------------------------------------------
/gateways/guides/windows-install.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Windows Install
3 | icon: download
4 | ---
5 |
6 | ## Download and unzip the Livepeer binary
7 |
8 | ```
9 | https://github.com/livepeer/go-livepeer/releases/download//livepeer-windows-amd64.zip
10 |
11 | ```
12 |
13 | ## Create a bat file to launch Livepeer.
14 |
15 | Use the following as a template, add your personal info, and save it as a .bat
16 | file in the same directory as the Livepeer executable.
17 |
18 | ```
19 | livepeer.exe -network=arbitrum-one-mainnet -ethUrl= -cliAddr=127.0.0.1:5935 -serviceAddr=:8935 -broadcaster -maxPricePerUnit=300 -pricePerUnit=1 -monitor=true -v=6 -rtmpAddr=0.0.0.0:1935 -httpAddr=0.0.0.0:8935 -blockPollingInterval=20
20 |
21 | PAUSE
22 | ```
23 |
24 | ## Start the Livepeer Gateway
25 |
26 | Start the Livepeer Gateway using the .bat file.
27 |
28 | When prompted enter and confirm a password.
29 |
30 |
31 | This password is used to decrypt the keystore file and access the private key.
32 | Make sure never to share or lose access to either the password or the keystore
33 | file.
34 |
35 |
36 | After confirming your password close the terminal.
37 |
38 | ## Create a file containing your Gateway Ethereum password
39 |
40 | In `C:\Users\YOUR_USER_NAME\.lpData` create a txt file named `ethsecret.txt`
41 | with the password you created in the previous step.
42 |
43 | ## Add the `-ethPassword` flag to your .bat file
44 |
45 | Add `-ethPassword=C:\Users\YOUR_USER_NAME\.lpData\ethsecret.txt` to the
46 | previously created .bat file
47 |
48 | _If you'd like the Gateway to start with Windows, you can create a system
49 | service using [NSSM](https://nssm.cc/) or the Windows Task Scheduler._
50 |
51 | Open the Livepeer CLI, then jump to
52 | [Configure Transcoding Options](/gateways/guides/transcoding-options) to finish
53 | configuring the Gateway.
54 |
55 | ```
56 | livepeer_cli.exe -host 127.0.0.1 -http 5935
57 | ```
58 |
--------------------------------------------------------------------------------
/images/ai/ai-serviceregistry-explorer-page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/ai-serviceregistry-explorer-page.png
--------------------------------------------------------------------------------
/images/ai/ai-serviceregistry-setserviceuri.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/ai-serviceregistry-setserviceuri.png
--------------------------------------------------------------------------------
/images/ai/cool-cat-hat-moving.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/cool-cat-hat-moving.gif
--------------------------------------------------------------------------------
/images/ai/cool-cat-hat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/cool-cat-hat.png
--------------------------------------------------------------------------------
/images/ai/cool-cat-low-res.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/cool-cat-low-res.png
--------------------------------------------------------------------------------
/images/ai/cool-cat.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/cool-cat.png
--------------------------------------------------------------------------------
/images/ai/showcase/dream.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/showcase/dream.png
--------------------------------------------------------------------------------
/images/ai/showcase/inference_stronk_rocks.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/showcase/inference_stronk_rocks.png
--------------------------------------------------------------------------------
/images/ai/showcase/letsgenerate_ai.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/showcase/letsgenerate_ai.png
--------------------------------------------------------------------------------
/images/ai/showcase/tsunameme_ai.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/showcase/tsunameme_ai.png
--------------------------------------------------------------------------------
/images/ai/swagger_ui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/ai/swagger_ui.png
--------------------------------------------------------------------------------
/images/asset-page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/asset-page.png
--------------------------------------------------------------------------------
/images/background.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/background.png
--------------------------------------------------------------------------------
/images/blender-poster-2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/blender-poster-2.png
--------------------------------------------------------------------------------
/images/blender-poster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/blender-poster.png
--------------------------------------------------------------------------------
/images/codepen-player.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/codepen-player.png
--------------------------------------------------------------------------------
/images/create-api-key.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/create-api-key.png
--------------------------------------------------------------------------------
/images/delegating-guides/arbitrum-oog.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/delegating-guides/arbitrum-oog.png
--------------------------------------------------------------------------------
/images/delegating-guides/arbitrum-retry-ui.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/delegating-guides/arbitrum-retry-ui.png
--------------------------------------------------------------------------------
/images/delegating-guides/claim-d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/delegating-guides/claim-d.png
--------------------------------------------------------------------------------
/images/delegating-guides/confirm-d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/delegating-guides/confirm-d.png
--------------------------------------------------------------------------------
/images/delegating-guides/connect-wallet-d.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/delegating-guides/connect-wallet-d.png
--------------------------------------------------------------------------------
/images/delegating-guides/connect-wallet-d2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/delegating-guides/connect-wallet-d2.png
--------------------------------------------------------------------------------
/images/delegating-guides/migrate-d1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/delegating-guides/migrate-d1.png
--------------------------------------------------------------------------------
/images/engagement/Dashboard-1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/engagement/Dashboard-1.png
--------------------------------------------------------------------------------
/images/engagement/Dashboard-10.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/engagement/Dashboard-10.png
--------------------------------------------------------------------------------
/images/engagement/Dashboard-4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/engagement/Dashboard-4.png
--------------------------------------------------------------------------------
/images/obs/LVS4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/obs/LVS4.png
--------------------------------------------------------------------------------
/images/obs/LVS5.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/obs/LVS5.png
--------------------------------------------------------------------------------
/images/obs/OBS1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/obs/OBS1.png
--------------------------------------------------------------------------------
/images/obs/OBS2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/obs/OBS2.png
--------------------------------------------------------------------------------
/images/obs/OBS3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/obs/OBS3.png
--------------------------------------------------------------------------------
/images/obs/OBS4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/obs/OBS4.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/begin-migration.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/begin-migration.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/connect-wallet.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/connect-wallet.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/connect-wallet2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/connect-wallet2.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/sign-cli.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/sign-cli.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/sign-cli2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/sign-cli2.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/sign-cli3.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/sign-cli3.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/sign-cli4.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/sign-cli4.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/sign-web.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/sign-web.png
--------------------------------------------------------------------------------
/images/orchestrating-guides/stake-info.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/orchestrating-guides/stake-info.png
--------------------------------------------------------------------------------
/images/poll.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/poll.png
--------------------------------------------------------------------------------
/images/project-creation.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/project-creation.png
--------------------------------------------------------------------------------
/images/quickstart/api-keys.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/quickstart/api-keys.png
--------------------------------------------------------------------------------
/images/quickstart/cors.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/quickstart/cors.png
--------------------------------------------------------------------------------
/images/quickstart/create-an-account.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/quickstart/create-an-account.png
--------------------------------------------------------------------------------
/images/quickstart/create-an-api-key.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/quickstart/create-an-api-key.png
--------------------------------------------------------------------------------
/images/quickstart/create-key.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/quickstart/create-key.png
--------------------------------------------------------------------------------
/images/stream-health.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/stream-health.png
--------------------------------------------------------------------------------
/images/stream-page.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/stream-page.png
--------------------------------------------------------------------------------
/images/studio-in-browser-stream.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/studio-in-browser-stream.png
--------------------------------------------------------------------------------
/images/titan-node.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/titan-node.png
--------------------------------------------------------------------------------
/images/tutorials/authors/evan.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/authors/evan.jpeg
--------------------------------------------------------------------------------
/images/tutorials/authors/suhail.jpeg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/authors/suhail.jpeg
--------------------------------------------------------------------------------
/images/tutorials/grafana-connections.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/grafana-connections.jpg
--------------------------------------------------------------------------------
/images/tutorials/grafana-import-dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/grafana-import-dashboard.png
--------------------------------------------------------------------------------
/images/tutorials/guildxyz-app-homepage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/guildxyz-app-homepage.png
--------------------------------------------------------------------------------
/images/tutorials/lit-app-homepage.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/lit-app-homepage.png
--------------------------------------------------------------------------------
/images/tutorials/livepeer_dashboard.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/livepeer_dashboard.png
--------------------------------------------------------------------------------
/images/tutorials/studio-add-webhook.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/studio-add-webhook.png
--------------------------------------------------------------------------------
/images/tutorials/studio-create-api.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/studio-create-api.png
--------------------------------------------------------------------------------
/images/tutorials/timeplus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/timeplus.png
--------------------------------------------------------------------------------
/images/tutorials/vod-diagram.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/tutorials/vod-diagram.png
--------------------------------------------------------------------------------
/images/vote-livepeer-cli-instructions.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/vote-livepeer-cli-instructions.png
--------------------------------------------------------------------------------
/images/vote-livepeer-cli.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/vote-livepeer-cli.png
--------------------------------------------------------------------------------
/images/waterfalls-poster.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/waterfalls-poster.png
--------------------------------------------------------------------------------
/images/webhooks.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/webhooks.png
--------------------------------------------------------------------------------
/images/webrtmp.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/livepeer/docs/1759e00c522edb29b6e93582563d0e93959e6b3f/images/webrtmp.png
--------------------------------------------------------------------------------
/orchestrators/guides/assess-capabilities.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Assess concurrent streams
3 | icon: signal-stream
4 | ---
5 |
6 | Once you have confirmed that your
7 | [hardware](/references/go-livepeer/gpu-support) is supported by `Go Livepeer`,
8 | you should assess how many concurrent streams it can support.
9 |
10 | ## Hardware functionality and constraints
11 |
12 | The Livepeer protocol enables those with the excess hardware and bandwidth
13 | available to earn additional revenue by advertising video encoding services on
14 | an open marketplace, and using their idle hardware to perform the work. There
15 | are a number of different types of hardware capable of encoding video in a
16 | performant and cost-effective manner, each with its own unique capabilities and
17 | terms of use.
18 |
19 | Some of these terms artificially restrict users from utilizing their own
20 | hardware to its full capacity. While searching for open-source patches
21 | reveals workarounds to these limitations, Livepeer encourages operators on the
22 | network to read and comply with the terms of service and usage policies of the
23 | hardware that they are using.
24 |
25 | ### NVIDIA
26 |
27 | If you are using an NVIDIA card, check you are running the latest driver version
28 | on your NVIDIA GPU, or update your driver before proceeding to benchmarking. You
29 | can access the GPU configuration, Display Adapters, and drivers for your
30 | operating system either directly or through your NVIDIA Control Panel.
31 |
32 | Concurrent session caps for NVIDIA hardware can be found
33 | [here](https://developer.nvidia.com/video-encode-decode-gpu-support-matrix).
34 |
35 | ## Testing
36 |
37 | You can test the performance of your card using the `livepeer_bench`
38 | [benchmarking tool](/orchestrators/guides/benchmark-transcoding).
39 |
--------------------------------------------------------------------------------
/orchestrators/guides/configure-reward-calling.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Reward Calls
3 | icon: coins
4 | ---
5 |
6 | This guide provides instructions and recommendations on the ways to call reward
7 | once you've activated your orchestrator on the Livepeer network.
8 |
9 | # About Calling Reward
10 |
11 | By default, an active orchestrator will automatically call reward in each round,
12 | submitting an Arbitrum transaction that distributes newly minted LPT rewards to
13 | itself and its delegators.
14 |
15 | The amount of LPT rewards distributed by the reward call depends on the
16 | orchestrator's stake, i.e. its own stake and that of its delegators. It is
17 | important to note that for orchestrators with very low stake, the ETH
18 | transaction cost of calling reward may exceed the amount of LPT rewards
19 | distributed. The threshold to profitably call reward depends on several factors,
20 | including but not limited to the market price of LPT and the current inflation
21 | rate.
22 |
23 | ## Getting Started with Reward Calls
24 |
25 | When you first initiate reward calls, it may make economic sense for you to
26 | [disable automatic reward calls](/orchestrators/guides/configure-reward-calling#disable-automatic-reward-calls)
27 | and then
28 | [manually call reward](/orchestrators/guides/configure-reward-calling#manually-call-reward)
29 | in each round instead.
30 |
31 | You then can
32 | [enable automatic reward calls](/orchestrators/guides/configure-reward-calling#enable-automatic-reward-calls)
33 | when you are confident that the distribution of LPT relative to the ETH
34 | transaction cost makes economic sense.
35 |
36 | ## Disable automatic reward calls
37 |
38 | Disable automatic reward calls with the `-reward=false` flag:
39 |
40 | **For example:**
41 |
42 | ```bash
43 | livepeer \
44 | -network arbitrum-one-mainnet \
45 | -reward=false
46 | ```
47 |
48 | > **Note:** for the purposes of this example above, all other flags are omitted.
49 |
50 | ## Manually call reward
51 |
52 | Use `livepeer_cli` to manually call reward:
53 |
54 | 1. Estimate the current ETH transaction cost for calling reward and ensure you
55 | have enough ETH in your wallet to execute the transaction.
56 |
57 |    - The gas cost for a reward call is typically 350k-450k gas.
58 |    - Get the required gas price from [ethgasstation](https://ethgasstation.info/)
59 |      or [gasnow](https://www.gasnow.org/).
60 |
61 |    - The ETH transaction cost will be the gas cost multiplied by the gas price;
62 |      for example, 400,000 gas at a gas price of 0.1 gwei costs 40,000 gwei, or
63 |      0.00004 ETH.
62 |
63 | 2. Make sure `livepeer` is running.
64 |
65 | 3. Run `livepeer_cli`
66 |
67 | 4. Enter the number corresponding to the `Invoke "reward"` option
68 |
69 | 5. Wait for the transaction to confirm.
70 |
71 | You can view this in the logs of your orchestrator, which will indicate a
72 | transaction has been submitted and confirmed on-chain.
73 |
74 | ## Enable automatic reward calls
75 |
76 | - To enable automatic reward calls omit the `-reward=false` flag (enabled by
77 | default).
78 |
--------------------------------------------------------------------------------
/orchestrators/guides/connect-to-arbitrum.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Connect to Arbitrum
3 | icon: plug
4 | ---
5 |
6 | `go-livepeer` requires access to the [Arbitrum](https://arbitrum.io/) network
7 | for typical usage.
8 |
9 | There are two ways you can connect to the Arbitrum network: via a hosted API
10 | service or via your own self-hosted node. Connecting to a hosted API service is
11 | recommended for users who are getting started, and you always have the option to
12 | switch to your own node later on.
13 |
14 | ### Hosted API services
15 |
16 | Hosted API services run Arbitrum nodes on behalf of their users. Popular
17 | services include [Infura](https://infura.io/) and
18 | [Alchemy](https://alchemyapi.io/). Be aware that these services have their own
19 | pricing plans. That being said, the latest versions of `livepeer` should be able
20 | to stay within the request limit for Infura's free tier at least for a single
21 | node.
22 |
23 | The following examples describe the required flags to connect to an
24 | EVM-compatible network via Infura (all other flags omitted):
25 |
26 | To connect to Arbitrum mainnet:
27 |
28 | ```bash
29 | livepeer \
30 | -network arbitrum-one-mainnet \
31 | -ethUrl https://arbitrum-mainnet.infura.io/v3/ # Visit https://infura.io to obtain a PROJECT_ID
32 | ```
33 |
34 | ### Self-hosted Arbitrum nodes
35 |
36 | If you want to run your own Arbitrum node, set one up using the
37 | [instructions from Offchain Labs](https://developer.offchainlabs.com/docs/running_node).
38 |
39 | Once your node is synced, connect `livepeer` to the node with the following
40 | flags (all other flags omitted):
41 |
42 | ```bash
43 | livepeer \
44 | -network arbitrum-one-mainnet \
45 | -ethUrl http://localhost:8545 # Assumes that your node is running on the same machine as livepeer
46 | ```
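
Once the node is synced, you can optionally confirm that the RPC endpoint responds before pointing `livepeer` at it. This is a quick check with a standard JSON-RPC call; Arbitrum One's chain ID is 42161 (`0xa4b1`):

```bash
curl -s -X POST -H "Content-Type: application/json" \
  --data '{"jsonrpc":"2.0","method":"eth_chainId","params":[],"id":1}' \
  http://localhost:8545
# Expected result field: "0xa4b1" (42161, Arbitrum One)
```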
47 |
48 | ## Supported networks
49 |
50 | `livepeer` supports the networks listed below. The required flags for connecting
51 | to a network are described (all other flags are omitted).
52 |
53 | ### Arbitrum One
54 |
55 | Arbitrum One is the production public network.
56 |
57 | ```bash
58 | livepeer \
59 | -network arbitrum-one-mainnet \
60 | -ethUrl # URL for Arbitrum mainnet provider
61 | ```
62 |
63 | ### Private Network
64 |
65 | Custom private networks where the Livepeer smart contracts are deployed can be
66 | used for development purposes.
67 |
68 | ```bash
69 | livepeer \
70 | -network # Name of network
71 | -ethUrl # URL for private network provider
72 | -ethController # Address of the Controller smart contract deployed on the private network
73 | ```
74 |
75 | ### Offchain
76 |
77 | Offchain networks that do not require interaction with the Livepeer smart
78 | contracts can be used for development purposes.
79 |
80 | ```bash
81 | livepeer \
82 | -network offchain
83 | ```
84 |
--------------------------------------------------------------------------------
/orchestrators/guides/gateway-introspection.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Gateway Introspection
3 | icon: tower-broadcast
4 | ---
5 |
6 | We launched a public API to enable Gateway introspection. Users of the API
7 | will be able to review the activity inside the Livepeer Gateway nodes and
8 | understand the selection algorithms used to assign work to Orchestrator nodes.
9 |
10 | This is the initial release of the API, and only a few log lines have been
11 | enabled in the public logs. Orchestrators are encouraged to open pull requests
12 | to enable additional logs, which will be closely reviewed by the Livepeer team.
13 | Additionally, the core Livepeer team will publish more logs from Livepeer
14 | Gateways that may aid in understanding the selection algorithms.
15 |
16 | This API uses Grafana's Loki for log aggregation. Examples of API usage are
17 | provided below. For more guidance, refer to
18 | [this page](https://grafana.com/docs/loki/v2.4.x/api/#get-lokiapiv1query).
19 |
20 | ### API usage
21 |
22 | - Public logs from Livepeer Gateways are available through the public Loki
23 | instance.
24 | - Example queries:
25 |
26 | ```bash
27 | # logs from all regions:
28 | curl -G -s https://loki.livepeer.report/loki/api/v1/query \
29 | --data-urlencode "query={region=~\".+\"}" | jq
30 |
31 | # logs from all regions between two timestamps (UNIX epoch nanoseconds)
32 | curl -G -s https://loki.livepeer.report/loki/api/v1/query_range \
33 | --data-urlencode "query={region=~\".+\"}" --data 'start=1727335380000000000' --data 'end=1727635380000000000' | jq
34 |
35 | # logs from a specific region (e.g. NYC):
36 | curl -G -s https://loki.livepeer.report/loki/api/v1/query \
37 | --data-urlencode "query={region=~\"nyc\"}" | jq
38 |
39 | # logs related to a specific orchestrator IP-address:
40 | curl -G -s https://loki.livepeer.report/loki/api/v1/query \
41 | --data-urlencode "query={region=~\".+\"} |= \"clientIP=121.127.46.156\"" | jq
42 |
43 | # list of all possible regions:
44 | curl -G -s https://loki.livepeer.report/loki/api/v1/label/region/values | jq '.data'
45 | ```
46 |
--------------------------------------------------------------------------------
/orchestrators/guides/monitor-metrics.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Monitor Metrics
3 | icon: monitor-waveform
4 | ---
5 |
6 | This guide provides instructions on configuring metrics monitoring for
7 | orchestrators that have been
8 | [activated](/orchestrators/guides/get-started#activate) on the Livepeer network.
9 |
10 | - Enable Metrics Monitoring
11 | - Monitor with visualizations
12 | - Prometheus
13 | - Grafana
14 | - Monitor with Docker
15 |
16 |
17 | You can refer to [Prometheus
18 | Metrics](/references/go-livepeer/prometheus-metrics) to check what metrics are
19 | exposed.
20 |
21 |
22 | ## Enabling Metrics Monitoring
23 |
24 | You can enable metrics monitoring with `livepeer` by adding the `-monitor` flag
25 | and additional parameters:
26 |
27 | - `-monitor`: enables metric monitoring
28 | - `-metricsPerStream`: groups performance metrics per stream
29 | - `-metricsClientIP`: exposes client's IP in metrics
30 |
31 | **For Example:**
32 |
33 | Enable metrics monitoring with a combined orchestrator and transcoder:
34 |
35 | ```bash
36 | livepeer \
37 | -orchestrator \
38 | -transcoder \
39 | -monitor
40 | ```
41 |
42 | For the purpose of this example, other flags have been omitted.
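
Once the node is running with `-monitor`, the metrics should be exposed over HTTP on the node's CLI/web port (7935 by default; adjust the port if you set `-cliAddr`). A quick check, assuming `curl` is available:

```bash
# Port 7935 is the default CLI/web port; change it to match your -cliAddr setting
curl -s http://127.0.0.1:7935/metrics | head
```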
43 |
44 | ## Monitoring With Prometheus and Grafana
45 |
46 | Follow the instructions in this
47 | [monitoring guide](https://forum.livepeer.org/t/guide-transcoder-monitoring-with-prometheus-grafana/1225)
48 | to learn how metrics recorded by `livepeer` can be:
49 |
50 | - Exported to [Prometheus](https://prometheus.io/), and
51 | - Visualized in [Grafana](https://grafana.com/)
52 |
53 | ## Monitoring with Docker
54 |
55 | You can use this
56 | [Docker container](https://github.com/livepeer/livepeer-monitoring) to easily
57 | start monitoring your orchestrator or transcoder. It bundles Prometheus,
58 | Grafana, and a few starter Grafana dashboard templates.
59 |
--------------------------------------------------------------------------------
/orchestrators/guides/o-t-split.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Connect to Transcoders
3 | icon: plug
4 | ---
5 |
6 | In this guide we'll go over how to connect multiple transcoding processes to a
7 | single orchestrator.
8 |
9 | ## Run a standalone orchestrator
10 |
11 | ```bash
12 | livepeer \
13 | -network arbitrum-one-mainnet \
14 | -ethUrl \
15 | -orchestrator \
16 | -orchSecret \
17 | -pricePerUnit \
18 | -serviceAddr
19 | ```
20 |
21 | - `-orchSecret` is used to specify a secret that transcoders can use to connect
22 | with the orchestrator. The secret can be provided in plaintext or via a file
23 | (recommended) i.e. `-orchSecret secret.txt`
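
If you provide the secret via a file, one way to create it is to generate a random value with `openssl` (a suggestion, not a required format; any shared secret string works):

```bash
# Generate a random shared secret and store it in secret.txt
openssl rand -hex 32 > secret.txt
```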
24 |
25 | ## Run a standalone transcoder
26 |
27 | The following instructions assume that the transcoder is run on a separate
28 | machine from the orchestrator. These instructions can be used to connect as many
29 | transcoders as you want to the orchestrator.
30 |
31 | ```bash
32 | # The -nvidia flag is only required for transcoding with Nvidia GPUs
33 | livepeer -transcoder \
34 | -nvidia \
35 | -orchSecret \
36 | -orchAddr
37 | ```
37 |
38 | - The value for `-orchSecret` should be the same as the value used for your
39 | orchestrator
40 | - `-orchAddr` is used to specify the publicly accessible address that the
41 | orchestrator is receiving transcoder registration requests at
42 |
43 | On startup, the transcoder will automatically run a test to confirm that it is
44 | able to transcode using the specified GPUs. The transcoder will exit if this
45 | test fails. If the test passes, you should see the following message in the log
46 | output, with no additional error messages after it, indicating that your
47 | transcoder successfully connected with the orchestrator:
48 |
49 | ```bash
50 | Registering transcoder to my-orchestrator.com:443
51 | ```
52 |
53 | When the orchestrator receives a connection from a transcoder, you will see a
54 | message in the orchestrator logs that looks like:
55 |
56 | ```bash
57 | Got a RegisterTranscoder request from transcoder=10.3.27.1 capacity=10
58 | ```
59 |
60 | The `transcoder` field indicates the IP of the connecting transcoder and the
61 | `capacity` field indicates the number of simultaneous transcoding jobs that the
62 | transcoder can handle. Once the orchestrator has at least one transcoder
63 | connected, it will be able to send transcoding jobs to the transcoder when it
64 | receives a stream from a gateway.
65 |
--------------------------------------------------------------------------------
/orchestrators/guides/vote.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Vote on proposals
3 | icon: check-to-slot
4 | ---
5 |
6 | Follow the steps below to configure and cast votes in governance polls with
7 | `livepeer_cli`. You can do this without exporting keys from the machine on which
8 | the orchestrator node is running.
9 |
10 | ## Voting With the `livepeer_cli`
11 |
12 | Follow the steps herein to access and vote with the `livepeer_cli`:
13 |
14 | 1. Find the contract address for the poll in the
15 | [Livepeer Explorer](https://explorer.livepeer.org/voting) page for the poll.
16 |
17 |
18 |
19 | 
20 |
21 |
22 |
23 | The poll page displays the following message:
24 |
25 |
26 |
27 | 
28 |
29 |
30 |
31 | Click the link, "Follow these instructions", to display the instructions for
32 | voting with `livepeer_cli`:
33 |
34 |
35 |
36 | 
37 |
38 |
39 |
40 |
41 |
42 | It is important to note the poll contract address for upcoming steps.
43 |
44 |
45 | 2. Run `livepeer_cli`
46 |
47 | 3. Enter the number corresponding to the option to `Vote on a poll`
48 |
49 | 4. Enter the contract address saved in step 1:
50 |
51 | ```bash
52 | Enter the contract address for the poll you want to vote on - >
53 | ```
54 |
55 | You will be prompted with the following voting options:
56 |
57 | ```bash
58 | Identifier Voting Options
59 | 0 Yes
60 | 1 No
61 | ```
62 |
63 | 5. Choose and confirm your vote
64 |
65 | **For example:**
66 |
67 | ```bash
68 | Enter the ID of the option you want to vote for - > 0
69 | Are you sure you want to vote "Yes" ? (y/n) - > y
70 |
71 | success
72 | ```
73 |
74 | 6. Wait for the transaction to be confirmed. You should be able to view your
75 | node submitting the vote transaction.
76 |
77 | **For example:**
78 |
79 | ```bash
80 | I0422 03:30:44.191809 43457 backend.go:96]
81 | ******************************Eth Transaction******************************
82 |
83 | Invoking transaction: "vote". Inputs: "_choiceID: 0" Hash: "0xf6957c190f1f16fc2ca4a93846903eb435c5e08fa7f6f40b6e159aab6d74905f".
84 |
85 | **************************************************************************
86 | ```
87 |
88 | 7. Once the vote transaction is confirmed, you will be able to see your vote
89 | reflected in the explorer poll page of the UI.
90 |
--------------------------------------------------------------------------------
/references/awesome-livepeer.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Awesome Livepeer"
3 | url: "https://github.com/livepeer/awesome-livepeer"
4 | icon: "stars"
5 | ---
6 |
--------------------------------------------------------------------------------
/references/example-applications.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Example Applications"
3 | description:
4 | "Curated collection of Livepeer example applications and integrations"
5 | icon: "grid-round-2-plus"
6 | ---
7 |
8 | Enjoy a curated collection of Livepeer example applications and integrations.
9 | Use these starter projects to build your own video applications.
10 |
11 | | Example | Description |
12 | | -------------------------------------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------- |
13 | | [React Native Example App](https://github.com/suhailkakar/Livepeer-YouTube) | Example application which demonstrates how to use Livepeer React to create a React Native YouTube clone. |
14 | | [Justcast.it](https://github.com/victorges/justcast.it) | Web app that provides code for adding "1-click starting a livestream" so that you can create a stream using the Livepeer API. |
15 | | [EVM Token-Gated Livestream](https://github.com/suhailkakar/Livepeer-EVM-Tokengating) | Example application which demonstrates how to create and tokengate a livestream using Livepeer on EVM compatible chains. |
16 | | [Decentralized storage playback](https://github.com/suhailkakar/livepeer-dStorage-playback/) | Example application which demonstrates how to play back a video from decentralized storage such as IPFS and Arweave using Livepeer. |
17 |
--------------------------------------------------------------------------------
/references/go-livepeer/bandwidth-requirements.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Bandwidth Requirements
3 | ---
4 |
5 | The bandwidth requirements for video miners are based on the amount of video
6 | data that needs to be uploaded and downloaded during transcoding.
7 |
8 | The amount of available bandwidth will not only impact the number of streams
9 | that can be transcoded on a machine, but also the speed of data upload/download
10 | which needs to be fast so that video streaming applications can receive
11 | transcoded results as soon as possible.
12 |
13 | The amount of bandwidth required for a stream will depend on the bitrate of the
14 | source stream and the bitrate of the output renditions. The download bandwidth
15 | required for a single stream can be roughly estimated as the bitrate of the
16 | source stream. The upload bandwidth required for a single stream can be roughly
17 | estimated as the sum of the bitrates of each of the output renditions for the
18 | stream. For example, a source stream at about 6 Mbps transcoded into 480p,
19 | 720p, and 1080p renditions at roughly 1.6, 3, and 6.5 Mbps needs about 6 Mbps
20 | of download and about 11 Mbps of upload bandwidth. As a result, the total
21 | number of streams that can be transcoded with a given amount of available
22 | bandwidth will vary.
20 |
21 | While there is not a strict bandwidth requirement for video miners, past testing
22 | has demonstrated that 1G upload/download bandwidth is a good starting point if
23 | possible. If you do not have access to this amount of bandwidth you will still
24 | be able to transcode on the network, but you will have a lower ceiling on the
25 | number of streams you will be able to handle.
26 |
27 | Upload/download bandwidth available can be tested with tools such as:
28 |
29 | - [speedtest](https://www.speedtest.net/apps/cli)
30 | - By default the tool will run a bandwidth test against the closest public
31 | server, but there is also an option to run the test against a specified
32 | public server
33 | - Note: The results of this test also depend on the available bandwidth on the
34 | server used
35 | - [iperf3](https://iperf.fr/)
36 | - This tool can be run on client and server machines that you have access to
37 | - If you have access to a machine with good bandwidth availability in a region
38 | that you expect/want to receive streams from, then this tool will likely be
39 | more useful than speedtest
40 |
--------------------------------------------------------------------------------
/references/go-livepeer/hardware-requirements.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Hardware Requirements
3 | ---
4 |
5 | The hardware requirements for video miners are based on the computational
6 | resources required for transcoding.
7 |
8 | ## GPU
9 |
10 | A GPU with a built-in hardware video encoder/decoder is **strongly** recommended
11 | because it will significantly speed up transcoding and it will be difficult to
12 | compete for work in the marketplace without one. Transcoding capacity will scale
13 | with the number of GPUs available.
14 |
15 | Currently, `livepeer` only supports Nvidia GPUs with
16 | [NVENC (hardware video encoder) and NVDEC (hardware video decoder) support](https://developer.nvidia.com/video-encode-and-decode-gpu-support-matrix-new).
17 | For a list of GPUs that have been tested and that are known to be supported
18 | by `livepeer`, see [this page](/references/go-livepeer/gpu-support).
19 |
20 | ## CPU
21 |
22 | CPU transcoding using a software video encoder/decoder is possible, but not
23 | recommended due to its significant speed disadvantage relative to GPU
24 | transcoding. If you choose to do CPU transcoding (perhaps to try things out or
25 | as a temporary measure until a GPU is available), generally a CPU with more
26 | cores will improve transcoding speed.
27 |
28 | ## RAM
29 |
30 | TBD.
31 |
32 | ## Disk
33 |
34 | TBD.
35 |
--------------------------------------------------------------------------------
/references/subgraph.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Awesome Livepeer"
3 | url: "github.com/livepeer/awesome-livepeer"
4 | ---
5 |
--------------------------------------------------------------------------------
/sdks/go.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Go
3 | description: "Learn how to create your first stream using the Livepeer Go SDK."
4 | icon: golang
5 | ---
6 |
7 |
8 |
9 | To get the most out of this guide, you’ll need to:
10 |
11 | - [Create an API key](https://livepeer.studio/dashboard/developers/api-keys)
12 |
13 |
14 |
15 |
16 |
17 | Get the Livepeer Go SDK.
18 |
19 | ```bash
20 | go get github.com/livepeer/livepeer-go
21 | ```
22 |
23 |
24 |
25 |
26 | The first step is to initialize the SDK with your Livepeer Studio API key.
27 |
28 | ```go
29 | package main
30 |
31 | import (
32 | "context"
33 | livepeer "github.com/livepeer/livepeer-go"
34 | "github.com/livepeer/livepeer-go/models/components"
35 | "log"
36 | )
37 |
38 | func main() {
39 | lpClient := livepeer.New(
40 | livepeer.WithSecurity(""), // Your API key
41 | )
42 | }
43 | ```
44 |
45 |
46 |
47 | Now that you have the SDK installed and initialized, you can use it in your app.
48 | Let's create a stream.
49 |
50 | ```go
51 | package main
52 |
53 | import (
54 | "context"
55 | livepeer "github.com/livepeer/livepeer-go"
56 | "github.com/livepeer/livepeer-go/models/components"
57 | "log"
58 | )
59 |
60 | func main() {
61 | lpClient := livepeer.New(
62 | livepeer.WithSecurity(""),
63 | )
64 |
65 | ctx := context.Background()
66 | res, err := lpClient.Stream.Create(ctx, components.NewStreamPayload{
67 | Name: "test_stream",
68 | })
69 | if err != nil {
70 | log.Fatal(err)
71 | }
72 | if res.Stream != nil {
73 | log.Printf("Stream created successfully")
74 | }
75 | }
76 |
77 | ```
78 |
79 |
80 |
81 |
86 | See an example on GitHub.
87 |
88 |
89 |
90 |
91 |
92 | ## Next steps
93 |
94 | Check out the Livepeer [API Reference](/api-reference) to learn more about the
95 | Livepeer API and the Go SDK.
96 |
--------------------------------------------------------------------------------
/sdks/introduction.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "SDKs"
3 | description:
4 | "Explore developer SDKs, pre-built UI components, and tools for interacting with
5 | the Livepeer Studio API."
6 | sidebarTitle: "Introduction"
7 | ---
8 |
9 | ## Server-side SDKs
10 |
11 | Server-side SDKs reduce the amount of work required to use the Livepeer Studio
12 | API. Install a language-specific SDK into your application, initialize it with
13 | your API key, and get started with just 2 lines of code.
14 |
15 |
16 |
17 |
18 |
19 |
20 |
21 | ## React Components
22 |
23 | Build with composable React primitives for both broadcasting and playback.
24 |
25 |
26 |
32 | Fully customizable video player component for seamless playback
33 |
34 |
40 | Full-featured broadcast component with controls, settings, and device
41 | selection
42 |
43 |
44 |
--------------------------------------------------------------------------------
/sdks/javascript.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: JavaScript
3 | description:
4 | "Learn how to create your first stream using the Livepeer JavaScript SDK."
5 | icon: js
6 | ---
7 |
8 |
9 |
10 | To get the most out of this guide, you’ll need to:
11 |
12 | - [Create an API key](https://livepeer.studio/dashboard/developers/api-keys)
13 |
14 |
15 |
16 |
17 |
18 | Get the Livepeer JavaScript SDK.
19 |
20 |
21 |
22 |
23 | ```bash
24 | npm install livepeer
25 | ```
26 |
27 |
28 |
29 |
30 | ```bash
31 | yarn add livepeer
32 | ```
33 |
34 |
35 |
36 |
37 | ```bash
38 | pnpm install livepeer
39 | ```
40 |
41 |
42 |
43 |
44 | ```bash
45 | bun install livepeer
46 | ```
47 |
48 |
49 |
50 |
51 |
52 |
53 | The first step is to initialize the SDK with your Livepeer Studio API key.
54 |
55 | ```js
56 | import { Livepeer } from "livepeer";
57 |
58 | const livepeer = new Livepeer({
59 | apiKey: "", // Your API key
60 | });
61 | ```
62 |
63 |
64 |
65 | Now that you have the SDK installed and initialized, you can use it in your app.
66 | Let's create a stream.
67 |
68 | ```js
69 | import { Livepeer } from "livepeer";
70 |
71 | const livepeer = new Livepeer({
72 | apiKey: "",
73 | });
74 |
75 | const main = async () => {
76 | const { stream } = await livepeer.stream.create({
77 | name: "Hello from JS SDK!",
78 | });
79 | console.log(stream);
80 | };
81 |
82 | main();
83 | ```
84 |
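The created stream object is what you will typically use next. As a minimal sketch (assuming, per the API reference, that the response exposes `streamKey` for ingest and `playbackId` for playback), you could log both after creation:

```ts
import { Livepeer } from "livepeer";

const livepeer = new Livepeer({ apiKey: "" });

const main = async () => {
  const { stream } = await livepeer.stream.create({
    name: "Hello from JS SDK!",
  });

  // streamKey is what a broadcaster (e.g. OBS) uses to push video in,
  // and playbackId is used to build the playback URL for viewers.
  console.log("stream key:", stream?.streamKey);
  console.log("playback id:", stream?.playbackId);
};

main();
```
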
85 |
86 |
87 |
92 | See the full example on GitHub.
93 |
94 |
95 |
96 |
97 |
98 | ## Next steps
99 |
100 | Check out the Livepeer [API Reference](/api-reference) to learn more about the
101 | Livepeer API and the JavaScript SDK.
102 |
--------------------------------------------------------------------------------
/sdks/python.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Python
3 | description:
4 | "Learn how to create your first stream using the Livepeer Python SDK."
5 | icon: python
6 | ---
7 |
8 |
9 |
10 | To get the most out of this guide, you’ll need to:
11 |
12 | - [Create an API key](https://livepeer.studio/dashboard/developers/api-keys)
13 |
14 |
15 |
16 |
17 |
18 | Get the Livepeer Python SDK.
19 |
20 | ```bash
21 | pip install git+https://github.com/livepeer/livepeer-python.git
22 | ```
23 |
24 |
25 |
26 |
27 | The first step is to initialize the SDK with your Livepeer Studio API key.
28 |
29 | ```python
30 | import livepeer
31 | from livepeer.models import components
32 |
33 | lpClient = livepeer.Livepeer(
34 | api_key="",  # Your API key
35 | )
36 | ```
37 |
38 |
39 |
40 | Now that you have the SDK installed and initialized, you can use it in your app.
41 | Let's create a stream.
42 |
43 | ```python
44 | import livepeer
45 | from livepeer.models import components
46 |
47 | lpClient = livepeer.Livepeer(
48 | api_key="",
49 | )
50 |
51 | req = components.NewStreamPayload(
52 | name='test_stream',
53 | )
54 |
55 | res = lpClient.stream.create(req)
56 |
57 | if res.stream is not None:
58 | # handle response
59 | pass
60 | ```
61 |
62 |
63 |
64 |
69 | See an example on GitHub.
70 |
71 |
72 |
73 |
74 | ## Next steps
75 |
76 | Check out the Livepeer [API Reference](/api-reference) to learn more about the
77 | Livepeer API and the Python SDK.
78 |
--------------------------------------------------------------------------------
/sdks/react/broadcast/Controls.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Controls"
3 | description:
4 | "The `Controls` component provides an interactive container for all other
5 | broadcast control elements such as `AudioTrigger`, `Fullscreen`, etc."
6 | icon: gamepad
7 | ---
8 |
9 |
21 |
22 |
23 | `Controls` serves as the interactive layer for broadcast control actions, dynamically displayed based on the broadcast state, loading state, or presence of an error. It is designed to show automatically upon user interaction with the Container element and to hide based on the `Root` component's `autohide` property.
24 |
25 | By default, controls are shown during hover or interaction.
26 |
27 |
28 |
29 | ## Features
30 |
31 | - Automatic conditional rendering based on user interactions
32 | - Compatible with CSS animations for dynamic enter/exit animations
33 |
34 |
35 | The visibility of `Controls` is managed automatically, relying on the
36 | `autohide` property of the `Root` component to determine how long after a user
37 | interaction to wait before hiding.
38 |
39 |
40 | ## Anatomy
41 |
42 | Import the components and piece the parts together.
43 |
44 | ```tsx
45 | import * as Broadcast from "@livepeer/react/broadcast";
46 |
47 | export default () => (
48 |
49 |
50 |
51 | {/* Place interactive control elements here, like enable/disable broadcast, screenshare toggle, etc. */}
52 |
53 |
54 |
55 | );
56 | ```
57 |
58 | ## Props
59 |
60 | ### `forceMount`
61 |
62 | Ensures the component is always in the DOM, useful for animation controls.
63 |
64 | ### `autoHide`
65 |
66 | Defines the auto-hide delay in milliseconds after user interaction. Default is
67 | 3000ms. Set to 0 to disable auto-hide.
68 |
69 | ## Data Attributes
70 |
71 | ### `data-livepeer-controls`
72 |
73 | Serves to identify the component's role within the broadcast interface.
74 |
75 | ### `data-visible`
76 |
77 | Reflects the visibility status of the controls. It's `"true"` when the controls
78 | are currently visible (not hidden, not loading, no error present, and WebRTC is
79 | supported), and `"false"` when they're not visible.
80 |
81 | This is often used for dynamically applying enter/exit CSS animations.
82 |
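As a hedged sketch of that pattern, assuming a Tailwind-style setup (the class names and stream key below are placeholders), `data-visible` can drive a simple fade:

```tsx
import { getIngest } from "@livepeer/react";
import * as Broadcast from "@livepeer/react/broadcast";

// Placeholder ingest URL built from a stream key; see the getIngest docs.
const ingestUrl = getIngest("your-stream-key", {
  baseUrl: "https://playback.livepeer.studio/webrtc",
});

export default () => (
  <Broadcast.Root ingestUrl={ingestUrl}>
    <Broadcast.Container>
      {/* Fade the controls in/out based on the data-visible attribute. */}
      <Broadcast.Controls className="transition-opacity duration-300 data-[visible=true]:opacity-100 data-[visible=false]:opacity-0">
        {/* interactive control elements */}
      </Broadcast.Controls>
    </Broadcast.Container>
  </Broadcast.Root>
);
```
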
--------------------------------------------------------------------------------
/sdks/react/broadcast/Error.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Error"
3 | description:
4 | "The `ErrorIndicator` component is a visual representation of error states in
5 | broadcast based on error types."
6 | icon: triangle-exclamation
7 | ---
8 |
9 |
21 |
22 |
23 | The `Error` component can be shown for all error types, or a subset of them.
24 | It is usually recommended to be more granular in the error types, since some
25 | errors are more "fatal" and some cannot be recovered from.
26 |
27 |
28 | ## Features
29 |
30 | - Conditional rendering based on the error state, customizable with the
31 | `matcher` pattern
32 | - Dynamic data attributes for custom styling based on the current error state
33 |
34 | ## Anatomy
35 |
36 | Import the components and piece the parts together.
37 |
38 | ```tsx
39 | import * as Broadcast from "@livepeer/react/broadcast";
40 |
41 | export default () => (
42 |
43 |
44 |
45 |
46 |
47 | );
48 | ```
49 |
50 | ## Props
51 |
52 | The component accepts all props suitable for a div element, in addition to the
53 | following specific props:
54 |
55 | ### `forceMount`
56 |
57 | A boolean to force the component's presence in the DOM, useful for controlling
58 | animations with external libraries.
59 |
60 | ### `matcher`
61 |
62 | A prop to define the condition under which the `ErrorIndicator` should be
63 | visible. It can be a string indicating a broad category of errors (like `"all"`
64 | or `"not-permissions"`) or a more specific error type. Alternatively, it can be
65 | a function for custom logic, receiving the PlaybackError type and returning a
66 | boolean for whether it should be shown.
67 |
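For example, a minimal sketch (children and placement are illustrative) that shows the indicator for everything except permission errors, which you might handle with a dedicated prompt instead:

```tsx
import { getIngest } from "@livepeer/react";
import * as Broadcast from "@livepeer/react/broadcast";

const ingestUrl = getIngest("your-stream-key", {
  baseUrl: "https://playback.livepeer.studio/webrtc",
});

export default () => (
  <Broadcast.Root ingestUrl={ingestUrl}>
    <Broadcast.Container>
      {/* Rendered only when a non-permissions error is present. */}
      <Broadcast.ErrorIndicator matcher="not-permissions">
        An error occurred while broadcasting. Please try again.
      </Broadcast.ErrorIndicator>
    </Broadcast.Container>
  </Broadcast.Root>
);
```
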
68 | ## Data Attributes
69 |
70 | ### `data-livepeer-error-indicator`
71 |
72 | Always present to indicate the component's role.
73 |
74 | ### `data-error-state`
75 |
76 | Indicates whether there is an error (`"true"` or `"false"`).
77 |
78 | ### `data-error-type`
79 |
80 | Specifies the type of the current error or `"none"` if no error is present.
81 |
82 | Could be one of:
83 | `"offline" | "access-control" | "fallback" | "permissions" | "unknown" | "none"`
84 |
85 | ### `data-visible`
86 |
87 | Reflects whether the indicator is currently visible (`"true"` or `"false"`).
88 |
89 | This is often used for dynamically applying enter/exit CSS animations.
90 |
--------------------------------------------------------------------------------
/sdks/react/broadcast/Loading.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Loading"
3 | description:
4 | "The `LoadingIndicator` component is a visual indicator of the loading state
5 | of the media."
6 | icon: spinner
7 | ---
8 |
9 |
21 |
22 |
23 | The `Loading` component can be composed with any other elements. Typically it
24 | is combined with `Poster`.
25 |
26 |
27 | ## Features
28 |
29 | - Conditional rendering based on the loading state, customizable with the
30 | `matcher` pattern
31 | - Dynamic data attributes for custom styling based on the current loading state
32 |
33 | ## Anatomy
34 |
35 | Import the components and piece the parts together.
36 |
37 | ```tsx
38 | import * as Broadcast from "@livepeer/react/broadcast";
39 |
40 | export default () => (
41 |
42 |
43 |
44 |
45 |
46 | );
47 | ```
48 |
49 | ## Props
50 |
51 | The component accepts all props suitable for a div element, in addition to the
52 | following specific props:
53 |
54 | ### `forceMount`
55 |
56 | A boolean value that, when set to true, ensures the component is always mounted.
57 | This is particularly useful for managing animations with external animation
58 | libraries.
59 |
60 | ### `matcher`
61 |
62 | A prop intended to define the circumstances under which the `LoadingIndicator`
63 | should be visible. It can be a direct boolean comparison with the loading state
64 | or a function that provides custom logic for visibility determination.
65 |
66 | ## Data Attributes
67 |
68 | ### `data-livepeer-loading-indicator`
69 |
70 | Serves to identify the component's role.
71 |
72 | ### `data-loading`
73 |
74 | Displays the current loading state, `"true"` when media is loading and `"false"`
75 | when it's not.
76 |
77 | ### `data-visible`
78 |
79 | Indicates the visibility status of the indicator, with `"true"` meaning it's
80 | currently visible and `"false"` denoting it's not.
81 |
82 | This is often used for dynamically applying enter/exit CSS animations.
83 |
--------------------------------------------------------------------------------
/sdks/react/broadcast/PictureInPicture.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Picture in Picture"
3 | description:
4 | "The `PictureInPictureTrigger` component toggles the picture-in-picture mode."
5 | icon: clone
6 | ---
7 |
8 |
20 |
21 | ## Features
22 |
23 | - Automatic conditional rendering based on browser API availability,
24 | customizable with the `matcher` pattern
25 | - Supports both the [w3c](https://w3c.github.io/picture-in-picture/) standard
26 | (which most modern browsers support), as well as the
27 | [older Safari/iOS spec](https://developer.apple.com/documentation/webkitjs/adding_picture_in_picture_to_your_safari_media_controls)
28 | (see the browsers which support Picture-in-Picture on
29 | [caniuse](https://caniuse.com/picture-in-picture))
30 |
31 | ## Anatomy
32 |
33 | Import the components and piece the parts together.
34 |
35 | ```tsx
36 | import * as Broadcast from "@livepeer/react/broadcast";
37 |
38 | export default () => (
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 | );
50 | ```
51 |
52 | ## Props
53 |
54 | The component accepts all props suitable for a button element.
55 |
56 | ## Data Attributes
57 |
58 | ### `data-livepeer-controls-picture-in-picture-trigger`
59 |
60 | Serves to identify the component's role.
61 |
62 | ### `data-picture-in-picture`
63 |
64 | Indicates the current picture-in-picture state, `"true"` when the feature is
65 | active and `"false"` otherwise.
66 |
67 | ### `data-visible`
68 |
69 | Reflects the visibility status of the trigger, with `"true"` meaning it's
70 | currently visible (i.e., picture-in-picture is supported and the media is not in
71 | fullscreen mode) and `"false"` denoting it's not visible.
72 |
73 | This is often used for dynamically applying enter/exit CSS animations.
74 |
--------------------------------------------------------------------------------
/sdks/react/broadcast/Portal.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Portal"
3 | description:
4 | "The `Portal` component allows projection of content into different parts of
5 | the DOM."
6 | icon: person-to-portal
7 | ---
8 |
9 |
21 |
22 |
23 | The `Portal` component is a simple wrapper over
24 | [@radix-ui/react-portal](https://www.radix-ui.com/primitives/docs/utilities/portal#portal).
25 | It inherits all of the props and behavior documented for that component.
26 |
27 |
28 | ## Features
29 |
30 | - Provides an easy way to portal controls out of the Root components
31 |
32 | ## Anatomy
33 |
34 | Import the components and piece the parts together.
35 |
36 | ```tsx
37 | import * as Broadcast from "@livepeer/react/broadcast";
38 |
39 | export default () => (
40 |
41 |
42 | {/* Place portalled elements here, which can still
43 | consume the React Context from the Root, but are
44 | appended to the document */}
45 |
46 |
47 | );
48 | ```
49 |
50 | ## Props
51 |
52 | ### `children`
53 |
54 | React nodes that are to be rendered inside the portal.
55 |
56 | ### `container`
57 |
58 | An optional prop specifying a container element where the portal's children
59 | should be rendered. If not provided, the children are appended to the body of
60 | the document by default.
61 |
--------------------------------------------------------------------------------
/sdks/react/broadcast/Status.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Status"
3 | description:
4 | "The `StatusIndicator` component provides visual cues for the broadcast
5 | status."
6 | icon: wave-pulse
7 | ---
8 |
9 |
21 |
22 | ## Features
23 |
24 | - Synchronizes broadcast status with a visual indicator
25 |
26 | ## Anatomy
27 |
28 | Import the components and piece the parts together.
29 |
30 | ```tsx
31 | import * as Broadcast from "@livepeer/react/broadcast";
32 |
33 | export default () => (
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 | );
45 | ```
46 |
47 | ## Props
48 |
49 | ### `forceMount`
50 |
51 | A boolean that, when true, ensures the component is always mounted. This is
52 | particularly useful for managing animations with external animation libraries.
53 |
54 | ### `matcher`
55 |
56 | A required prop that defines the conditions under which the `StatusIndicator`
57 | should be visible. It can be a specific `BroadcastStatus` value or a function
58 | that provides custom logic for visibility determination based on the broadcast
59 | status.
60 |
61 | The status can be of the type: `"live" | "pending" | "idle"`
62 |
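For example, a minimal sketch that renders one label per status, so only the matching one is visible at a time (labels and placement are illustrative):

```tsx
import { getIngest } from "@livepeer/react";
import * as Broadcast from "@livepeer/react/broadcast";

const ingestUrl = getIngest("your-stream-key", {
  baseUrl: "https://playback.livepeer.studio/webrtc",
});

export default () => (
  <Broadcast.Root ingestUrl={ingestUrl}>
    <Broadcast.Container>
      {/* One indicator per BroadcastStatus value. */}
      <Broadcast.StatusIndicator matcher="live">Live</Broadcast.StatusIndicator>
      <Broadcast.StatusIndicator matcher="pending">Connecting...</Broadcast.StatusIndicator>
      <Broadcast.StatusIndicator matcher="idle">Idle</Broadcast.StatusIndicator>
    </Broadcast.Container>
  </Broadcast.Root>
);
```
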
63 | ## Data Attributes
64 |
65 | ### `data-livepeer-controls-status-indicator`
66 |
67 | Serves to identify the component's role within the broadcast interface.
68 |
69 | ### `data-status`
70 |
71 | Shows the current broadcast status as a string.
72 |
73 | The status can be of the type: `"live" | "pending" | "idle"`
74 |
75 | ### `data-visible`
76 |
77 | Reflects the visibility status of the indicator, with `"true"` meaning it's
78 | currently visible based on the `matcher` or broadcast status and `"false"`
79 | denoting it's not visible.
80 |
--------------------------------------------------------------------------------
/sdks/react/broadcast/Video.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Video"
3 | description: "The `Video` component is a container for previewing
4 | broadcast video content."
5 | icon: video
6 | ---
7 |
8 |
20 |
21 |
22 | As you can see in the above example, the component will request permissions
23 | for video/audio input, so it prompts the user for access to their microphone
24 | and camera **upon component mount**.
25 |
26 | The `getUserMedia` browser API used to get the video/audio is only available in
27 | secure contexts (HTTPS).
28 |
29 |
30 |
31 | #### Features
32 |
33 | - Synchronizes state with the HTML5 video element
34 | - Keyboard hotkeys (`B` to toggle whether the broadcast is enabled, `L` to
35 | toggle audio, `V` to toggle video, `D` to broadcast display media, `C` for
36 | next video source, `M` for next audio source)
37 | - Adheres to [WAI-ARIA](https://w3c.github.io/aria/) design patterns
38 | - Handles all common browser APIs, and old WebKit/Firefox APIs (see
39 | [caniuse](https://caniuse.com/mdn-api_mediadevices_getusermedia))
40 |
41 |
42 | The Broadcast above will work with only keyboard shortcuts, without any custom
43 | controls.
44 |
45 |
46 | ### Anatomy
47 |
48 | Import the components and piece the parts together.
49 |
50 | ```tsx
51 | import * as Broadcast from "@livepeer/react/broadcast";
52 |
53 | export default () => (
54 |
55 |
56 |
57 | );
58 | ```
59 |
60 | ## Props
61 |
62 | The component accepts most props suitable for a HTML5 `video` tag, except for
63 | `src` and `poster` which are managed internally or provided explicitly through
64 | props.
65 |
66 | ## Data Attributes
67 |
68 | ### `data-livepeer-video`
69 |
70 | Serves to identify the component's role within the Broadcast.
71 |
72 | ### `data-enabled`
73 |
74 | Indicates the enabled state of the broadcast, `"true"` when broadcasting is
75 | enabled and `"false"` when it is not.
76 |
--------------------------------------------------------------------------------
/sdks/react/broadcast/get-ingest.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "getIngest"
3 | description:
4 | "`getIngest` is a utility function for converting various types of ingest
5 | information into a standardized WHIP URL format suitable for broadcasting."
6 | icon: link
7 | ---
8 |
9 | ```tsx
10 | import * as Broadcast from "@livepeer/react/broadcast";
11 | import { getIngest } from "@livepeer/react";
12 |
13 | // Usage with a stream key
14 | const streamKey = "your-stream-key-here";
15 | const whipUrl = getIngest(streamKey, {
16 | baseUrl: "https://playback.livepeer.studio/webrtc",
17 | });
18 |
19 | // Usage with Livepeer stream data
20 | const livepeerStreamData = {
21 | id: "stream-id",
22 | streamKey: "your-stream-key-here",
23 | // Other unused Livepeer stream data...
24 | };
25 |
26 | // This is either a string or null, depending on whether
27 | // the input was parsed successfully
28 | const whipUrlFromLivepeer = getIngest(livepeerStreamData);
29 |
30 | export default () => (
31 | <Broadcast.Root ingestUrl={whipUrlFromLivepeer}>
32 | {/* All child components. */}
33 | </Broadcast.Root>
34 | );
35 | ```
36 |
37 | ### Functionality
38 |
39 | `getIngest` simplifies the process of preparing ingest sources for broadcasting
40 | by standardizing them into WHIP URLs.
41 |
42 | #### Input Types
43 |
44 | The function supports a variety of input types to accommodate different
45 | broadcasting setups:
46 |
47 | - **String**: Directly returns the string if it's a valid URL. Constructs a WHIP
48 | URL using a base URL for stream keys.
49 | - **LivepeerStream**: Uses the `streamKey` from Livepeer stream data to generate
50 | a WHIP URL.
51 | - **CloudflareStreamData**: Extracts the URL from Cloudflare stream data
52 | objects.
53 |
54 | Optional parameters (`opts`) include:
55 |
56 | - **baseUrl**: The base URL for constructing WHIP URLs. Essential when the input
57 | is a stream key.
58 |
59 | #### Output Format
60 |
61 | The function outputs a WHIP URL string suitable for use in broadcasting setups,
62 | or `null` if the input can't be processed into a valid WHIP URL.
63 |
--------------------------------------------------------------------------------
/sdks/react/migration/3.x/client.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Client"
3 | description:
4 | "The livepeer Client is a framework-agnostic client that manages state and the
5 | connection to a Livepeer provider."
7 | ---
8 |
9 | The livepeer `Client` is a framework-agnostic client that manages state and
10 | connection to a Livepeer provider. The `createReactClient` function wraps the
11 | `Client` with a React-specific caching layer for faster queries.
12 |
13 | ## Usage
14 |
15 |
16 |
17 | ```tsx
18 | import { createReactClient } from '@livepeer/react';
19 | ```
20 |
21 |
22 |
23 |
24 | ```tsx
25 | import { createReactClient } from '@livepeer/react-native';
26 | ```
27 |
28 |
29 |
30 |
31 | The client can be created using `createReactClient`.
32 |
33 | ```tsx
34 | const client = createReactClient({
35 | provider: studioProvider({ apiKey: "yourStudioApiKey" }),
36 | });
37 | ```
38 |
39 | ## Configuration
40 |
41 | ### provider
42 |
43 | Livepeer provider interface for connecting to the network.
44 |
45 | ```tsx
46 | const client = createReactClient({
47 | provider: studioProvider({ apiKey: "yourStudioApiKey" }),
48 | });
49 | ```
50 |
51 | ### storage (optional)
52 |
53 |
54 |
55 |
56 | The default strategy to persist and cache data. Used for both state management
57 | and query caching. Defaults to `window.localStorage`.
58 |
59 | To disable the use of localStorage, we provide a convenient "no-op" storage
60 | option:
61 |
62 | ```tsx
63 | import {
64 | createReactClient,
65 | studioProvider,
66 | noopStorage,
67 | createStorage,
68 | } from "@livepeer/react";
69 |
70 | const client = createReactClient({
71 | provider: studioProvider({ apiKey: 'yourStudioApiKey' }),
72 | storage: createStorage({
73 | storage: noopStorage,
74 | }),
75 | });
76 | ```
77 |
78 |
79 |
80 |
81 | The default strategy to persist and cache data. Used for both state management
82 | and query caching. Defaults to no storage.
83 |
84 | ```tsx
85 | import AsyncStorage from '@react-native-async-storage/async-storage';
86 |
87 | const client = createReactClient({
88 | provider: studioProvider({ apiKey: 'yourStudioApiKey' }),
89 | storage: createStorage({ storage: AsyncStorage }),
90 | });
91 | ```
92 |
93 |
94 |
95 |
96 | ### queryClient (optional)
97 |
98 | The react-query
99 | [QueryClient](https://tanstack.com/query/v4/docs/reference/QueryClient) used to
100 | cache/deduplicate queries. Defaults to caching for 24 hours and no retries.
101 |
102 | ```ts
103 | const client = createReactClient({
104 | provider: studioProvider({ apiKey: "yourStudioApiKey" }),
105 | queryClient: new QueryClient({
106 | defaultOptions: {
107 | queries: {
108 | cacheTime: 1_000 * 60 * 60, // 1 hour
109 | retry: 100,
110 | },
111 | },
112 | }),
113 | });
114 | ```
115 |
--------------------------------------------------------------------------------
/sdks/react/migration/3.x/constants/abis.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Application Binary Interface"
3 | description: "All Livepeer ABIs available via import."
4 | ---
5 |
6 | All Livepeer ABIs are available via import.
7 |
8 | ## Protocol
9 |
10 |
11 |
12 | ```tsx
13 | import {
14 | BondingManagerABI,
15 | ControllerABI,
16 | LivepeerTokenABI,
17 | LivepeerTokenFaucetABI,
18 | MerkleSnapshotABI,
19 | MinterABI,
20 | PollABI,
21 | PollCreatorABI,
22 | RoundsManagerABI,
23 | ServiceRegistryABI,
24 | TicketBrokerABI,
25 | } from '@livepeer/react';
26 | ```
27 |
28 |
29 |
30 |
31 | ```tsx
32 | import {
33 | BondingManagerABI,
34 | ControllerABI,
35 | LivepeerTokenABI,
36 | LivepeerTokenFaucetABI,
37 | MerkleSnapshotABI,
38 | MinterABI,
39 | PollABI,
40 | PollCreatorABI,
41 | RoundsManagerABI,
42 | ServiceRegistryABI,
43 | TicketBrokerABI,
44 | } from '@livepeer/react-native';
45 | ```
46 |
47 |
48 |
49 |
50 | ## Bridging
51 |
52 |
53 |
54 | ```tsx
55 | import {
56 | ArbRetryableTxABI,
57 | InboxABI,
58 | L1BondingManagerABI,
59 | L1MigratorABI,
60 | L2LPTGatewayABI,
61 | L2MigratorABI,
62 | NodeInterfaceABI,
63 | } from '@livepeer/react';
64 | ```
65 |
66 |
67 |
68 |
69 | ```tsx
70 | import {
71 | ArbRetryableTxABI,
72 | InboxABI,
73 | L1BondingManagerABI,
74 | L1MigratorABI,
75 | L2LPTGatewayABI,
76 | L2MigratorABI,
77 | NodeInterfaceABI,
78 | } from '@livepeer/react-native';
79 | ```
80 |
81 |
82 |
83 |
--------------------------------------------------------------------------------
/sdks/react/migration/3.x/constants/contract-addresses.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Contract Addresses"
3 | description: "All contract addresses for the Livepeer protocol."
4 | ---
5 |
6 | All contract addresses for the Livepeer protocol.
7 |
8 | ## Addresses
9 |
10 | The addresses for the protocol contracts are provided in a simple import with
11 | types.
12 |
13 |
14 |
15 | ```tsx
16 | import {
17 | arbitrumOneAddress,
18 | arbitrumRinkebyAddress,
19 | mainnetAddress,
20 | rinkebyAddress,
21 | } from '@livepeer/react';
22 | ```
23 |
24 |
25 |
26 |
27 | ```tsx
28 | import {
29 | arbitrumOneAddress,
30 | arbitrumRinkebyAddress,
31 | mainnetAddress,
32 | rinkebyAddress,
33 | } from '@livepeer/react-native';
34 | ```
35 |
36 |
37 |
38 |
39 | These addresses will be expanded upon in a future version of Livepeer React to
40 | include more in-depth contract interaction.
41 |
42 | ## Chain IDs
43 |
44 | There are exports for the chain IDs the Livepeer protocol is currently deployed
45 | on. This includes both Arbitrum One/mainnet and testnet chains.
46 |
47 |
48 |
49 |
50 | ```tsx
51 | import {
52 | allChainId, mainnetChainId, testnetChainId
53 | } from '@livepeer/react';
54 | ```
55 |
56 |
57 |
58 |
59 | ```tsx
60 | import {
61 | allChainId, mainnetChainId, testnetChainId
62 | } from '@livepeer/react-native';
63 | ```
64 |
65 |
66 |
67 |
--------------------------------------------------------------------------------
/sdks/react/migration/3.x/providers/studio.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Studio"
3 | description: "Official Livepeer provider for Studio."
4 | ---
5 |
6 | The `studioProvider` configures the client with
7 | [Livepeer Studio](https://livepeer.studio).
8 |
9 | ## Usage
10 |
11 |
12 |
13 | ```tsx
14 | import { studioProvider } from '@livepeer/react';
15 | ```
16 |
17 |
18 |
19 |
20 | ```tsx
21 | import { studioProvider } from '@livepeer/react-native';
22 | ```
23 |
24 |
25 |
26 |
27 | The `studioProvider` can then be instantiated with an API key.
28 |
29 | ```tsx App.js
30 | const client = createReactClient({
31 | provider: studioProvider({ apiKey: "yourStudioApiKey" }),
32 | });
33 | ```
34 |
35 | ## Configuration
36 |
37 | ### apiKey
38 |
39 | A [CORS protected](/api-reference/overview/authentication) Studio API key. If no
40 | API key is provided, the provider will use a default key which is rate-limited.
41 |
42 | ```ts
43 | import { createReactClient, studioProvider } from "@livepeer/react";
44 |
45 | const client = createReactClient({
46 | provider: studioProvider({ apiKey: "yourStudioApiKey" }),
47 | });
48 | ```
49 |
50 | ### baseUrl
51 |
52 | A base URL for the provider. If no base URL is provided, the provider will use
53 | the `https://livepeer.studio` domain.
54 |
55 | ```ts
56 | import { createReactClient, studioProvider } from "@livepeer/react";
57 |
58 | const client = createReactClient({
59 | provider: studioProvider({
60 | apiKey: "yourStudioApiKey",
61 | baseUrl: "https://studio.my-domain.com/root/api",
62 | }),
63 | });
64 | ```
65 |
--------------------------------------------------------------------------------
/sdks/react/player/Controls.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Controls"
3 | description:
4 | "The `Controls` component provides an interactive container for all other
5 | media control elements such as `Play`, `Fullscreen`, etc."
6 | icon: gamepad
7 | ---
8 |
9 |
21 |
22 |
23 | `Controls` serves as the interactive layer for media control actions, dynamically displayed based on the media state, loading state, or presence of an error. It is designed to show automatically upon user interaction with the Container element and to hide based on the `Root` component's `autohide` property.
24 |
25 | By default, controls are shown during hover or interaction, mimicking the
26 | behavior of standard media players.
27 |
28 |
29 |
30 | ## Features
31 |
32 | - Automatic conditional rendering based on user interactions
33 | - Click/touch to pause/play media
34 | - Compatible with CSS animations for dynamic enter/exit animations
35 |
36 |
37 | The visibility of `Controls` is managed automatically, relying on the
38 | `autohide` property of the `Root` component to determine how long after a user
39 | interaction to wait before hiding.
40 |
41 |
42 | ## Anatomy
43 |
44 | Import the components and piece the parts together.
45 |
46 | ```tsx
47 | import * as Player from "@livepeer/react/player";
48 |
49 | export default () => (
50 |
51 |
52 |
53 | {/* Place interactive control elements here, like play/pause buttons, progress bars, etc. */}
54 |
55 |
56 |
57 | );
58 | ```
59 |
60 | ## Props
61 |
62 | ### `forceMount`
63 |
64 | Ensures the component is always in the DOM, useful for animation controls.
65 |
66 | ### `autoHide`
67 |
68 | Defines the auto-hide delay in milliseconds after user interaction. Default is
69 | 3000ms. Set to 0 to disable auto-hide.
70 |
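For instance, a minimal sketch that keeps the controls up for five seconds after the last interaction (the playback URL is the sample one from the `getSrc` docs):

```tsx
import { getSrc } from "@livepeer/react/external";
import * as Player from "@livepeer/react/player";

const src = getSrc("https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8");

export default () => (
  <Player.Root src={src}>
    <Player.Container>
      <Player.Video />
      {/* Hide 5 seconds after the last interaction; autoHide={0} disables hiding. */}
      <Player.Controls autoHide={5000}>
        {/* play/pause buttons, progress bars, etc. */}
      </Player.Controls>
    </Player.Container>
  </Player.Root>
);
```
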
71 | ## Data Attributes
72 |
73 | ### `data-livepeer-controls`
74 |
75 | Identifies the control component within the Player.
76 |
77 | ### `data-visible`
78 |
79 | Indicates if the controls are visible (`"true"`) or not (`"false"`), useful for
80 | CSS-based animations.
81 |
--------------------------------------------------------------------------------
/sdks/react/player/Error.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Error"
3 | description:
4 | "The `ErrorIndicator` component is a visual representation of error states in
5 | media playback based on error types."
6 | icon: triangle-exclamation
7 | ---
8 |
9 |
21 |
22 |
23 | The `Error` component can be shown for all error types, or a subset of them.
24 | It is usually recommended to be more granular in the error types, since some
25 | errors are more "fatal" and some cannot be recovered from.
26 |
27 |
28 | ## Features
29 |
30 | - Conditional rendering based on the error state, customizable with the
31 | `matcher` pattern
32 | - Dynamic data attributes for custom styling based on the current error state
33 |
34 | ## Anatomy
35 |
36 | Import the components and piece the parts together.
37 |
38 | ```tsx
39 | import * as Player from "@livepeer/react/player";
40 |
41 | export default () => (
42 |
43 |
44 |
45 |
46 |
47 | );
48 | ```
49 |
50 | ## Props
51 |
52 | The component accepts all props suitable for a div element, in addition to the
53 | following specific props:
54 |
55 | ### `forceMount`
56 |
57 | A boolean to force the component's presence in the DOM, useful for controlling
58 | animations with external libraries.
59 |
60 | ### `matcher`
61 |
62 | A prop to define the condition under which the `ErrorIndicator` should be
63 | visible. It can be a string indicating a broad category of errors (like `"all"`
64 | or `"not-permissions"`) or a more specific error type. Alternatively, it can be
65 | a function for custom logic, receiving the PlaybackError type and returning a
66 | boolean for whether it should be shown.
67 |
68 | ## Data Attributes
69 |
70 | ### `data-livepeer-error-indicator`
71 |
72 | Always present to indicate the component's role.
73 |
74 | ### `data-error-state`
75 |
76 | Indicates whether there is an error (`"true"` or `"false"`).
77 |
78 | ### `data-error-type`
79 |
80 | Specifies the type of the current error or `"none"` if no error is present.
81 |
82 | Could be one of:
83 | `"offline" | "access-control" | "fallback" | "permissions" | "unknown" | "none"`
84 |
85 | ### `data-visible`
86 |
87 | Reflects whether the indicator is currently visible (`"true"` or `"false"`).
88 |
89 | This is often used for dynamically applying enter/exit CSS animations.
90 |
--------------------------------------------------------------------------------
/sdks/react/player/Live.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Live"
3 | description:
4 | "The `LiveIndicator` component visually signals the live status of media
5 | playback."
6 | icon: headset
7 | ---
8 |
9 |
21 |
22 |
23 | `LiveIndicator` is designed to provide a clear, visual cue of the live status
24 | of the media playback.
25 |
26 |
27 | ## Features
28 |
29 | - Conditional rendering based on live playback status, customizable with the
30 | `matcher` pattern
31 | - Adheres to [WAI-ARIA](https://w3c.github.io/aria/) design patterns
32 | - Compatible with CSS animations for dynamic enter/exit animations
33 |
34 | ## Anatomy
35 |
36 | Import the components and piece the parts together.
37 |
38 | ```tsx
39 | import * as Player from "@livepeer/react/player";
40 |
41 | export default () => (
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 | );
53 | ```
54 |
55 | ## Props
56 |
57 | ### `forceMount`
58 |
59 | Keeps the component always mounted, aiding in managing animations.
60 |
61 | ### `matcher`
62 |
63 | Determines when the `LiveIndicator` is shown, accepting a boolean or a function
64 | for customized visibility logic.
65 |
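For example, a minimal sketch with one badge for live playback and one for recorded content (the labels are illustrative):

```tsx
import { getSrc } from "@livepeer/react/external";
import * as Player from "@livepeer/react/player";

const src = getSrc("https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8");

export default () => (
  <Player.Root src={src}>
    <Player.Container>
      {/* Shown only while the media is live. */}
      <Player.LiveIndicator>LIVE</Player.LiveIndicator>
      {/* Shown only when the media is not live. */}
      <Player.LiveIndicator matcher={false}>RECORDED</Player.LiveIndicator>
    </Player.Container>
  </Player.Root>
);
```
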
66 | ## Data Attributes
67 |
68 | ### `data-livepeer-live-indicator`
69 |
70 | Designates the live indicator component within the Player.
71 |
72 | ### `data-live`
73 |
74 | Denotes the live status (`"true"` for live, `"false"` for not live).
75 |
76 | ### `data-visible`
77 |
78 | Signifies if the indicator is visible (`"true"`) or hidden (`"false"`), useful
79 | for CSS-based animations.
80 |
--------------------------------------------------------------------------------
/sdks/react/player/Loading.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Loading"
3 | description:
4 | "The `LoadingIndicator` component is a visual indicator of the loading state
5 | of the media."
6 | icon: spinner
7 | ---
8 |
9 |
21 |
22 |
23 | The `Loading` component can be composed with any other elements. Typically it
24 | is combined with `Poster`.
25 |
26 |
27 | ## Features
28 |
29 | - Conditional rendering based on the loading state, customizable with the
30 | `matcher` pattern
31 | - Dynamic data attributes for custom styling based on the current loading state
32 |
33 | ## Anatomy
34 |
35 | Import the components and piece the parts together.
36 |
37 | ```tsx
38 | import * as Player from "@livepeer/react/player";
39 |
40 | export default () => (
41 |
42 |
43 |
44 |
45 |
46 | );
47 | ```
48 |
49 | ## Props
50 |
51 | The component accepts all props suitable for a div element, in addition to the
52 | following specific props:
53 |
54 | ### `forceMount`
55 |
56 | A boolean value that, when set to true, ensures the component is always mounted.
57 | This is particularly useful for managing animations with external animation
58 | libraries.
59 |
60 | ### `matcher`
61 |
62 | A prop intended to define the circumstances under which the `LoadingIndicator`
63 | should be visible. It can be a direct boolean comparison with the loading state
64 | or a function that provides custom logic for visibility determination.
65 |
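For example, a minimal sketch of a loading overlay that is only rendered while the media is loading (the markup is illustrative; pass `matcher` for custom visibility logic):

```tsx
import { getSrc } from "@livepeer/react/external";
import * as Player from "@livepeer/react/player";

const src = getSrc("https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8");

export default () => (
  <Player.Root src={src}>
    <Player.Container>
      <Player.Video />
      {/* Visible while loading; combine with Poster for a nicer placeholder. */}
      <Player.LoadingIndicator>
        <div aria-label="Loading">Loading...</div>
      </Player.LoadingIndicator>
    </Player.Container>
  </Player.Root>
);
```
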
66 | ## Data Attributes
67 |
68 | ### `data-livepeer-loading-indicator`
69 |
70 | Serves to identify the component's role.
71 |
72 | ### `data-loading`
73 |
74 | Displays the current loading state, `"true"` when media is loading and `"false"`
75 | when it's not.
76 |
77 | ### `data-visible`
78 |
79 | Indicates the visibility status of the indicator, with `"true"` meaning it's
80 | currently visible and `"false"` denoting it's not.
81 |
82 | This is often used for dynamically applying enter/exit CSS animations.
83 |
--------------------------------------------------------------------------------
/sdks/react/player/PictureInPicture.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Picture in Picture"
3 | description:
4 | "The `PictureInPictureTrigger` component toggles the picture-in-picture mode."
5 | icon: clone
6 | ---
7 |
8 |
20 |
21 | ## Features
22 |
23 | - Automatic conditional rendering based on browser API availability,
24 | customizable with the `matcher` pattern
25 | - Supports both the [w3c](https://w3c.github.io/picture-in-picture/) standard
26 | (which most modern browsers support), as well as the
27 | [older Safari/iOS spec](https://developer.apple.com/documentation/webkitjs/adding_picture_in_picture_to_your_safari_media_controls)
28 | (see the browsers which support Picture-in-Picture on
29 | [caniuse](https://caniuse.com/picture-in-picture))
30 |
31 | ## Anatomy
32 |
33 | Import the components and piece the parts together.
34 |
35 | ```tsx
36 | import * as Player from "@livepeer/react/player";
37 |
38 | export default () => (
39 |
40 |
41 |
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 | );
50 | ```
51 |
52 | ## Props
53 |
54 | The component accepts all props suitable for a button element.
55 |
56 | ## Data Attributes
57 |
58 | ### `data-livepeer-controls-picture-in-picture-trigger`
59 |
60 | Serves to identify the component's role.
61 |
62 | ### `data-picture-in-picture`
63 |
64 | Indicates the current picture-in-picture state, `"true"` when the feature is
65 | active and `"false"` otherwise.
66 |
67 | ### `data-visible`
68 |
69 | Reflects the visibility status of the trigger, with `"true"` meaning it's
70 | currently visible (i.e., picture-in-picture is supported and the media is not in
71 | fullscreen mode) and `"false"` denoting it's not visible.
72 |
73 | This is often used for dynamically applying enter/exit CSS animations.
74 |
--------------------------------------------------------------------------------
/sdks/react/player/Play.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Play"
3 | description:
4 | "The `PlayPauseTrigger` component enables users to toggle playback and the
5 | `PlayingIndicator` provides a visual cue for the current playback state."
6 | icon: play
7 | ---
8 |
9 |
21 |
22 |
23 | `PlayPauseTrigger` and `PlayingIndicator` integrate together to form the
24 | typical play/pause interaction for media players. They do not have to be used
25 | together and can combine with other components.
26 |
27 |
28 | ## Features
29 |
30 | - Controls the playback state and conditionally renders based on playing status,
31 | customizable with the `matcher` pattern
32 | - Keyboard hotkeys and adheres to [WAI-ARIA](https://w3c.github.io/aria/) design
33 | patterns
34 |
35 | ## Anatomy
36 |
37 | Import the components and piece the parts together.
38 |
39 | ```tsx
40 | import * as Player from "@livepeer/react/player";
41 |
42 | export default () => (
43 |
44 |
45 |
46 |
47 |
48 |
49 |
50 |
51 |
52 |
53 |
54 |
55 | );
56 | ```
57 |
58 | ## Props
59 |
60 | ### `PlayPauseTrigger`
61 |
62 | Accepts all props for a typical button element.
63 |
64 | ### `PlayingIndicator`
65 |
66 | #### `forceMount`
67 |
68 | Ensures the component is always mounted, beneficial for animation consistency.
69 |
70 | #### `matcher`
71 |
72 | Defines the condition under which the `PlayingIndicator` displays, using a
73 | boolean or a custom function for tailored visibility logic.
74 |
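For example, a minimal sketch pairing the trigger with an indicator that only appears while playback is paused (labels are illustrative):

```tsx
import { getSrc } from "@livepeer/react/external";
import * as Player from "@livepeer/react/player";

const src = getSrc("https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8");

export default () => (
  <Player.Root src={src}>
    <Player.Container>
      <Player.Video />
      <Player.Controls>
        <Player.PlayPauseTrigger>Play / Pause</Player.PlayPauseTrigger>
        {/* Rendered only while playback is paused. */}
        <Player.PlayingIndicator matcher={false}>Paused</Player.PlayingIndicator>
      </Player.Controls>
    </Player.Container>
  </Player.Root>
);
```
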
75 | ## Data Attributes
76 |
77 | ### PlayPauseTrigger
78 |
79 | #### `data-livepeer-play-pause-trigger`
80 |
81 | Identifies the play/pause button within the Player.
82 |
83 | #### `data-playing`
84 |
85 | Shows the playback status (`"true"` for playing, `"false"` for paused).
86 |
87 | #### `data-visible`
88 |
89 | Signifies if the indicator is visible (`"true"`) or hidden (`"false"`), useful
90 | for CSS-based animations and visibility adjustments.
91 |
92 | ### PlayingIndicator
93 |
94 | #### `data-livepeer-playing-indicator`
95 |
96 | Marks the playing status indicator component.
97 |
98 | #### `data-playing`
99 |
100 | Shows the playback status (`"true"` for playing, `"false"` for paused).
101 |
102 | #### `data-visible`
103 |
104 | Signifies if the indicator is visible (`"true"`) or hidden (`"false"`), useful
105 | for CSS-based animations.
106 |
--------------------------------------------------------------------------------
/sdks/react/player/Portal.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Portal"
3 | description:
4 | "The `Portal` component allows projection of content into different parts of
5 | the DOM."
6 | icon: person-to-portal
7 | ---
8 |
9 |
21 |
22 |
23 | The `Portal` component is a simple wrapper over
24 | [@radix-ui/react-portal](https://www.radix-ui.com/primitives/docs/utilities/portal#portal).
25 | It inherits all of the props and behavior documented for that component.
26 |
27 |
28 | ## Features
29 |
30 | - Provides an easy way to portal controls out of the Root components
31 |
32 | ## Anatomy
33 |
34 | Import the components and piece the parts together.
35 |
36 | ```tsx
37 | import * as Player from "@livepeer/react/player";
38 |
39 | export default () => (
40 |
41 |
42 | {/* Place portalled elements here, which can still
43 | consume the React Context from the Root, but are
44 | appended to the document */}
45 |
46 |
47 | );
48 | ```
49 |
50 | ## Props
51 |
52 | ### `children`
53 |
54 | React nodes that are to be rendered inside the portal.
55 |
56 | ### `container`
57 |
58 | An optional prop specifying a container element where the portal's children
59 | should be rendered. If not provided, the children are appended to the body of
60 | the document by default.
61 |
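For example, a minimal sketch (client-side only; the element id is a placeholder) that portals a control into a specific element instead of `document.body`:

```tsx
import { getSrc } from "@livepeer/react/external";
import * as Player from "@livepeer/react/player";

const src = getSrc("https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8");

export default () => (
  <Player.Root src={src}>
    <Player.Container>
      <Player.Video />
      {/* Portalled children still read the Player context from Root. */}
      <Player.Portal container={document.getElementById("player-controls-slot")}>
        {/* portalled elements */}
      </Player.Portal>
    </Player.Container>
  </Player.Root>
);
```
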
--------------------------------------------------------------------------------
/sdks/react/player/Poster.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Poster"
3 | description:
4 | "The `Poster` component displays a visual placeholder for media content prior
5 | to playback."
6 | icon: image
7 | ---
8 |
9 |
21 |
22 |
23 | When used with Livepeer's playback info API, `getSrc` will automatically parse
24 | any available thumbnail images in the JSON response and pass them to `Poster`.
25 | The example above assumes that the playback info has a "Thumbnail (JPEG)"
26 | source.
27 |
28 |
29 | ## Features
30 |
31 | - Displays a preview image as a placeholder while media is loading
32 | - Conditionally renders based on the availability of a thumbnail image
33 |
34 | ## Anatomy
35 |
36 | Import the components and piece the parts together.
37 |
38 | ```tsx
39 | import * as Player from "@livepeer/react/player";
40 |
41 | export default () => (
42 |
43 |
44 |
45 |
46 |
47 |
48 |
49 | );
50 | ```
51 |
52 | ## Props
53 |
54 | `Poster` accepts all typical HTML `img` props.
55 |
56 |
57 | It is usually used alongside a `Player.Video` with poster set to `null`, so
58 | that the default poster URL is not parsed and set on the HTML5 video element.
59 |
60 | It can be composed with custom img renderers, like Next.js Image, with the
61 | `asChild` prop, so you can customize the image loading to go through your
62 | optimization pipeline.
63 |
64 |
65 |
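For example, a minimal sketch of that pairing (the class names are illustrative): the video element's own poster is disabled and the thumbnail is rendered through `Poster` instead, where it can be styled freely.

```tsx
import { getSrc } from "@livepeer/react/external";
import * as Player from "@livepeer/react/player";

const src = getSrc("https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8");

export default () => (
  <Player.Root src={src}>
    <Player.Container>
      {/* Disable the default poster on the video element... */}
      <Player.Video poster={null} />
      {/* ...and render the thumbnail through Poster so it can be styled. */}
      <Player.Poster className="h-full w-full object-cover" />
    </Player.Container>
  </Player.Root>
);
```
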
66 | ### `forceMount`
67 |
68 | Ensures consistent presence in the DOM, aiding in the smooth application of
69 | animations.
70 |
71 | ### `src`
72 |
73 | Specifies the URL for the poster image, allowing for a customized preview. If
74 | not set, the component attempts to retrieve a poster from the media's context.
75 |
76 | ## Data Attributes
77 |
78 | ### `data-livepeer-poster`
79 |
80 | Identifies the poster component within the Player.
81 |
82 | ### `data-visible`
83 |
84 | Signifies if the poster is visible (`"true"`) or hidden (`"false"`), useful for
85 | CSS-based animations.
86 |
--------------------------------------------------------------------------------
/sdks/react/player/Time.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Time"
3 | description: "The `Time` component provides a display of the
4 | current time, total duration, and progress of the media."
5 | icon: clock
6 | ---
7 |
8 |
20 |
21 | ## Features
22 |
23 | - Automatic display of friendly time from state store
24 | - Handles live and static assets (e.g. no fixed duration vs fixed duration)
25 |
26 | ## Anatomy
27 |
28 | Import the components and piece the parts together.
29 |
30 | ```tsx
31 | import * as Player from "@livepeer/react/player";
32 |
33 | export default () => (
34 |
35 |
36 |
37 |
38 |
39 |
40 |
41 |
42 |
43 |
44 | );
45 | ```
46 |
47 | ## Props
48 |
49 | The component accepts all props suitable for a span element, except for
50 | `children`, since the formatted time will be rendered as the only child.
51 |
52 | ## Data Attributes
53 |
54 | ### `data-livepeer-controls-time`
55 |
56 | Serves to identify the component's role in the playback interface.
57 |
58 | ### `data-duration`
59 |
60 | Displays the total duration of the media.
61 |
62 | ### `data-progress`
63 |
64 | Indicates the current progress of the media playback, usually represented as a
65 | percentage or time elapsed.
66 |
67 | ### `data-live`
68 |
69 | Shows the live state of the media, `"true"` for live streams and `"false"` for
70 | pre-recorded content.
71 |
--------------------------------------------------------------------------------
/sdks/react/player/Video.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "Video"
3 | description: "The `Video` component renders the actual video
4 | content and manages various aspects of media playback."
5 | icon: video
6 | ---
7 |
8 |
20 |
21 | #### Features
22 |
23 | - Synchronizes state with the HTML5 video element
24 | - Keyboard hotkeys (`←`/`→` to seek, `K` to pause, `M` to toggle mute, `F`
25 | fullscreen, `I` picture-in-picture)
26 | - Adheres to [WAI-ARIA](https://w3c.github.io/aria/) design patterns
27 |
28 | ### Anatomy
29 |
30 | Import the components and piece the parts together.
31 |
32 | ```tsx
33 | import * as Player from "@livepeer/react/player";
34 |
35 | export default () => (
36 |
37 |
38 |
39 | );
40 | ```
41 |
42 | ## Props
43 |
44 | The component accepts most props suitable for a HTML5 `video` tag, except for
45 | `src`, `poster`, `autoPlay`, and `preload`, which are managed internally or
46 | provided explicitly through props.
47 |
48 | ### `poster`
49 |
50 | Optional. Controls the poster source. By default, it uses the thumbnail from the
51 | `Root` src input. Set to `null` to disable the default poster image from the
52 | `Root` src.
53 |
54 | ### `hlsConfig`
55 |
56 | Optional. Controls the HLS.js config. By default, it adds JWT or Access Token
57 | headers to HLS segment requests.
58 |
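For example, a hedged sketch that tunes buffering (assuming `hlsConfig` accepts standard HLS.js config fields such as `maxBufferLength`) while keeping the default token-header behavior:

```tsx
import { getSrc } from "@livepeer/react/external";
import * as Player from "@livepeer/react/player";

const src = getSrc("https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8");

export default () => (
  <Player.Root src={src}>
    <Player.Container>
      {/* Target roughly 30 seconds of forward buffer. */}
      <Player.Video hlsConfig={{ maxBufferLength: 30 }} />
    </Player.Container>
  </Player.Root>
);
```
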
59 | ## Data Attributes
60 |
61 | ### `data-livepeer-video`
62 |
63 | Serves to identify the component's role within the Player.
64 |
65 | ### `data-livepeer-source-type`
66 |
67 | Indicates the type of the current source, such as "none" when there's no source,
68 | or other types based on the media being played.
69 |
70 | This can be of the type: `"audio" | "video" | "hls" | "webrtc" | "none"`
71 |
--------------------------------------------------------------------------------
/sdks/react/player/get-src.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: "getSrc"
3 | description:
4 | "`getSrc` is a utility function for parsing various types of playback
5 | information and converting them into a standardized format suitable for the
6 | Player."
7 | icon: link
8 | ---
9 |
10 | ```tsx
11 | import { getSrc } from "@livepeer/react/external";
12 | import * as Player from "@livepeer/react/player";
13 |
14 | // An example response from the livepeer playback info API endpoint
15 | const vodSource = {
16 | type: "vod",
17 | meta: {
18 | playbackPolicy: null,
19 | source: [
20 | {
21 | hrn: "HLS (TS)",
22 | type: "html5/application/vnd.apple.mpegurl",
23 | url: "https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8",
24 | },
25 | ],
26 | },
27 | };
28 |
29 | // This is either an array of `Src` or null,
30 | // and can be passed into the Player.Root `src`
31 | const src = getSrc(vodSource);
32 |
33 | export default () => (
34 | <Player.Root src={src}>{/* All child components. */}</Player.Root>
35 | );
36 | ```
37 |
38 | #### Features
39 |
40 | - Supports inputs like Livepeer playback info, Cloudflare stream data, Mux URLs,
41 | arrays of strings, and single strings
42 | - Transforms into `Src[]` which contains essential playback information, along
43 | with supportive metadata like thumbnails and VTT files.
44 |
45 | ### Functionality
46 |
47 | #### Input Types
48 |
49 | The function can process the following input types:
50 |
51 | - `LivepeerPlaybackInfo`: Extracts the 'source' array from the 'meta' property
52 | of the playback info response.
53 | - `LivepeerSource` or `LivepeerSource[]`: Uses the source object(s) directly.
54 | - `CloudflareStreamData`: Retrieves the stream data from a Cloudflare stream
55 | data object.
56 | - `string[]`: Treats each string as a URL and creates an array of Source
57 | objects (see the sketch below).
58 | - `string`: Treats the string as a URL and creates a single Source object.
59 |
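For example, the string and string-array forms are simple passthroughs (the URL below is the sample one used above):

```tsx
import { getSrc } from "@livepeer/react/external";

// A single URL string...
const src = getSrc("https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8");

// ...or an array of URL strings, each becoming its own Src entry.
const srcs = getSrc([
  "https://lp-playback.com/hls/f5eese9wwl88k4g8/index.m3u8",
]);
```
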
60 | #### Output Format
61 |
62 | The output is an array of `Src` objects or `null` if the input is invalid or
63 | empty.
64 |
--------------------------------------------------------------------------------
/self-hosting/overview.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Overview
3 | icon: flask
4 | ---
5 |
6 | Learn how to self-host Livepeer Studio using a single Docker command.
7 |
8 |
9 | **Self-hosting is in Alpha**, but we're excited for the first group of hackers
10 | to start working on it!
11 |
12 |
13 | ### Current capabilities:
14 |
15 | - Boots up a full-stack Livepeer Studio experience with a single command, either
16 | locally or on a hosted server.
17 | - Capable of transcoding on the Livepeer Network.
18 | - Allows you to develop applications against the Livepeer Studio API locally
19 | and then move to the hosted version at
20 | [livepeer.studio](https://livepeer.studio) when you're ready to go to
21 | production.
22 | - Bundles a fully-local offchain Livepeer gateway and orchestrator, so that you
23 | may test transcoding with no external dependencies.
24 |
25 | ### Current limitations:
26 |
27 | - No usage or billing data.
28 | - No GPU transcoding support. We recommend using very low-bitrate test files,
29 | especially if using Docker for Mac or Docker for Windows. The built-in
30 | profiles for livestream transcoding use a single 240p low-quality rendition.
31 | - It is currently required to run Livepeer Studio as a Docker image. A
32 | single-binary fully-static build is in the works.
33 |
--------------------------------------------------------------------------------
/self-hosting/self-hosting-with-docker.mdx:
--------------------------------------------------------------------------------
1 | ---
2 | title: Self-Hosting with Docker
3 | icon: person-running
4 | ---
5 |
6 | First, select a directory for persisting your database and video content; in
7 | this example we will be using `$HOME/livepeer-catalyst`.
8 |
9 | ```shell
10 | CATALYST_DIR="$HOME/livepeer-catalyst"
11 | mkdir -p $CATALYST_DIR
12 | docker run \
13 | -v $CATALYST_DIR:/data \
14 | --rm \
15 | -it \
16 | --name catalyst \
17 | --shm-size=4gb \
18 | -p 8888:8888 \
19 | -p 5432:5432 \
20 | -p 1935:1935 \
21 | -p 4242:4242 \
22 | -p 3478:3478 \
23 | -p 3478:3478/udp \
24 | -p 5349:5349 \
25 | -p 40000-40100:40000-40100/udp \
26 | livepeer/catalyst:next
27 | ```
28 |
29 | You will be greeted with a very large amount of log output; give it a minute or
30 | so to boot up. You can then connect to your local box instance:
31 |
32 | Address: [http://localhost:8888](http://localhost:8888)
33 | Email: `admin@example.com`, Password: `livepeer`
34 |
35 | You can also access the MistServer dashboard to access some underlying
36 | livestreaming infrastructure:
37 |
38 | Address: [http://localhost:4242](http://localhost:4242)
39 | Username: `test`, Password: `test`
40 |
41 | To get you started, the database snapshot includes a few predefined streams.
42 |
43 | | Stream | Stream Key | Playback ID | Recording enabled? |
44 | | ---------------- | ------------------- | ------------ | ------------------ |
45 | | [tiny-transcode] | 2222-2222-2222-2222 | 222222222222 | No |
46 | | [tiny-recording] | 4444-4444-4444-4444 | 444444444444 | Yes |
47 |
48 | [tiny-transcode]:
49 | http://localhost:8888/dashboard/streams/22222222-2222-2222-2222-222222222222
50 | [tiny-recording]:
51 | http://localhost:8888/dashboard/streams/44444444-4444-4444-4444-444444444444
52 |
53 | To properly test a livestream input comparable to OBS output, you will want
54 | a low-bitrate test file with no B-frames and a short GOP length.
55 | [Here's a sample appropriately-formatted Big Buck Bunny file you can use][BBB].
56 | To stream in to your local box, you can use an `ffmpeg` command such as:
57 |
58 | ```shell
59 | curl -LO https://test-harness-gcp.livepeer.fish/Big_Buck_Bunny_360p_1sGOP_NoBFrames.mp4
60 | ffmpeg \
61 | -stream_loop \
62 | -1 \
63 | -re \
64 | -i Big_Buck_Bunny_360p_1sGOP_NoBFrames.mp4 \
65 | -c copy \
66 | -f flv \
67 | rtmp://localhost/live/2222-2222-2222-2222
68 | ```
69 |
70 | [BBB]:
71 | https://test-harness-gcp.livepeer.fish/Big_Buck_Bunny_360p_1sGOP_NoBFrames.mp4
72 |
--------------------------------------------------------------------------------
/style.css:
--------------------------------------------------------------------------------
1 | /* img[alt="dark logo"],
2 | img[alt="light logo"] {
3 | max-width: 180px;
4 | } */
5 |
--------------------------------------------------------------------------------