├── .github
└── workflows
│ └── build_docs.yml
├── .gitignore
├── CHANGELOG.md
├── LICENSE
├── README.md
├── README_FRAME_TIMES.md
├── analysis_options.yaml
├── doc
└── website
│ ├── .gitignore
│ ├── analysis_options.yaml
│ ├── bin
│ └── ffmpeg_cli_docs.dart
│ ├── pubspec.yaml
│ └── source
│ ├── _data.yaml
│ ├── _includes
│ └── components
│ │ └── code_two_column.jinja
│ ├── images
│ └── branding
│ │ ├── logo.png
│ │ └── social.png
│ └── index.md
├── example
├── assets
│ ├── Butterfly-209.mp4
│ └── bee.mp4
├── bin
│ ├── main.dart
│ └── main_raw.dart
├── output
│ └── test_render.mp4
└── pubspec.yaml
├── lib
├── ffmpeg_cli.dart
└── src
│ ├── colors.dart
│ ├── ffmpeg
│ ├── ffmpeg_command.dart
│ ├── ffmpeg_command_builder.dart
│ ├── filters
│ │ ├── audio_mix_filter.dart
│ │ ├── concat_filter.dart
│ │ ├── copy_filter.dart
│ │ ├── crop_filter.dart
│ │ ├── custom_filter.dart
│ │ ├── delay_filter.dart
│ │ ├── fade_filter.dart
│ │ ├── fps_filter.dart
│ │ ├── null_filter.dart
│ │ ├── overlay_filter.dart
│ │ ├── presentation_timestamp_filter.dart
│ │ ├── sample_aspect_ratio_filter.dart
│ │ ├── scale_filter.dart
│ │ ├── subtitle_filter.dart
│ │ ├── sws.dart
│ │ ├── tpad_filter.dart
│ │ ├── trim_filter.dart
│ │ └── volume_filter.dart
│ ├── log_level.dart
│ └── video_size.dart
│ ├── ffprobe
│ ├── ffprobe.dart
│ ├── ffprobe_json.dart
│ └── ffprobe_json.g.dart
│ ├── logging.dart
│ └── time.dart
├── pubspec.yaml
└── test
├── ffmpeg
└── ffmpeg_command_test.dart
└── time_test.dart
/.github/workflows/build_docs.yml:
--------------------------------------------------------------------------------
1 | name: Build and deploy documentation website
2 | on:
3 |   push:
4 |     branches:
5 |       - main
6 |   workflow_dispatch:
7 | 
8 | jobs:
9 |   build:
10 |     runs-on: ubuntu-latest
11 |     defaults:
12 |       run:
13 |         working-directory: ./doc/website
14 |     steps:
15 |       # Checkout the repository
16 |       - uses: actions/checkout@v3
17 | 
18 |       # Setup a Dart environment
19 |       - uses: dart-lang/setup-dart@v1
20 | 
21 |       # Download all the packages that the app uses
22 |       - run: dart pub get
23 | 
24 |       # Build the static site
25 |       - run: dart run bin/ffmpeg_cli_docs.dart
26 |         env:
27 |           GHUB_DOC_WEBSITE_TOKEN: ${{ vars.GHUB_DOC_WEBSITE_TOKEN }}
28 | 
29 |       # Zip and upload the static site (path is repo-root relative - `working-directory` only applies to `run` steps).
30 |       - name: Upload artifact
31 |         uses: actions/upload-pages-artifact@v1
32 |         with:
33 |           path: ./doc/website/build
34 | 
35 |   deploy:
36 |     name: Deploy
37 |     needs: build
38 |     runs-on: ubuntu-latest
39 | 
40 |     # Grant GITHUB_TOKEN the permissions required to make a Pages deployment
41 |     permissions:
42 |       pages: write # to deploy to Pages
43 |       id-token: write # to verify the deployment originates from an appropriate source
44 | 
45 |     environment:
46 |       name: github-pages
47 |       url: ${{ steps.deployment.outputs.page_url }}
48 | 
49 |     steps:
50 |       - name: Deploy to GitHub Pages
51 |         id: deployment
52 |         uses: actions/deploy-pages@v2
53 | 
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore dev validation assets
2 | /assets
3 |
4 | # Dart
5 | .dart_tool/
6 | .packages
7 | build/
8 | pubspec.lock
9 |
10 | # Documentation
11 | /doc/api
12 |
13 | # IntelliJ
14 | *.iml
15 | *.ipr
16 | *.iws
17 | .idea/
18 |
19 | # Mac
20 | .DS_Store
--------------------------------------------------------------------------------
/CHANGELOG.md:
--------------------------------------------------------------------------------
1 | ## [0.3.0] - Feb, 2024
2 | [BREAKING] - Made the FFMPEG executable path configurable so that FFMPEG can be used from non-standard locations
3 |
4 | ## [0.2.0] - Oct, 2023
5 | New runner, new filters, filter updates
6 |
7 | * Split single command into `FfmpegCommand.simple` and `FfmpegCommand.complex`, per FFMPEG documentation
8 | * A simple command has a single pipe of filters
9 | * A complex command has an entire graph of filters
10 | * New filters
11 | * `SubtitleFilter`
12 | * Adjusted filters
13 | * Renamed `RawFilter` to `CustomFilter`
14 | * A number of filters had properties added or adjusted
15 |
16 | ## [0.1.0] - April, 2022: Initial release
17 |
18 | * `Ffmpeg` and `FfmpegCommand` - executes FFMPEG CLI commands from Dart
19 | * `FfmpegBuilder` - (optional) builder that constructs `FfmpegCommand`s, making it easier to correlate stream IDs
20 | * `Ffprobe` - partial support for the `ffprobe` CLI.
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright (c) 2022 Declarative, Inc.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
4 |
5 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
6 |
7 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 |
8 |
9 |
10 |
11 |
12 |
13 | Check out our Usage Guide
14 |
15 | ---
16 | ## What is FFMPEG?
17 | [FFMPEG](https://ffmpeg.org/ffmpeg.html) is a very popular, longstanding tool for reading, writing, and streaming audio and video content. Most developers use FFMPEG through its command-line interface (CLI), because that's much easier than interfacing with the C code upon which FFMPEG is built.
18 |
19 | ## What is `ffmpeg_cli`?
20 | This package allows you to configure FFMPEG CLI commands with Dart code.
21 |
22 | `ffmpeg_cli` purposefully retains the complexity of FFMPEG commands so that anything the FFMPEG CLI can do, the `ffmpeg_cli` package can do.
23 |
24 | ## Quickstart
25 | First, make sure that `ffmpeg` is installed on your device, and is available on your system path.
26 |
27 | Compose an FFMPEG command with Dart:
28 |
29 | ```dart
30 | // Define an output stream, which will map the filter
31 | // graph to the video file. The ID names can be whatever
32 | // you'd like.
33 | const outputStream = FfmpegStream(
34 | videoId: "[final_v]",
35 | audioId: "[final_a]",
36 | );
37 |
38 | // Compose your desired FFMPEG command, with inputs, filters, arguments,
39 | // and an output location.
40 | final command = FfmpegCommand.complex(
41 | inputs: [
42 | FfmpegInput.asset("assets/intro.mp4"),
43 | FfmpegInput.asset("assets/content.mp4"),
44 | FfmpegInput.asset("assets/outro.mov"),
45 | ],
46 | args: [
47 | // Map the filter graph to the video file output.
48 | CliArg(name: 'map', value: outputStream.videoId!),
49 | CliArg(name: 'map', value: outputStream.audioId!),
50 | const CliArg(name: 'vsync', value: '2'),
51 | ],
52 | filterGraph: FilterGraph(
53 | chains: [
54 | FilterChain(
55 | inputs: [
56 | // Send all 3 video assets into the concat filter.
57 | const FfmpegStream(videoId: "[0:v]", audioId: "[0:a]"),
58 | const FfmpegStream(videoId: "[1:v]", audioId: "[1:a]"),
59 | const FfmpegStream(videoId: "[2:v]", audioId: "[2:a]"),
60 | ],
61 | filters: [
62 | // Concatenate 3 segments.
63 | ConcatFilter(
64 | segmentCount: 3,
65 | outputVideoStreamCount: 1,
66 | outputAudioStreamCount: 1,
67 | ),
68 | ],
69 | outputs: [
70 | // Give the output stream the given audio/video IDs
71 | outputStream,
72 | ],
73 | ),
74 | ],
75 | ),
76 | outputFilepath: "/my/output/file.mp4",
77 | );
78 |
79 | // Execute command
80 | final process = await Ffmpeg().run(command: command);
81 | ```
82 |
83 | ## How ffprobe support is managed
84 | There are a lot of properties in `ffprobe`. Many of these properties can present in
85 | varying formats, which are not effectively documented.
86 |
87 | The approach to `ffprobe` is to add missing result parameters as they are discovered
88 | and to add parsing functionality per property as the various possible formats are
89 | discovered. In other words, only do what is necessary in the given moment because
90 | the overall scope is too large and difficult to discover.
91 |
--------------------------------------------------------------------------------
/README_FRAME_TIMES.md:
--------------------------------------------------------------------------------
1 | ### How videos encode frame times
2 | Modern video containers do not store a constant frame rate. They store a timestamp for each frame:
3 |
4 | **Conceptual frame times (not quite accurate, read further):**
5 | Frame Frame Time
6 | 0 0.00
7 | 1 0.04
8 | 2 0.08
9 | 3 0.12
10 |
11 | In practice, rather than store the explicit timestamp of a given frame, two numbers are taken together to synthesize a frame time: the **Presentation TimeStamp (pts)**, and the **timebase (tb)**.
12 |
13 | **The timebase is a fundamental unit of time**, such that every frame appears on a multiple of the timebase. Or, said differently, the timebase is the greatest common denominator of all frame times.
14 |
15 | **The presentation timestamp refers to a specific frame's time as a multiple of the timebase**. Therefore, a given frame's actual time is: **pts * tb**.
16 |
17 | **Example:**
18 |
19 | Timebase = 1/75; Timescale = 75
20 |
21 | Frame pts pts_time
22 | 0 0 0 x 1/75 = 0.00
23 | 1 3 3 x 1/75 = 0.04
24 | 2 6 6 x 1/75 = 0.08
25 | 3 9 9 x 1/75 = 0.12
26 |
27 | **Using a timebase and presentation timestamp allows videos to encode variable frame rates, rather than limiting the entire video to a static frame rate.**
28 |
--------------------------------------------------------------------------------
/analysis_options.yaml:
--------------------------------------------------------------------------------
1 | include: package:flutter_lints/flutter.yaml
2 |
3 | analyzer:
4 | exclude:
5 | - doc/**
6 |
--------------------------------------------------------------------------------
/doc/website/.gitignore:
--------------------------------------------------------------------------------
1 | # Build output
2 | /build
3 |
4 | # https://dart.dev/guides/libraries/private-files
5 | # Created by `dart pub`
6 | .dart_tool/
7 |
8 | # Android Studio and IntelliJ IDEA
9 | .idea
10 |
11 | .DS_Store
12 |
--------------------------------------------------------------------------------
/doc/website/analysis_options.yaml:
--------------------------------------------------------------------------------
1 | # This file configures the static analysis results for your project (errors,
2 | # warnings, and lints).
3 | #
4 | # This enables the 'recommended' set of lints from `package:lints`.
5 | # This set helps identify many issues that may lead to problems when running
6 | # or consuming Dart code, and enforces writing Dart using a single, idiomatic
7 | # style and format.
8 | #
9 | # If you want a smaller set of lints you can change this to specify
10 | # 'package:lints/core.yaml'. These are just the most critical lints
11 | # (the recommended set includes the core lints).
12 | # The core lints are also what is used by pub.dev for scoring packages.
13 |
14 | include: package:lints/recommended.yaml
15 |
16 | # Uncomment the following section to specify additional rules.
17 |
18 | # linter:
19 | # rules:
20 | # - camel_case_types
21 |
22 | # analyzer:
23 | # exclude:
24 | # - path/to/excluded/files/**
25 |
26 | # For more information about the core and recommended set of lints, see
27 | # https://dart.dev/go/core-lints
28 |
29 | # For additional information about configuring this file, see
30 | # https://dart.dev/guides/language/analysis-options
31 |
--------------------------------------------------------------------------------
/doc/website/bin/ffmpeg_cli_docs.dart:
--------------------------------------------------------------------------------
1 | import 'dart:io';
2 | 
3 | import 'package:static_shock/static_shock.dart';
4 | 
5 | Future<void> main(List<String> arguments) async {
6 |   // Configure the static website generator.
7 |   final staticShock = StaticShock()
8 |     // Pick source files.
9 |     ..pick(DirectoryPicker.parse("images"))
10 |     ..pickRemote(
11 |       layouts: _remoteLayouts,
12 |       components: _remoteComponents,
13 |       assets: _remoteAssets,
14 |     )
15 |     // Add all needed plugins.
16 |     ..plugin(const MarkdownPlugin())
17 |     ..plugin(const JinjaPlugin())
18 |     ..plugin(const PrettyUrlsPlugin())
19 |     ..plugin(const RedirectsPlugin())
20 |     ..plugin(const SassPlugin())
21 |     ..plugin(DraftingPlugin(
22 |       showDrafts: arguments.contains("preview"),
23 |     ))
24 |     ..plugin(const PubPackagePlugin({
25 |       "ffmpeg_cli",
26 |     }))
27 |     ..plugin(
28 |       GitHubContributorsPlugin(
29 |         authToken: Platform.environment["GHUB_DOC_WEBSITE_TOKEN"], // must match the env var name set in build_docs.yml (env vars are case-sensitive on Linux)
30 |       ),
31 |     );
32 | 
33 |   // Generate the static website.
34 |   await staticShock.generateSite();
35 | }
36 | 
37 | final _remoteLayouts = {
38 |   // Main page layout.
39 |   RemoteInclude(
40 |     url:
41 |         "https://raw.githubusercontent.com/Flutter-Bounty-Hunters/fbh_branding/main/single-page-doc-sites/_includes/layouts/homepage.jinja?raw=true",
42 |     name: "homepage",
43 |   ),
44 | };
45 | 
46 | final _remoteComponents = {
47 |   // Contributors component (used by main page layout).
48 |   RemoteInclude(
49 |     url:
50 |         "https://raw.githubusercontent.com/Flutter-Bounty-Hunters/fbh_branding/main/single-page-doc-sites/_includes/components/contributors.jinja?raw=true",
51 |     name: "contributors",
52 |   ),
53 | };
54 | 
55 | final _remoteAssets = {
56 |   // Sass styles.
57 |   HttpFileGroup.fromUrlTemplate(
58 |     "https://raw.githubusercontent.com/Flutter-Bounty-Hunters/fbh_branding/main/single-page-doc-sites/styles/\$",
59 |     buildDirectory: "styles/",
60 |     files: {
61 |       "theme.scss",
62 |       "homepage.scss",
63 |     },
64 |   ),
65 | 
66 |   // Flutter and Dart logos.
67 |   RemoteFile(
68 |     url:
69 |         "https://raw.githubusercontent.com/Flutter-Bounty-Hunters/fbh_branding/main/single-page-doc-sites/images/google/dart-logo.svg?raw=true",
70 |     buildPath: FileRelativePath("images/branding/", "dart-logo", "svg"),
71 |   ),
72 |   RemoteFile(
73 |     url:
74 |         "https://raw.githubusercontent.com/Flutter-Bounty-Hunters/fbh_branding/main/single-page-doc-sites/images/google/flutter-logo.svg?raw=true",
75 |     buildPath: FileRelativePath("images/branding/", "flutter-logo", "svg"),
76 |   ),
77 | 
78 |   // Dart Favicons.
79 |   HttpFileGroup.fromUrlTemplate(
80 |     "https://github.com/Flutter-Bounty-Hunters/fbh_branding/blob/main/single-page-doc-sites/images/google/dart-favicon/\$?raw=true",
81 |     buildDirectory: "images/favicon/dart/",
82 |     files: {
83 |       "android-chrome-192x192.png",
84 |       "android-chrome-512x512.png",
85 |       "apple-touch-icon.png",
86 |       "browserconfig.xml",
87 |       "favicon-16x16.png",
88 |       "favicon-32x32.png",
89 |       "favicon.ico",
90 |       "mstile-150x150.png",
91 |       "site.webmanifest",
92 |     },
93 |   ),
94 | 
95 |   // Flutter Favicons.
96 |   HttpFileGroup.fromUrlTemplate(
97 |     "https://github.com/Flutter-Bounty-Hunters/fbh_branding/blob/main/single-page-doc-sites/images/google/flutter-favicon/\$?raw=true",
98 |     buildDirectory: "images/favicon/flutter/",
99 |     files: {
100 |       "android-chrome-192x192.png",
101 |       "android-chrome-512x512.png",
102 |       "apple-touch-icon.png",
103 |       "browserconfig.xml",
104 |       "favicon-16x16.png",
105 |       "favicon-32x32.png",
106 |       "favicon.ico",
107 |       "mstile-150x150.png",
108 |       "safari-pinned-tab.svg",
109 |       "site.webmanifest",
110 |     },
111 |   ),
112 | };
113 |
--------------------------------------------------------------------------------
/doc/website/pubspec.yaml:
--------------------------------------------------------------------------------
1 | name: ffmpeg_cli_docs
2 | description: Documentation for FFMPEG CLI
3 | version: 1.0.0
4 | publish_to: none
5 |
6 | environment:
7 | sdk: ^3.0.0
8 |
9 | dependencies:
10 | static_shock: ^0.0.9
11 |
12 | dev_dependencies:
13 | lints: ^2.0.0
14 | test: ^1.21.0
15 |
--------------------------------------------------------------------------------
/doc/website/source/_data.yaml:
--------------------------------------------------------------------------------
1 | homepage_url: https://flutter-bounty-hunters.github.io/ffmpeg_cli/
2 |
3 | # Configuration for the package that this website documents.
4 | package:
5 | name: ffmpeg_cli
6 | title: FFMPEG CLI
7 | description: Run FFMPEG CLI commands from Dart
8 | type: dart
9 | is_on_pub: true
10 | github:
11 | url: https://github.com/flutter-bounty-hunters/ffmpeg_cli
12 | organization: flutter-bounty-hunters
13 | name: ffmpeg_cli
14 | discord: https://discord.gg/8hna2VD32s
15 | sponsorship: https://flutterbountyhunters.com
16 |
17 | # Configuration of the GitHub plugin for loading info about GitHub repositories.
18 | github:
19 | contributors:
20 | repositories:
21 | - { organization: flutter-bounty-hunters, name: ffmpeg_cli }
22 |
--------------------------------------------------------------------------------
/doc/website/source/_includes/components/code_two_column.jinja:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
With Flutter Test Robots
5 |
testWidgets((tester) async {
6 | // Setup the test.
7 |
8 | // Press CMD+V (like for a paste).
9 | await tester.pressCmdV();
10 |
11 | // Press CMD+ALT+LEFT (jump to start).
12 | await tester.pressCmdAltLeftArrow();
13 |
14 | // Verify expectations.
15 | });
16 |
17 |
18 |
19 |
With Flutter
20 |
testWidgets((tester) async {
21 | // Setup the test.
22 |
23 | // Press CMD+V (like for a paste).
24 | await tester.sendKeyDownEvent(
25 | LogicalKeyboardKey.meta,
26 | );
27 | await tester.sendKeyEvent(
28 | LogicalKeyboardKey.keyV,
29 | );
30 | await tester.sendKeyUpEvent(
31 | LogicalKeyboardKey.meta,
32 | );
33 |
34 | // Press CMD+ALT+LEFT (jump to start).
35 | await tester.sendKeyDownEvent(
36 | LogicalKeyboardKey.meta,
37 | );
38 | await tester.sendKeyDownEvent(
39 | LogicalKeyboardKey.alt,
40 | );
41 | await tester.sendKeyEvent(
42 | LogicalKeyboardKey.left,
43 | );
44 | await tester.sendKeyUpEvent(
45 | LogicalKeyboardKey.left,
46 | );
47 | await tester.sendKeyUpEvent(
48 | LogicalKeyboardKey.meta,
49 | );
50 |
51 | // Verify expectations.
52 | });
53 |
54 |
55 |
56 |
--------------------------------------------------------------------------------
/doc/website/source/images/branding/logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Flutter-Bounty-Hunters/ffmpeg_cli/9a06a3072dfe87682f14b16f43687d9e30b2ae08/doc/website/source/images/branding/logo.png
--------------------------------------------------------------------------------
/doc/website/source/images/branding/social.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Flutter-Bounty-Hunters/ffmpeg_cli/9a06a3072dfe87682f14b16f43687d9e30b2ae08/doc/website/source/images/branding/social.png
--------------------------------------------------------------------------------
/doc/website/source/index.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: layouts/homepage.jinja
3 | contentRenderers:
4 | - markdown
5 | - jinja
6 | ---
7 | ## What is FFMPEG?
8 | [FFMPEG](https://ffmpeg.org/) is a collection of powerful and complicated video and audio processing tools. These tools are
9 | written in C++. Due to the complexity of building and coding for these tools, FFMPEG is primarily
10 | utilized through command-line (CLI) scripts.
11 |
12 | ### FFMPEG scripts are cryptic!
13 | FFMPEG CLI scripts are notoriously difficult to read and write. It’s very easy to accidentally break a script.
14 |
15 | ```bash
16 | ffmpeg \
17 | -f lavfi -i testsrc \
18 | -f lavfi -i testsrc \
19 | -f lavfi -i testsrc \
20 | -f lavfi -i testsrc \
21 | -filter_complex \
22 | "[0:v] pad=iw*2:ih*2 [a]; \
23 | [1:v] negate [b]; \
24 | [2:v] hflip [c]; \
25 | [3:v] edgedetect [d]; \
26 | [a] [b] overlay=w [x]; \
27 | [x] [c] overlay=0:h [y]; \
28 | [y] [d] overlay=w:h [out]" \
29 | -map "[out]" \
30 | -c:v ffv1 -t 5 multiple_input_grid.avi
31 | ```
32 |
33 | FFMPEG filter graphs are inherently complex - there’s no getting around that. However, that
34 | complexity doesn’t need to be compounded by trying to work with shell scripts. The `ffmpeg_cli`
35 | package makes it possible to compose these commands in Dart.
36 |
37 | ---
38 |
39 | ## FFMPEG commands in Dart
40 | FFMPEG supports two styles of commands. FFMPEG calls them “simple” and “complex”. The difference is
41 | that a “simple” command has a single pipeline of steps from input to output. A “complex” command
42 | contains an entire graph of steps. The `ffmpeg_cli` package supports both.
43 |
44 | ### Compose a simple command
45 |
46 | ```dart
47 | final command = FfmpegCommand.simple(
48 | inputs: [
49 | // The input video is "raw-video.mov".
50 | FfmpegInput.asset("raw-video.mov"),
51 | ],
52 | args: [
53 | // Cut the first 2 minutes and 30 seconds off the video.
54 | CliArg(name: "ss", value: "2:30"),
55 | ],
56 | // Transcode and output to a file called "my-video.mp4".
57 | outputFilepath: "my-video.mp4",
58 | );
59 | ```
60 |
61 | ### Compose a complex command
62 |
63 | ```dart
64 | final command = FfmpegCommand.complex(
65 | // Take three inputs: an intro, some primary content, and an outro.
66 | inputs: [
67 | FfmpegInput.asset("assets/intro.mp4"),
68 | FfmpegInput.asset("assets/content.mp4"),
69 | FfmpegInput.asset("assets/outro.mov"),
70 | ],
71 | args: [
72 | // Send typical mapping args to send the result of the filter graph to
73 | // the output file.
74 | CliArg(name: 'map', value: outputStream.videoId!),
75 | CliArg(name: 'map', value: outputStream.audioId!),
76 | // A couple additional settings that seem to be necessary in some cases
77 | // to get the expected output. Play with these as needed.
78 | const CliArg(name: 'y'),
79 | const CliArg(name: 'vsync', value: '2'),
80 | ],
81 | // Configure the whole FFMPEG filter graph.
82 | filterGraph: FilterGraph(
83 | chains: [
84 | // Add a filter to the graph.
85 | FilterChain(
86 | // Send all three of our input videos into this filter.
87 | inputs: [
88 | const FfmpegStream(videoId: "[0:v]", audioId: "[0:a]"),
89 | const FfmpegStream(videoId: "[1:v]", audioId: "[1:a]"),
90 | const FfmpegStream(videoId: "[2:v]", audioId: "[2:a]"),
91 | ],
92 | // Apply a concatenation filter, which plays each input back-to-back.
93 | filters: [
94 | ConcatFilter(segmentCount: 3, outputVideoStreamCount: 1, outputAudioStreamCount: 1),
95 | ],
96 | // Send the result to the final output file.
97 | outputs: [
98 | outputStream,
99 | ],
100 | ),
101 | ],
102 | ),
103 | outputFilepath: "/my/output/file.mp4",
104 | );
105 | ```
106 |
107 | ---
108 |
109 | ## Run the command
110 | Once you've assembled an `FfmpegCommand`, you can execute it from Dart. Behind the scenes, the
111 | `ffmpeg_cli` package takes your Dart object, converts it to a standard FFMPEG CLI command, and then
112 | runs that command in an invisible shell.
113 |
114 | ```dart
115 | // Create the command.
116 | final command = createMyCommand();
117 |
118 | // Execute command.
119 | final process = await Ffmpeg().run(command: command);
120 | ```
121 |
122 | ---
123 |
124 | ## Built by the
Flutter Bounty Hunters
125 | This package was built by the [Flutter Bounty Hunters (FBH)](https://flutterbountyhunters.com).
126 | The Flutter Bounty Hunters is a development agency that works exclusively on open source Flutter
127 | and Dart packages.
128 |
129 | With funding from corporate clients, the goal of the Flutter Bounty Hunters is to solve
130 | common problems for The Last Time™. If your team gets value from Flutter Bounty Hunter
131 | packages, please consider funding further development.
132 |
133 | ### Other FBH packages
134 | Other packages that the Flutter Bounty Hunters brought to the community...
135 |
136 | [Super Editor, Super Text, Attributed Text](https://github.com/superlistapp/super_editor), [Static Shock](https://staticshock.io),
137 | [Follow the Leader](https://github.com/flutter-bounty-hunters/follow_the_leader), [Overlord](https://github.com/flutter-bounty-hunters/overlord),
138 | [Flutter Test Robots](https://github.com/flutter-bounty-hunters/flutter_test_robots), and more.
139 |
140 | ## Contributors
141 | The `{{ package.name }}` package was built by...
142 |
143 | {{ components.contributors() }}
--------------------------------------------------------------------------------
/example/assets/Butterfly-209.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Flutter-Bounty-Hunters/ffmpeg_cli/9a06a3072dfe87682f14b16f43687d9e30b2ae08/example/assets/Butterfly-209.mp4
--------------------------------------------------------------------------------
/example/assets/bee.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Flutter-Bounty-Hunters/ffmpeg_cli/9a06a3072dfe87682f14b16f43687d9e30b2ae08/example/assets/bee.mp4
--------------------------------------------------------------------------------
/example/bin/main.dart:
--------------------------------------------------------------------------------
1 | // ignore_for_file: avoid_print
2 | 
3 | import 'dart:convert';
4 | import 'dart:io';
5 | 
6 | import 'package:ffmpeg_cli/ffmpeg_cli.dart';
7 | 
8 | /// Uses an [FfmpegBuilder] to create an [FfmpegCommand], then
9 | /// runs the [FfmpegCommand] to render a video.
10 | void main() async {
11 |   final commandBuilder = FfmpegBuilder();
12 | 
13 |   final butterflyStream = commandBuilder.addAsset("assets/Butterfly-209.mp4");
14 |   final beeStream = commandBuilder.addAsset("assets/bee.mp4");
15 |   final outputStream = commandBuilder.createStream(hasVideo: true, hasAudio: true);
16 | 
17 |   commandBuilder.addFilterChain(
18 |     // We combine the two example videos into one, by using a
19 |     // "concat" filter.
20 |     FilterChain(
21 |       inputs: [
22 |         // The inputs into the "concat" filter are the input IDs
23 |         // for the source videos that FFMPEG generated.
24 |         butterflyStream,
25 |         beeStream,
26 |       ],
27 |       filters: [
28 |         // Combine the two source videos, one after the other, by
29 |         // using the "concat" filter.
30 |         ConcatFilter(
31 |           segmentCount: 2,
32 |           outputVideoStreamCount: 1,
33 |           outputAudioStreamCount: 1,
34 |         ),
35 |       ],
36 |       outputs: [
37 |         // This "concat" filter will produce a video stream and an
38 |         // audio stream. Here, we give those streams IDs so that we
39 |         // can pass them into other FilterChains, or map them to the
40 |         // output file.
41 |         outputStream,
42 |       ],
43 |     ),
44 |   );
45 | 
46 |   final cliCommand = commandBuilder.build(
47 |     args: [
48 |       // Set the FFMPEG log level.
49 |       CliArg.logLevel(LogLevel.info),
50 |       // Map the final stream IDs from the filter graph to
51 |       // the output file.
52 |       CliArg(name: 'map', value: outputStream.videoId!),
53 |       CliArg(name: 'map', value: outputStream.audioId!),
54 |       const CliArg(name: 'vsync', value: '2'),
55 |     ],
56 |     outputFilepath: "output/test_render.mp4",
57 |   );
58 | 
59 |   print('');
60 |   print('Expected command input: ');
61 |   print(cliCommand.expectedCliInput());
62 |   print('');
63 | 
64 |   // Run the FFMPEG command (spawns an `ffmpeg` process, which must be available on the system path).
65 |   final process = await Ffmpeg().run(cliCommand);
66 | 
67 |   // Pipe the process output to the Dart console. We listen to stderr, which is where FFMPEG emits its log output.
68 |   process.stderr.transform(utf8.decoder).listen((data) {
69 |     print(data);
70 |   });
71 | 
72 |   // Allow the user to respond to FFMPEG queries, such as file overwrite
73 |   // confirmations.
74 |   stdin.pipe(process.stdin);
75 | 
76 |   await process.exitCode; // Wait for FFMPEG to finish before exiting.
77 |   print('DONE');
78 | }
79 |
--------------------------------------------------------------------------------
/example/bin/main_raw.dart:
--------------------------------------------------------------------------------
1 | // ignore_for_file: avoid_print
2 | 
3 | import 'dart:convert';
4 | import 'dart:io';
5 | 
6 | import 'package:ffmpeg_cli/ffmpeg_cli.dart';
7 | 
8 | /// Manually configures an [FfmpegCommand], without using the help
9 | /// of an [FfmpegBuilder].
10 | void main() async {
11 |   // Create an FFMPEG complex command so we can run it with filters
12 |   final cliCommand = FfmpegCommand.complex(
13 |     inputs: [
14 |       // These assets are passed to FFMPEG with the input "-i" flag.
15 |       // Each input is auto-assigned stream IDs by FFMPEG, e.g.:
16 |       //  1: "[0:v]" and "[0:a]"
17 |       //  2: "[1:v]" and "[1:a]"
18 |       //  ...
19 |       FfmpegInput.asset("assets/Butterfly-209.mp4"),
20 |       FfmpegInput.asset("assets/bee.mp4"),
21 |     ],
22 |     args: [
23 |       // Set the FFMPEG log level.
24 |       CliArg.logLevel(LogLevel.info),
25 |       // Map the final stream IDs from the filter graph to
26 |       // the output file.
27 |       const CliArg(name: 'map', value: "[comp_0_v]"),
28 |       const CliArg(name: 'map', value: "[comp_0_a]"),
29 |       // TODO: need to generalize knowledge of when to use vsync -2
30 |       const CliArg(name: 'vsync', value: '2'),
31 |     ],
32 |     filterGraph: FilterGraph(
33 |       chains: [
34 |         // We combine the two example videos into one, by using a
35 |         // "concat" filter.
36 |         FilterChain(
37 |           inputs: [
38 |             // The inputs into the "concat" filter are the input IDs
39 |             // for the source videos that FFMPEG generated.
40 |             const FfmpegStream(videoId: "[0:v]", audioId: "[0:a]"),
41 |             const FfmpegStream(videoId: "[1:v]", audioId: "[1:a]"),
42 |           ],
43 |           filters: [
44 |             // Combine the two source videos, one after the other, by
45 |             // using the "concat" filter.
46 |             ConcatFilter(
47 |               segmentCount: 2,
48 |               outputVideoStreamCount: 1,
49 |               outputAudioStreamCount: 1,
50 |             ),
51 |           ],
52 |           outputs: [
53 |             // This "concat" filter will produce a video stream and an
54 |             // audio stream. Here, we give those streams IDs so that we
55 |             // can pass them into other FilterChains, or map them to the
56 |             // output file.
57 |             const FfmpegStream(videoId: "[comp_0_v]", audioId: "[comp_0_a]"),
58 |           ],
59 |         ),
60 |       ],
61 |     ),
62 |     outputFilepath: "output/test_render.mp4",
63 |   );
64 | 
65 |   print('');
66 |   print('Expected command input: ');
67 |   print(cliCommand.expectedCliInput());
68 |   print('');
69 | 
70 |   // Run the FFMPEG command (spawns an `ffmpeg` process, which must be available on the system path).
71 |   final process = await Ffmpeg().run(cliCommand);
72 | 
73 |   // Pipe the process output to the Dart console. We listen to stderr, which is where FFMPEG emits its log output.
74 |   process.stderr.transform(utf8.decoder).listen((data) {
75 |     print(data);
76 |   });
77 | 
78 |   // Allow the user to respond to FFMPEG queries, such as file overwrite
79 |   // confirmations.
80 |   stdin.pipe(process.stdin);
81 | 
82 |   await process.exitCode; // Wait for FFMPEG to finish before exiting.
83 |   print('DONE');
84 | }
85 |
--------------------------------------------------------------------------------
/example/output/test_render.mp4:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/Flutter-Bounty-Hunters/ffmpeg_cli/9a06a3072dfe87682f14b16f43687d9e30b2ae08/example/output/test_render.mp4
--------------------------------------------------------------------------------
/example/pubspec.yaml:
--------------------------------------------------------------------------------
1 | name: example
2 | version: 1.0.0
3 | description: Example for ffmpeg_cli
4 |
5 | publish_to: none
6 |
7 | environment:
8 | sdk: '>=3.0.0 <4.0.0'
9 |
10 | dependencies:
11 | ffmpeg_cli:
12 | path: ../
13 |
14 | dev_dependencies:
15 | flutter_lints: ^2.0.3
16 |
--------------------------------------------------------------------------------
/lib/ffmpeg_cli.dart:
--------------------------------------------------------------------------------
export 'src/colors.dart';
export 'src/time.dart';

// FFPROBE
export 'src/ffprobe/ffprobe.dart';
export 'src/ffprobe/ffprobe_json.dart';

// FFMPEG
export 'src/ffmpeg/ffmpeg_command.dart';
export 'src/ffmpeg/ffmpeg_command_builder.dart';
export 'src/ffmpeg/log_level.dart';
export 'src/ffmpeg/video_size.dart';

// FFMPEG filters (kept in alphabetical order).
export 'src/ffmpeg/filters/audio_mix_filter.dart';
export 'src/ffmpeg/filters/concat_filter.dart';
export 'src/ffmpeg/filters/copy_filter.dart';
export 'src/ffmpeg/filters/crop_filter.dart';
export 'src/ffmpeg/filters/custom_filter.dart';
export 'src/ffmpeg/filters/delay_filter.dart';
export 'src/ffmpeg/filters/fade_filter.dart';
export 'src/ffmpeg/filters/fps_filter.dart';
export 'src/ffmpeg/filters/null_filter.dart';
export 'src/ffmpeg/filters/overlay_filter.dart';
export 'src/ffmpeg/filters/presentation_timestamp_filter.dart';
export 'src/ffmpeg/filters/sample_aspect_ratio_filter.dart';
export 'src/ffmpeg/filters/scale_filter.dart';
export 'src/ffmpeg/filters/subtitle_filter.dart';
export 'src/ffmpeg/filters/sws.dart';
export 'src/ffmpeg/filters/tpad_filter.dart';
export 'src/ffmpeg/filters/trim_filter.dart';
export 'src/ffmpeg/filters/volume_filter.dart';
33 |
--------------------------------------------------------------------------------
/lib/src/colors.dart:
--------------------------------------------------------------------------------
1 | /// Used to generate color in the correct sequence for FFMPEG
2 | ///
3 | /// FFMPEG uses `0xRRGGBB[AA]`
class FfmpegColor {
  /// Parses [color] (e.g., "0xFF000000") into an [FfmpegColor].
  static FfmpegColor parse(String color) => FfmpegColor(int.parse(color));

  const FfmpegColor(this.color);

  /// The color as a 32-bit integer with alpha in the top byte
  /// (0xAARRGGBB layout, per the shift in [_alphaHex]).
  final int color;

  /// Whether this color has any transparency (alpha byte below 0xFF).
  bool get isTranslucent => color < 0xFF000000;

  // Lower 24 bits -> "rrggbb".
  String _rgbHex() {
    final rgb = color & 0x00FFFFFF;
    return rgb.toRadixString(16).padLeft(6, '0');
  }

  // Top 8 bits -> "aa".
  String _alphaHex() {
    final alpha = color >> 24;
    return alpha.toRadixString(16).padLeft(2, '0');
  }

  /// FFMPEG displays colors as `0xRRGGBB[AA]` (as well as some other
  /// formats), so the stored 0xAARRGGBB value is re-ordered: RGB first,
  /// then the alpha byte.
  String toCli() => '0x${_rgbHex()}${_alphaHex()}';

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    return other is FfmpegColor && runtimeType == other.runtimeType && color == other.color;
  }

  @override
  int get hashCode => color.hashCode;
}
27 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/ffmpeg_command.dart:
--------------------------------------------------------------------------------
1 | import 'dart:io';
2 |
3 | import 'package:collection/collection.dart';
4 |
5 | import 'package:ffmpeg_cli/src/ffmpeg/log_level.dart';
6 |
7 | /// Dart wrappers for FFMPEG CLI commands, arguments, flags, and filters.
8 |
9 | /// Executes FFMPEG commands from Dart.
class Ffmpeg {
  /// Executes the given [command] in a new process and returns the
  /// started [Process].
  ///
  /// The caller is responsible for consuming the process's output
  /// streams and awaiting its exit code.
  Future run(FfmpegCommand command) {
    final cliCommand = command.toCli();
    return Process.start(cliCommand.executable, cliCommand.args);
  }
}
20 |
21 | /// FFMPEG CLI command.
22 | ///
23 | /// The `ffmpeg` CLI command is the primary CLI tool for FFMPEG. This class
24 | /// is a Dart wrapper around that command.
25 | ///
26 | /// `inputs` should include all video, audio, image, and other assets
27 | /// that are referenced in the desired command.
28 | ///
29 | /// `args` should include all CLI arguments for the desired command.
30 | ///
31 | /// `filterGraph` describes how the assets should be composed to form the
32 | /// final video.
33 | ///
34 | /// `outputFilepath` is the path to where the final video should be stored
class FfmpegCommand {
  /// Configures a command whose inputs are composed by a complex
  /// [filterGraph].
  const FfmpegCommand.complex({
    this.ffmpegPath,
    this.inputs = const [],
    this.args = const [],
    required this.filterGraph,
    required this.outputFilepath,
  });

  /// Configures a command with no filter graph, e.g., a direct
  /// transcode of [inputs] to [outputFilepath].
  const FfmpegCommand.simple({
    this.ffmpegPath,
    this.inputs = const [],
    this.args = const [],
    required this.outputFilepath,
  }) : filterGraph = null;

  /// The path of the `ffmpeg` cli executable.
  ///
  /// For example, [ffmpegPath] might be `/opt/homebrew/bin/ffmpeg` on macOS
  /// or `C:\ffmpeg\ffmpeg.exe` on Windows. If `null`, the `ffmpeg`
  /// from path is used.
  final String? ffmpegPath;

  /// FFMPEG command inputs, such as assets and virtual devices.
  final List inputs;

  /// All non-input arguments for the FFMPEG command, such as "map".
  final List args;

  /// The graph of filters that produce the final video.
  final FilterGraph? filterGraph;

  /// The file path for the rendered video.
  final String outputFilepath;

  /// Converts this command to a series of CLI arguments, which can be
  /// passed to a `Process` for execution.
  CliCommand toCli() {
    return CliCommand(
      executable: ffmpegPath ?? 'ffmpeg',
      args: [
        for (final input in inputs) ...input.args,
        // Each arg becomes "-name" followed by its value, if any.
        for (final arg in args) ...["-${arg.name}", if (arg.value != null) arg.value!],
        if (filterGraph != null) ...[
          '-filter_complex',
          filterGraph!.toCli(),
        ],
        outputFilepath,
      ],
    );
  }

  /// Returns a string that represents what this command is expected to
  /// look like when run by a `Process`.
  ///
  /// This method is provided for debugging purposes because we can't see
  /// what command is actually running in the `Process`.
  String expectedCliInput() {
    // Report the same executable that toCli() actually runs, rather than
    // hard-coding "ffmpeg", so a custom [ffmpegPath] shows up in debug
    // output.
    final buffer = StringBuffer('${ffmpegPath ?? 'ffmpeg'}\n');
    for (final input in inputs) {
      buffer.writeln(' ${input.toCli()}');
    }
    for (final arg in args) {
      buffer.writeln(' ${arg.toCli()}');
    }
    if (filterGraph != null) {
      buffer.writeln(' -filter_complex ');
      buffer.writeln(filterGraph!.toCli(indent: ' '));
    }
    buffer.writeln(' $outputFilepath');

    return buffer.toString();
  }
}
109 |
110 | /// An input into an FFMPEG filter graph.
111 | ///
112 | /// An input might refer to a video file, audio file, or virtual device.
class FfmpegInput {
  /// Configures an FFMPEG input for an asset at the given [assetPath].
  FfmpegInput.asset(assetPath) : args = ['-i', assetPath];

  /// Configures an FFMPEG input for a virtual device.
  ///
  /// See the FFMPEG docs for more information.
  FfmpegInput.virtualDevice(String device) : args = ['-f', 'lavfi', '-i', device];

  const FfmpegInput(this.args);

  /// List of CLI arguments that configure a single FFMPEG input.
  final List args;

  /// Returns this input in a form that can be added to a CLI string,
  /// e.g., "-i /videos/vid1.mp4".
  String toCli() => args.join(' ');

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    // Two inputs are equal when they render to the same CLI fragment.
    return other is FfmpegInput && runtimeType == other.runtimeType && toCli() == other.toCli();
  }

  @override
  int get hashCode => toCli().hashCode;
}
139 |
140 | /// An argument that is passed to the FFMPEG CLI command.
class CliArg {
  /// Creates a `-loglevel` argument for the given [level].
  CliArg.logLevel(LogLevel level) : this(name: 'loglevel', value: level.toFfmpegString());

  const CliArg({
    required this.name,
    this.value,
  });

  /// The flag name, without the leading "-".
  final String name;

  /// The flag's value, or `null` for a value-less flag.
  final String? value;

  /// Formats this argument for the CLI, e.g., "-loglevel quiet".
  ///
  /// A value-less flag keeps its trailing space ("-name "), preserving
  /// the original output format.
  String toCli() => value == null ? '-$name ' : '-$name $value';
}
154 |
155 | /// A filter graph that describes how FFMPEG should compose various assets
156 | /// to form a final, rendered video.
157 | ///
158 | /// FFMPEG filter graph syntax reference:
159 | /// http://ffmpeg.org/ffmpeg-filters.html#Filtergraph-syntax-1
class FilterGraph {
  const FilterGraph({
    required this.chains,
  });

  /// The filter chains that make up this graph.
  final List chains;

  /// Returns this filter graph in a form that can be run in a CLI
  /// command: one chain per line, separated by "; ", each prefixed
  /// with [indent].
  String toCli({indent = ''}) {
    final renderedChains = <String>[];
    for (final chain in chains) {
      renderedChains.add('$indent${chain.toCli()}');
    }
    return renderedChains.join('; \n');
  }
}
172 |
173 | /// A single pipeline of operations within a larger filter graph.
174 | ///
175 | /// A filter chain has some number of input streams, those streams then
176 | /// have some number of filters applied to them in the given order, and
177 | /// those filters then produce some number of output streams.
class FilterChain {
  const FilterChain({
    this.inputs = const [],
    required this.filters,
    this.outputs = const [],
  });

  /// Streams that flow into the [filters].
  final List inputs;

  /// Filters that apply to the [inputs], and generate the [outputs].
  final List filters;

  /// New streams that flow out of the [filters], after applying those
  /// [filters] to the [inputs].
  final List outputs;

  /// Formats this filter chain for the FFMPEG CLI.
  ///
  /// Format:
  ///   [in1] [in2] [in3] filter1, filter2 [out1] [out2] [out3]
  ///
  /// Example:
  ///   [0:0] trim=start='10':end='15' [out_v]
  String toCli() {
    final inputList = inputs.map((stream) => stream.toString()).join(' ');
    final filterList = filters.map((filter) => filter.toCli()).join(', ');
    final outputList = outputs.join(' ');
    return '$inputList $filterList $outputList';
  }
}
206 |
207 | /// A single video/audio stream pair within an FFMPEG filter graph.
208 | ///
209 | /// A stream might include a video ID, or an audio ID, or both.
210 | ///
211 | /// Every filter chain in an FFMPEG filter graph requires one or more
212 | /// input streams, and produces one or more output streams. From a CLI
213 | /// perspective, these streams are just string names within the filter
214 | /// graph configuration. However, these string names need to match, as
215 | /// outputs from one filter chain are used as inputs in another filter
216 | /// chain. To that end, these streams are represented by this class.
class FfmpegStream {
  const FfmpegStream({
    this.videoId,
    this.audioId,
  }) : assert(videoId != null || audioId != null, "FfmpegStream must include a videoId, or an audioId.");

  /// Handle to a video stream, e.g., "[0:v]".
  final String? videoId;

  /// Handle to an audio stream, e.g., "[0:a]".
  final String? audioId;

  /// Returns a copy of this stream with just the video stream handle.
  ///
  /// If this stream only includes video, then this stream is returned.
  FfmpegStream get videoOnly {
    if (audioId == null) {
      return this;
    }
    return FfmpegStream(videoId: videoId);
  }

  /// Returns a copy of this stream with just the audio stream handle.
  ///
  /// If this stream only includes audio, then this stream is returned.
  FfmpegStream get audioOnly {
    if (videoId == null) {
      return this;
    }
    return FfmpegStream(audioId: audioId);
  }

  /// Returns the video and audio handles for this stream in a list,
  /// to pass into a filter graph as filter inputs or outputs, e.g.,
  /// "[0:v] [0:a]".
  List toCliList() {
    return <dynamic>[
      if (videoId != null) videoId!,
      if (audioId != null) audioId!,
    ];
  }

  @override
  String toString() => toCliList().join(" ");
}
260 |
261 | /// An individual FFMPEG CLI filter, which can be composed within a filter
262 | /// chain, within a broader filter graph.
263 | ///
264 | /// Filters are the workhorse of FFMPEG. Any change or effect to a given
265 | /// asset happens by way of a filter, e.g., trim, fade, concatenation, etc.
abstract class Filter {
  /// Formats this filter for use within a CLI filter graph,
  /// e.g., "fade=type=in" or "copy".
  String toCli();
}
269 |
270 | /// A command that can be passed to a `Process` for execution.
class CliCommand {
  const CliCommand({
    required this.executable,
    required this.args,
  });

  /// The name of the executable.
  ///
  /// It can be an executable name to be located from the path,
  /// a relative path, or an absolute path.
  final String executable;

  /// The arguments to be passed to the executable.
  final List args;

  @override
  String toString() {
    return '[CliCommand: $executable ${args.join(' ')}]';
  }

  @override
  bool operator ==(Object other) {
    return identical(this, other) ||
        other is CliCommand &&
            runtimeType == other.runtimeType &&
            executable == other.executable &&
            const DeepCollectionEquality().equals(args, other.args);
  }

  @override
  // Hash [args] deeply so that commands that compare equal (== uses
  // DeepCollectionEquality on args) also share the same hash code.
  // The previous args.hashCode was identity-based and broke the
  // hashCode/== contract for distinct-but-equal lists.
  int get hashCode => executable.hashCode ^ const DeepCollectionEquality().hash(args);
}
303 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/ffmpeg_command_builder.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/logging.dart';
2 |
3 | import 'ffmpeg_command.dart';
4 |
5 | /// Builds an [FfmpegCommand] by accumulating all inputs and filter
6 | /// streams for a given command, and then generates the CLI arguments.
7 | ///
8 | /// An [FfmpegCommand] can be constructed directly, but [FfmpegBuilder]
9 | /// helps to ensure that all of your inputs have appropriate flags, and
10 | /// all of your stream IDs match, where expected.
11 | ///
/// To build an [FfmpegCommand], first, add inputs with [addAsset],
13 | /// [addNullVideo], [addNullAudio], [addVideoVirtualDevice], and
14 | /// [addAudioVirtualDevice].
15 | ///
16 | /// Configure the filter graph by creating streams with [createStream], and
17 | /// then combine those [FfmpegStream]s with [Filter]s, into [FilterChain]s,
18 | /// and add the [FilterChain]s with [addFilterChain].
19 | ///
20 | /// Once you've added all inputs, and you've configured the filter graph,
21 | /// create the [FfmpegCommand] with [build].
class FfmpegBuilder {
  // FFMPEG command inputs, e.g., assets, keyed by input so the same
  // input is never added twice.
  final Map _inputs = {};

  // FfmpegBuilder assigns unique IDs to streams by tracking the total
  // number of created streams, and then using the number after that.
  // Using incrementing IDs makes it easier to trace bugs, rather than
  // generate unrelated IDs for every stream.
  int _compositionStreamCount = 0;

  // Filter chains added so far, in insertion order.
  final List _filterChains = [];

  /// Adds an input asset at the given [assetPath].
  ///
  /// If [hasVideo] is `true`, the asset is processed for video frames.
  /// If [hasAudio] is `true`, the asset is processed for audio streams.
  FfmpegStream addAsset(
    String assetPath, {
    bool hasVideo = true,
    bool hasAudio = true,
  }) {
    final input = FfmpegInput.asset(assetPath);

    // Generate video and audio stream IDs using the format that
    // FFMPEG expects: "[N:v]"/"[N:a]" when the asset carries both
    // kinds of stream, or a bare "[N]" when it carries only one.
    final videoId = hasVideo
        ? hasVideo && hasAudio
            ? '[${_inputs.length}:v]'
            : '[${_inputs.length}]'
        : null;
    final audioId = hasAudio
        ? hasVideo && hasAudio
            ? '[${_inputs.length}:a]'
            : '[${_inputs.length}]'
        : null;

    _inputs.putIfAbsent(
        input,
        () => FfmpegStream(
              videoId: videoId,
              audioId: audioId,
            ));

    return _inputs[input]!;
  }

  /// Adds a virtual video input asset with the given [width] and [height],
  /// which can be used to fill up time when no other video is available.
  FfmpegStream addNullVideo({
    required int width,
    required int height,
  }) {
    final input = FfmpegInput.virtualDevice('nullsrc=s=${width}x$height');
    final stream = _inputs.putIfAbsent(
        input,
        () => FfmpegStream(
              videoId: '[${_inputs.length}]',
              audioId: null,
            ));
    return stream;
  }

  /// Adds a virtual audio input asset, which can be used to fill audio
  /// when no other audio source is available.
  FfmpegStream addNullAudio() {
    final input = FfmpegInput.virtualDevice('anullsrc=sample_rate=48000');
    final stream = _inputs.putIfAbsent(
        input,
        () => FfmpegStream(
              videoId: null,
              audioId: '[${_inputs.length}]',
            ));
    return stream;
  }

  /// Adds the given virtual [device] as a video-only input.
  FfmpegStream addVideoVirtualDevice(String device) {
    final input = FfmpegInput.virtualDevice(device);
    final stream = _inputs.putIfAbsent(
        input,
        () => FfmpegStream(
              videoId: '[${_inputs.length}]',
              audioId: null,
            ));
    return stream;
  }

  /// Adds the given virtual [device] as an audio-only input.
  FfmpegStream addAudioVirtualDevice(String device) {
    final input = FfmpegInput.virtualDevice(device);
    final stream = _inputs.putIfAbsent(
        input,
        () => FfmpegStream(
              videoId: null,
              audioId: '[${_inputs.length}]',
            ));
    return stream;
  }

  /// Creates a composition stream with unique "[comp_N_v]"/"[comp_N_a]"
  /// IDs, for use as the output of a [FilterChain].
  FfmpegStream createStream({bool hasVideo = true, bool hasAudio = true}) {
    final stream = FfmpegStream(
      videoId: hasVideo ? '[comp_${_compositionStreamCount}_v]' : null,
      audioId: hasAudio ? '[comp_${_compositionStreamCount}_a]' : null,
    );

    _compositionStreamCount += 1;

    return stream;
  }

  /// Adds the given [chain] to the command's filter graph.
  void addFilterChain(FilterChain chain) {
    _filterChains.add(chain);
  }

  /// Accumulates all the input assets and filter chains in this builder
  /// and returns an [FfmpegCommand] that generates a corresponding video,
  /// which is rendered to the given [outputFilepath].
  ///
  /// Provide a [ffmpegPath] to customize the path of the ffmpeg cli.
  /// For example, [ffmpegPath] might be `/opt/homebrew/bin/ffmpeg` on macOS
  /// or `C:\ffmpeg\ffmpeg.exe` on Windows. If `null`, the `ffmpeg`
  /// from path is used.
  ///
  /// To run the command, see [FfmpegCommand].
  FfmpegCommand build({
    String? ffmpegPath,
    required List args,
    // NOTE(review): [mainOutStream] is accepted but never referenced when
    // assembling the command. It's kept for backward compatibility, but
    // callers expecting it to produce "-map" arguments should verify.
    FfmpegStream? mainOutStream,
    required String outputFilepath,
  }) {
    ffmpegBuilderLog.info('Building command. Filter chains:');
    for (final chain in _filterChains) {
      ffmpegBuilderLog.info(' - ${chain.toCli()}');
    }

    return FfmpegCommand.complex(
      ffmpegPath: ffmpegPath,
      inputs: _inputs.keys.toList(),
      args: args,
      filterGraph: FilterGraph(
        chains: _filterChains,
      ),
      outputFilepath: outputFilepath,
    );
  }
}
167 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/audio_mix_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Mixes multiple audio streams together into one.
4 | ///
5 | /// This will automatically cut the volume of each input proportional
6 | /// to the number of inputs, e.g., 1/2 volume for 2 inputs, 1/3 volume for 3
7 | /// inputs.
class AMixFilter implements Filter {
  const AMixFilter({
    this.inputCount,
  });

  /// Number of inputs (defaults to 2)
  final int? inputCount;

  /// Renders "amix", or "amix=inputs=N" when [inputCount] is given.
  @override
  String toCli() {
    if (inputCount == null) {
      return 'amix';
    }
    return 'amix=inputs=$inputCount';
  }
}
21 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/concat_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Combines a list of video and audio streams in series into a single set of
4 | /// video and audio output streams.
class ConcatFilter implements Filter {
  ConcatFilter({
    this.segmentCount,
    required this.outputVideoStreamCount,
    required this.outputAudioStreamCount,
  })  : assert(segmentCount == null || segmentCount > 0),
        assert(outputVideoStreamCount >= 0),
        assert(outputAudioStreamCount >= 0);

  /// Number of segments (defaults to 2)
  final int? segmentCount;

  /// Number of output video streams
  final int outputVideoStreamCount;

  /// Number of output audio streams
  final int outputAudioStreamCount;

  /// Renders "concat=[n=N:]v=V:a=A"; "n" is omitted when
  /// [segmentCount] is null so FFMPEG applies its default.
  @override
  String toCli() {
    final options = [
      if (segmentCount != null) 'n=$segmentCount',
      'v=$outputVideoStreamCount',
      'a=$outputAudioStreamCount',
    ];
    return 'concat=${options.join(':')}';
  }
}
30 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/copy_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Copies the input video stream (unchanged) to the output video stream.
class CopyFilter implements Filter {
  const CopyFilter();

  /// Renders the FFMPEG "copy" video pass-through filter.
  @override
  String toCli() => 'copy';
}
12 |
13 | /// Copies the input audio stream (unchanged) to the output audio stream.
class ACopyFilter implements Filter {
  const ACopyFilter();

  /// Renders the FFMPEG "acopy" audio pass-through filter.
  @override
  String toCli() => 'acopy';
}
22 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/crop_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Crops the input video to given dimensions.
class CropFilter implements Filter {
  const CropFilter({
    required this.width,
    required this.height,
    this.x,
    this.y,
  });

  /// Width of the output rectangle
  final int width;

  /// Height of the output rectangle
  final int height;

  /// x-position of the top left corner of the output rectangle
  final int? x;

  /// y-position of the top left corner of the output rectangle
  final int? y;

  // TODO: keep_aspect

  // TODO: exact

  @override
  String toCli() {
    // Use named options instead of positional "crop=w:h:x:y". With the
    // positional form, supplying [y] without [x] shifted the y value
    // into the x slot; named options are position-independent.
    final properties = [
      'w=$width',
      'h=$height',
      if (x != null) 'x=$x',
      if (y != null) 'y=$y',
    ];
    return 'crop=${properties.join(':')}';
  }
}
33 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/custom_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Uses the `cliValue` as a literal representation of an FFMPEG
4 | /// CLI filter, in the case that such a filter is not currently
5 | /// available as a `Filter` in this package.
class CustomFilter implements Filter {
  CustomFilter(
    this.cliValue,
  ) : assert(cliValue.isNotEmpty);

  /// Filter (any filters that are currently not available as a `Filter`)
  final String cliValue;

  /// Emits [cliValue] verbatim into the filter graph.
  @override
  String toCli() {
    return cliValue;
  }
}
17 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/delay_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Delays a given audio stream.
class ADelayFilter implements Filter {
  const ADelayFilter({
    required this.delays,
  });

  /// The delay for each audio stream in order
  final List delays;

  /// Renders "adelay=ms1|ms2|...", one millisecond value per stream.
  @override
  String toCli() {
    final delaysInMillis = delays.map((delay) => delay.inMilliseconds);
    return 'adelay=${delaysInMillis.join('|')}';
  }
}
17 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/fade_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/ffmpeg_cli.dart';
2 |
3 | /// Fades a given video stream.
class FadeFilter implements Filter {
  const FadeFilter({
    this.type,
    this.startFrame,
    this.nbFrames,
    this.alpha,
    this.startTime,
    this.duration,
    this.color,
  })  // [type] is optional (FFMPEG defaults to "in"), so a null type
      // must be allowed — the previous assert rejected it and made
      // FadeFilter() without an explicit type throw in debug mode.
      : assert(type == null || type == 'in' || type == 'out'),
        assert(alpha == null || alpha == 0 || alpha == 1);

  /// Effect type (default is in)
  final String? type;

  /// The frame to start the fade effect (default is 0)
  final String? startFrame;

  /// The number of frames that the effect lasts (default is 25)
  final String? nbFrames;

  /// Fade only alpha channel (0 or 1)
  final int? alpha;

  /// The timestamp of the frame to start the fade effect (default is 0)
  final Duration? startTime;

  /// The number of seconds for which the fade effect lasts
  final Duration? duration;

  /// The color of the fade (default is black)
  final String? color;

  @override
  String toCli() {
    // Only non-null options are emitted; FFMPEG supplies defaults for
    // the rest.
    final properties = [
      if (type != null) 'type=$type',
      if (alpha != null) 'alpha=$alpha',
      if (startFrame != null) 'start_frame=$startFrame',
      if (nbFrames != null) 'nb_frames=$nbFrames',
      if (startTime != null) 'start_time=${startTime!.toSeconds()}',
      if (duration != null) 'duration=${duration!.toSeconds()}',
      if (color != null) 'color=$color',
    ];

    return 'fade=${properties.join(':')}';
  }
}
52 |
53 | /// Options include
54 | /// `tri`
55 | /// `qsin`
56 | /// `hsin`
57 | /// `esin`
58 | /// `log`
59 | /// `ipar`
60 | /// `qua`
61 | /// `cub`
62 | /// `squ`
63 | /// `cbr`
64 | /// `par`
65 | /// `exp`
66 | /// `iqsin`
67 | /// `ihsin`
68 | /// `dese`
69 | /// `desi`
70 | /// `losi`
71 | /// `sinc`
72 | /// `isinc`
73 | /// `nofade`
enum AFadeCurve {
  // Each identifier matches a curve name accepted by FFMPEG's `afade`
  // filter `curve` option (see the doc comment above for the list).
  tri,
  qsin,
  hsin,
  esin,
  log,
  ipar,
  qua,
  cub,
  squ,
  cbr,
  par,
  exp,
  iqsin,
  ihsin,
  dese,
  desi,
  losi,
  sinc,
  isinc,
  nofade
}
96 |
97 | /// Fades a given audio stream.
class AFadeFilter implements Filter {
  const AFadeFilter({
    required this.type,
    this.startTime,
    this.duration,
    this.curve,
  }) : assert(type == 'in' || type == 'out');

  /// Effect type, either "in" or "out".
  final String type;

  /// The timestamp of the frame to start the fade effect (default is 0)
  final Duration? startTime;

  /// The number of seconds for which the fade effect lasts
  final Duration? duration;

  /// The curve for the fade transition
  final AFadeCurve? curve;

  @override
  String toCli() {
    final argList = [
      'type=$type',
      if (startTime != null) 'start_time=${startTime!.toSeconds()}',
      if (duration != null) 'duration=${duration!.toSeconds()}',
      // Use the enum's short name ("tri"), not the default Dart
      // toString() ("AFadeCurve.tri"), which FFMPEG would reject.
      if (curve != null) 'curve=${curve!.name}'
    ];

    return 'afade=${argList.join(':')}';
  }
}
130 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/fps_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 | import 'package:ffmpeg_cli/src/time.dart';
3 |
4 | /// Converts the input video stream to the specified constant frame rate
5 | /// by duplicating or dropping frames as necessary.
class FpsFilter implements Filter {
  FpsFilter({
    required this.fps,
    this.startTime,
    this.round,
    this.eofAction,
  })  : assert(fps > 0),
        assert(round == null || const ['zero', 'inf', 'down', 'up', 'near'].contains(round)),
        assert(eofAction == null || const ['round', 'pass'].contains(eofAction));

  /// The desired output frame rate (default is 25)
  final int fps;

  /// The first presentation timestamp where the filter will take place
  final Duration? startTime;

  /// The timestamp rounding method for `startTime` (default is near)
  final String? round;

  /// The action performed when reading the last frame
  final String? eofAction;

  /// Renders "fps=N[:start_time='S'][:round=R][:eof_action=E]".
  @override
  String toCli() {
    final buffer = StringBuffer('fps=$fps');
    if (startTime != null) {
      buffer.write(":start_time='${startTime!.toSeconds()}'");
    }
    if (round != null) {
      buffer.write(':round=$round');
    }
    if (eofAction != null) {
      buffer.write(':eof_action=$eofAction');
    }
    return buffer.toString();
  }
}
40 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/null_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Routes the input video stream to the output video stream
4 | /// without any modifications.
class NullFilter implements Filter {
  const NullFilter();

  /// Renders the FFMPEG "null" video pass-through filter.
  @override
  String toCli() => 'null';
}
13 |
14 | /// Routes the input audio stream to the output audio stream
15 | /// without any modifications.
class ANullFilter implements Filter {
  const ANullFilter();

  /// Renders the FFMPEG "anull" audio pass-through filter.
  @override
  String toCli() => 'anull';
}
24 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/overlay_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Overlays one video on top of another.
4 | ///
5 | /// First video stream is the "main" and the second video stream is the "overlay".
class OverlayFilter implements Filter {
  const OverlayFilter({
    this.x,
    this.y,
    this.eofAction,
    this.shortest,
    this.overlayW,
    this.overlayH,
    this.inputFrameCount,
    this.timestamp,
  })  : assert(shortest == null || shortest == 1 || shortest == 0),
        assert(eofAction == null || eofAction == 'repeat' || eofAction == 'endall' || eofAction == 'pass');

  /// x-position of the image taken from the top left corner
  final int? x;

  /// y-position of the image taken from the top left corner
  final int? y;

  /// The action to take when EOF is encountered on the secondary input
  final String? eofAction;

  /// Force the output to terminate when the shortest input terminates
  final int? shortest;

  /// Overlay width
  final int? overlayW;

  /// Overlay height
  final int? overlayH;

  /// The number of input frames
  final int? inputFrameCount;

  /// The timestamp of when the overlay is displayed
  final Duration? timestamp;

  @override
  String toCli() {
    // Use named "x="/"y=" options instead of positional values. With
    // the positional form, supplying [y] without [x] shifted the y
    // value into the x slot (positional order is x:y).
    final properties = [
      if (x != null) "x=$x",
      if (y != null) "y=$y",
      if (eofAction != null) "eof_action=$eofAction",
      if (shortest != null) "shortest=$shortest",
      // NOTE(review): overlay_w/overlay_h/n/t read like expression
      // variables rather than overlay options — verify against the
      // FFMPEG overlay filter documentation.
      if (overlayW != null) "overlay_w=$overlayW",
      if (overlayH != null) "overlay_h=$overlayH",
      if (inputFrameCount != null) "n=$inputFrameCount",
      if (timestamp != null) "t=${timestamp!.inSeconds}"
    ];
    return 'overlay=${properties.join(":")}';
  }
}
58 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/presentation_timestamp_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
3 | /// Sets the Presentation TimeStamp (PTS) for the given video stream.
4 | ///
5 | /// When trimming a video to a subsection, it's important to also set the
6 | /// PTS of the trimmed video to `"PTS-STARTPTS"` to avoid an empty gap
7 | /// before the start of the trimmed video.
class SetPtsFilter implements Filter {
  /// Resets the video stream's timestamps so playback starts at zero.
  const SetPtsFilter.startPts() : this(pts: 'PTS-STARTPTS');

  const SetPtsFilter({required this.pts});

  /// The presentation timestamp expression to apply.
  final String pts;

  @override
  String toCli() {
    return "setpts=$pts";
  }
}
19 |
/// Sets the Presentation TimeStamp (PTS) for the given audio stream.
class ASetPtsFilter implements Filter {
  const ASetPtsFilter({required this.pts});

  /// Convenience constructor that rebases the stream to start at PTS zero.
  const ASetPtsFilter.startPts() : this(pts: 'PTS-STARTPTS');

  /// The presentation timestamp expression applied to the stream.
  final String pts;

  @override
  String toCli() {
    return 'asetpts=$pts';
  }
}
32 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/sample_aspect_ratio_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
/// Sets the Sample Aspect Ratio for the filter output video.
class SetSarFilter implements Filter {
  SetSarFilter({
    required this.sar,
  }) : assert(sar.isNotEmpty);

  /// The sample aspect ratio expression passed to `setsar` (must be
  /// non-empty).
  final String sar;

  @override
  String toCli() => 'setsar=sar=$sar';
}
17 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/scale_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/ffmpeg_cli.dart';
2 | import 'package:ffmpeg_cli/src/ffmpeg/video_size.dart';
3 |
/// Resize the input video
///
/// Forces the output display ratio to be the same as the input
class ScaleFilter implements Filter {
  ScaleFilter({
    this.width,
    this.height,
    this.eval,
    this.interl,
    this.swsFlags,
    this.param0,
    this.param1,
    this.size,
  })  : assert(width == null || width >= -1),
        assert(height == null || height >= -1),
        assert(eval == null || eval == 'init' || eval == 'frame'),
        assert(interl == null || interl == 1 || interl == 0 || interl == -1);

  /// Width for scale
  final int? width;

  /// Height for scale
  final int? height;

  /// When to evaluate width and height expression (default is init)
  final String? eval;

  /// Set the interlacing mode (default is 0)
  final int? interl;

  /// Scalar flags used to set the scaling algorithm
  final SwsFlag? swsFlags;

  /// Set libswscale parameter
  final String? param0;

  /// Set libswscale parameter
  final String? param1;

  /// Output frame size, serialized as `WxH`.
  final VideoSize? size;

  // TODO: in_color_matrix
  // TODO: out_color_matrix
  // TODO: in_range
  // TODO: out_range
  // TODO: force_original_aspect_ratio
  // TODO: force_divisible_by

  @override
  String toCli() {
    final properties = [
      if (width != null) 'width=$width',
      if (height != null) 'height=$height',
      if (eval != null) 'eval=$eval',
      if (interl != null) 'interl=$interl',
      // Within the scale filter the option is named `flags`; `sws_flags` is
      // only valid as a top-level ffmpeg option, not inside a filtergraph.
      if (swsFlags != null) 'flags=${swsFlags!.cliValue}',
      if (param0 != null) 'param0=$param0',
      if (param1 != null) 'param1=$param1',
      // Serialize as `WxH` explicitly — interpolating the VideoSize directly
      // emitted its debug toString() ("[Size]: WxH"), which ffmpeg rejects.
      if (size != null) 'size=${size!.width}x${size!.height}',
    ];

    return 'scale=${properties.join(':')}';
  }
}
68 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/subtitle_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
/// Draw subtitles on top of input video
///
/// Requires libass library to be compiled with FFmpeg
class SubtitleFilter implements Filter {
  SubtitleFilter({
    required this.filename,
    this.forceStyle,
  });

  /// Path to where the subtitle file is located
  final String filename;

  /// Override the default style, using ASS style format
  ///
  /// KEY=VALUE pairs separated by a comma
  final String? forceStyle;

  @override
  String toCli() {
    final buffer = StringBuffer('subtitles=$filename');
    if (forceStyle != null) {
      buffer.write(':force_style=$forceStyle');
    }
    return buffer.toString();
  }
}
25 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/sws.dart:
--------------------------------------------------------------------------------
/// A libswscale scaling-algorithm flag, used e.g. by the scale filter.
///
/// Options include
/// `fastBilinear`
/// `bilinear`
/// `bicubic`
/// `experimental`
/// `neighbor`
/// `area`
/// `bicublin`
/// `gauss`
/// `sinc`
/// `lanczos`
/// `spline`
/// `printInfo`
/// `accurateRnd`
/// `fullChromaInt`
/// `fullChromaInp`
/// `bitexact`
class SwsFlag {
  static const fastBilinear = SwsFlag._('fast_bilinear');
  static const bilinear = SwsFlag._('bilinear');
  static const bicubic = SwsFlag._('bicubic');
  static const experimental = SwsFlag._('experimental');
  static const neighbor = SwsFlag._('neighbor');
  static const area = SwsFlag._('area');
  static const bicublin = SwsFlag._('bicublin');
  static const gauss = SwsFlag._('gauss');
  static const sinc = SwsFlag._('sinc');
  static const lanczos = SwsFlag._('lanczos');
  static const spline = SwsFlag._('spline');
  static const printInfo = SwsFlag._('print_info');
  static const accurateRnd = SwsFlag._('accurate_rnd');
  static const fullChromaInt = SwsFlag._('full_chroma_int');
  static const fullChromaInp = SwsFlag._('full_chroma_inp');
  static const bitexact = SwsFlag._('bitexact');

  const SwsFlag._(this.cliValue);

  /// The exact flag string FFMPEG expects on the command line.
  final String cliValue;

  String toCli() => cliValue;
}
42 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/tpad_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 | import 'package:ffmpeg_cli/src/time.dart';
3 |
/// Adds padding frames to a given video stream.
class TPadFilter implements Filter {
  const TPadFilter({
    this.start,
    this.stop,
    this.startDuration,
    this.stopDuration,
    this.startMode,
    this.stopMode,
    this.color,
  })  : assert(startMode == null || startMode == 'add' || startMode == 'clone'),
        assert(stopMode == null || stopMode == 'add' || stopMode == 'clone');

  /// Number of frames to add before video content.
  final int? start;

  /// Number of frames to add after video content.
  final int? stop;

  /// Time delay added before playing the stream
  final Duration? startDuration;

  /// Time delay added after end of the stream
  final Duration? stopDuration;

  /// Kind of frames added to beginning of stream ('add' or 'clone')
  final String? startMode;

  /// Kind of frames added to end of stream ('add' or 'clone')
  final String? stopMode;

  /// Color of the added padding frames.
  final String? color;

  @override
  String toCli() {
    final properties = [
      if (start != null) 'start=$start',
      if (stop != null) 'stop=$stop',
      if (startDuration != null) 'start_duration=${startDuration!.toSeconds()}',
      if (stopDuration != null) 'stop_duration=${stopDuration!.toSeconds()}',
      if (startMode != null) 'start_mode=$startMode',
      if (stopMode != null) 'stop_mode=$stopMode',
      if (color != null) 'color=$color',
    ];

    return 'tpad=${properties.join(':')}';
  }
}
66 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/trim_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 | import 'package:ffmpeg_cli/src/time.dart';
3 |
/// Reduces a given video stream to the segment between `start` and `end`.
class TrimFilter implements Filter {
  const TrimFilter({
    this.start,
    this.end,
    this.duration,
  });

  /// The time of the start of the kept section
  final Duration? start;

  /// The time of the first frame that will be dropped
  final Duration? end;

  /// The maximum duration of the kept section
  final Duration? duration;

  @override
  String toCli() {
    final properties = [
      if (start != null) "start='${start!.toSeconds()}'",
      if (end != null) "end='${end!.toSeconds()}'",
      if (duration != null) "duration='${duration!.toSeconds()}'",
    ];
    return "trim=${properties.join(':')}";
  }
}
36 |
/// Reduces a given audio stream to the segment between `start` and `end`.
class ATrimFilter implements Filter {
  const ATrimFilter({
    this.start,
    this.end,
    this.duration,
  });

  /// The time of the start of the kept section
  final Duration? start;

  /// The time of the first frame that will be dropped
  final Duration? end;

  /// The maximum duration of the kept section
  final Duration? duration;

  @override
  String toCli() {
    final properties = [
      if (start != null) "start='${start!.toSeconds()}'",
      if (end != null) "end='${end!.toSeconds()}'",
      if (duration != null) "duration='${duration!.toSeconds()}'",
    ];
    return "atrim=${properties.join(':')}";
  }
}
69 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/filters/volume_filter.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/src/ffmpeg/ffmpeg_command.dart';
2 |
/// Adjusts the volume of a given audio stream based off new `volume` given.
class VolumeFilter implements Filter {
  const VolumeFilter({
    required this.volume,
  });

  /// The volume multiplier applied to the audio stream.
  final double volume;

  @override
  String toCli() => 'volume=$volume';
}
17 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/log_level.dart:
--------------------------------------------------------------------------------
/// The log level to be used, options include
/// `quiet`
/// `panic`
/// `fatal`
/// `error`
/// `warning`
/// `info`
/// `verbose`
/// `debug`
/// `trace`
enum LogLevel {
  quiet,
  panic,
  fatal,
  error,
  warning,
  info,
  verbose,
  debug,
  trace,
}

/// Serializes a [LogLevel] to the exact string FFMPEG expects for its
/// `-loglevel` option.
extension LogLevelSerialization on LogLevel {
  static const _cliNames = {
    LogLevel.quiet: 'quiet',
    LogLevel.panic: 'panic',
    LogLevel.fatal: 'fatal',
    LogLevel.error: 'error',
    LogLevel.warning: 'warning',
    LogLevel.info: 'info',
    LogLevel.verbose: 'verbose',
    LogLevel.debug: 'debug',
    LogLevel.trace: 'trace',
  };

  String toFfmpegString() => _cliNames[this]!;
}
47 |
--------------------------------------------------------------------------------
/lib/src/ffmpeg/video_size.dart:
--------------------------------------------------------------------------------
/// A width/height pair describing video dimensions.
class VideoSize {
  const VideoSize({
    required this.width,
    required this.height,
  });

  /// Horizontal extent.
  final num width;

  /// Vertical extent.
  final num height;

  @override
  String toString() => '[Size]: ${width}x$height';

  @override
  bool operator ==(Object other) {
    if (identical(this, other)) {
      return true;
    }
    return other is VideoSize &&
        runtimeType == other.runtimeType &&
        width == other.width &&
        height == other.height;
  }

  @override
  int get hashCode => width.hashCode ^ height.hashCode;
}
21 |
--------------------------------------------------------------------------------
/lib/src/ffprobe/ffprobe.dart:
--------------------------------------------------------------------------------
1 | import 'dart:convert';
2 | import 'dart:io';
3 |
4 | import 'ffprobe_json.dart';
5 |
/// The `ffprobe` command in Dart.
///
/// `ffprobe` is a CLI tool that's used for inspecting video and audio files.
/// The `ffprobe` tool can be used, for example, to determine the types of
/// streams in a video file, a video's codec, or a video's duration.
class Ffprobe {
  /// Runs the FFMPEG `ffprobe` CLI command against the given [filepath].
  ///
  /// If [workingDir] is provided, the `ffprobe` process runs from that
  /// directory, so a relative [filepath] is resolved against it.
  ///
  /// Throws an [Exception] when `ffprobe` exits non-zero or produces no
  /// parsable output.
  static Future run(
    String filepath, {
    String? workingDir,
  }) async {
    final result = await Process.run(
      'ffprobe',
      [
        '-v',
        'quiet',
        '-print_format',
        'json',
        '-show_format',
        '-show_streams',
        filepath,
      ],
      // Previously this parameter was accepted but silently ignored.
      workingDirectory: workingDir,
    );

    if (result.exitCode != 0) {
      print('Failed to run ffprobe for "$filepath"');
      throw Exception('ffprobe returned error: ${result.exitCode}\n${result.stderr}');
    }

    // `stdout` is dynamic; the `is! String` check also covers null.
    final stdout = result.stdout;
    if (stdout is! String || stdout.isEmpty) {
      throw Exception('ffprobe did not output expected data: ${result.stdout}');
    }

    final json = jsonDecode(stdout);
    return FfprobeResult.fromJson(json);
  }
}
40 |
--------------------------------------------------------------------------------
/lib/src/ffprobe/ffprobe_json.dart:
--------------------------------------------------------------------------------
1 | import 'dart:convert';
2 |
3 | import 'package:ffmpeg_cli/ffmpeg_cli.dart';
4 | import 'package:json_annotation/json_annotation.dart';
5 |
6 | part 'ffprobe_json.g.dart';
7 |
/// Deserializes an ffprobe duration string (e.g. "6.549333") to a [Duration].
Duration? _durationFromJson(String? durationString) {
  if (durationString == null) {
    return null;
  }
  return parseFfmpegTimeDuration(durationString);
}

/// Serializes a [Duration] back to its standard FFMPEG string form.
String? _durationToJson(Duration? duration) {
  return duration?.toStandardFormat();
}
12 |
/// The full JSON report produced by an `ffprobe` invocation.
@JsonSerializable()
class FfprobeResult {
  factory FfprobeResult.fromJson(Map json) => _$FfprobeResultFromJson(json);

  FfprobeResult({
    this.streams,
    this.format,
  });

  /// All media streams found in the probed file.
  final List? streams;

  /// Container-level information about the probed file.
  final Format? format;

  Map toJson() => _$FfprobeResultToJson(this);

  @override
  String toString() => 'FfprobeResult:\n${const JsonEncoder.withIndent(' ').convert(toJson())}';
}
32 |
/// A single media stream (video, audio, subtitle, etc.) reported by ffprobe.
///
/// Field names mirror the snake_case keys of ffprobe's "streams" entries;
/// every field is nullable because ffprobe only reports the keys that apply
/// to a given stream type.
@JsonSerializable(fieldRename: FieldRename.snake)
class Stream {
  factory Stream.fromJson(Map json) => _$StreamFromJson(json);

  Stream({
    this.index,
    this.codeName,
    this.codecLongName,
    this.profile,
    this.codecType,
    this.codecTimeBase,
    this.codecTagString,
    this.codecTag,
    this.width,
    this.height,
    this.codecWidth,
    this.codecHeight,
    this.closedCaptions,
    this.hasBFrames,
    this.pixFmt,
    this.level,
    this.colorRange,
    this.colorSpace,
    this.colorTransfer,
    this.colorPrimaries,
    this.chromaLocation,
    this.refs,
    this.isAvc,
    this.nalLengthSize,
    this.rFrameRate,
    this.avgFrameRate,
    this.timeBase,
    this.startPts,
    this.startTime,
    this.durationTs,
    this.duration,
    this.bitRate,
    this.bitsPerRawSample,
    this.maxBitRate,
    this.nbFrames,
    this.disposition,
    this.tags,
    this.sampleFmt,
    this.sampleRate,
    this.channels,
    this.channelLayout,
    this.bitsPerSample,
  });

  final int? index;

  /// The stream's codec name.
  ///
  /// ffprobe emits this under "codec_name" (compare [codecLongName], which
  /// maps to "codec_long_name"), but the automatic snake_case rename of
  /// `codeName` produced "code_name", which never matched — so this field
  /// always deserialized to null. The explicit @JsonKey fixes the mapping;
  /// re-run build_runner to regenerate `ffprobe_json.g.dart` and pick it up.
  @JsonKey(name: 'codec_name')
  final String? codeName;

  final String? codecLongName;
  final String? profile;
  final String? codecType;
  final String? codecTimeBase;
  final String? codecTagString;
  final String? codecTag;
  final int? width;
  final int? height;
  final int? codecWidth;
  final int? codecHeight;
  final int? closedCaptions;
  final int? hasBFrames;
  final String? pixFmt;
  final int? level;
  final String? colorRange;
  final String? colorSpace;
  final String? colorTransfer;
  final String? colorPrimaries;
  final String? chromaLocation;
  final int? refs;
  final String? isAvc;
  final String? nalLengthSize;
  final String? rFrameRate;
  final String? avgFrameRate;
  final String? timeBase;
  final int? startPts;
  @JsonKey(fromJson: _durationFromJson, toJson: _durationToJson)
  final Duration? startTime;
  final int? durationTs;
  @JsonKey(fromJson: _durationFromJson, toJson: _durationToJson)
  final Duration? duration;
  final String? bitRate;
  final String? bitsPerRawSample;
  final String? maxBitRate;
  final String? nbFrames;
  final Disposition? disposition;
  final Tags? tags;

  final String? sampleFmt;
  final String? sampleRate;
  final int? channels;
  final String? channelLayout;
  final int? bitsPerSample;

  Map toJson() => _$StreamToJson(this);
}
130 |
/// Per-stream disposition flags reported by ffprobe.
///
/// Each field mirrors a key of ffprobe's "disposition" object; values are
/// integer flags (presumably 0/1 — confirm against ffprobe output).
@JsonSerializable(fieldRename: FieldRename.snake)
class Disposition {
  factory Disposition.fromJson(Map json) => _$DispositionFromJson(json);

  Disposition({
    this.defaultCount,
    this.dub,
    this.original,
    this.comment,
    this.lyrics,
    this.karaoke,
    this.forced,
    this.hearingImpaired,
    this.visualImpaired,
    this.cleanEffects,
    this.attachedPic,
    this.timedThumbnails,
  });

  // 'default' is a Dart reserved word, so the field is renamed and mapped
  // back to the JSON key explicitly.
  @JsonKey(name: 'default')
  final int? defaultCount;
  final int? dub;
  final int? original;
  final int? comment;
  final int? lyrics;
  final int? karaoke;
  final int? forced;
  final int? hearingImpaired;
  final int? visualImpaired;
  final int? cleanEffects;
  final int? attachedPic;
  final int? timedThumbnails;

  Map toJson() => _$DispositionToJson(this);
}
166 |
/// Container-level ("format") information reported by ffprobe — see the
/// sample JSON at the bottom of this file.
@JsonSerializable(fieldRename: FieldRename.snake)
class Format {
  factory Format.fromJson(Map json) => _$FormatFromJson(json);

  Format({
    this.filename,
    this.nbStreams,
    this.nbPrograms,
    this.formatName,
    this.formatLongName,
    this.startTime,
    this.duration,
    this.size,
    this.bitRate,
    this.probeScore,
    this.tags,
  });

  final String? filename;

  final int? nbStreams;
  final int? nbPrograms;

  final String? formatName;
  final String? formatLongName;

  // ffprobe reports these as fractional-second strings (e.g. "6.549333");
  // they're converted to Durations on (de)serialization.
  @JsonKey(fromJson: _durationFromJson, toJson: _durationToJson)
  final Duration? startTime;
  @JsonKey(fromJson: _durationFromJson, toJson: _durationToJson)
  final Duration? duration;
  final String? size;
  final String? bitRate;
  final int? probeScore;

  final Tags? tags;

  Map toJson() => _$FormatToJson(this);
}
205 |
/// Metadata tags reported by ffprobe for a format or stream.
@JsonSerializable(fieldRename: FieldRename.snake)
class Tags {
  factory Tags.fromJson(Map json) => _$TagsFromJson(json);

  Tags({
    this.majorBrand,
    this.minorVersion,
    this.compatibleBrands,
    this.creationTime,
    this.language,
    this.handlerName,
    this.encoder,
  });

  final String? majorBrand;
  final String? minorVersion;
  final String? compatibleBrands;
  final String? creationTime;
  final String? language;
  final String? handlerName;
  final String? encoder;

  Map toJson() => _$TagsToJson(this);
}
230 |
231 | // "format": {
232 | // "filename": "./assets/test_video.mp4",
233 | // "nb_streams": 2,
234 | // "nb_programs": 0,
235 | // "format_name": "mov,mp4,m4a,3gp,3g2,mj2",
236 | // "format_long_name": "QuickTime / MOV",
237 | // "start_time": "0.000000",
238 | // "duration": "6.549333",
239 | // "size": "2982109",
240 | // "bit_rate": "3642641",
241 | // "probe_score": 100,
242 | // "tags": {
243 | // "major_brand": "mp42",
244 | // "minor_version": "0",
245 | // "compatible_brands": "mp42mp41",
246 | // "creation_time": "2020-12-22T05:22:33.000000Z"
247 | // }
248 | // }
249 |
--------------------------------------------------------------------------------
/lib/src/ffprobe/ffprobe_json.g.dart:
--------------------------------------------------------------------------------
1 | // GENERATED CODE - DO NOT MODIFY BY HAND
2 |
3 | part of 'ffprobe_json.dart';
4 |
5 | // **************************************************************************
6 | // JsonSerializableGenerator
7 | // **************************************************************************
8 |
FfprobeResult _$FfprobeResultFromJson(Map json) =>
    FfprobeResult(
      streams: (json['streams'] as List?)
          ?.map((e) => Stream.fromJson(e as Map))
          .toList(),
      format: json['format'] == null
          ? null
          : Format.fromJson(json['format'] as Map),
    );

Map _$FfprobeResultToJson(FfprobeResult instance) =>
    {
      'streams': instance.streams,
      'format': instance.format,
    };

// NOTE(review): `codeName` is looked up under 'code_name' below, but ffprobe
// emits 'codec_name' (compare 'codec_long_name'), so this field is likely
// always null. Fix via @JsonKey on the model and regenerate this file with
// build_runner — do not hand-edit here.
Stream _$StreamFromJson(Map json) => Stream(
      index: json['index'] as int?,
      codeName: json['code_name'] as String?,
      codecLongName: json['codec_long_name'] as String?,
      profile: json['profile'] as String?,
      codecType: json['codec_type'] as String?,
      codecTimeBase: json['codec_time_base'] as String?,
      codecTagString: json['codec_tag_string'] as String?,
      codecTag: json['codec_tag'] as String?,
      width: json['width'] as int?,
      height: json['height'] as int?,
      codecWidth: json['codec_width'] as int?,
      codecHeight: json['codec_height'] as int?,
      closedCaptions: json['closed_captions'] as int?,
      hasBFrames: json['has_b_frames'] as int?,
      pixFmt: json['pix_fmt'] as String?,
      level: json['level'] as int?,
      colorRange: json['color_range'] as String?,
      colorSpace: json['color_space'] as String?,
      colorTransfer: json['color_transfer'] as String?,
      colorPrimaries: json['color_primaries'] as String?,
      chromaLocation: json['chroma_location'] as String?,
      refs: json['refs'] as int?,
      isAvc: json['is_avc'] as String?,
      nalLengthSize: json['nal_length_size'] as String?,
      rFrameRate: json['r_frame_rate'] as String?,
      avgFrameRate: json['avg_frame_rate'] as String?,
      timeBase: json['time_base'] as String?,
      startPts: json['start_pts'] as int?,
      startTime: _durationFromJson(json['start_time'] as String?),
      durationTs: json['duration_ts'] as int?,
      duration: _durationFromJson(json['duration'] as String?),
      bitRate: json['bit_rate'] as String?,
      bitsPerRawSample: json['bits_per_raw_sample'] as String?,
      maxBitRate: json['max_bit_rate'] as String?,
      nbFrames: json['nb_frames'] as String?,
      disposition: json['disposition'] == null
          ? null
          : Disposition.fromJson(json['disposition'] as Map),
      tags: json['tags'] == null
          ? null
          : Tags.fromJson(json['tags'] as Map),
      sampleFmt: json['sample_fmt'] as String?,
      sampleRate: json['sample_rate'] as String?,
      channels: json['channels'] as int?,
      channelLayout: json['channel_layout'] as String?,
      bitsPerSample: json['bits_per_sample'] as int?,
    );

Map _$StreamToJson(Stream instance) => {
      'index': instance.index,
      'code_name': instance.codeName,
      'codec_long_name': instance.codecLongName,
      'profile': instance.profile,
      'codec_type': instance.codecType,
      'codec_time_base': instance.codecTimeBase,
      'codec_tag_string': instance.codecTagString,
      'codec_tag': instance.codecTag,
      'width': instance.width,
      'height': instance.height,
      'codec_width': instance.codecWidth,
      'codec_height': instance.codecHeight,
      'closed_captions': instance.closedCaptions,
      'has_b_frames': instance.hasBFrames,
      'pix_fmt': instance.pixFmt,
      'level': instance.level,
      'color_range': instance.colorRange,
      'color_space': instance.colorSpace,
      'color_transfer': instance.colorTransfer,
      'color_primaries': instance.colorPrimaries,
      'chroma_location': instance.chromaLocation,
      'refs': instance.refs,
      'is_avc': instance.isAvc,
      'nal_length_size': instance.nalLengthSize,
      'r_frame_rate': instance.rFrameRate,
      'avg_frame_rate': instance.avgFrameRate,
      'time_base': instance.timeBase,
      'start_pts': instance.startPts,
      'start_time': _durationToJson(instance.startTime),
      'duration_ts': instance.durationTs,
      'duration': _durationToJson(instance.duration),
      'bit_rate': instance.bitRate,
      'bits_per_raw_sample': instance.bitsPerRawSample,
      'max_bit_rate': instance.maxBitRate,
      'nb_frames': instance.nbFrames,
      'disposition': instance.disposition,
      'tags': instance.tags,
      'sample_fmt': instance.sampleFmt,
      'sample_rate': instance.sampleRate,
      'channels': instance.channels,
      'channel_layout': instance.channelLayout,
      'bits_per_sample': instance.bitsPerSample,
    };

Disposition _$DispositionFromJson(Map json) => Disposition(
      defaultCount: json['default'] as int?,
      dub: json['dub'] as int?,
      original: json['original'] as int?,
      comment: json['comment'] as int?,
      lyrics: json['lyrics'] as int?,
      karaoke: json['karaoke'] as int?,
      forced: json['forced'] as int?,
      hearingImpaired: json['hearing_impaired'] as int?,
      visualImpaired: json['visual_impaired'] as int?,
      cleanEffects: json['clean_effects'] as int?,
      attachedPic: json['attached_pic'] as int?,
      timedThumbnails: json['timed_thumbnails'] as int?,
    );

Map _$DispositionToJson(Disposition instance) =>
    {
      'default': instance.defaultCount,
      'dub': instance.dub,
      'original': instance.original,
      'comment': instance.comment,
      'lyrics': instance.lyrics,
      'karaoke': instance.karaoke,
      'forced': instance.forced,
      'hearing_impaired': instance.hearingImpaired,
      'visual_impaired': instance.visualImpaired,
      'clean_effects': instance.cleanEffects,
      'attached_pic': instance.attachedPic,
      'timed_thumbnails': instance.timedThumbnails,
    };

Format _$FormatFromJson(Map json) => Format(
      filename: json['filename'] as String?,
      nbStreams: json['nb_streams'] as int?,
      nbPrograms: json['nb_programs'] as int?,
      formatName: json['format_name'] as String?,
      formatLongName: json['format_long_name'] as String?,
      startTime: _durationFromJson(json['start_time'] as String?),
      duration: _durationFromJson(json['duration'] as String?),
      size: json['size'] as String?,
      bitRate: json['bit_rate'] as String?,
      probeScore: json['probe_score'] as int?,
      tags: json['tags'] == null
          ? null
          : Tags.fromJson(json['tags'] as Map),
    );

Map _$FormatToJson(Format instance) => {
      'filename': instance.filename,
      'nb_streams': instance.nbStreams,
      'nb_programs': instance.nbPrograms,
      'format_name': instance.formatName,
      'format_long_name': instance.formatLongName,
      'start_time': _durationToJson(instance.startTime),
      'duration': _durationToJson(instance.duration),
      'size': instance.size,
      'bit_rate': instance.bitRate,
      'probe_score': instance.probeScore,
      'tags': instance.tags,
    };

Tags _$TagsFromJson(Map json) => Tags(
      majorBrand: json['major_brand'] as String?,
      minorVersion: json['minor_version'] as String?,
      compatibleBrands: json['compatible_brands'] as String?,
      creationTime: json['creation_time'] as String?,
      language: json['language'] as String?,
      handlerName: json['handler_name'] as String?,
      encoder: json['encoder'] as String?,
    );

Map _$TagsToJson(Tags instance) => {
      'major_brand': instance.majorBrand,
      'minor_version': instance.minorVersion,
      'compatible_brands': instance.compatibleBrands,
      'creation_time': instance.creationTime,
      'language': instance.language,
      'handler_name': instance.handlerName,
      'encoder': instance.encoder,
    };
199 |
--------------------------------------------------------------------------------
/lib/src/logging.dart:
--------------------------------------------------------------------------------
1 | import 'package:logging/logging.dart';
2 |
/// Names of the loggers used throughout this package, so callers can enable
/// or disable logging per subsystem via [initLoggers]/[deactivateLoggers].
class LogNames {
  static const ffmpegCommand = "ffmpeg.command";
  static const ffmpegBuilder = "ffmpeg.command.builder";
  static const ffmpegFilter = "ffmpeg.filter";
}
8 |
/// Package-wide loggers, one per subsystem named in [LogNames].
final ffmpegCommandLog = Logger(LogNames.ffmpegCommand);
final ffmpegBuilderLog = Logger(LogNames.ffmpegBuilder);
final ffmpegFilter = Logger(LogNames.ffmpegFilter);

/// Loggers that currently have a listener attached by [initLoggers].
///
/// Must be a Set literal: the untyped `{}` is a *Map* literal in Dart, which
/// doesn't support the `contains`/`add`/`remove` calls made on this below.
final _activeLoggers = <Logger>{};
14 |
/// Activates all package logging by attaching a listener to the root logger
/// at the given [level].
void initAllLoggers(Level level) {
  initLoggers(level, {Logger.root});
}
18 |
/// Attaches a print-based listener at [level] to each logger in [loggers]
/// that isn't already active.
void initLoggers(Level level, Set loggers) {
  // Allow each logger to carry its own level, independent of the root.
  hierarchicalLoggingEnabled = true;

  for (final logger in loggers) {
    if (_activeLoggers.contains(logger)) {
      continue;
    }

    // ignore: avoid_print
    print("Initializing logger: ${logger.name}");

    logger
      ..level = level
      ..onRecord.listen(printLog);

    _activeLoggers.add(logger);
  }
}
35 |
/// Detaches listeners from each logger in [loggers] that was previously
/// activated via [initLoggers].
void deactivateLoggers(Set loggers) {
  for (final logger in loggers) {
    if (!_activeLoggers.contains(logger)) {
      continue;
    }

    // ignore: avoid_print
    print("Deactivating logger: ${logger.name}");
    logger.clearListeners();
    _activeLoggers.remove(logger);
  }
}
47 |
/// Prints a single log [record] as "LEVEL: time: message".
void printLog(record) {
  // ignore: avoid_print
  print("${record.level.name}: ${record.time}: ${record.message}");
}
52 |
--------------------------------------------------------------------------------
/lib/src/time.dart:
--------------------------------------------------------------------------------
// Microseconds-per-unit conversion factors used by the duration parsing and
// formatting code in this file. The hour/minute factors are doubles because
// they're written in scientific notation; the hour/minute constants appear
// to be consumed by code beyond this chunk (e.g. the Duration extension
// below).
const _microsPerHour = 3.6e9;
const _microsPerMinute = 6e7;
const _microsPerSecond = 1000000;
const _microsPerMillisecond = 1000;
5 |
/// Parses any FFMPEG duration: either unit-specific (`45s`, `200ms`, `3us`)
/// or the standard `[HH:]MM:SS[.m...]` format.
Duration parseFfmpegTimeDuration(String durationString) {
  // All three unit suffixes ('s', 'ms', 'us') end with 's', so one check
  // distinguishes unit-specific durations from standard ones.
  return durationString.endsWith('s')
      ? parseUnitSpecificDuration(durationString)
      : parseStandardDuration(durationString);
}
17 |
/// Parses a duration in FFMPEG's standard `[-][HH:]MM:SS[.m...]` format.
///
/// Throws an [Exception] for an empty string or for more than three
/// colon-separated components.
Duration parseStandardDuration(String durationString) {
  if (durationString.isEmpty) {
    throw Exception('Duration string must be non-empty: $durationString');
  }

  // Strip a leading negative sign and remember it for the end.
  final isNegative = durationString.startsWith('-');
  final remaining = isNegative ? durationString.substring(1) : durationString;

  final components = remaining.split(':');

  // The last component is seconds, possibly with a fractional part.
  final totalSeconds = double.parse(components.removeLast());
  final wholeSeconds = totalSeconds.truncate();
  // truncate() shouldn't change anything — we just need an int.
  final fractionMicros =
      ((totalSeconds - wholeSeconds) * Duration.microsecondsPerSecond).truncate();

  var minutesText = '';
  var hoursText = '';
  if (components.isNotEmpty) {
    minutesText = components.removeLast();
  }
  if (components.isNotEmpty) {
    hoursText = components.removeLast();
  }
  if (components.isNotEmpty) {
    throw Exception('A standard format duration cannot have any time components beyond hours: "$remaining"');
  }

  final signMultiplier = isNegative ? -1 : 1;
  return Duration(
    hours: (hoursText.isEmpty ? 0 : int.parse(hoursText)) * signMultiplier,
    minutes: (minutesText.isEmpty ? 0 : int.parse(minutesText)) * signMultiplier,
    seconds: wholeSeconds * signMultiplier,
    microseconds: fractionMicros * signMultiplier,
  );
}
54 |
/// Parses a unit-specific FFMPEG duration of the form `[-]S+[.m...][s|ms|us]`,
/// e.g. `45s`, `1.5s`, `200ms`, `-3us`.
///
/// In the FFMPEG grammar `S+` means "one or more digits"; the previous
/// implementation read the `+` as a literal character, so ordinary inputs
/// like `1.5s` failed to parse and fractions not exactly three digits long
/// were scaled incorrectly. The legacy `12+.345s` spelling is still accepted
/// for backward compatibility. Sub-microsecond amounts are rounded.
///
/// Throws an [Exception] for an empty string or a missing unit suffix, and a
/// [FormatException] for a non-numeric value.
Duration parseUnitSpecificDuration(String unitSpecificDuration) {
  String durationString = unitSpecificDuration;
  if (durationString.isEmpty) {
    throw Exception('Duration string must be non-empty: $unitSpecificDuration');
  }

  bool isNegative = durationString[0] == '-';
  if (isNegative) {
    durationString = durationString.substring(1);
  }

  // Determine the unit suffix and how many microseconds one unit represents.
  // Check 'ms'/'us' before the bare 's' suffix, which they both end with.
  int microsPerUnit;
  if (durationString.endsWith('ms')) {
    durationString = durationString.substring(0, durationString.length - 2);
    microsPerUnit = Duration.microsecondsPerMillisecond;
  } else if (durationString.endsWith('us')) {
    durationString = durationString.substring(0, durationString.length - 2);
    microsPerUnit = 1;
  } else if (durationString.endsWith('s')) {
    durationString = durationString.substring(0, durationString.length - 1);
    microsPerUnit = Duration.microsecondsPerSecond;
  } else {
    throw Exception('Unit-specific durations must specify the time unit: "$unitSpecificDuration"');
  }

  // Tolerate the legacy "12+.345" spelling by dropping the '+'.
  durationString = durationString.replaceAll('+', '');

  final value = double.parse(durationString);

  final signMultiplier = isNegative ? -1 : 1;
  return Duration(
    microseconds: (value * microsPerUnit).round() * signMultiplier,
  );
}
104 |
/// Formatting extensions that render a [Duration] in the syntaxes accepted by
/// FFMPEG's CLI (see ffmpeg-utils "Time duration").
extension FfmpegDuration on Duration {
  /// Formats this duration as `[[HH:]MM:]SS[.ffffff]`, e.g., "03:04:05.006007".
  ///
  /// Hours and minutes are omitted when they are zero, and the fraction is
  /// omitted when there are no sub-second microseconds.
  String toStandardFormat() {
    // Work in integer microseconds: building the fraction from a double's
    // toString() can produce exponential notation for sub-millisecond values
    // (e.g. 7 micros -> "7e-6"), which corrupts the output.
    final totalMicros = inMicroseconds.abs();
    final hours = totalMicros ~/ Duration.microsecondsPerHour;
    final minutes = (totalMicros ~/ Duration.microsecondsPerMinute) % Duration.minutesPerHour;
    final seconds = (totalMicros ~/ Duration.microsecondsPerSecond) % Duration.secondsPerMinute;
    final fractionMicros = totalMicros % Duration.microsecondsPerSecond;

    final stringBuffer = StringBuffer(isNegative ? '-' : '');
    // Hours
    if (hours > 0) {
      stringBuffer.write('${hours.toString().padLeft(2, '0')}:');
    }
    // Minutes
    if (minutes > 0 || hours > 0) {
      stringBuffer.write('${minutes.toString().padLeft(2, '0')}:');
    }
    // Seconds: zero-padded only when a larger component precedes them.
    if (hours > 0 || minutes > 0) {
      stringBuffer.write(seconds.toString().padLeft(2, '0'));
    } else {
      stringBuffer.write(seconds.toString());
    }
    // Fraction
    if (fractionMicros > 0) {
      stringBuffer.write('.${_fractionDigits(fractionMicros, 6)}');
    }
    return stringBuffer.toString();
  }

  /// Formats this duration as a single count of [timeUnit], e.g.,
  /// "5+.006007s", "5006+.007ms", or "5006007us".
  String toUnitSpecifiedFormat(FfmpegTimeUnit timeUnit) {
    final sign = isNegative ? '-' : '';
    final totalMicros = inMicroseconds.abs();
    late final int whole;
    late final String fraction;
    late final String units;
    switch (timeUnit) {
      case FfmpegTimeUnit.seconds:
        whole = totalMicros ~/ Duration.microsecondsPerSecond;
        final remainder = totalMicros % Duration.microsecondsPerSecond;
        fraction = remainder == 0 ? '' : '+.${_fractionDigits(remainder, 6)}';
        units = 's';
        break;
      case FfmpegTimeUnit.milliseconds:
        whole = totalMicros ~/ Duration.microsecondsPerMillisecond;
        final remainder = totalMicros % Duration.microsecondsPerMillisecond;
        fraction = remainder == 0 ? '' : '+.${_fractionDigits(remainder, 3)}';
        units = 'ms';
        break;
      case FfmpegTimeUnit.microseconds:
        // Microseconds are the finest resolution a Duration stores, so there
        // is never a fractional part.
        whole = totalMicros;
        fraction = '';
        units = 'us';
        break;
    }

    return '$sign$whole$fraction$units';
  }

  /// Formats this duration as fractional seconds, e.g., "23.456".
  String toSeconds() {
    final sign = isNegative ? '-' : '';
    final totalMicros = inMicroseconds.abs();
    final whole = totalMicros ~/ Duration.microsecondsPerSecond;
    final remainder = totalMicros % Duration.microsecondsPerSecond;
    // Always keep a decimal part (matching the previous double-based output,
    // e.g. "55.0"), but avoid exponential notation for tiny values.
    final fraction = remainder == 0 ? '0' : _fractionDigits(remainder, 6);
    return '$sign$whole.$fraction';
  }

  /// Renders [value] as a [width]-digit fractional string with trailing zeros
  /// removed, e.g., (189000, 6) -> "189", (7, 3) -> "007".
  static String _fractionDigits(int value, int width) {
    var digits = value.toString().padLeft(width, '0');
    while (digits.endsWith('0')) {
      digits = digits.substring(0, digits.length - 1);
    }
    return digits;
  }
}

/// The unit in which [FfmpegDuration.toUnitSpecifiedFormat] expresses a
/// duration.
enum FfmpegTimeUnit {
  seconds,
  milliseconds,
  microseconds,
}
179 |
180 | //class FfmpegTimeBase {
181 | // TODO:
182 | //}
183 |
184 | //class FfmpegTimestamp {
185 | // TODO:
186 | //}
187 |
--------------------------------------------------------------------------------
/pubspec.yaml:
--------------------------------------------------------------------------------
1 | name: ffmpeg_cli
2 | version: 0.3.0
3 | description: Run FFMPEG CLI commands from Dart.
4 | homepage: https://github.com/Flutter-Bounty-Hunters/ffmpeg_cli
5 |
6 | environment:
7 | sdk: ">=3.0.0 <4.0.0"
8 |
9 | dependencies:
10 | collection: ^1.18.0
11 | json_annotation: ^4.8.1
12 | logging: ^1.2.0
13 |
14 | dev_dependencies:
15 | test: ^1.24.6
16 | flutter_lints: ^2.0.3
17 | build_runner: ^2.4.6
18 | json_serializable: ^6.7.1
19 |
--------------------------------------------------------------------------------
/test/ffmpeg/ffmpeg_command_test.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/ffmpeg_cli.dart';
2 | import 'package:test/test.dart';
3 |
void main() {
  group("FFMPEG", () {
    group("command", () {
      // Builds a complex command by hand (inputs + args + filter graph) and
      // verifies the exact CLI argument list it serializes to.
      test("serializes to CLI arguments", () {
        const outputStream = FfmpegStream(videoId: "[final_v]", audioId: "[final_a]");

        final command = FfmpegCommand.complex(
          inputs: [
            FfmpegInput.asset("assets/intro.mp4"),
            FfmpegInput.asset("assets/content.mp4"),
            FfmpegInput.asset("assets/outro.mov"),
          ],
          args: [
            // Map the filter graph's final video/audio streams to the output.
            CliArg(name: 'map', value: outputStream.videoId!),
            CliArg(name: 'map', value: outputStream.audioId!),
            const CliArg(name: 'y'),
            const CliArg(name: 'vsync', value: '2'),
          ],
          filterGraph: FilterGraph(
            chains: [
              // Concatenate the three inputs into a single video+audio stream.
              FilterChain(
                inputs: [
                  const FfmpegStream(videoId: "[0:v]", audioId: "[0:a]"),
                  const FfmpegStream(videoId: "[1:v]", audioId: "[1:a]"),
                  const FfmpegStream(videoId: "[2:v]", audioId: "[2:a]"),
                ],
                filters: [
                  ConcatFilter(segmentCount: 3, outputVideoStreamCount: 1, outputAudioStreamCount: 1),
                ],
                outputs: [
                  outputStream,
                ],
              ),
            ],
          ),
          outputFilepath: "/my/output/file.mp4",
        );

        // The expected argument order: inputs, args, filter graph, output path.
        expect(
          command.toCli(),
          const CliCommand(
            executable: 'ffmpeg',
            args: [
              "-i", "assets/intro.mp4", //
              "-i", "assets/content.mp4", //
              "-i", "assets/outro.mov", //
              "-map", "[final_v]", //
              "-map", "[final_a]", //
              "-y",
              "-vsync", "2", //
              "-filter_complex",
              "[0:v] [0:a] [1:v] [1:a] [2:v] [2:a] concat=n=3:v=1:a=1 [final_v] [final_a]",
              "/my/output/file.mp4",
            ],
          ),
        );
      });

      // A custom ffmpegPath should replace the default "ffmpeg" executable.
      test("allows custom ffmpeg path for simple commands", () {
        const command = FfmpegCommand.simple(
          ffmpegPath: '/opt/homebrew/bin/ffmpeg',
          outputFilepath: '/my/output/file.mp4',
        );

        expect(
          command.toCli(),
          const CliCommand(
            executable: '/opt/homebrew/bin/ffmpeg',
            args: ['/my/output/file.mp4'],
          ),
        );
      });

      // Same custom-executable check, but going through FfmpegBuilder, which
      // also auto-generates composition stream IDs ("[comp_0_v]" etc.).
      test("allows custom ffmpeg path for complex commands", () {
        final commandBuilder = FfmpegBuilder();

        final inputStream = commandBuilder.addAsset("assets/bee.mp4");
        final outputStream = commandBuilder.createStream(hasVideo: true, hasAudio: true);

        // An empty filter list passes the input streams through to the output.
        commandBuilder.addFilterChain(
          FilterChain(
            inputs: [inputStream],
            filters: [],
            outputs: [outputStream],
          ),
        );

        final command = commandBuilder.build(
          ffmpegPath: '/opt/homebrew/bin/ffmpeg',
          args: [
            CliArg(name: 'map', value: outputStream.videoId!),
            CliArg(name: 'map', value: outputStream.audioId!),
            const CliArg(name: 'vsync', value: '2'),
          ],
          outputFilepath: '/output/test_render.mp4',
        );

        expect(
          command.toCli(),
          const CliCommand(
            executable: '/opt/homebrew/bin/ffmpeg',
            args: [
              '-i', 'assets/bee.mp4', //
              '-map', '[comp_0_v]', //
              '-map', '[comp_0_a]', //
              '-vsync', '2', //
              '-filter_complex', '[0:v] [0:a] [comp_0_v] [comp_0_a]', //
              '/output/test_render.mp4',
            ],
          ),
        );
      });
    });
  });
}
119 |
--------------------------------------------------------------------------------
/test/time_test.dart:
--------------------------------------------------------------------------------
1 | import 'package:ffmpeg_cli/ffmpeg_cli.dart';
2 | import 'package:test/test.dart';
3 |
void main() {
  group('ffmpeg duration', () {
    // Round-trip coverage for parseFfmpegTimeDuration: standard clock format
    // and unit-specific formats, positive and negative.
    group('input parsing', () {
      test('parse "55"', () {
        expect(
          parseFfmpegTimeDuration('55'),
          const Duration(seconds: 55),
        );
      });

      test('parse "0.2"', () {
        expect(
          parseFfmpegTimeDuration('0.2'),
          const Duration(milliseconds: 200),
        );
      });

      test('parse "12:03:45"', () {
        expect(
          parseFfmpegTimeDuration('12:03:45'),
          const Duration(hours: 12, minutes: 3, seconds: 45),
        );
      });

      test('parse "23.189"', () {
        expect(
          parseFfmpegTimeDuration('23.189'),
          const Duration(seconds: 23, milliseconds: 189),
        );
      });

      test('parse "-23.189"', () {
        expect(
          parseFfmpegTimeDuration('-23.189'),
          const Duration(seconds: -23, milliseconds: -189),
        );
      });

      test('parse "200ms"', () {
        expect(
          parseFfmpegTimeDuration('200ms'),
          const Duration(milliseconds: 200),
        );
      });

      test('parse "200000us"', () {
        expect(
          parseFfmpegTimeDuration('200000us'),
          const Duration(microseconds: 200000),
        );
      });

      test('parse "45+.123s"', () {
        expect(
          parseFfmpegTimeDuration('45+.123s'),
          const Duration(seconds: 45, milliseconds: 123),
        );
      });

      test('parse "-45+.123s"', () {
        expect(
          parseFfmpegTimeDuration('-45+.123s'),
          const Duration(seconds: -45, milliseconds: -123),
        );
      });
    });

    group('output format', () {
      test('standard format spot checks', () {
        const duration = Duration(
          hours: 3,
          minutes: 4,
          seconds: 5,
          milliseconds: 6,
          microseconds: 7,
        );

        expect(duration.toStandardFormat(), '03:04:05.006007');
      });

      test('standard format: "55"', () {
        const duration = Duration(
          seconds: 55,
        );
        expect(duration.toStandardFormat(), '55');
      });

      test('standard format: "12:03:45"', () {
        const duration = Duration(
          hours: 12,
          minutes: 3,
          seconds: 45,
        );
        expect(duration.toStandardFormat(), '12:03:45');
      });

      // Fixed description: this test checks "23.189", not "12:03:45".
      test('standard format: "23.189"', () {
        const duration = Duration(
          seconds: 23,
          milliseconds: 189,
        );
        expect(duration.toStandardFormat(), '23.189');
      });

      test('standard format: "0.2"', () {
        const duration = Duration(
          milliseconds: 200,
        );
        expect(duration.toStandardFormat(), '0.2');
      });

      // Fixed: these two "unit-specific" tests previously duplicated the
      // standard-format checks; they now exercise toUnitSpecifiedFormat.
      test('unit-specific: 23+.189s', () {
        const duration = Duration(
          seconds: 23,
          milliseconds: 189,
        );
        expect(duration.toUnitSpecifiedFormat(FfmpegTimeUnit.seconds), '23+.189s');
      });

      test('unit-specific: -23+.189s', () {
        const duration = Duration(
          seconds: -23,
          milliseconds: -189,
        );
        expect(duration.toUnitSpecifiedFormat(FfmpegTimeUnit.seconds), '-23+.189s');
      });

      test('unit-specific spot checks', () {
        const duration = Duration(
          seconds: 5,
          milliseconds: 6,
          microseconds: 7,
        );

        expect(duration.toUnitSpecifiedFormat(FfmpegTimeUnit.seconds), '5+.006007s');
        expect(duration.toUnitSpecifiedFormat(FfmpegTimeUnit.milliseconds), '5006+.007ms');
        expect(duration.toUnitSpecifiedFormat(FfmpegTimeUnit.microseconds), '5006007us');
      });

      test('unit-specific: 200ms', () {
        const duration = Duration(
          milliseconds: 200,
        );
        expect(duration.toUnitSpecifiedFormat(FfmpegTimeUnit.milliseconds), '200ms');
      });

      test('unit-specific: 200000us', () {
        const duration = Duration(
          milliseconds: 200,
        );
        expect(duration.toUnitSpecifiedFormat(FfmpegTimeUnit.microseconds), '200000us');
      });

      test('seconds: 23.456', () {
        expect(
          const Duration(
            seconds: 23,
            milliseconds: 456,
          ).toSeconds(),
          '23.456',
        );
      });
    });
  });

  group('ffmpeg time base', () {
    // TODO:
  });

  group('ffmpeg timestamp', () {
    // TODO:
  });
}
176 |
--------------------------------------------------------------------------------