├── .gitattributes
├── .gitignore
├── ColorPreservation.md
├── Encoding.md
├── EncodingOverview.md
├── FfmpegInputs.md
├── ICC
│   ├── P3D60.icc
│   ├── P3D65.icc
│   ├── P3DCI.icc
│   ├── README
│   ├── Rec2020-Rec1886.icc
│   ├── Rec709-Rec1886.icc
│   ├── Simplified-sRGB.icc
│   ├── gamma28.icc
│   ├── linear.icc
│   └── rec2020.icc
├── LICENSE
├── MarkdownNotes.md
├── OtherFfmpegArgs.md
├── Quickstart.md
├── README.md
├── Testresults.md
├── WebColorPreservation.md
├── _config.yml
├── browsercompare
│   ├── chrome-bt1886-windows.png
│   ├── chrome-gamma2.2-osx.png
│   ├── chrome-gamma2.2-windows.png
│   ├── chrome-gamma2.8-osx.png
│   ├── chrome-gamma2.8-windows.png
│   ├── chrome-linear-osx.png
│   ├── chrome-linear-windows.png
│   ├── chrome-srgb-osx.png
│   ├── chrome-srgb-windows.png
│   ├── firefox-bt1886-osx.png
│   ├── firefox-gamma2.2-osx.png
│   ├── firefox-linear-osx.png
│   ├── firefox-srgb-osx.png
│   ├── safari-bt1886-osx.png
│   ├── safari-gamma2.2-osx.png
│   ├── safari-gamma2.8-osx.png
│   ├── safari-linear-osx.png
│   ├── safari-srgb-g22-osx.png
│   └── safari-srgb-osx.png
├── compare.html
├── docker
│   ├── README.md
│   ├── ffmpeg-4.4
│   │   ├── Dockerfile
│   │   └── runme.sh
│   ├── ffmpeg-5.0
│   │   ├── Dockerfile
│   │   └── runme.sh
│   ├── ffmpeg-5.1
│   │   ├── Dockerfile
│   │   └── runme.sh
│   └── rocky-ffmpeg-5.1
│       ├── Dockerfile
│       └── runme.sh
├── enctests
│   ├── HDR_Encoding.md
│   ├── README.md
│   ├── reference-results
│   │   ├── README.md
│   │   ├── h264-crf-results.otio
│   │   ├── h264-crf-test-encode_time.png
│   │   ├── h264-crf-test-filesize.png
│   │   ├── h264-crf-test-vmaf_harmonic_mean.png
│   │   ├── prores-results.otio
│   │   ├── prores-test-encode_time.png
│   │   ├── prores-test-filesize.png
│   │   └── prores-test-vmaf_harmonic_mean.png
│   ├── runtest.sh
│   ├── sources
│   │   ├── .gitignore
│   │   ├── download_media.sh
│   │   ├── enc_sources
│   │   │   ├── README.md
│   │   │   ├── chimera_cars_srgb
│   │   │   │   └── chimera_cars_srgb.%05d.png.yml
│   │   │   ├── chimera_coaster_srgb
│   │   │   │   └── chimera_coaster_srgb.%06d.png.yml
│   │   │   ├── chimera_fountains_srgb
│   │   │   │   └── chimera_fountains_srgb.%05d.png.yml
│   │   │   ├── download_media.sh
│   │   │   └── thumbnails
│   │   │       ├── chimera_cars_srgb.02516.jpg
│   │   │       ├── chimera_fountains_srgb.05439.jpg
│   │   │       ├── chimera_wind_srgb.01126.jpg
│   │   │       └── sintel_trailer_2k_0591.jpg
│   │   └── hdr_sources
│   │       ├── README.md
│   │       ├── download_media.sh
│   │       └── thumbnails
│   │           ├── sparks2_srgb.06726.jpg
│   │           └── sparks_srgb.6015.jpg
│   ├── test_configs
│   │   ├── base_tests.yml
│   │   ├── h264_crf25_tests.yml
│   │   ├── h264_crf_tests.yml
│   │   ├── h264_tests.yml
│   │   ├── prores_profile_tests.yml
│   │   └── prores_tests.yml
│   └── testframework
│       ├── __init__.py
│       ├── encoders
│       │   ├── __init__.py
│       │   ├── base.py
│       │   └── ffmpeg_encoder.py
│       ├── generatetests.py
│       ├── main.py
│       ├── otio2html.py
│       ├── templates
│       │   ├── basic.html.jinja
│       │   └── doctests.html.jinja
│       └── utils
│           ├── __init__.py
│           ├── outputTemplate.py
│           └── utils.py
├── gamuttests
│   ├── DisplayP3-asRec2020.png
│   ├── apply-icc2020.py
│   ├── gamuttest_v001.nk
│   ├── iccgamut
│   │   └── gamuttest.md
│   ├── ps-combined-displayp3-g2.2.png
│   ├── ps-combined-rec2020-g2.2.png
│   ├── ps-combined-rec2020.png
│   └── rec709-asRec2020.png
├── index.md
├── sourceimages
│   ├── 1920px-SMPTE_Color_Bars_16x9-edges.png
│   ├── Digital_LAD_raw.png
│   ├── Digital_LAD_sRGB.png
│   ├── README.md
│   ├── chip-chart-1080-noicc.png
│   ├── chip-chart-1080-noicc.png.yml
│   ├── default-ffmpeg.png
│   ├── greyscale-raw.png
│   ├── greyscale-source-bt1886-ps.png
│   ├── greyscale-source-bt1886.png
│   ├── greyscale-source-gamma195-ps.png
│   ├── greyscale-source-gamma22-ps.png
│   ├── greyscale-source-gamma22.png
│   ├── greyscale-source-gamma28-ps.png
│   ├── greyscale-source-gamma28.png
│   ├── greyscale-source-lin-ps.png
│   ├── greyscale-source-lin.png
│   ├── greyscale-source-rec709-ps.png
│   ├── greyscale-source-rec709.png
│   ├── greyscale-source-srgb-ps.png
│   ├── greyscale-source-srgb.png
│   ├── libswscale-example.png
│   ├── original-png.png
│   ├── radialgrad.png
│   ├── radialgrad.png.yml
│   └── ward.png
├── static
│   └── reorder.png
└── tests
    ├── chip-chart-yuvconvert
    │   └── yuvconvert.md
    ├── chip-color-test.py
    ├── greyramp-fulltv
    │   └── fullrange.md
    ├── greyramp-osx
    │   └── colortrccompare.md
    ├── greyramp-rev2.nk
    ├── icctest-fulltv.py
    ├── icctest-osx.py
    ├── icctest-rev-ps-nuke.py
    ├── icctest-rev-ps.py
    ├── icctest-rev2.py
    ├── icctest.py
    ├── python
    │   ├── CompareHtml.py
    │   └── CompareOverHtml.py
    └── runall.sh
/.gitattributes:
--------------------------------------------------------------------------------
1 | *.py text eol=lf
2 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | enctests/encoded/
132 |
133 | enctests/h264-encode/
134 | _site/
135 | enctests/prores-encode/
136 | enctests/results/
137 | enctests/sources/encsources/chimera_coaster/
138 | .jekyll-cache/
139 | enctests/docs-encode/
140 | enctests/sources/enc_sources/chimera_coaster/
141 |
--------------------------------------------------------------------------------
/ColorPreservation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | title: Color Preservation
4 | nav_order: 3
5 | parent: Encoding Overview
6 |
7 | ---
8 |
9 |
10 |
11 | # RGB to YCrCb Conversion
12 | We would like ffmpeg to do as little as possible in terms of color space conversion. i.e. what comes in, goes out. The problem is that most of the codecs are doing some sort of RGB to YUV conversion (technically YCrCb). The notable exception is x264rgb (see below).
13 |
14 | The main problem is that ffmpeg by default assumes that any unknown still image format has a color space of [rec601](https://en.wikipedia.org/wiki/Rec._601), which is very unlikely to be the color space your source media was generated in. So unless you tell it otherwise, it will attempt to convert from that colorspace, producing a color shift.
15 |
16 | Separately, all the video formats typically do not use the full numeric range [0-255]; instead the Y' (luminance) channel has a nominal range of [16..235] and the CB and CR channels have a nominal range of [16..240], with 128 as the neutral value. This frequently results in quantisation artifacts for 8-bit encoding (the standard for web playback). Fortunately, this is something you can change, see [TV vs. Full range](Quickstart.html#tv-vs-full-range-) below. The other option is to use a higher bit depth, e.g. 10-bit or 12-bit for formats such as [ProRes](Encoding.html#prores-).
17 |
18 | For more information, see: [https://trac.ffmpeg.org/wiki/colorspace](https://trac.ffmpeg.org/wiki/colorspace)
19 |
20 | TODO -- Review the SWS_Flags.
21 |
22 | For examples comparing these see: [here](https://richardssam.github.io/ffmpeg-tests/tests/chip-chart-yuvconvert/compare.html)
23 |
24 | ## colormatrix filter
25 | ```
26 | -vf "colormatrix=bt470bg:bt709"
27 | ```
28 | This is the most basic colorspace filtering. bt470bg is essentially part of the bt601 spec. See: [https://www.ffmpeg.org/ffmpeg-filters.html#colormatrix](https://www.ffmpeg.org/ffmpeg-filters.html#colormatrix)
30 | e.g.
31 |
32 |
42 | ```
43 | ffmpeg -y -i ../sourceimages/chip-chart-1080-noicc.png \
44 | -sws_flags spline+accurate_rnd+full_chroma_int -vf "colormatrix=bt470bg:bt709" \
45 | -c:v libx264 -preset placebo -qp 0 -x264-params "keyint=15:no-deblock=1" -pix_fmt yuv444p10le -qscale:v 1 \
46 | -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc bt709 \
47 | ./chip-chart-yuvconvert/spline444colormatrix2.mp4
48 | ```
49 |
50 | ## colorspace filter
51 | ```
52 | -vf "colorspace=bt709:iall=bt601-6-625:fast=1"
53 | ```
54 | Using the colorspace filter: this is a better quality filter, uses SIMD so it is faster, and can support 10-bit too. The second part `-vf "colorspace=bt709:iall=bt601-6-625:fast=1"` encodes for the output being bt709, rather than the default bt601 matrix. `iall=bt601-6-625` says to treat all the input properties (colorspace, primaries and transfer function) as having the bt601-6-625 label. `fast=1` skips the gamma/primary conversion, which takes less CPU but is not mathematically correct. See: [https://ffmpeg.org/ffmpeg-filters.html#colorspace](https://ffmpeg.org/ffmpeg-filters.html#colorspace)
55 | e.g.
56 |
57 |
62 | ```
63 | ffmpeg -y -i ../sourceimages/chip-chart-1080-noicc.png \
64 | -sws_flags spline+accurate_rnd+full_chroma_int -vf "colorspace=bt709:iall=bt601-6-625:fast=1" \
65 | -c:v libx264 -preset placebo -qp 0 -x264-params "keyint=15:no-deblock=1" -pix_fmt yuv444p10le -qscale:v 1 \
66 | -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc bt709 \
67 | ./chip-chart-yuvconvert/spline444colorspace.mp4
68 | ```
69 |
70 | ## libswscale filter
71 |
72 | ```
73 | -vf "scale=in_range=full:in_color_matrix=bt709:out_range=tv:out_color_matrix=bt709"
74 | ```
75 | Using the libswscale library. This is similar to the colorspace filter, but with image resizing and range (levels) handling built in. [https://www.ffmpeg.org/ffmpeg-filters.html#scale-1](https://www.ffmpeg.org/ffmpeg-filters.html#scale-1)
76 |
77 | This is the recommended filter.
78 | e.g.
79 |
80 |
85 | ```
86 | ffmpeg -y -i ../sourceimages/chip-chart-1080-noicc.png \
87 | -sws_flags spline+accurate_rnd+full_chroma_int+full_chroma_inp -vf "scale=in_range=full:in_color_matrix=bt709:out_range=tv:out_color_matrix=bt709" \
88 | -c:v libx264 -preset placebo -qp 0 -x264-params "keyint=15:no-deblock=1" -pix_fmt yuv444p10le -qscale:v 1 \
89 | -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc bt709 \
90 | ./chip-chart-yuvconvert/spline444out_color_matrix.mp4
91 | ```
92 |
--------------------------------------------------------------------------------
/Encoding.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | nav_order: 4
4 | title: Encoding
5 | parent: Encoding Overview
6 | ---
7 |
8 | ## Encoding Overview
9 |
10 | Creating movies for review should be a simple process, where the movie file accurately represents the source media content, so that you can feel confident that issues with the content are not a result of creating the movie. Sadly, this is not the case: there are many steps that can create an incorrect result, either through a color shift or through encoding artifacts. This frequently means there isn't a single right answer for all cases, so we will attempt to document the different scenarios where you could get tripped up.
11 |
12 | We will mostly be focusing on encoding with ffmpeg, however there will be some cases where we recommend other tools. We split the process into two steps:
13 | 1. Convert the source media to the target color space.
14 | 2. Encode the target intermediate frames into the resulting movie.
15 |
16 | ## Encoding
17 | NOTE, we do not have any test suites for encoding at this time. This is an area for future development.
18 |
19 | A good starting point for encoding options is here: [https://trac.ffmpeg.org/wiki/Encode/VFX](https://trac.ffmpeg.org/wiki/Encode/VFX)
20 | ### H264
21 | Key flags (see [https://trac.ffmpeg.org/wiki/Encode/H.264](https://trac.ffmpeg.org/wiki/Encode/H.264) )
22 |
23 | * **-crf 23** - This is the constant rate factor, controlling the default quality (see: [https://slhck.info/video/2017/02/24/crf-guide.html](https://slhck.info/video/2017/02/24/crf-guide.html) ), where -crf 0 is lossless. By default this is set to 23, which is a little on the low side quality-wise; using values closer to 15 is recommended, but this does come at the expense of file-size. For more on this see the [CRF comparison](#crf-comparison) below.
24 | * **-qp 23** - Quantization Parameter - it is recommended that you do not use this, in preference to -crf above (see: [https://slhck.info/video/2017/03/01/rate-control.html](https://slhck.info/video/2017/03/01/rate-control.html) )
25 | * **-preset slower** - [https://trac.ffmpeg.org/wiki/Encode/H.264#FAQ](https://trac.ffmpeg.org/wiki/Encode/H.264#FAQ)
26 | * **-tune film** - Optionally use the tune option to change settings based on specific inputs - [https://trac.ffmpeg.org/wiki/Encode/H.264#FAQ](https://trac.ffmpeg.org/wiki/Encode/H.264#FAQ) - see also: [https://superuser.com/questions/564402/explanation-of-x264-tune](https://superuser.com/questions/564402/explanation-of-x264-tune) I suspect that we would want to use one of:
27 | * **-tune film** good for live action content.
28 | * **-tune animation** good for animated content with areas of flat colors.
29 | * **-tune grain** good for live action content where you want to preserve the grain as much as possible.
30 | * **-qscale:v 9** - Generic quality scale flag: [https://www.ffmpeg.org/ffmpeg.html#toc-Main-options](https://www.ffmpeg.org/ffmpeg.html#toc-Main-options) - TODO experiment with this.
31 |
32 | An example would be:
33 | ```
34 | -preset slower -crf 11 -profile:v high -tune film
35 | ```
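As a sketch only (the file names, frame rate and frame count are placeholders), combining those flags with the color handling from the [Encoding Cheatsheet](Quickstart.html) might look like:

```console
ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png \
    -vf "scale=in_color_matrix=bt709:out_color_matrix=bt709" \
    -frames:v 100 -c:v libx264 -preset slower -crf 11 -profile:v high -tune film -pix_fmt yuv420p \
    -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc iec61966-2-1 \
    outputfile.mp4
```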
36 | #### CRF Comparison
37 |
38 | To help pick appropriate values with the CRF flag, we have run the [Test Framework](enctests/README.html) through some of the [reference media](enctests/sources/enc_sources/README.html).
39 |
40 | |  This is showing CRF values against encoding time. |
41 | |  This is showing CRF values against file size. |
42 | |  This is showing CRF values against VMAF harmonic mean |
43 |
44 |
45 | #### H264 Bitdepth
46 |
47 | By default, h264 is created with the yuv420p pixel format. This is the recommended format for web playback, and also for playback with the QuickTime player on OSX and other Apple devices, but the h264 codec can support other formats, which are selected with the `-pix_fmt` flag.
48 |
49 | TODO Needs more investigation, e.g. do you set pix_fmt and profile, or will one set the other?
50 |
51 | |---|---|
52 | |-pix_fmt yuv444p10le| Defines a YUV 444 image at 10bits per component.|
53 | |-profile:v high10 | Support for bit depth 8-10. |
54 | |-profile:v high422 | Support for bit depth 8-10. Support for 4:2:0/4:2:2 chroma subsampling.|
55 | |-profile:v high444 | Support for bit depth 8-10. for 4:2:0/4:2:2/4:4:4 chroma subsampling.|
56 |
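As an untested sketch (setting both the profile and the pixel format explicitly, which is one answer to the TODO above; file names are placeholders), a 10-bit 4:4:4 encode might look like:

```console
ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png \
    -vf "scale=in_color_matrix=bt709:out_color_matrix=bt709" \
    -c:v libx264 -preset slower -crf 15 -profile:v high444 -pix_fmt yuv444p10le \
    -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc iec61966-2-1 \
    outputfile.mp4
```

Note that yuv444p10le requires a libx264 build with 10-bit support, and the result is generally not playable in web browsers.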
57 |
58 | ### ProRes
59 | There are four ProRes encoders: prores, prores_ks, prores_aw and, new with ffmpeg 5, VideoToolbox ProRes (prores_videotoolbox), which is a hardware-based OSX M1 encoder/decoder.
60 |
61 | From [https://trac.ffmpeg.org/wiki/Encode/VFX](https://trac.ffmpeg.org/wiki/Encode/VFX) the recommendation is to use prores_ks with -profile:v 3 and a qscale of 11.
62 |
63 | Options that can be used include:
64 |
65 | -profile:v values can be one of:
66 | * proxy (0)
67 | * lt (1)
68 | * standard (2)
69 | * hq (3)
70 | * 4444 (4)
71 | * 4444xq (5)
72 |
73 | -qscale:v values between 9 and 13 give a good result (0 being best); see below for some wedge tests.
74 |
75 | -vendor apl0 - sets the vendor tag so the file appears to come from the Apple ProRes encoder; this helps some tools treat the file correctly.
76 |
77 | Example encode would look like:
78 |
79 |
84 | ```console
85 | ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png -vf "scale=in_color_matrix=bt709:out_color_matrix=bt709" \
86 | -vframes 100 -c:v prores_ks -profile:v 3 -pix_fmt yuv422p10le \
87 | -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc iec61966-2-1 outputfile.mov
88 | ```
89 |
90 | Using this with the usual color space flags seems to work well, with the exception that ffmpeg itself is unable to read a prores file and convert it to a still frame correctly. It needs the flags `-vf scale=in_color_matrix=bt709:out_color_matrix=bt709` added to the command to ensure the right input colorspace is recognised, e.g.:
91 |
92 |
93 |
98 | ```console
99 | ffmpeg -i INPUTFILE.mov -compression_level 10 -pred mixed -pix_fmt rgba64be \
100 | -sws_flags spline+accurate_rnd+full_chroma_int -vframes 1 \
101 | -vf scale=in_color_matrix=bt709:out_color_matrix=bt709 OUTPUTFILE.png
102 | ```
103 |
104 | However, files from other encoders seem to be recognised correctly, so there is clearly some metadata missing. I did try using the prores_metadata bitstream filter to add some additional parameters, but it didn't seem to help.
105 |
106 | ```console
107 | ffmpeg -i ./chip-chart-yuvconvert/basicnclc.mov -c copy \
108 | -bsf:v prores_metadata=color_primaries=bt709:color_trc=bt709:colorspace=bt709 \
109 | chip-chart-yuvconvert/basicnclcmetadata.mov
110 | ```
111 |
112 | If you are on an OSX M1 machine and are using ffmpeg 5.0 or higher, you can use the built-in libraries to encode to prores using:
113 |
114 | ```console
115 | ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png -vf "scale=in_color_matrix=bt709:out_color_matrix=bt709" \
116 | -vframes 100 -c:v prores_videotoolbox -profile:v 3 -pix_fmt yuv422p \
117 | -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc bt709 outputfile.mp4
118 |
119 | ```
120 |
121 | NOTE, it does not appear to allow `-color_trc iec61966-2-1` (sRGB) -- so this needs more testing.
122 |
123 | TODO:
124 | * Figure out the missing metadata so that ffmpeg can correctly decode a quicktime to still.
125 | * Add other codecs, e.g. DNxHD, AV1
126 | * Wedge qscale values
127 | * Do some colorspace tests with different qscale values to see where color breaks down.
128 | * VMAF
129 |
130 | #### Prores_ks -qscale:v comparison.
131 |
132 | To help pick appropriate values with the -qscale:v , we have run the [Test Framework](enctests/README.html) through some of the [reference media](enctests/sources/enc_sources/README.html).
133 |
134 | |  This is showing qscale values against encoding time. |
135 | |  This is showing qscale values against file size. |
136 | |  This is showing qscale values against VMAF harmonic mean |
--------------------------------------------------------------------------------
/EncodingOverview.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | nav_order: 3
4 | title: Encoding Overview
5 | has_children: true
6 |
7 | ---
8 |
9 | {: .no_toc }
10 |
11 | # Media Encoding with ffmpeg
12 |
13 | We will break the encoding process into three parts:
14 | 1. [The RGB to YCrCb conversion](#Color-space-conversion)
15 | 2. [The encoding process itself](Encoding.html)
16 | 3. [Metadata tagging for web browsers](WebColorPreservation.html)
17 |
18 | # Color space conversion.
19 |
20 | FFmpeg is not a great tool for colorspace conversion. We recommend that any color space conversion be done using tools such as [Nuke](https://www.foundry.com/products/nuke-family/nuke) or [oiiotool](https://openimageio.readthedocs.io/en/latest/oiiotool.html) using [OCIO](https://opencolorio.org/). We strongly recommend using the ACES configuration whenever possible, since it provides a good baseline for colorspace conversion. Note, we may mention Nuke a number of times, but there are now a large number of third-party tools that will also do a great job of this color space conversion using OCIO.
21 |
22 | Typically, we would assume that an intermediate file would get written out, such as PNG, TIF or DPX for processing in ffmpeg.
23 |
24 | Hint: by default the Nuke PNG writer has its slow compression option enabled; this adds a little time that is unnecessary for the sort of intermediate file we are using. The Nuke SDK does provide the source for the PNG writer, so it is possible to disable this. However, you may find that switching to TIF will have the same result.
25 |
26 | ## Easy install of OCIO/OIIO/FFmpeg
27 |
28 | Different approaches for getting started include [anaconda](https://www.anaconda.com/). You will also need to download the ACES OCIO configuration files from: https://github.com/colour-science/OpenColorIO-Configs
29 | ```
30 | conda create --name aswf-ffmpeg
31 | conda activate aswf-ffmpeg
32 | conda install -c conda-forge py-openimageio
33 | pip install PyYAML pillow
34 | ```
35 | This should give you py-openimageio, openimageio and ffmpeg-4.4
36 |
37 | TODO - Provide other approaches for quickly getting going (e.g. vcpkg)
38 |
39 | ## Quick introduction to color conversion using oiiotool
40 |
41 | ```
42 | export OCIO=~/git/OpenColorIO-Configs/aces_1.2/config.ocio # Or wherever your OCIO is.
43 | oiiotool --framepadding 5 --frames 1-100 sourcefilename_acescg.#.exr --resize 1920x0 \
44 | --colorconvert acescg srgb --dither -o outputimage.#.png
45 | ```
46 |
47 | | --- | --- |
48 | | --frames 1-100 | The frame range of the source media. |
49 | | --framepadding 5 | Set the framepadding to 5 (i.e. outputimage.00001.png) |
50 | | [--resize 1920x0](https://openimageio.readthedocs.io/en/master/oiiotool.html?highlight=resize%20filter#cmdoption-resize) | Resize the image so that the width is 1920 wide, and adjust the height so that the aspect ratio stays the same. (Note you may want to use [--fit](https://openimageio.readthedocs.io/en/master/oiiotool.html?highlight=resize%20filter#cmdoption-fit) too). This will use the lanczos3 filter for decreasing resolution, and the blackman-harris filter for increasing resolution. |
51 | | --colorconvert acescg srgb | Do a colorspace convert from ACEScg to sRGB. (See the autocc flag below)|
52 | | --dither | Adding a dither process when writing to an 8-bit file |
53 |
54 |
55 | Other flags you might want to use include:
56 |
57 | | --- | --- |
58 | | --missingfile checker | If a frame is missing, put a checkboard frame in its place. |
59 | | --threads 2 | If you want to limit the number of threads the oiiotool process consumes, the default is as many threads as there are cores present in the hardware |
60 | | --autocc | Turns on automatic color space conversion based on the file names. |
61 |
62 | The above will work well for many of the h264 files, but for generating movies with a higher bit depth (more than 8 bits per channel), you may want to do:
63 | ```
64 | export OCIO=~/git/OpenColorIO-Configs/aces_1.2/config.ocio # Or wherever your OCIO is.
65 | oiiotool --framepadding 5 --frames 1-100 sourcefilename_acescg.#.exr --resize 1920x0 \
66 | --colorconvert acescg srgb -d uint16 -o outputimage.#.png
67 | ```
68 |
69 | Adding the `-d uint16` flag forces the intermediate file format to be 16-bit, rather than the 8-bit default. Note, we have also removed the dither flag.
70 |
71 | ## Image resizing.
72 |
73 | There are a couple of gotchas with image resizing to watch out for:
74 | * A number of the encoders require that the resulting movie file's dimensions be a multiple of 2; there isn't a direct way to do this in oiiotool, you would need to read the source image file to determine the right output scale.
75 | * Watch for filter options: if you choose to do the filtering in ffmpeg, it defaults to bicubic, which is not a great choice for downrezing images. For reasons why lanczos is preferred, see:
76 | * [https://legacy.imagemagick.org/Usage/filter/](https://legacy.imagemagick.org/Usage/filter/)
77 | * [https://www.cambridgeincolour.com/tutorials/image-resize-for-web.htm](https://www.cambridgeincolour.com/tutorials/image-resize-for-web.htm)
78 |
79 |
80 | ## See Also.
81 | The following utility - [https://github.com/jedypod/generate-dailies](https://github.com/jedypod/generate-dailies) calls openimageio directly in python to do the image conversion, exporting the resulting file directly to ffmpeg. It has a nice configuration file for text overlays and ffmpeg configuration that is worth looking at too.
82 |
--------------------------------------------------------------------------------
/FfmpegInputs.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | title: Ffmpeg Frame Sequences Specification
4 | nav_order: 2
5 | parent: Encoding Overview
6 |
7 | ---
8 |
9 | # Ffmpeg Frame Sequences Specification
10 |
11 | Care needs to be taken when specifying frame sequences, since there is additional metadata that would normally be present in a movie container (e.g. mp4 or mov files) that is not present in an image sequence.
12 |
13 | Ffmpeg will convert an image sequence using the [image2](https://ffmpeg.org/ffmpeg-formats.html#image2-1) demuxer. This provides the options for determining how file names are wildcarded, as well as how to specify the frame-rate.
14 |
15 | ## Image sequences
16 |
17 | There are two approaches for defining image sequences, globbing and printf style.
18 |
19 | ### Printf image sequence definition.
20 |
21 | The conventional approach is to define the image sequence with a "%d" or a "%0Nd", which specifies where the frame number should go, for example:
22 | img.%d.png
23 |
24 | would match: img.0.png img.1.png img.2.png ... img.10.png etc.
25 |
26 | img.%04d.png means the numbers need to be zero padded to 4 digits, so it would match img.0000.png img.0001.png img.0002.png ... etc.
27 |
28 | By default, the frame number is expected to start from 0, but you can define it with the flag: `-start_number`, e.g.:
29 | ```console
30 | ffmpeg -start_number 1 -i img.%04d.png foo.mov
31 | ```
32 | Would start from frame number img.0001.png
33 |
34 | If not defined, the end frame will be the last continuous frame in the frame sequence, so if you have a missing frame it will stop there.
35 | You can define the number of frames to capture using the `-frames:v` flag.
36 |
37 | N.B. In a windows command shell, % has a special meaning, so you may need to escape the "%", by replacing it with %%, or quote it, e.g.:
38 | ```console
39 | ffmpeg -start_number 1 -i img.%%04d.png foo.mov
40 | ```
41 | TODO TEST.
42 |
43 | ### Globbing image sequence definition.
44 |
45 | There is a globbing option that makes it a little easier to specify a block of frames, since you don't need to specify the first frame.
46 |
47 | ```console
48 | ffmpeg -pattern_type glob -i "img.*.png" foo.mov
49 | ```
50 | Will grab all frames that start with img. and end with ".png"
51 |
52 |
53 |
54 | ## Frame Rate
55 |
56 | The following aliases are defined for framerate values, used with either the `-r` or `-framerate` parameters.
57 |
58 | | ntsc | 30000/1001 | 29.97 fps equivalent. |
59 | | pal | 25| |
60 | | qntsc | 30000/1001 | VCD compliant NTSC |
61 | | qpal | 25 | VCD compliant PAL |
62 | | sntsc | 30000/1001 | square pixel NTSC |
63 | | spal | 25 | square pixel PAL |
64 | | film | 24 | |
65 | | ntsc-film | 24000/1001 | correct 23.98 |
66 |
67 | It is preferable to use a fractional rate, or one of the above settings, where possible; for example `-r 30000/1001` is the more precise version of 29.97.
68 |
69 | Other common fractional rates not defined with presets include:
70 |
71 | | 60000/1001 | 59.94 fps equivalent. |
72 | | 120000/1001 | 119.88 fps equivalent |
73 |
74 | If not specified, the default framerate chosen is 25 fps (i.e. pal).
75 |
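For example (a sketch, with placeholder file names), the 23.976 rate can be given either by its alias or as a fraction:

```console
ffmpeg -framerate ntsc-film -start_number 1 -i img.%04d.png foo.mov
ffmpeg -framerate 24000/1001 -start_number 1 -i img.%04d.png foo.mov
```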
76 | TODO Confirm no difference between -r and -framerate. -framerate seems a little more proper.
77 |
78 | ## Looping
79 |
80 | You can loop the input file with the `-loop 1` parameter, e.g.:
81 | ```console
82 | ffmpeg -y -pattern_type glob -loop 1 -framerate ntsc -i "../sourceimages/chip-chart-1080-noicc.*.png" -pix_fmt yuv444p10le -frames:v 100 ./chip-chart-yuvconvert/looptest.mov
83 | ```
84 |
85 | Note, you want to control the number of frames to output. For a long sequence you would put the `-frames:v 100` before the "-i" flag, but here we are putting it before the output, since we want it to apply to the overall looping input, not the input sequence.
86 |
87 |
--------------------------------------------------------------------------------
/ICC/P3D60.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/P3D60.icc
--------------------------------------------------------------------------------
/ICC/P3D65.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/P3D65.icc
--------------------------------------------------------------------------------
/ICC/P3DCI.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/P3DCI.icc
--------------------------------------------------------------------------------
/ICC/README:
--------------------------------------------------------------------------------
1 | Some of these files are sourced from http://color.support/iccprofiles.html
2 |
3 | The files below are generated from photoshop, by creating a custom ICC profile, and then saving it out.
4 | gamma28.icc
5 | linear.icc
6 | Simplified-sRGB.icc
7 |
8 |
--------------------------------------------------------------------------------
/ICC/Rec2020-Rec1886.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/Rec2020-Rec1886.icc
--------------------------------------------------------------------------------
/ICC/Rec709-Rec1886.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/Rec709-Rec1886.icc
--------------------------------------------------------------------------------
/ICC/Simplified-sRGB.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/Simplified-sRGB.icc
--------------------------------------------------------------------------------
/ICC/gamma28.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/gamma28.icc
--------------------------------------------------------------------------------
/ICC/linear.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/linear.icc
--------------------------------------------------------------------------------
/ICC/rec2020.icc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/ICC/rec2020.icc
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2021 Sam Richards
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/MarkdownNotes.md:
--------------------------------------------------------------------------------
1 | # Testing Markdown locally.
2 |
3 | To easily preview the markdown, we need to set up Jekyll as a local server.
4 |
5 | 1: Install ruby+Devkit [https://rubyinstaller.org/downloads/](https://rubyinstaller.org/downloads/)
6 | 2: Install Jekyll [https://jekyllrb.com/docs/installation/windows/](https://jekyllrb.com/docs/installation/windows/)
7 | 3: gem install just-the-docs jekyll-remote-theme
8 |
9 | If you are on windows, you may want `gem install wdm` too.
10 |
11 |
12 | NOTE, there is a separate config file for running it locally where we point to a local install of just-the-docs
13 |
14 | you can then run it with:
15 | ```console
16 | jekyll serve --incremental -l -o --config _config_local.yml
17 | ```
18 |
19 |
20 |
21 |
--------------------------------------------------------------------------------
/OtherFfmpegArgs.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | title: Other ffmpeg arguments.
4 | nav_order: 5.5
5 | parent: Encoding Overview
6 | ---
7 |
8 | # Other ffmpeg arguments.
9 |
10 | This is covering other things that can be done directly in ffmpeg that might be useful.
11 |
12 | ## Audio
13 |
14 | ```
15 | -filter_complex "[1:0]apad" -shortest
16 | ```
17 | This is a useful filter to add when including an audio file whose length might not match the resulting movie. It will either pad the audio to match the video, if the audio is short, or truncate the audio to match the video.
18 |
19 | TODO - Provide full example of adding audio to the "quickstart" demo.
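As a rough sketch in the meantime (untested here, with placeholder file names), combining this with a basic h264 encode might look like the following; the usual color flags from the cheatsheet can be added as normal:

```console
ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png -i audio.wav \
    -filter_complex "[1:0]apad" -shortest \
    -c:v libx264 -preset slower -pix_fmt yuv420p -c:a aac \
    outputfile.mp4
```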
20 |
21 | ## Image resizing.
22 |
23 | ### Keeping the image height a factor of 2.
24 |
25 | There may be reasons that you want to do any image resizing directly inside ffmpeg (e.g. when converting from another movie format). A number of the codecs require that the width and height be a multiple of 2 (and sometimes 4). The expression below will ensure that the height is set correctly, assuming the width is 1920:
26 | ```
27 | -vf scale=1920:trunc(ow/a/2)*2:flags=lanczos
28 | ```
29 |
30 | If you are downrezing, you will get the best results with the lanczos filter, otherwise the default is bicubic.
31 |
32 | TODO - this needs testing, to confirm filter quality.
33 |
34 |
35 | See: [https://trac.ffmpeg.org/wiki/Scaling](https://trac.ffmpeg.org/wiki/Scaling) for more info.
36 |
37 |
38 | ## Concatenation of video files.
39 |
40 | See: [https://trac.ffmpeg.org/wiki/Concatenate](https://trac.ffmpeg.org/wiki/Concatenate)
41 |
42 | This has been useful in splitting long prores encodes into chunks, and then merging them back together.
43 | The merge process is not quick, so there are limits to how much you can split the process, but provided that the merge process is not I/O bound, it can typically end up with faster encodes.
44 |
45 | TODO - Provide some examples of speed improvement, as well as a sample command line.
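A sketch of the concat demuxer approach (chunk names are placeholders): list the chunks, in order, in a text file and then remux them without re-encoding:

```console
# mylist.txt contains one line per chunk, e.g.:
#   file 'chunk-0001.mov'
#   file 'chunk-0002.mov'
ffmpeg -f concat -safe 0 -i mylist.txt -c copy merged.mov
```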
46 |
--------------------------------------------------------------------------------
/Quickstart.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | nav_order: 2
4 | title: Encoding Cheatsheet
5 | ---
6 |
7 | # Encoding Cheatsheet
8 |
9 | This is a cheatsheet of encoding best practices for VFX/Animation production. For each recipe there are more detailed pages on why these settings are picked, and notes on what parameters you may want to change.
10 |
11 | This document is based on results from ffmpeg 4.4; we have not tested with 5.0 yet, but plan to.
12 |
13 | # H264 Encoding from an image sequence for Web Review
14 |
15 | If you are encoding from an image sequence (e.g. imagefile.0000.png imagefile.0001.png ...) to h264 using ffmpeg, we recommend:
16 |
17 |
22 | ```console
23 | ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png \
24 | -vf "scale=in_color_matrix=bt709:out_color_matrix=bt709" \
25 | -frames:v 100 -c:v libx264 -preset slower -pix_fmt yuv420p \
26 | -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc iec61966-2-1 \
27 | outputfile.mp4
28 | ```
29 |
30 | | --- | --- |
31 | | **-r 24** | means 24 frames per second for the png files. |
32 | | **-start_number** 1 | The frame sequence starts from frame 1 (defaults to 0) |
| **-i inputfile.%04d.png** | the %04d means the file sequence will be padded to 4 digits, i.e. 0000, 0001, 0002, etc. It is the same syntax supported by the C printf function. |
| **[-frames:v](https://ffmpeg.org/ffmpeg.html#toc-Video-Options) 100** | is optional, but allows you to specify how many frames to encode, otherwise it will encode the entire frame range. There is an obsolete alias flag `-vframes` which will be retired. |
| **-c:v libx264** | use the h264 encoding library (libx264) |
| **-preset slower** | a reasonably high quality preset, which will run slow, but not terribly slow. |
| **-pix_fmt yuv420p** | use yuv420 video format, which is typical for web playback. If you want a better quality for RV or other desktop tools use -pix_fmt yuv444p10le |
| **-color_range tv** | mp4 metadata - specifying color range as 16-235 (which is default for web playback). |
| **-colorspace bt709** | mp4 metadata - specifying bt709 yuv color pixel format |
| **-color_primaries bt709** | mp4 metadata - bt709 color gamut primaries |
| **-color_trc iec61966-2-1** | mp4 metadata color transfer = iec61966-2-1 = sRGB - See tests below. |
42 |
43 | **-vf "scale=in_color_matrix=bt709:out_color_matrix=bt709"** means use the sw-scale filter, setting:
44 |
45 | | --- | --- |
46 | | **in_color_matrix=bt709** | color space bt709 video coming in (normal for TV/Desktop video).|
47 | | **out_color_matrix=bt709** | means color space bt709 video going out. |
48 |
49 | The combination of in_color_matrix and out_color_matrix means the color encoding will match the source media. If you are only adding one set of flags, this is the one; otherwise ffmpeg will default to an output colorspace of bt601, which is a standard-definition spec from the last century, and not suitable for sRGB or HD displays.
50 |
51 | Separately, if you are converting from exr's in other colorspaces, **please use [OCIO](https://opencolorio.org/) to do the color space conversions.** [oiiotool](https://openimageio.readthedocs.io/en/latest/oiiotool.html) is an excellent open-source tool for this.
52 |
53 | For more details see:
54 | * [H264 Encoding](Encoding.md#h264)
55 | * [YUV Conversion](ColorPreservation.md#yuv)
56 | * [Browser color issues](ColorPreservation.md#nclc)
57 |
58 |
59 | # ProRes 422 encoding with ffmpeg.
60 |
61 | Unlike h264 and DNxHD, the ProRes encoders in ffmpeg are reverse-engineered implementations. However, in many cases ffmpeg can produce adequate results. There are a number of ProRes encoders; we recommend the prores_ks one.
62 |
63 |
68 | ```console
69 | ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png -vframes 100 \
70 | -c:v prores_ks -profile:v 3 -qscale:v 9 \
71 | -vf scale=in_color_matrix=bt709:out_color_matrix=bt709 -pix_fmt yuv422p10le outputfile.mov
72 | ```
73 |
74 | | --- | --- |
75 | | **-profile:v 3** | Prores profile |
76 | | **-qscale:v 9** | Controls the output quality; lower numbers mean higher quality and a larger file-size. *TODO Need to do testing with different values.* |
77 | | **-pix_fmt yuv422p10le** | Convert to 10-bit YUV 422 |
78 | | **-vendor apl0** | Treat the file as if it was created by the Apple ProRes encoder (even though it isn't); helps some tools correctly read the quicktime |
79 |
80 | For more details see:
81 | * [Prores](Encoding.md#prores)
82 | * [YUV Conversion](ColorPreservation.md#yuv)
83 |
84 | # ProRes 4444 encoding with ffmpeg.
85 |
86 | As above, but using 4444 (i.e. a color value for each pixel + an alpha)
87 |
88 |
93 | ```console
94 | ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png -vframes 100 \
95 | -c:v prores_ks -profile:v 4444 -qscale:v 9 \
96 | -vf scale=in_color_matrix=bt709:out_color_matrix=bt709 -pix_fmt yuv444p10le outputfile.mov
97 | ```
98 |
99 | | --------- | ----------- |
100 | | **-profile:v 4444** | Prores profile for 4444 |
101 | | **-qscale:v 9** | Controls the output quality; lower numbers mean higher quality and a larger file-size. *TODO Need to do testing with different values.* |
102 | | **-pix_fmt yuv444p10le** | Convert to 10-bit YUV 4444 |
103 |
104 | For more details see:
105 | * [Prores](Encoding.md#prores)
106 | * [YUV Conversion](ColorPreservation.md#yuv)
107 |
108 | # TV vs. Full range.
109 | All the video formats typically do not use the full numeric range; instead the R', G', B' and Y' (luminance) channels have a nominal range of [16..235] and the CB and CR channels have a nominal range of [16..240], with 128 as the neutral value. This frequently results in quantisation artifacts for 8-bit encoding (the standard for web playback).
110 |
111 | TODO Get Quantization examples.
112 |
113 | You can force the encoding to be full range using the libswscale library by using
114 | ```
115 | -vf "scale=in_range=full:in_color_matrix=bt709:out_range=full:out_color_matrix=bt709"
116 | ```
117 | Specifying *out_range=full* forces the output range, but you also need to set the NCLX tag (2 is the numeric equivalent of `pc`, i.e. full range):
118 | ```
119 | -color_range 2
120 | ```
121 | A full example encode would look like:
122 |
123 |
128 | ```console
129 | ffmpeg -y -loop 1 -i ../sourceimages/radialgrad.png -sws_flags spline+accurate_rnd+full_chroma_int \
130 | -vf "scale=in_range=full:in_color_matrix=bt709:out_range=full:out_color_matrix=bt709" \
131 | -c:v libx264 -t 5 -pix_fmt yuv420p -qscale:v 1 \
132 | -color_range pc -colorspace bt709 -color_primaries bt709 -color_trc iec61966-2-1 ./greyramp-fulltv/radialgrad-full.mp4
133 | ```
134 | We have seen the full range encoding work across all browsers, and a number of players including RV.
135 |
136 | TODO: Do additional testing across all players.
137 |
138 | For more details see:
139 | * [Comparing full-range vs. tv range](https://richardssam.github.io/ffmpeg-tests/tests/greyramp-fulltv/compare.html)
140 | * [Encoding Guide](Encoding.html#range)
141 |
142 |
143 | # Encoding as RGB.
144 | You do not *have* to encode into YCrCb; h264 does support RGB encoding, which may be preferable in some situations.
145 |
146 | Using the encoder:
147 | ```
148 | -c:v libx264rgb
149 | ```
150 | Will skip the conversion completely. Sadly this has no support in web browsers, but is supported by some players (e.g. RV). It is also limited to 8-bit.
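A minimal sketch of a full command (placeholder file names; the other flags follow the recipes above):

```console
ffmpeg -r 24 -start_number 1 -i inputfile.%04d.png \
    -frames:v 100 -c:v libx264rgb -preset slower -crf 15 -pix_fmt rgb24 \
    outputfile.mov
```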
151 |
152 | TODO Check about 10-bit encoding.
153 |
154 | For more details see:
155 | * [Comparing full-range vs. tv range](https://richardssam.github.io/ffmpeg-tests/tests/greyramp-fulltv/compare.html)
156 | * [Encoding Guide](Encoding.html#range)
157 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # ASWF Encoding recommendations.
2 |
3 | This contains a variety of test suites to help pick encoding profiles suitable for VFX/Animation media reviews.
4 | We are trying to develop best practices, so we will suggest a good starting point, but there isn't a right answer for all cases, so we also try to explain what the parameters do, and in what situations you might want to choose different parameters.
5 | This is not a site for general-purpose encoding advice; although you may find answers here, we will likely opt for a simple one-size-fits-all solution over file-size efficiency.
6 | We welcome suggestions and improvements.
7 |
8 | This is being done as part of the [ASWF Media Review working-group](https://wiki.aswf.io/display/PRWG/Playback+And+Review+Working+Group).
9 |
10 | The main confluence page for this for now is [here](https://wiki.aswf.io/pages/viewpage.action?pageId=16031068)
11 |
12 | 1. [Acknowledgements](#Acknowledgements)
13 | 2. [Encoding Cheat Sheet](docs/Index.md)
14 | 3. [Encoding Overview](docs/Encoding.md#Encoding-Overview)
15 | 4. [Color space conversion](docs/ColorPreservation.md#Color-space-conversion)
16 | 5. [Media Encoding with ffmpeg](docs/ColorPreservation.md#encodestart)
17 | 1. [RGB to YCrCb Conversion](docs/ColorPreservation.md#yuv)
18 | 2. [TV vs. Full range.](#tvfull)
19 | 3. [RGB encode](#rgbencode)
20 | 6. [Encoding](#encode)
21 | 1. [h264](#h264)
22 | 2. [Prores](#prores)
23 | 7. [Metadata NCLC/NCLX](#nclc)
24 | 1. [Gamut - colorprimaries](#gamut)
25 | 2. [Color Range](#range)
26 | 8. [Web Review](#webreview)
27 |
--------------------------------------------------------------------------------
/Testresults.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | title: Test Output
4 | nav_order: 6
5 | has_children: true
6 | ---
7 |
8 | # Testing Output
9 |
10 | The pages below show some of the web pages that are generated based on the testing we have done.
11 |
--------------------------------------------------------------------------------
/WebColorPreservation.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | title: Web Color Preservation
4 | nav_order: 5
5 | parent: Encoding Overview
6 | ---
7 |
8 |
14 |
15 | # Color Metadata and Web Color Preservation
16 | There are a number of metadata flags designed to help the player know what colorspace the media is in, so it can correctly interpret it for playback. We do recommend adding the metadata tags to media, particularly if you are reviewing it on a web browser, however there are a lot of gotchas here.
17 |
18 | NCLC/NCLX is defined in an ISO spec (see [ISO-23091](https://www.iso.org/standard/73412.html) ). The numbers below are part of that definition. NCLC stands for non-constant luminance coding; a brief overview of its history is here. For MP4 files, it is also known as NCLX. Additionally, this metadata can also be represented in the h264 metadata stream, in the Video Usability Information (VUI) block.
19 |
20 | You can read the metadata using [mp4box.js](https://gpac.github.io/mp4box.js/test/filereader.html) which is a visual browser of the mp4 metadata, and look at moov/trak/mdia/minf/stbl/stsd/avc1/colr property.
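You can also dump the same tags from the command line with ffprobe, for example (a sketch, with a placeholder file name):

```console
ffprobe -v error -select_streams v:0 \
    -show_entries stream=color_range,color_space,color_transfer,color_primaries \
    -of default=noprint_wrappers=1 outputfile.mp4
```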
21 |
22 | NOTE: None of the flags below affect the encoding of the source imagery, they are meant to be used to guide how the mp4 file is decoded.
23 |
24 | The docs are pretty sparse for this; some of the better info is in [FFmpeg/pixfmt.h at master](https://github.com/FFmpeg/FFmpeg/blob/master/libavutil/pixfmt.h)
25 |
26 | There are four possible tags that you can apply to movies:
27 | * color_trc - The transfer function (e.g. gamma)
28 | * color_primaries - e.g. bt709, rec2020, display-p3
29 | * color_range - Is it tv vs. full range
30 | * color_space - Is it YUV vs. RGB
31 |
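For example, a sketch (placeholder file names) setting all four tags while encoding, using the same values recommended in the cheatsheet:

```console
ffmpeg -i inputfile.mov -c:v libx264 -preset slower -pix_fmt yuv420p \
    -color_range tv -colorspace bt709 -color_primaries bt709 -color_trc iec61966-2-1 \
    outputfile.mp4
```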
32 |
33 | For a detailed breakdown of what browsers support what flags see: [here](https://wiki.aswf.io/display/PRWG/Color+fidelity+for+Web+Browsers)
34 |
35 |
36 | # Transfer function tests (color_trc flag)
37 | This is setting the transfer function, which is typically going to be related to the gamma of the display. There are a number of existing gamma profiles, e.g. bt709 or sRGB, as well as gamma 2.2 and 2.8. Having said that, bt709 is frankly rather useless; consequently we recommend using sRGB as a default.
38 |
39 | For more details see: [here](tests/greyramp-osx/colortrccompare.md)
40 |
41 | ## sRGB
42 | Using the `-color_trc iec61966-2-1` flag (the sRGB spec is defined as [iec61966-2-1](https://en.wikipedia.org/wiki/SRGB) ). This appears to be the most reliable one, working across all machines and browsers that support it. It's a shame that the flag has to be so cryptic.
43 |
44 |
45 | *(Image comparison: the source sRGB PNG next to the mp4 video, which should match the PNG.)*
46 |
47 |
48 |
49 | ## bt709
50 | Using the `-color_trc bt709` flag (AKA rec709). This is often the default tag, however it produces the most confusing results. On Chrome this will actually match sRGB, but on Safari it will match the camera bt709 parameters, which roughly match gamma 1.95. NOTE, there is no support at all for BT1886, which is what we would conventionally use for the TV gamma of 2.4; the closest you can get is using QuickTime on OSX.
51 |
52 |
53 | *(Image comparison: the bt709 mp4; a quicktime with a gamma of 1.95, which should be nearly identical to the bt709 mp4 and implies OSX is correctly interpreting camera bt709; and the srgb mp4, which may match the bt709 result. For Chrome on Windows the srgb mp4 should match bt709, which implies it is treating bt709 as sRGB.)*
54 |
55 |
56 |
57 |
58 | Screenshots
59 |
60 | *(Screenshots: the mp4 in Chrome on Windows, and in Safari on OSX.)*
61 |
62 |
63 |
64 | ## Gamma 2.2
65 | Using the `-color_trc gamma22` flag. This does not work correctly on Safari.
66 |
67 |
68 | *(Image comparison: the source gamma 2.2 PNG next to the mp4 video, which should match the PNG.)*
69 |
70 |
71 |
72 | ## Gamma linear
73 | Using the `-color_trc linear` flag. This is unlikely to ever be used for video, however it does make for a good test that something is working.
74 |
75 |
76 | *(Image comparison: the source linear PNG next to the mp4 video, which should match the PNG.)*
77 |
78 |
79 |
80 | ## Summary
81 | We recommend the use of `-color_trc iec61966-2-1` to use sRGB. There is no support for a gamma of 2.4; if you still need it, we recommend that you use `-color_trc unknown` and ensure that your monitor is set correctly.
82 |
83 |
84 | # Gamut colorprimaries
85 |
86 | Normally web browsers use the bt709 color gamut (which is different to the bt709 gamma), but in theory you could define your media as having a wider gamut, e.g. DCI-P3 or rec2020. The files below show a PNG and an MP4 file defined using the rec2020 gamut, so depending on which monitor you are using they will show different text. This is similar to the excellent [WIDE>Gamut](https://www.wide-gamut.com/) test page.
87 |
88 |
89 | *(Image comparison: a rec2020-tagged PNG file next to the mp4 file, which should match the PNG.)*
90 |
91 |
92 |
93 |
94 | *(Reference images: what the image should look like if nothing is working or you have a rec2020 monitor, and what it should look like if you have a display-p3 monitor.)*
95 |
96 |
97 |
98 |
99 | Chrome on Windows, and Safari and Chrome on iOS, will always assume the display is sRGB. In theory [chrome://flags/#force-color-profile](chrome://flags/#force-color-profile) should give you some settings for this, but it seems to be ignored.
100 |
101 |
102 |
103 | # Web Review
104 | See:
105 | * [NCLC Testing Overview](https://richardssam.github.io/ffmpeg-tests/compare.html) This is an overview of the NCLC Tag tests for web review.
106 | * [Comparing different outputs for -color_trc](https://richardssam.github.io/ffmpeg-tests/tests/greyramp-osx/compare.html) - Showing what the -color_trc flag is doing, compared to embedding in mov and png.
107 | * [Comparing different outputs for the -colorprimaries](https://richardssam.github.io/ffmpeg-tests/gamuttests/iccgamut/compare.html)
108 |
--------------------------------------------------------------------------------
/_config.yml:
--------------------------------------------------------------------------------
1 | remote_theme: just-the-docs/just-the-docs
2 | color_scheme: dark
3 | exclude: ["ICC", "*.yml", "enctests/*/*.mov",
4 | "enctests/sources/encsources/*/*.png", "enctests/sources/encsources/*/*.exr",
5 | "enctests/sources/encsources/*/*.tif", "enctests/sources/hdr_sources/*/*.png",
6 | "enctests/*/*.mov",
7 | "enctests/sources/hdr_sources/*/*.exr", "enctests/sources/Sintel-trailer-1080p-png",
8 | "enctests/sources/1080p"]
9 |
10 | url: "https://richardssam.github.io" # the base hostname & protocol for your site, e.g. http://example.com
11 |
12 | # Footer last edited timestamp
13 | last_edit_timestamp: true # show or hide edit time - page must have `last_modified_date` defined in the frontmatter
14 | last_edit_time_format: "%b %e %Y at %I:%M %p" # uses ruby's time format: https://ruby-doc.org/stdlib-2.7.0/libdoc/time/rdoc/Time.html
15 |
16 |
17 |
18 | # Footer "Edit this page on GitHub" link text
19 | gh_edit_link: true # show or hide edit this page link
20 | gh_edit_link_text: "Edit this page on GitHub"
21 | gh_edit_repository: "https://github.com/richardssam/ffmpeg-tests" # the github URL for your repo
22 | gh_edit_branch: "main" # the branch that your docs is served from
23 | #gh_edit_source: docs # the source that your files originate from
24 | gh_edit_view_mode: "tree" # "tree" or "edit" if you want the user to jump into the editor immediately
25 |
--------------------------------------------------------------------------------
/browsercompare/chrome-bt1886-windows.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-bt1886-windows.png
--------------------------------------------------------------------------------
/browsercompare/chrome-gamma2.2-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-gamma2.2-osx.png
--------------------------------------------------------------------------------
/browsercompare/chrome-gamma2.2-windows.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-gamma2.2-windows.png
--------------------------------------------------------------------------------
/browsercompare/chrome-gamma2.8-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-gamma2.8-osx.png
--------------------------------------------------------------------------------
/browsercompare/chrome-gamma2.8-windows.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-gamma2.8-windows.png
--------------------------------------------------------------------------------
/browsercompare/chrome-linear-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-linear-osx.png
--------------------------------------------------------------------------------
/browsercompare/chrome-linear-windows.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-linear-windows.png
--------------------------------------------------------------------------------
/browsercompare/chrome-srgb-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-srgb-osx.png
--------------------------------------------------------------------------------
/browsercompare/chrome-srgb-windows.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/chrome-srgb-windows.png
--------------------------------------------------------------------------------
/browsercompare/firefox-bt1886-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/firefox-bt1886-osx.png
--------------------------------------------------------------------------------
/browsercompare/firefox-gamma2.2-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/firefox-gamma2.2-osx.png
--------------------------------------------------------------------------------
/browsercompare/firefox-linear-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/firefox-linear-osx.png
--------------------------------------------------------------------------------
/browsercompare/firefox-srgb-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/firefox-srgb-osx.png
--------------------------------------------------------------------------------
/browsercompare/safari-bt1886-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/safari-bt1886-osx.png
--------------------------------------------------------------------------------
/browsercompare/safari-gamma2.2-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/safari-gamma2.2-osx.png
--------------------------------------------------------------------------------
/browsercompare/safari-gamma2.8-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/safari-gamma2.8-osx.png
--------------------------------------------------------------------------------
/browsercompare/safari-linear-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/safari-linear-osx.png
--------------------------------------------------------------------------------
/browsercompare/safari-srgb-g22-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/safari-srgb-g22-osx.png
--------------------------------------------------------------------------------
/browsercompare/safari-srgb-osx.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/browsercompare/safari-srgb-osx.png
--------------------------------------------------------------------------------
/docker/README.md:
--------------------------------------------------------------------------------
1 |
2 | ---
3 | layout: default
4 | nav_order: 5.6
5 | title: Docker Container
6 | parent: Encoding Overview
7 | ---
8 |
9 | # Docker container for testing.
10 |
11 | ## Introduction
12 | The docker containers provide a standard environment to run the test suites in the git repo.
13 |
14 | It is based on https://github.com/AcademySoftwareFoundation/aswf-docker/blob/master/ci-vfxall/Dockerfile - providing the ASWF environment including OCIO and OIIO.
15 | It uses the ffmpeg build environment based on https://github.com/jrottenberg/ffmpeg.git, but with vmaf compiled in, and OIIO rebuilt to include ffmpeg support. The ACES 1.2 OCIO config is also checked out, along with the python libraries needed to run the tests.
16 |
17 |
18 | ## Building the containers
19 | 
20 | The runme.sh script in each directory will mount the git repo as "/test" and create a shell to run the tests in.
21 |
22 | ### Building for ffmpeg-4.4
23 | Built on the ASWF vfxall image.
24 |
25 | ```
26 | cd ffmpeg-4.4
27 | docker build -t ffmpeg4.4 .
28 | ./runme.sh
29 | ```
30 |
31 |
32 | ### Building for ffmpeg-5.0
33 | Built on the ASWF vfxall image.
34 |
35 | ```
36 | cd ffmpeg-5.0
37 | docker build -t ffmpeg5.0 .
38 | ./runme.sh
39 | ```
40 |
41 |
42 | ### Building for rocky-ffmpeg-5.1
43 | Built on top of Rocky Linux 9 (binary compatible with RHEL 9).
44 | This builds all the components directly, without relying on any ASWF containers. We have seen some odd behaviour on Ubuntu where the above containers do not build correctly.
45 |
46 | ```
47 | cd rocky-ffmpeg-5.1
48 | docker build -t ffmpeg-5.1 .
49 | ./runme.sh
50 | ```
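
Once inside the container shell, the test suites can be run from the mounted repo. For example (a sketch, assuming the repo is mounted at /test as described above):

```
cd /test/enctests
sh runtest.sh
```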
51 |
52 |
--------------------------------------------------------------------------------
/docker/ffmpeg-4.4/runme.sh:
--------------------------------------------------------------------------------
1 | GITROOT=`git rev-parse --show-toplevel`
2 | docker run \
3 | -it \
4 | --name ffmpeg4.4 \
5 | --mount type=bind,source=${GITROOT},target=/test \
6 | ffmpeg4.4
7 |
--------------------------------------------------------------------------------
/docker/ffmpeg-5.0/runme.sh:
--------------------------------------------------------------------------------
1 | GITROOT=`git rev-parse --show-toplevel`
2 | docker run \
3 | -it \
4 | --name ffmpeg5.0 \
5 | --mount type=bind,source=${GITROOT},target=/test \
6 | ffmpeg5.0
7 |
--------------------------------------------------------------------------------
/docker/ffmpeg-5.1/runme.sh:
--------------------------------------------------------------------------------
1 | GITROOT=`git rev-parse --show-toplevel`
2 | docker run \
3 | -it \
4 | --name ffmpeg5.1 \
5 | --mount type=bind,source=${GITROOT},target=/test \
6 | ffmpeg5.1
7 |
--------------------------------------------------------------------------------
/docker/rocky-ffmpeg-5.1/runme.sh:
--------------------------------------------------------------------------------
1 | GITROOT=`git rev-parse --show-toplevel`
2 | docker run \
3 | -it \
4 | --name ffmpeg-5.1 \
5 | --mount type=bind,source=${GITROOT},target=/test \
6 | ffmpeg-5.1
7 |
--------------------------------------------------------------------------------
/enctests/HDR_Encoding.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | nav_order: 6
4 | title: HDR Encoding
5 | parent: Encoding Overview
6 | ---
7 |
8 | # HDR Encoding.
9 |
10 | *This is under development*
11 |
12 |
13 | ## Frame prep
14 |
15 | There are two main HDR encoding formats, [HLG](https://en.wikipedia.org/wiki/Hybrid_log%E2%80%93gamma) and [PQ](https://en.wikipedia.org/wiki/Perceptual_quantizer). We are choosing HLG since it is a slightly simpler format and requires less additional metadata.
16 |
17 | We take advantage of ACES to do the initial conversion to an intermediate format, using PNG as the container.
18 |
19 | ```console
20 | oiiotool -v --framepadding 5 --frames 6700-6899 sparks2/SPARKS_ACES_#.exr --resize 1920x1014 \
21 | --colorconvert acescg out_rec2020hlg1000nits -d uint16 -o sparks2_hlg/sparks2_hlg.#.png
22 | ```
23 |
24 | | --- | --- |
25 | |--colorconvert acescg out_rec2020hlg1000nits | This is the core colorspace conversion; out_rec2020hlg1000nits is the ACES output transform for Rec.2020 HLG on a 1000 nit display |
26 | | -d uint16 | Output as a 16-bit file |
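
Note that `out_rec2020hlg1000nits` comes from the ACES 1.2 OCIO config, so the `OCIO` environment variable needs to point at that config before running oiiotool (this is what `download_media.sh` does):

```console
export OCIO=$PWD/OpenColorIO-Configs-1.2/aces_1.2/config.ocio
```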
27 |
28 |
29 |
30 | ## HLG 444 FFMPEG encoding
31 |
32 | ```console
33 | ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int \
34 | -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc \
35 | -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png \
36 | -c:v libx265 \
37 | -tag:v hvc1 \
38 | -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc \
39 | -pix_fmt yuv444p10le -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int \
40 | -x265-params 'colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400' \
41 | sparks2_hlg_444.mov
42 | ```
43 |
44 | NOTE: this is a little different to the other conversions (is this better?). We are defining up front how the source media should be interpreted (e.g. `-color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc`).
45 |
46 | ### Source media definition.
47 |
48 | | --- | --- |
49 | | -color_range pc | Set the source range to be full-range |
50 | | -color_trc arib-std-b67 | ARIB STD-B67 is the HLG transfer function |
51 | | -color_primaries bt2020 | Use the bt2020 color primaries |
52 | | -colorspace bt2020nc | Tag the YCbCr as being encoded using BT.2020 non-constant luminance |
53 | | -pix_fmt rgb48be | We are assuming 16-bit RGB imagery as input |
54 |
55 |
56 | ### Overall encode params
57 |
58 | | --- | --- |
59 | | -c:v libx265 | Use the libx265 (H.265/HEVC) encoder |
60 | | -tag:v hvc1 | Tag the file for playback on macOS |
61 |
62 | ### Encode media definition
63 |
64 | | --- | --- |
65 | | -color_range tv | Set the output range to be tv (limited) range |
66 | | -color_trc arib-std-b67 | ARIB STD-B67 is the HLG transfer function |
67 | | -color_primaries bt2020 | Use the bt2020 color primaries |
68 | | -colorspace bt2020nc | Tag the YCbCr as being encoded using BT.2020 non-constant luminance |
69 | | -pix_fmt yuv444p10le | YUV 4:4:4 10-bit output |
70 |
71 | ### X265 parameters
72 |
73 | We explicitly define the X265 parameters (see [x265](https://x265.readthedocs.io/en/2.5/cli.html) )
74 |
75 | | --- | --- |
76 | | colorprim=bt2020 | Set the color primaries to bt2020 |
77 | | transfer=arib-std-b67 | Set the transfer function to HLG (aka arib-std-b67) |
78 | | colormatrix=bt2020nc | Tag the YCbCr as being encoded using BT.2020 non-constant luminance |
79 | | range=limited | Set the output range to be tv (limited) range |
80 | | master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\) | SMPTE ST 2086 mastering display color volume SEI info, specified as a string that is parsed into the stream header. It sets the x,y primaries and white point of the mastering display in increments of 0.00002 (here the P3 D65 primaries), along with the max/min mastering luminance in units of 0.0001 nits (here 1000 and 0.0001 nits). See the x265 docs above for more info. |
81 | | max-cll=1000,400 | Set the maximum content light level (1000 nits) and maximum frame-average light level (400 nits) |
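
As a quick sanity check (not part of the encode itself), ffprobe can be used to confirm that the color metadata made it into the stream, for example:

```console
ffprobe -v error -select_streams v:0 \
    -show_entries stream=pix_fmt,color_range,color_space,color_transfer,color_primaries \
    -of default=noprint_wrappers=1 sparks2_hlg_444.mov
```

This should report `arib-std-b67` for the transfer, `bt2020` for the primaries and `bt2020nc` for the colorspace.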
82 |
83 |
84 | ## HLG 420 FFMPEG encoding
85 |
86 | ```console
87 | ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int \
88 | -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc \
89 | -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png \
90 | -c:v libx265 \
91 | -tag:v hvc1 \
92 | -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc \
93 | -pix_fmt yuv420p10le \
94 | -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int \
95 | -x265-params 'colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400' \
96 | sparks2_hlg_420.mov
97 |
98 | ```
99 |
100 |
101 | ## HLG 420 FFMPEG Encoding (alternative argument order)
102 |
103 | ```console
104 | ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int \
105 | -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc \
106 | -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png \
107 | -c:v libx265 -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc \
108 | -pix_fmt yuv420p10le \
109 | -tag:v hvc1 \
110 | -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int \
111 | -x265-params 'colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400' \
112 | sparks2_hlg_420_v2.mov
113 | ```
114 |
115 |
116 | Further Reading:
117 | * [H.273 Specification.](https://www.itu.int/rec/T-REC-H.273-202107-I/en)
118 |
--------------------------------------------------------------------------------
/enctests/reference-results/README.md:
--------------------------------------------------------------------------------
1 | This folder contains previous runs of the test suites, along with graphs that are included in the documentation.
2 | We do not include the actual encoded movie files since they can get quite large.
3 |
--------------------------------------------------------------------------------
/enctests/reference-results/h264-crf-test-encode_time.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/reference-results/h264-crf-test-encode_time.png
--------------------------------------------------------------------------------
/enctests/reference-results/h264-crf-test-filesize.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/reference-results/h264-crf-test-filesize.png
--------------------------------------------------------------------------------
/enctests/reference-results/h264-crf-test-vmaf_harmonic_mean.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/reference-results/h264-crf-test-vmaf_harmonic_mean.png
--------------------------------------------------------------------------------
/enctests/reference-results/prores-test-encode_time.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/reference-results/prores-test-encode_time.png
--------------------------------------------------------------------------------
/enctests/reference-results/prores-test-filesize.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/reference-results/prores-test-filesize.png
--------------------------------------------------------------------------------
/enctests/reference-results/prores-test-vmaf_harmonic_mean.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/reference-results/prores-test-vmaf_harmonic_mean.png
--------------------------------------------------------------------------------
/enctests/runtest.sh:
--------------------------------------------------------------------------------
1 | python3 -m testframework.main --source-folder sources/enc_sources --test-config test_configs/prores_tests.yml --output prores-results.otio --encoded-folder prores-encode
2 |
3 | python3 -m testframework.otio2html --test-config test_configs/prores_tests.yml --results prores-results.otio
4 |
5 | python3 -m testframework.main --source-folder sources/enc_sources --test-config test_configs/h264_tests.yml --output h264-results.otio --encoded-folder h264-encode
6 |
7 | python3 -m testframework.otio2html --test-config test_configs/h264_tests.yml --results h264-results.otio
8 |
9 | python3 -m testframework.main --source-folder sources/enc_sources --test-config test_configs/h264_crf_tests.yml --output h264-crf-results.otio --encoded-folder h264-crf-encode
10 |
11 | python3 -m testframework.otio2html --test-config test_configs/h264_crf_tests.yml --results h264-crf-results.otio
12 |
13 |
14 | #WIP
15 | python3 -m testframework.generatetests
16 |
17 | python3 -m testframework.main --test-config test_configs/documentation_tests.yml --output doc-results.otio --encoded-folder docs-encode
18 | python3 -m testframework.otio2html --test-config test_configs/documentation_tests.yml --results doc-results.otio
19 |
--------------------------------------------------------------------------------
/enctests/sources/.gitignore:
--------------------------------------------------------------------------------
1 | *.png
2 | *.source
3 | *.exr
4 |
--------------------------------------------------------------------------------
/enctests/sources/download_media.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/bash -xv
2 |
3 | if [ ! -d Sintel-trailer-1080p-png ]
4 | then
5 | echo Downloading Sintel-trailer-1080p-png
6 | curl https://s3.amazonaws.com/senkorasic.com/test-media/video/sintel/source/Sintel-trailer-1080p-png.zip -o Sintel-trailer-1080p-png.zip
7 | unzip Sintel-trailer-1080p-png.zip -d Sintel-trailer-1080p-png
8 | rm -rf Sintel-trailer-1080p-png.zip
9 | fi
10 |
11 | #if [ ! -f SMPTE_Color_Bars.png ]
12 | #then
13 | # echo Downloading color bars.
14 | # # from https://commons.wikimedia.org/wiki/File:SMPTE_Color_Bars_16x9.svg
15 | # curl https://upload.wikimedia.org/wikipedia/commons/6/60/SMPTE_Color_Bars_16x9.svg -o SMPTE_Color_Bars.svg
16 | # convert -verbose -size 1920x1080 SMPTE_Color_Bars.svg SMPTE_Color_Bars.png
17 | # fi
18 |
19 | #mkdir -p /usr/local/share/model
20 | #curl https://raw.githubusercontent.com/Netflix/vmaf/master/model/vmaf_v0.6.1.pkl.model -o /usr/local/share/model/vmaf_v0.6.1.pkl.model
21 | #curl https://raw.githubusercontent.com/Netflix/vmaf/master/model/vmaf_v0.6.1.pkl -o /usr/local/share/model/vmaf_v0.6.1.pkl
22 |
23 | #downloading sparks
24 |
25 | # https://opencontent.netflix.com/
26 | # This first bit of media is really just for sparks, which hopefully should stress encoders.
27 | if [ ! -d sparks ]
28 | then
29 | echo Downloading netflix sparks pt1.
30 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/sparks/aces_image_sequence_59_94_fps/ sparks --recursive --exclude "*" --include "SPARKS_ACES_061*.exr"
31 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/sparks/aces_image_sequence_59_94_fps/ sparks --recursive --exclude "*" --include "SPARKS_ACES_062*.exr"
32 | fi
33 |
34 |
35 | # https://opencontent.netflix.com/
36 | # This is a second extract from sparks, which is a great HDR test.
37 | if [ ! -d sparks2 ]
38 | then
39 | echo Downloading netflix sparks pt2.
40 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/sparks/aces_image_sequence_59_94_fps/ sparks2 --recursive --exclude "*" --include "SPARKS_ACES_067*.exr"
41 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/sparks/aces_image_sequence_59_94_fps/ sparks2 --recursive --exclude "*" --include "SPARKS_ACES_068*.exr"
42 | fi
43 |
44 | #if [ ! -d OpenColorIO-Configs-1.2 ]
45 | #then
46 | # echo Downloading ACES OCIO 1.2
47 | # curl -L https://github.com/colour-science/OpenColorIO-Configs/archive/refs/tags/v1.2.tar.gz > v1.2.tar.gz
48 | # tar zxvf v1.2.tar.gz OpenColorIO-Configs-1.2/aces_1.2
49 | #fi
50 |
51 | export OCIO=$PWD/OpenColorIO-Configs-1.2/aces_1.2/config.ocio
52 | if [ ! -d sparks_srgb ]
53 | then
54 | mkdir sparks_srgb
55 | echo Building sparks png
56 | oiiotool -v --framepadding 5 --frames 6100-6299 sparks/SPARKS_ACES_#.exr --resize 1920x1014 \
57 | --colorconvert linear srgb --dither -o sparks_srgb/sparks_srgb.#.png
58 |
59 | fi
60 |
61 | if [ ! -d sparks2_srgb ]
62 | then
63 | mkdir sparks2_srgb
64 | echo Building sparks2_srgb
65 | oiiotool -v --framepadding 5 --frames 6700-6899 sparks2/SPARKS_ACES_#.exr --resize 1920x1014 \
66 | --colorconvert acescg out_srgb --dither -o sparks2_srgb/sparks2_srgb.#.png
67 |
68 | fi
69 |
70 | if [ ! -d sparks2_hlg ]
71 | then
72 | mkdir sparks2_hlg
73 | echo Building sparks2_hlg
74 | oiiotool -v --framepadding 5 --frames 6700-6899 sparks2/SPARKS_ACES_#.exr --resize 1920x1014 --colorconvert acescg out_rec2020hlg1000nits -d uint16 -o sparks2_hlg/sparks2_hlg.#.png
75 | fi
76 |
77 | # Encoding test
78 | # ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png -c:v libx265 -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt yuv444p10le -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -x265-params ‘colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400’ sparks2_hlg_444.mov
79 |
80 | # ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png -c:v libx265 -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt yuv420p10le -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -x265-params ‘colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400’ sparks2_hlg_420.mov
81 |
82 | #ffmpeg -r 30 -start_number 6700 -i sparks2_srgb/sparks2_srgb.%05d.png -vf "scale=in_color_matrix=bt709:out_color_matrix=bt709" \
83 | # -c:v libx264 -preset slower -pix_fmt yuv420p \
84 | # -color_range 1 -colorspace 1 -color_primaries 1 -color_trc 13 sparks2_srgb_420_v2.mov
85 |
86 | # ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png -c:v libx265 -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt yuv420p10le -tag:v hvc1 -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -x265-params ‘colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400’ sparks2_hlg_420_v2.mov
87 |
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | nav_order: 7.1
4 | title: Reference Source Test Media
5 | parent: ASWF Encoding Test Framework
6 | ---
7 |
8 | # Reference Source Test Media.
9 |
10 | We have collected a number of source clips that you can use for testing; these are mostly from the excellent Netflix [Open Source Content](https://opencontent.netflix.com/) site. Some of the clips are quite large, so we have tried to extract shorter 100-200 frame clips for the encoding tests, and also to extract individual shots from edited sequences.
11 |
12 | You can download the clips using the `download_media.sh` shell script. The script will also convert the frames to the appropriate test image type and color space using oiiotool.
13 |
14 | The clips that are created include:
15 |
16 | |  | [Chimera cars](https://opencontent.netflix.com/#h.nv6npqncwttv) | creating sRGB versions from HDR_P3PQ exr files converted to sRGB|
17 | |  | [Chimera fountains](https://opencontent.netflix.com/#h.nv6npqncwttv) | creating sRGB versions from HDR_P3PQ exr files converted to sRGB |
18 | |  | [chimera wind chimes](https://opencontent.netflix.com/#h.nv6npqncwttv) | creating sRGB versions from HDR_P3PQ exr files converted to sRGB|
19 |
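For reference, the conversion `download_media.sh` performs for these clips is an oiiotool resize plus dither down to PNG; the chimera_cars version, for example, looks like this:

```
oiiotool -v --framepadding 5 --frames 2500-2699 -i chimera_cars/Chimera_DCI4k2398p_HDR_P3PQ_@@@@@.tif \
    --resize 2048x1080 -dither -o chimera_cars_srgb/chimera_cars_srgb.#.png
```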
20 |
21 | Other media that could be useful include:
22 | |  | [sintel 1080p-png trailer download](https://s3.amazonaws.com/senkorasic.com/test-media/video/sintel/source/Sintel-trailer-1080p-png.zip) | Blender CG created media. |
23 |
24 |
25 | There is other reference media at [media.xiph.org](http://media.xiph.org/), and [media.xiph.org/video/derf](http://media.xiph.org/video/derf) in particular. Sadly, most of this media is in the [y4m](https://wiki.multimedia.cx/index.php/YUV4MPEG2) file format, but it can be a good place to look for other test media. This dataset is used by the [https://arewecompressedyet.com/?](https://arewecompressedyet.com/?) site, which is used for comparing different codecs (see [https://github.com/xiph/awcy](https://github.com/xiph/awcy) for the source of the web site).
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/chimera_cars_srgb/chimera_cars_srgb.%05d.png.yml:
--------------------------------------------------------------------------------
1 | images: true
2 | path: /test/enctests/sources/enc_sources/chimera_cars_srgb/chimera_cars_srgb.%05d.png
3 | width: 2048
4 | height: 1080
5 | pix_fmt: rgb48be
6 | in: 2500
7 | duration: 200
8 | rate: 25.0
9 |
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/chimera_coaster_srgb/chimera_coaster_srgb.%06d.png.yml:
--------------------------------------------------------------------------------
1 | images: true
2 | path: /test/enctests/sources/enc_sources/chimera_coaster_srgb/chimera_coaster_srgb.%06d.png
3 | width: 2048
4 | height: 1080
5 | pix_fmt: rgb48be
6 | in: 44200
7 | duration: 200
8 | rate: 25.0
9 |
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/chimera_fountains_srgb/chimera_fountains_srgb.%05d.png.yml:
--------------------------------------------------------------------------------
1 | images: true
2 | path: /test/enctests/sources/enc_sources/chimera_fountains_srgb/chimera_fountains_srgb.%05d.png
3 | width: 2048
4 | height: 1080
5 | pix_fmt: rgb48be
6 | in: 5400
7 | duration: 200
8 | rate: 25.0
9 |
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/download_media.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/bash -xv
2 |
3 | #downloading chimera
4 |
5 | # https://opencontent.netflix.com/
6 | # This first bit of media is the Chimera wind chimes clip, which hopefully should stress the encoders.
7 | if [ ! -d chimera_wind_srgb ]
8 | then
9 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/Chimera/tif_DCI4k5994p/ chimera_wind --recursive --exclude "*" --include "*_790*.tif"
10 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/Chimera/tif_DCI4k5994p/ chimera_wind --recursive --exclude "*" --include "*_791*.tif"
11 | fi
12 |
13 | if [[ ! -d chimera_coaster_srgb && ! -d chimera_coaster ]]
14 | then
15 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/Chimera/tif_DCI4k5994p/ chimera_coaster --recursive --exclude "*" --include "*_0442*.tif"
16 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/Chimera/tif_DCI4k5994p/ chimera_coaster --recursive --exclude "*" --include "*_0443*.tif"
17 | fi
18 |
19 |
20 | if [ ! -d chimera_cars_srgb ]
21 | then
22 | echo Downloading netflix cars.
23 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/Chimera/tif_DCI4k2398p/ chimera_cars --recursive --exclude "*" --include "*_025*.tif"
24 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/Chimera/tif_DCI4k2398p/ chimera_cars --recursive --exclude "*" --include "*_026*.tif"
25 | fi
26 |
27 | if [ ! -d chimera_fountains_srgb ]
28 | then
29 | echo Downloading netflix fountains
30 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/Chimera/tif_DCI4k2398p/ chimera_fountains --recursive --exclude "*" --include "*_054*.tif"
31 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/Chimera/tif_DCI4k2398p/ chimera_fountains --recursive --exclude "*" --include "*_055*.tif"
32 | fi
33 |
34 | # I think the file labeling is actually incorrect; this media is actually pretty close to gamma 2.2 or 2.4 - it's not HDR at all.
35 | # The associated mov file has no HDR metadata associated with it, so I'm going to assume no color space conversion is necessary.
36 |
37 | if [ ! -d chimera_wind_srgb ]
38 | then
39 | mkdir chimera_wind_srgb
40 | echo Building chimera_wind png
41 | oiiotool -v --framepadding 6 --frames 79000-79199 -i chimera_wind/Chimera_DCI4k5994p_HDR_P3PQ_@@@@@@.tif --resize 2048x1080 -dither -o chimera_wind_srgb/chimera_wind_srgb.#.png
42 | rm -rf chimera_wind
43 | fi
44 |
45 | if [ ! -d chimera_coaster_srgb ]
46 | then
47 | mkdir chimera_coaster_srgb
48 | echo Building chimera_coaster_srgb png
49 | oiiotool -v --framepadding 6 --frames 44200-44399 -i chimera_coaster/Chimera_DCI4k5994p_HDR_P3PQ_@@@@@@.tif --resize 2048x1080 -dither -o chimera_coaster_srgb/chimera_coaster_srgb.#.png
50 | #rm -rf chimera_coaster
51 | fi
52 |
53 |
54 | if [ ! -d chimera_cars_srgb ]
55 | then
56 | mkdir chimera_cars_srgb
57 | echo Building chimera_cars png
58 | oiiotool -v --framepadding 5 --frames 2500-2699 -i chimera_cars/Chimera_DCI4k2398p_HDR_P3PQ_@@@@@.tif --resize 2048x1080 -dither -o chimera_cars_srgb/chimera_cars_srgb.#.png
59 | rm -rf chimera_cars
60 | fi
61 |
62 |
63 | if [ ! -d chimera_fountains_srgb ]
64 | then
65 | mkdir chimera_fountains_srgb
66 | echo Building chimera_fountains png
67 | oiiotool -v --framepadding 5 --frames 5400-5599 -i chimera_fountains/Chimera_DCI4k2398p_HDR_P3PQ_@@@@@.tif --resize 2048x1080 -dither -o chimera_fountains_srgb/chimera_fountains_srgb.#.png
68 | rm -rf chimera_fountains
69 | fi
70 |
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/thumbnails/chimera_cars_srgb.02516.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/sources/enc_sources/thumbnails/chimera_cars_srgb.02516.jpg
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/thumbnails/chimera_fountains_srgb.05439.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/sources/enc_sources/thumbnails/chimera_fountains_srgb.05439.jpg
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/thumbnails/chimera_wind_srgb.01126.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/sources/enc_sources/thumbnails/chimera_wind_srgb.01126.jpg
--------------------------------------------------------------------------------
/enctests/sources/enc_sources/thumbnails/sintel_trailer_2k_0591.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/sources/enc_sources/thumbnails/sintel_trailer_2k_0591.jpg
--------------------------------------------------------------------------------
/enctests/sources/hdr_sources/README.md:
--------------------------------------------------------------------------------
1 | ---
2 | layout: default
3 | nav_order: 7.2
4 | title: HDR Reference Source Test Media
5 | parent: ASWF Encoding Test Framework
6 | ---
7 |
8 | # Reference Source Test Media for HDR Media.
9 |
10 | We have collected a number of source clips that you can use for testing; these are mostly from the excellent Netflix [Open Source Content](https://opencontent.netflix.com/) site. Some of the clips are quite large, so we have tried to extract shorter 100-200 frame clips for the encoding tests, and also to extract individual shots from edited sequences.
11 |
12 | You can download the clips using the `download_media.sh` shell script. The script will also convert the frames to the appropriate test image type and color space using oiiotool.
13 |
14 | The clips that are created include:
15 |
16 | |  | [Sparks part 1](https://opencontent.netflix.com/#h.d0oh6u8prqhe) | Creating both srgb and hlg color space versions for testing HDR and regular sRGB encodes |
17 | |  | [Sparks part 2](https://opencontent.netflix.com/#h.d0oh6u8prqhe) | Creating both srgb and hlg color space versions for testing HDR and regular sRGB encodes |
18 |
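For reference, the HLG intermediate frames used in the HDR encoding tests are generated by `download_media.sh` with an oiiotool conversion like this:

```
oiiotool -v --framepadding 5 --frames 6700-6899 sparks2/SPARKS_ACES_#.exr --resize 1920x1014 \
    --colorconvert acescg out_rec2020hlg1000nits -d uint16 -o sparks2_hlg/sparks2_hlg.#.png
```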
--------------------------------------------------------------------------------
/enctests/sources/hdr_sources/download_media.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/bash -xv
2 |
3 | #downloading sparks
4 |
5 | # https://opencontent.netflix.com/
6 | # This first bit of media is really just for sparks, which hopefully should stress encoders.
7 | if [ ! -d sparks ]
8 | then
9 | echo Downloading netflix sparks pt1.
10 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/sparks/aces_image_sequence_59_94_fps/ sparks --recursive --exclude "*" --include "SPARKS_ACES_061*.exr"
11 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/sparks/aces_image_sequence_59_94_fps/ sparks --recursive --exclude "*" --include "SPARKS_ACES_062*.exr"
12 | fi
13 |
14 |
15 | # https://opencontent.netflix.com/
16 | # This is a second extract from sparks, which is a great HDR test.
17 | if [ ! -d sparks2 ]
18 | then
19 | echo Downloading netflix sparks pt2.
20 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/sparks/aces_image_sequence_59_94_fps/ sparks2 --recursive --exclude "*" --include "SPARKS_ACES_067*.exr"
21 | aws s3 cp --no-sign-request s3://download.opencontent.netflix.com/sparks/aces_image_sequence_59_94_fps/ sparks2 --recursive --exclude "*" --include "SPARKS_ACES_068*.exr"
22 | fi
23 |
24 | if [ ! -d sparks_srgb ]
25 | then
26 | mkdir sparks_srgb
27 | echo Building sparks png
28 | oiiotool -v --framepadding 5 --frames 6100-6299 sparks/SPARKS_ACES_#.exr --resize 1920x1014 \
29 | --colorconvert linear srgb --dither -o sparks_srgb/sparks_srgb.#.png
30 |
31 | fi
32 |
33 | if [ ! -d sparks2_srgb ]
34 | then
35 | mkdir sparks2_srgb
36 | echo Building sparks2_srgb
37 | oiiotool -v --framepadding 5 --frames 6700-6899 sparks2/SPARKS_ACES_#.exr --resize 1920x1014 \
38 | --colorconvert acescg out_srgb --dither -o sparks2_srgb/sparks2_srgb.#.png
39 |
40 | fi
41 |
42 | if [ ! -d sparks2_hlg ]
43 | then
44 | mkdir sparks2_hlg
45 | echo Building sparks2_hlg
46 | oiiotool -v --framepadding 5 --frames 6700-6899 sparks2/SPARKS_ACES_#.exr --resize 1920x1014 --colorconvert acescg out_rec2020hlg1000nits -d uint16 -o sparks2_hlg/sparks2_hlg.#.png
47 | fi
48 |
49 | # Encoding test
50 | # ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png -c:v libx265 -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt yuv444p10le -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -x265-params ‘colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400’ sparks2_hlg_444.mov
51 |
52 | # ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png -c:v libx265 -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt yuv420p10le -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -x265-params ‘colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400’ sparks2_hlg_420.mov
53 |
54 | #ffmpeg -r 30 -start_number 6700 -i sparks2_srgb/sparks2_srgb.%05d.png -vf "scale=in_color_matrix=bt709:out_color_matrix=bt709" \
55 | # -c:v libx264 -preset slower -pix_fmt yuv420p \
56 | # -color_range 1 -colorspace 1 -color_primaries 1 -color_trc 13 sparks2_srgb_420_v2.mov
57 |
58 | # ffmpeg -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -color_range pc -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt rgb48be -r 30 -start_number 6700 -i sparks2_hlg/sparks2_hlg.%05d.png -c:v libx265 -color_range tv -color_trc arib-std-b67 -color_primaries bt2020 -colorspace bt2020nc -pix_fmt yuv420p10le -tag:v hvc1 -sws_flags print_info+accurate_rnd+bitexact+full_chroma_int -x265-params ‘colorprim=bt2020:transfer=arib-std-b67:colormatrix=bt2020nc:range=limited:master-display=G\(13250,34500\)B\(7500,3000\)R\(34000,16000\)WP\(15635,16450\)L\(10000000,1\):max-cll=1000,400’ sparks2_hlg_420_v2.mov
59 |
--------------------------------------------------------------------------------
/enctests/sources/hdr_sources/thumbnails/sparks2_srgb.06726.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/sources/hdr_sources/thumbnails/sparks2_srgb.06726.jpg
--------------------------------------------------------------------------------
/enctests/sources/hdr_sources/thumbnails/sparks_srgb.6015.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/sources/hdr_sources/thumbnails/sparks_srgb.6015.jpg
--------------------------------------------------------------------------------
/enctests/test_configs/base_tests.yml:
--------------------------------------------------------------------------------
1 | ---
2 | test_scale_yuv444p10le:
3 | name: test_scale_yuv444p10le
4 | description: scale (yuv444p10le)
5 | app: ffmpeg
6 | suffix: .mov
7 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
8 | wedges:
9 | wedge0: &base_args
10 | -c:v: libx264
11 | -preset: placebo
12 | -qp: 0
13 | -x264-params: '"keyint=15:no-deblock=1"'
14 | -pix_fmt: yuv444p10le
15 | -sws_flags: spline+accurate_rnd+full_chroma_int
16 | -vf: '"scale=in_range=full:in_color_matrix=bt709:out_range=tv:out_color_matrix=bt709"'
17 | -color_range: 1
18 | -colorspace: 1
19 | -color_primaries: 1
20 | -color_trc: 2
21 |
22 | ---
23 | test_colorspace_rgb:
24 | name: test_colorspace_rgb
25 | description: colorspace_rgb
26 | app: ffmpeg
27 | suffix: .mov
28 | testmask: ../sourceimages/1920px-SMPTE_Color_Bars_16x9-edges.png
29 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
30 | wedges:
31 | wedge0: &base_args
32 | -c:v: libx264
33 | -preset: slow
34 | -crf: 18
35 | -x264-params: '"keyint=15:no-deblock=1"'
36 |
37 | ---
38 | test_colorspace_yuv420p:
39 | name: test_colorspace_yuv420p
40 | description: variations of colorspace yuv420p
41 | app: ffmpeg
42 | suffix: .mov
43 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
44 | wedges:
45 | slow_crf_23: &base_args
46 | -c:v: libx264
47 | -preset: slow
48 | -crf: 23
49 | -x264-params: '"keyint=15:no-deblock=1"'
50 | -pix_fmt: yuv420p
51 | -sws_flags: spline+accurate_rnd+full_chroma_int
52 | -vf: '"scale=in_range=full:in_color_matrix=bt709:out_range=tv:out_color_matrix=bt709"'
53 | -color_range: 1
54 | -colorspace: 1
55 | -color_primaries: 1
56 | -color_trc: 2
57 |
58 | slower_crf_18:
59 | << : *base_args
60 | -preset: slower
61 | -crf: 18
62 |
63 | slower_crf_18_film:
64 | << : *base_args
65 | -preset: slower
66 | -crf: 18
67 | -tune: film
68 |
69 | slow_full_range:
70 | << : *base_args
71 | -crf: 18
72 | -vf: '"scale=in_range=full:in_color_matrix=bt709:out_range=full:out_color_matrix=bt709"'
73 | -color_range: 2
74 | -color_trc: 1
75 |
76 | ---
77 | test_shotgun_diy_encode:
78 | name: test_shotgun_diy_encode
79 | description: From https://support.shotgunsoftware.com/hc/en-us/articles/219030418-Do-it-yourself-DIY-transcoding
80 | app: ffmpeg
81 | suffix: .mov
82 | testmask: ../sourceimages/1920px-SMPTE_Color_Bars_16x9-edges.png
83 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
84 | wedges:
85 | wedge0: &base_args
86 | -vcodec: libx264
87 | -pix_fmt: yuv420p
88 | -g: 30
89 | -vprofile: high
90 | -bf: 0
91 | -crf: 2
92 |
93 | ---
94 | test_wdi-mpeg2:
95 | name: test_wdi-mpeg2
96 | description: test_wdi-mpeg2
97 | app: ffmpeg
98 | suffix: .mov
99 | testmask: ../sourceimages/1920px-SMPTE_Color_Bars_16x9-edges.png
100 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
101 | wedges:
102 | wedge0: &base_args
103 | -vcodec: mpeg2video
104 | -profile:v: 4
105 | -level:v: 4
106 | -b:v: 38M
107 | -bt: 38M
108 | -q:v: 1
109 | -maxrate: 38M
110 | -pix_fmt: yuv420p
111 | -vf: colormatrix=bt601:bt709
112 |
113 | ---
114 | test_wdi-prores444_scale:
115 | name: test_wdi-prores444_scale
116 | description: test_wdi-prores444_scale
117 | app: ffmpeg
118 | suffix: .mov
119 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
120 | wedges:
121 | wedge0: &base_args
122 | -c:v: prores_ks
123 | -profile:v: 4444
124 | -qscale:v: 1
125 | -vendor: ap10
126 | -pix_fmt: yuv444p10le
127 | -sws_flags: spline+accurate_rnd+full_chroma_int
128 | -vf: '"scale=in_range=full:in_color_matrix=bt709:out_range=tv:out_color_matrix=bt709"'
129 | -color_range: 1
130 | -colorspace: 1
131 | -color_primaries: 1
132 | -color_trc: 2
133 |
--------------------------------------------------------------------------------
/enctests/test_configs/h264_crf25_tests.yml:
--------------------------------------------------------------------------------
1 |
2 | test_preset_yuv420p:
3 | name: test_colorspace_yuv420p
4 | description: variations of colorspace yuv420p
5 | app: ffmpeg
6 | suffix: .mov
7 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
8 | wedges:
9 | medium_crf_25: &base_args2
10 | -c:v: libx264
11 | -preset: medium
12 | -crf: 25
13 | -x264-params: '"keyint=15:no-deblock=1"'
14 | -pix_fmt: yuv420p
15 | -sws_flags: spline+accurate_rnd+full_chroma_int
16 | -vf: '"scale=in_range=full:in_color_matrix=bt709:out_range=tv:out_color_matrix=bt709"'
17 | -color_range: 1
18 | -colorspace: 1
19 | -color_primaries: 1
20 | -color_trc: 2
21 |
22 | slow_crf_25:
23 | << : *base_args2
24 | -preset: slow
25 |
26 | slower_crf_25:
27 | << : *base_args2
28 | -preset: slower
29 |
30 | veryslow_crf_25:
31 | << : *base_args2
32 | -preset: veryslow
33 |
34 | placebo_crf_25:
35 | << : *base_args2
36 | -preset: placebo
37 | ---
38 |
39 | reports:
40 | graphs:
41 | - args:
42 | color: -preset
43 | height: 400
44 | barmode: group
45 | x: media
46 | y: vmaf_harmonic_mean
47 | range_y:
48 | - 90
49 | - 100
50 | name: vmaf_harmonic_mean.png
51 | type: bar
52 | - args:
53 | color: -preset
54 | height: 400
55 | x: media
56 | barmode: group
57 | y: encode_time
58 | name: encode_time.png
59 | type: bar
60 | - args:
61 | color: -preset
62 | height: 400
63 | x: media
64 | barmode: group
65 | y: filesize
66 | name: filesize.png
67 | type: bar
68 | name: h264-test
69 | title: H264 Preset Comparison
70 | description: This is a comparison of different Preset values for h264 encoding, with CRF = 25.
71 | directory: h264-crf25-encode
72 | templatefile: basic.html.jinja
73 |
74 |
--------------------------------------------------------------------------------
/enctests/test_configs/h264_crf_tests.yml:
--------------------------------------------------------------------------------
1 | test_crf_yuv420p:
2 | name: test_colorspace_yuv420p
3 | description: variations of colorspace yuv420p
4 | app: ffmpeg
5 | suffix: .mov
6 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
7 | wedges:
8 | slow_crf_10: &base_args
9 | -c:v: libx264
10 | -preset: slow
11 | -crf: 10
12 | -x264-params: '"keyint=15:no-deblock=1"'
13 | -pix_fmt: yuv420p
14 | -sws_flags: spline+accurate_rnd+full_chroma_int
15 | -vf: '"scale=in_range=full:in_color_matrix=bt709:out_range=tv:out_color_matrix=bt709"'
16 | -color_range: 1
17 | -colorspace: 1
18 | -color_primaries: 1
19 | -color_trc: 2
20 |
21 | slow_crf_12:
22 | << : *base_args
23 | -crf: 12
24 |
25 | slow_crf_14:
26 | << : *base_args
27 | -crf: 14
28 |
29 | slow_crf_16:
30 | << : *base_args
31 | -crf: 16
32 |
33 | slow_crf_18:
34 | << : *base_args
35 | -crf: 18
36 |
37 | slow_crf_20:
38 | << : *base_args
39 | -crf: 20
40 |
41 | slow_crf_22:
42 | << : *base_args
43 | -crf: 22
44 |
45 | slow_crf_25:
46 | << : *base_args
47 | -crf: 25
48 |
49 | ---
50 |
51 | reports:
52 | graphs:
53 | - args:
54 | color: media
55 | height: 400
56 | x: -crf
57 | y: vmaf_harmonic_mean
58 | markers: True
59 | name: vmaf_harmonic_mean.png
60 | sortby: name
61 | - args:
62 | color: media
63 | height: 400
64 | x: -crf
65 | y: encode_time
66 | markers: True
67 | name: encode_time.png
68 | sortby: name
69 | - args:
70 | color: media
71 | height: 400
72 | x: -crf
73 | y: filesize
74 | markers: True
75 | name: filesize.png
76 | sortby: name
77 | name: h264-crf-test
78 | title: H264 CRF Comparison
79 | description: This is a comparison of different CRF values for h264 encoding.
80 | directory: h264-crf-encode
81 | templatefile: basic.html.jinja
82 |
83 |
--------------------------------------------------------------------------------
/enctests/test_configs/h264_tests.yml:
--------------------------------------------------------------------------------
1 |
2 | test_preset_yuv420p:
3 | name: test_colorspace_yuv420p
4 | description: variations of colorspace yuv420p
5 | app: ffmpeg
6 | suffix: .mov
7 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
8 | wedges:
9 | medium_crf_18: &base_args2
10 | -c:v: libx264
11 | -preset: medium
12 | -crf: 18
13 | -x264-params: '"keyint=15:no-deblock=1"'
14 | -pix_fmt: yuv420p
15 | -sws_flags: spline+accurate_rnd+full_chroma_int
16 | -vf: '"scale=in_range=full:in_color_matrix=bt709:out_range=tv:out_color_matrix=bt709"'
17 | -color_range: 1
18 | -colorspace: 1
19 | -color_primaries: 1
20 | -color_trc: 2
21 |
22 | slow_crf_18:
23 | << : *base_args2
24 | -preset: slow
25 |
26 | slower_crf_18:
27 | << : *base_args2
28 | -preset: slower
29 |
30 | veryslow_crf_18:
31 | << : *base_args2
32 | -preset: veryslow
33 |
34 | placebo_crf_18:
35 | << : *base_args2
36 | -preset: placebo
37 | ---
38 |
39 | reports:
40 | graphs:
41 | - args:
42 | color: -preset
43 | height: 400
44 | barmode: group
45 | x: media
46 | y: vmaf_harmonic_mean
47 | range_y:
48 | - 90
49 | - 100
50 | name: vmaf_harmonic_mean.png
51 | type: bar
52 | - args:
53 | color: -preset
54 | height: 400
55 | x: media
56 | barmode: group
57 | y: encode_time
58 | name: encode_time.png
59 | type: bar
60 | - args:
61 | color: -preset
62 | height: 400
63 | x: media
64 | barmode: group
65 | y: filesize
66 | name: filesize.png
67 | type: bar
68 | name: h264-test
69 | title: H264 Preset Comparison
70 | description: This is a comparison of different Preset values for h264 encoding, with CRF = 18.
71 | directory: h264-encode
72 | templatefile: basic.html.jinja
73 |
74 |
--------------------------------------------------------------------------------
/enctests/test_configs/prores_profile_tests.yml:
--------------------------------------------------------------------------------
1 |
2 | test_prores_profile:
3 | name: test_prores_profile
4 | description: variations of prores profiles
5 | app: ffmpeg
6 | suffix: .mov
7 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
8 | wedges:
9 | proxy: &base_args2
10 | -c:v: prores_ks
11 | -profile:v: proxy
12 | -pix_fmt: yuv422p10le
13 | -color_range: tv
14 | -colorspace: bt709
15 | -color_primaries: bt709
16 | -color_trc: iec61966-2-1
17 |
18 | lt:
19 | << : *base_args2
20 |     -profile:v: lt
21 |
22 | standard:
23 | << : *base_args2
24 | -profile:v: standard
25 |
26 | hq:
27 | << : *base_args2
28 | -profile:v: hq
29 |
30 | t4444:
31 | << : *base_args2
32 | -profile:v: 4444
33 | -pix_fmt: yuv444p10le
34 |
35 | t4444xq:
36 | << : *base_args2
37 | -profile:v: 4444xq
38 | -pix_fmt: yuv444p10le
39 |
40 | ---
41 |
42 |
43 | reports:
44 | graphs:
45 | - args:
46 | color: -profile:v
47 | height: 400
48 | barmode: group
49 | x: media
50 | y: vmaf_harmonic_mean
51 | range_y:
52 | - 90
53 | - 100
54 | name: vmaf_harmonic_mean.png
55 | type: bar
56 | - args:
57 | color: -profile:v
58 | height: 400
59 | x: media
60 | barmode: group
61 | y: encode_time
62 | name: encode_time.png
63 | type: bar
64 | - args:
65 | color: -profile:v
66 | height: 400
67 | x: media
68 | barmode: group
69 | y: filesize
70 | name: filesize.png
71 | type: bar
72 | name: prores-profile-test
73 | title: Prores Profile Comparison
74 | description: This is a comparison of different profile values for prores encoding.
75 | directory: prores-profile-encode
76 | templatefile: basic.html.jinja
77 |
78 |
--------------------------------------------------------------------------------
/enctests/test_configs/prores_tests.yml:
--------------------------------------------------------------------------------
1 | test_prores:
2 | name: test_prores_quality
3 | description: variations of prores_quality
4 | app: ffmpeg
5 | suffix: .mov
6 | encoding_template: 'ffmpeg {input_args} -i "{source}" -vframes {duration} {encoding_args} -y "{outfile}"'
7 | wedges:
8 | qscale_16: &base_args
9 | -c:v: prores_ks
10 | -profile:v: 3
11 | -qscale:v: 16
12 | -pix_fmt: yuv422p10le
13 | -color_range: tv
14 | -colorspace: bt709
15 | -color_primaries: bt709
16 | -color_trc: iec61966-2-1
17 |
18 |
19 | qscale_00:
20 | << : *base_args
21 | -qscale:v: 0
22 |
23 | qscale_02:
24 | << : *base_args
25 | -qscale:v: 2
26 |
27 | qscale_04:
28 | << : *base_args
29 | -qscale:v: 4
30 |
31 | qscale_06:
32 | << : *base_args
33 | -qscale:v: 6
34 |
35 | qscale_08:
36 | << : *base_args
37 | -qscale:v: 08
38 |
39 | qscale_10:
40 | << : *base_args
41 | -qscale:v: 10
42 |
43 | qscale_12:
44 | << : *base_args
45 | -qscale:v: 12
46 |
47 | qscale_14:
48 | << : *base_args
49 | -qscale:v: 14
50 |
54 |
55 | ---
56 |
57 | reports:
58 | graphs:
59 | - args:
60 | color: media
61 | height: 400
62 | x: -qscale:v
63 | y: vmaf_harmonic_mean
64 | markers: True
65 | name: vmaf_harmonic_mean.png
66 | sortby: name
67 | - args:
68 | color: media
69 | height: 400
70 | x: -qscale:v
71 | y: encode_time
72 | markers: True
73 | name: encode_time.png
74 | sortby: name
75 | - args:
76 | color: media
77 | height: 400
78 | x: -qscale:v
79 | y: filesize
80 | markers: True
81 | name: filesize.png
82 | sortby: name
83 | name: prores-test
84 | title: Prores quality comparison.
85 | description: This is a comparison of different quality settings for the prores_ks encoder.
86 | directory: prores-encode
87 | templatefile: basic.html.jinja
--------------------------------------------------------------------------------
/enctests/testframework/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/enctests/testframework/__init__.py
--------------------------------------------------------------------------------
/enctests/testframework/encoders/__init__.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | import opentimelineio as otio
3 |
4 | from .base import ABCTestEncoder
5 | from .ffmpeg_encoder import FFmpegEncoder
6 |
7 |
8 | def encoder_factory(
9 | source_clip: otio.schema.Clip,
10 | test_config: dict,
11 | destination: pathlib.Path
12 | ) -> ABCTestEncoder:
13 |
14 | encoder_map = {
15 | 'ffmpeg': FFmpegEncoder
16 | }
17 |
18 | encoder = encoder_map.get(test_config.get('app'))
19 |
20 | return encoder(source_clip, test_config, destination)
21 |
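# Example usage (hypothetical - the actual call site lives in the test runner):
#   encoder = encoder_factory(source_clip, test_config, pathlib.Path('prores-encode'))
#   media_references = encoder.run_wedges()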
--------------------------------------------------------------------------------
/enctests/testframework/encoders/base.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from abc import ABC, abstractmethod
3 |
4 | import opentimelineio as otio
5 |
6 |
7 | class ABCTestEncoder(ABC):
8 | @abstractmethod
9 | def __init__(
10 | self,
11 | source_clip: otio.schema.Clip,
12 | test_config: dict,
13 | destination: pathlib.Path
14 | ):
15 | self._application_version = None
16 | self.source_clip = source_clip
17 | self.test_config = test_config
18 | self.destination = destination
19 |
20 | @abstractmethod
21 | def run_wedges(self) -> dict:
22 | """
23 | Encode the wedges described in the test configuration.
24 | Returns a dict containing otio.schema.MediaReferences
25 | """
26 |
27 | @abstractmethod
28 | def get_application_version(self) -> str:
29 | """Return version of encoder application
30 | You should cache the application version in the
31 | self._application_version variable
32 | """
33 |
--------------------------------------------------------------------------------
/enctests/testframework/encoders/ffmpeg_encoder.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import shlex
4 | import pathlib
5 | import subprocess
6 | from typing import Tuple
7 | from datetime import datetime, timezone
8 |
9 | import opentimelineio as otio
10 |
11 | from .base import ABCTestEncoder
12 | from ..utils import (
13 | create_media_reference,
14 | get_test_metadata_dict,
15 | sizeof_fmt
16 | )
17 |
18 |
19 | VMAF_LIB_DIR = os.getenv(
20 | 'VMAF_LIB_DIR',
21 | f'{os.path.dirname(__file__)}/.venv/usr/local/lib/x86_64-linux-gnu'
22 | )
23 |
24 |
25 | class FFmpegEncoder(ABCTestEncoder):
26 | def __init__(
27 | self,
28 | source_clip: otio.schema.Clip,
29 | test_config: dict,
30 | destination: pathlib.Path
31 | ):
32 | super(FFmpegEncoder, self).__init__(
33 | source_clip,
34 | test_config,
35 | destination
36 | )
37 |
38 | def run_wedges(self) -> dict:
39 | # The results dictionary is passed to source clip's list of available
40 | # media references. Key is test name and value is media reference
41 | results = {}
42 | for wedge_name, wedge in self.test_config.get('wedges', {}).items():
43 | # Test name is based on main test name and wedge name
44 | test_name = f"{self.test_config.get('name')}-{wedge_name}"
45 | out_file = self.get_output_filename(test_name)
46 |
47 | cmd = self.prep_encoding_command(wedge, out_file)
48 |
49 | print('ffmpeg command:', cmd)
50 |
51 |         # Ensure proper environment (use a copy so we don't mutate os.environ on every wedge)
52 |         env = os.environ.copy()
53 | if 'LD_LIBRARY_PATH' in env:
54 | env['LD_LIBRARY_PATH'] += f'{os.pathsep}{VMAF_LIB_DIR}'
55 |
56 | else:
57 | env.update({'LD_LIBRARY_PATH': VMAF_LIB_DIR})
58 |
59 | # Time encoding process
60 | t1 = time.perf_counter()
61 | # Do the encoding
62 | subprocess.call(shlex.split(cmd), env=env)
63 | # Store timing
64 | enctime = time.perf_counter() - t1
65 |
66 | # !! Use this function from utils to create a media reference
67 | # of output the file.
68 | mr = create_media_reference(out_file, self.source_clip)
69 |
70 | # Update metadata for use later
71 | # !! Use this function from utils to make sure we find the metadata
72 | # later on
73 | test_meta = get_test_metadata_dict(mr)
74 | test_meta['encode_arguments'] = wedge
75 | test_meta['description'] = self.test_config.get('description')
76 |
77 | result_meta = test_meta.setdefault('results', {})
78 | result_meta['completed_utc'] = \
79 | datetime.now(timezone.utc).isoformat()
80 | result_meta['encode_time'] = round(enctime, 4)
81 | result_meta['filesize'] = out_file.stat().st_size
82 |
83 | # Add media reference to result list
84 | results.update({test_name: mr})
85 |
86 | return results
87 |
88 | def get_application_version(self) -> str:
89 | if not self._application_version:
90 | cmd = 'ffmpeg -version -v quiet -hide_banner'
91 | _raw = subprocess.check_output(shlex.split(cmd))
92 | version = b'_'.join(_raw.split(b' ')[:3])
93 |
94 | self._application_version = str(version, 'utf-8')
95 |
96 | return self._application_version
97 |
98 | def prep_encoding_command(self, wedge: dict, out_file: pathlib.Path) -> str:
99 | template = self.test_config.get('encoding_template')
100 |
101 | source_path, _ = self.get_source_path()
102 | source_meta = self.source_clip.metadata['aswf_enctests']['source_info']
103 |
104 | input_args = ''
105 | if source_meta.get('images'):
106 | input_args = f"-start_number {source_meta.get('in')}"
107 |
108 | encoding_args = ' '.join(
109 | [f'{key} {value}' for key, value in wedge.items()]
110 | )
111 |
112 | duration = self.source_clip.source_range.duration.to_frames()
113 |
114 | cmd = template.format(
115 | input_args=input_args,
116 | source=source_path,
117 | duration=duration,
118 | encoding_args=encoding_args,
119 | outfile=out_file
120 | )
121 |
122 | return cmd
123 |
124 | def get_source_path(self) -> Tuple[pathlib.Path, str]:
125 | source_mr = self.source_clip.media_reference
126 | symbol = ''
127 | path = pathlib.Path()
128 | if isinstance(source_mr, otio.schema.ExternalReference):
129 | path = pathlib.Path(source_mr.target_url)
130 |
131 | elif isinstance(source_mr, otio.schema.ImageSequenceReference):
132 | symbol = f'%0{source_mr.frame_zero_padding}d'
133 | path = pathlib.Path(source_mr.abstract_target_url(symbol=symbol))
134 |
135 | return path, symbol
136 |
137 | def get_output_filename(self, test_name: str) -> pathlib.Path:
138 | source_path, symbol = self.get_source_path()
139 | stem = source_path.stem.replace(symbol, '')
140 |
141 | out_file = self.destination.absolute().joinpath(
142 | f"{stem}-{test_name}{self.test_config.get('suffix')}"
143 | )
144 |
145 | return out_file
146 |
147 |
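As a rough illustration of what prep_encoding_command() does, here is the same str.format() expansion performed in isolation. Only the placeholder names ({input_args}, {source}, {duration}, {encoding_args}, {outfile}) come from the code above; the template text, wedge values and file names are assumptions rather than copies of any shipped config:

    # Stand-alone sketch of the template expansion done in prep_encoding_command().
    template = (
        'ffmpeg {input_args} -y -i "{source}" -vframes {duration} '
        '{encoding_args} "{outfile}"'
    )
    # One wedge as it might appear in a test config.
    wedge = {'-c:v': 'libx264', '-preset': 'slow', '-crf': 23}

    encoding_args = ' '.join(f'{key} {value}' for key, value in wedge.items())

    cmd = template.format(
        input_args='-start_number 1',                 # only set for image-sequence sources
        source='chimera_cars_srgb.%05d.png',          # hypothetical source path
        duration=24,
        encoding_args=encoding_args,
        outfile='chimera_cars_srgb-h264-wedge0.mp4',  # hypothetical output name
    )
    print(cmd)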
--------------------------------------------------------------------------------
/enctests/testframework/generatetests.py:
--------------------------------------------------------------------------------
1 | # Generate test configs from the ffmpeg commands embedded in the markdown documentation.
2 | import os
3 | import argparse
4 | import yaml
5 | import re
6 | from pathlib import Path
7 |
8 | try:
9 | from yaml import CSafeLoader as SafeLoader
10 | from yaml import CSafeDumper as SafeDumper
11 |
12 | except ImportError:
13 | from yaml import SafeLoader, SafeDumper
14 |
15 | def parse_args():
16 | parser = argparse.ArgumentParser()
17 |
18 | parser.add_argument(
19 | '--root',
20 | action='store',
21 | default='..',
22 | help='Where to look for *.md files to generate the config file.'
23 | )
24 |
25 | parser.add_argument(
26 | '--output-config',
27 | action='store',
28 | dest='output_config_file',
29 | default="test_configs/documentation_tests.yml",
30 | help='Specify output file to write test wrappers to.'
31 | )
32 |
33 | args = parser.parse_args()
34 |
35 | return args
36 |
37 | def main():
38 | args = parse_args()
39 | rootdir = Path(args.root).absolute()
40 |
41 | markdownfiles = []
42 | for root, dirs, files in os.walk(rootdir):
43 | for name in files:
44 | if name.endswith(".md"):
45 | markdownfiles.append(os.path.join(root, name))
46 |
47 | allinfo = []
48 |
49 | for markdownfile in markdownfiles:
50 | with open(markdownfile, "r") as f:
51 | incomment = False
52 | incommand = False
53 | command = ""
54 | info = ""
55 | lastcommandstart = -1
56 | for linenumber, line in enumerate(f):
57 | if "" in line:
61 | incomment = False
62 | continue
63 | if "```" in line:
64 | if incommand:
65 | incommand = False
66 | if "ffmpeg" in command and info != "":
67 | try:
68 | infostruct = yaml.load(info, SafeLoader)
69 | command = command.replace("\\\n", "").replace("\n", "")
70 | allinfo.append({'config': infostruct, 'command': command, 'file': markdownfile, 'line': lastcommandstart})
71 | except:
72 | pass
73 | command = ""
74 | info = ""
75 | else:
76 | incommand = True
77 | lastcommandstart = linenumber
78 | continue
79 | if incomment:
80 | info = info + line
81 | if incommand:
82 | command = command + line
83 |
84 | # Now we process the scanned data to create the config file.
85 | tests = []
86 |
87 | for testcount, test in enumerate(allinfo):
88 | testconfigs = {}
89 |
90 | testname = test['config'].get("name", "test-%d" % (testcount + 1))
91 | outputfile = test['command'].split(" ")[-1]
92 | outputfileext = outputfile.split(".")[-1]
93 | template = re.sub(r"\s\S+$", ' {encoding_args} -y "{outfile}"', test['command'])
94 | template = re.sub(r"\s-i\s\S+\s", ' {input_args} -i "{source}" ', template)
95 | template = re.sub(r"\s-vframes\s\S+\s", ' -vframes {duration} ', template)
96 |
97 | # Strip input-side flags from the template (the framework supplies -start_number via {input_args}).
98 | for arg in ['-r', '-start_number']:
99 | template = re.sub(r"\s%s\s\S+\s" % arg, ' ', template)
100 |
101 | wedge = {}
102 |
103 | # Extract the main encoding args from the template; they become the wedge parameters.
104 | for arg in ["-qp", "-x264-params", "-qscale:v", "-vf", '-color_range', '-colorspace', '-color_primaries', '-profile:v', '-compression_level', '-pred', '-color_trc', '-pix_fmt', '-preset', '-c:v', '-sws_flags']:
105 | match = re.search(r"\s%s\s(\S+)\s" % arg, template)
106 | if match:
107 | wedge[arg] = match.group(1)
108 | template = template.replace(match.group(0), ' ')
109 |
110 | #print("Wedge:", wedge)
111 |
112 |
113 | print("Name:", testname)
114 | print("Template:", template)
115 | if "sources" in test['config']:
116 | for i in range(0, len(test['config']['sources'])):
117 | test['config']['sources'][i] = os.path.abspath(os.path.join(args.root, test['config']['sources'][i]))
118 | testconfigs[testname] = test['config']
119 | testconfigs[testname]['encoding_template'] = template
120 | testconfigs[testname]['suffix'] = "." + outputfileext
121 | testconfigs[testname]['description'] = "Test from %s line %d" % (test['file'], test['line'])
122 | testconfigs[testname]['name'] = testname
123 | testconfigs[testname]['app'] = 'ffmpeg'
124 | if 'wedges' in testconfigs[testname]:
125 | # We allow additional wedges to be added in the docs config file.
126 | print("Already got wedge:", testconfigs[testname]['wedges'])
127 | testconfigs[testname]['wedges']['wedge0'] = wedge
128 | else:
129 | testconfigs[testname]['wedges'] = {'wedge0': wedge}
130 | tests.append(testconfigs)
131 |
132 | reports = {'reports': {'name': 'doc-tests',
133 | 'title': 'FFmpeg encoding guide tests',
134 | 'description': 'Runs the ffmpeg commands embedded in the documentation to confirm that they produce the desired results.',
135 | 'directory': 'docs-encode',
136 | 'templatefile': 'doctests.html.jinja'}}
137 | tests.append(reports)
138 |
139 | with open(args.output_config_file, "w") as f:
140 | yaml.dump_all(tests, f)
141 |
142 |
143 | if __name__ == '__main__':
144 | main()
145 |
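To make the scanning loop above easier to follow, this is the kind of markdown fragment it looks for: a YAML block inside an HTML comment (the <!-- ... --> markers handled above), immediately followed by a fenced code block containing an ffmpeg command. The YAML keys 'name' and 'sources' are the ones this script reads; the values below are illustrative only:

    # Illustrative input for the scanner above, held in a string for clarity.
    sample_markdown = '''
    <!--
    name: quickstart-example
    sources:
      - sourceimages/chip-chart-1080-noicc.png.yml
    -->
    ```
    ffmpeg -i chip-chart-1080-noicc.png -c:v libx264 -preset slow -pix_fmt yuv420p output.mp4
    ```
    '''

A fragment like this would become one test config entry: the command is rewritten into an encoding_template, and the recognised flags (here -c:v, -preset and -pix_fmt) are split out into a wedge.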
--------------------------------------------------------------------------------
/enctests/testframework/otio2html.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | from pathlib import Path
3 |
4 | from testframework.main import *
5 | from testframework.utils.outputTemplate import processTemplate
6 | # This code ideally ends up in main.py but may also make sense as a standalone tool.
7 |
8 | def parse_args():
9 | parser = argparse.ArgumentParser()
10 |
11 | parser.add_argument(
12 | '--test-config-dir',
13 | action='store',
14 | default='./test_configs',
15 | help='Where to look for *.yml files containing test descriptions'
16 | )
17 |
18 | parser.add_argument(
19 | '--test-config',
20 | action='store',
21 | dest='test_config_file',
22 | default=None,
23 | help='Specify a single test config file to run'
24 | )
25 |
26 |
27 | parser.add_argument(
28 | '--results',
29 | action='store',
30 | default='encoding-test-results.otio',
31 | help='Path to results file including ".otio" extension '
32 | '(default: ./encoding-test-results.otio)'
33 | )
34 |
35 |
36 | args = parser.parse_args()
37 |
38 | return args
39 |
40 | def otio2htmlmain():
41 | args = parse_args()
42 |
43 | # Make sure we have a folder for test configs
44 | Path(args.test_config_dir).mkdir(parents=True, exist_ok=True)
45 |
46 | test_configs = []
47 | if args.test_config_file:
48 | test_configs.extend(parse_config_file(Path(args.test_config_file)))
49 | else:
50 | test_configs.extend(
51 | get_configs(args, args.test_config_dir, ENCODE_TEST_SUFFIX)
52 | )
53 |
54 | timeline = otio.adapters.read_from_file(args.results)
55 |
56 | processTemplate(test_configs, timeline)
57 |
58 | if __name__ == '__main__':
59 | otio2htmlmain()
60 |
--------------------------------------------------------------------------------
/enctests/testframework/templates/basic.html.jinja:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 | {{config.title}}
6 |
7 |
8 |
9 |
10 |
11 |
12 |
{{config.title}}
13 |
{{config.description}}
14 |
15 |
16 |
17 |
18 |
19 | Testing with ffmpeg version: {{ testinfo.ffmpeg_version}}
20 |
21 | {% for media_name, test in tests.items() %}
22 |
23 |
{{ media_name }} Results
24 |
25 |
26 |
File Path
{{test.source_info.path}}
27 |
28 |
29 |
Resolution
{{test.source_info.width}} x {{test.source_info.height}}
57 | {% endfor %}
58 |
59 |
--------------------------------------------------------------------------------
/enctests/testframework/utils/__init__.py:
--------------------------------------------------------------------------------
1 | from .utils import (
2 | sizeof_fmt,
3 | create_clip,
4 | get_media_info,
5 | get_source_path,
6 | get_source_range,
7 | get_nearest_model,
8 | get_test_metadata_dict,
9 | create_media_reference,
10 | get_source_metadata_dict
11 | )
12 |
--------------------------------------------------------------------------------
/enctests/testframework/utils/outputTemplate.py:
--------------------------------------------------------------------------------
1 |
2 | # This module has the following requirements:
3 | # pip install kaleido plotly pandas jinja2
4 |
5 | import os
6 | import plotly.express as px
7 | import opentimelineio as otio
8 | import pandas as pd
9 | import jinja2
10 |
11 | def _exportGraph(reportconfig, graph, alltests):
12 | """
13 | Export a graph using all the test data.
14 | :param reportconfig: The report configuration to output.
15 | :param graph: The specific graph to output.
16 | :param alltests: The raw test data to use.
17 | """
18 | df = pd.DataFrame(alltests)
19 | df = df.sort_values(by=graph.get("sortby", "name"))
20 | if graph.get("type", "line") == "bar":
21 | fig = px.bar(df, **graph.get("args"))
22 | else:
23 | fig = px.line(df, **graph.get("args"))
24 |
25 | filename = reportconfig['name']+"-"+graph.get("name")
26 | if "directory" in reportconfig:
27 | filename = os.path.join(reportconfig['directory'], filename)
28 | if os.path.exists(filename):
29 | # Running inside docker sometimes doesn't let you write over files.
30 | os.remove(filename)
31 | print("Writing out:", filename)
32 | fig.write_image(filename)
33 | print("Written out:", filename)
34 |
35 |
36 |
37 | def processTemplate(test_configs, otio_info):
38 | """
39 | Look for any report configs in the test configurations and apply them to the specified otio file.
40 | :param test_configs: a list of test configurations.
41 | :param otio_info: an otio object containing the test results.
42 | """
43 |
44 | reportconfig = None
45 | for config in test_configs:
46 | if "reports" in config:
47 | reportconfig = config['reports']
48 |
49 | if reportconfig is None:
50 | print("Failed to find report config. Skipping html export.")
51 | exit(0)
52 |
53 | tracks = otio_info.tracks[0]
54 | testinfo = {'ffmpeg_version': tracks.name.replace("ffmpeg_version_", "")}
55 |
56 | tests = {}
57 | alltests = []
58 | for track in tracks:
59 | results = []
60 | for ref_name, test_info in track.media_references().items():
61 | if ref_name == "DEFAULT_MEDIA":
62 | continue
63 | merge_test_info = test_info.metadata['aswf_enctests']['results']
64 | merge_test_info['name'] = ref_name
65 | if 'description' in test_info.metadata['aswf_enctests']:
66 | merge_test_info['test_description'] = test_info.metadata['aswf_enctests']['description']
67 | results.append(merge_test_info)
68 | merge_test_info['media'] = track.name
69 | merge_test_info['vmaf_min'] = float(merge_test_info['vmaf']['min'])
70 | merge_test_info['vmaf_mean'] = float(merge_test_info['vmaf']['mean'])
71 | merge_test_info['vmaf_harmonic_mean'] = float(merge_test_info['vmaf']['harmonic_mean'])
72 | merge_test_info['filesize'] = merge_test_info['filesize']
73 |
74 | # Merge the encode arguments into the dictionary, and also flatten them into a single string, to make the graphing simpler.
75 | args=[]
76 | for k,v in test_info.metadata['aswf_enctests']['encode_arguments'].items():
77 | args.extend([k,str(v)])
78 | merge_test_info[k] = v
79 | merge_test_info['encode_arguments'] = " ".join(args)
80 | alltests.append(merge_test_info)
81 | if track.name in tests:
82 | tests[track.name]['results'].extend(results)
83 | else:
84 | tests[track.name] = {'results': results, 'source_info': track.metadata['aswf_enctests']['source_info']}
85 |
86 | for graph in reportconfig.get("graphs", []):
87 | _exportGraph(reportconfig, graph, alltests)
88 |
89 | environment = jinja2.Environment(loader=jinja2.FileSystemLoader("testframework/templates/"))
90 |
91 | template = environment.get_template(reportconfig['templatefile'])
92 | htmlreport = reportconfig['name']+".html"
93 | if "directory" in reportconfig:
94 | htmlreport = os.path.join(reportconfig['directory'], htmlreport)
95 | if os.path.exists(htmlreport):
96 | # Running inside docker sometimes doesn't let you write over files.
97 | os.remove(htmlreport)
98 | f = open(htmlreport, "w")
99 | f.write(template.render(tests=tests, testinfo=testinfo, config=reportconfig))
100 | f.close()
101 | print("Written out:", htmlreport)
102 |
103 |
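For orientation, this is roughly the shape of the 'reports' block that processTemplate() and _exportGraph() read. Only the keys are taken from the code above ('name', 'directory', 'templatefile', 'graphs', and per-graph 'name', 'type', 'sortby', 'args'); the values are illustrative:

    # Hypothetical report configuration. The 'args' dict is passed straight
    # through to plotly.express.bar() / plotly.express.line() as keyword arguments.
    reportconfig = {
        'name': 'h264-crf-test',
        'directory': 'docs-encode',
        'templatefile': 'basic.html.jinja',
        'graphs': [
            {
                'name': 'encode_time.png',   # appended to the report name for the image file
                'type': 'bar',               # 'bar' or 'line' (the default)
                'sortby': 'name',            # DataFrame column used for sorting
                'args': {'x': 'name', 'y': 'encode_time', 'color': 'media'},
            },
        ],
    }

With a config like this, the graph would be written to docs-encode/h264-crf-test-encode_time.png and the rendered page to docs-encode/h264-crf-test.html.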
--------------------------------------------------------------------------------
/enctests/testframework/utils/utils.py:
--------------------------------------------------------------------------------
1 | import os
2 | import json
3 | import shlex
4 | import pyseq
5 | from copy import deepcopy
6 | from pathlib import Path
7 | from subprocess import run, CalledProcessError
8 |
9 | import opentimelineio as otio
10 |
11 | VMAF_LIB_DIR = os.getenv(
12 | 'VMAF_LIB_DIR',
13 | f'{os.path.dirname(__file__)}/.venv/usr/local/lib/x86_64-linux-gnu'
14 | )
15 |
16 |
17 | # Which vmaf model to use
18 | VMAF_HD_MODEL = os.getenv(
19 | 'VMAF_MODEL',
20 | f'{os.path.dirname(__file__)}/../../tools/vmaf-2.3.1/model'
21 | ) + "/vmaf_v0.6.1.json"
22 |
23 |
24 | VMAF_4K_MODEL = os.getenv(
25 | 'VMAF_MODEL',
26 | f'{os.path.dirname(__file__)}/../../tools/vmaf-2.3.1/model'
27 | ) + "/vmaf_4k_v0.6.1.json"
28 |
29 |
30 | # Based on accepted answer here:
31 | # https://stackoverflow.com/questions/1094841/get-human-readable-version-of-file-size
32 | def sizeof_fmt(path, suffix="B"):
33 | num = os.path.getsize(path)
34 | for unit in ["", "Ki", "Mi", "Gi", "Ti", "Pi", "Ei", "Zi"]:
35 | if abs(num) < 1024.0:
36 | return f"{num:3.1f}{unit}{suffix}"
37 | num /= 1024.0
38 |
39 | return f"{num:.1f}Yi{suffix}"
40 |
41 |
42 | def calculate_rate(rate_str):
43 | numerator, denominator = rate_str.split('/')
44 | return float(numerator) / float(denominator)
45 |
46 |
47 | def get_nearest_model(width):
48 | models = {
49 | 1920: VMAF_HD_MODEL,
50 | 4096: VMAF_4K_MODEL
51 | }
52 | diff = lambda list_value: abs(list_value - width)
53 |
54 | return models[min(models, key=diff)]
55 |
56 |
57 | def get_media_info(path, startframe=None):
58 | input_args = ''
59 | if startframe:
60 | input_args = f'-start_number {startframe} '
61 |
62 | cmd = f'ffprobe ' \
63 | f'-v quiet ' \
64 | f'-hide_banner ' \
65 | f'-print_format json ' \
66 | f'-show_streams ' \
67 | f'{input_args}' \
68 | f'-i "{path.as_posix()}"'
69 |
70 | print(f'Probe command: {cmd}')
71 | env = os.environ.copy()  # copy to avoid mutating the parent environment
72 | if 'LD_LIBRARY_PATH' in env:
73 | env['LD_LIBRARY_PATH'] += f'{os.pathsep}{VMAF_LIB_DIR}'
74 |
75 | else:
76 | env.update({'LD_LIBRARY_PATH': VMAF_LIB_DIR})
77 |
78 | try:
79 | proc = run(shlex.split(cmd), capture_output=True, env=env, check=True)
80 | raw_json = json.loads(proc.stdout)
81 |
82 | except CalledProcessError as err:
83 | print(f'Unable to probe "{path.name}": {err}')
84 | return None
85 |
86 | stream = None
87 | for raw_stream in raw_json.get('streams', []):
88 | if raw_stream.get('codec_type') == 'video':
89 | stream = raw_stream
90 | break
91 |
92 | if not stream:
93 | print(f'Unable to locate video stream in "{path.name}"')
94 | return None
95 |
96 | info = {
97 | 'path': path.as_posix(),
98 | 'width': stream.get('width'),
99 | 'height': stream.get('height'),
100 | 'pix_fmt': stream.get('pix_fmt'),
101 | 'in': startframe or 0,
102 | 'duration': int(stream.get('nb_frames', stream.get('duration_ts', 1))),
103 | 'rate': calculate_rate(stream.get('r_frame_rate'))
104 | }
105 |
106 | return info
107 |
108 |
109 | def create_media_reference(path, source_clip, is_sequence=False):
110 | config = get_source_metadata_dict(source_clip)
111 | rate = float(config.get('rate'))
112 | duration = float(config.get('duration'))
113 |
114 | if is_sequence:
115 | # Create ImageSequenceReference
116 | # TODO: find a less error-prone way to find the correct sequence
117 | seq = max(
118 | pyseq.get_sequences(path.parent.as_posix()),
119 | key=lambda s: s.frames()
120 | )
121 | available_range = otio.opentime.TimeRange(
122 | start_time=otio.opentime.RationalTime(
123 | seq.start(), rate
124 | ),
125 | duration=otio.opentime.RationalTime(
126 | seq.length(), rate
127 | )
128 | )
129 | mr = otio.schema.ImageSequenceReference(
130 | target_url_base=Path(seq.directory()).as_posix(),
131 | name_prefix=seq.head(),
132 | name_suffix=seq.tail(),
133 | start_frame=seq.start(),
134 | frame_step=1,
135 | frame_zero_padding=len(max(seq.digits, key=len)),
136 | rate=rate,
137 | available_range=available_range
138 | )
139 |
140 | else:
141 | # Create ExternalReference
142 | available_range = otio.opentime.TimeRange(
143 | start_time=otio.opentime.RationalTime(
144 | 0, rate
145 | ),
146 | duration=otio.opentime.RationalTime(
147 | duration, rate
148 | )
149 | )
150 | mr = otio.schema.ExternalReference(
151 | target_url=path.resolve().as_posix(),
152 | available_range=available_range,
153 | )
154 | mr.name = path.name
155 |
156 | return mr
157 |
158 |
159 | def get_test_metadata_dict(otio_clip):
160 | aswf_meta = otio_clip.metadata.setdefault('aswf_enctests', {})
161 |
162 | return aswf_meta
163 |
164 |
165 | def get_source_metadata_dict(source_clip):
166 | return source_clip.metadata['aswf_enctests']['source_info']
167 |
168 |
169 | def create_clip(config):
170 | path = Path(config.get('path'))
171 |
172 | clip = otio.schema.Clip(name=path.stem)
173 | clip.metadata.update({'aswf_enctests': {'source_info': deepcopy(config)}})
174 |
175 | # Source range
176 | clip.source_range = get_source_range(config)
177 | clip.start_frame = config.get('in')
178 |
179 | # Check if we have an image sequence source
180 | is_sequence = config.get('images', False)
181 |
182 | # The initial MediaReference is stored as default
183 | mr = create_media_reference(path, clip, is_sequence)
184 | clip.media_reference = mr
185 |
186 | return clip
187 |
188 |
189 | def get_source_range(config):
190 | source_range = otio.opentime.TimeRange(
191 | start_time=otio.opentime.RationalTime(
192 | config.get('in'),
193 | config.get('rate')
194 | ),
195 | duration=otio.opentime.RationalTime.from_seconds(
196 | config.get('duration') /
197 | config.get('rate'),
198 | config.get('rate')
199 | )
200 | )
201 |
202 | return source_range
203 |
204 |
205 | def get_source_path(source_clip):
206 | source_mr = source_clip.media_reference
207 | symbol = ''
208 | path = Path()
209 | if isinstance(source_mr, otio.schema.ExternalReference):
210 | path = Path(source_mr.target_url)
211 |
212 | elif isinstance(source_mr, otio.schema.ImageSequenceReference):
213 | symbol = f'%0{source_mr.frame_zero_padding}d'
214 | path = Path(source_mr.abstract_target_url(symbol=symbol))
215 |
216 | return path, symbol
217 |
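As a small sketch of how these helpers fit together, here is the kind of source-info dictionary create_clip() and get_source_range() expect. The keys mirror what get_media_info() returns (plus the 'images' flag used for image sequences); the values and the path are made up:

    from testframework.utils import create_clip

    # Hypothetical movie source description. With 'images' False, create_clip()
    # builds an ExternalReference, so nothing needs to exist on disk to try this.
    config = {
        'path': 'sources/sample_source.mov',  # made-up path
        'images': False,
        'in': 0,            # start frame
        'duration': 48,     # frames
        'rate': 24.0,       # frames per second
        'width': 1920,
        'height': 1080,
        'pix_fmt': 'yuv420p',
    }

    clip = create_clip(config)
    print(clip.name, clip.source_range.duration.to_frames())  # sample_source 48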
--------------------------------------------------------------------------------
/gamuttests/DisplayP3-asRec2020.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/richardssam/ffmpeg-tests/6a49f4a37c148370b0a25fce05cd99ae2e48680e/gamuttests/DisplayP3-asRec2020.png
--------------------------------------------------------------------------------
/gamuttests/apply-icc2020.py:
--------------------------------------------------------------------------------
1 | import sys
2 | sys.path.append("../tests/python")
3 |
4 | from PIL import Image
5 | from PIL import ImageCms
6 | import shutil
7 | import os
8 | from CompareHtml import createCompareHtml
9 | rootpath = "./iccgamut"
10 | if not os.path.exists(rootpath):
11 | os.makedirs(rootpath)
12 |
13 | source_image = "ps-combined-rec2020-g2.2.png"
14 | listimages = []
15 |
16 | with Image.open(source_image) as im:
17 | profile = ImageCms.getOpenProfile("../ICC/rec2020.icc")
18 | im.save(os.path.join(rootpath, "combined-iccrec2020.png"), icc_profile=profile.tobytes())
19 | listimages.append({'label': 'REC2020 PNG file - ', 'image': os.path.basename(source_image), 'group': 'rec2020 png', 'description': 'rec2020 text should be visible only if you have a rec2020 monitor, P3-D65 only if you have a P3 monitor. (Unless the browser is broken).'})
20 | shutil.copyfile(source_image, os.path.join(rootpath, source_image))
21 | #listimages.append({'label': 'rec2020', 'image': "combined-iccrec2020.png", 'group': 'rec2020 png'})
22 |
23 | source_image_p3 = "ps-combined-displayp3-g2.2.png"
24 | shutil.copyfile(source_image_p3, os.path.join(rootpath, source_image_p3))
25 | listimages.append({'label': 'Display P3 PNG file ', 'image': os.path.basename(source_image_p3), 'group': 'displayp3 png', 'description': 'P3-D65 should be visible only if you have a P3-D65 monitor, (unless the browser is broken).'})
26 |
27 | with Image.open(source_image) as im:
28 | profile = ImageCms.getOpenProfile("../ICC/P3D65.icc")
29 | im.save(os.path.join(rootpath, "combined-iccdisplayp3.png"), icc_profile=profile.tobytes())
30 | #listimages.append({'label': 'displayp3 png', 'image': "combined-iccdisplayp3.png", 'group': 'png'})
31 |
32 | #source_image = "combined-srgb.png"
33 | #with Image.open(source_image) as im:
34 | # profile = ImageCms.getOpenProfile("../ICC/Simplified-sRGB.icc")
35 | # im.save(os.path.join(rootpath, "combined-iccsRGB.png"), icc_profile=profile.tobytes())
36 | #listimages.append({'label': 'sRGB png', 'image': "combined-iccsRGB.png", 'group': 'png'})
37 |
38 |
39 | trc_types = [#{'id': 'rec709', 'label': "-color_primaries 1 = rec709", 'fileext': "rec709", 'primnum': 1, 'group': 'rec709 colortrc', 'source': 'combined-srgb.png'},
40 | {'id': 'displayp3', 'label': "-color_primaries 12 = display p3", 'fileext': "displayp3", 'primnum': 12, 'group': 'displayp3 mp4', 'source': source_image_p3, 'description': 'P3-D65 should be visible only if you have a P3-D65 monitor (unless the browser is broken).'},
41 | #{'id': 'dcip3', 'label': "-color_primaries 11 = DCI p3", 'fileext': "dcip3", 'primnum': 11, #'group': 'rec709 colortrc', 'source': 'combined-dcip3.png'},
42 | {'id': 'rec2020', 'label': "-color_primaries 9 = rec2020", 'fileext': "rec2020", 'primnum': 9, 'group': 'rec2020 mp4', 'source': source_image, 'description': 'rec2020 text should be visible only if you have a rec2020 monitor, P3-D65 only if you have a P3 monitor. (Unless the browser is broken).',}
43 | ]
44 | for trc in trc_types:
45 | # TODO Confirm we have the right one.
46 | trc['rootpath'] = rootpath
47 |
48 | cmd = 'ffmpeg -loop 1 -y -i {source} -sws_flags spline+accurate_rnd+full_chroma_int -vf "scale=in_range=full:in_color_matrix=bt709:out_range=full:out_color_matrix=bt709" -c:v libx264 -t 10 -pix_fmt yuv420p -qscale:v 1 -color_range 2 -colorspace 1 -color_primaries {primnum} -color_trc 13 {rootpath}/greyscale-{fileext}.mp4'.format(**trc); ext="mp4"
49 |
50 | os.system(cmd)
51 | trc['ext'] = ext
52 | listimages.append({'id': trc['id'], 'group': trc.get('group', 'unknown'), 'label': trc['label'], 'video': "greyscale-{fileext}.{ext}".format(**trc), 'cmd': cmd, 'description': trc.get("description", "")})
53 |
54 | # Sort images by group.
55 | listimages = sorted(listimages, key=lambda k:k['group'])
56 |
57 | introduction = """
58 |
Primary comparisons
59 |
This is comparing two PNG files to ffmpeg-converted versions of those files. Ideally you should see the rec2020 text only if your monitor is rec2020, the P3-D65 text only if you have a P3 display, and no text at all otherwise. Since most people do not have rec2020 displays, you shouldn't see the rec2020 text at all. The code to generate these files is here. You can reorder the images to help with comparison by drag and drop.
This is comparing different ways to do the YUV conversion. We are doing it in both 420p and 444p since 444p is a fairer binary image comparison. The takeaway should be to use the libswscale filter. The code to generate these files is here.
Comparing full range encoding vs. tv range, but also yuv420p vs. yuvj420p. We believe that this is well supported in web browsers, and don't see a downside to it. There may be cases where other applications do not read it. The code to generate these files is here. A full screen view is here.
This is comparing a png file written with different ICC profiles (i.e. the underlying data is identical in all png files), and comparing it to mp4s where the only change is the -color_trc flag setting, along with mov files with different gamma values. The code to generate these files is here. You can reorder the wedges to help with comparison by drag and drop.
This is trying to reverse out what we think is the gamma for each TRC file, with the hope that, if the browser is correctly obeying the flag, all the ramps will approximately match. The code to generate these files is here. However, the source images were generated in Photoshop, by taking the raw.png file, assigning an sRGB profile to it, and then converting to a custom profile, adjusting the gamma but keeping D65 and HDTV primaries.
This is trying to reverse out what we think is the gamma for each TRC file, with the hope that, if the browser is correctly obeying the flag, all the ramps will approximately match. The code to generate these files is here. However, the source images were generated in Nuke.
This is comparing a png file written with different ICC profiles (i.e. the underlying data is identical in all png files), and comparing it to mp4s where the only change is the -color_trc flag setting. The code to generate these files is here. You can reorder the wedges to help with comparison by drag and drop.
" % os.path.basename(__file__),
75 | videohtml = ' '
76 | )
77 |
--------------------------------------------------------------------------------
/tests/python/CompareHtml.py:
--------------------------------------------------------------------------------
1 | # Now we build the web page.
2 |
3 | # This is a super basic HTML template file, trying to keep the number of dependencies down for now.
4 |
5 | header = """
6 |
7 |
8 |
9 |
10 |
11 |
20 |
35 |
36 |
37 | {introduction}
38 |
39 |
40 | """
41 |
42 | image_template = """
43 |
44 |
{label} png
{description}
{cmd}
45 |
46 | """
47 | video_template = """
48 |
49 |
{label} {ext}
{description}
{cmd}
50 |
51 | """
52 |
53 | tail = """
54 |
55 |
56 |
57 |
58 |
59 |
60 | """
61 |
62 | def createCompareHtml(outputpath="compare.html", listimages=[], introduction="", videohtml=" width='1024' height='150' ", cellspacing=1):
63 | """
64 | :param outputpath: output path for the html file
65 | :param listimages: list of dictionaries of things to output. Each item has a "label", and either an "image" or a "video" key.
66 | :param introduction: An introduction for the top of the file.
67 | """
68 |
69 | html = header.format(introduction=introduction, cellspacing=cellspacing)
70 | for output in listimages:
71 | if 'group' not in output:
72 | output['group'] = ""
73 | if 'id' not in output:
74 | output['id'] = ""
75 | if 'cmd' not in output:
76 | output['cmd'] = ""
77 | if 'description' not in output:
78 | output['description'] = ""
79 | if "image" in output:
80 | html += image_template.format(**output)
81 | else:
82 | output['videohtml'] = videohtml
83 | output['ext'] = output['video'][-3:]
84 | html += video_template.format(**output)
85 |
86 | html += tail
87 |
88 | f = open(outputpath, "w")
89 | f.write(html)
90 | f.close()
91 |
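A hypothetical call, just to show the shape of the entries createCompareHtml() expects: each dict needs a 'label' plus either an 'image' or a 'video' key, while 'group', 'id', 'cmd' and 'description' are optional and default to empty strings. The file names below are placeholders:

    from CompareHtml import createCompareHtml

    listimages = [
        {'label': 'Source still', 'image': 'greyscale-raw.png',
         'group': 'greyscale', 'description': 'Reference PNG'},
        {'label': 'h264 encode', 'video': 'greyscale-rec709.mp4',
         'group': 'greyscale',
         'cmd': 'ffmpeg -i greyscale-raw.png -c:v libx264 -crf 18 greyscale-rec709.mp4'},
    ]

    createCompareHtml(
        outputpath='compare-example.html',
        listimages=listimages,
        introduction='<h1>Greyscale comparison</h1>',
    )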
--------------------------------------------------------------------------------
/tests/python/CompareOverHtml.py:
--------------------------------------------------------------------------------
1 | # Now we build the web page.
2 |
3 | # This is a super basic HTML template file, trying to keep the number of dependencies down for now.
4 |
5 | header = """
6 |
7 |
8 |
9 |
10 |
11 |
21 |
40 |
41 |
42 |
43 |
44 |
45 | {introduction}
46 |
47 |
74 |
75 |
76 |
77 |
78 |
79 | """
80 |
81 | def createCompareHtml(outputpath="compare.html", listimages=[], introduction="", videohtml=" width='1024' height='150' "):
82 | """
83 | :param outputpath: output path for the html file
84 | :param listimages: list of dictionaries of things to output. Each item has a "label", and either an "image" or a "video" key.
85 | :param introduction: An introduction for the top of the file.
86 | """
87 |
88 | html = header.format(introduction=introduction)
89 | for output in listimages:
90 | html += script_template.format(**output)
91 | listimages[1]['introduction'] = introduction
92 | html += body_begin.format(**listimages[1])
93 | for output in listimages:
94 | if "video" not in output and 'image' not in output:
95 | html += label_maintemplate.format(**output)
96 | continue
97 | html += button_template.format(**output)
98 | html += "
"
99 | for output in listimages:
100 | output['videohtml'] = videohtml
101 | if 'description' not in output:
102 | output['description'] = ''
103 | if 'cmd' not in output:
104 | output['cmd'] = ''
105 | if "image" in output:
106 | html += image_template.format(**output)
107 | elif "video" not in output:
108 | html += label_template.format(**output)
109 | else:
110 | output['videohtml'] = videohtml
111 | html += video_template.format(**output)
112 |
113 | html += tail
114 |
115 | f = open(outputpath, "w")
116 | f.write(html)
117 | f.close()
118 |
--------------------------------------------------------------------------------
/tests/runall.sh:
--------------------------------------------------------------------------------
1 | python3 chip-color-test.py
2 | python3 icctest.py
3 | python3 icctest-fulltv.py
4 | # python3 icctest-rev-ps-nuke.py
5 | # python3 icctest-rev-ps.py
6 | python3 icctest-osx.py
7 |
--------------------------------------------------------------------------------