├── .github └── FUNDING.yml ├── examples ├── transparentGradient.json5 ├── kenBurns.json5 ├── videos.json5 ├── run-all-examples.sh ├── resizeHorizontal.json5 ├── gl.json5 ├── subtitle.json5 ├── transitionEasing.json5 ├── speedTest.json5 ├── resizeVertical.json5 ├── customFabric.js ├── customCanvas.js ├── README.md ├── losslesscut.json5 └── commonFeatures.json5 ├── .eslintrc ├── shaders └── rainbow-colors.frag ├── sources ├── shared.js ├── glFrameSource.js ├── frameSource.js ├── videoFrameSource.js └── fabricFrameSource.js ├── LICENSE ├── package.json ├── util.js ├── .gitignore ├── glTransitions.js ├── transitions.js ├── colors.js ├── cli.js ├── README.md └── index.js /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: mifi 2 | custom: https://paypal.me/mifino 3 | -------------------------------------------------------------------------------- /examples/transparentGradient.json5: -------------------------------------------------------------------------------- 1 | { 2 | // fast: true, 3 | outPath: './transparentGradient.mp4', 4 | clips: [ 5 | { duration: 0.1, layers: [{ type: 'fill-color', color: 'green' }, { type: 'linear-gradient', colors: ['#ffffffff', '#ffffff00'] }] }, 6 | ], 7 | } 8 | -------------------------------------------------------------------------------- /.eslintrc: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "airbnb-base", 3 | "env": { 4 | "node": true 5 | }, 6 | "parserOptions": { 7 | "sourceType": "script" 8 | }, 9 | "rules": { 10 | "max-len": 0, 11 | "no-console": 0, 12 | "object-curly-newline": 0, 13 | "no-await-in-loop": 0, 14 | } 15 | } 16 | -------------------------------------------------------------------------------- /examples/kenBurns.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: './kenBurns.mp4', 3 | defaults: { 4 | transition: { name: 'fade' }, 5 | }, 6 | clips: [ 7 | { duration: 3, layers: [{ type: 'image', path: './assets/img2.jpg', zoomDirection: 'out' }] }, 8 | { duration: 3, layers: [{ type: 'image', path: './assets/img3.jpg', zoomDirection: 'in' }] }, 9 | ], 10 | } 11 | -------------------------------------------------------------------------------- /examples/videos.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: './videos.mp4', 3 | defaults: { 4 | transition: { 5 | name: 'linearblur', 6 | }, 7 | }, 8 | clips: [ 9 | { layers: [{ type: 'video', path: './assets/IMG_4605.MOV', cutFrom: 0, cutTo: 2 }] }, 10 | { layers: [{ type: 'video', path: './assets/IMG_1884.MOV', cutFrom: 0, cutTo: 2 }] }, 11 | ], 12 | } -------------------------------------------------------------------------------- /shaders/rainbow-colors.frag: -------------------------------------------------------------------------------- 1 | #ifdef GL_ES 2 | precision mediump float; 3 | #endif 4 | 5 | uniform float time; 6 | uniform vec2 resolution; 7 | 8 | void main() { 9 | vec2 st = gl_FragCoord.xy/resolution.xy; 10 | st.x *= resolution.x/resolution.y; 11 | 12 | vec3 color = vec3(0.); 13 | color = vec3(st.x,st.y,abs(sin(time))); 14 | 15 | gl_FragColor = vec4(color,1.0); 16 | } 17 | -------------------------------------------------------------------------------- /examples/run-all-examples.sh: -------------------------------------------------------------------------------- 1 | #/bin/bash 2 | set -e 3 | 4 | node ../cli.js --json gl.json5 5 | node ../cli.js --json 
image.json5 6 | node ../cli.js --json losslesscut.json5 7 | node ../cli.js --json resizeHorizontal.json5 8 | node ../cli.js --json resizeVertical.json5 9 | node ../cli.js --json speedTest.json5 10 | node ../cli.js --json subtitle.json5 11 | node ../cli.js --json transitionEasing.json5 12 | node ../cli.js --json transparentGradient.json5 13 | node ../cli.js --json commonFeatures.json5 14 | -------------------------------------------------------------------------------- /examples/resizeHorizontal.json5: -------------------------------------------------------------------------------- 1 | { 2 | width: 320, height: 240, 3 | outPath: './resizeHorizontal.mp4', 4 | defaults: { 5 | transition: null, 6 | layer: { backgroundColor: 'white' }, 7 | }, 8 | clips: [ 9 | { layers: [{ type: 'video', path: './assets/IMG_4605.MOV', cutFrom: 0.4, cutTo: 2 }] }, 10 | { layers: [{ type: 'video', path: './assets/IMG_4605.MOV', cutFrom: 0.4, cutTo: 2, resizeMode: 'contain' }] }, 11 | { layers: [{ type: 'video', path: './assets/IMG_4605.MOV', cutFrom: 0.4, cutTo: 2, resizeMode: 'stretch' }] }, 12 | ], 13 | } 14 | -------------------------------------------------------------------------------- /examples/gl.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: './gl.mp4', 3 | clips: [ 4 | { transition: null, duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/3l23Rh.frag' }] }, 5 | { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/MdXyzX.frag' }] }, 6 | { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/30daysofshade_010.frag', speed: 1 }] }, 7 | { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/rainbow-background.frag' }] }, 8 | { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/wd2yDm.frag', speed: 5 }] }, 9 | ], 10 | } -------------------------------------------------------------------------------- /examples/subtitle.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: './subtitle.mp4', 3 | defaults: { 4 | layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, 5 | }, 6 | clips: [ 7 | { duration: 2, layers: [ 8 | { type: 'rainbow-colors' }, 9 | { type: 'subtitle', text: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident.' 
}, 10 | { type: 'title', position: 'top', text: 'Subtitles' }, 11 | ] }, 12 | ], 13 | } -------------------------------------------------------------------------------- /examples/transitionEasing.json5: -------------------------------------------------------------------------------- 1 | { 2 | fast: true, 3 | outPath: './transitionEasing.mp4', 4 | defaults: { 5 | duration: 2, 6 | }, 7 | clips: [ 8 | { transition: { name: 'directional', duration: 0.5 }, layers: [{ type: 'video', path: '/Users/mifi/Desktop/photos/drone koh lipe/DJI_0402.MOV', cutTo: 2 }] }, 9 | { transition: { name: 'directional', duration: 0.5, params: { direction: [1, 0] } }, layers: [{ type: 'video', path: '/Users/mifi/Desktop/photos/drone koh lipe/DJI_0403.MOV', cutTo: 2 }] }, 10 | // { transition: { name: 'directional', duration: 0.5, easing: null }, layers: [{ type: 'video', path: '/Users/mifi/Desktop/photos/drone koh lipe/DJI_0403.MOV', cutTo: 2 }] }, 11 | { layers: [{ type: 'pause' }] }, 12 | ], 13 | } 14 | -------------------------------------------------------------------------------- /examples/speedTest.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: './speedTest.mp4', 3 | defaults: { 4 | transition: null, 5 | layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, 6 | }, 7 | clips: [ 8 | { duration: 2, layers: [{ type: 'title-background', text: 'Speed up or slow down video', background: { type: 'radial-gradient' } }] }, 9 | { duration: 2, layers: [{ type: 'video', path: './assets/IMG_4605.MOV', cutFrom: 0, cutTo: 2 }, { type: 'title', text: 'Same speed' }] }, 10 | { duration: 1, layers: [{ type: 'video', path: './assets/IMG_4605.MOV', cutFrom: 0, cutTo: 4 }, { type: 'title', text: '4x' }] }, 11 | { duration: 2, layers: [{ type: 'video', path: './assets/IMG_4605.MOV', cutFrom: 0, cutTo: 1 }, { type: 'title', text: '1/2x' }] }, 12 | ], 13 | } -------------------------------------------------------------------------------- /sources/shared.js: -------------------------------------------------------------------------------- 1 | function canvasToRgba(ctx) { 2 | // const bgra = canvas.toBuffer('raw'); 3 | 4 | /* const rgba = Buffer.allocUnsafe(bgra.length); 5 | for (let i = 0; i < bgra.length; i += 4) { 6 | rgba[i + 0] = bgra[i + 2]; 7 | rgba[i + 1] = bgra[i + 1]; 8 | rgba[i + 2] = bgra[i + 0]; 9 | rgba[i + 3] = bgra[i + 3]; 10 | } */ 11 | 12 | // We cannot use toBuffer('raw') because it returns pre-multiplied alpha data (a different format) 13 | // https://gamedev.stackexchange.com/questions/138813/whats-the-difference-between-alpha-and-premulalpha 14 | // https://github.com/Automattic/node-canvas#image-pixel-formats-experimental 15 | const imageData = ctx.getImageData(0, 0, ctx.canvas.width, ctx.canvas.height); 16 | return Buffer.from(imageData.data); 17 | } 18 | 19 | module.exports = { 20 | canvasToRgba, 21 | }; 22 | -------------------------------------------------------------------------------- /examples/resizeVertical.json5: -------------------------------------------------------------------------------- 1 | { 2 | width: 240, height: 320, fps: 15, 3 | outPath: './resizeVertical.mp4', 4 | defaults: { 5 | transition: { duration: 0 }, 6 | layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, 7 | }, 8 | clips: [ 9 | { duration: 2, layers: [{ type: 'title-background', text: 'Editly can handle all formats and sizes with different fits', background: { type: 'radial-gradient' } }] }, 10 | { layers: [{ type: 'video', path: './assets/IMG_1322.MOV', 
cutFrom: 0, cutTo: 2, resizeMode: 'contain' }, { type: 'title', text: 'Contain' }] }, 11 | { layers: [{ type: 'video', path: './assets/IMG_1322.MOV', cutFrom: 0, cutTo: 2, resizeMode: 'stretch' }, { type: 'title', text: 'Stretch' }] }, 12 | { layers: [{ type: 'video', path: './assets/IMG_1322.MOV', cutFrom: 0, cutTo: 2 }, { type: 'title', text: 'Cover' }] }, 13 | ], 14 | } -------------------------------------------------------------------------------- /examples/customFabric.js: -------------------------------------------------------------------------------- 1 | const editly = require('..'); 2 | 3 | /* eslint-disable spaced-comment,no-param-reassign */ 4 | 5 | async function func({ width, height, fabric, canvas }) { 6 | async function onRender(progress) { 7 | canvas.backgroundColor = 'hsl(33, 100%, 50%)'; 8 | 9 | const text = new fabric.Text(`PROGRESS\n${Math.floor(progress * 100)}%`, { 10 | originX: 'center', 11 | originY: 'center', 12 | left: width / 2, 13 | top: (height / 2) * (1 + (progress * 0.1 - 0.05)), 14 | fontSize: 20, 15 | textAlign: 'center', 16 | fill: 'white', 17 | }); 18 | 19 | canvas.add(text); 20 | } 21 | 22 | function onClose() { 23 | // Cleanup if you initialized anything 24 | } 25 | 26 | return { onRender, onClose }; 27 | } 28 | 29 | editly({ 30 | // fast: true, 31 | outPath: './customFabric.gif', 32 | // outPath: './customFabric.mp4', 33 | clips: [ 34 | { duration: 2, layers: [{ type: 'fabric', func }] }, 35 | ], 36 | }).catch(console.error); 37 | -------------------------------------------------------------------------------- /examples/customCanvas.js: -------------------------------------------------------------------------------- 1 | const editly = require('..'); 2 | 3 | async function func({ canvas }) { 4 | async function onRender(progress) { 5 | const context = canvas.getContext('2d'); 6 | const centerX = canvas.width / 2; 7 | const centerY = canvas.height / 2; 8 | const radius = 40 * (1 + progress * 0.5); 9 | 10 | context.beginPath(); 11 | context.arc(centerX, centerY, radius, 0, 2 * Math.PI, false); 12 | context.fillStyle = 'hsl(350, 100%, 37%)'; 13 | context.fill(); 14 | context.lineWidth = 5; 15 | context.strokeStyle = '#ffffff'; 16 | context.stroke(); 17 | } 18 | 19 | function onClose() { 20 | // Cleanup if you initialized anything 21 | } 22 | 23 | return { onRender, onClose }; 24 | } 25 | 26 | editly({ 27 | // fast: true, 28 | // outPath: './customCanvas.mp4', 29 | outPath: './customCanvas.gif', 30 | clips: [ 31 | { duration: 2, 32 | layers: [ 33 | { type: 'rainbow-colors' }, 34 | { type: 'canvas', func }, 35 | ] }, 36 | ], 37 | }).catch(console.error); 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Mikael Finstad 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 
14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "editly", 3 | "description": "Simple, sexy, declarative video editing", 4 | "version": "0.2.0", 5 | "main": "index.js", 6 | "author": "Mikael Finstad ", 7 | "license": "MIT", 8 | "dependencies": { 9 | "canvas": "^2.6.1", 10 | "execa": "^4.0.0", 11 | "fabric": "^3.6.3", 12 | "file-type": "^14.1.4", 13 | "file-url": "^3.0.0", 14 | "fs-extra": "^9.0.0", 15 | "gl": "^4.5.0", 16 | "gl-buffer": "^2.1.2", 17 | "gl-shader": "^4.2.1", 18 | "gl-texture2d": "^2.1.0", 19 | "gl-transition": "^1.13.0", 20 | "gl-transitions": "^1.43.0", 21 | "json5": "^2.1.3", 22 | "lodash": "^4.17.15", 23 | "meow": "^6.1.0", 24 | "ndarray": "^1.0.19", 25 | "p-map": "^4.0.0" 26 | }, 27 | "scripts": { 28 | "test": "exit 0" 29 | }, 30 | "repository": { 31 | "type": "git", 32 | "url": "git+https://github.com/mifi/editly.git" 33 | }, 34 | "bin": { 35 | "editly": "cli.js" 36 | }, 37 | "devDependencies": { 38 | "eslint": "^5.16.0 || ^6.8.0", 39 | "eslint-config-airbnb-base": "^14.1.0", 40 | "eslint-plugin-import": "^2.20.1" 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /util.js: -------------------------------------------------------------------------------- 1 | const execa = require('execa'); 2 | 3 | function parseFps(fps) { 4 | const match = typeof fps === 'string' && fps.match(/^([0-9]+)\/([0-9]+)$/); 5 | if (match) { 6 | const num = parseInt(match[1], 10); 7 | const den = parseInt(match[2], 10); 8 | if (den > 0) return num / den; 9 | } 10 | return undefined; 11 | } 12 | 13 | async function readFileInfo(p) { 14 | const { stdout } = await execa('ffprobe', [ 15 | '-select_streams', 'v:0', '-show_entries', 'stream', '-of', 'json', p, 16 | ]); 17 | const json = JSON.parse(stdout); 18 | const stream = json.streams[0]; 19 | 20 | const rotation = stream.tags && stream.tags.rotate && parseInt(stream.tags.rotate, 10); 21 | return { 22 | // numFrames: parseInt(stream.nb_frames, 10), 23 | duration: parseFloat(stream.duration, 10), 24 | width: stream.width, // TODO coded_width? 25 | height: stream.height, 26 | framerateStr: stream.r_frame_rate, 27 | rotation: !Number.isNaN(rotation) ? 
rotation : undefined, 28 | }; 29 | } 30 | 31 | const multipleOf2 = (x) => (x + (x % 2)); 32 | 33 | module.exports = { 34 | parseFps, 35 | readFileInfo, 36 | multipleOf2, 37 | }; 38 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | ## Ken Burns zoom slideshow 4 | 5 | ![](https://github.com/mifi/gifs/raw/master/kenburns.gif) 6 | 7 | [kenBurns.json5](https://github.com/mifi/editly/blob/master/examples/kenBurns.json5) 8 | 9 | ```bash 10 | editly kenBurns.json5 11 | ``` 12 | 13 | ## Resize modes 14 | 15 | ![](https://github.com/mifi/gifs/raw/master/resizeHorizontal.gif) 16 | 17 | [resizeHorizontal.json5](https://github.com/mifi/editly/blob/master/examples/resizeHorizontal.json5) 18 | 19 | ```bash 20 | editly resizeHorizontal.json5 21 | ``` 22 | 23 | ## Speed up / slow down with cutting 24 | 25 | ![](https://github.com/mifi/gifs/raw/master/speedTest.gif) 26 | 27 | [speedTest.json5](https://github.com/mifi/editly/blob/master/examples/speedTest.json5) 28 | 29 | ```bash 30 | editly speedTest.json5 31 | ``` 32 | 33 | ## Title and subtitle 34 | 35 | ![](https://github.com/mifi/gifs/raw/master/subtitle.gif) 36 | 37 | [subtitle.json5](https://github.com/mifi/editly/blob/master/examples/subtitle.json5) 38 | 39 | ```bash 40 | editly subtitle.json5 41 | ``` 42 | 43 | ## Custom HTML5 canvas Javascript 44 | 45 | ![](https://github.com/mifi/gifs/raw/master/customCanvas.gif) 46 | 47 | [customCanvas.js](https://github.com/mifi/editly/blob/master/examples/customCanvas.js) 48 | 49 | 50 | ```bash 51 | node customCanvas.js 52 | ``` 53 | 54 | ## Custom Fabric.js 55 | 56 | ![](https://github.com/mifi/gifs/raw/master/customFabric.gif) 57 | 58 | [customFabric.js](https://github.com/mifi/editly/blob/master/examples/customFabric.js) 59 | 60 | 61 | ```bash 62 | node customFabric.js 63 | ``` 64 | 65 | ## LosslessCut tutorial 66 | 67 | [This video](https://www.youtube.com/watch?v=pYHMxXy05Jg) was created with [losslesscut.json5](https://github.com/mifi/editly/blob/master/examples/losslesscut.json5) -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | 
.rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | 106 | 107 | /examples/assets 108 | /examples/*.mp4 109 | /examples/*.gif 110 | -------------------------------------------------------------------------------- /sources/glFrameSource.js: -------------------------------------------------------------------------------- 1 | const GL = require('gl'); 2 | const createShader = require('gl-shader'); 3 | const fs = require('fs-extra'); 4 | 5 | // I have no idea what I'm doing but it works ¯\_(ツ)_/¯ 6 | 7 | async function createGlFrameSource({ width, height, channels, params }) { 8 | const gl = GL(width, height); 9 | 10 | const defaultVertexSrc = ` 11 | attribute vec2 position; 12 | void main(void) { 13 | gl_Position = vec4(position, 0.0, 1.0 ); 14 | } 15 | `; 16 | const { vertexPath, fragmentPath, vertexSrc: vertexSrcIn, fragmentSrc: fragmentSrcIn, speed = 1 } = params; 17 | 18 | let fragmentSrc = fragmentSrcIn; 19 | let vertexSrc = vertexSrcIn; 20 | 21 | if (fragmentPath) fragmentSrc = await fs.readFile(fragmentPath); 22 | if (vertexPath) vertexSrc = await fs.readFile(vertexPath); 23 | 24 | if (!vertexSrc) vertexSrc = defaultVertexSrc; 25 | 26 | const shader = createShader(gl, vertexSrc, fragmentSrc); 27 | const buffer = gl.createBuffer(); 28 | gl.bindBuffer(gl.ARRAY_BUFFER, buffer); 29 | // https://blog.mayflower.de/4584-Playing-around-with-pixel-shaders-in-WebGL.html 30 | 31 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, 1, 1, -1, 1]), gl.STATIC_DRAW); 32 | 33 | async function readNextFrame(progress) { 34 | shader.bind(); 35 | 36 | shader.attributes.position.pointer(); 37 | 38 | shader.uniforms.resolution = [width, height]; 39 | shader.uniforms.time = progress * speed; 40 | 41 | gl.drawArrays(gl.TRIANGLE_FAN, 0, 4); 42 | 43 | const upsideDownArray = Buffer.allocUnsafe(width * height * channels); 44 | gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, upsideDownArray); 45 | const outArray = Buffer.allocUnsafe(width * height * channels); 46 | 47 | // Comes out upside down, flip it 48 | for (let i = 0; i < outArray.length; i += 4) { 49 | outArray[i + 0] = upsideDownArray[outArray.length - i + 0]; 50 | outArray[i + 1] = upsideDownArray[outArray.length - i + 1]; 51 | outArray[i + 2] = upsideDownArray[outArray.length - i + 2]; 52 | outArray[i + 3] = upsideDownArray[outArray.length - i + 3]; 53 | } 54 | return outArray; 55 | } 56 | 57 | return { 58 | readNextFrame, 59 | close: () => {}, 60 | }; 61 | } 62 | 63 | module.exports = { 64 | createGlFrameSource, 65 | }; 66 | -------------------------------------------------------------------------------- /glTransitions.js: 
-------------------------------------------------------------------------------- 1 | const GL = require('gl'); 2 | const ndarray = require('ndarray'); 3 | const createBuffer = require('gl-buffer'); 4 | const transitions = require('gl-transitions'); 5 | const createTransition = require('gl-transition').default; 6 | const createTexture = require('gl-texture2d'); 7 | 8 | module.exports = ({ width, height, channels }) => { 9 | const gl = GL(width, height); 10 | 11 | function runTransitionOnFrame({ fromFrame, toFrame, progress, transitionName, transitionParams = {} }) { 12 | function convertFrame(buf) { 13 | // @see https://github.com/stackgl/gl-texture2d/issues/16 14 | return ndarray(buf, [width, height, channels], [channels, width * channels, 1]); 15 | } 16 | 17 | const buffer = createBuffer(gl, 18 | [-1, -1, -1, 4, 4, -1], 19 | gl.ARRAY_BUFFER, 20 | gl.STATIC_DRAW); 21 | 22 | let transition; 23 | 24 | try { 25 | const resizeMode = 'stretch'; 26 | 27 | const transitionSource = transitions.find((t) => t.name.toLowerCase() === transitionName.toLowerCase()); 28 | 29 | transition = createTransition(gl, transitionSource, { resizeMode }); 30 | 31 | gl.clear(gl.COLOR_BUFFER_BIT); 32 | 33 | // console.time('runTransitionOnFrame internal'); 34 | const fromFrameNdArray = convertFrame(fromFrame); 35 | const textureFrom = createTexture(gl, fromFrameNdArray); 36 | textureFrom.minFilter = gl.LINEAR; 37 | textureFrom.magFilter = gl.LINEAR; 38 | 39 | // console.timeLog('runTransitionOnFrame internal'); 40 | const toFrameNdArray = convertFrame(toFrame); 41 | const textureTo = createTexture(gl, toFrameNdArray); 42 | textureTo.minFilter = gl.LINEAR; 43 | textureTo.magFilter = gl.LINEAR; 44 | 45 | buffer.bind(); 46 | transition.draw(progress, textureFrom, textureTo, gl.drawingBufferWidth, gl.drawingBufferHeight, transitionParams); 47 | 48 | textureFrom.dispose(); 49 | textureTo.dispose(); 50 | 51 | // console.timeLog('runTransitionOnFrame internal'); 52 | 53 | const outArray = Buffer.allocUnsafe(width * height * 4); 54 | gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, outArray); 55 | 56 | // console.timeEnd('runTransitionOnFrame internal'); 57 | 58 | return outArray; 59 | 60 | // require('fs').writeFileSync(`${new Date().getTime()}.raw`, outArray); 61 | // Testing: ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i 1586619627191.raw -vf format=yuv420p -vcodec libx264 -y out.mp4 62 | } finally { 63 | buffer.dispose(); 64 | if (transition) transition.dispose(); 65 | } 66 | } 67 | 68 | return { 69 | runTransitionOnFrame, 70 | }; 71 | }; 72 | -------------------------------------------------------------------------------- /sources/frameSource.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | const pMap = require('p-map'); 3 | 4 | const { mergeFrames, customFabricFrameSource, createCustomCanvasFrameSource, titleFrameSource, subtitleFrameSource, imageFrameSource, linearGradientFrameSource, radialGradientFrameSource, fillColorFrameSource, createFabricFrameSource } = require('./fabricFrameSource'); 5 | const createVideoFrameSource = require('./videoFrameSource'); 6 | const { createGlFrameSource } = require('./glFrameSource'); 7 | 8 | 9 | async function createFrameSource({ clip, clipIndex, width, height, channels, verbose, enableFfmpegLog, framerateStr }) { 10 | const { layers, duration } = clip; 11 | 12 | const frameSources = await pMap(layers, async (layer, layerIndex) => { 13 | const { type, ...params } = layer; 
14 | console.log('createFrameSource', type, 'clip', clipIndex, 'layer', layerIndex); 15 | 16 | const frameSourceFuncs = { 17 | video: createVideoFrameSource, 18 | gl: createGlFrameSource, 19 | canvas: createCustomCanvasFrameSource, 20 | fabric: async (opts) => createFabricFrameSource(customFabricFrameSource, opts), 21 | image: async (opts) => createFabricFrameSource(imageFrameSource, opts), 22 | title: async (opts) => createFabricFrameSource(titleFrameSource, opts), 23 | subtitle: async (opts) => createFabricFrameSource(subtitleFrameSource, opts), 24 | 'linear-gradient': async (opts) => createFabricFrameSource(linearGradientFrameSource, opts), 25 | 'radial-gradient': async (opts) => createFabricFrameSource(radialGradientFrameSource, opts), 26 | 'fill-color': async (opts) => createFabricFrameSource(fillColorFrameSource, opts), 27 | }; 28 | assert(frameSourceFuncs[type], `Invalid type ${type}`); 29 | 30 | const createFrameSourceFunc = frameSourceFuncs[type]; 31 | 32 | return createFrameSourceFunc({ width, height, duration, channels, verbose, enableFfmpegLog, framerateStr, params }); 33 | }, { concurrency: 1 }); 34 | 35 | async function readNextFrame(...args) { 36 | const framesRaw = await pMap(frameSources, async (frameSource) => frameSource.readNextFrame(...args)); 37 | // if (verbose) console.time('Merge frames'); 38 | 39 | const framesRawFiltered = framesRaw.filter((frameRaw) => { 40 | if (frameRaw) return true; 41 | console.warn('Frame source returned empty result'); 42 | return false; 43 | }); 44 | const merged = mergeFrames({ width, height, framesRaw: framesRawFiltered }); 45 | // if (verbose) console.timeEnd('Merge frames'); 46 | return merged; 47 | } 48 | 49 | async function close() { 50 | await pMap(frameSources, async (frameSource) => frameSource.close()); 51 | } 52 | 53 | return { 54 | readNextFrame, 55 | close, 56 | }; 57 | } 58 | 59 | module.exports = { 60 | createFrameSource, 61 | }; 62 | -------------------------------------------------------------------------------- /transitions.js: -------------------------------------------------------------------------------- 1 | const assert = require('assert'); 2 | 3 | const randomTransitionsSet = ['fade', 'fadegrayscale', 'directionalwarp', 'crosswarp', 'dreamyzoom', 'burn', 'crosszoom', 'simplezoom', 'linearblur', 'directional-left', 'directional-right', 'directional-up', 'directional-down']; 4 | 5 | function getRandomTransition() { 6 | return randomTransitionsSet[Math.floor(Math.random() * randomTransitionsSet.length)]; 7 | } 8 | 9 | 10 | // https://easings.net/ 11 | 12 | function easeOutExpo(x) { 13 | return x === 1 ? 1 : 1 - (2 ** (-10 * x)); 14 | } 15 | 16 | function easeInOutCubic(x) { 17 | return x < 0.5 ? 4 * x * x * x : 1 - ((-2 * x + 2) ** 3) / 2; 18 | } 19 | 20 | 21 | function getTransitionEasingFunction(easing, transitionName) { 22 | if (easing !== null) { 23 | if (easing) return { easeOutExpo }[easing]; 24 | if (transitionName === 'directional') return easeOutExpo; 25 | } 26 | return (progress) => progress; 27 | } 28 | 29 | function calcTransition(defaults, transition) { 30 | if (transition === null) return { duration: 0 }; 31 | 32 | let transitionOrDefault = { 33 | name: (transition && transition.name) || (defaults.transition && defaults.transition.name), 34 | duration: (transition && transition.duration != null) ? 
transition.duration : (defaults.transition && defaults.transition.duration), 35 | params: (transition && transition.params) || (defaults.transition && defaults.transition.params), 36 | easing: (transition && transition.easing !== undefined) ? transition.easing : (defaults.transition && defaults.transition.easing), 37 | }; 38 | 39 | assert(!transitionOrDefault.duration || transitionOrDefault.name, 'Please specify transition name or set duration to 0'); 40 | 41 | if (transitionOrDefault.name === 'random' && transitionOrDefault.duration) { 42 | transitionOrDefault = { easing: transitionOrDefault.easing, name: getRandomTransition(), duration: transitionOrDefault.duration }; 43 | } 44 | 45 | const getTransitionByAlias = () => { 46 | const aliasedTransition = { 47 | 'directional-left': { name: 'directional', params: { direction: [1, 0] } }, 48 | 'directional-right': { name: 'directional', params: { direction: [-1, 0] } }, 49 | 'directional-down': { name: 'directional', params: { direction: [0, 1] } }, 50 | 'directional-up': { name: 'directional', params: { direction: [0, -1] } }, 51 | }[transitionOrDefault.name]; 52 | if (aliasedTransition) return { ...transitionOrDefault, ...aliasedTransition }; 53 | return transitionOrDefault; 54 | }; 55 | 56 | const outTransition = getTransitionByAlias(); 57 | 58 | return { 59 | name: outTransition.name, 60 | duration: outTransition.duration || 0, 61 | params: outTransition.params, 62 | easingFunction: getTransitionEasingFunction(outTransition.easing, outTransition.name), 63 | }; 64 | } 65 | 66 | 67 | module.exports = { 68 | calcTransition, 69 | easeInOutCubic, 70 | easeOutExpo, 71 | }; 72 | -------------------------------------------------------------------------------- /examples/losslesscut.json5: -------------------------------------------------------------------------------- 1 | { 2 | // fast: false, 3 | outPath: './losslesscut.mp4', 4 | // verbose: true, 5 | // enableFfmpegLog: true, 6 | fps: 30, 7 | audioFilePath: './Believe - Roa [Vlog No Copyright Music]-qldyHxWPFUY.m4a', 8 | defaults: { 9 | transition: { name: 'crossZoom', duration: 1 }, 10 | layer: { fontPath: './Patua_One/PatuaOne-Regular.ttf' }, 11 | }, 12 | clips: [ 13 | { duration: 3, layers: [{ type: 'title-background', text: 'LosslessCut', background: { type: 'linear-gradient' } }] }, 14 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/intro.mov' }] }, 15 | { duration: 3, layers: [{ type: 'title-background', text: 'Capture full resolution screenshots', background: { type: 'radial-gradient' } }] }, 16 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/capture screenshots.mov' }] }, 17 | { duration: 3, layers: [{ type: 'title-background', text: 'Extract tracks as individual files', background: { type: 'radial-gradient' } }] }, 18 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/extract tracks as individual files.mov' }] }, 19 | { duration: 3, layers: [{ type: 'title-background', text: 'Keyframes and zoom', background: { type: 'radial-gradient' } }] }, 20 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/keyframes and zoom.mov' }] }, 21 | { duration: 3, layers: [{ type: 'title-background', text: 'Label segments', background: { type: 'radial-gradient' } }] }, 22 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/label segments.mov' }] }, 23 | { duration: 3, layers: [{ type: 'title-background', text: 'Lossless rotation', background: { type: 'radial-gradient' } }] }, 24 | { 
layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/lossless rotation.mov' }] }, 25 | { duration: 3, layers: [{ type: 'title-background', text: 'Thumbnails', background: { type: 'radial-gradient' } }] }, 26 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/thumbnails.mov' }] }, 27 | { duration: 3, layers: [{ type: 'title-background', text: 'Audio waveforms', background: { type: 'radial-gradient' } }] }, 28 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/audio waveform.mov' }] }, 29 | { duration: 3, layers: [{ type: 'title-background', text: 'Track information', background: { type: 'radial-gradient' } }] }, 30 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/track information.mov' }] }, 31 | { duration: 3, layers: [{ type: 'title-background', text: 'Tracks editor and audio swap', background: { type: 'radial-gradient' } }] }, 32 | { layers: [{ type: 'video', path: '/Users/mifi/Desktop/losslesscut-usage/tracks editor and replace audio.mov' }] }, 33 | { duration: 4, layers: [{ type: 'title-background', text: 'Get it from\nMac App Store\nWindows Store', background: { type: 'color', color: 'black' } }] }, 34 | { duration: 2, layers: [{ type: 'editly-banner' }] }, 35 | ], 36 | } -------------------------------------------------------------------------------- /examples/commonFeatures.json5: -------------------------------------------------------------------------------- 1 | { 2 | // width: 2166, height: 1650, fps: 30, 3 | width: 720, height: 1280, fps: 30, 4 | outPath: './commonFeatures.mp4', 5 | // outPath: './commonFeatures.gif', 6 | audioFilePath: './assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a', 7 | defaults: { 8 | transition: { name: 'random' }, 9 | layer: { fontPath: './assets/Patua_One/PatuaOne-Regular.ttf' }, 10 | }, 11 | clips: [ 12 | { duration: 3, transition: { name: 'directional-left' }, layers: [{ type: 'title-background', text: 'EDITLY\nVideo editing framework', background: { type: 'linear-gradient', colors: ['#02aab0', '#00cdac'] } }] }, 13 | { duration: 4, transition: { name: 'dreamyzoom' }, layers: [{ type: 'title-background', text: 'Multi-line text with animated linear or radial gradients', background: { type: 'radial-gradient' } }] }, 14 | { duration: 3, transition: { name: 'directional-right' }, layers: [{ type: 'rainbow-colors' }, { type: 'title', text: 'Colorful backgrounds' }] }, 15 | { duration: 3, layers: [{ type: 'pause' }, { type: 'title', text: 'and separators' }] }, 16 | 17 | { duration: 3, transition: { name: 'fadegrayscale' }, layers: [{ type: 'title-background', text: 'Image slideshows with Ken Burns effect', background: { type: 'linear-gradient' } }] }, 18 | { duration: 2.5, transition: { name: 'directionalWarp' }, layers: [{ type: 'image', path: './assets/vertical.jpg', zoomDirection: 'out' }] }, 19 | { duration: 3, transition: { name: 'dreamyzoom' }, layers: [{ type: 'image', path: './assets/img1.jpg', duration: 2.5, zoomDirection: 'in' }, { type: 'subtitle', text: 'Indonesia has many spectacular locations. Here is the volcano Kelimutu, which has three lakes in its core, some days with three different colors!' 
}, { type: 'title', position: 'top', text: 'With text' }] }, 20 | { duration: 3, transition: { name: 'colorphase' }, layers: [{ type: 'image', path: './assets/img2.jpg', zoomDirection: 'out' }, { type: 'subtitle', text: 'Komodo national park is the only home of the endangered Komodo dragons' }] }, 21 | { duration: 2.5, transition: { name: 'simplezoom' }, layers: [{ type: 'image', path: './assets/img3.jpg', zoomDirection: 'in' }] }, 22 | 23 | { duration: 1.5, transition: { name: 'crosszoom', duration: 0.3 }, layers: [{ type: 'video', path: '/Users/mifi/Desktop/photos/drone koh lipe/DJI_0402.MOV', cutTo: 58 }, { type: 'title', text: 'Videos' }] }, 24 | { duration: 3, transition: { name: 'fade' }, layers: [{ type: 'video', path: '/Users/mifi/Desktop/photos/drone koh lipe/DJI_0402.MOV', cutFrom: 58 }] }, 25 | { transition: { name: 'fade' }, layers: [{ type: 'video', path: '/Users/mifi/Desktop/photos/drone koh lipe/DJI_0403.MOV', cutTo: 2.5 }] }, 26 | { duration: 1.5, layers: [{ type: 'video', path: '/Users/mifi/Desktop/photos/drone koh lipe/DJI_0401.MOV', cutFrom: 3, cutTo: 30 }] }, 27 | 28 | { duration: 3, transition: { name: 'crosszoom' }, layers: [{ type: 'gl', fragmentPath: './assets/shaders/3l23Rh.frag' }, { type: 'title', text: 'OpenGL\nshaders' }] }, 29 | { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/MdXyzX.frag' }] }, 30 | { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/30daysofshade_010.frag' }] }, 31 | { duration: 3, layers: [{ type: 'gl', fragmentPath: './assets/shaders/wd2yDm.frag', speed: 5 }] }, 32 | { duration: 3, layers: [{ type: 'editly-banner' }] }, 33 | ], 34 | } 35 | -------------------------------------------------------------------------------- /colors.js: -------------------------------------------------------------------------------- 1 | // TODO make separate npm module 2 | 3 | // https://stackoverflow.com/a/4382138/6519037 4 | const allColors = [ 5 | 'hsl(42, 100%, 50%)', 6 | 'hsl(310, 34%, 37%)', 7 | 'hsl(24, 100%, 50%)', 8 | 'hsl(211, 38%, 74%)', 9 | 'hsl(350, 100%, 37%)', 10 | 'hsl(35, 52%, 59%)', 11 | 'hsl(22, 11%, 45%)', 12 | 'hsl(145, 100%, 24%)', 13 | 'hsl(348, 87%, 71%)', 14 | 'hsl(203, 100%, 27%)', 15 | 'hsl(11, 100%, 68%)', 16 | 'hsl(265, 37%, 34%)', 17 | 'hsl(33, 100%, 50%)', 18 | 'hsl(342, 63%, 42%)', 19 | 'hsl(49, 100%, 47%)', 20 | 'hsl(5, 81%, 27%)', 21 | 'hsl(68, 100%, 33%)', 22 | 'hsl(26, 61%, 21%)', 23 | 'hsl(10, 88%, 51%)', 24 | 'hsl(84, 33%, 12%)', 25 | ]; 26 | 27 | // https://digitalsynopsis.com/design/beautiful-color-ui-gradients-backgrounds/ 28 | const gradientColors = [ 29 | [ 30 | '#ff9aac', 31 | '#ffa875', 32 | ], 33 | [ 34 | '#cc2b5e', 35 | '#753a88', 36 | ], 37 | [ 38 | '#42275a', 39 | '#734b6d', 40 | ], 41 | [ 42 | '#bdc3c7', 43 | '#2c3e50', 44 | ], 45 | [ 46 | '#de6262', 47 | '#ffb88c', 48 | ], 49 | [ 50 | '#eb3349', 51 | '#f45c43', 52 | ], 53 | [ 54 | '#dd5e89', 55 | '#f7bb97', 56 | ], 57 | [ 58 | '#56ab2f', 59 | '#a8e063', 60 | ], 61 | [ 62 | '#614385', 63 | '#516395', 64 | ], 65 | [ 66 | '#eecda3', 67 | '#ef629f', 68 | ], 69 | [ 70 | '#eacda3', 71 | '#d6ae7b', 72 | ], 73 | [ 74 | '#02aab0', 75 | '#00cdac', 76 | ], 77 | [ 78 | '#d66d75', 79 | '#e29587', 80 | ], 81 | [ 82 | '#000428', 83 | '#004e92', 84 | ], 85 | [ 86 | '#ddd6f3', 87 | '#faaca8', 88 | ], 89 | [ 90 | '#7b4397', 91 | '#dc2430', 92 | ], 93 | [ 94 | '#43cea2', 95 | '#185a9d', 96 | ], 97 | [ 98 | '#ba5370', 99 | '#f4e2d8', 100 | ], 101 | [ 102 | '#ff512f', 103 | '#dd2476', 104 | ], 105 | [ 106 | '#4568dc', 107 | '#b06ab3', 108 | ], 
109 | [ 110 | '#ec6f66', 111 | '#f3a183', 112 | ], 113 | [ 114 | '#ffd89b', 115 | '#19547b', 116 | ], 117 | [ 118 | '#3a1c71', 119 | '#d76d77', 120 | ], 121 | [ 122 | '#4ca1af', 123 | '#c4e0e5', 124 | ], 125 | [ 126 | '#ff5f6d', 127 | '#ffc371', 128 | ], 129 | [ 130 | '#36d1dc', 131 | '#5b86e5', 132 | ], 133 | [ 134 | '#c33764', 135 | '#1d2671', 136 | ], 137 | [ 138 | '#141e30', 139 | '#243b55', 140 | ], 141 | [ 142 | '#ff7e5f', 143 | '#feb47b', 144 | ], 145 | [ 146 | '#ed4264', 147 | '#ffedbc', 148 | ], 149 | [ 150 | '#2b5876', 151 | '#4e4376', 152 | ], 153 | [ 154 | '#ff9966', 155 | '#ff5e62', 156 | ], 157 | [ 158 | '#aa076b', 159 | '#61045f', 160 | ], 161 | ]; 162 | 163 | /* const lightGradients = [ 164 | [ 165 | '#ee9ca7', 166 | '#ffdde1', 167 | ], 168 | [ 169 | '#2193b0', 170 | '#6dd5ed', 171 | ], 172 | ]; */ 173 | 174 | function getRandomColor(colors = allColors) { 175 | const index = Math.floor(Math.random() * colors.length); 176 | const remainingColors = [...colors]; 177 | remainingColors.splice(index, 1); 178 | return { remainingColors, color: colors[index] || allColors[0] }; 179 | } 180 | 181 | function getRandomColors(num) { 182 | let colors = allColors; 183 | const out = []; 184 | for (let i = 0; i < Math.min(num, allColors.length); i += 1) { 185 | const { remainingColors, color } = getRandomColor(colors); 186 | out.push(color); 187 | colors = remainingColors; 188 | } 189 | return out; 190 | } 191 | 192 | function getRandomGradient() { 193 | return gradientColors[Math.floor(Math.random() * gradientColors.length)]; 194 | } 195 | 196 | module.exports = { 197 | getRandomColors, 198 | getRandomGradient, 199 | }; 200 | -------------------------------------------------------------------------------- /cli.js: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | const meow = require('meow'); 3 | const fs = require('fs'); 4 | const FileType = require('file-type'); 5 | const pMap = require('p-map'); 6 | const JSON5 = require('json5'); 7 | const assert = require('assert'); 8 | 9 | const editly = require('.'); 10 | 11 | // See also readme 12 | const cli = meow(` 13 | Usage 14 | $ editly CLIP1 [CLIP2 [CLIP3 ...]] 15 | where each CLIP can be one of the following: 16 | - A path to a video file 17 | - A path to an image 18 | - A quoted text to show in a title screen, prefixed by "title:" 19 | 20 | Or alternatively: 21 | $ editly --json JSON_PATH 22 | where JSON_PATH is the path to an edit spec JSON file, can be a normal JSON or JSON5 23 | 24 | Options 25 | --out Out video path (defaults to ./editly-out.mp4) - can also be a .gif 26 | --json Use JSON edit spec 27 | --transition-name Name of default transition to use (default: random) 28 | --transition-duration Default transition duration 29 | --width Width which all media will be converted to 30 | --height Height which all media will be converted to 31 | --fps FPS which all videos will be converted to 32 | --font-path Set default font to a .ttf 33 | --audio-file-path Add an audio track 34 | 35 | --fast, -f Fast mode (low resolution and FPS, useful for getting a quick preview) 36 | --verbose, -v 37 | 38 | For more detailed explanation, see: 39 | https://github.com/mifi/editly 40 | 41 | Examples 42 | $ editly title:'My video' clip1.mov clip2.mov title:'My slideshow' img1.jpg img2.jpg title:'THE END' --audio-file-path /path/to/music.mp3 --font-path /path/to/my-favorite-font.ttf 43 | $ editly my-editly.json5 --out output.gif 44 | `, { 45 | flags: { 46 | verbose: { type: 'boolean', alias: 'v' }, 
47 | fast: { type: 'boolean', alias: 'f' }, 48 | transitionDuration: { type: 'number' }, 49 | width: { type: 'number' }, 50 | height: { type: 'number' }, 51 | fps: { type: 'number' }, 52 | }, 53 | }); 54 | 55 | (async () => { 56 | let { json } = cli.flags; 57 | // eslint-disable-next-line prefer-destructuring 58 | if (cli.input.length === 1 && /\.(json|json5|js)$/.test(cli.input[0].toLowerCase())) json = cli.input[0]; 59 | 60 | let params = { 61 | defaults: {}, 62 | }; 63 | 64 | if (json) { 65 | params = JSON5.parse(fs.readFileSync(json, 'utf-8')); 66 | } else { 67 | const clipsIn = cli.input; 68 | if (clipsIn.length < 1) cli.showHelp(); 69 | 70 | const clips = await pMap(clipsIn, async (clip) => { 71 | const match = clip.match(/^title:(.+)$/); 72 | if (match) return { type: 'title-background', text: match[1] }; 73 | 74 | const fileType = await FileType.fromFile(clip); 75 | if (!fileType) { 76 | console.error('Invalid file for clip', clip); 77 | cli.showHelp(); 78 | } 79 | 80 | const { mime } = fileType; 81 | 82 | if (mime.startsWith('video')) return { type: 'video', path: clip }; 83 | if (mime.startsWith('image')) return { type: 'image', path: clip }; 84 | 85 | throw new Error(`Unrecognized clip or file type "${clip}"`); 86 | }, { concurrency: 1 }); 87 | 88 | assert(clips.length > 0, 'No clips specified'); 89 | 90 | params.clips = clips.map((clip) => ({ layers: [clip] })); 91 | } 92 | 93 | const { verbose, transitionName, transitionDuration, width, height, fps, audioFilePath, fontPath, fast, out: outPath } = cli.flags; 94 | 95 | if (transitionName || transitionDuration != null) { 96 | params.defaults.transition = {}; 97 | if (transitionName) params.defaults.transition.name = transitionName; 98 | if (transitionDuration) params.defaults.transition.duration = transitionDuration; 99 | } 100 | 101 | if (fontPath) { 102 | params.defaults.layer = { 103 | fontPath, 104 | }; 105 | } 106 | 107 | if (outPath) params.outPath = outPath; 108 | if (audioFilePath) params.audioFilePath = audioFilePath; 109 | if (width) params.width = width; 110 | if (height) params.height = height; 111 | if (fps) params.fps = fps; 112 | 113 | if (fast) params.fast = fast; 114 | if (verbose) params.verbose = verbose; 115 | 116 | if (params.verbose) console.log(JSON5.stringify(params, null, 2)); 117 | 118 | if (!params.outPath) params.outPath = './editly-out.mp4'; 119 | 120 | await editly(params); 121 | })().catch((err) => { 122 | console.error(err); 123 | process.exitCode = 1; 124 | }); 125 | -------------------------------------------------------------------------------- /sources/videoFrameSource.js: -------------------------------------------------------------------------------- 1 | const execa = require('execa'); 2 | const assert = require('assert'); 3 | 4 | module.exports = async ({ width, height, channels, framerateStr, verbose, enableFfmpegLog, params }) => { 5 | const targetSize = width * height * channels; 6 | 7 | // TODO assert that we have read the correct amount of frames 8 | 9 | const { path, cutFrom, cutTo, resizeMode = 'cover', backgroundColor = '#000000', framePtsFactor } = params; 10 | 11 | const buf = Buffer.allocUnsafe(targetSize); 12 | let length = 0; 13 | // let inFrameCount = 0; 14 | 15 | let ptsFilter = ''; 16 | if (framePtsFactor !== 1) { 17 | if (verbose) console.log('framePtsFactor', framePtsFactor); 18 | ptsFilter = `setpts=${framePtsFactor}*PTS,`; 19 | } 20 | 21 | let scaleFilter; 22 | if (resizeMode === 'stretch') scaleFilter = `scale=${width}:${height}`; 23 | // 
https://superuser.com/questions/891145/ffmpeg-upscale-and-letterbox-a-video/891478 24 | else if (resizeMode === 'contain') scaleFilter = `scale=(iw*sar)*min(${width}/(iw*sar)\\,${height}/ih):ih*min(${width}/(iw*sar)\\,${height}/ih), pad=${width}:${height}:(${width}-iw*min(${width}/iw\\,${height}/ih))/2:(${height}-ih*min(${width}/iw\\,${height}/ih))/2:${backgroundColor}`; 25 | // Cover: https://unix.stackexchange.com/a/192123 26 | else scaleFilter = `scale=(iw*sar)*max(${width}/(iw*sar)\\,${height}/ih):ih*max(${width}/(iw*sar)\\,${height}/ih),crop=${width}:${height}`; 27 | 28 | // http://zulko.github.io/blog/2013/09/27/read-and-write-video-frames-in-python-using-ffmpeg/ 29 | // Testing: ffmpeg -i 'vid.mov' -t 1 -vcodec rawvideo -pix_fmt rgba -f image2pipe - | ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i - -vf format=yuv420p -vcodec libx264 -y out.mp4 30 | // https://trac.ffmpeg.org/wiki/ChangingFrameRate 31 | const args = [ 32 | ...(enableFfmpegLog ? [] : ['-hide_banner', '-loglevel', 'panic']), 33 | ...(cutFrom ? ['-ss', cutFrom] : []), 34 | '-i', path, 35 | ...(cutTo ? ['-t', (cutTo - cutFrom) * framePtsFactor] : []), 36 | '-vf', `${ptsFilter}fps=${framerateStr},${scaleFilter}`, 37 | '-map', 'v:0', 38 | '-vcodec', 'rawvideo', 39 | '-pix_fmt', 'rgba', 40 | '-f', 'image2pipe', 41 | '-', 42 | ]; 43 | if (verbose) console.log(args.join(' ')); 44 | 45 | const ps = execa('ffmpeg', args, { encoding: null, buffer: false, stdin: 'ignore', stdout: 'pipe', stderr: process.stderr }); 46 | 47 | const stream = ps.stdout; 48 | 49 | let timeout; 50 | let ended = false; 51 | 52 | const readNextFrame = () => new Promise((resolve, reject) => { 53 | if (ended) { 54 | resolve(); 55 | return; 56 | } 57 | // console.log('Reading new frame', path); 58 | 59 | function onEnd() { 60 | if (verbose) console.log(path, 'ffmpeg stream ended'); 61 | ended = true; 62 | resolve(); 63 | } 64 | 65 | function cleanup() { 66 | stream.pause(); 67 | // eslint-disable-next-line no-use-before-define 68 | stream.removeListener('data', handleChunk); 69 | stream.removeListener('end', onEnd); 70 | stream.removeListener('error', reject); 71 | } 72 | 73 | function handleChunk(chunk) { 74 | // console.log('chunk', chunk.length); 75 | const nCopied = length + chunk.length > targetSize ? targetSize - length : chunk.length; 76 | chunk.copy(buf, length, 0, nCopied); 77 | length += nCopied; 78 | 79 | if (length > targetSize) console.error('OOPS! 
Overflow', length); 80 | 81 | if (length >= targetSize) { 82 | // console.log('Finished reading frame', inFrameCount, path); 83 | const out = Buffer.from(buf); 84 | 85 | const restLength = chunk.length - nCopied; 86 | if (restLength > 0) { 87 | if (verbose) console.log('Left over data', nCopied, chunk.length, restLength); 88 | chunk.slice(nCopied).copy(buf, 0); 89 | length = restLength; 90 | } else { 91 | length = 0; 92 | } 93 | 94 | // inFrameCount += 1; 95 | 96 | clearTimeout(timeout); 97 | cleanup(); 98 | resolve(out); 99 | } 100 | } 101 | 102 | timeout = setTimeout(() => { 103 | console.warn('Timeout on read video frame'); 104 | cleanup(); 105 | resolve(); 106 | }, 20000); 107 | 108 | stream.on('data', handleChunk); 109 | stream.on('end', onEnd); 110 | stream.on('error', reject); 111 | stream.resume(); 112 | }).then((data) => { 113 | if (data) assert(data.length === targetSize); 114 | return data; 115 | }); 116 | 117 | const close = () => { 118 | if (verbose) console.log('Close', path); 119 | ps.cancel(); 120 | }; 121 | 122 | return { 123 | readNextFrame, 124 | close, 125 | }; 126 | }; 127 | -------------------------------------------------------------------------------- /sources/fabricFrameSource.js: -------------------------------------------------------------------------------- 1 | const { fabric } = require('fabric'); 2 | const fileUrl = require('file-url'); 3 | const nodeCanvas = require('canvas'); 4 | 5 | const { createCanvas } = nodeCanvas; 6 | 7 | const { canvasToRgba } = require('./shared'); 8 | const { getRandomGradient, getRandomColors } = require('../colors'); 9 | const { easeOutExpo } = require('../transitions'); 10 | 11 | // http://fabricjs.com/kitchensink 12 | 13 | 14 | function fabricCanvasToRgba(canvas) { 15 | // https://github.com/fabricjs/fabric.js/blob/26e1a5b55cbeeffb59845337ced3f3f91d533d7d/src/static_canvas.class.js 16 | // https://github.com/fabricjs/fabric.js/issues/3885 17 | const internalCanvas = fabric.util.getNodeCanvas(canvas.lowerCanvasEl); 18 | const ctx = internalCanvas.getContext('2d'); 19 | 20 | // require('fs').writeFileSync(`${Math.floor(Math.random() * 1e12)}.png`, internalCanvas.toBuffer('image/png')); 21 | // throw new Error('abort'); 22 | 23 | return canvasToRgba(ctx); 24 | } 25 | 26 | async function mergeFrames({ width, height, framesRaw }) { 27 | if (framesRaw.length === 1) return framesRaw[0]; 28 | 29 | // Node canvas needs no cleanup https://github.com/Automattic/node-canvas/issues/1216#issuecomment-412390668 30 | const canvas = createCanvas(width, height); 31 | const ctx = canvas.getContext('2d'); 32 | 33 | framesRaw.forEach((frameRaw) => { 34 | const canvas2 = createCanvas(width, height); 35 | const ctx2 = canvas2.getContext('2d'); 36 | // https://developer.mozilla.org/en-US/docs/Web/API/ImageData/ImageData 37 | // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/putImageData 38 | ctx2.putImageData(new nodeCanvas.ImageData(Uint8ClampedArray.from(frameRaw), width, height), 0, 0); 39 | // require('fs').writeFileSync(`${Math.floor(Math.random() * 1e12)}.png`, canvas2.toBuffer('image/png')); 40 | 41 | ctx.drawImage(canvas2, 0, 0); 42 | }); 43 | 44 | return canvasToRgba(ctx); 45 | } 46 | 47 | async function createFabricFrameSource(func, { width, height, ...rest }) { 48 | const onInit = async ({ canvas }) => func(({ width, height, fabric, canvas, ...rest })); 49 | 50 | let canvas = new fabric.StaticCanvas(null, { width, height }); 51 | 52 | const { onRender = () => {}, onClose = () => {} } = await onInit({ canvas }) || 
{}; 53 | 54 | async function readNextFrame(progress) { 55 | await onRender(progress); 56 | 57 | canvas.renderAll(); 58 | 59 | const rgba = fabricCanvasToRgba(canvas); 60 | 61 | canvas.clear(); 62 | // canvas.dispose(); 63 | return rgba; 64 | } 65 | 66 | return { 67 | readNextFrame, 68 | close: () => { 69 | // https://stackoverflow.com/questions/19030174/how-to-manage-memory-in-case-of-multiple-fabric-js-canvas 70 | canvas.dispose(); 71 | canvas = undefined; 72 | onClose(); 73 | }, 74 | }; 75 | } 76 | 77 | async function imageFrameSource({ verbose, params, width, height, canvas }) { 78 | if (verbose) console.log('Loading', params.path); 79 | 80 | const imgData = await new Promise((resolve) => fabric.util.loadImage(fileUrl(params.path), resolve)); 81 | 82 | const getImg = () => new fabric.Image(imgData, { 83 | originX: 'center', 84 | originY: 'center', 85 | left: width / 2, 86 | top: height / 2, 87 | }); 88 | 89 | // Blurred version 90 | const blurredImg = getImg(); 91 | blurredImg.filters = [new fabric.Image.filters.Resize({ scaleX: 0.01, scaleY: 0.01 })]; 92 | blurredImg.applyFilters(); 93 | 94 | if (blurredImg.height > blurredImg.width) blurredImg.scaleToWidth(width); 95 | else blurredImg.scaleToHeight(height); 96 | 97 | 98 | async function onRender(progress) { 99 | const { zoomDirection = 'in', zoomAmount = 0.1 } = params; 100 | 101 | const img = getImg(); 102 | 103 | const scaleFactor = zoomDirection === 'in' ? (1 + progress * zoomAmount) : (1 + zoomAmount * (1 - progress)); 104 | if (img.height > img.width) img.scaleToHeight(height * scaleFactor); 105 | else img.scaleToWidth(width * scaleFactor); 106 | 107 | canvas.add(blurredImg); 108 | canvas.add(img); 109 | } 110 | 111 | function onClose() { 112 | blurredImg.dispose(); 113 | // imgData.dispose(); 114 | } 115 | 116 | return { onRender, onClose }; 117 | } 118 | 119 | async function fillColorFrameSource({ canvas, params }) { 120 | const { color } = params; 121 | 122 | const randomColor = getRandomColors(1)[0]; 123 | 124 | async function onRender() { 125 | // eslint-disable-next-line no-param-reassign 126 | canvas.backgroundColor = color || randomColor; 127 | } 128 | 129 | return { onRender }; 130 | } 131 | 132 | function getRekt(width, height) { 133 | // width and height with room to rotate 134 | return new fabric.Rect({ originX: 'center', originY: 'center', left: width / 2, top: height / 2, width: width * 2, height: height * 2 }); 135 | } 136 | 137 | async function radialGradientFrameSource({ canvas, width, height, params }) { 138 | const { colors: inColors } = params; 139 | 140 | const randomColors = getRandomGradient(); 141 | 142 | async function onRender(progress) { 143 | // console.log('progress', progress); 144 | 145 | const max = Math.max(width, height); 146 | 147 | const colors = inColors && inColors.length === 2 ? 
inColors : randomColors; 148 | 149 | const r1 = 0; 150 | const r2 = max * (1 + progress) * 0.6; 151 | 152 | const rect = getRekt(width, height); 153 | 154 | const cx = 0.5 * rect.width; 155 | const cy = 0.5 * rect.height; 156 | 157 | rect.setGradient('fill', { 158 | type: 'radial', 159 | r1, 160 | r2, 161 | x1: cx, 162 | y1: cy, 163 | x2: cx, 164 | y2: cy, 165 | colorStops: { 166 | 0: colors[0], 167 | 1: colors[1], 168 | }, 169 | }); 170 | 171 | canvas.add(rect); 172 | } 173 | 174 | return { onRender }; 175 | } 176 | 177 | async function linearGradientFrameSource({ canvas, width, height, params }) { 178 | const { colors: inColors } = params; 179 | 180 | const randomColors = getRandomGradient(); 181 | const colors = inColors && inColors.length === 2 ? inColors : randomColors; 182 | 183 | async function onRender(progress) { 184 | const rect = getRekt(width, height); 185 | 186 | rect.setGradient('fill', { 187 | x1: 0, 188 | y1: 0, 189 | x2: width, 190 | y2: height, 191 | colorStops: { 192 | 0: colors[0], 193 | 1: colors[1], 194 | }, 195 | }); 196 | 197 | rect.rotate(progress * 30); 198 | canvas.add(rect); 199 | } 200 | 201 | return { onRender }; 202 | } 203 | 204 | async function subtitleFrameSource({ canvas, width, height, params }) { 205 | const { text, textColor = '#ffffff', fontFamily = 'sans-serif' } = params; 206 | 207 | async function onRender(progress) { 208 | const easedProgress = easeOutExpo(Math.min(progress, 1)); 209 | 210 | const min = Math.min(width, height); 211 | const padding = 0.05 * min; 212 | 213 | const textBox = new fabric.Textbox(text, { 214 | fill: textColor, 215 | fontFamily, 216 | 217 | fontSize: min / 20, 218 | textAlign: 'left', 219 | width: width - padding * 2, 220 | originX: 'center', 221 | originY: 'bottom', 222 | left: (width / 2) + (-1 + easedProgress) * padding, 223 | top: height - padding, 224 | opacity: easedProgress, 225 | }); 226 | 227 | const rect = new fabric.Rect({ 228 | left: 0, 229 | width, 230 | height: textBox.height + padding * 2, 231 | top: height, 232 | originY: 'bottom', 233 | fill: 'rgba(0,0,0,0.2)', 234 | opacity: easedProgress, 235 | }); 236 | 237 | canvas.add(rect); 238 | canvas.add(textBox); 239 | } 240 | 241 | return { onRender }; 242 | } 243 | 244 | async function titleFrameSource({ canvas, width, height, params }) { 245 | const { text, textColor = '#ffffff', fontFamily = 'sans-serif', position = 'center' } = params; 246 | 247 | async function onRender(progress) { 248 | // console.log('progress', progress); 249 | 250 | const min = Math.min(width, height); 251 | 252 | const fontSize = Math.round(min * 0.1); 253 | 254 | const scale = (1 + progress * 0.2).toFixed(4); 255 | 256 | const textBox = new fabric.Textbox(text, { 257 | fill: textColor, 258 | fontFamily, 259 | fontSize, 260 | textAlign: 'center', 261 | width: width * 0.8, 262 | }); 263 | 264 | const textImage = await new Promise((r) => textBox.cloneAsImage(r)); 265 | 266 | let originY = 'center'; 267 | let top = height / 2; 268 | if (position === 'top') { 269 | originY = 'top'; 270 | top = height * 0.05; 271 | } else if (position === 'bottom') { 272 | originY = 'bottom'; 273 | top = height; 274 | } 275 | 276 | textImage.set({ 277 | originX: 'center', 278 | originY, 279 | left: width / 2, 280 | top, 281 | scaleX: scale, 282 | scaleY: scale, 283 | }); 284 | canvas.add(textImage); 285 | } 286 | 287 | return { onRender }; 288 | } 289 | 290 | async function createCustomCanvasFrameSource({ width, height, params }) { 291 | const canvas = createCanvas(width, height); 292 | const context = 
canvas.getContext('2d');
293 | 
294 |   const { onClose, onRender } = await params.func(({ width, height, canvas }));
295 | 
296 |   async function readNextFrame(progress) {
297 |     context.clearRect(0, 0, canvas.width, canvas.height);
298 |     await onRender(progress);
299 |     // require('fs').writeFileSync(`${new Date().getTime()}.png`, canvas.toBuffer('image/png'));
300 |     return canvasToRgba(context);
301 |   }
302 | 
303 |   return {
304 |     readNextFrame,
305 |     // Node canvas needs no cleanup https://github.com/Automattic/node-canvas/issues/1216#issuecomment-412390668
306 |     close: onClose,
307 |   };
308 | }
309 | 
310 | async function customFabricFrameSource({ canvas, width, height, params }) {
311 |   return params.func(({ width, height, fabric, canvas }));
312 | }
313 | 
314 | function registerFont(...args) {
315 |   fabric.nodeCanvas.registerFont(...args);
316 | }
317 | 
318 | module.exports = {
319 |   mergeFrames,
320 |   registerFont,
321 |   createFabricFrameSource,
322 |   createCustomCanvasFrameSource,
323 | 
324 |   customFabricFrameSource,
325 |   subtitleFrameSource,
326 |   titleFrameSource,
327 |   fillColorFrameSource,
328 |   radialGradientFrameSource,
329 |   linearGradientFrameSource,
330 |   imageFrameSource,
331 | };
332 | 
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # editly 🏄‍♀️
2 | 
3 | [![demo](https://github.com/mifi/gifs/raw/master/commonFeatures.gif)](https://youtu.be/LNeclLkxUEY)
4 | 
5 | This GIF / YouTube video was created with the command "editly [commonFeatures.json5](https://github.com/mifi/editly/blob/master/examples/commonFeatures.json5)". See [more examples here](https://github.com/mifi/editly/tree/master/examples#examples).
6 | 
7 | **Editly** is a tool and framework for declarative NLE (**non-linear video editing**) using Node.js and ffmpeg. Editly allows you to easily and **programmatically create a video** from a **set of clips, images and titles**, with smooth transitions between them and music overlaid.
8 | 
9 | Editly has a simple CLI for quickly assembling a video from a set of clips or images, or you can use its more flexible Javascript API.
10 | 
11 | Inspired by [ffmpeg-concat](https://github.com/transitive-bullshit/ffmpeg-concat), editly is much faster and doesn't require much storage because it uses **streaming** editing. Editly aims to be very extensible and feature-rich, with a pluggable interface for adding new **dynamic content**.
12 | 
13 | ## Features
14 | 
15 | - Edit videos with code! Declarative API with fun defaults
16 | - Create colorful videos with random colors generated from aesthetically pleasing palettes and random effects
17 | - Supports any input size, like 4K video and DSLR photos
18 | - Can output to any dimensions and aspect ratio, like *Instagram post* (1:1), *Instagram story* (9:16), *YouTube* (16:9), or any other dimensions you like.
19 | - Content will be scaled and letterboxed automatically, even if the input aspect ratio is not the same, and the framerate will be converted.
20 | - Speeds up / slows down videos automatically to match `cutFrom`/`cutTo` segment length with each clip's `duration`
21 | - Overlay text and subtitles on videos, images or backgrounds
22 | - Accepts custom HTML5 Canvas / Fabric.js Javascript code for custom screens or dynamic overlays
23 | - Render custom GL shaders (for example from [shadertoy](https://www.shadertoy.com/))
24 | - Can output GIF
25 | 
26 | ## Use cases
27 | 
28 | - Create a slideshow from a set of pictures with text overlay
29 | - Create a fast-paced trailer or promo video
30 | - Create a tutorial video with help text
31 | - Simply convert a video to a GIF
32 | - Resize video to any size or framerate and with automatic letterbox/crop (e.g. if you need to upload a video somewhere but the site complains `Video must be 1337x1000 30fps`)
33 | 
34 | See [examples](https://github.com/mifi/editly/tree/master/examples)
35 | 
36 | ## Requirements
37 | 
38 | - [Node.js installed](https://nodejs.org/en/) (recommended to use the newest stable version)
39 | - Should work on Windows, MacOS and Linux. Needs at least Node.js v12.16.2 on MacOS ([see issue](https://github.com/sindresorhus/meow/issues/144)). See also https://github.com/stackgl/headless-gl#system-dependencies
40 | 
41 | Make sure you have `ffmpeg` and `ffprobe` installed and available in `PATH`.
42 | 
43 | ## Installing
44 | 
45 | `npm i -g editly`
46 | 
47 | ## Usage: Command line video editor
48 | 
49 | Run `editly --help` for usage.
50 | 
51 | Create a simple randomized video edit from videos, images and text with an audio track:
52 | 
53 | ```sh
54 | editly \
55 |   title:'My video' \
56 |   clip1.mov \
57 |   clip2.mov \
58 |   title:'My slideshow' \
59 |   img1.jpg \
60 |   img2.jpg \
61 |   title:'THE END' \
62 |   --fast \
63 |   --audio-file-path /path/to/music.mp3
64 | ```
65 | 
66 | Or create an MP4 (or GIF) from a JSON or JSON5 edit spec *(JSON5 is just a more friendly JSON format)*:
67 | 
68 | ```sh
69 | editly my-editly.json5 --fast --out output.gif
70 | ```
71 | 
72 | For examples of how to make a JSON edit spec, see below or https://github.com/mifi/editly/tree/master/examples
73 | 
74 | When you run with `--fast` or `fast: true`, it will render a much quicker, low-resolution preview ⏩
75 | 
76 | By default, without `--fast`, it will use the **width**, **height** and **frame rate** from the **first** input video. **All other clips will be converted to these dimensions.** You can of course override any or all of these parameters.
77 | 
78 | **TIP:** Use this tool in conjunction with [LosslessCut](https://github.com/mifi/lossless-cut)
79 | 
80 | **TIP:** If you need catchy music for your video, have a look at [this YouTube channel](https://www.youtube.com/channel/UCht8qITGkBvXKsR1Byln-wA) or the [YouTube audio library](https://www.youtube.com/audiolibrary/music?nv=1). Then use [youtube-dl](https://github.com/ytdl-org/youtube-dl) to download the video, and then point `--audio-file-path` at the video file. *Be sure to respect their license!*
81 | 
82 | ## Javascript library
83 | 
84 | ```js
85 | const editly = require('editly');
86 | 
87 | // See editSpec documentation
88 | await editly(editSpec)
89 |   .catch(console.error);
90 | ```
91 | 
92 | ## Edit spec
93 | 
94 | Edit specs are Javascript / JSON objects describing the whole edit operation, with the following structure:
95 | 
96 | ```js
97 | {
98 |   outPath,
99 |   width,
100 |   height,
101 |   fps,
102 |   defaults: {
103 |     duration: 4,
104 |     transition: {
105 |       duration: 0.5,
106 |       name: 'random',
107 |     },
108 |     layer: {
109 |       fontPath,
110 |       // ...more layer defaults
111 |     }
112 |   },
113 |   audioFilePath,
114 |   clips: [
115 |     {
116 |       transition,
117 |       duration,
118 |       layers: [
119 |         {
120 |           type,
121 |           // ...more layer-specific options
122 |         }
123 |         // ...more layers
124 |       ],
125 |     }
126 |     // ...more clips
127 |   ],
128 | 
129 |   // Testing options:
130 |   enableFfmpegLog: false,
131 |   verbose: false,
132 |   fast: false,
133 | }
134 | ```
135 | 
136 | ### Parameters
137 | 
138 | | Parameter | CLI equivalent | Description | Default | |
139 | |-|-|-|-|-|
140 | | `outPath` | `--out` | Out path (mp4, mkv), can also be a `.gif` | | |
141 | | `width` | `--width` | Width which all media will be converted to | `640` | |
142 | | `height` | `--height` | Height which all media will be converted to | auto, based on `width` and the aspect ratio of the **first video** | |
143 | | `fps` | `--fps` | FPS which all videos will be converted to | First video FPS or `25` | |
144 | | `audioFilePath` | `--audio-file-path` | Set an audio track for the whole output video | | |
145 | | `fast` | `--fast`, `-f` | Fast mode (low resolution and FPS, useful for getting a quick preview) | `false` | |
146 | | `defaults.layer.fontPath` | `--font-path` | Set default font to a .ttf | System font | |
147 | | `defaults.layer.*` | | Set any layer parameter that all layers will inherit | | |
148 | | `defaults.duration` | | Set default clip duration for clips that don't have their own duration | `4` | sec |
149 | | `defaults.transition` | | An object `{ name, duration }` describing the default transition. Set to **null** to disable transitions | | |
150 | | `defaults.transition.duration` | `--transition-duration` | Default transition duration | `0.5` | sec |
151 | | `defaults.transition.name` | `--transition-name` | Default transition type. See **Transition types** | `random` | |
152 | | `clips[]` | | List of clip objects that will be concatenated in sequence | | |
153 | | `clips[].duration` | | Clip duration. See `defaults.duration` | `defaults.duration` | |
154 | | `clips[].transition` | | Specify transition at the **end** of this clip. See `defaults.transition` | `defaults.transition` | |
155 | | `clips[].layers[]` | | List of layers within the current clip that will be overlaid in their natural order (last layer on top) | | |
156 | | `clips[].layers[].type` | | Layer type, see below | | |
157 | 
158 | ### Transition types
159 | 
160 | `transition.name` can be any of [gl-transitions](https://gl-transitions.com/gallery), or any of the following: `directional-left`, `directional-right`, `directional-up`, `directional-down` and `random`.
161 | 
162 | ### Layer types
163 | 
164 | See [examples](https://github.com/mifi/editly/tree/master/examples) and [commonFeatures.json5](https://github.com/mifi/editly/blob/master/examples/commonFeatures.json5)
165 | 
166 | #### Layer type 'video'
167 | 
168 | For video layers, if the parent `clip.duration` is specified, the video will be slowed/sped-up to match `clip.duration`. If `cutFrom`/`cutTo` is set, the resulting segment (`cutTo`-`cutFrom`) will be slowed/sped-up to fit `clip.duration`.
169 | 
170 | | Parameter | Description | Default | |
171 | |-|-|-|-|
172 | | `path` | Path to video file | | |
173 | | `resizeMode` | One of `cover`, `contain`, `stretch` | `contain` | |
174 | | `cutFrom` | Time value to cut from | `0` | sec |
175 | | `cutTo` | Time value to cut to | *end of video* | sec |
176 | | `backgroundColor` | Background of letterboxing | `#000000` | |
177 | 
178 | #### Layer type 'image'
179 | 
180 | | Parameter | Description | Default | |
181 | |-|-|-|-|
182 | | `path` | Path to image file | | |
183 | | `zoomDirection` | Zoom direction for Ken Burns effect: `in` or `out` | `in` | |
184 | | `zoomAmount` | Zoom amount for Ken Burns effect | `0.1` | |
185 | 
186 | #### Layer type 'title'
187 | - `fontPath` - See `defaults.layer.fontPath`
188 | - `text` - Title text to show, keep it short
189 | - `textColor` - default `#ffffff`
190 | - `position` - Vertical position: `top`, `bottom` or `center`
191 | 
192 | #### Layer type 'subtitle'
193 | - `fontPath` - See `defaults.layer.fontPath`
194 | - `text` - Subtitle text to show
195 | - `textColor` - default `#ffffff`
196 | 
197 | #### Layer type 'title-background'
198 | 
199 | Title with background
200 | 
201 | - `text` - See type `title`
202 | - `textColor` - See type `title`
203 | - `background` - `{ type, ... }` - See type `radial-gradient`, `linear-gradient` or `fill-color`
204 | - `fontPath` - See type `title`
205 | 
206 | #### Layer type 'fill-color', 'pause'
207 | - `color` - Color to fill background, default: randomize
208 | 
209 | #### Layer type 'radial-gradient'
210 | - `colors` - Array of two colors, default: randomize
211 | 
212 | #### Layer type 'linear-gradient'
213 | - `colors` - Array of two colors, default: randomize
214 | 
215 | #### Layer type 'rainbow-colors'
216 | 
217 | 🌈🌈🌈
218 | 
219 | #### Layer type 'canvas'
220 | 
221 | See [customCanvas.js](https://github.com/mifi/editly/blob/master/examples/customCanvas.js)
222 | 
223 | - `func` - Custom Javascript function
224 | 
225 | #### Layer type 'fabric'
226 | 
227 | See [customFabric.js](https://github.com/mifi/editly/blob/master/examples/customFabric.js)
228 | 
229 | - `func` - Custom Javascript function
230 | 
231 | #### Layer type 'gl'
232 | 
233 | Loads a GLSL shader.
See [gl.json5](https://github.com/mifi/editly/blob/master/examples/gl.json5) and [rainbow-colors.frag](https://github.com/mifi/editly/blob/master/shaders/rainbow-colors.frag) 234 | 235 | - `fragmentPath` 236 | - `vertexPath` (optional) 237 | 238 | ## Troubleshooting 239 | 240 | - If you get `Error: The specified module could not be found.`, try: `npm un -g editly && npm i -g --build-from-source editly` (see [#15](https://github.com/mifi/editly/issues/15)) 241 | 242 | ## See also 243 | 244 | - https://github.com/transitive-bullshit/awesome-ffmpeg 245 | - https://github.com/h2non/videoshow 246 | - https://github.com/transitive-bullshit/ffmpeg-concat 247 | - https://github.com/sjfricke/awesome-webgl 248 | - https://www.mltframework.org/docs/melt/ 249 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const execa = require('execa'); 2 | const assert = require('assert'); 3 | const pMap = require('p-map'); 4 | const { basename, join } = require('path'); 5 | const flatMap = require('lodash/flatMap'); 6 | const JSON5 = require('json5'); 7 | 8 | const { parseFps, readFileInfo, multipleOf2 } = require('./util'); 9 | const { registerFont } = require('./sources/fabricFrameSource'); 10 | const { createFrameSource } = require('./sources/frameSource'); 11 | const { calcTransition } = require('./transitions'); 12 | 13 | const GlTransitions = require('./glTransitions'); 14 | 15 | // Cache 16 | const loadedFonts = []; 17 | 18 | // See #16 19 | const checkTransition = (transition) => assert(transition == null || typeof transition === 'object', 'Transition must be an object'); 20 | 21 | 22 | module.exports = async (config = {}) => { 23 | const { 24 | // Testing options: 25 | enableFfmpegLog = false, 26 | verbose = false, 27 | fast, 28 | 29 | outPath, 30 | clips: clipsIn, 31 | width: requestedWidth, 32 | height: requestedHeight, 33 | fps: requestedFps, 34 | defaults: defaultsIn = {}, 35 | audioFilePath: audioFilePathIn, 36 | } = config; 37 | 38 | const isGif = outPath.toLowerCase().endsWith('.gif'); 39 | 40 | const audioFilePath = isGif ? undefined : audioFilePathIn; 41 | 42 | checkTransition(defaultsIn.transition); 43 | 44 | const defaults = { 45 | duration: 4, 46 | ...defaultsIn, 47 | transition: defaultsIn.transition === null ? 
null : { 48 | duration: 0.5, 49 | name: 'random', 50 | ...defaultsIn.transition, 51 | }, 52 | }; 53 | 54 | if (verbose) console.log(JSON5.stringify(config, null, 2)); 55 | 56 | assert(outPath, 'Please provide an output path'); 57 | assert(clipsIn.length > 0, 'Please provide at least 1 clip'); 58 | 59 | async function handleLayer(layer) { 60 | const { type, ...restLayer } = layer; 61 | 62 | if (['fabric', 'canvas'].includes(type)) assert(typeof layer.func === 'function', '"func" must be a function'); 63 | 64 | if (['image', 'fabric', 'canvas', 'gl', 'radial-gradient', 'linear-gradient', 'fill-color'].includes(type)) return layer; 65 | 66 | // TODO if random-background radial-gradient linear etc 67 | if (type === 'pause') return handleLayer({ ...restLayer, type: 'fill-color' }); 68 | 69 | if (type === 'rainbow-colors') return handleLayer({ type: 'gl', fragmentPath: join(__dirname, 'shaders/rainbow-colors.frag') }); 70 | 71 | if (type === 'editly-banner') { 72 | const { fontPath } = layer; 73 | return [ 74 | await handleLayer({ type: 'linear-gradient' }), 75 | await handleLayer({ fontPath, type: 'title', text: 'Made with\nEDITLY\nmifi.no' }), 76 | ]; 77 | } 78 | 79 | // For convenience 80 | if (type === 'title-background') { 81 | const { text, textColor, background, fontFamily, fontPath } = layer; 82 | const outLayers = []; 83 | if (background) { 84 | if (background.type === 'radial-gradient') outLayers.push(await handleLayer({ type: 'radial-gradient', colors: background.colors })); 85 | else if (background.type === 'linear-gradient') outLayers.push(await handleLayer({ type: 'linear-gradient', colors: background.colors })); 86 | else if (background.color) outLayers.push(await handleLayer({ type: 'fill-color', color: background.color })); 87 | } else { 88 | const backgroundTypes = ['radial-gradient', 'linear-gradient', 'fill-color']; 89 | const randomType = backgroundTypes[Math.floor(Math.random() * backgroundTypes.length)]; 90 | outLayers.push(await handleLayer({ type: randomType })); 91 | } 92 | outLayers.push(await handleLayer({ type: 'title', fontFamily, fontPath, text, textColor })); 93 | return outLayers; 94 | } 95 | 96 | if (type === 'title' || type === 'subtitle') { 97 | assert(layer.text, 'Please specify a text'); 98 | 99 | let { fontFamily } = layer; 100 | const { fontPath, ...rest } = layer; 101 | if (fontPath) { 102 | fontFamily = Buffer.from(basename(fontPath)).toString('base64'); 103 | if (!loadedFonts.includes(fontFamily)) { 104 | registerFont(fontPath, { family: fontFamily, weight: 'regular', style: 'normal' }); 105 | loadedFonts.push(fontFamily); 106 | } 107 | } 108 | return { ...rest, fontFamily }; 109 | } 110 | 111 | throw new Error(`Invalid layer type ${type}`); 112 | } 113 | 114 | const clips = await pMap(clipsIn, async (clip, clipIndex) => { 115 | const { transition: userTransition, duration: userDuration, layers } = clip; 116 | 117 | checkTransition(userTransition); 118 | 119 | const videoLayers = layers.filter((layer) => layer.type === 'video'); 120 | assert(videoLayers.length <= 1, 'Max 1 video per layer'); 121 | 122 | const userOrDefaultDuration = userDuration || defaults.duration; 123 | if (videoLayers.length === 0) assert(userOrDefaultDuration, `Duration is required for clip ${clipIndex}`); 124 | 125 | let duration = userOrDefaultDuration; 126 | 127 | const layersOut = flatMap(await pMap(layers, async (layerIn) => { 128 | const layer = { ...defaults.layer, ...layerIn }; 129 | const { type } = layer; 130 | 131 | if (type === 'video') { 132 | const { cutFrom: 
cutFromIn, cutTo: cutToIn, path } = layer; 133 | const fileInfo = await readFileInfo(path); 134 | const { duration: fileDuration, width: widthIn, height: heightIn, framerateStr, rotation } = fileInfo; 135 | let cutFrom; 136 | let cutTo; 137 | let trimmedSourceDuration = fileDuration; 138 | if (cutFromIn != null || cutToIn != null) { 139 | cutFrom = Math.min(Math.max(0, cutFromIn || 0), fileDuration); 140 | cutTo = Math.min(Math.max(cutFrom, cutToIn || fileDuration), fileDuration); 141 | assert(cutFrom < cutTo, 'cutFrom must be lower than cutTo'); 142 | 143 | trimmedSourceDuration = cutTo - cutFrom; 144 | } 145 | 146 | // If user specified duration, means that should be the output duration 147 | let framePtsFactor; 148 | if (userDuration) { 149 | duration = userDuration; 150 | framePtsFactor = userDuration / trimmedSourceDuration; 151 | } else { 152 | duration = trimmedSourceDuration; 153 | framePtsFactor = 1; 154 | } 155 | 156 | const isRotated = rotation === 90 || rotation === 270; 157 | const width = isRotated ? heightIn : widthIn; 158 | const height = isRotated ? widthIn : heightIn; 159 | 160 | return { ...layer, cutFrom, cutTo, width, height, framerateStr, framePtsFactor }; 161 | } 162 | 163 | return handleLayer(layer); 164 | }, { concurrency: 1 })); 165 | 166 | const transition = calcTransition(defaults, userTransition); 167 | 168 | return { 169 | transition, 170 | duration, 171 | layers: layersOut, 172 | }; 173 | }, { concurrency: 1 }); 174 | 175 | if (verbose) console.log(JSON5.stringify(clips, null, 2)); 176 | 177 | // Try to detect parameters from first video 178 | let detectedWidth; 179 | let detectedHeight; 180 | let firstVideoFramerateStr; 181 | 182 | clips.find((clip) => clip && clip.layers.find((layer) => { 183 | if (layer.type === 'video') { 184 | detectedWidth = layer.width; 185 | detectedHeight = layer.height; 186 | firstVideoFramerateStr = layer.framerateStr; 187 | 188 | return true; 189 | } 190 | return false; 191 | })); 192 | 193 | let width; 194 | let height; 195 | 196 | let desiredWidth; 197 | 198 | if (fast) desiredWidth = 320; 199 | else if (requestedWidth) desiredWidth = requestedWidth; 200 | else if (isGif) desiredWidth = 320; 201 | 202 | if (detectedWidth && detectedHeight) { 203 | if (desiredWidth) { 204 | const calculatedHeight = Math.round((detectedHeight / detectedWidth) * desiredWidth); 205 | height = isGif ? calculatedHeight : multipleOf2(calculatedHeight); // x264 requires multiple of 2 206 | width = desiredWidth; 207 | } else { 208 | width = detectedWidth; 209 | height = detectedHeight; 210 | } 211 | } else if (desiredWidth) { 212 | width = desiredWidth; 213 | height = desiredWidth; 214 | // console.log(`Cannot detect width/height from video, set defaults ${width}x${height}`); 215 | } else { 216 | // No video 217 | width = 640; 218 | height = 640; 219 | } 220 | 221 | // User override? 
222 | if (!fast && requestedWidth && requestedHeight) { 223 | width = requestedWidth; 224 | height = requestedHeight; 225 | } 226 | 227 | assert(width, 'Width not specified or detected'); 228 | assert(height, 'Height not specified or detected'); 229 | 230 | let fps; 231 | let framerateStr; 232 | 233 | if (fast) { 234 | fps = 15; 235 | framerateStr = String(fps); 236 | } else if (requestedFps && typeof requestedFps === 'number') { 237 | fps = requestedFps; 238 | framerateStr = String(requestedFps); 239 | } else if (isGif) { 240 | fps = 10; 241 | framerateStr = String(fps); 242 | } else if (firstVideoFramerateStr) { 243 | fps = parseFps(firstVideoFramerateStr); 244 | framerateStr = firstVideoFramerateStr; 245 | } else { 246 | fps = 25; 247 | framerateStr = String(fps); 248 | } 249 | 250 | assert(fps, 'FPS not specified or detected'); 251 | 252 | console.log(`${width}x${height} ${fps}fps`); 253 | 254 | const estimatedTotalFrames = fps * clips.reduce((acc, c, i) => { 255 | let newAcc = acc + c.duration; 256 | if (i !== clips.length - 1) newAcc -= c.transition.duration; 257 | return newAcc; 258 | }, 0); 259 | 260 | const channels = 4; 261 | 262 | const { runTransitionOnFrame } = GlTransitions({ width, height, channels }); 263 | 264 | function startFfmpegWriterProcess() { 265 | // https://superuser.com/questions/556029/how-do-i-convert-a-video-to-gif-using-ffmpeg-with-reasonable-quality 266 | const outputArgs = isGif ? [ 267 | '-vf', 268 | `fps=${fps},scale=${width}:${height}:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse`, 269 | '-loop', 0, 270 | '-y', outPath, 271 | ] : [ 272 | '-vf', 'format=yuv420p', 273 | '-vcodec', 'libx264', 274 | '-profile:v', 'high', 275 | ...(fast ? ['-preset:v', 'ultrafast'] : ['-preset:v', 'medium']), 276 | '-crf', '18', 277 | 278 | '-movflags', 'faststart', 279 | '-y', outPath, 280 | ]; 281 | 282 | const args = [ 283 | ...(enableFfmpegLog ? [] : ['-hide_banner', '-loglevel', 'panic']), 284 | 285 | '-f', 'rawvideo', 286 | '-vcodec', 'rawvideo', 287 | '-pix_fmt', 'rgba', 288 | '-s', `${width}x${height}`, 289 | '-r', framerateStr, 290 | '-i', '-', 291 | 292 | ...(audioFilePath ? ['-i', audioFilePath, '-shortest'] : []), 293 | 294 | '-map', '0:v:0', 295 | ...(audioFilePath ? ['-map', '1:a:0'] : []), 296 | 297 | ...(audioFilePath ? 
['-acodec', 'aac', '-b:a', '128k'] : []), 298 | 299 | ...outputArgs, 300 | ]; 301 | if (verbose) console.log('ffmpeg', args.join(' ')); 302 | return execa('ffmpeg', args, { encoding: null, buffer: false, stdin: 'pipe', stdout: process.stdout, stderr: process.stderr }); 303 | } 304 | 305 | let outProcess; 306 | let frameSource1; 307 | let frameSource2; 308 | 309 | try { 310 | outProcess = startFfmpegWriterProcess(); 311 | 312 | let totalFrameCount = 0; 313 | let fromClipFrameCount = 0; 314 | let toClipFrameCount = 0; 315 | 316 | let transitionFromClipId = 0; 317 | 318 | const getTransitionToClipId = () => transitionFromClipId + 1; 319 | const getTransitionFromClip = () => clips[transitionFromClipId]; 320 | const getTransitionToClip = () => clips[getTransitionToClipId()]; 321 | 322 | const getSource = (clip, clipIndex) => createFrameSource({ clip, clipIndex, width, height, channels, verbose, enableFfmpegLog, framerateStr }); 323 | 324 | const getTransitionToSource = async () => (getTransitionToClip() && getSource(getTransitionToClip(), getTransitionToClipId())); 325 | frameSource1 = await getSource(getTransitionFromClip(), transitionFromClipId); 326 | frameSource2 = await getTransitionToSource(); 327 | 328 | // eslint-disable-next-line no-constant-condition 329 | while (true) { 330 | const fromClipNumFrames = Math.round(getTransitionFromClip().duration * fps); 331 | const toClipNumFrames = getTransitionToClip() && Math.round(getTransitionToClip().duration * fps); 332 | const fromClipProgress = fromClipFrameCount / fromClipNumFrames; 333 | const toClipProgress = getTransitionToClip() && toClipFrameCount / toClipNumFrames; 334 | const frameData1 = await frameSource1.readNextFrame(fromClipProgress); 335 | 336 | const clipTransition = getTransitionFromClip().transition; 337 | 338 | const transitionNumFrames = Math.round(clipTransition.duration * fps); 339 | 340 | // Each clip has two transitions, make sure we leave enough room: 341 | const transitionNumFramesSafe = Math.floor(Math.min(Math.min(fromClipNumFrames, toClipNumFrames != null ? toClipNumFrames : Number.MAX_SAFE_INTEGER) / 2, transitionNumFrames)); 342 | // How many frames into the transition are we? 
negative means not yet started 343 | const transitionFrameAt = fromClipFrameCount - (fromClipNumFrames - transitionNumFramesSafe); 344 | 345 | if (verbose) console.log('Frame', totalFrameCount, 'from', fromClipFrameCount, `(clip ${transitionFromClipId})`, 'to', toClipFrameCount, `(clip ${getTransitionToClipId()})`); 346 | 347 | if (!verbose) { 348 | const percentDone = Math.floor(100 * (totalFrameCount / estimatedTotalFrames)); 349 | if (totalFrameCount % 10 === 0) process.stdout.write(`${String(percentDone).padStart(3, ' ')}% `); 350 | } 351 | 352 | if (!frameData1 || transitionFrameAt >= transitionNumFramesSafe - 1) { 353 | // if (!frameData1 || transitionFrameAt >= transitionNumFramesSafe) { 354 | console.log('Done with transition, switching to next clip'); 355 | transitionFromClipId += 1; 356 | 357 | if (!getTransitionFromClip()) { 358 | console.log('No more transitionFromClip, done'); 359 | break; 360 | } 361 | 362 | // Cleanup old, swap and load next 363 | await frameSource1.close(); 364 | frameSource1 = frameSource2; 365 | frameSource2 = await getTransitionToSource(); 366 | 367 | fromClipFrameCount = transitionNumFramesSafe; 368 | toClipFrameCount = 0; 369 | } else { 370 | let outFrameData; 371 | if (frameSource2 && transitionFrameAt >= 0) { 372 | if (verbose) console.log('Transition', 'frame', transitionFrameAt, '/', transitionNumFramesSafe, clipTransition.name, `${clipTransition.duration}s`); 373 | 374 | const frameData2 = await frameSource2.readNextFrame(toClipProgress); 375 | toClipFrameCount += 1; 376 | 377 | if (frameData2) { 378 | const progress = transitionFrameAt / transitionNumFramesSafe; 379 | const easedProgress = clipTransition.easingFunction(progress); 380 | 381 | if (verbose) console.time('runTransitionOnFrame'); 382 | outFrameData = runTransitionOnFrame({ fromFrame: frameData1, toFrame: frameData2, progress: easedProgress, transitionName: clipTransition.name, transitionParams: clipTransition.params }); 383 | if (verbose) console.timeEnd('runTransitionOnFrame'); 384 | } else { 385 | console.warn('Got no frame data from clip 2!'); 386 | // We have reached end of clip2 but transition is not complete 387 | // Pass thru 388 | // TODO improve, maybe cut it short 389 | outFrameData = frameData1; 390 | } 391 | } else { 392 | outFrameData = frameData1; 393 | } 394 | 395 | // If we don't await we get EINVAL when dealing with high resolution files (big writes) 396 | await new Promise((r) => outProcess.stdin.write(outFrameData, () => r())); 397 | // outProcess.stdin.write(outFrameData); 398 | 399 | fromClipFrameCount += 1; 400 | } 401 | 402 | totalFrameCount += 1; 403 | } 404 | 405 | outProcess.stdin.end(); 406 | 407 | console.log('Done. Output file can be found at:'); 408 | console.log(outPath); 409 | } catch (err) { 410 | console.error('Loop failed', err); 411 | if (outProcess) { 412 | outProcess.kill(); 413 | } 414 | } finally { 415 | if (frameSource1) await frameSource1.close(); 416 | if (frameSource2) await frameSource2.close(); 417 | } 418 | 419 | try { 420 | await outProcess; 421 | } catch (err) { 422 | if (!err.killed) throw err; 423 | } 424 | }; 425 | --------------------------------------------------------------------------------
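
A minimal, runnable sketch tying together the Javascript API and the edit-spec structure documented in README.md above. The input file names (`./clip1.mov`, `./img1.jpg`) are hypothetical placeholders, and the chosen values (`fade` transition, 3-second default duration) are only examples; every field follows the documented spec.

```js
// Hypothetical end-to-end usage sketch of editly's Javascript API.
// Assumes editly is installed and that ./clip1.mov and ./img1.jpg exist (placeholder files).
const editly = require('editly');

const editSpec = {
  outPath: './out.mp4',
  // width, height and fps are omitted, so they are detected from the first video
  defaults: {
    duration: 3, // clips without their own duration will last 3 seconds
    transition: { name: 'fade', duration: 0.5 },
  },
  clips: [
    { layers: [{ type: 'title-background', text: 'My video' }] },
    { layers: [{ type: 'video', path: './clip1.mov', cutFrom: 0, cutTo: 2 }] },
    { layers: [{ type: 'image', path: './img1.jpg', zoomDirection: 'in' }] },
  ],
};

editly(editSpec).catch(console.error);
```

Running this concatenates the three clips with 0.5-second fade transitions, scaling and letterboxing each input to the detected output dimensions as described in index.js above.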