├── .dockerignore ├── .github ├── FUNDING.yml ├── ISSUE_TEMPLATE.md ├── dependabot.yml └── workflows │ ├── ci.yml │ ├── examples.yml │ └── release.yml ├── .gitignore ├── .husky └── pre-commit ├── .npmignore ├── .prettierignore ├── .prettierrc ├── Dockerfile ├── LICENSE ├── README.md ├── docker-compose.yml ├── eslint.config.mjs ├── examples ├── README.md ├── alpha.json5 ├── audio-transition.json5 ├── audio-volume.json5 ├── audio1.json5 ├── audio2.json5 ├── audio3.json5 ├── audioLoop.json5 ├── commonFeatures.json5 ├── contain-blur.json5 ├── customCanvas.ts ├── customFabric.ts ├── customOutputArgs.json5 ├── fabricImagePostProcessing.ts ├── gl.json5 ├── gradients.json5 ├── image.json5 ├── imageOverlay.json5 ├── kenBurns.json5 ├── mosaic.json5 ├── newsTitle.json5 ├── ph.json5 ├── pip.json5 ├── position.json5 ├── remote.json5 ├── renderSingleFrame.ts ├── run ├── single.json5 ├── slideInText.json5 ├── smartFit.json5 ├── speedTest.json5 ├── subtitle.json5 ├── timeoutTest.json5 ├── transitionEasing.json5 ├── transitions.json5 ├── transparentGradient.json5 ├── videos.json5 ├── videos2.json5 ├── vignette.json5 └── visibleFromUntil.json5 ├── logo.png ├── package.json ├── shaders └── rainbow-colors.frag ├── src ├── BoxBlur.d.ts ├── BoxBlur.js ├── api │ ├── defineFrameSource.ts │ └── index.ts ├── audio.ts ├── cli.ts ├── colors.ts ├── configuration.ts ├── easings.ts ├── ffmpeg.ts ├── frameSource.ts ├── index.ts ├── parseConfig.ts ├── sources │ ├── canvas.ts │ ├── fabric.ts │ ├── fill-color.ts │ ├── gl.ts │ ├── image-overlay.ts │ ├── image.ts │ ├── index.ts │ ├── linear-gradient.ts │ ├── news-title.ts │ ├── radial-gradient.ts │ ├── slide-in-text.ts │ ├── subtitle.ts │ ├── title.ts │ └── video.ts ├── transforms │ └── rawVideoToFrames.ts ├── transition.ts ├── types.ts ├── types │ ├── gl-buffer.d.ts │ ├── gl-texture2d.d.ts │ ├── gl-transition.d.ts │ └── gl-transitions.d.ts └── util.ts ├── test ├── configuration.test.ts ├── integration.test.ts ├── transforms │ └── rawVideoToFrames.test.ts └── transition.test.ts └── tsconfig.json /.dockerignore: -------------------------------------------------------------------------------- 1 | node_modules 2 | examples/assets 3 | -------------------------------------------------------------------------------- /.github/FUNDING.yml: -------------------------------------------------------------------------------- 1 | github: mifi 2 | custom: https://mifi.no/thanks 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE.md: -------------------------------------------------------------------------------- 1 | - [ ] I have tried with the newest version of editly: `npm i -g editly` or `npm i editly@latest` 2 | - [ ] I have tried ffmpeg newest stable version 3 | - [ ] I have searched for existing issues 4 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "npm" 4 | directory: "/" 5 | schedule: 6 | interval: "weekly" 7 | open-pull-requests-limit: 100 8 | - package-ecosystem: "github-actions" 9 | directory: "/" 10 | schedule: 11 | interval: "weekly" 12 | open-pull-requests-limit: 100 13 | -------------------------------------------------------------------------------- /.github/workflows/ci.yml: -------------------------------------------------------------------------------- 1 | name: CI 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 
7 | schedule: 8 | - cron: "0 0 * * *" # Once/day to test new node versions 9 | 10 | jobs: 11 | # Generate build matrix from endoflife.date 12 | # Copied from https://github.com/bkeepers/dotenv/pull/521 13 | versions: 14 | name: Get latest versions 15 | runs-on: ubuntu-latest 16 | strategy: 17 | matrix: 18 | product: ["nodejs"] 19 | outputs: 20 | nodejs: ${{ steps.supported.outputs.nodejs }} 21 | steps: 22 | - id: supported 23 | run: | 24 | product="${{ matrix.product }}" 25 | data=$(curl https://endoflife.date/api/$product.json) 26 | supported=$(echo $data | jq '[.[] | select(.lts != false and .eol > (now | strftime("%Y-%m-%d")))]') 27 | echo "${product}=$(echo $supported | jq -c 'map(.latest)')" >> $GITHUB_OUTPUT 28 | test: 29 | needs: versions 30 | strategy: 31 | fail-fast: false 32 | matrix: 33 | os: [ubuntu-latest, macos-latest] 34 | nodejs: ${{ fromJSON(needs.versions.outputs.nodejs) }} 35 | runs-on: ${{ matrix.os }} 36 | env: 37 | DISPLAY: :0 38 | steps: 39 | - if: runner.os == 'macOS' 40 | run: | 41 | brew install \ 42 | cairo \ 43 | ffmpeg \ 44 | giflib \ 45 | jpeg \ 46 | libpng \ 47 | librsvg \ 48 | pango \ 49 | pixman \ 50 | pkg-config \ 51 | python-setuptools 52 | - if: runner.os == 'Linux' 53 | run: | 54 | sudo apt-get update && sudo apt-get install -y \ 55 | build-essential \ 56 | ffmpeg \ 57 | libcairo2-dev \ 58 | libgif-dev \ 59 | libglew-dev \ 60 | libglu1-mesa-dev \ 61 | libjpeg-dev \ 62 | libpango1.0-dev \ 63 | librsvg2-dev \ 64 | libxi-dev \ 65 | pkg-config 66 | - if: runner.os == 'Linux' 67 | run: sudo /usr/bin/Xvfb $DISPLAY -screen 0 1280x1024x24 & 68 | - uses: actions/checkout@v4 69 | - uses: actions/setup-node@v4 70 | with: 71 | node-version: ${{ matrix.nodejs }} 72 | cache: "npm" 73 | cache-dependency-path: ./package.json 74 | - run: npm install 75 | - run: npm run build 76 | - run: npm run lint 77 | - run: npm run test 78 | - uses: actions/upload-artifact@v4 79 | with: 80 | name: video-${{ runner.os }}-${{ matrix.nodejs }} 81 | path: editly-out.mp4 82 | compression-level: 0 83 | if-no-files-found: error 84 | docker: 85 | runs-on: ubuntu-latest 86 | steps: 87 | - uses: actions/checkout@v4 88 | - uses: actions/checkout@v4 89 | with: 90 | repository: mifi/editly-assets 91 | path: examples/assets 92 | - uses: actions/setup-node@v4 93 | with: 94 | node-version: ${{ matrix.nodejs }} 95 | cache: "npm" 96 | cache-dependency-path: ./package.json 97 | - run: docker compose build 98 | - run: docker compose up -d 99 | - run: docker compose run editly bash -c "cd examples && ./run audio1.json5" 100 | -------------------------------------------------------------------------------- /.github/workflows/examples.yml: -------------------------------------------------------------------------------- 1 | name: Examples 2 | 3 | on: 4 | push: 5 | branches: [main] 6 | pull_request: 7 | 8 | jobs: 9 | list-examples: 10 | runs-on: ubuntu-latest 11 | outputs: 12 | matrix: ${{ steps.set-matrix.outputs.matrix }} 13 | steps: 14 | - uses: actions/checkout@v4 15 | - id: set-matrix 16 | run: echo "::set-output name=matrix::$(cd examples && ls *.{ts,json5} | jq -R -s -c 'split("\n")[:-1]')" 17 | examples: 18 | needs: list-examples 19 | runs-on: ubuntu-latest 20 | strategy: 21 | matrix: 22 | example: ${{ fromJson(needs.list-examples.outputs.matrix) }} 23 | fail-fast: false 24 | name: ${{ matrix.example }} 25 | env: 26 | DISPLAY: :0 27 | steps: 28 | - run: | 29 | sudo apt-get update && sudo apt-get install -y \ 30 | build-essential \ 31 | ffmpeg \ 32 | libcairo2-dev \ 33 | libgif-dev \ 34 | libglew-dev 
\ 35 | libglu1-mesa-dev \ 36 | libjpeg-dev \ 37 | libpango1.0-dev \ 38 | librsvg2-dev \ 39 | libxi-dev \ 40 | pkg-config 41 | - run: sudo /usr/bin/Xvfb $DISPLAY -screen 0 1280x1024x24 & 42 | - uses: actions/checkout@v4 43 | - uses: actions/checkout@v4 44 | with: 45 | repository: mifi/editly-assets 46 | path: examples/assets 47 | - uses: actions/setup-node@v4 48 | with: 49 | node-version: 22 50 | cache: "npm" 51 | cache-dependency-path: ./package.json 52 | - run: npm install && npm run build 53 | - run: | 54 | cd examples 55 | ./run "${{ matrix.example }}" 56 | - uses: actions/upload-artifact@v4 57 | with: 58 | name: ${{ matrix.example }} 59 | path: | 60 | examples/*.png 61 | examples/*.gif 62 | examples/*.mp4 63 | examples/*.webp 64 | compression-level: 0 65 | if-no-files-found: error 66 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Release 2 | 3 | on: 4 | push: 5 | branches: 6 | - "master" 7 | tags: 8 | - "v*" 9 | 10 | jobs: 11 | docker: 12 | name: Push Docker image to Docker Hub 13 | runs-on: ubuntu-latest 14 | permissions: 15 | packages: write 16 | contents: read 17 | attestations: write 18 | id-token: write 19 | steps: 20 | - name: Check out the repo 21 | uses: actions/checkout@v4 22 | 23 | - name: Set up QEMU 24 | uses: docker/setup-qemu-action@v3 25 | 26 | - name: Set up Docker Buildx 27 | uses: docker/setup-buildx-action@v3 28 | 29 | - name: Docker meta 30 | id: meta 31 | uses: docker/metadata-action@v5 32 | with: 33 | images: editly/editly 34 | # generate Docker tags based on the following events/attributes 35 | tags: | 36 | type=schedule 37 | type=ref,event=branch 38 | type=ref,event=pr 39 | type=semver,pattern={{version}} 40 | type=semver,pattern={{major}}.{{minor}} 41 | type=semver,pattern={{major}} 42 | type=sha 43 | 44 | - name: Login to DockerHub 45 | uses: docker/login-action@v3 46 | with: 47 | username: ${{ secrets.DOCKERHUB_USERNAME }} 48 | password: ${{ secrets.DOCKERHUB_TOKEN }} 49 | 50 | - name: Build and push 51 | uses: docker/build-push-action@v6 52 | with: 53 | context: . 
54 | platforms: linux/amd64,linux/arm64 55 | push: true 56 | tags: ${{ steps.meta.outputs.tags }} 57 | labels: ${{ steps.meta.outputs.labels }} 58 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Logs 2 | logs 3 | *.log 4 | npm-debug.log* 5 | yarn-debug.log* 6 | yarn-error.log* 7 | lerna-debug.log* 8 | 9 | # Diagnostic reports (https://nodejs.org/api/report.html) 10 | report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json 11 | 12 | # Runtime data 13 | pids 14 | *.pid 15 | *.seed 16 | *.pid.lock 17 | 18 | # Directory for instrumented libs generated by jscoverage/JSCover 19 | lib-cov 20 | 21 | # Coverage directory used by tools like istanbul 22 | coverage 23 | *.lcov 24 | 25 | # nyc test coverage 26 | .nyc_output 27 | 28 | # Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) 29 | .grunt 30 | 31 | # Bower dependency directory (https://bower.io/) 32 | bower_components 33 | 34 | # node-waf configuration 35 | .lock-wscript 36 | 37 | # Compiled binary addons (https://nodejs.org/api/addons.html) 38 | build/Release 39 | 40 | # Dependency directories 41 | node_modules/ 42 | jspm_packages/ 43 | 44 | # TypeScript v1 declaration files 45 | typings/ 46 | 47 | # TypeScript cache 48 | *.tsbuildinfo 49 | 50 | # Optional npm cache directory 51 | .npm 52 | 53 | # Optional eslint cache 54 | .eslintcache 55 | 56 | # Microbundle cache 57 | .rpt2_cache/ 58 | .rts2_cache_cjs/ 59 | .rts2_cache_es/ 60 | .rts2_cache_umd/ 61 | 62 | # Optional REPL history 63 | .node_repl_history 64 | 65 | # Output of 'npm pack' 66 | *.tgz 67 | 68 | # Yarn Integrity file 69 | .yarn-integrity 70 | 71 | # dotenv environment variables file 72 | .env 73 | .env.test 74 | 75 | # parcel-bundler cache (https://parceljs.org/) 76 | .cache 77 | 78 | # Next.js build output 79 | .next 80 | 81 | # Nuxt.js build / generate output 82 | .nuxt 83 | dist 84 | 85 | # Gatsby files 86 | .cache/ 87 | # Comment in the public line in if your project uses Gatsby and *not* Next.js 88 | # https://nextjs.org/blog/next-9-1#public-directory-support 89 | # public 90 | 91 | # vuepress build output 92 | .vuepress/dist 93 | 94 | # Serverless directories 95 | .serverless/ 96 | 97 | # FuseBox cache 98 | .fusebox/ 99 | 100 | # DynamoDB Local files 101 | .dynamodb/ 102 | 103 | # TernJS port file 104 | .tern-port 105 | 106 | # Generated files and excluded assets repo 107 | /examples/assets 108 | editly-tmp-*/ 109 | *.mp4 110 | *.gif 111 | *.png 112 | 113 | # Other 114 | .idea 115 | pnpm-lock.yaml 116 | yarn.lock 117 | package-lock.json 118 | -------------------------------------------------------------------------------- /.husky/pre-commit: -------------------------------------------------------------------------------- 1 | npx lint-staged 2 | -------------------------------------------------------------------------------- /.npmignore: -------------------------------------------------------------------------------- 1 | /.github 2 | /eslint.config.mjs 3 | /tsconfig* 4 | /test/ 5 | /Dockerfile 6 | /docker-compose.yml 7 | .dockerignore 8 | /examples 9 | /.cache 10 | /yarn-error.log 11 | editly-tmp-*/ 12 | *.{mp4,png,webm,gif} 13 | examples/assets 14 | mytodo.md 15 | -------------------------------------------------------------------------------- /.prettierignore: -------------------------------------------------------------------------------- 1 | src/BoxBlur.js 2 | shaders/rainbow-colors.frag 3 | 
-------------------------------------------------------------------------------- /.prettierrc: -------------------------------------------------------------------------------- 1 | { 2 | "$schema": "https://json.schemastore.org/prettierrc", 3 | "printWidth": 100, 4 | "plugins": ["prettier-plugin-organize-imports"] 5 | } 6 | -------------------------------------------------------------------------------- /Dockerfile: -------------------------------------------------------------------------------- 1 | FROM node:lts-bookworm AS build 2 | 3 | # Install dependencies for building canvas/gl 4 | RUN apt-get update -y 5 | 6 | RUN apt-get -y install \ 7 | build-essential \ 8 | libcairo2-dev \ 9 | libgif-dev \ 10 | libgl1-mesa-dev \ 11 | libglew-dev \ 12 | libglu1-mesa-dev \ 13 | libjpeg-dev \ 14 | libpango1.0-dev \ 15 | librsvg2-dev \ 16 | libxi-dev \ 17 | pkg-config \ 18 | python-is-python3 19 | 20 | WORKDIR /app 21 | 22 | # Install node dependencies 23 | COPY package.json ./ 24 | RUN npm install --no-fund --no-audit 25 | 26 | # Add app source 27 | COPY . . 28 | 29 | # Build TypeScript 30 | RUN npm run build 31 | 32 | # Prune dev dependencies 33 | RUN npm prune --omit=dev 34 | 35 | # Purge build dependencies 36 | RUN apt-get --purge autoremove -y \ 37 | build-essential \ 38 | libcairo2-dev \ 39 | libgif-dev \ 40 | libgl1-mesa-dev \ 41 | libglew-dev \ 42 | libglu1-mesa-dev \ 43 | libjpeg-dev \ 44 | libpango1.0-dev \ 45 | librsvg2-dev \ 46 | libxi-dev \ 47 | pkg-config \ 48 | python-is-python3 49 | 50 | # Remove Apt cache 51 | RUN rm -rf /var/lib/apt/lists/* /var/cache/apt/* 52 | 53 | # Final stage for app image 54 | FROM node:lts-bookworm 55 | 56 | # Install runtime dependencies 57 | RUN apt-get update -y \ 58 | && apt-get -y install ffmpeg dumb-init xvfb libcairo2 libpango1.0 libgif7 librsvg2-2 \ 59 | && rm -rf /var/lib/apt/lists/* /var/cache/apt/* 60 | 61 | WORKDIR /app 62 | COPY --from=build /app /app 63 | 64 | # Ensure `editly` binary available in container 65 | RUN npm link 66 | 67 | ENTRYPOINT ["/usr/bin/dumb-init", "--", "xvfb-run", "--server-args", "-screen 0 1280x1024x24 -ac"] 68 | CMD [ "editly" ] 69 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2020 Mikael Finstad 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /docker-compose.yml: -------------------------------------------------------------------------------- 1 | services: 2 | editly: 3 | container_name: editly 4 | image: editly/editly:latest 5 | build: 6 | context: . 7 | dockerfile: Dockerfile 8 | volumes: 9 | - "outputs:/outputs" 10 | - ./examples/assets/:/app/examples/assets/ 11 | 12 | volumes: 13 | outputs: 14 | -------------------------------------------------------------------------------- /eslint.config.mjs: -------------------------------------------------------------------------------- 1 | // @ts-check 2 | 3 | import eslint from "@eslint/js"; 4 | import eslintConfigPrettier from "eslint-config-prettier"; 5 | import tseslint from "typescript-eslint"; 6 | 7 | export default tseslint.config( 8 | eslint.configs.recommended, 9 | tseslint.configs.recommended, 10 | eslintConfigPrettier, 11 | { ignores: ["dist/"] }, 12 | ); 13 | -------------------------------------------------------------------------------- /examples/README.md: -------------------------------------------------------------------------------- 1 | # Examples 2 | 3 | This folder contains examples and tests for different features. 4 | 5 | How to run examples: 6 | 7 | ```bash 8 | git clone https://github.com/mifi/editly.git 9 | cd editly 10 | npm install 11 | npm run build 12 | cd examples 13 | git clone https://github.com/mifi/editly-assets.git assets 14 | ./run commonFeatures.json5 15 | ``` 16 | 17 | ## Image slideshow with Ken Burns zoom 18 | 19 | ![](https://github.com/mifi/gifs/raw/master/kenburns.gif) 20 | 21 | [kenBurns.json5](https://github.com/mifi/editly/blob/master/examples/kenBurns.json5) 22 | 23 | ## News title 24 | 25 | ![](https://github.com/mifi/gifs/raw/master/newsTitle.gif) 26 | 27 | [newsTitle.json5](https://github.com/mifi/editly/blob/master/examples/newsTitle.json5) 28 | 29 | ## Resize modes 30 | 31 | ![](https://github.com/mifi/gifs/raw/master/resizeHorizontal.gif) 32 | 33 | [resizeHorizontal.json5](https://github.com/mifi/editly/blob/master/examples/resizeHorizontal.json5) 34 | 35 | ## Speed up / slow down with cutting 36 | 37 | ![](https://github.com/mifi/gifs/raw/master/speedTest.gif) 38 | 39 | [speedTest.json5](https://github.com/mifi/editly/blob/master/examples/speedTest.json5) 40 | 41 | ## Title and subtitle 42 | 43 | ![](https://github.com/mifi/gifs/raw/master/subtitle.gif) 44 | 45 | [subtitle.json5](https://github.com/mifi/editly/blob/master/examples/subtitle.json5) 46 | 47 | ## Video overlays with alpha channel 48 | 49 | [alpha.json5](https://github.com/mifi/editly/blob/master/examples/alpha.json5) 50 | 51 | ## Image overlays with alpha channel 52 | 53 | ![](https://github.com/mifi/gifs/raw/master/imageOverlay.gif) 54 | 55 | [imageOverlay.json5](https://github.com/mifi/editly/blob/master/examples/imageOverlay.json5) 56 | 57 | ## Partial overlays (B-roll) 58 | 59 | [visibleFromUntil.json5](https://github.com/mifi/editly/blob/master/examples/visibleFromUntil.json5) 60 | 61 | ## Audio layers 62 | 63 | - [audio1.json5](https://github.com/mifi/editly/blob/master/examples/audio1.json5) 64 | - [audio2.json5](https://github.com/mifi/editly/blob/master/examples/audio2.json5) 65 | 66 | ## Custom HTML5 canvas JavaScript 67 | 68 | ![](https://github.com/mifi/gifs/raw/master/customCanvas.gif) 69 | 70 | [customCanvas.ts](https://github.com/mifi/editly/blob/master/examples/customCanvas.ts) 71 | 72 | ```bash 73 | npx tsx customCanvas.ts 74 | ``` 75 | 76 | ## Custom Fabric.js 77 | 78 | 
![](https://github.com/mifi/gifs/raw/master/customFabric.gif) 79 | 80 | [customFabric.ts](https://github.com/mifi/editly/blob/master/examples/customFabric.ts) 81 | 82 | ```bash 83 | npx tsx customFabric.ts 84 | ``` 85 | 86 | ## `fabricImagePostProcessing` 87 | 88 | https://user-images.githubusercontent.com/907138/214545896-ab420beb-bd50-4974-9bad-9657e4f0c849.mp4 89 | 90 | [fabricImagePostProcessing.ts](fabricImagePostProcessing.ts) 91 | 92 | ## LosslessCut tutorial 93 | 94 | [This video](https://www.youtube.com/watch?v=pYHMxXy05Jg) was created with [losslesscut.json5](https://github.com/mifi/editly/blob/master/examples/losslesscut.json5) 95 | -------------------------------------------------------------------------------- /examples/alpha.json5: -------------------------------------------------------------------------------- 1 | { 2 | // enableFfmpegLog: true, 3 | outPath: "./alpha.mp4", 4 | clips: [ 5 | { 6 | duration: 2, 7 | layers: [ 8 | { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }, 9 | { 10 | type: "video", 11 | path: "./assets/dancer1.webm", 12 | resizeMode: "contain", 13 | cutFrom: 0, 14 | cutTo: 6, 15 | }, 16 | ], 17 | }, 18 | { 19 | layers: [ 20 | { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }, 21 | { type: "video", path: "./assets/dancer1.webm", resizeMode: "contain" }, 22 | ], 23 | }, 24 | ], 25 | } 26 | -------------------------------------------------------------------------------- /examples/audio-transition.json5: -------------------------------------------------------------------------------- 1 | { 2 | // enableFfmpegLog: true, 3 | outPath: "./audio-transition.mp4", 4 | keepSourceAudio: true, 5 | defaults: { 6 | duration: 3, 7 | transition: { duration: 1, name: "directional" }, 8 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 9 | }, 10 | clips: [ 11 | { 12 | layers: [ 13 | { type: "title-background", text: "Default transition" }, 14 | { type: "audio", path: "./assets/sample1.m4a" }, 15 | ], 16 | }, 17 | { 18 | transition: { duration: 0.2 }, 19 | layers: [ 20 | { type: "title-background", text: "Fast transition" }, 21 | { type: "audio", path: "./assets/sample2.m4a" }, 22 | ], 23 | }, 24 | { 25 | transition: { duration: 0 }, 26 | layers: [ 27 | { type: "title-background", text: "No transition" }, 28 | { type: "audio", path: "./assets/sample1.m4a" }, 29 | ], 30 | }, 31 | { 32 | transition: { audioInCurve: "exp", audioOutCurve: "exp" }, 33 | layers: [ 34 | { type: "title-background", text: "Exp curve" }, 35 | { type: "audio", path: "./assets/sample2.m4a" }, 36 | ], 37 | }, 38 | { 39 | transition: { name: "dummy" }, 40 | layers: [ 41 | { type: "title-background", text: "Dummy" }, 42 | { type: "audio", path: "./assets/sample1.m4a" }, 43 | ], 44 | }, 45 | { 46 | transition: { duration: 2 }, 47 | layers: [ 48 | { type: "title-background", text: "Too short" }, 49 | { type: "audio", path: "./assets/sample2.m4a" }, 50 | ], 51 | }, 52 | { 53 | duration: 1, 54 | transition: { duration: 2 }, 55 | layers: [ 56 | { type: "title-background", text: "Too short" }, 57 | { type: "audio", path: "./assets/sample2.m4a" }, 58 | ], 59 | }, 60 | { 61 | duration: 1, 62 | transition: { duration: 2 }, 63 | layers: [ 64 | { type: "title-background", text: "Too short" }, 65 | { type: "audio", path: "./assets/sample2.m4a" }, 66 | ], 67 | }, 68 | { 69 | layers: [ 70 | { type: "title-background", text: "THE END" }, 71 | { type: "audio", path: "./assets/sample2.m4a" }, 72 | ], 73 | }, 74 | ], 75 | } 76 | 
-------------------------------------------------------------------------------- /examples/audio-volume.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./audio-volume.mp4", 3 | width: 200, 4 | height: 200, 5 | clips: [{ duration: 2, layers: [{ type: "title-background", text: "Audio output volume" }] }], 6 | audioTracks: [ 7 | { 8 | path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", 9 | cutFrom: 18, 10 | }, 11 | ], 12 | outputVolume: "-10dB", 13 | } 14 | -------------------------------------------------------------------------------- /examples/audio1.json5: -------------------------------------------------------------------------------- 1 | { 2 | // enableFfmpegLog: true, 3 | outPath: "./audio1.mp4", 4 | keepSourceAudio: true, 5 | defaults: { 6 | transition: null, 7 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 8 | }, 9 | clips: [ 10 | { 11 | duration: 0.5, 12 | layers: [{ type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }], 13 | }, 14 | 15 | { 16 | layers: [ 17 | { type: "title-background", text: "test" }, 18 | { 19 | type: "audio", 20 | path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", 21 | cutFrom: 2, 22 | cutTo: 5, 23 | }, 24 | ], 25 | }, 26 | 27 | { 28 | layers: [ 29 | { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0, cutTo: 2, mixVolume: 0 }, 30 | { 31 | type: "audio", 32 | path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", 33 | mixVolume: 0.1, 34 | }, 35 | ], 36 | }, 37 | 38 | { 39 | duration: 2, 40 | layers: [ 41 | { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }, 42 | { 43 | type: "audio", 44 | path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", 45 | cutFrom: 2, 46 | cutTo: 3, 47 | mixVolume: 0.5, 48 | }, 49 | ], 50 | }, 51 | 52 | { 53 | duration: 1.8, 54 | layers: [{ type: "video", path: "./assets/lofoten.mp4", cutFrom: 1, cutTo: 2 }], 55 | }, 56 | ], 57 | } 58 | -------------------------------------------------------------------------------- /examples/audio2.json5: -------------------------------------------------------------------------------- 1 | { 2 | // enableFfmpegLog: true, 3 | outPath: "./audio2.mp4", 4 | width: 200, 5 | height: 200, 6 | defaults: { 7 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 8 | }, 9 | clips: [ 10 | { layers: [{ type: "video", path: "./assets/lofoten.mp4", cutFrom: 1, cutTo: 2 }] }, 11 | { duration: 15, layers: { type: "title-background", text: "Audio track" } }, 12 | ], 13 | audioNorm: { enable: true, gaussSize: 3, maxGain: 100 }, 14 | clipsAudioVolume: 50, 15 | audioTracks: [ 16 | { 17 | path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", 18 | cutFrom: 18, 19 | }, 20 | { path: "./assets/winxp.mp3", mixVolume: 10, cutFrom: 1, cutTo: 2, start: 2 }, 21 | { path: "./assets/Julen_ribas.m4a", mixVolume: 50, cutTo: 7, start: 5 }, 22 | ], 23 | } 24 | -------------------------------------------------------------------------------- /examples/audio3.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./audio3.mp4", 3 | width: 200, 4 | height: 200, 5 | defaults: { 6 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 7 | }, 8 | clips: [ 9 | { 10 | layers: [ 11 | { type: "video", path: "./assets/lofoten.mp4", cutTo: 2 }, 12 | { type: "title", text: "Arbitrary audio" }, 13 | ], 14 | }, 15 | { 16 | 
duration: 3, 17 | layers: [ 18 | { type: "title-background", text: "Voice starts in 1 sec" }, 19 | { 20 | type: "detached-audio", 21 | path: "./assets/Julen_ribas.m4a", 22 | mixVolume: 50, 23 | cutFrom: 2, 24 | start: 1, 25 | }, 26 | ], 27 | }, 28 | { duration: 1, layers: [{ type: "title-background", text: "Voice continues over clip 2" }] }, 29 | { duration: 3, layers: [{ type: "title-background", text: "Voice continues over clip 3" }] }, 30 | { 31 | duration: 2, 32 | layers: [ 33 | { type: "title-background", text: "XP sound starts" }, 34 | { type: "detached-audio", path: "./assets/winxp.mp3", mixVolume: 10, cutFrom: 0.5 }, 35 | ], 36 | }, 37 | ], 38 | audioNorm: { enable: true, gaussSize: 3, maxGain: 100 }, 39 | audioTracks: [ 40 | { 41 | path: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", 42 | cutFrom: 18, 43 | }, 44 | ], 45 | } 46 | -------------------------------------------------------------------------------- /examples/audioLoop.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./audioLoop.mp4", 3 | width: 200, 4 | height: 200, 5 | audioFilePath: "./assets/winxp.mp3", 6 | loopAudio: true, 7 | // Should properly cut off and not crash with EPIPE if loopAudio=false and audio duration is shorter than total duration 8 | // loopAudio: false, 9 | defaults: { 10 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 11 | }, 12 | clips: [{ duration: 10, layers: [{ type: "title-background", text: "Looping audio!" }] }], 13 | } 14 | -------------------------------------------------------------------------------- /examples/commonFeatures.json5: -------------------------------------------------------------------------------- 1 | { 2 | // width: 2166, height: 1650, fps: 30, 3 | width: 720, 4 | height: 1280, 5 | fps: 30, 6 | outPath: "./commonFeatures.mp4", 7 | // outPath: './commonFeatures.gif', 8 | audioFilePath: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", 9 | defaults: { 10 | transition: { name: "random" }, 11 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 12 | }, 13 | clips: [ 14 | { 15 | duration: 3, 16 | transition: { name: "directional-left" }, 17 | layers: [ 18 | { 19 | type: "title-background", 20 | text: "EDITLY\nVideo editing framework", 21 | background: { type: "linear-gradient", colors: ["#02aab0", "#00cdac"] }, 22 | }, 23 | ], 24 | }, 25 | { 26 | duration: 4, 27 | transition: { name: "dreamyzoom" }, 28 | layers: [ 29 | { 30 | type: "title-background", 31 | text: "Multi-line text with animated linear or radial gradients", 32 | background: { type: "radial-gradient" }, 33 | }, 34 | ], 35 | }, 36 | { 37 | duration: 3, 38 | transition: { name: "directional-right" }, 39 | layers: [{ type: "rainbow-colors" }, { type: "title", text: "Colorful backgrounds" }], 40 | }, 41 | { duration: 3, layers: [{ type: "pause" }, { type: "title", text: "and separators" }] }, 42 | 43 | { 44 | duration: 3, 45 | transition: { name: "fadegrayscale" }, 46 | layers: [ 47 | { 48 | type: "title-background", 49 | text: "Image slideshows with Ken Burns effect", 50 | background: { type: "linear-gradient" }, 51 | }, 52 | ], 53 | }, 54 | { 55 | duration: 2.5, 56 | transition: { name: "directionalWarp" }, 57 | layers: [{ type: "image", path: "./assets/vertical.jpg", zoomDirection: "out" }], 58 | }, 59 | { 60 | duration: 3, 61 | transition: { name: "dreamyzoom" }, 62 | layers: [ 63 | { type: "image", path: "./assets/img1.jpg", duration: 2.5, zoomDirection: "in" }, 64 | { 65 | 
type: "subtitle", 66 | text: "Indonesia has many spectacular locations. Here is the volcano Kelimutu, which has three lakes in its core, some days with three different colors!", 67 | }, 68 | { type: "title", position: "top", text: "With text" }, 69 | ], 70 | }, 71 | { 72 | duration: 3, 73 | transition: { name: "colorphase" }, 74 | layers: [ 75 | { type: "image", path: "./assets/img2.jpg", zoomDirection: "out" }, 76 | { 77 | type: "subtitle", 78 | text: "Komodo national park is the only home of the endangered Komodo dragons", 79 | }, 80 | ], 81 | }, 82 | { 83 | duration: 2.5, 84 | transition: { name: "simplezoom" }, 85 | layers: [{ type: "image", path: "./assets/img3.jpg", zoomDirection: "in" }], 86 | }, 87 | 88 | { 89 | duration: 1.5, 90 | transition: { name: "crosszoom", duration: 0.3 }, 91 | layers: [ 92 | { type: "video", path: "assets/kohlipe1.mp4", cutTo: 58 }, 93 | { type: "title", text: "Videos" }, 94 | ], 95 | }, 96 | { 97 | duration: 3, 98 | transition: { name: "fade" }, 99 | layers: [{ type: "video", path: "assets/kohlipe1.mp4", cutFrom: 58 }], 100 | }, 101 | { 102 | transition: { name: "fade" }, 103 | layers: [{ type: "video", path: "assets/kohlipe2.mp4", cutTo: 2.5 }], 104 | }, 105 | { 106 | duration: 1.5, 107 | layers: [{ type: "video", path: "assets/kohlipe3.mp4", cutFrom: 3, cutTo: 30 }], 108 | }, 109 | 110 | { 111 | duration: 3, 112 | transition: { name: "crosszoom" }, 113 | layers: [ 114 | { type: "gl", fragmentPath: "./assets/shaders/3l23Rh.frag" }, 115 | { type: "title", text: "OpenGL\nshaders" }, 116 | ], 117 | }, 118 | { duration: 3, layers: [{ type: "gl", fragmentPath: "./assets/shaders/MdXyzX.frag" }] }, 119 | { 120 | duration: 3, 121 | layers: [{ type: "gl", fragmentPath: "./assets/shaders/30daysofshade_010.frag" }], 122 | }, 123 | { 124 | duration: 3, 125 | layers: [{ type: "gl", fragmentPath: "./assets/shaders/wd2yDm.frag", speed: 5 }], 126 | }, 127 | 128 | { 129 | duration: 3, 130 | layers: [ 131 | { type: "image", path: "./assets/91083241_573589476840991_4224678072281051330_n.jpg" }, 132 | { type: "news-title", text: "BREAKING NEWS" }, 133 | { 134 | type: "subtitle", 135 | text: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. 
Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.", 136 | backgroundColor: "rgba(0,0,0,0.5)", 137 | }, 138 | ], 139 | }, 140 | 141 | { 142 | duration: 3, 143 | layers: [ 144 | { type: "rainbow-colors" }, 145 | { 146 | type: "video", 147 | path: "./assets/tungestolen.mp4", 148 | resizeMode: "contain", 149 | width: 0.4, 150 | height: 0.4, 151 | top: 0.05, 152 | left: 0.95, 153 | originY: "top", 154 | originX: "right", 155 | }, 156 | { type: "title", position: "bottom", text: "Picture-in-Picture" }, 157 | ], 158 | }, 159 | 160 | { duration: 3, layers: [{ type: "editly-banner" }] }, 161 | ], 162 | } 163 | -------------------------------------------------------------------------------- /examples/contain-blur.json5: -------------------------------------------------------------------------------- 1 | { 2 | width: 3000, 3 | height: 2000, 4 | fps: 15, 5 | outPath: "./contain-blur.mp4", 6 | defaults: { 7 | transition: null, 8 | }, 9 | clips: [ 10 | { 11 | duration: 0.3, 12 | layers: [{ type: "image", path: "./assets/vertical.jpg", zoomDirection: null }], 13 | }, 14 | { 15 | duration: 0.5, 16 | layers: [{ type: "video", path: "./assets/IMG_1884.MOV", cutFrom: 0, cutTo: 2 }], 17 | }, 18 | ], 19 | } 20 | -------------------------------------------------------------------------------- /examples/customCanvas.ts: -------------------------------------------------------------------------------- 1 | import type { CustomCanvasFunctionArgs, CustomCanvasFunctionCallbacks } from "editly"; 2 | import editly from "editly"; 3 | 4 | function func({ canvas }: CustomCanvasFunctionArgs): CustomCanvasFunctionCallbacks { 5 | return { 6 | async onRender(progress) { 7 | const context = canvas.getContext("2d"); 8 | const centerX = canvas.width / 2; 9 | const centerY = canvas.height / 2; 10 | const radius = 40 * (1 + progress * 0.5); 11 | 12 | context.beginPath(); 13 | context.arc(centerX, centerY, radius, 0, 2 * Math.PI, false); 14 | context.fillStyle = "hsl(350, 100%, 37%)"; 15 | context.fill(); 16 | context.lineWidth = 5; 17 | context.strokeStyle = "#ffffff"; 18 | context.stroke(); 19 | }, 20 | 21 | onClose() { 22 | // Cleanup if you initialized anything 23 | }, 24 | }; 25 | } 26 | 27 | await editly({ 28 | // fast: true, 29 | // outPath: './customCanvas.mp4', 30 | outPath: "./customCanvas.gif", 31 | clips: [ 32 | { 33 | duration: 2, 34 | layers: [{ type: "rainbow-colors" }, { type: "canvas", func }], 35 | }, 36 | ], 37 | }); 38 | -------------------------------------------------------------------------------- /examples/customFabric.ts: -------------------------------------------------------------------------------- 1 | import { registerFont } from "canvas"; 2 | import type { CustomFabricFunctionArgs, CustomFabricFunctionCallbacks } from "editly"; 3 | import editly from "editly"; 4 | 5 | registerFont("./assets/Patua_One/PatuaOne-Regular.ttf", { family: "Patua One" }); 6 | 7 | function func({ width, height, fabric }: CustomFabricFunctionArgs): CustomFabricFunctionCallbacks { 8 | return { 9 | async onRender(progress, canvas) { 10 | canvas.backgroundColor = "hsl(33, 100%, 50%)"; 11 | 12 | const text = new fabric.FabricText(`PROGRESS\n${Math.floor(progress * 100)}%`, { 13 | originX: "center", 14 | originY: "center", 15 | left: width / 2, 16 | top: (height / 2) * (1 + (progress * 0.1 - 0.05)), 17 | fontSize: 20, 18 | fontFamily: "Patua One", 19 | textAlign: "center", 20 | fill: "white", 21 | }); 22 | 23 | canvas.add(text); 24 | }, 25 | 26 | onClose() { 27 | // Cleanup if you 
initialized anything 28 | }, 29 | }; 30 | } 31 | 32 | await editly({ 33 | // fast: true, 34 | outPath: "./customFabric.gif", 35 | // outPath: './customFabric.mp4', 36 | clips: [{ duration: 2, layers: [{ type: "fabric", func }] }], 37 | }); 38 | -------------------------------------------------------------------------------- /examples/customOutputArgs.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./customOutputArgs.webp", 3 | clips: [{ duration: 2, layers: [{ type: "title-background", text: "Custom output args" }] }], 4 | customOutputArgs: ["-compression_level", "5", "-qscale", "60", "-vcodec", "libwebp"], 5 | } 6 | -------------------------------------------------------------------------------- /examples/fabricImagePostProcessing.ts: -------------------------------------------------------------------------------- 1 | import editly from "editly"; 2 | 3 | // See https://github.com/mifi/editly/pull/222 4 | 5 | await editly({ 6 | outPath: "./fabricImagePostProcessing.mp4", 7 | clips: [ 8 | { 9 | duration: 4, 10 | layers: [ 11 | { type: "video", path: "./assets/kohlipe1.mp4", cutFrom: 0, cutTo: 4 }, 12 | { 13 | type: "video", 14 | path: "./assets/kohlipe2.mp4", 15 | cutFrom: 0, 16 | cutTo: 4, 17 | resizeMode: "cover", 18 | originX: "center", 19 | originY: "center", 20 | left: 0.5, 21 | top: 0.5, 22 | width: 0.5, 23 | height: 0.5, 24 | fabricImagePostProcessing: async ({ image, fabric, canvas }) => { 25 | const circleArgs: ConstructorParameters<typeof fabric.Circle>[0] = { 26 | radius: Math.min(image.width, image.height) * 0.4, 27 | originX: "center", 28 | originY: "center", 29 | stroke: "white", 30 | strokeWidth: 22, 31 | }; 32 | image.set({ clipPath: new fabric.Circle(circleArgs) }); 33 | canvas.add( 34 | new fabric.Circle({ 35 | ...circleArgs, 36 | left: image.getCenterPoint().x, 37 | top: image.getCenterPoint().y, 38 | }), 39 | ); 40 | }, 41 | }, 42 | ], 43 | }, 44 | ], 45 | }); 46 | -------------------------------------------------------------------------------- /examples/gl.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./gl.mp4", 3 | clips: [ 4 | { 5 | transition: null, 6 | duration: 3, 7 | layers: [{ type: "gl", fragmentPath: "./assets/shaders/3l23Rh.frag" }], 8 | }, 9 | { duration: 3, layers: [{ type: "gl", fragmentPath: "./assets/shaders/MdXyzX.frag" }] }, 10 | { 11 | duration: 3, 12 | layers: [{ type: "gl", fragmentPath: "./assets/shaders/30daysofshade_010.frag", speed: 1 }], 13 | }, 14 | { 15 | duration: 3, 16 | layers: [{ type: "gl", fragmentPath: "./assets/shaders/rainbow-background.frag" }], 17 | }, 18 | { 19 | duration: 3, 20 | layers: [{ type: "gl", fragmentPath: "./assets/shaders/wd2yDm.frag", speed: 5 }], 21 | }, 22 | ], 23 | } 24 | -------------------------------------------------------------------------------- /examples/gradients.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./gradients.mp4", 3 | defaults: { 4 | transition: { name: "linearblur", duration: 0.1 }, 5 | }, 6 | clips: [ 7 | { duration: 1, layers: [{ type: "linear-gradient", colors: ["#02aab0", "#00cdac"] }] }, 8 | { duration: 1, layers: [{ type: "radial-gradient", colors: ["#b002aa", "#ac00cd"] }] }, 9 | { duration: 1, layers: [{ type: "linear-gradient" }] }, 10 | { duration: 1, layers: [{ type: "radial-gradient" }] }, 11 | ], 12 | } 13 | -------------------------------------------------------------------------------- /examples/image.json5: 
-------------------------------------------------------------------------------- 1 | { 2 | width: 600, 3 | height: 300, 4 | outPath: "./image.mp4", 5 | defaults: { 6 | transition: null, 7 | duration: 0.2, 8 | }, 9 | clips: [ 10 | { layers: [{ type: "image", path: "./assets/pano.jpg" }] }, 11 | { layers: [{ type: "image", path: "./assets/vertical.jpg" }] }, 12 | { 13 | layers: [ 14 | { type: "fill-color", color: "white" }, 15 | { type: "image", path: "./assets/pano.jpg", resizeMode: "contain" }, 16 | ], 17 | }, 18 | { 19 | layers: [ 20 | { type: "fill-color", color: "white" }, 21 | { type: "image", path: "./assets/vertical.jpg", resizeMode: "contain" }, 22 | ], 23 | }, 24 | { layers: [{ type: "image", path: "./assets/pano.jpg", resizeMode: "cover" }] }, 25 | { layers: [{ type: "image", path: "./assets/vertical.jpg", resizeMode: "cover" }] }, 26 | { layers: [{ type: "image", path: "./assets/pano.jpg", resizeMode: "stretch" }] }, 27 | { layers: [{ type: "image", path: "./assets/vertical.jpg", resizeMode: "stretch" }] }, 28 | ], 29 | } 30 | -------------------------------------------------------------------------------- /examples/imageOverlay.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./imageOverlay.mp4", 3 | clips: [ 4 | { 5 | layers: [ 6 | { type: "video", path: "./assets/changi.mp4", cutTo: 2 }, 7 | { 8 | type: "image-overlay", 9 | path: "./assets/overlay.svg", 10 | width: 0.2, 11 | position: { x: 0.95, y: 0.03, originX: "right" }, 12 | }, 13 | { type: "image-overlay", path: "./assets/emoji.png", stop: 0.5, zoomDirection: "in" }, 14 | { 15 | type: "image-overlay", 16 | path: "./assets/emoji2.svg", 17 | position: "top", 18 | start: 0.7, 19 | stop: 1.5, 20 | width: 0.2, 21 | }, 22 | { 23 | type: "image-overlay", 24 | path: "./assets/emoji2.svg", 25 | position: "bottom", 26 | start: 0.7, 27 | stop: 1.5, 28 | height: 0.2, 29 | }, 30 | ], 31 | }, 32 | ], 33 | } 34 | -------------------------------------------------------------------------------- /examples/kenBurns.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./kenBurns.mp4", 3 | defaults: { 4 | transition: { name: "fade" }, 5 | }, 6 | clips: [ 7 | { duration: 3, layers: [{ type: "image", path: "./assets/img2.jpg", zoomDirection: "out" }] }, 8 | { duration: 3, layers: [{ type: "image", path: "./assets/img3.jpg", zoomDirection: "in" }] }, 9 | { duration: 3, layers: [{ type: "image", path: "./assets/img1.jpg", zoomDirection: null }] }, 10 | ], 11 | } 12 | -------------------------------------------------------------------------------- /examples/mosaic.json5: -------------------------------------------------------------------------------- 1 | { 2 | // width: 200, height: 500, 3 | width: 500, 4 | height: 500, 5 | outPath: "./mosaic.mp4", 6 | defaults: { 7 | transition: { duration: 0 }, 8 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 9 | layerType: { 10 | video: { width: 0.4, height: 0.4 }, 11 | }, 12 | }, 13 | clips: [ 14 | { 15 | duration: 2, 16 | layers: [ 17 | { 18 | type: "video", 19 | path: "./assets/palawan.mp4", 20 | cutFrom: 0, 21 | cutTo: 2, 22 | resizeMode: "cover", 23 | top: 0.5, 24 | left: 0.5, 25 | originY: "center", 26 | originX: "center", 27 | }, 28 | { 29 | type: "video", 30 | path: "./assets/palawan.mp4", 31 | cutFrom: 0, 32 | cutTo: 2, 33 | resizeMode: "contain", 34 | }, 35 | { 36 | type: "video", 37 | path: "./assets/palawan.mp4", 38 | cutFrom: 0, 39 | cutTo: 2, 40 | resizeMode: 
"contain-blur", 41 | left: 1, 42 | originX: "right", 43 | }, 44 | { 45 | type: "video", 46 | path: "./assets/IMG_1884.MOV", 47 | cutFrom: 0, 48 | cutTo: 2, 49 | resizeMode: "contain-blur", 50 | left: 1, 51 | top: 1, 52 | originX: "right", 53 | originY: "bottom", 54 | }, 55 | { 56 | type: "video", 57 | path: "./assets/palawan.mp4", 58 | cutFrom: 0, 59 | cutTo: 2, 60 | resizeMode: "stretch", 61 | top: 1, 62 | originY: "bottom", 63 | }, 64 | ], 65 | }, 66 | ], 67 | } 68 | -------------------------------------------------------------------------------- /examples/newsTitle.json5: -------------------------------------------------------------------------------- 1 | { 2 | width: 900, 3 | height: 1600, 4 | outPath: "./newsTitle.mp4", 5 | defaults: { 6 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 7 | }, 8 | clips: [ 9 | { 10 | duration: 10, 11 | layers: [ 12 | { type: "image", path: "./assets/91083241_573589476840991_4224678072281051330_n.jpg" }, 13 | { type: "news-title", text: "BREAKING NEWS" }, 14 | { 15 | type: "subtitle", 16 | text: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur.", 17 | backgroundColor: "rgba(0,0,0,0.5)", 18 | }, 19 | ], 20 | }, 21 | ], 22 | } 23 | -------------------------------------------------------------------------------- /examples/ph.json5: -------------------------------------------------------------------------------- 1 | { 2 | width: 240, 3 | height: 240, 4 | fps: 14, 5 | outPath: "./ph.gif", 6 | defaults: { 7 | transition: { duration: 0.4 }, 8 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 9 | }, 10 | clips: [ 11 | { 12 | duration: 1, 13 | transition: { name: "directionalWarp" }, 14 | layers: [ 15 | { type: "image", path: "./assets/vertical.jpg", zoomDirection: "out" }, 16 | { type: "title", text: "EDITLY" }, 17 | ], 18 | }, 19 | { 20 | duration: 1.5, 21 | transition: { name: "dreamyzoom" }, 22 | layers: [ 23 | { type: "image", path: "./assets/img1.jpg", duration: 2.5, zoomDirection: "in" }, 24 | { type: "title", position: "bottom", text: "Video editing API" }, 25 | ], 26 | }, 27 | 28 | { 29 | duration: 2, 30 | layers: [ 31 | { type: "image", path: "./assets/91083241_573589476840991_4224678072281051330_n.jpg" }, 32 | { type: "news-title", text: "EDITLY" }, 33 | { type: "subtitle", text: "Get it from npm", backgroundColor: "rgba(0,0,0,0.5)" }, 34 | ], 35 | }, 36 | ], 37 | } 38 | -------------------------------------------------------------------------------- /examples/pip.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./pip.mp4", 3 | width: 1280, 4 | height: 720, 5 | fps: 30, 6 | defaults: { 7 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 8 | }, 9 | clips: [ 10 | { 11 | duration: 4, 12 | layers: [ 13 | { type: "rainbow-colors" }, 14 | { 15 | type: "video", 16 | path: "./assets/tungestolen.mp4", 17 | resizeMode: "cover", 18 | width: 0.3, 19 | height: 0.4, 20 | top: 0.05, 21 | left: 0.95, 22 | originY: "top", 23 | originX: "right", 24 | }, 25 | { 26 | type: "video", 27 | path: "./assets/tungestolen.mp4", 28 | resizeMode: "cover", 29 | width: 0.4, 30 | height: 0.2, 31 | top: 0.05, 32 | left: 0.05, 33 | originY: "top", 34 | originX: "left", 35 | }, 36 | { type: "title", 
position: "bottom", text: "Picture-in-Picture" }, 37 | ], 38 | }, 39 | ], 40 | } 41 | -------------------------------------------------------------------------------- /examples/position.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./position.mp4", 3 | defaults: { 4 | layerType: { 5 | "image-overlay": { width: 0.1 }, 6 | }, 7 | }, 8 | clips: [ 9 | { 10 | layers: [ 11 | { type: "rainbow-colors" }, 12 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "top" }, 13 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "center" }, 14 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "bottom" }, 15 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "top-left" }, 16 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "top-right" }, 17 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "center-left" }, 18 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "center-right" }, 19 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "bottom-left" }, 20 | { type: "image-overlay", path: "./assets/emoji2.svg", position: "bottom-right" }, 21 | { 22 | type: "image-overlay", 23 | path: "./assets/emoji.png", 24 | width: 0.06, 25 | position: { originX: "center", originY: "center", x: 0.75, y: 0.75 }, 26 | }, 27 | ], 28 | }, 29 | ], 30 | } 31 | -------------------------------------------------------------------------------- /examples/remote.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./remote.mp4", 3 | allowRemoteRequests: true, 4 | audioFilePath: "./assets/High [NCS Release] - JPB (No Copyright Music)-R8ZRCXy5vhA.m4a", 5 | clips: [ 6 | { layers: [{ type: "image", path: "https://picsum.photos/400/400" }] }, 7 | { layers: [{ type: "image", path: "https://picsum.photos/200/400" }] }, 8 | { layers: [{ type: "image", path: "https://picsum.photos/400/200" }] }, 9 | ], 10 | } 11 | -------------------------------------------------------------------------------- /examples/renderSingleFrame.ts: -------------------------------------------------------------------------------- 1 | import { renderSingleFrame } from "editly"; 2 | import fsExtra from "fs-extra"; 3 | import JSON from "json5"; 4 | 5 | await renderSingleFrame({ 6 | time: 0, 7 | clips: JSON.parse(await fsExtra.readFile("./videos.json5", "utf-8")).clips, 8 | outPath: "renderSingleFrame.png", 9 | }); 10 | -------------------------------------------------------------------------------- /examples/run: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | ## Run an example or all examples 3 | # Usage: ./run [example1.ts] [example2.json5] ... 
4 | set -e 5 | 6 | # Get list of examples from args or all files in examples directory 7 | if [ "$#" -eq 0 ]; then 8 | examples=$(ls *.{ts,json5}) 9 | else 10 | examples=$@ 11 | fi 12 | 13 | for example in $examples; do 14 | echo "Running $example" 15 | if [[ $example == *.ts ]]; then 16 | npx tsx "$example" 17 | else 18 | node ../dist/cli.js --fast --json "$example" 19 | fi 20 | done 21 | -------------------------------------------------------------------------------- /examples/single.json5: -------------------------------------------------------------------------------- 1 | { 2 | // This is a test of a single clip to make sure that it works 3 | outPath: "./single.mp4", 4 | keepSourceAudio: true, 5 | clips: [{ layers: [{ type: "video", path: "./assets/lofoten.mp4", cutFrom: 0, cutTo: 2 }] }], 6 | } 7 | -------------------------------------------------------------------------------- /examples/slideInText.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./slideInText.mp4", 3 | defaults: { 4 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 5 | }, 6 | clips: [ 7 | { 8 | duration: 3, 9 | layers: [ 10 | { type: "image", path: "assets/img2.jpg" }, 11 | { 12 | type: "slide-in-text", 13 | text: "Text that slides in", 14 | textColor: "#fff", 15 | position: { x: 0.04, y: 0.93, originY: "bottom", originX: "left" }, 16 | fontSize: 0.05, 17 | }, 18 | ], 19 | }, 20 | ], 21 | } 22 | -------------------------------------------------------------------------------- /examples/smartFit.json5: -------------------------------------------------------------------------------- 1 | { 2 | // enableFfmpegLog: true, 3 | outPath: "./smartFit.mp4", 4 | defaults: { 5 | transition: null, 6 | layer: { backgroundColor: "white" }, 7 | }, 8 | clips: [ 9 | { layers: [{ type: "video", path: "./assets/changi.mp4", cutFrom: 0.4, cutTo: 2 }] }, 10 | { 11 | layers: [ 12 | { 13 | type: "video", 14 | path: "./assets/changi.mp4", 15 | cutFrom: 0.4, 16 | cutTo: 2, 17 | resizeMode: "contain", 18 | }, 19 | ], 20 | }, 21 | { 22 | layers: [ 23 | { 24 | type: "video", 25 | path: "./assets/changi.mp4", 26 | cutFrom: 0.4, 27 | cutTo: 2, 28 | resizeMode: "stretch", 29 | }, 30 | ], 31 | }, 32 | ], 33 | } 34 | -------------------------------------------------------------------------------- /examples/speedTest.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./speedTest.mp4", 3 | defaults: { 4 | transition: null, 5 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 6 | }, 7 | clips: [ 8 | { 9 | duration: 2, 10 | layers: [ 11 | { 12 | type: "title-background", 13 | text: "Speed up or slow down video", 14 | background: { type: "radial-gradient" }, 15 | }, 16 | ], 17 | }, 18 | { 19 | duration: 2, 20 | layers: [ 21 | { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 2 }, 22 | { type: "title", text: "Same speed" }, 23 | ], 24 | }, 25 | { 26 | duration: 1, 27 | layers: [ 28 | { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 4 }, 29 | { type: "title", text: "4x" }, 30 | ], 31 | }, 32 | { 33 | duration: 2, 34 | layers: [ 35 | { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 1 }, 36 | { type: "title", text: "1/2x" }, 37 | ], 38 | }, 39 | ], 40 | } 41 | -------------------------------------------------------------------------------- /examples/subtitle.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: 
"./subtitle.mp4", 3 | defaults: { 4 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 5 | layerType: { "fill-color": { color: "#00aa00" } }, 6 | }, 7 | clips: [ 8 | { 9 | duration: 2, 10 | layers: [ 11 | { type: "rainbow-colors" }, 12 | { 13 | type: "subtitle", 14 | text: "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident.", 15 | }, 16 | { type: "title", position: "top", text: "Subtitles" }, 17 | ], 18 | }, 19 | { 20 | duration: 2, 21 | layers: [ 22 | { type: "fill-color" }, 23 | { 24 | type: "title", 25 | position: { x: 0, y: 1, originY: "bottom" }, 26 | text: "Custom position", 27 | zoomDirection: null, 28 | }, 29 | ], 30 | }, 31 | ], 32 | } 33 | -------------------------------------------------------------------------------- /examples/timeoutTest.json5: -------------------------------------------------------------------------------- 1 | { 2 | outPath: "./timeoutTest.mp4", 3 | clips: [ 4 | { 5 | duration: 1.5, 6 | transition: { name: "crosszoom", duration: 0.3 }, 7 | layers: [{ type: "video", path: "./assets/tungestolen.mp4", cutTo: 58 }], 8 | }, 9 | { 10 | duration: 3, 11 | transition: { name: "fade" }, 12 | layers: [{ type: "video", path: "./assets/tungestolen.mp4", cutFrom: 0 }], 13 | }, 14 | ], 15 | } 16 | -------------------------------------------------------------------------------- /examples/transitionEasing.json5: -------------------------------------------------------------------------------- 1 | { 2 | fast: true, 3 | outPath: "./transitionEasing.mp4", 4 | defaults: { 5 | duration: 2, 6 | }, 7 | clips: [ 8 | { 9 | transition: { name: "directional", duration: 0.5 }, 10 | layers: [{ type: "video", path: "assets/changi.mp4", cutTo: 2 }], 11 | }, 12 | { 13 | transition: { name: "directional", duration: 0.5, params: { direction: [1, 0] } }, 14 | layers: [{ type: "video", path: "assets/lofoten.mp4", cutTo: 2 }], 15 | }, 16 | { 17 | transition: { name: "directional", duration: 0.5, easing: null }, 18 | layers: [{ type: "video", path: "assets/lofoten.mp4", cutTo: 2 }], 19 | }, 20 | { layers: [{ type: "pause" }] }, 21 | ], 22 | } 23 | -------------------------------------------------------------------------------- /examples/transitions.json5: -------------------------------------------------------------------------------- 1 | { 2 | fast: true, 3 | outPath: "./transitions.mp4", 4 | width: 1024, 5 | height: 576, 6 | defaults: { 7 | duration: 3, 8 | transition: { duration: 1 }, 9 | }, 10 | clips: [ 11 | { 12 | layers: [{ type: "title-background", text: "Transitions" }], 13 | transition: { name: "Bounce" }, 14 | }, 15 | { 16 | layers: [{ type: "title-background", text: "Bounce" }], 17 | transition: { name: "BowTieHorizontal" }, 18 | }, 19 | { 20 | layers: [{ type: "title-background", text: "BowTieHorizontal" }], 21 | transition: { name: "BowTieVertical" }, 22 | }, 23 | { 24 | layers: [{ type: "title-background", text: "BowTieVertical" }], 25 | transition: { name: "ButterflyWaveScrawler" }, 26 | }, 27 | { 28 | layers: [{ type: "title-background", text: "ButterflyWaveScrawler" }], 29 | transition: { name: "CircleCrop" }, 30 | }, 31 | { 32 | layers: [{ type: "title-background", text: "CircleCrop" }], 33 | transition: { name: "ColourDistance" }, 34 | }, 
35 | { 36 | layers: [{ type: "title-background", text: "ColourDistance" }], 37 | transition: { name: "CrazyParametricFun" }, 38 | }, 39 | { 40 | layers: [{ type: "title-background", text: "CrazyParametricFun" }], 41 | transition: { name: "CrossZoom" }, 42 | }, 43 | { 44 | layers: [{ type: "title-background", text: "CrossZoom" }], 45 | transition: { name: "Directional" }, 46 | }, 47 | { 48 | layers: [{ type: "title-background", text: "Directional" }], 49 | transition: { name: "DoomScreenTransition" }, 50 | }, 51 | { 52 | layers: [{ type: "title-background", text: "DoomScreenTransition" }], 53 | transition: { name: "Dreamy" }, 54 | }, 55 | { 56 | layers: [{ type: "title-background", text: "Dreamy" }], 57 | transition: { name: "DreamyZoom" }, 58 | }, 59 | { 60 | layers: [{ type: "title-background", text: "DreamyZoom" }], 61 | transition: { name: "GlitchDisplace" }, 62 | }, 63 | { 64 | layers: [{ type: "title-background", text: "GlitchDisplace" }], 65 | transition: { name: "GlitchMemories" }, 66 | }, 67 | { 68 | layers: [{ type: "title-background", text: "GlitchMemories" }], 69 | transition: { name: "GridFlip" }, 70 | }, 71 | { 72 | layers: [{ type: "title-background", text: "GridFlip" }], 73 | transition: { name: "InvertedPageCurl" }, 74 | }, 75 | { 76 | layers: [{ type: "title-background", text: "InvertedPageCurl" }], 77 | transition: { name: "LinearBlur" }, 78 | }, 79 | { 80 | layers: [{ type: "title-background", text: "LinearBlur" }], 81 | transition: { name: "Mosaic" }, 82 | }, 83 | { 84 | layers: [{ type: "title-background", text: "Mosaic" }], 85 | transition: { name: "PolkaDotsCurtain" }, 86 | }, 87 | { 88 | layers: [{ type: "title-background", text: "PolkaDotsCurtain" }], 89 | transition: { name: "Radial" }, 90 | }, 91 | { 92 | layers: [{ type: "title-background", text: "Radial" }], 93 | transition: { name: "SimpleZoom" }, 94 | }, 95 | { 96 | layers: [{ type: "title-background", text: "SimpleZoom" }], 97 | transition: { name: "StereoViewer" }, 98 | }, 99 | { 100 | layers: [{ type: "title-background", text: "StereoViewer" }], 101 | transition: { name: "Swirl" }, 102 | }, 103 | { 104 | layers: [{ type: "title-background", text: "Swirl" }], 105 | transition: { name: "WaterDrop" }, 106 | }, 107 | { 108 | layers: [{ type: "title-background", text: "WaterDrop" }], 109 | transition: { name: "ZoomInCircles" }, 110 | }, 111 | { 112 | layers: [{ type: "title-background", text: "ZoomInCircles" }], 113 | transition: { name: "angular" }, 114 | }, 115 | { 116 | layers: [{ type: "title-background", text: "angular" }], 117 | transition: { name: "burn" }, 118 | }, 119 | { 120 | layers: [{ type: "title-background", text: "burn" }], 121 | transition: { name: "cannabisleaf" }, 122 | }, 123 | { 124 | layers: [{ type: "title-background", text: "cannabisleaf" }], 125 | transition: { name: "circle" }, 126 | }, 127 | { 128 | layers: [{ type: "title-background", text: "circle" }], 129 | transition: { name: "circleopen" }, 130 | }, 131 | { 132 | layers: [{ type: "title-background", text: "circleopen" }], 133 | transition: { name: "colorphase" }, 134 | }, 135 | { 136 | layers: [{ type: "title-background", text: "colorphase" }], 137 | transition: { name: "crosshatch" }, 138 | }, 139 | { 140 | layers: [{ type: "title-background", text: "crosshatch" }], 141 | transition: { name: "crosswarp" }, 142 | }, 143 | { 144 | layers: [{ type: "title-background", text: "crosswarp" }], 145 | transition: { name: "cube" }, 146 | }, 147 | { 148 | layers: [{ type: "title-background", text: "cube" }], 149 | transition: { name: 
"directionalwarp" }, 150 | }, 151 | { 152 | layers: [{ type: "title-background", text: "directionalwarp" }], 153 | transition: { name: "directionalwipe" }, 154 | }, 155 | { 156 | layers: [{ type: "title-background", text: "directionalwipe" }], 157 | transition: { name: "displacement" }, 158 | }, 159 | { 160 | layers: [{ type: "title-background", text: "displacement" }], 161 | transition: { name: "doorway" }, 162 | }, 163 | { 164 | layers: [{ type: "title-background", text: "doorway" }], 165 | transition: { name: "fade" }, 166 | }, 167 | { 168 | layers: [{ type: "title-background", text: "fade" }], 169 | transition: { name: "fadecolor" }, 170 | }, 171 | { 172 | layers: [{ type: "title-background", text: "fadecolor" }], 173 | transition: { name: "fadegrayscale" }, 174 | }, 175 | { 176 | layers: [{ type: "title-background", text: "fadegrayscale" }], 177 | transition: { name: "flyeye" }, 178 | }, 179 | { 180 | layers: [{ type: "title-background", text: "flyeye" }], 181 | transition: { name: "heart" }, 182 | }, 183 | { 184 | layers: [{ type: "title-background", text: "heart" }], 185 | transition: { name: "hexagonalize" }, 186 | }, 187 | { 188 | layers: [{ type: "title-background", text: "hexagonalize" }], 189 | transition: { name: "kaleidoscope" }, 190 | }, 191 | { 192 | layers: [{ type: "title-background", text: "kaleidoscope" }], 193 | transition: { name: "luma" }, 194 | }, 195 | { 196 | layers: [{ type: "title-background", text: "luma" }], 197 | transition: { name: "luminance_melt" }, 198 | }, 199 | { 200 | layers: [{ type: "title-background", text: "luminance_melt" }], 201 | transition: { name: "morph" }, 202 | }, 203 | { 204 | layers: [{ type: "title-background", text: "morph" }], 205 | transition: { name: "multiply_blend" }, 206 | }, 207 | { 208 | layers: [{ type: "title-background", text: "multiply_blend" }], 209 | transition: { name: "perlin" }, 210 | }, 211 | { 212 | layers: [{ type: "title-background", text: "perlin" }], 213 | transition: { name: "pinwheel" }, 214 | }, 215 | { 216 | layers: [{ type: "title-background", text: "pinwheel" }], 217 | transition: { name: "pixelize" }, 218 | }, 219 | { 220 | layers: [{ type: "title-background", text: "pixelize" }], 221 | transition: { name: "polar_function" }, 222 | }, 223 | { 224 | layers: [{ type: "title-background", text: "polar_function" }], 225 | transition: { name: "randomsquares" }, 226 | }, 227 | { 228 | layers: [{ type: "title-background", text: "randomsquares" }], 229 | transition: { name: "ripple" }, 230 | }, 231 | { 232 | layers: [{ type: "title-background", text: "ripple" }], 233 | transition: { name: "rotate_scale_fade" }, 234 | }, 235 | { 236 | layers: [{ type: "title-background", text: "rotate_scale_fade" }], 237 | transition: { name: "squareswire" }, 238 | }, 239 | { 240 | layers: [{ type: "title-background", text: "squareswire" }], 241 | transition: { name: "squeeze" }, 242 | }, 243 | { 244 | layers: [{ type: "title-background", text: "squeeze" }], 245 | transition: { name: "swap" }, 246 | }, 247 | { 248 | layers: [{ type: "title-background", text: "swap" }], 249 | transition: { name: "undulatingBurnOut" }, 250 | }, 251 | { 252 | layers: [{ type: "title-background", text: "undulatingBurnOut" }], 253 | transition: { name: "wind" }, 254 | }, 255 | { 256 | layers: [{ type: "title-background", text: "wind" }], 257 | transition: { name: "windowblinds" }, 258 | }, 259 | { 260 | layers: [{ type: "title-background", text: "windowblinds" }], 261 | transition: { name: "windowslice" }, 262 | }, 263 | { 264 | layers: [{ type: 
"title-background", text: "windowslice" }], 265 | transition: { name: "wipeDown" }, 266 | }, 267 | { 268 | layers: [{ type: "title-background", text: "wipeDown" }], 269 | transition: { name: "wipeLeft" }, 270 | }, 271 | { 272 | layers: [{ type: "title-background", text: "wipeLeft" }], 273 | transition: { name: "wipeRight" }, 274 | }, 275 | { 276 | layers: [{ type: "title-background", text: "wipeRight" }], 277 | transition: { name: "wipeUp" }, 278 | }, 279 | { 280 | layers: [{ type: "title-background", text: "wipeUp" }], 281 | transition: { duration: 0 }, 282 | }, 283 | ], 284 | } 285 | -------------------------------------------------------------------------------- /examples/transparentGradient.json5: -------------------------------------------------------------------------------- 1 | { 2 | // fast: true, 3 | outPath: "./transparentGradient.mp4", 4 | clips: [ 5 | { 6 | duration: 0.1, 7 | layers: [ 8 | { type: "fill-color", color: "green" }, 9 | { type: "linear-gradient", colors: ["#ffffffff", "#ffffff00"] }, 10 | ], 11 | }, 12 | ], 13 | } 14 | -------------------------------------------------------------------------------- /examples/videos.json5: -------------------------------------------------------------------------------- 1 | { 2 | width: 600, 3 | height: 800, 4 | outPath: "./videos.mp4", 5 | defaults: { 6 | transition: { duration: 0 }, 7 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 8 | }, 9 | clips: [ 10 | { 11 | duration: 2, 12 | layers: [ 13 | { 14 | type: "title-background", 15 | text: "Editly can handle all formats and sizes with different fits", 16 | background: { type: "radial-gradient" }, 17 | }, 18 | ], 19 | }, 20 | { 21 | layers: [ 22 | { 23 | type: "video", 24 | path: "./assets/palawan.mp4", 25 | cutFrom: 0, 26 | cutTo: 2, 27 | resizeMode: "contain", 28 | }, 29 | { type: "title", text: "Contain" }, 30 | ], 31 | }, 32 | { 33 | layers: [ 34 | { 35 | type: "video", 36 | path: "./assets/palawan.mp4", 37 | cutFrom: 0, 38 | cutTo: 2, 39 | resizeMode: "contain-blur", 40 | }, 41 | { type: "title", text: "Contain (blur)" }, 42 | ], 43 | }, 44 | { 45 | layers: [ 46 | { 47 | type: "video", 48 | path: "./assets/IMG_1884.MOV", 49 | cutFrom: 0, 50 | cutTo: 2, 51 | resizeMode: "contain-blur", 52 | }, 53 | { type: "title", text: "Contain\n(blur, vertical)" }, 54 | ], 55 | }, 56 | { 57 | layers: [ 58 | { 59 | type: "video", 60 | path: "./assets/palawan.mp4", 61 | cutFrom: 0, 62 | cutTo: 2, 63 | resizeMode: "stretch", 64 | }, 65 | { type: "title", text: "Stretch" }, 66 | ], 67 | }, 68 | { 69 | layers: [ 70 | { type: "video", path: "./assets/palawan.mp4", cutFrom: 0, cutTo: 2, resizeMode: "cover" }, 71 | { type: "title", text: "Cover" }, 72 | ], 73 | }, 74 | ], 75 | } 76 | -------------------------------------------------------------------------------- /examples/videos2.json5: -------------------------------------------------------------------------------- 1 | { 2 | // verbose: true, 3 | // enableFfmpegLog: true, 4 | outPath: "./video2.mp4", 5 | defaults: { 6 | transition: { 7 | name: "linearblur", 8 | }, 9 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 10 | }, 11 | clips: [ 12 | { 13 | layers: [ 14 | { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 2 }, 15 | { type: "title", text: "Video 1" }, 16 | ], 17 | }, 18 | { layers: [{ type: "video", path: "./assets/IMG_1884.MOV", cutFrom: 0, cutTo: 2 }] }, 19 | ], 20 | } 21 | -------------------------------------------------------------------------------- /examples/vignette.json5: 
-------------------------------------------------------------------------------- 1 | { 2 | outPath: "./vignette.mp4", 3 | clips: [ 4 | { 5 | layers: [ 6 | { type: "video", path: "./assets/tungestolen.mp4", cutTo: 2 }, 7 | { 8 | type: "image", 9 | path: "./assets/vignette.png", 10 | resizeMode: "stretch", 11 | zoomDirection: null, 12 | }, 13 | ], 14 | }, 15 | ], 16 | } 17 | -------------------------------------------------------------------------------- /examples/visibleFromUntil.json5: -------------------------------------------------------------------------------- 1 | { 2 | // enableFfmpegLog: true, 3 | outPath: "./visibleFromUntil.mp4", 4 | defaults: { 5 | layer: { fontPath: "./assets/Patua_One/PatuaOne-Regular.ttf" }, 6 | }, 7 | clips: [ 8 | { 9 | duration: 2, 10 | layers: [ 11 | { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.4, cutTo: 2 }, 12 | { 13 | type: "video", 14 | path: "./assets/dancer1.webm", 15 | resizeMode: "contain", 16 | cutFrom: 0, 17 | cutTo: 6, 18 | start: 0.5, 19 | stop: 1, 20 | }, 21 | ], 22 | }, 23 | { 24 | duration: 2, 25 | layers: [ 26 | { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0.5, cutTo: 3.5 }, 27 | { type: "news-title", text: "Hei", start: 0.5, stop: 1 }, 28 | ], 29 | }, 30 | { 31 | layers: [ 32 | { type: "video", path: "./assets/lofoten.mp4", cutFrom: 0, cutTo: 4 }, 33 | { type: "video", path: "./assets/changi.mp4", cutFrom: 0, cutTo: 1, start: 1, stop: 2 }, 34 | ], 35 | }, 36 | ], 37 | } 38 | -------------------------------------------------------------------------------- /logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/mifi/editly/dc46674052eacd15ef6e0f563a58eb378b3804ad/logo.png -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "editly", 3 | "description": "Simple, sexy, declarative video editing", 4 | "version": "0.15.0-rc.1", 5 | "module": "./dist/index.js", 6 | "types": "dist/index.d.ts", 7 | "exports": { 8 | ".": "./dist/index.js" 9 | }, 10 | "author": "Mikael Finstad ", 11 | "contributors": [ 12 | "Patrick Connolly (https://github.com/patcon)", 13 | "Skayo (https://skayo.dev)" 14 | ], 15 | "type": "module", 16 | "license": "MIT", 17 | "dependencies": { 18 | "@types/fs-extra": "^11.0.4", 19 | "@types/gl": "^6.0.5", 20 | "@types/gl-shader": "^4.2.5", 21 | "@types/lodash-es": "^4.17.12", 22 | "@types/ndarray": "^1.0.14", 23 | "canvas": "^2.11.2", 24 | "compare-versions": "^6.1.0", 25 | "execa": "^9.5.2", 26 | "fabric": "^6.5.4", 27 | "file-type": "^20.0.0", 28 | "file-url": "^4.0.0", 29 | "fs-extra": "^11.2.0", 30 | "gl": "^8.1.6", 31 | "gl-buffer": "^2.1.2", 32 | "gl-shader": "^4.3.1", 33 | "gl-texture2d": "^2.1.0", 34 | "gl-transition": "^1.13.0", 35 | "gl-transitions": "^1.43.0", 36 | "json5": "^2.2.3", 37 | "lodash-es": "^4.17.21", 38 | "meow": "^10.1.3", 39 | "nanoid": "^5.0.7", 40 | "ndarray": "^1.0.19", 41 | "p-map": "^7.0.2" 42 | }, 43 | "scripts": { 44 | "build": "pkgroll --clean-dist --sourcemap", 45 | "prepublishOnly": "npm run build", 46 | "test": "vitest", 47 | "lint": "eslint .", 48 | "prepare": "husky || true" 49 | }, 50 | "repository": { 51 | "type": "git", 52 | "url": "git+https://github.com/mifi/editly.git" 53 | }, 54 | "bin": { 55 | "editly": "dist/cli.js" 56 | }, 57 | "devDependencies": { 58 | "@eslint/js": "^9.18.0", 59 | "@tsconfig/node-lts": "^22.0.1", 60 | "eslint": "^9.18.0", 61 | 
"eslint-config-prettier": "^10.0.1", 62 | "husky": "^9.1.7", 63 | "lint-staged": "^15.4.3", 64 | "pkgroll": "^2.6.1", 65 | "prettier": "3.5.1", 66 | "prettier-plugin-organize-imports": "4.1.0", 67 | "tsx": "^4.19.2", 68 | "typescript": "^5.7.3", 69 | "typescript-eslint": "^8.20.0", 70 | "vitest": "^3.0.4" 71 | }, 72 | "lint-staged": { 73 | "**/*.{js,ts,tsx}": "eslint --fix", 74 | "**/*": "prettier --write --ignore-unknown" 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /shaders/rainbow-colors.frag: -------------------------------------------------------------------------------- 1 | #ifdef GL_ES 2 | precision mediump float; 3 | #endif 4 | 5 | uniform float time; 6 | uniform vec2 resolution; 7 | 8 | void main() { 9 | vec2 st = gl_FragCoord.xy/resolution.xy; 10 | st.x *= resolution.x/resolution.y; 11 | 12 | vec3 color = vec3(0.); 13 | color = vec3(st.x,st.y,abs(sin(time))); 14 | 15 | gl_FragColor = vec4(color,1.0); 16 | } 17 | -------------------------------------------------------------------------------- /src/BoxBlur.d.ts: -------------------------------------------------------------------------------- 1 | import type { CanvasRenderingContext2D } from "canvas"; 2 | 3 | declare function boxBlurImage( 4 | context: CanvasRenderingContext2D, 5 | width: number, 6 | height: number, 7 | radius: number, 8 | blurAlphaChannel: boolean, 9 | iterations: number, 10 | ); 11 | -------------------------------------------------------------------------------- /src/BoxBlur.js: -------------------------------------------------------------------------------- 1 | /* eslint-disable */ 2 | /* 3 | 4 | Superfast Blur - a fast Box Blur For Canvas 5 | 6 | Version: 0.5 7 | Author: Mario Klingemann 8 | Contact: mario@quasimondo.com 9 | Website: http://www.quasimondo.com/BoxBlurForCanvas 10 | Twitter: @quasimondo 11 | 12 | In case you find this class useful - especially in commercial projects - 13 | I am not totally unhappy for a small donation to my PayPal account 14 | mario@quasimondo.de 15 | 16 | Or support me on flattr: 17 | https://flattr.com/thing/140066/Superfast-Blur-a-pretty-fast-Box-Blur-Effect-for-CanvasJavascript 18 | 19 | Copyright (c) 2011 Mario Klingemann 20 | 21 | Permission is hereby granted, free of charge, to any person 22 | obtaining a copy of this software and associated documentation 23 | files (the "Software"), to deal in the Software without 24 | restriction, including without limitation the rights to use, 25 | copy, modify, merge, publish, distribute, sublicense, and/or sell 26 | copies of the Software, and to permit persons to whom the 27 | Software is furnished to do so, subject to the following 28 | conditions: 29 | 30 | The above copyright notice and this permission notice shall be 31 | included in all copies or substantial portions of the Software. 32 | 33 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 34 | EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 35 | OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 36 | NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT 37 | HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, 38 | WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 39 | FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR 40 | OTHER DEALINGS IN THE SOFTWARE. 
41 | */ 42 | 43 | 44 | var mul_table = [ 1,57,41,21,203,34,97,73,227,91,149,62,105,45,39,137,241,107,3,173,39,71,65,238,219,101,187,87,81,151,141,133,249,117,221,209,197,187,177,169,5,153,73,139,133,127,243,233,223,107,103,99,191,23,177,171,165,159,77,149,9,139,135,131,253,245,119,231,224,109,211,103,25,195,189,23,45,175,171,83,81,79,155,151,147,9,141,137,67,131,129,251,123,30,235,115,113,221,217,53,13,51,50,49,193,189,185,91,179,175,43,169,83,163,5,79,155,19,75,147,145,143,35,69,17,67,33,65,255,251,247,243,239,59,29,229,113,111,219,27,213,105,207,51,201,199,49,193,191,47,93,183,181,179,11,87,43,85,167,165,163,161,159,157,155,77,19,75,37,73,145,143,141,35,138,137,135,67,33,131,129,255,63,250,247,61,121,239,237,117,29,229,227,225,111,55,109,216,213,211,209,207,205,203,201,199,197,195,193,48,190,47,93,185,183,181,179,178,176,175,173,171,85,21,167,165,41,163,161,5,79,157,78,154,153,19,75,149,74,147,73,144,143,71,141,140,139,137,17,135,134,133,66,131,65,129,1]; 45 | 46 | var shg_table = [0,9,10,10,14,12,14,14,16,15,16,15,16,15,15,17,18,17,12,18,16,17,17,19,19,18,19,18,18,19,19,19,20,19,20,20,20,20,20,20,15,20,19,20,20,20,21,21,21,20,20,20,21,18,21,21,21,21,20,21,17,21,21,21,22,22,21,22,22,21,22,21,19,22,22,19,20,22,22,21,21,21,22,22,22,18,22,22,21,22,22,23,22,20,23,22,22,23,23,21,19,21,21,21,23,23,23,22,23,23,21,23,22,23,18,22,23,20,22,23,23,23,21,22,20,22,21,22,24,24,24,24,24,22,21,24,23,23,24,21,24,23,24,22,24,24,22,24,24,22,23,24,24,24,20,23,22,23,24,24,24,24,24,24,24,23,21,23,22,23,24,24,24,22,24,24,24,23,22,24,24,25,23,25,25,23,24,25,25,24,22,25,25,25,24,23,24,25,25,25,25,25,25,25,25,25,25,25,25,23,25,23,24,25,25,25,25,25,25,25,25,25,24,22,25,25,23,25,25,20,24,25,24,25,25,22,24,25,24,25,24,25,25,24,25,25,25,25,22,25,25,25,24,25,24,25,18]; 47 | 48 | 49 | export function boxBlurImage( context, width, height, radius, blurAlphaChannel, iterations ){ 50 | if ( isNaN(radius) || radius < 1 ) return; 51 | 52 | if ( blurAlphaChannel ) 53 | { 54 | boxBlurCanvasRGBA( context, 0, 0, width, height, radius, iterations ); 55 | } else { 56 | boxBlurCanvasRGB( context, 0, 0, width, height, radius, iterations ); 57 | } 58 | 59 | } 60 | 61 | 62 | function boxBlurCanvasRGBA( context, top_x, top_y, width, height, radius, iterations ){ 63 | if ( isNaN(radius) || radius < 1 ) return; 64 | 65 | radius |= 0; 66 | 67 | if ( isNaN(iterations) ) iterations = 1; 68 | iterations |= 0; 69 | if ( iterations > 3 ) iterations = 3; 70 | if ( iterations < 1 ) iterations = 1; 71 | 72 | var imageData; 73 | 74 | try { 75 | try { 76 | imageData = context.getImageData( top_x, top_y, width, height ); 77 | } catch(e) { 78 | 79 | // NOTE: this part is supposedly only needed if you want to work with local files 80 | // so it might be okay to remove the whole try/catch block and just use 81 | // imageData = context.getImageData( top_x, top_y, width, height ); 82 | try { 83 | netscape.security.PrivilegeManager.enablePrivilege("UniversalBrowserRead"); 84 | imageData = context.getImageData( top_x, top_y, width, height ); 85 | } catch(e) { 86 | alert("Cannot access local image"); 87 | throw new Error("unable to access local image data: " + e); 88 | return; 89 | } 90 | } 91 | } catch(e) { 92 | alert("Cannot access image"); 93 | throw new Error("unable to access image data: " + e); 94 | return; 95 | } 96 | 97 | var pixels = imageData.data; 98 | 99 | var rsum,gsum,bsum,asum,x,y,i,p,p1,p2,yp,yi,yw,idx,pa; 100 | var wm = width - 1; 101 | var hm = height - 1; 102 | var wh = width * height; 103 | var rad1 = radius + 1; 104 | 105 | var mul_sum = 
mul_table[radius]; 106 | var shg_sum = shg_table[radius]; 107 | 108 | var r = []; 109 | var g = []; 110 | var b = []; 111 | var a = []; 112 | 113 | var vmin = []; 114 | var vmax = []; 115 | 116 | while ( iterations-- > 0 ){ 117 | yw = yi = 0; 118 | 119 | for ( y=0; y < height; y++ ){ 120 | rsum = pixels[yw] * rad1; 121 | gsum = pixels[yw+1] * rad1; 122 | bsum = pixels[yw+2] * rad1; 123 | asum = pixels[yw+3] * rad1; 124 | 125 | 126 | for( i = 1; i <= radius; i++ ){ 127 | p = yw + (((i > wm ? wm : i )) << 2 ); 128 | rsum += pixels[p++]; 129 | gsum += pixels[p++]; 130 | bsum += pixels[p++]; 131 | asum += pixels[p] 132 | } 133 | 134 | for ( x = 0; x < width; x++ ) { 135 | r[yi] = rsum; 136 | g[yi] = gsum; 137 | b[yi] = bsum; 138 | a[yi] = asum; 139 | 140 | if( y==0) { 141 | vmin[x] = ( ( p = x + rad1) < wm ? p : wm ) << 2; 142 | vmax[x] = ( ( p = x - radius) > 0 ? p << 2 : 0 ); 143 | } 144 | 145 | p1 = yw + vmin[x]; 146 | p2 = yw + vmax[x]; 147 | 148 | rsum += pixels[p1++] - pixels[p2++]; 149 | gsum += pixels[p1++] - pixels[p2++]; 150 | bsum += pixels[p1++] - pixels[p2++]; 151 | asum += pixels[p1] - pixels[p2]; 152 | 153 | yi++; 154 | } 155 | yw += ( width << 2 ); 156 | } 157 | 158 | for ( x = 0; x < width; x++ ) { 159 | yp = x; 160 | rsum = r[yp] * rad1; 161 | gsum = g[yp] * rad1; 162 | bsum = b[yp] * rad1; 163 | asum = a[yp] * rad1; 164 | 165 | for( i = 1; i <= radius; i++ ) { 166 | yp += ( i > hm ? 0 : width ); 167 | rsum += r[yp]; 168 | gsum += g[yp]; 169 | bsum += b[yp]; 170 | asum += a[yp]; 171 | } 172 | 173 | yi = x << 2; 174 | for ( y = 0; y < height; y++) { 175 | 176 | pixels[yi+3] = pa = (asum * mul_sum) >>> shg_sum; 177 | if ( pa > 0 ) 178 | { 179 | pa = 255 / pa; 180 | pixels[yi] = ((rsum * mul_sum) >>> shg_sum) * pa; 181 | pixels[yi+1] = ((gsum * mul_sum) >>> shg_sum) * pa; 182 | pixels[yi+2] = ((bsum * mul_sum) >>> shg_sum) * pa; 183 | } else { 184 | pixels[yi] = pixels[yi+1] = pixels[yi+2] = 0; 185 | } 186 | if( x == 0 ) { 187 | vmin[y] = ( ( p = y + rad1) < hm ? p : hm ) * width; 188 | vmax[y] = ( ( p = y - radius) > 0 ? 
p * width : 0 ); 189 | } 190 | 191 | p1 = x + vmin[y]; 192 | p2 = x + vmax[y]; 193 | 194 | rsum += r[p1] - r[p2]; 195 | gsum += g[p1] - g[p2]; 196 | bsum += b[p1] - b[p2]; 197 | asum += a[p1] - a[p2]; 198 | 199 | yi += width << 2; 200 | } 201 | } 202 | } 203 | 204 | context.putImageData( imageData, top_x, top_y ); 205 | 206 | } 207 | 208 | function boxBlurCanvasRGB( context, top_x, top_y, width, height, radius, iterations ){ 209 | if ( isNaN(radius) || radius < 1 ) return; 210 | 211 | radius |= 0; 212 | 213 | if ( isNaN(iterations) ) iterations = 1; 214 | iterations |= 0; 215 | if ( iterations > 3 ) iterations = 3; 216 | if ( iterations < 1 ) iterations = 1; 217 | 218 | var imageData; 219 | 220 | try { 221 | imageData = context.getImageData( top_x, top_y, width, height ); 222 | } catch(e) { 223 | alert("Cannot access image"); 224 | throw new Error("unable to access image data: " + e); 225 | return; 226 | } 227 | 228 | var pixels = imageData.data; 229 | 230 | var rsum,gsum,bsum,asum,x,y,i,p,p1,p2,yp,yi,yw,idx; 231 | var wm = width - 1; 232 | var hm = height - 1; 233 | var wh = width * height; 234 | var rad1 = radius + 1; 235 | 236 | var r = []; 237 | var g = []; 238 | var b = []; 239 | 240 | var mul_sum = mul_table[radius]; 241 | var shg_sum = shg_table[radius]; 242 | 243 | var vmin = []; 244 | var vmax = []; 245 | 246 | while ( iterations-- > 0 ){ 247 | yw = yi = 0; 248 | 249 | for ( y=0; y < height; y++ ){ 250 | rsum = pixels[yw] * rad1; 251 | gsum = pixels[yw+1] * rad1; 252 | bsum = pixels[yw+2] * rad1; 253 | 254 | for( i = 1; i <= radius; i++ ){ 255 | p = yw + (((i > wm ? wm : i )) << 2 ); 256 | rsum += pixels[p++]; 257 | gsum += pixels[p++]; 258 | bsum += pixels[p++]; 259 | } 260 | 261 | for ( x = 0; x < width; x++ ){ 262 | r[yi] = rsum; 263 | g[yi] = gsum; 264 | b[yi] = bsum; 265 | 266 | if( y==0) { 267 | vmin[x] = ( ( p = x + rad1) < wm ? p : wm ) << 2; 268 | vmax[x] = ( ( p = x - radius) > 0 ? p << 2 : 0 ); 269 | } 270 | 271 | p1 = yw + vmin[x]; 272 | p2 = yw + vmax[x]; 273 | 274 | rsum += pixels[p1++] - pixels[p2++]; 275 | gsum += pixels[p1++] - pixels[p2++]; 276 | bsum += pixels[p1++] - pixels[p2++]; 277 | 278 | yi++; 279 | } 280 | yw += ( width << 2 ); 281 | } 282 | 283 | for ( x = 0; x < width; x++ ){ 284 | yp = x; 285 | rsum = r[yp] * rad1; 286 | gsum = g[yp] * rad1; 287 | bsum = b[yp] * rad1; 288 | 289 | for( i = 1; i <= radius; i++ ){ 290 | yp += ( i > hm ? 0 : width ); 291 | rsum += r[yp]; 292 | gsum += g[yp]; 293 | bsum += b[yp]; 294 | } 295 | 296 | yi = x << 2; 297 | for ( y = 0; y < height; y++){ 298 | pixels[yi] = (rsum * mul_sum) >>> shg_sum; 299 | pixels[yi+1] = (gsum * mul_sum) >>> shg_sum; 300 | pixels[yi+2] = (bsum * mul_sum) >>> shg_sum; 301 | 302 | if( x == 0 ) { 303 | vmin[y] = ( ( p = y + rad1) < hm ? p : hm ) * width; 304 | vmax[y] = ( ( p = y - radius) > 0 ? 
p * width : 0 ); 305 | } 306 | 307 | p1 = x + vmin[y]; 308 | p2 = x + vmax[y]; 309 | 310 | rsum += r[p1] - r[p2]; 311 | gsum += g[p1] - g[p2]; 312 | bsum += b[p1] - b[p2]; 313 | 314 | yi += width << 2; 315 | } 316 | } 317 | } 318 | context.putImageData( imageData, top_x, top_y ); 319 | 320 | } 321 | -------------------------------------------------------------------------------- /src/api/defineFrameSource.ts: -------------------------------------------------------------------------------- 1 | import type { StaticCanvas } from "fabric/node"; 2 | import type { DebugOptions } from "../configuration.js"; 3 | import type { BaseLayer, OptionalPromise } from "../types.js"; 4 | 5 | /** 6 | * A public API for defining new frame sources. 7 | */ 8 | export function defineFrameSource<T extends BaseLayer>( 9 | type: T["type"], 10 | setup: FrameSourceSetupFunction<T>, 11 | ): FrameSourceFactory<T> { 12 | return { 13 | type, 14 | async setup(options: CreateFrameSourceOptions<T>) { 15 | return new FrameSource(options, await setup(options)); 16 | }, 17 | }; 18 | } 19 | 20 | export type CreateFrameSourceOptions<T extends BaseLayer> = DebugOptions & { 21 | width: number; 22 | height: number; 23 | duration: number; 24 | channels: number; 25 | framerateStr: string; 26 | params: T; 27 | }; 28 | 29 | export interface FrameSourceFactory<T extends BaseLayer> { 30 | type: T["type"]; 31 | setup: (fn: CreateFrameSourceOptions<T>) => Promise<FrameSource<T>>; 32 | } 33 | 34 | export interface FrameSourceImplementation { 35 | readNextFrame( 36 | progress: number, 37 | canvas: StaticCanvas, 38 | offsetTime: number, 39 | ): OptionalPromise<Buffer | void>; 40 | close?(): OptionalPromise<void>; 41 | } 42 | 43 | export type FrameSourceSetupFunction<T extends BaseLayer> = ( 44 | fn: CreateFrameSourceOptions<T>, 45 | ) => Promise<FrameSourceImplementation>; 46 | 47 | export class FrameSource<T extends BaseLayer> { 48 | options: CreateFrameSourceOptions<T>; 49 | implementation: FrameSourceImplementation; 50 | 51 | constructor(options: CreateFrameSourceOptions<T>, implementation: FrameSourceImplementation) { 52 | this.options = options; 53 | this.implementation = implementation; 54 | } 55 | 56 | async readNextFrame(time: number, canvas: StaticCanvas) { 57 | const { start, layerDuration } = this.layer; 58 | 59 | const offsetTime = time - (start ??
0); 60 | const offsetProgress = offsetTime / layerDuration!; 61 | const shouldDrawLayer = offsetProgress >= 0 && offsetProgress <= 1; 62 | 63 | // Skip drawing if the layer has not started or has already ended 64 | if (!shouldDrawLayer) return; 65 | 66 | return await this.implementation.readNextFrame(offsetProgress, canvas, offsetTime); 67 | } 68 | 69 | async close() { 70 | await this.implementation.close?.(); 71 | } 72 | 73 | get layer() { 74 | return this.options.params; 75 | } 76 | } 77 | -------------------------------------------------------------------------------- /src/api/index.ts: -------------------------------------------------------------------------------- 1 | export type * from "./defineFrameSource.js"; 2 | export { defineFrameSource } from "./defineFrameSource.js"; 3 | -------------------------------------------------------------------------------- /src/audio.ts: -------------------------------------------------------------------------------- 1 | import { flatMap } from "lodash-es"; 2 | import pMap from "p-map"; 3 | import { basename, join, resolve } from "path"; 4 | import type { Configuration } from "./configuration.js"; 5 | import { ffmpeg, getCutFromArgs, readFileStreams } from "./ffmpeg.js"; 6 | import type { TransitionOptions } from "./transition.js"; 7 | import type { 8 | AudioLayer, 9 | AudioNormalizationOptions, 10 | AudioTrack, 11 | Clip, 12 | VideoLayer, 13 | } from "./types.js"; 14 | 15 | export type AudioOptions = { 16 | verbose: boolean; 17 | tmpDir: string; 18 | }; 19 | 20 | export type EditAudioOptions = Pick< 21 | Configuration, 22 | "keepSourceAudio" | "clips" | "clipsAudioVolume" | "audioNorm" | "outputVolume" 23 | > & { 24 | arbitraryAudio: AudioTrack[]; 25 | }; 26 | 27 | type LayerWithAudio = (AudioLayer | VideoLayer) & { speedFactor: number }; 28 | 29 | export default ({ verbose, tmpDir }: AudioOptions) => { 30 | async function createMixedAudioClips({ 31 | clips, 32 | keepSourceAudio, 33 | }: { 34 | clips: Clip[]; 35 | keepSourceAudio?: boolean; 36 | }) { 37 | return pMap( 38 | clips, 39 | async (clip, i) => { 40 | const { duration, layers, transition } = clip; 41 | 42 | async function runInner(): Promise<{ clipAudioPath: string; silent: boolean }> { 43 | const clipAudioPath = join(tmpDir, `clip${i}-audio.flac`); 44 | 45 | async function createSilence() { 46 | if (verbose) console.log("create silence", duration); 47 | const args = [ 48 | "-nostdin", 49 | "-f", 50 | "lavfi", 51 | "-i", 52 | "anullsrc=channel_layout=stereo:sample_rate=44100", 53 | "-sample_fmt", 54 | "s32", 55 | "-ar", 56 | "48000", 57 | "-t", 58 | duration!.toString(), 59 | "-c:a", 60 | "flac", 61 | "-y", 62 | clipAudioPath, 63 | ]; 64 | await ffmpeg(args); 65 | 66 | return { silent: true, clipAudioPath }; 67 | } 68 | 69 | // Has user enabled keep source audio? 70 | if (!keepSourceAudio) return createSilence(); 71 | 72 | // TODO:[ts]: Layers is always an array once config is parsed. 
Fix this in types 73 | const audioLayers = layers.filter( 74 | ({ type, start, stop }) => 75 | ["audio", "video"].includes(type) && 76 | // TODO: We don't support audio for start/stop layers 77 | !start && 78 | stop == null, 79 | ) as LayerWithAudio[]; 80 | 81 | if (audioLayers.length === 0) return createSilence(); 82 | 83 | const processedAudioLayersRaw = await pMap( 84 | audioLayers, 85 | async (audioLayer, j) => { 86 | const { path, cutFrom, cutTo, speedFactor } = audioLayer; 87 | 88 | const streams = await readFileStreams(path); 89 | if (!streams.some((s) => s.codec_type === "audio")) return undefined; 90 | 91 | const layerAudioPath = join(tmpDir, `clip${i}-layer${j}-audio.flac`); 92 | 93 | try { 94 | let atempoFilter; 95 | if (Math.abs(speedFactor - 1) > 0.01) { 96 | if (verbose) console.log("audio speedFactor", speedFactor); 97 | const atempo = 1 / speedFactor; 98 | if (!(atempo >= 0.5 && atempo <= 100)) { 99 | // Required range by ffmpeg 100 | console.warn( 101 | `Audio speed ${atempo} is outside accepted range, using silence (clip ${i})`, 102 | ); 103 | return undefined; 104 | } 105 | atempoFilter = `atempo=${atempo}`; 106 | } 107 | 108 | const cutToArg = (cutTo! - cutFrom!) * speedFactor; 109 | 110 | const args = [ 111 | "-nostdin", 112 | ...getCutFromArgs({ cutFrom }), 113 | "-i", 114 | path, 115 | "-t", 116 | cutToArg!.toString(), 117 | "-sample_fmt", 118 | "s32", 119 | "-ar", 120 | "48000", 121 | "-map", 122 | "a:0", 123 | "-c:a", 124 | "flac", 125 | ...(atempoFilter ? ["-filter:a", atempoFilter] : []), 126 | "-y", 127 | layerAudioPath, 128 | ]; 129 | 130 | await ffmpeg(args); 131 | 132 | return [layerAudioPath, audioLayer]; 133 | } catch (err) { 134 | if (verbose) console.error("Cannot extract audio from video", path, err); 135 | // Fall back to silence 136 | return undefined; 137 | } 138 | }, 139 | { concurrency: 4 }, 140 | ); 141 | 142 | const processedAudioLayers = processedAudioLayersRaw.filter( 143 | (r): r is [string, LayerWithAudio] => r !== undefined, 144 | ); 145 | 146 | if (processedAudioLayers.length < 1) return createSilence(); 147 | 148 | if (processedAudioLayers.length === 1) 149 | return { clipAudioPath: processedAudioLayers[0][0], silent: false }; 150 | 151 | // Merge/mix all layers' audio 152 | const weights = processedAudioLayers.map(([, { mixVolume }]) => mixVolume ?? 
1); 153 | const args = [ 154 | "-nostdin", 155 | ...flatMap(processedAudioLayers, ([layerAudioPath]) => ["-i", layerAudioPath]), 156 | "-filter_complex", 157 | `amix=inputs=${processedAudioLayers.length}:duration=longest:weights=${weights.join(" ")}`, 158 | "-c:a", 159 | "flac", 160 | "-y", 161 | clipAudioPath, 162 | ]; 163 | 164 | await ffmpeg(args); 165 | return { clipAudioPath, silent: false }; 166 | } 167 | 168 | const { clipAudioPath, silent } = await runInner(); 169 | 170 | return { 171 | path: resolve(clipAudioPath), // https://superuser.com/a/853262/658247 172 | transition, 173 | silent, 174 | }; 175 | }, 176 | { concurrency: 4 }, 177 | ); 178 | } 179 | 180 | async function crossFadeConcatClipAudio( 181 | clipAudio: { path: string; transition?: TransitionOptions | null }[], 182 | ) { 183 | if (clipAudio.length < 2) { 184 | return clipAudio[0].path; 185 | } 186 | 187 | const outPath = join(tmpDir, "audio-concat.flac"); 188 | 189 | if (verbose) 190 | console.log( 191 | "Combining audio", 192 | clipAudio.map(({ path }) => basename(path)), 193 | ); 194 | 195 | let inStream = "[0:a]"; 196 | const filterGraph = clipAudio 197 | .slice(0, -1) 198 | .map(({ transition }, i) => { 199 | const outStream = `[concat${i}]`; 200 | 201 | const epsilon = 0.0001; // If duration is 0, ffmpeg seems to default to 1 sec instead, hence epsilon. 202 | let ret = `${inStream}[${i + 1}:a]acrossfade=d=${Math.max(epsilon, transition?.duration ?? 0)}:c1=${transition?.audioOutCurve ?? "tri"}:c2=${transition?.audioInCurve ?? "tri"}`; 203 | 204 | inStream = outStream; 205 | 206 | if (i < clipAudio.length - 2) ret += outStream; 207 | return ret; 208 | }) 209 | .join(","); 210 | 211 | const args = [ 212 | "-nostdin", 213 | ...flatMap(clipAudio, ({ path }) => ["-i", path]), 214 | "-filter_complex", 215 | filterGraph, 216 | "-c", 217 | "flac", 218 | "-y", 219 | outPath, 220 | ]; 221 | await ffmpeg(args); 222 | 223 | return outPath; 224 | } 225 | 226 | // FIXME[ts]: parseConfig sets `loop` on arbitrary audio tracks. Should that be part of the `AudioTrack` interface? 227 | async function mixArbitraryAudio({ 228 | streams, 229 | audioNorm, 230 | outputVolume, 231 | }: { 232 | streams: (AudioTrack & { loop?: number })[]; 233 | audioNorm?: AudioNormalizationOptions; 234 | outputVolume?: number | string; 235 | }) { 236 | let maxGain = 30; 237 | let gaussSize = 5; 238 | if (audioNorm) { 239 | if (audioNorm.gaussSize != null) gaussSize = audioNorm.gaussSize; 240 | if (audioNorm.maxGain != null) maxGain = audioNorm.maxGain; 241 | } 242 | const enableAudioNorm = audioNorm && audioNorm.enable; 243 | 244 | // https://stackoverflow.com/questions/35509147/ffmpeg-amix-filter-volume-issue-with-inputs-of-different-duration 245 | let filterComplex = streams 246 | .map(({ start, cutFrom, cutTo }, i) => { 247 | const cutToArg = cutTo != null ? `:end=${cutTo}` : ""; 248 | const apadArg = i > 0 ? ",apad" : ""; // Don't pad the first track (audio from video clips with correct duration) 249 | 250 | return `[${i}:a]atrim=start=${cutFrom || 0}${cutToArg},adelay=delays=${Math.floor((start || 0) * 1000)}:all=1${apadArg}[a${i}]`; 251 | }) 252 | .join(";"); 253 | 254 | const volumeArg = outputVolume != null ? `,volume=${outputVolume}` : ""; 255 | const audioNormArg = enableAudioNorm ? `,dynaudnorm=g=${gaussSize}:maxgain=${maxGain}` : ""; 256 | filterComplex += `;${streams.map((_, i) => `[a${i}]`).join("")}amix=inputs=${streams.length}:duration=first:dropout_transition=0:weights=${streams.map((s) => (s.mixVolume != null ? 
s.mixVolume : 1)).join(" ")}${audioNormArg}${volumeArg}`; 257 | 258 | const mixedAudioPath = join(tmpDir, "audio-mixed.flac"); 259 | 260 | const args = [ 261 | "-nostdin", 262 | ...flatMap(streams, ({ path, loop }) => ["-stream_loop", (loop || 0).toString(), "-i", path]), 263 | "-vn", 264 | "-filter_complex", 265 | filterComplex, 266 | "-c:a", 267 | "flac", 268 | "-y", 269 | mixedAudioPath, 270 | ]; 271 | 272 | await ffmpeg(args); 273 | 274 | return mixedAudioPath; 275 | } 276 | 277 | async function editAudio({ 278 | keepSourceAudio, 279 | clips, 280 | arbitraryAudio, 281 | clipsAudioVolume, 282 | audioNorm, 283 | outputVolume, 284 | }: EditAudioOptions) { 285 | // We need clips to process audio, because we need to know duration 286 | if (clips.length === 0) return undefined; 287 | 288 | // No need to process audio if none of these are satisfied 289 | if (!(keepSourceAudio || arbitraryAudio.length > 0)) return undefined; 290 | 291 | console.log("Extracting audio/silence from all clips"); 292 | 293 | // Mix audio from each clip as separate files (or silent audio of appropriate length for clips with no audio) 294 | const clipAudio = await createMixedAudioClips({ clips, keepSourceAudio }); 295 | 296 | // Return no audio if only silent clips and no arbitrary audio 297 | if (clipAudio.every((ca) => ca.silent) && arbitraryAudio.length === 0) return undefined; 298 | 299 | // Merge & fade the clip audio files 300 | const concatedClipAudioPath = await crossFadeConcatClipAudio(clipAudio); 301 | 302 | const streams: AudioTrack[] = [ 303 | // The first stream is required, as it determines the length of the output audio. 304 | // All other streams will be truncated to its length 305 | { path: concatedClipAudioPath, mixVolume: clipsAudioVolume }, 306 | 307 | ...arbitraryAudio, 308 | ]; 309 | 310 | console.log("Mixing clip audio with arbitrary audio"); 311 | 312 | if (streams.length < 2) return concatedClipAudioPath; 313 | 314 | const mixedFile = await mixArbitraryAudio({ streams, audioNorm, outputVolume }); 315 | return mixedFile; 316 | } 317 | 318 | return { 319 | editAudio, 320 | }; 321 | }; 322 | -------------------------------------------------------------------------------- /src/cli.ts: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env node 2 | import assert from "assert"; 3 | import { fileTypeFromFile } from "file-type"; 4 | import { readFileSync } from "fs"; 5 | import JSON5 from "json5"; 6 | import meow from "meow"; 7 | import pMap from "p-map"; 8 | import { ConfigurationOptions } from "./configuration.js"; 9 | import Editly, { Layer } from "./index.js"; 10 | 11 | // See also readme 12 | const cli = meow( 13 | ` 14 | Usage 15 | $ editly CLIP1 [CLIP2 [CLIP3 ...]] 16 | where each CLIP can be one of the following: 17 | - A path to a video file 18 | - A path to an image 19 | - A quoted text to show in a title screen, prefixed by "title:" 20 | 21 | Or alternatively: 22 | $ editly --json JSON_PATH 23 | where JSON_PATH is the path to an edit spec JSON file, can be a normal JSON or JSON5 24 | 25 | Options 26 | --out Out video path (defaults to ./editly-out.mp4) - can also be a .gif 27 | --json Use JSON edit spec 28 | --transition-name Name of default transition to use (default: random) 29 | --transition-duration Default transition duration 30 | --clip-duration Default clip duration 31 | --width Width which all media will be converted to 32 | --height Height which all media will be converted to 33 | --fps FPS which all videos will be converted to 34 | 
--font-path Set default font to a .ttf 35 | --audio-file-path Add an audio track 36 | --loop-audio Loop the audio track if it is shorter than video? 37 | --keep-source-audio Keep audio from source files 38 | --output-volume Adjust audio output volume 39 | --allow-remote-requests 40 | 41 | --fast, -f Fast mode (low resolution and FPS, useful for getting a quick preview) 42 | --verbose, -v 43 | 44 | For more detailed explanation, see: 45 | https://github.com/mifi/editly 46 | 47 | Examples 48 | $ editly title:'My video' clip1.mov clip2.mov title:'My slideshow' img1.jpg img2.jpg title:'THE END' --audio-file-path /path/to/music.mp3 --font-path /path/to/my-favorite-font.ttf 49 | $ editly my-editly.json5 --out output.gif 50 | `, 51 | { 52 | importMeta: import.meta, 53 | flags: { 54 | verbose: { type: "boolean", alias: "v" }, 55 | keepSourceAudio: { type: "boolean" }, 56 | allowRemoteRequests: { type: "boolean" }, 57 | fast: { type: "boolean", alias: "f" }, 58 | transitionName: { type: "string" }, 59 | transitionDuration: { type: "number" }, 60 | clipDuration: { type: "number" }, 61 | width: { type: "number" }, 62 | height: { type: "number" }, 63 | fps: { type: "number" }, 64 | fontPath: { type: "string" }, 65 | loopAudio: { type: "boolean" }, 66 | outputVolume: { type: "string" }, 67 | json: { type: "string" }, 68 | out: { type: "string" }, 69 | audioFilePath: { type: "string" }, 70 | }, 71 | }, 72 | ); 73 | 74 | (async () => { 75 | let { json } = cli.flags; 76 | if (cli.input.length === 1 && /\.(json|json5|js)$/.test(cli.input[0].toLowerCase())) 77 | json = cli.input[0]; 78 | 79 | let params: Partial<ConfigurationOptions> = { 80 | defaults: {}, 81 | }; 82 | 83 | if (json) { 84 | params = JSON5.parse(readFileSync(json, "utf-8")); 85 | } else { 86 | const clipsIn = cli.input; 87 | if (clipsIn.length < 1) cli.showHelp(); 88 | 89 | const clips: Layer[] = await pMap( 90 | clipsIn, 91 | async (clip) => { 92 | let match = clip.match(/^title:(.+)$/); 93 | if (match) return { type: "title-background", text: match[1] }; 94 | 95 | match = clip.match(/^https?:\/\/.*\.(jpg|jpeg|png|webp|gif|svg)$/); // todo improve 96 | if (match) return { type: "image", path: clip }; 97 | 98 | const fileType = await fileTypeFromFile(clip); 99 | if (!fileType) { 100 | console.error("Invalid file for clip", clip); 101 | cli.showHelp(); 102 | } 103 | 104 | const mime = fileType!.mime; 105 | 106 | if (mime.startsWith("video")) return { type: "video", path: clip }; 107 | if (mime.startsWith("image")) return { type: "image", path: clip }; 108 | 109 | throw new Error(`Unrecognized clip or file type "${clip}"`); 110 | }, 111 | { concurrency: 1 }, 112 | ); 113 | 114 | assert(clips.length > 0, "No clips specified"); 115 | 116 | params.clips = clips.map((clip) => ({ layers: [clip] })); 117 | } 118 | 119 | const { 120 | verbose, 121 | transitionName, 122 | transitionDuration, 123 | clipDuration, 124 | width, 125 | height, 126 | fps, 127 | audioFilePath, 128 | fontPath, 129 | fast, 130 | out: outPath, 131 | keepSourceAudio, 132 | loopAudio, 133 | outputVolume, 134 | allowRemoteRequests, 135 | } = cli.flags; 136 | 137 | if (transitionName || transitionDuration != null) { 138 | params.defaults!.transition = {}; 139 | if (transitionName) params.defaults!.transition!.name = transitionName; 140 | if (transitionDuration) params.defaults!.transition!.duration = transitionDuration; 141 | } 142 | 143 | if (clipDuration) params.defaults!.duration = clipDuration; 144 | 145 | if (fontPath) { 146 | params.defaults!.layer = { 147 | fontPath, 148 | }; 149 | } 150 | 151 |
if (outPath) params.outPath = outPath; 152 | if (audioFilePath) params.audioFilePath = audioFilePath; 153 | if (loopAudio) params.loopAudio = loopAudio; 154 | if (outputVolume) params.outputVolume = outputVolume; 155 | if (keepSourceAudio) params.keepSourceAudio = true; 156 | if (allowRemoteRequests) params.allowRemoteRequests = true; 157 | if (width) params.width = width; 158 | if (height) params.height = height; 159 | if (fps) params.fps = fps; 160 | 161 | if (fast) params.fast = fast; 162 | if (verbose) params.verbose = verbose; 163 | 164 | if (params.verbose) console.log(JSON5.stringify(params, null, 2)); 165 | 166 | if (!params.outPath) params.outPath = "./editly-out.mp4"; 167 | 168 | await Editly(params as ConfigurationOptions); 169 | })().catch((err) => { 170 | console.error("Caught error", err); 171 | process.exitCode = 1; 172 | }); 173 | -------------------------------------------------------------------------------- /src/colors.ts: -------------------------------------------------------------------------------- 1 | // TODO make separate npm module 2 | 3 | // https://stackoverflow.com/a/4382138/6519037 4 | const allColors = [ 5 | "hsl(42, 100%, 50%)", 6 | "hsl(310, 34%, 37%)", 7 | "hsl(24, 100%, 50%)", 8 | "hsl(211, 38%, 74%)", 9 | "hsl(350, 100%, 37%)", 10 | "hsl(35, 52%, 59%)", 11 | "hsl(22, 11%, 45%)", 12 | "hsl(145, 100%, 24%)", 13 | "hsl(348, 87%, 71%)", 14 | "hsl(203, 100%, 27%)", 15 | "hsl(11, 100%, 68%)", 16 | "hsl(265, 37%, 34%)", 17 | "hsl(33, 100%, 50%)", 18 | "hsl(342, 63%, 42%)", 19 | "hsl(49, 100%, 47%)", 20 | "hsl(5, 81%, 27%)", 21 | "hsl(68, 100%, 33%)", 22 | "hsl(26, 61%, 21%)", 23 | "hsl(10, 88%, 51%)", 24 | "hsl(84, 33%, 12%)", 25 | ]; 26 | 27 | // https://digitalsynopsis.com/design/beautiful-color-ui-gradients-backgrounds/ 28 | const gradientColors = [ 29 | ["#ff9aac", "#ffa875"], 30 | ["#cc2b5e", "#753a88"], 31 | ["#42275a", "#734b6d"], 32 | ["#bdc3c7", "#2c3e50"], 33 | ["#de6262", "#ffb88c"], 34 | ["#eb3349", "#f45c43"], 35 | ["#dd5e89", "#f7bb97"], 36 | ["#56ab2f", "#a8e063"], 37 | ["#614385", "#516395"], 38 | ["#eecda3", "#ef629f"], 39 | ["#eacda3", "#d6ae7b"], 40 | ["#02aab0", "#00cdac"], 41 | ["#d66d75", "#e29587"], 42 | ["#000428", "#004e92"], 43 | ["#ddd6f3", "#faaca8"], 44 | ["#7b4397", "#dc2430"], 45 | ["#43cea2", "#185a9d"], 46 | ["#ba5370", "#f4e2d8"], 47 | ["#ff512f", "#dd2476"], 48 | ["#4568dc", "#b06ab3"], 49 | ["#ec6f66", "#f3a183"], 50 | ["#ffd89b", "#19547b"], 51 | ["#3a1c71", "#d76d77"], 52 | ["#4ca1af", "#c4e0e5"], 53 | ["#ff5f6d", "#ffc371"], 54 | ["#36d1dc", "#5b86e5"], 55 | ["#c33764", "#1d2671"], 56 | ["#141e30", "#243b55"], 57 | ["#ff7e5f", "#feb47b"], 58 | ["#ed4264", "#ffedbc"], 59 | ["#2b5876", "#4e4376"], 60 | ["#ff9966", "#ff5e62"], 61 | ["#aa076b", "#61045f"], 62 | ]; 63 | 64 | /* const lightGradients = [ 65 | [ 66 | '#ee9ca7', 67 | '#ffdde1', 68 | ], 69 | [ 70 | '#2193b0', 71 | '#6dd5ed', 72 | ], 73 | ]; */ 74 | 75 | export function getRandomColor(colors = allColors) { 76 | const index = Math.floor(Math.random() * colors.length); 77 | const remainingColors = [...colors]; 78 | remainingColors.splice(index, 1); 79 | return { remainingColors, color: colors[index] || allColors[0] }; 80 | } 81 | 82 | export function getRandomColors(num: number) { 83 | let colors = allColors; 84 | const out = []; 85 | for (let i = 0; i < Math.min(num, allColors.length); i += 1) { 86 | const { remainingColors, color } = getRandomColor(colors); 87 | out.push(color); 88 | colors = remainingColors; 89 | } 90 | return out; 91 | } 92 | 93 | export function 
getRandomGradient() { 94 | return gradientColors[Math.floor(Math.random() * gradientColors.length)]; 95 | } 96 | -------------------------------------------------------------------------------- /src/configuration.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { merge } from "lodash-es"; 3 | import { nanoid } from "nanoid"; 4 | import { dirname, join } from "path"; 5 | import { expandLayerAliases } from "./sources/index.js"; 6 | import type { AudioNormalizationOptions, AudioTrack, Clip, DefaultOptions } from "./types.js"; 7 | 8 | export type DebugOptions = { 9 | verbose?: boolean; 10 | logTimes?: boolean; 11 | }; 12 | 13 | export type FfmpegConfig = { 14 | ffmpegPath?: string; 15 | ffprobePath?: string; 16 | enableFfmpegLog?: boolean; 17 | }; 18 | 19 | export type ConfigurationOptions = { 20 | /** 21 | * Output path (`.mp4` or `.mkv`, can also be a `.gif`). 22 | */ 23 | outPath: string; 24 | 25 | /** 26 | * List of clip objects that will be played in sequence. 27 | * Each clip can have one or more layers. 28 | * 29 | * @default [] 30 | */ 31 | clips: Clip[]; 32 | 33 | /** 34 | * Width which all media will be converted to. 35 | * 36 | * @default 640 37 | */ 38 | width?: number; 39 | 40 | /** 41 | * Height which all media will be converted to. 42 | * Decides height based on `width` and aspect ratio of the first video by default. 43 | */ 44 | height?: number; 45 | 46 | /** 47 | * FPS which all videos will be converted to. 48 | * Defaults to first video's FPS or `25`. 49 | */ 50 | fps?: number; 51 | 52 | /** 53 | * Specify custom output codec/format arguments for ffmpeg. 54 | * Automatically adds codec options (normally `h264`) by default. 55 | * 56 | * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/customOutputArgs.json5} 57 | */ 58 | customOutputArgs?: string[]; 59 | 60 | /** 61 | * Allow remote URLs as paths. 62 | * 63 | * @default false 64 | */ 65 | allowRemoteRequests?: boolean; 66 | 67 | /** 68 | * Fast mode (low resolution and FPS, useful for getting a quick preview ⏩). 69 | * 70 | * @default false 71 | */ 72 | fast?: boolean; 73 | 74 | /** 75 | * An object describing default options for clips and layers. 76 | */ 77 | defaults?: DefaultOptions; 78 | 79 | /** 80 | * List of arbitrary audio tracks. 81 | * 82 | * @default [] 83 | * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} 84 | */ 85 | audioTracks?: AudioTrack[]; 86 | 87 | /** 88 | * Set an audio track for the whole video.. 89 | * 90 | * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} 91 | */ 92 | audioFilePath?: string; 93 | 94 | /** 95 | * Background Volume 96 | * 97 | * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} 98 | */ 99 | backgroundAudioVolume?: string | number; 100 | 101 | /** 102 | * Loop the audio track if it is shorter than video? 103 | * 104 | * @default false 105 | */ 106 | loopAudio?: boolean; 107 | 108 | /** 109 | * Keep source audio from `clips`? 110 | * 111 | * @default false 112 | */ 113 | keepSourceAudio?: boolean; 114 | 115 | /** 116 | * Volume of audio from `clips` relative to `audioTracks`. 117 | * 118 | * @default 1 119 | * @see [Audio tracks]{@link https://github.com/mifi/editly#arbitrary-audio-tracks} 120 | */ 121 | clipsAudioVolume?: number | string; 122 | 123 | /** 124 | * Adjust output [volume]{@link http://ffmpeg.org/ffmpeg-filters.html#volume} (final stage). 
125 | * 126 | * @default 1 127 | * @see [Example]{@link https://github.com/mifi/editly/blob/master/examples/audio-volume.json5} 128 | * @example 129 | * 0.5 130 | * @example 131 | * '10db' 132 | */ 133 | outputVolume?: number | string; 134 | 135 | /** 136 | * Audio normalization. 137 | */ 138 | audioNorm?: AudioNormalizationOptions; 139 | 140 | /** 141 | * WARNING: Undocumented feature! 142 | */ 143 | keepTmp?: boolean; 144 | } & DebugOptions & 145 | FfmpegConfig; 146 | 147 | export type LayerSourceConfig = Pick< 148 | Configuration, 149 | "verbose" | "allowRemoteRequests" | "logTimes" | "tmpDir" 150 | >; 151 | 152 | const globalDefaults = { 153 | duration: 4, 154 | transition: { 155 | duration: 0.5, 156 | name: "random", 157 | audioOutCurve: "tri", 158 | audioInCurve: "tri", 159 | }, 160 | }; 161 | 162 | export class Configuration { 163 | clips: Clip[]; 164 | outPath: string; 165 | tmpDir: string; 166 | allowRemoteRequests: boolean; 167 | customOutputArgs?: string[]; 168 | defaults: DefaultOptions; 169 | 170 | // Video 171 | width?: number; 172 | height?: number; 173 | fps?: number; 174 | 175 | // Audio 176 | audioFilePath?: string; 177 | backgroundAudioVolume?: string | number; 178 | loopAudio?: boolean; 179 | keepSourceAudio?: boolean; 180 | audioNorm?: AudioNormalizationOptions; 181 | outputVolume?: number | string; 182 | clipsAudioVolume: string | number; 183 | audioTracks: AudioTrack[]; 184 | 185 | // Debug 186 | enableFfmpegLog: boolean; 187 | verbose: boolean; 188 | logTimes: boolean; 189 | keepTmp: boolean; 190 | fast: boolean; 191 | ffmpegPath: string; 192 | ffprobePath: string; 193 | 194 | constructor(input: ConfigurationOptions) { 195 | assert(input.outPath, "Please provide an output path"); 196 | assert(Array.isArray(input.clips) && input.clips.length > 0, "Please provide at least 1 clip"); 197 | assert( 198 | !input.customOutputArgs || Array.isArray(input.customOutputArgs), 199 | "customOutputArgs must be an array of arguments", 200 | ); 201 | 202 | this.outPath = input.outPath; 203 | this.width = input.width; 204 | this.height = input.height; 205 | this.fps = input.fps; 206 | this.audioFilePath = input.audioFilePath; 207 | this.backgroundAudioVolume = input.backgroundAudioVolume; 208 | this.loopAudio = input.loopAudio; 209 | this.clipsAudioVolume = input.clipsAudioVolume ?? 1; 210 | this.audioTracks = input.audioTracks ?? []; 211 | this.keepSourceAudio = input.keepSourceAudio; 212 | this.allowRemoteRequests = input.allowRemoteRequests ?? false; 213 | this.audioNorm = input.audioNorm; 214 | this.outputVolume = input.outputVolume; 215 | this.customOutputArgs = input.customOutputArgs; 216 | this.defaults = merge({}, globalDefaults, input.defaults); 217 | 218 | this.clips = input.clips.map((clip) => { 219 | let { layers } = clip; 220 | 221 | if (layers && !Array.isArray(layers)) layers = [layers]; // Allow single layer for convenience 222 | assert( 223 | Array.isArray(layers) && layers.length > 0, 224 | "clip.layers must be an array with at least one layer.", 225 | ); 226 | 227 | layers = layers 228 | .map(expandLayerAliases) 229 | .flat() 230 | .map((layer) => { 231 | assert(layer.type, 'All "layers" must have a type'); 232 | return merge( 233 | {}, 234 | this.defaults.layer ?? {}, 235 | this.defaults.layerType?.[layer.type] ?? 
{}, 236 | layer, 237 | ); 238 | }); 239 | 240 | const { transition } = merge({}, this.defaults, clip); 241 | assert(transition == null || typeof transition === "object", "Transition must be an object"); 242 | 243 | return { transition, layers, duration: clip.duration }; 244 | }); 245 | 246 | // Testing options: 247 | this.verbose = input.verbose ?? false; 248 | this.enableFfmpegLog = input.enableFfmpegLog ?? this.verbose; 249 | this.logTimes = input.logTimes ?? false; 250 | this.keepTmp = input.keepTmp ?? false; 251 | this.fast = input.fast ?? false; 252 | 253 | this.ffmpegPath = input.ffmpegPath ?? "ffmpeg"; 254 | this.ffprobePath = input.ffprobePath ?? "ffprobe"; 255 | 256 | this.tmpDir = join(this.outDir, `editly-tmp-${nanoid()}`); 257 | } 258 | 259 | get outDir() { 260 | return dirname(this.outPath); 261 | } 262 | 263 | get isGif() { 264 | return this.outPath.toLowerCase().endsWith(".gif"); 265 | } 266 | } 267 | -------------------------------------------------------------------------------- /src/easings.ts: -------------------------------------------------------------------------------- 1 | // https://easings.net/ 2 | 3 | export type EasingFunction = (progress: number) => number; 4 | 5 | export const easeOutExpo: EasingFunction = (x: number) => (x === 1 ? 1 : 1 - 2 ** (-10 * x)); 6 | export const easeInOutCubic: EasingFunction = (x: number) => 7 | x < 0.5 ? 4 * x * x * x : 1 - (-2 * x + 2) ** 3 / 2; 8 | export const linear: EasingFunction = (x: number) => x; 9 | -------------------------------------------------------------------------------- /src/ffmpeg.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { compareVersions } from "compare-versions"; 3 | import { execa, type Options } from "execa"; 4 | import fsExtra from "fs-extra"; 5 | import { FfmpegConfig } from "./configuration.js"; 6 | 7 | export type Stream = { 8 | codec_type: string; 9 | codec_name: string; 10 | r_frame_rate: string; 11 | width?: number; 12 | height?: number; 13 | tags?: { 14 | rotate: string; 15 | }; 16 | side_data_list?: { 17 | rotation: string; 18 | }[]; 19 | }; 20 | 21 | const config: Required<FfmpegConfig> = { 22 | ffmpegPath: "ffmpeg", 23 | ffprobePath: "ffprobe", 24 | enableFfmpegLog: false, 25 | }; 26 | 27 | export function getFfmpegCommonArgs() { 28 | return ["-hide_banner", ...(config.enableFfmpegLog ? [] : ["-loglevel", "error"])]; 29 | } 30 | 31 | export function getCutFromArgs({ cutFrom }: { cutFrom?: number }) { 32 | return cutFrom ? ["-ss", cutFrom.toString()] : []; 33 | } 34 | 35 | export function getCutToArgs({ 36 | cutTo, 37 | cutFrom, 38 | speedFactor, 39 | }: { 40 | cutTo?: number; 41 | cutFrom?: number; 42 | speedFactor: number; 43 | }) { 44 | return cutFrom && cutTo ?
["-t", (cutTo - cutFrom) * speedFactor] : []; 45 | } 46 | 47 | export async function createConcatFile(segments: string[], concatFilePath: string) { 48 | // https://superuser.com/questions/787064/filename-quoting-in-ffmpeg-concat 49 | await fsExtra.writeFile( 50 | concatFilePath, 51 | segments.map((seg) => `file '${seg.replace(/'/g, "'\\''")}'`).join("\n"), 52 | ); 53 | } 54 | 55 | export async function testFf(exePath: string, name: string) { 56 | const minRequiredVersion = "4.3.1"; 57 | 58 | try { 59 | const { stdout } = await execa(exePath, ["-version"]); 60 | const firstLine = stdout.split("\n")[0]; 61 | const match = firstLine.match(`${name} version ([0-9.]+)`); 62 | assert(match, "Unknown version string"); 63 | const versionStr = match[1]; 64 | console.log(`${name} version ${versionStr}`); 65 | assert(compareVersions(versionStr, minRequiredVersion), "Version is outdated"); 66 | } catch (err) { 67 | console.error(`WARNING: ${name}:`, err); 68 | } 69 | } 70 | 71 | export async function configureFf(params: Partial) { 72 | Object.assign(config, params); 73 | await testFf(config.ffmpegPath, "ffmpeg"); 74 | await testFf(config.ffprobePath, "ffprobe"); 75 | } 76 | 77 | export function ffmpeg(args: string[], options?: Options) { 78 | if (config.enableFfmpegLog) console.log(`$ ${config.ffmpegPath} ${args.join(" ")}`); 79 | return execa(config.ffmpegPath, [...getFfmpegCommonArgs(), ...args], options); 80 | } 81 | 82 | export function ffprobe(args: string[]) { 83 | return execa(config.ffprobePath, args); 84 | } 85 | 86 | export function parseFps(fps?: string) { 87 | const match = typeof fps === "string" && fps.match(/^([0-9]+)\/([0-9]+)$/); 88 | if (match) { 89 | const num = parseInt(match[1], 10); 90 | const den = parseInt(match[2], 10); 91 | if (den > 0) return num / den; 92 | } 93 | return undefined; 94 | } 95 | 96 | export async function readDuration(p: string) { 97 | const { stdout } = await ffprobe([ 98 | "-v", 99 | "error", 100 | "-show_entries", 101 | "format=duration", 102 | "-of", 103 | "default=noprint_wrappers=1:nokey=1", 104 | p, 105 | ]); 106 | const parsed = parseFloat(stdout); 107 | assert(!Number.isNaN(parsed)); 108 | return parsed; 109 | } 110 | 111 | export async function readFileStreams(p: string) { 112 | const { stdout } = await ffprobe(["-show_entries", "stream", "-of", "json", p]); 113 | return JSON.parse(stdout).streams as Stream[]; 114 | } 115 | 116 | export async function readVideoFileInfo(p: string) { 117 | const streams = await readFileStreams(p); 118 | const stream = streams.find((s) => s.codec_type === "video"); // TODO 119 | 120 | if (!stream) { 121 | throw new Error(`Could not find a video stream in ${p}`); 122 | } 123 | 124 | const duration = await readDuration(p); 125 | 126 | let rotation = parseInt(stream.tags?.rotate ?? "", 10); 127 | 128 | // If we can't find rotation, try side_data_list 129 | if (Number.isNaN(rotation) && stream.side_data_list?.[0]?.rotation) { 130 | rotation = parseInt(stream.side_data_list[0].rotation, 10); 131 | } 132 | 133 | return { 134 | // numFrames: parseInt(stream.nb_frames, 10), 135 | duration, 136 | width: stream.width, // TODO coded_width? 137 | height: stream.height, 138 | framerateStr: stream.r_frame_rate, 139 | rotation: !Number.isNaN(rotation) ? 
rotation : undefined, 140 | }; 141 | } 142 | -------------------------------------------------------------------------------- /src/frameSource.ts: -------------------------------------------------------------------------------- 1 | import pMap from "p-map"; 2 | import type { DebugOptions } from "./configuration.js"; 3 | import type { ProcessedClip } from "./parseConfig.js"; 4 | import { createFabricCanvas, renderFabricCanvas, rgbaToFabricImage } from "./sources/fabric.js"; 5 | import { createLayerSource } from "./sources/index.js"; 6 | 7 | type FrameSourceOptions = DebugOptions & { 8 | clip: ProcessedClip; 9 | clipIndex: number; 10 | width: number; 11 | height: number; 12 | channels: number; 13 | framerateStr: string; 14 | }; 15 | 16 | export async function createFrameSource({ 17 | clip, 18 | clipIndex, 19 | width, 20 | height, 21 | channels, 22 | verbose, 23 | logTimes, 24 | framerateStr, 25 | }: FrameSourceOptions) { 26 | const { layers, duration } = clip; 27 | 28 | const visualLayers = layers.filter((layer) => layer.type !== "audio"); 29 | 30 | const layerFrameSources = await pMap( 31 | visualLayers, 32 | async (layer, layerIndex) => { 33 | if (verbose) 34 | console.log("createFrameSource", layer.type, "clip", clipIndex, "layer", layerIndex); 35 | const options = { 36 | width, 37 | height, 38 | duration, 39 | channels, 40 | verbose, 41 | logTimes, 42 | framerateStr, 43 | params: layer, 44 | }; 45 | return createLayerSource(options); 46 | }, 47 | { concurrency: 1 }, 48 | ); 49 | 50 | async function readNextFrame({ time }: { time: number }) { 51 | const canvas = createFabricCanvas({ width, height }); 52 | 53 | for (const frameSource of layerFrameSources) { 54 | if (logTimes) console.time("frameSource.readNextFrame"); 55 | const rgba = await frameSource.readNextFrame(time, canvas); 56 | if (logTimes) console.timeEnd("frameSource.readNextFrame"); 57 | 58 | // Frame sources can either render to the provided canvas and return nothing 59 | // OR return an raw RGBA blob which will be drawn onto the canvas 60 | if (rgba) { 61 | // Optimization: Don't need to draw to canvas if there's only one layer 62 | if (layerFrameSources.length === 1) return rgba; 63 | 64 | if (logTimes) console.time("rgbaToFabricImage"); 65 | const img = await rgbaToFabricImage({ width, height, rgba }); 66 | if (logTimes) console.timeEnd("rgbaToFabricImage"); 67 | canvas.add(img); 68 | } else { 69 | // Assume this frame source has drawn its content to the canvas 70 | } 71 | } 72 | // if (verbose) console.time('Merge frames'); 73 | 74 | if (logTimes) console.time("renderFabricCanvas"); 75 | const rgba = await renderFabricCanvas(canvas); 76 | if (logTimes) console.timeEnd("renderFabricCanvas"); 77 | return rgba; 78 | } 79 | 80 | async function close() { 81 | await pMap(layerFrameSources, (frameSource) => frameSource.close?.()); 82 | } 83 | 84 | return { 85 | readNextFrame, 86 | close, 87 | }; 88 | } 89 | 90 | export default { 91 | createFrameSource, 92 | }; 93 | -------------------------------------------------------------------------------- /src/index.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { Options, ResultPromise } from "execa"; 3 | import fsExtra from "fs-extra"; 4 | import JSON5 from "json5"; 5 | 6 | import Audio from "./audio.js"; 7 | import { Configuration, type ConfigurationOptions } from "./configuration.js"; 8 | import { configureFf, ffmpeg, parseFps } from "./ffmpeg.js"; 9 | import { createFrameSource } from "./frameSource.js"; 
10 | import parseConfig, { ProcessedClip } from "./parseConfig.js"; 11 | import { createFabricCanvas, rgbaToFabricImage } from "./sources/fabric.js"; 12 | import type { RenderSingleFrameConfig } from "./types.js"; 13 | import { assertFileValid, multipleOf2 } from "./util.js"; 14 | 15 | const channels = 4; 16 | 17 | export type * from "./transition.js"; 18 | export type * from "./types.js"; 19 | 20 | /** 21 | * Edit and render videos. 22 | * 23 | * @param config - ConfigurationOptions. 24 | */ 25 | async function Editly(input: ConfigurationOptions): Promise { 26 | const config = new Configuration(input); 27 | const { 28 | // Testing options: 29 | verbose = false, 30 | logTimes = false, 31 | keepTmp = false, 32 | fast = false, 33 | 34 | outPath, 35 | clips: clipsIn, 36 | clipsAudioVolume, 37 | audioTracks: arbitraryAudioIn, 38 | width: requestedWidth, 39 | height: requestedHeight, 40 | fps: requestedFps, 41 | audioFilePath: backgroundAudioPath, 42 | backgroundAudioVolume, 43 | loopAudio, 44 | keepSourceAudio, 45 | allowRemoteRequests, 46 | audioNorm, 47 | outputVolume, 48 | customOutputArgs, 49 | isGif, 50 | tmpDir, 51 | defaults, 52 | } = config; 53 | 54 | await configureFf(config); 55 | 56 | if (backgroundAudioPath) await assertFileValid(backgroundAudioPath, allowRemoteRequests); 57 | 58 | if (verbose) console.log(JSON5.stringify(config, null, 2)); 59 | 60 | const { clips, arbitraryAudio } = await parseConfig({ 61 | clips: clipsIn, 62 | arbitraryAudio: arbitraryAudioIn, 63 | backgroundAudioPath, 64 | backgroundAudioVolume, 65 | loopAudio, 66 | allowRemoteRequests, 67 | defaults, 68 | }); 69 | if (verbose) console.log("Calculated", JSON5.stringify({ clips, arbitraryAudio }, null, 2)); 70 | 71 | if (verbose) console.log({ tmpDir }); 72 | await fsExtra.mkdirp(tmpDir); 73 | 74 | const { editAudio } = Audio({ verbose, tmpDir }); 75 | 76 | const audioFilePath = !isGif 77 | ? await editAudio({ 78 | keepSourceAudio, 79 | arbitraryAudio, 80 | clipsAudioVolume, 81 | clips, 82 | audioNorm, 83 | outputVolume, 84 | }) 85 | : undefined; 86 | 87 | // Try to detect parameters from first video 88 | let firstVideoWidth; 89 | let firstVideoHeight; 90 | let firstVideoFramerateStr; 91 | 92 | clips.find( 93 | (clip) => 94 | clip && 95 | clip.layers.find((layer) => { 96 | if (layer.type === "video") { 97 | firstVideoWidth = layer.inputWidth; 98 | firstVideoHeight = layer.inputHeight; 99 | firstVideoFramerateStr = layer.framerateStr; 100 | 101 | return true; 102 | } 103 | return false; 104 | }), 105 | ); 106 | 107 | let width: number; 108 | let height: number; 109 | 110 | let desiredWidth; 111 | 112 | if (requestedWidth) desiredWidth = requestedWidth; 113 | else if (isGif) desiredWidth = 320; 114 | 115 | const roundDimension = (val: number) => (isGif ? Math.round(val) : multipleOf2(val)); 116 | 117 | if (firstVideoWidth && firstVideoHeight) { 118 | if (desiredWidth) { 119 | const calculatedHeight = (firstVideoHeight / firstVideoWidth) * desiredWidth; 120 | height = roundDimension(calculatedHeight); 121 | width = desiredWidth; 122 | } else { 123 | width = firstVideoWidth; 124 | height = firstVideoHeight; 125 | } 126 | } else if (desiredWidth) { 127 | width = desiredWidth; 128 | height = desiredWidth; 129 | // console.log(`Cannot detect width/height from video, set defaults ${width}x${height}`); 130 | } else { 131 | // No video 132 | width = 640; 133 | height = 640; 134 | } 135 | 136 | // User override? 
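// Summary of the sizing precedence implemented around this point:
//   1. an explicit width + height (the override just below) wins outright;
//   2. otherwise the requested width (320 by default for GIFs) is combined with the first
//      video layer's aspect ratio;
//   3. otherwise the first video layer's own dimensions are used;
//   4. otherwise a square of the requested width, falling back to 640x640 when there is no video.
// The `fast` option then shrinks whatever was chosen to roughly 250x250 px of area while
// preserving the aspect ratio, and non-GIF outputs are clamped to even dimensions for x264.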
137 | if (requestedWidth && requestedHeight) { 138 | width = requestedWidth; 139 | height = requestedHeight; 140 | } 141 | 142 | if (fast) { 143 | const numPixelsEachDirection = 250; 144 | const aspectRatio = width / height; 145 | width = roundDimension(numPixelsEachDirection * Math.sqrt(aspectRatio)); 146 | height = roundDimension(numPixelsEachDirection * Math.sqrt(1 / aspectRatio)); 147 | } 148 | 149 | assert(width, "Width not specified or detected"); 150 | assert(height, "Height not specified or detected"); 151 | 152 | if (!isGif) { 153 | // x264 requires multiple of 2, eg minimum 2 154 | width = Math.max(2, width); 155 | height = Math.max(2, height); 156 | } 157 | 158 | let fps: number; 159 | let framerateStr: string; 160 | 161 | if (fast) { 162 | fps = 15; 163 | framerateStr = String(fps); 164 | } else if (requestedFps && typeof requestedFps === "number") { 165 | fps = requestedFps; 166 | framerateStr = String(requestedFps); 167 | } else if (isGif) { 168 | fps = 10; 169 | framerateStr = String(fps); 170 | } else if (firstVideoFramerateStr) { 171 | fps = parseFps(firstVideoFramerateStr) ?? 25; 172 | framerateStr = firstVideoFramerateStr; 173 | } else { 174 | fps = 25; 175 | framerateStr = String(fps); 176 | } 177 | 178 | assert(fps, "FPS not specified or detected"); 179 | 180 | console.log(`${width}x${height} ${fps}fps`); 181 | 182 | const estimatedTotalFrames = 183 | fps * 184 | clips.reduce((acc, c, i) => { 185 | let newAcc = acc + c.duration; 186 | if (i !== clips.length - 1) newAcc -= c.transition.duration; 187 | return newAcc; 188 | }, 0); 189 | 190 | function getOutputArgs() { 191 | if (customOutputArgs) { 192 | assert(Array.isArray(customOutputArgs), "customOutputArgs must be an array of arguments"); 193 | return customOutputArgs; 194 | } 195 | 196 | // https://superuser.com/questions/556029/how-do-i-convert-a-video-to-gif-using-ffmpeg-with-reasonable-quality 197 | const videoOutputArgs = isGif 198 | ? [ 199 | "-vf", 200 | `format=rgb24,fps=${fps},scale=${width}:${height}:flags=lanczos,split[s0][s1];[s0]palettegen[p];[s1][p]paletteuse`, 201 | "-loop", 202 | "0", 203 | ] 204 | : [ 205 | "-vf", 206 | "format=yuv420p", 207 | "-vcodec", 208 | "libx264", 209 | "-profile:v", 210 | "high", 211 | ...(fast ? ["-preset:v", "ultrafast"] : ["-preset:v", "medium"]), 212 | "-crf", 213 | "18", 214 | 215 | "-movflags", 216 | "faststart", 217 | ]; 218 | 219 | const audioOutputArgs = audioFilePath ? ["-acodec", "aac", "-b:a", "128k"] : []; 220 | 221 | return [...audioOutputArgs, ...videoOutputArgs]; 222 | } 223 | 224 | function startFfmpegWriterProcess() { 225 | const args = [ 226 | "-f", 227 | "rawvideo", 228 | "-vcodec", 229 | "rawvideo", 230 | "-pix_fmt", 231 | "rgba", 232 | "-s", 233 | `${width}x${height}`, 234 | "-r", 235 | framerateStr, 236 | "-i", 237 | "-", 238 | 239 | ...(audioFilePath ? ["-i", audioFilePath] : []), 240 | 241 | ...(!isGif ? ["-map", "0:v:0"] : []), 242 | ...(audioFilePath ? 
["-map", "1:a:0"] : []), 243 | 244 | ...getOutputArgs(), 245 | 246 | "-y", 247 | outPath, 248 | ]; 249 | return ffmpeg(args, { 250 | encoding: "buffer", 251 | buffer: false, 252 | stdin: "pipe", 253 | stdout: process.stdout, 254 | stderr: process.stderr, 255 | }); 256 | } 257 | 258 | let outProcess: ResultPromise | undefined; 259 | let outProcessExitCode; 260 | 261 | let frameSource1; 262 | let frameSource2; 263 | 264 | let frameSource1Data; 265 | 266 | let totalFramesWritten = 0; 267 | let fromClipFrameAt = 0; 268 | let toClipFrameAt = 0; 269 | 270 | let transitionFromClipId = 0; 271 | 272 | const getTransitionToClipId = () => transitionFromClipId + 1; 273 | const getTransitionFromClip = () => clips[transitionFromClipId]; 274 | const getTransitionToClip = () => clips[getTransitionToClipId()]; 275 | 276 | const getSource = async (clip: ProcessedClip, clipIndex: number) => 277 | createFrameSource({ 278 | clip, 279 | clipIndex, 280 | width, 281 | height, 282 | channels, 283 | verbose, 284 | logTimes, 285 | framerateStr, 286 | }); 287 | const getTransitionFromSource = async () => 288 | getSource(getTransitionFromClip(), transitionFromClipId); 289 | const getTransitionToSource = async () => 290 | getTransitionToClip() && getSource(getTransitionToClip(), getTransitionToClipId()); 291 | 292 | try { 293 | try { 294 | outProcess = startFfmpegWriterProcess(); 295 | 296 | let outProcessError; 297 | 298 | outProcess.on("exit", (code) => { 299 | if (verbose) console.log("Output ffmpeg exited", code); 300 | outProcessExitCode = code; 301 | }); 302 | 303 | // If we write and get an EPIPE (like when ffmpeg fails or is finished), we could get an unhandled rejection if we don't catch the promise 304 | // (and meow causes the CLI to exit on unhandled rejections making it hard to see) 305 | outProcess.catch((err) => { 306 | outProcessError = err; 307 | }); 308 | 309 | frameSource1 = await getTransitionFromSource(); 310 | frameSource2 = await getTransitionToSource(); 311 | 312 | while (!outProcessError) { 313 | const transitionToClip = getTransitionToClip(); 314 | const transitionFromClip = getTransitionFromClip(); 315 | const fromClipNumFrames = Math.round(transitionFromClip.duration * fps); 316 | const toClipNumFrames = transitionToClip && Math.round(transitionToClip.duration * fps); 317 | const fromClipProgress = fromClipFrameAt / fromClipNumFrames; 318 | const toClipProgress = transitionToClip && toClipFrameAt / toClipNumFrames; 319 | const fromClipTime = transitionFromClip.duration * fromClipProgress; 320 | const toClipTime = transitionToClip && transitionToClip.duration * toClipProgress; 321 | 322 | const currentTransition = transitionFromClip.transition; 323 | const transitionNumFrames = Math.round(currentTransition.duration * fps); 324 | const runTransitionOnFrame = currentTransition.create({ width, height, channels }); 325 | 326 | // Each clip has two transitions, make sure we leave enough room: 327 | const transitionNumFramesSafe = Math.floor( 328 | Math.min( 329 | Math.min( 330 | fromClipNumFrames, 331 | toClipNumFrames != null ? toClipNumFrames : Number.MAX_SAFE_INTEGER, 332 | ) / 2, 333 | transitionNumFrames, 334 | ), 335 | ); 336 | // How many frames into the transition are we? 
negative means not yet started 337 | const transitionFrameAt = fromClipFrameAt - (fromClipNumFrames - transitionNumFramesSafe); 338 | 339 | if (!verbose) { 340 | const percentDone = Math.floor(100 * (totalFramesWritten / estimatedTotalFrames)); 341 | if (totalFramesWritten % 10 === 0) 342 | process.stdout.write(`${String(percentDone).padStart(3, " ")}% `); 343 | } 344 | 345 | // console.log({ transitionFrameAt, transitionNumFramesSafe }) 346 | // const transitionLastFrameIndex = transitionNumFramesSafe - 1; 347 | const transitionLastFrameIndex = transitionNumFramesSafe; 348 | 349 | // Done with transition? 350 | if (transitionFrameAt >= transitionLastFrameIndex) { 351 | transitionFromClipId += 1; 352 | console.log( 353 | `Done with transition, switching to next transitionFromClip (${transitionFromClipId})`, 354 | ); 355 | 356 | if (!getTransitionFromClip()) { 357 | console.log("No more transitionFromClip, done"); 358 | break; 359 | } 360 | 361 | // Cleanup completed frameSource1, swap and load next frameSource2 362 | await frameSource1.close(); 363 | frameSource1 = frameSource2; 364 | frameSource2 = await getTransitionToSource(); 365 | 366 | fromClipFrameAt = transitionLastFrameIndex; 367 | toClipFrameAt = 0; 368 | 369 | continue; 370 | } 371 | 372 | if (logTimes) console.time("Read frameSource1"); 373 | const newFrameSource1Data = await frameSource1.readNextFrame({ time: fromClipTime }); 374 | if (logTimes) console.timeEnd("Read frameSource1"); 375 | // If we got no data, use the old data 376 | // TODO maybe abort? 377 | if (newFrameSource1Data) frameSource1Data = newFrameSource1Data; 378 | else console.warn("No frame data returned, using last frame"); 379 | 380 | const isInTransition = 381 | frameSource2 && transitionNumFramesSafe > 0 && transitionFrameAt >= 0; 382 | 383 | let outFrameData; 384 | 385 | if (isInTransition) { 386 | if (logTimes) console.time("Read frameSource2"); 387 | const frameSource2Data = await frameSource2.readNextFrame({ time: toClipTime }); 388 | if (logTimes) console.timeEnd("Read frameSource2"); 389 | 390 | if (frameSource2Data) { 391 | const progress = transitionFrameAt / transitionNumFramesSafe; 392 | 393 | if (logTimes) console.time("runTransitionOnFrame"); 394 | 395 | outFrameData = runTransitionOnFrame({ 396 | fromFrame: frameSource1Data!, 397 | toFrame: frameSource2Data, 398 | progress: progress, 399 | }); 400 | 401 | if (logTimes) console.timeEnd("runTransitionOnFrame"); 402 | } else { 403 | console.warn("Got no frame data from transitionToClip!"); 404 | // We have probably reached end of clip2 but transition is not complete. Just pass thru clip1 405 | outFrameData = frameSource1Data; 406 | } 407 | } else { 408 | // Not in transition. 
Pass thru clip 1 409 | outFrameData = frameSource1Data; 410 | } 411 | 412 | if (verbose) { 413 | if (isInTransition) 414 | console.log( 415 | "Writing frame:", 416 | totalFramesWritten, 417 | "from clip", 418 | transitionFromClipId, 419 | `(frame ${fromClipFrameAt})`, 420 | "to clip", 421 | getTransitionToClipId(), 422 | `(frame ${toClipFrameAt} / ${transitionNumFramesSafe})`, 423 | currentTransition.name, 424 | `${currentTransition.duration}s`, 425 | ); 426 | else 427 | console.log( 428 | "Writing frame:", 429 | totalFramesWritten, 430 | "from clip", 431 | transitionFromClipId, 432 | `(frame ${fromClipFrameAt})`, 433 | ); 434 | // console.log(outFrameData.length / 1e6, 'MB'); 435 | } 436 | 437 | const nullOutput = false; 438 | 439 | if (logTimes) console.time("outProcess.write"); 440 | 441 | // If we don't wait, then we get EINVAL when dealing with high resolution files (big writes) 442 | if (!nullOutput) await new Promise((r) => outProcess?.stdin?.write(outFrameData, r)); 443 | 444 | if (logTimes) console.timeEnd("outProcess.write"); 445 | 446 | if (outProcessError) break; 447 | 448 | totalFramesWritten += 1; 449 | fromClipFrameAt += 1; 450 | if (isInTransition) toClipFrameAt += 1; 451 | } // End while loop 452 | 453 | outProcess.stdin?.end(); 454 | } catch (err) { 455 | outProcess?.kill(); 456 | throw err; 457 | } finally { 458 | if (verbose) console.log("Cleanup"); 459 | if (frameSource1) await frameSource1.close(); 460 | if (frameSource2) await frameSource2.close(); 461 | } 462 | 463 | try { 464 | if (verbose) console.log("Waiting for output ffmpeg process to finish"); 465 | await outProcess; 466 | } catch (err) { 467 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 468 | if (outProcessExitCode !== 0 && !(err as any).isTerminated) throw err; 469 | } 470 | } finally { 471 | if (!keepTmp) await fsExtra.remove(tmpDir); 472 | } 473 | 474 | console.log(); 475 | console.log("Done. Output file can be found at:"); 476 | console.log(outPath); 477 | } 478 | 479 | /** 480 | * Pure function to get a frame at a certain time. 481 | * TODO: I think this does not respect transition durations 482 | * 483 | * @param config - ConfigurationOptions. 484 | */ 485 | export async function renderSingleFrame(input: RenderSingleFrameConfig): Promise { 486 | const time = input.time ?? 0; 487 | 488 | const config = new Configuration(input); 489 | const { 490 | clips: clipsIn, 491 | allowRemoteRequests, 492 | width = 800, 493 | height = 600, 494 | verbose, 495 | logTimes, 496 | outPath = `${Math.floor(Math.random() * 1e12)}.png`, 497 | defaults, 498 | } = config; 499 | 500 | configureFf(config); 501 | 502 | console.log({ clipsIn }); 503 | 504 | const { clips } = await parseConfig({ 505 | clips: clipsIn, 506 | arbitraryAudio: [], 507 | allowRemoteRequests, 508 | defaults, 509 | }); 510 | let clipStartTime = 0; 511 | const clip = clips.find((c) => { 512 | if (clipStartTime <= time && clipStartTime + c.duration > time) return true; 513 | clipStartTime += c.duration; 514 | return false; 515 | }); 516 | assert(clip, "No clip found at requested time"); 517 | const clipIndex = clips.indexOf(clip); 518 | const frameSource = await createFrameSource({ 519 | clip, 520 | clipIndex, 521 | width, 522 | height, 523 | channels, 524 | verbose, 525 | logTimes, 526 | framerateStr: "1", 527 | }); 528 | const rgba = await frameSource.readNextFrame({ time: time - clipStartTime }); 529 | 530 | // TODO converting rgba to png can be done more easily? 
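// Sketch only (not the current implementation, and `sharp` is not a dependency of this project):
// if `sharp` were added, the raw RGBA buffer could likely be encoded to PNG directly, e.g.
//   await sharp(rgba, { raw: { width, height, channels: 4 } }).png().toFile(outPath);
// The fabric-canvas route below is what is actually used.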
531 | const canvas = createFabricCanvas({ width, height }); 532 | const fabricImage = await rgbaToFabricImage({ width, height, rgba }); 533 | canvas.add(fabricImage); 534 | canvas.renderAll(); 535 | const internalCanvas = canvas.getNodeCanvas(); 536 | await fsExtra.writeFile(outPath, internalCanvas.toBuffer("image/png")); 537 | canvas.clear(); 538 | canvas.dispose(); 539 | await frameSource.close(); 540 | } 541 | 542 | Editly.renderSingleFrame = renderSingleFrame; 543 | 544 | export default Editly; 545 | -------------------------------------------------------------------------------- /src/parseConfig.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import { registerFont } from "canvas"; 3 | import flatMap from "lodash-es/flatMap.js"; 4 | import pMap from "p-map"; 5 | import { basename } from "path"; 6 | import { Configuration } from "./configuration.js"; 7 | import { readDuration, readVideoFileInfo } from "./ffmpeg.js"; 8 | import { Transition } from "./transition.js"; 9 | import type { 10 | AudioTrack, 11 | CanvasLayer, 12 | FabricLayer, 13 | ImageLayer, 14 | ImageOverlayLayer, 15 | Layer, 16 | NewsTitleLayer, 17 | SlideInTextLayer, 18 | SubtitleLayer, 19 | TitleLayer, 20 | VideoLayer, 21 | } from "./types.js"; 22 | import { assertFileValid } from "./util.js"; 23 | 24 | export type ProcessedClip = { 25 | layers: Layer[]; 26 | duration: number; 27 | transition: Transition; 28 | }; 29 | 30 | // Cache 31 | const loadedFonts: string[] = []; 32 | 33 | async function validateArbitraryAudio( 34 | audio: AudioTrack[] | undefined, 35 | allowRemoteRequests?: boolean, 36 | ) { 37 | assert(audio === undefined || Array.isArray(audio)); 38 | 39 | if (audio) { 40 | for (const { path, cutFrom, cutTo, start } of audio) { 41 | await assertFileValid(path, allowRemoteRequests); 42 | 43 | if (cutFrom != null && cutTo != null) assert(cutTo > cutFrom); 44 | if (cutFrom != null) assert(cutFrom >= 0); 45 | if (cutTo != null) assert(cutTo >= 0); 46 | assert(start == null || start >= 0, `Invalid "start" ${start}`); 47 | } 48 | } 49 | } 50 | 51 | type ParseConfigOptions = { 52 | backgroundAudioPath?: string; 53 | arbitraryAudio: AudioTrack[]; 54 | } & Pick< 55 | Configuration, 56 | "clips" | "backgroundAudioVolume" | "loopAudio" | "allowRemoteRequests" | "defaults" 57 | >; 58 | 59 | export default async function parseConfig({ 60 | clips, 61 | arbitraryAudio: arbitraryAudioIn, 62 | backgroundAudioPath, 63 | backgroundAudioVolume, 64 | loopAudio, 65 | allowRemoteRequests, 66 | defaults, 67 | }: ParseConfigOptions) { 68 | async function handleLayer(layer: Layer): Promise { 69 | // https://github.com/mifi/editly/issues/39 70 | if (layer.type === "image" || layer.type === "image-overlay") { 71 | await assertFileValid((layer as ImageOverlayLayer | ImageLayer).path, allowRemoteRequests); 72 | } else if (layer.type === "gl") { 73 | await assertFileValid(layer.fragmentPath, allowRemoteRequests); 74 | } 75 | 76 | if (["fabric", "canvas"].includes(layer.type)) { 77 | assert( 78 | typeof (layer as FabricLayer | CanvasLayer).func === "function", 79 | '"func" must be a function', 80 | ); 81 | } 82 | 83 | if ( 84 | [ 85 | "image", 86 | "image-overlay", 87 | "fabric", 88 | "canvas", 89 | "gl", 90 | "radial-gradient", 91 | "linear-gradient", 92 | "fill-color", 93 | ].includes(layer.type) 94 | ) { 95 | return layer; 96 | } 97 | 98 | if (["title", "subtitle", "news-title", "slide-in-text"].includes(layer.type)) { 99 | const { fontPath, ...rest } = layer as 100 | 
| TitleLayer 101 | | SubtitleLayer 102 | | NewsTitleLayer 103 | | SlideInTextLayer; 104 | assert(rest.text, "Please specify a text"); 105 | 106 | let { fontFamily } = rest; 107 | if (fontPath) { 108 | fontFamily = Buffer.from(basename(fontPath)).toString("base64"); 109 | if (!loadedFonts.includes(fontFamily)) { 110 | registerFont(fontPath, { family: fontFamily, weight: "regular", style: "normal" }); 111 | loadedFonts.push(fontFamily); 112 | } 113 | } 114 | return { ...rest, fontFamily }; 115 | } 116 | 117 | throw new Error(`Invalid layer type ${layer.type}`); 118 | } 119 | 120 | const detachedAudioByClip: Record = {}; 121 | 122 | let clipsOut: ProcessedClip[] = await pMap( 123 | clips, 124 | async (clip, clipIndex) => { 125 | const { layers } = clip; 126 | const transition = new Transition(clip.transition, clipIndex === clips.length - 1); 127 | 128 | let layersOut = flatMap( 129 | await pMap( 130 | layers, 131 | async (layer: T) => { 132 | if (layer.type === "video") { 133 | const { 134 | duration: fileDuration, 135 | width: widthIn, 136 | height: heightIn, 137 | framerateStr, 138 | rotation, 139 | } = await readVideoFileInfo(layer.path); 140 | let { cutFrom, cutTo } = layer; 141 | if (!cutFrom) cutFrom = 0; 142 | cutFrom = Math.max(cutFrom, 0); 143 | cutFrom = Math.min(cutFrom, fileDuration); 144 | 145 | if (!cutTo) cutTo = fileDuration; 146 | cutTo = Math.max(cutTo, cutFrom); 147 | cutTo = Math.min(cutTo, fileDuration); 148 | assert(cutFrom < cutTo, "cutFrom must be lower than cutTo"); 149 | 150 | const layerDuration = cutTo - cutFrom; 151 | 152 | const isRotated = rotation && [-90, 90, 270, -270].includes(rotation); 153 | const inputWidth = isRotated ? heightIn : widthIn; 154 | const inputHeight = isRotated ? widthIn : heightIn; 155 | 156 | return { 157 | ...layer, 158 | cutFrom, 159 | cutTo, 160 | layerDuration, 161 | framerateStr, 162 | inputWidth, 163 | inputHeight, 164 | } as T; 165 | } 166 | 167 | // Audio is handled later 168 | if (["audio", "detached-audio"].includes(layer.type)) return layer; 169 | 170 | return handleLayer(layer); 171 | }, 172 | { concurrency: 1 }, 173 | ), 174 | ); 175 | 176 | let clipDuration = clip.duration; 177 | 178 | if (!clipDuration) { 179 | const video = layersOut.find((layer): layer is VideoLayer => layer.type === "video"); 180 | clipDuration = video?.layerDuration ?? 
defaults.duration; 181 | } 182 | 183 | assert(clipDuration, `Duration parameter is required for videoless clip ${clipIndex}`); 184 | 185 | // We need to map again, because for audio, we need to know the correct clipDuration 186 | layersOut = ( 187 | await pMap(layersOut, async (layerIn: T) => { 188 | if (!layerIn.start) layerIn.start = 0; 189 | 190 | // This feature allows the user to show another layer overlayed (or replacing) parts of the lower layers (start - stop) 191 | const layerDuration = (layerIn.stop || clipDuration) - layerIn.start; 192 | assert( 193 | layerDuration > 0 && layerDuration <= clipDuration, 194 | `Invalid start ${layerIn.start} or stop ${layerIn.stop} (${clipDuration})`, 195 | ); 196 | // TODO Also need to handle video layers (speedFactor etc) 197 | // TODO handle audio in case of start/stop 198 | 199 | const layer: T = { ...layerIn, layerDuration }; 200 | 201 | if (layer.type === "audio") { 202 | const fileDuration = await readDuration(layer.path); 203 | let { cutFrom, cutTo } = layer; 204 | 205 | // console.log({ cutFrom, cutTo, fileDuration, clipDuration }); 206 | 207 | if (!cutFrom) cutFrom = 0; 208 | cutFrom = Math.max(cutFrom, 0); 209 | cutFrom = Math.min(cutFrom, fileDuration); 210 | 211 | if (!cutTo) cutTo = cutFrom + clipDuration; 212 | cutTo = Math.max(cutTo, cutFrom); 213 | cutTo = Math.min(cutTo, fileDuration); 214 | assert(cutFrom < cutTo, "cutFrom must be lower than cutTo"); 215 | 216 | const layerDuration = cutTo - cutFrom; 217 | 218 | const speedFactor = clipDuration / layerDuration; 219 | 220 | return { ...layer, cutFrom, cutTo, speedFactor }; 221 | } 222 | 223 | if (layer.type === "video") { 224 | let speedFactor; 225 | 226 | // If user explicitly specified duration for clip, it means that should be the output duration of the video 227 | if (clipDuration) { 228 | // Later we will speed up or slow down video using this factor 229 | speedFactor = clipDuration / layerDuration; 230 | } else { 231 | speedFactor = 1; 232 | } 233 | 234 | return { ...layer, speedFactor }; 235 | } 236 | 237 | // These audio tracks are detached from the clips (can run over multiple clips) 238 | // This is useful so we can have audio start relative to their parent clip's start time 239 | if (layer.type === "detached-audio") { 240 | if (!detachedAudioByClip[clipIndex]) detachedAudioByClip[clipIndex] = []; 241 | detachedAudioByClip[clipIndex].push(layer); 242 | return undefined; // Will be filtered out 243 | } 244 | 245 | return layer; 246 | }) 247 | ).filter((l) => l !== undefined); 248 | 249 | // Filter out deleted layers 250 | layersOut = layersOut.filter((l) => l); 251 | 252 | return { 253 | transition, 254 | duration: clipDuration, 255 | layers: layersOut, 256 | }; 257 | }, 258 | { concurrency: 1 }, 259 | ); 260 | 261 | let totalClipDuration = 0; 262 | const clipDetachedAudio: AudioTrack[] = []; 263 | 264 | // Need to map again because now we know all clip durations, and we can adjust transitions so they are safe 265 | clipsOut = await pMap(clipsOut, async (clip, i) => { 266 | const nextClip = clipsOut[i + 1]; 267 | 268 | // We clamp all transitions to half the length of every clip. 
If not, we risk that clips that are too short, 269 | // will be eaten by transitions and could cause de-sync issues with audio/video 270 | // NOTE: similar logic is duplicated in index.js 271 | let safeTransitionDuration = 0; 272 | if (nextClip) { 273 | // Each clip can have two transitions, make sure we leave enough room: 274 | safeTransitionDuration = Math.min( 275 | clip.duration / 2, 276 | nextClip.duration / 2, 277 | clip.transition!.duration!, 278 | ); 279 | } 280 | 281 | // We now know all clip durations so we can calculate the offset for detached audio tracks 282 | for (const { start, ...rest } of detachedAudioByClip[i] || []) { 283 | clipDetachedAudio.push({ ...rest, start: totalClipDuration + (start || 0) }); 284 | } 285 | 286 | totalClipDuration += clip.duration - safeTransitionDuration; 287 | clip.transition.duration = safeTransitionDuration; 288 | 289 | return clip; 290 | }); 291 | 292 | // Audio can either come from `audioFilePath`, `audio` or from "detached" audio layers from clips 293 | const arbitraryAudio = [ 294 | // Background audio is treated just like arbitrary audio 295 | ...(backgroundAudioPath 296 | ? [ 297 | { 298 | path: backgroundAudioPath, 299 | mixVolume: backgroundAudioVolume != null ? backgroundAudioVolume : 1, 300 | loop: loopAudio ? -1 : 0, 301 | }, 302 | ] 303 | : []), 304 | ...arbitraryAudioIn, 305 | ...clipDetachedAudio, 306 | ]; 307 | 308 | await validateArbitraryAudio(arbitraryAudio, allowRemoteRequests); 309 | 310 | return { 311 | clips: clipsOut, 312 | arbitraryAudio, 313 | }; 314 | } 315 | -------------------------------------------------------------------------------- /src/sources/canvas.ts: -------------------------------------------------------------------------------- 1 | import { createCanvas } from "canvas"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import type { CanvasLayer } from "../types.js"; 4 | import { canvasToRgba } from "./fabric.js"; 5 | 6 | export default defineFrameSource("canvas", async ({ width, height, params }) => { 7 | const canvas = createCanvas(width, height); 8 | const context = canvas.getContext("2d"); 9 | 10 | const { onClose, onRender } = await params.func({ width, height, canvas }); 11 | 12 | async function readNextFrame(progress: number) { 13 | context.clearRect(0, 0, canvas.width, canvas.height); 14 | await onRender(progress); 15 | // require('fs').writeFileSync(`${new Date().getTime()}.png`, canvas.toBuffer('image/png')); 16 | // I don't know any way to draw a node-canvas as a layer on a fabric.js canvas, other than converting to rgba first: 17 | return canvasToRgba(context); 18 | } 19 | 20 | return { 21 | readNextFrame, 22 | // Node canvas needs no cleanup https://github.com/Automattic/node-canvas/issues/1216#issuecomment-412390668 23 | close: onClose, 24 | }; 25 | }); 26 | -------------------------------------------------------------------------------- /src/sources/fabric.ts: -------------------------------------------------------------------------------- 1 | import { type CanvasRenderingContext2D, createCanvas, ImageData } from "canvas"; 2 | import * as fabric from "fabric/node"; 3 | import { boxBlurImage } from "../BoxBlur.js"; 4 | import { defineFrameSource } from "../api/index.js"; 5 | import type { FabricLayer } from "../types.js"; 6 | 7 | // Fabric is used as a fundament for compositing layers in editly 8 | 9 | export function canvasToRgba(ctx: CanvasRenderingContext2D) { 10 | // We cannot use toBuffer('raw') because it returns pre-multiplied alpha data (a different format) 11 | // 
https://gamedev.stackexchange.com/questions/138813/whats-the-difference-between-alpha-and-premulalpha 12 | // https://github.com/Automattic/node-canvas#image-pixel-formats-experimental 13 | const imageData = ctx.getImageData(0, 0, ctx.canvas.width, ctx.canvas.height); 14 | return Buffer.from(imageData.data); 15 | } 16 | 17 | export function fabricCanvasToRgba(fabricCanvas: fabric.StaticCanvas) { 18 | const internalCanvas = fabricCanvas.getNodeCanvas(); 19 | const ctx = internalCanvas.getContext("2d"); 20 | 21 | return canvasToRgba(ctx); 22 | } 23 | 24 | export function createFabricCanvas({ width, height }: { width: number; height: number }) { 25 | return new fabric.StaticCanvas(null, { width, height }); 26 | } 27 | 28 | export async function renderFabricCanvas(canvas: fabric.StaticCanvas) { 29 | // console.time('canvas.renderAll'); 30 | canvas.renderAll(); 31 | // console.timeEnd('canvas.renderAll'); 32 | const rgba = fabricCanvasToRgba(canvas); 33 | canvas.clear(); 34 | canvas.dispose(); 35 | return rgba; 36 | } 37 | 38 | export function toUint8ClampedArray(buffer: Buffer) { 39 | // return Uint8ClampedArray.from(buffer); 40 | // Some people are finding that manual copying is orders of magnitude faster than Uint8ClampedArray.from 41 | // Since I'm getting similar times for both methods, then why not: 42 | const data = new Uint8ClampedArray(buffer.length); 43 | for (let i = 0; i < buffer.length; i += 1) { 44 | data[i] = buffer[i]; 45 | } 46 | return data; 47 | } 48 | 49 | export async function rgbaToFabricImage({ 50 | width, 51 | height, 52 | rgba, 53 | }: { 54 | width: number; 55 | height: number; 56 | rgba: Buffer; 57 | }) { 58 | const canvas = createCanvas(width, height); 59 | 60 | // FIXME: Fabric tries to add a class to this, but DOM is not defined. Because node? 
61 | // https://github.com/fabricjs/fabric.js/issues/10032 62 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 63 | (canvas as any).classList = new Set(); 64 | 65 | const ctx = canvas.getContext("2d"); 66 | // https://developer.mozilla.org/en-US/docs/Web/API/ImageData/ImageData 67 | // https://developer.mozilla.org/en-US/docs/Web/API/CanvasRenderingContext2D/putImageData 68 | ctx.putImageData(new ImageData(toUint8ClampedArray(rgba), width, height), 0, 0); 69 | // https://stackoverflow.com/questions/58209996/unable-to-render-tiff-images-and-add-it-as-a-fabric-object 70 | return new fabric.FabricImage(canvas); 71 | } 72 | 73 | export type BlurImageOptions = { 74 | mutableImg: fabric.FabricImage; 75 | width: number; 76 | height: number; 77 | }; 78 | 79 | export async function blurImage({ mutableImg, width, height }: BlurImageOptions) { 80 | mutableImg.set({ scaleX: width / mutableImg.width, scaleY: height / mutableImg.height }); 81 | 82 | const canvas = mutableImg.toCanvasElement(); 83 | const ctx = canvas.getContext("2d"); 84 | 85 | const blurAmount = Math.min(100, Math.max(width, height) / 10); // More than 100 seems to cause issues 86 | const passes = 1; 87 | boxBlurImage(ctx, width, height, blurAmount, false, passes); 88 | 89 | return new fabric.FabricImage(canvas); 90 | } // http://fabricjs.com/kitchensink 91 | 92 | export default defineFrameSource("fabric", async ({ width, height, params }) => { 93 | const { onRender, onClose } = await params.func({ width, height, fabric, params }); 94 | 95 | return { 96 | readNextFrame: onRender, 97 | close: onClose, 98 | }; 99 | }); 100 | -------------------------------------------------------------------------------- /src/sources/fill-color.ts: -------------------------------------------------------------------------------- 1 | import { Rect } from "fabric/node"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import { getRandomColors } from "../colors.js"; 4 | import type { FillColorLayer } from "../types.js"; 5 | 6 | export default defineFrameSource( 7 | "fill-color", 8 | async ({ params, width, height }) => { 9 | const { color } = params; 10 | 11 | const randomColor = getRandomColors(1)[0]; 12 | 13 | return { 14 | async readNextFrame(_, canvas) { 15 | const rect = new Rect({ 16 | left: 0, 17 | right: 0, 18 | width, 19 | height, 20 | fill: color || randomColor, 21 | }); 22 | canvas.add(rect); 23 | }, 24 | }; 25 | }, 26 | ); 27 | -------------------------------------------------------------------------------- /src/sources/gl.ts: -------------------------------------------------------------------------------- 1 | import GL from "gl"; 2 | import createShader from "gl-shader"; 3 | import { readFile } from "node:fs/promises"; 4 | import { defineFrameSource } from "../api/index.js"; 5 | import type { GlLayer } from "../types.js"; 6 | 7 | // I have no idea what I'm doing but it works ¯\_(ツ)_/¯ 8 | 9 | export default defineFrameSource("gl", async ({ width, height, channels, params }) => { 10 | const gl = GL(width, height); 11 | 12 | const defaultVertexSrc = ` 13 | attribute vec2 position; 14 | void main(void) { 15 | gl_Position = vec4(position, 0.0, 1.0 ); 16 | } 17 | `; 18 | const { 19 | vertexPath, 20 | fragmentPath, 21 | vertexSrc: vertexSrcIn, 22 | fragmentSrc: fragmentSrcIn, 23 | speed = 1, 24 | } = params; 25 | 26 | let fragmentSrc = fragmentSrcIn; 27 | let vertexSrc = vertexSrcIn; 28 | 29 | if (fragmentPath) fragmentSrc = (await readFile(fragmentPath)).toString(); 30 | if (vertexPath) vertexSrc = (await 
readFile(vertexPath)).toString(); 31 | 32 | if (!vertexSrc) vertexSrc = defaultVertexSrc; 33 | 34 | const shader = createShader(gl, vertexSrc, fragmentSrc ?? ""); 35 | const buffer = gl.createBuffer(); 36 | gl.bindBuffer(gl.ARRAY_BUFFER, buffer); 37 | // https://blog.mayflower.de/4584-Playing-around-with-pixel-shaders-in-WebGL.html 38 | 39 | gl.bufferData(gl.ARRAY_BUFFER, new Float32Array([-1, -1, 1, -1, 1, 1, -1, 1]), gl.STATIC_DRAW); 40 | 41 | async function readNextFrame(progress: number) { 42 | shader.bind(); 43 | 44 | shader.attributes.position.pointer(); 45 | 46 | shader.uniforms.resolution = [width, height]; 47 | shader.uniforms.time = progress * speed; 48 | 49 | gl.drawArrays(gl.TRIANGLE_FAN, 0, 4); 50 | 51 | const upsideDownArray = Buffer.allocUnsafe(width * height * channels); 52 | gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, upsideDownArray); 53 | const outArray = Buffer.allocUnsafe(width * height * channels); 54 | 55 | // Comes out upside down, flip it 56 | for (let i = 0; i < outArray.length; i += 4) { 57 | outArray[i + 0] = upsideDownArray[outArray.length - i + 0]; 58 | outArray[i + 1] = upsideDownArray[outArray.length - i + 1]; 59 | outArray[i + 2] = upsideDownArray[outArray.length - i + 2]; 60 | outArray[i + 3] = upsideDownArray[outArray.length - i + 3]; 61 | } 62 | return outArray; 63 | } 64 | 65 | return { 66 | readNextFrame, 67 | }; 68 | }); 69 | -------------------------------------------------------------------------------- /src/sources/image-overlay.ts: -------------------------------------------------------------------------------- 1 | import * as fabric from "fabric/node"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import type { ImageOverlayLayer } from "../types.js"; 4 | import { getPositionProps, getTranslationParams, getZoomParams, loadImage } from "../util.js"; 5 | 6 | export default defineFrameSource( 7 | "image-overlay", 8 | async ({ params, width, height }) => { 9 | const { 10 | path, 11 | position, 12 | width: relWidth, 13 | height: relHeight, 14 | zoomDirection, 15 | zoomAmount = 0.1, 16 | } = params; 17 | 18 | const imgData = await loadImage(path); 19 | 20 | const img = new fabric.FabricImage(imgData, getPositionProps({ position, width, height })); 21 | 22 | return { 23 | async readNextFrame(progress, canvas) { 24 | const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); 25 | 26 | const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); 27 | img.left = width / 2 + translationParams; 28 | 29 | if (relWidth != null) { 30 | img.scaleToWidth(relWidth * width * scaleFactor); 31 | } else if (relHeight != null) { 32 | img.scaleToHeight(relHeight * height * scaleFactor); 33 | } else { 34 | // Default to screen width 35 | img.scaleToWidth(width * scaleFactor); 36 | } 37 | 38 | canvas.add(img); 39 | }, 40 | }; 41 | }, 42 | ); 43 | -------------------------------------------------------------------------------- /src/sources/image.ts: -------------------------------------------------------------------------------- 1 | import { FabricImage } from "fabric/node"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import type { ImageLayer } from "../types.js"; 4 | import { getTranslationParams, getZoomParams, loadImage } from "../util.js"; 5 | import { blurImage } from "./fabric.js"; 6 | 7 | export default defineFrameSource( 8 | "image", 9 | async ({ verbose, params, width, height }) => { 10 | const { path, zoomDirection = "in", zoomAmount = 0.1, resizeMode = "contain-blur" } 
= params; 11 | 12 | if (verbose) console.log("Loading", path); 13 | 14 | const imgData = await loadImage(path); 15 | 16 | const createImg = () => 17 | new FabricImage(imgData, { 18 | originX: "center", 19 | originY: "center", 20 | left: width / 2, 21 | top: height / 2, 22 | }); 23 | 24 | let blurredImg: FabricImage; 25 | // Blurred version 26 | if (resizeMode === "contain-blur") { 27 | // If we dispose mutableImg, seems to cause issues with the rendering of blurredImg 28 | const mutableImg = createImg(); 29 | if (verbose) console.log("Blurring background"); 30 | blurredImg = await blurImage({ mutableImg, width, height }); 31 | } 32 | 33 | return { 34 | async readNextFrame(progress, canvas) { 35 | const img = createImg(); 36 | 37 | const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); 38 | const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); 39 | 40 | const ratioW = width / img.width; 41 | const ratioH = height / img.height; 42 | 43 | img.left = width / 2 + translationParams; 44 | 45 | if (["contain", "contain-blur"].includes(resizeMode)) { 46 | if (ratioW > ratioH) { 47 | img.scaleToHeight(height * scaleFactor); 48 | } else { 49 | img.scaleToWidth(width * scaleFactor); 50 | } 51 | } else if (resizeMode === "cover") { 52 | if (ratioW > ratioH) { 53 | img.scaleToWidth(width * scaleFactor); 54 | } else { 55 | img.scaleToHeight(height * scaleFactor); 56 | } 57 | } else if (resizeMode === "stretch") { 58 | img.set({ 59 | scaleX: (width / img.width) * scaleFactor, 60 | scaleY: (height / img.height) * scaleFactor, 61 | }); 62 | } 63 | 64 | if (blurredImg) canvas.add(blurredImg); 65 | canvas.add(img); 66 | }, 67 | 68 | close() { 69 | if (blurredImg) blurredImg.dispose(); 70 | // imgData.dispose(); 71 | }, 72 | }; 73 | }, 74 | ); 75 | -------------------------------------------------------------------------------- /src/sources/index.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | 3 | import { join } from "path"; 4 | import { fileURLToPath } from "url"; 5 | import canvas from "./canvas.js"; 6 | import fabric from "./fabric.js"; 7 | import fillColor from "./fill-color.js"; 8 | import gl from "./gl.js"; 9 | import imageOverlay from "./image-overlay.js"; 10 | import image from "./image.js"; 11 | import linearGradient from "./linear-gradient.js"; 12 | import newsTitle from "./news-title.js"; 13 | import radialGradient from "./radial-gradient.js"; 14 | import slideInText from "./slide-in-text.js"; 15 | import subtitle from "./subtitle.js"; 16 | import title from "./title.js"; 17 | import video from "./video.js"; 18 | 19 | import type { CreateFrameSourceOptions, FrameSourceFactory } from "../api/index.js"; 20 | import type { 21 | BaseLayer, 22 | FillColorLayer, 23 | GlLayer, 24 | Layer, 25 | LinearGradientLayer, 26 | TitleLayer, 27 | } from "../types.js"; 28 | 29 | const dirname = fileURLToPath(new URL("..", import.meta.url)); 30 | 31 | const sources = [ 32 | canvas, 33 | fabric, 34 | fillColor, 35 | gl, 36 | imageOverlay, 37 | image, 38 | linearGradient, 39 | newsTitle, 40 | radialGradient, 41 | slideInText, 42 | subtitle, 43 | title, 44 | video, 45 | ]; 46 | 47 | export async function createLayerSource(options: CreateFrameSourceOptions) { 48 | const layer = options.params; 49 | const source = sources.find(({ type }) => type == layer.type) as 50 | | FrameSourceFactory 51 | | undefined; 52 | assert(source, `Invalid type ${layer.type}`); 53 | return await 
source.setup(options); 54 | } 55 | 56 | export function expandLayerAliases(params: Layer): Layer[] { 57 | if (params.type === "editly-banner") { 58 | return [ 59 | { type: "linear-gradient" } as LinearGradientLayer, 60 | { ...params, type: "title", text: "Made with\nEDITLY\nmifi.no" } as TitleLayer, 61 | ]; 62 | } 63 | 64 | if (params.type === "title-background") { 65 | const backgroundTypes: ("radial-gradient" | "linear-gradient" | "fill-color")[] = [ 66 | "radial-gradient", 67 | "linear-gradient", 68 | "fill-color", 69 | ]; 70 | 71 | const { 72 | background = { type: backgroundTypes[Math.floor(Math.random() * backgroundTypes.length)] }, 73 | ...title 74 | } = params; 75 | 76 | return [background, { ...title, type: "title" }]; 77 | } 78 | 79 | // TODO if random-background radial-gradient linear etc 80 | if (params.type === "pause") { 81 | return [{ ...params, type: "fill-color" } as FillColorLayer]; 82 | } 83 | 84 | if (params.type === "rainbow-colors") { 85 | return [{ type: "gl", fragmentPath: join(dirname, "shaders/rainbow-colors.frag") } as GlLayer]; 86 | } 87 | return [params]; 88 | } 89 | 90 | export default sources; 91 | -------------------------------------------------------------------------------- /src/sources/linear-gradient.ts: -------------------------------------------------------------------------------- 1 | import { Gradient } from "fabric/node"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import { getRandomGradient } from "../colors.js"; 4 | import type { LinearGradientLayer } from "../types.js"; 5 | import { getRekt } from "../util.js"; 6 | 7 | export default defineFrameSource( 8 | "linear-gradient", 9 | async ({ width, height, params }) => { 10 | const { colors: inColors } = params; 11 | const colors = inColors && inColors.length === 2 ? 
inColors : getRandomGradient(); 12 | 13 | return { 14 | async readNextFrame(progress, canvas) { 15 | const rect = getRekt(width, height); 16 | 17 | rect.set( 18 | "fill", 19 | new Gradient({ 20 | coords: { 21 | x1: 0, 22 | y1: 0, 23 | x2: width, 24 | y2: height, 25 | }, 26 | colorStops: [ 27 | { offset: 0, color: colors[0] }, 28 | { offset: 1, color: colors[1] }, 29 | ], 30 | }), 31 | ); 32 | 33 | rect.rotate(progress * 30); 34 | canvas.add(rect); 35 | }, 36 | }; 37 | }, 38 | ); 39 | -------------------------------------------------------------------------------- /src/sources/news-title.ts: -------------------------------------------------------------------------------- 1 | import { FabricText, Rect } from "fabric/node"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import { easeOutExpo } from "../easings.js"; 4 | import type { NewsTitleLayer } from "../types.js"; 5 | import { defaultFontFamily } from "../util.js"; 6 | 7 | export default defineFrameSource( 8 | "news-title", 9 | async ({ width, height, params }) => { 10 | const { 11 | text, 12 | textColor = "#ffffff", 13 | backgroundColor = "#d02a42", 14 | fontFamily = defaultFontFamily, 15 | delay = 0, 16 | speed = 1, 17 | } = params; 18 | const min = Math.min(width, height); 19 | const fontSize = Math.round(min * 0.05); 20 | 21 | return { 22 | async readNextFrame(progress, canvas) { 23 | const easedBgProgress = easeOutExpo( 24 | Math.max(0, Math.min((progress - delay) * speed * 3, 1)), 25 | ); 26 | const easedTextProgress = easeOutExpo( 27 | Math.max(0, Math.min((progress - delay - 0.02) * speed * 4, 1)), 28 | ); 29 | const easedTextOpacityProgress = easeOutExpo( 30 | Math.max(0, Math.min((progress - delay - 0.07) * speed * 4, 1)), 31 | ); 32 | 33 | const top = height * 0.08; 34 | 35 | const paddingV = 0.07 * min; 36 | const paddingH = 0.03 * min; 37 | 38 | const textBox = new FabricText(text, { 39 | top, 40 | left: paddingV + (easedTextProgress - 1) * width, 41 | fill: textColor, 42 | opacity: easedTextOpacityProgress, 43 | fontFamily, 44 | fontSize, 45 | charSpacing: width * 0.1, 46 | }); 47 | 48 | const bgWidth = textBox.width + paddingV * 2; 49 | const rect = new Rect({ 50 | top: top - paddingH, 51 | left: (easedBgProgress - 1) * bgWidth, 52 | width: bgWidth, 53 | height: textBox.height + paddingH * 2, 54 | fill: backgroundColor, 55 | }); 56 | 57 | canvas.add(rect); 58 | canvas.add(textBox); 59 | }, 60 | }; 61 | }, 62 | ); 63 | -------------------------------------------------------------------------------- /src/sources/radial-gradient.ts: -------------------------------------------------------------------------------- 1 | import * as fabric from "fabric/node"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import { getRandomGradient } from "../colors.js"; 4 | import type { RadialGradientLayer } from "../types.js"; 5 | import { getRekt } from "../util.js"; 6 | 7 | export default defineFrameSource( 8 | "radial-gradient", 9 | async ({ width, height, params }) => { 10 | const { colors: inColors } = params; 11 | 12 | const colors = inColors && inColors.length === 2 ? 
inColors : getRandomGradient(); 13 | 14 | return { 15 | async readNextFrame(progress, canvas) { 16 | // console.log('progress', progress); 17 | const max = Math.max(width, height); 18 | 19 | const r1 = 0; 20 | const r2 = max * (1 + progress) * 0.6; 21 | 22 | const rect = getRekt(width, height); 23 | 24 | const cx = 0.5 * rect.width; 25 | const cy = 0.5 * rect.height; 26 | 27 | rect.set( 28 | "fill", 29 | new fabric.Gradient({ 30 | type: "radial", 31 | coords: { 32 | r1, 33 | r2, 34 | x1: cx, 35 | y1: cy, 36 | x2: cx, 37 | y2: cy, 38 | }, 39 | colorStops: [ 40 | { offset: 0, color: colors[0] }, 41 | { offset: 1, color: colors[1] }, 42 | ], 43 | }), 44 | ); 45 | 46 | canvas.add(rect); 47 | }, 48 | }; 49 | }, 50 | ); 51 | -------------------------------------------------------------------------------- /src/sources/slide-in-text.ts: -------------------------------------------------------------------------------- 1 | import * as fabric from "fabric/node"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import { easeInOutCubic } from "../easings.js"; 4 | import type { SlideInTextLayer } from "../types.js"; 5 | import { defaultFontFamily, getFrameByKeyFrames, getPositionProps } from "../util.js"; 6 | 7 | export default defineFrameSource( 8 | "slide-in-text", 9 | async ({ width, height, params }) => { 10 | const { 11 | position, 12 | text, 13 | fontSize = 0.05, 14 | charSpacing = 0.1, 15 | textColor = "#ffffff", 16 | color = undefined, 17 | fontFamily = defaultFontFamily, 18 | } = params; 19 | 20 | if (color) { 21 | console.warn("slide-in-text: color is deprecated, use textColor."); 22 | } 23 | 24 | const fontSizeAbs = Math.round(width * fontSize); 25 | 26 | const { left, top, originX, originY } = getPositionProps({ position, width, height }); 27 | 28 | return { 29 | async readNextFrame(progress, canvas) { 30 | const textBox = new fabric.FabricText(text, { 31 | fill: color ?? 
textColor, 32 | fontFamily, 33 | fontSize: fontSizeAbs, 34 | charSpacing: width * charSpacing, 35 | }); 36 | 37 | const { opacity, textSlide } = getFrameByKeyFrames( 38 | [ 39 | { t: 0.1, props: { opacity: 1, textSlide: 0 } }, 40 | { t: 0.3, props: { opacity: 1, textSlide: 1 } }, 41 | { t: 0.8, props: { opacity: 1, textSlide: 1 } }, 42 | { t: 0.9, props: { opacity: 0, textSlide: 1 } }, 43 | ], 44 | progress, 45 | ); 46 | 47 | const fadedObject = await getFadedObject({ 48 | object: textBox, 49 | progress: easeInOutCubic(textSlide), 50 | }); 51 | fadedObject.set({ 52 | originX, 53 | originY, 54 | top, 55 | left, 56 | opacity, 57 | }); 58 | 59 | canvas.add(fadedObject); 60 | }, 61 | }; 62 | }, 63 | ); 64 | 65 | async function getFadedObject({ 66 | object, 67 | progress, 68 | }: { 69 | object: T; 70 | progress: number; 71 | }) { 72 | const rect = new fabric.Rect({ 73 | left: 0, 74 | width: object.width, 75 | height: object.height, 76 | top: 0, 77 | }); 78 | 79 | rect.set( 80 | "fill", 81 | new fabric.Gradient({ 82 | coords: { 83 | x1: 0, 84 | y1: 0, 85 | x2: object.width, 86 | y2: 0, 87 | }, 88 | colorStops: [ 89 | { offset: Math.max(0, progress * (1 + 0.2) - 0.2), color: "rgba(255,255,255,1)" }, 90 | { offset: Math.min(1, progress * (1 + 0.2)), color: "rgba(255,255,255,0)" }, 91 | ], 92 | }), 93 | ); 94 | 95 | const gradientMaskImg = rect.cloneAsImage({}); 96 | const fadedImage = object.cloneAsImage({}); 97 | 98 | fadedImage.filters.push( 99 | new fabric.filters.BlendImage({ 100 | image: gradientMaskImg, 101 | mode: "multiply", 102 | }), 103 | ); 104 | 105 | fadedImage.applyFilters(); 106 | 107 | return fadedImage; 108 | } 109 | -------------------------------------------------------------------------------- /src/sources/subtitle.ts: -------------------------------------------------------------------------------- 1 | import { Rect, Textbox } from "fabric/node"; 2 | import { defineFrameSource } from "../api/index.js"; 3 | import { easeOutExpo } from "../easings.js"; 4 | import type { SubtitleLayer } from "../types.js"; 5 | import { defaultFontFamily } from "../util.js"; 6 | 7 | export default defineFrameSource("subtitle", async ({ width, height, params }) => { 8 | const { 9 | text, 10 | textColor = "#ffffff", 11 | backgroundColor = "rgba(0,0,0,0.3)", 12 | fontFamily = defaultFontFamily, 13 | delay = 0, 14 | speed = 1, 15 | } = params; 16 | 17 | return { 18 | async readNextFrame(progress, canvas) { 19 | const easedProgress = easeOutExpo(Math.max(0, Math.min((progress - delay) * speed, 1))); 20 | 21 | const min = Math.min(width, height); 22 | const padding = 0.05 * min; 23 | 24 | const textBox = new Textbox(text, { 25 | fill: textColor, 26 | fontFamily, 27 | 28 | fontSize: min / 20, 29 | textAlign: "left", 30 | width: width - padding * 2, 31 | originX: "center", 32 | originY: "bottom", 33 | left: width / 2 + (-1 + easedProgress) * padding, 34 | top: height - padding, 35 | opacity: easedProgress, 36 | }); 37 | 38 | const rect = new Rect({ 39 | left: 0, 40 | width, 41 | height: textBox.height + padding * 2, 42 | top: height, 43 | originY: "bottom", 44 | fill: backgroundColor, 45 | opacity: easedProgress, 46 | }); 47 | 48 | canvas.add(rect); 49 | canvas.add(textBox); 50 | }, 51 | }; 52 | }); 53 | -------------------------------------------------------------------------------- /src/sources/title.ts: -------------------------------------------------------------------------------- 1 | import { Textbox } from "fabric/node"; 2 | import { defineFrameSource } from "../api//index.js"; 3 | import type { 
TitleLayer } from "../types.js"; 4 | import { 5 | defaultFontFamily, 6 | getPositionProps, 7 | getTranslationParams, 8 | getZoomParams, 9 | } from "../util.js"; 10 | 11 | export default defineFrameSource<TitleLayer>("title", async ({ width, height, params }) => { 12 | const { 13 | text, 14 | textColor = "#ffffff", 15 | fontFamily = defaultFontFamily, 16 | position = "center", 17 | zoomDirection = "in", 18 | zoomAmount = 0.2, 19 | } = params; 20 | const fontSize = Math.round(Math.min(width, height) * 0.1); 21 | 22 | const textBox = new Textbox(text, { 23 | fill: textColor, 24 | fontFamily, 25 | fontSize, 26 | textAlign: "center", 27 | width: width * 0.8, 28 | }); 29 | 30 | return { 31 | async readNextFrame(progress, canvas) { 32 | const scaleFactor = getZoomParams({ progress, zoomDirection, zoomAmount }); 33 | const translationParams = getTranslationParams({ progress, zoomDirection, zoomAmount }); 34 | 35 | // We need the text as an image in order to scale it 36 | const textImage = textBox.cloneAsImage({}); 37 | 38 | const { left, top, originX, originY } = getPositionProps({ position, width, height }); 39 | 40 | textImage.set({ 41 | originX, 42 | originY, 43 | left: left + translationParams, 44 | top, 45 | scaleX: scaleFactor, 46 | scaleY: scaleFactor, 47 | }); 48 | 49 | canvas.add(textImage); 50 | }, 51 | }; 52 | }); 53 | -------------------------------------------------------------------------------- /src/sources/video.ts: -------------------------------------------------------------------------------- 1 | import { ExecaError } from "execa"; 2 | import * as fabric from "fabric/node"; 3 | import { defineFrameSource } from "../api/index.js"; 4 | import { ffmpeg, readFileStreams } from "../ffmpeg.js"; 5 | import { rawVideoToFrames } from "../transforms/rawVideoToFrames.js"; 6 | import type { VideoLayer } from "../types.js"; 7 | import { blurImage, rgbaToFabricImage } from "./fabric.js"; 8 | 9 | export default defineFrameSource<VideoLayer>("video", async (options) => { 10 | const { 11 | width: canvasWidth, 12 | height: canvasHeight, 13 | channels, 14 | framerateStr, 15 | verbose, 16 | logTimes, 17 | params, 18 | } = options; 19 | 20 | const { 21 | path, 22 | cutFrom, 23 | cutTo, 24 | resizeMode = "contain-blur", 25 | speedFactor, 26 | inputWidth, 27 | inputHeight, 28 | width: requestedWidthRel, 29 | height: requestedHeightRel, 30 | left: leftRel = 0, 31 | top: topRel = 0, 32 | originX = "left", 33 | originY = "top", 34 | fabricImagePostProcessing = null, 35 | } = params; 36 | 37 | const requestedWidth = requestedWidthRel 38 | ? Math.round(requestedWidthRel * canvasWidth) 39 | : canvasWidth; 40 | const requestedHeight = requestedHeightRel 41 | ? Math.round(requestedHeightRel * canvasHeight) 42 | : canvasHeight; 43 | 44 | const left = leftRel * canvasWidth; 45 | const top = topRel * canvasHeight; 46 | 47 | const ratioW = requestedWidth / inputWidth!; 48 | const ratioH = requestedHeight / inputHeight!; 49 | const inputAspectRatio = inputWidth!
/ inputHeight!; 50 | 51 | let targetWidth = requestedWidth; 52 | let targetHeight = requestedHeight; 53 | 54 | let scaleFilter; 55 | if (["contain", "contain-blur"].includes(resizeMode)) { 56 | if (ratioW > ratioH) { 57 | targetHeight = requestedHeight; 58 | targetWidth = Math.round(requestedHeight * inputAspectRatio); 59 | } else { 60 | targetWidth = requestedWidth; 61 | targetHeight = Math.round(requestedWidth / inputAspectRatio); 62 | } 63 | 64 | scaleFilter = `scale=${targetWidth}:${targetHeight}`; 65 | } else if (resizeMode === "cover") { 66 | let scaledWidth; 67 | let scaledHeight; 68 | 69 | if (ratioW > ratioH) { 70 | scaledWidth = requestedWidth; 71 | scaledHeight = Math.round(requestedWidth / inputAspectRatio); 72 | } else { 73 | scaledHeight = requestedHeight; 74 | scaledWidth = Math.round(requestedHeight * inputAspectRatio); 75 | } 76 | 77 | // TODO improve performance by crop first, then scale? 78 | scaleFilter = `scale=${scaledWidth}:${scaledHeight},crop=${targetWidth}:${targetHeight}`; 79 | } else { 80 | // 'stretch' 81 | scaleFilter = `scale=${targetWidth}:${targetHeight}`; 82 | } 83 | 84 | if (verbose) console.log(scaleFilter); 85 | 86 | let ptsFilter = ""; 87 | if (speedFactor !== 1) { 88 | if (verbose) console.log("speedFactor", speedFactor); 89 | ptsFilter = `setpts=${speedFactor}*PTS,`; 90 | } 91 | 92 | // https://forum.unity.com/threads/settings-for-importing-a-video-with-an-alpha-channel.457657/ 93 | const streams = await readFileStreams(path); 94 | const firstVideoStream = streams.find((s) => s.codec_type === "video"); 95 | // https://superuser.com/a/1116905/658247 96 | 97 | let inputCodec; 98 | if (firstVideoStream?.codec_name === "vp8") inputCodec = "libvpx"; 99 | else if (firstVideoStream?.codec_name === "vp9") inputCodec = "libvpx-vp9"; 100 | 101 | // http://zulko.github.io/blog/2013/09/27/read-and-write-video-frames-in-python-using-ffmpeg/ 102 | // Testing: ffmpeg -i 'vid.mov' -t 1 -vcodec rawvideo -pix_fmt rgba -f image2pipe - | ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i - -vf format=yuv420p -vcodec libx264 -y out.mp4 103 | // https://trac.ffmpeg.org/wiki/ChangingFrameRate 104 | const args = [ 105 | "-nostdin", 106 | ...(inputCodec ? ["-vcodec", inputCodec] : []), 107 | ...(cutFrom ? ["-ss", cutFrom.toString()] : []), 108 | "-i", 109 | path, 110 | ...(cutTo ? ["-t", ((cutTo - cutFrom!) 
* speedFactor!).toString()] : []), 111 | "-vf", 112 | `${ptsFilter}fps=${framerateStr},${scaleFilter}`, 113 | "-map", 114 | "v:0", 115 | "-vcodec", 116 | "rawvideo", 117 | "-pix_fmt", 118 | "rgba", 119 | "-f", 120 | "image2pipe", 121 | "-", 122 | ]; 123 | 124 | const controller = new AbortController(); 125 | const transform = rawVideoToFrames({ 126 | width: targetWidth, 127 | height: targetHeight, 128 | channels, 129 | signal: controller.signal, 130 | }); 131 | const ps = ffmpeg(args, { 132 | encoding: "buffer", 133 | buffer: false, 134 | stdin: "ignore", 135 | stdout: { transform }, 136 | stderr: process.stderr, 137 | // ffmpeg doesn't like to stop, force it 138 | forceKillAfterDelay: 1000, 139 | cancelSignal: controller.signal, 140 | }); 141 | 142 | // Ignore errors if the process is aborted 143 | ps.catch((err: ExecaError) => { 144 | if (!err.isCanceled) throw err; 145 | if (verbose) console.log("ffmpeg process aborted", path); 146 | }); 147 | 148 | // Convert process to iterator to fetch frame data 149 | const iterator = ps.iterable(); 150 | 151 | async function readNextFrame(progress: number, canvas: fabric.StaticCanvas, time: number) { 152 | const { value: rgba, done } = await iterator.next(); 153 | 154 | if (done) { 155 | if (verbose) console.log(path, "ffmpeg video stream ended"); 156 | return; 157 | } 158 | 159 | if (!rgba) { 160 | if (verbose) console.log(path, "No frame data received"); 161 | return; 162 | } 163 | 164 | if (logTimes) console.time("rgbaToFabricImage"); 165 | const img = await rgbaToFabricImage({ 166 | width: targetWidth, 167 | height: targetHeight, 168 | rgba: Buffer.from(rgba), 169 | }); 170 | if (logTimes) console.timeEnd("rgbaToFabricImage"); 171 | 172 | img.set({ 173 | originX, 174 | originY, 175 | }); 176 | 177 | let centerOffsetX = 0; 178 | let centerOffsetY = 0; 179 | if (resizeMode === "contain" || resizeMode === "contain-blur") { 180 | const dirX = originX === "left" ? 1 : -1; 181 | const dirY = originY === "top" ? 1 : -1; 182 | centerOffsetX = (dirX * (requestedWidth - targetWidth)) / 2; 183 | centerOffsetY = (dirY * (requestedHeight - targetHeight)) / 2; 184 | } 185 | 186 | img.set({ 187 | left: left + centerOffsetX, 188 | top: top + centerOffsetY, 189 | }); 190 | 191 | if (resizeMode === "contain-blur") { 192 | const mutableImg = img.cloneAsImage({}); 193 | const blurredImg = await blurImage({ 194 | mutableImg, 195 | width: requestedWidth, 196 | height: requestedHeight, 197 | }); 198 | blurredImg.set({ 199 | left, 200 | top, 201 | originX, 202 | originY, 203 | }); 204 | canvas.add(blurredImg); 205 | } 206 | 207 | if (fabricImagePostProcessing) { 208 | fabricImagePostProcessing({ image: img, progress, fabric, canvas, time }); 209 | } 210 | 211 | canvas.add(img); 212 | } 213 | 214 | const close = () => { 215 | if (verbose) console.log("Close", path); 216 | if (!ps.exitCode) controller.abort(); 217 | }; 218 | 219 | return { 220 | readNextFrame, 221 | close, 222 | }; 223 | }); 224 | -------------------------------------------------------------------------------- /src/transforms/rawVideoToFrames.ts: -------------------------------------------------------------------------------- 1 | import { Transform, TransformCallback, TransformOptions } from "stream"; 2 | 3 | export interface RawVideoToFramesOptions extends Omit<TransformOptions, "transform" | "writableObjectMode" | "readableObjectMode"> { 4 | width: number; 5 | height: number; 6 | channels: number; 7 | } 8 | 9 | /** 10 | * Transforms a raw video stream into individual frames.
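 *
 * Incoming chunks from the rawvideo pipe can have arbitrary sizes; bytes are copied into an
 * internal buffer of width * height * channels bytes, and each time that buffer fills up one
 * complete frame is pushed downstream, so a single chunk may yield zero, one or several frames.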
11 | */ 12 | export function rawVideoToFrames({ width, height, channels, ...options }: RawVideoToFramesOptions) { 13 | const frameByteSize = width * height * channels; 14 | let buffer = new Uint8Array(frameByteSize); 15 | let bytesRead = 0; 16 | 17 | return new Transform({ 18 | ...options, 19 | writableObjectMode: false, 20 | readableObjectMode: true, 21 | transform(chunk: Uint8Array, _: BufferEncoding, callback: TransformCallback) { 22 | let startAt = 0; 23 | 24 | // Find frames in chunk 25 | while (startAt < chunk.length) { 26 | const endAt = Math.min(startAt + frameByteSize - bytesRead, chunk.length); 27 | const bytesToRead = endAt - startAt; 28 | buffer.set(chunk.slice(startAt, endAt), bytesRead); 29 | bytesRead = (bytesRead + bytesToRead) % frameByteSize; 30 | 31 | if (bytesRead === 0) { 32 | // Emit frame 33 | this.push(buffer); 34 | 35 | // Reset data buffer 36 | buffer = new Uint8Array(frameByteSize); 37 | } 38 | 39 | // Move to next frame 40 | startAt = endAt; 41 | } 42 | 43 | callback(); 44 | }, 45 | }); 46 | } 47 | -------------------------------------------------------------------------------- /src/transition.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import GL from "gl"; 3 | import createBuffer from "gl-buffer"; 4 | import createTexture from "gl-texture2d"; 5 | import glTransition from "gl-transition"; 6 | import glTransitions, { type GlTransition } from "gl-transitions"; 7 | import ndarray from "ndarray"; 8 | import type { EasingFunction } from "./easings.js"; 9 | import * as easings from "./easings.js"; 10 | 11 | const { default: createTransition } = glTransition; 12 | 13 | const TransitionAliases: Record<string, Partial<TransitionOptions>> = { 14 | "directional-left": { name: "directional", easing: "easeOutExpo", params: { direction: [1, 0] } }, 15 | "directional-right": { 16 | name: "directional", 17 | easing: "easeOutExpo", 18 | params: { direction: [-1, 0] }, 19 | }, 20 | "directional-down": { name: "directional", easing: "easeOutExpo", params: { direction: [0, 1] } }, 21 | "directional-up": { name: "directional", easing: "easeOutExpo", params: { direction: [0, -1] } }, 22 | }; 23 | 24 | const AllTransitions = [...glTransitions.map((t) => t.name), ...Object.keys(TransitionAliases)]; 25 | 26 | /** 27 | * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} 28 | */ 29 | export type TransitionType = 30 | | "directional-left" 31 | | "directional-right" 32 | | "directional-up" 33 | | "directional-down" 34 | | "random" 35 | | "dummy" 36 | | string; 37 | 38 | /** 39 | * WARNING: Undocumented feature! 40 | */ 41 | export type GLTextureLike = { 42 | bind: (unit: number) => number; 43 | shape: [number, number]; 44 | }; 45 | 46 | /** 47 | * WARNING: Undocumented feature! 48 | */ 49 | export interface TransitionParams { 50 | /** 51 | * WARNING: Undocumented feature!
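 *
 * Keys correspond to the selected gl-transition's custom uniforms; the values are forwarded
 * unchanged as the last argument of transition.draw() in Transition#create() below.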
52 | */ 53 | [key: string]: number | boolean | GLTextureLike | number[]; 54 | } 55 | 56 | export type RunTransitionOptions = { 57 | fromFrame: Buffer; 58 | toFrame: Buffer; 59 | progress: number; 60 | transitionName?: string; 61 | transitionParams?: TransitionParams; 62 | }; 63 | 64 | /** 65 | * @see [Curve types]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} 66 | */ 67 | export type CurveType = 68 | | "tri" 69 | | "qsin" 70 | | "hsin" 71 | | "esin" 72 | | "log" 73 | | "ipar" 74 | | "qua" 75 | | "cub" 76 | | "squ" 77 | | "cbr" 78 | | "par" 79 | | "exp" 80 | | "iqsin" 81 | | "ihsin" 82 | | "dese" 83 | | "desi" 84 | | "losi" 85 | | "nofade" 86 | | string; 87 | 88 | export type Easing = keyof typeof easings; 89 | 90 | export interface TransitionOptions { 91 | /** 92 | * Transition duration. 93 | * 94 | * @default 0.5 95 | */ 96 | duration?: number; 97 | 98 | /** 99 | * Transition type. 100 | * 101 | * @default 'random' 102 | * @see [Transition types]{@link https://github.com/mifi/editly#transition-types} 103 | */ 104 | name?: TransitionType; 105 | 106 | /** 107 | * [Fade out curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. 108 | * 109 | * @default 'tri' 110 | */ 111 | audioOutCurve?: CurveType; 112 | 113 | /** 114 | * [Fade in curve]{@link https://trac.ffmpeg.org/wiki/AfadeCurves} in audio cross fades. 115 | * 116 | * @default 'tri' 117 | */ 118 | audioInCurve?: CurveType; 119 | 120 | /** 121 | * WARNING: Undocumented feature! 122 | */ 123 | easing?: Easing | null; 124 | 125 | /** 126 | * WARNING: Undocumented feature! 127 | */ 128 | params?: TransitionParams; 129 | } 130 | 131 | function getRandomTransition() { 132 | return AllTransitions[Math.floor(Math.random() * AllTransitions.length)]; 133 | } 134 | 135 | export class Transition { 136 | name?: string; 137 | duration: number; 138 | params?: TransitionParams; 139 | easingFunction: EasingFunction; 140 | source?: GlTransition; 141 | 142 | constructor(options?: TransitionOptions | null, isLastClip: boolean = false) { 143 | if (!options || isLastClip) options = { duration: 0 }; 144 | 145 | assert(typeof options === "object", "Transition must be an object"); 146 | assert( 147 | options.duration === 0 || options.name, 148 | "Please specify transition name or set duration to 0", 149 | ); 150 | 151 | if (options.name === "random") options.name = getRandomTransition(); 152 | const aliasedTransition = options.name && TransitionAliases[options.name]; 153 | if (aliasedTransition) Object.assign(options, aliasedTransition); 154 | 155 | this.duration = options.duration ?? 0; 156 | this.name = options.name; 157 | this.params = options.params; 158 | this.easingFunction = 159 | options.easing && easings[options.easing] ? easings[options.easing] : easings.linear; 160 | 161 | // A dummy transition can be used to have an audio transition without a video transition 162 | // (Note: You will lose a portion from both clips due to overlap) 163 | if (this.name && this.name !== "dummy") { 164 | this.source = glTransitions.find( 165 | ({ name }) => name.toLowerCase() === this.name?.toLowerCase(), 166 | ); 167 | assert(this.source, `Transition not found: ${this.name}`); 168 | } 169 | } 170 | 171 | create({ width, height, channels }: { width: number; height: number; channels: number }) { 172 | const gl = GL(width, height); 173 | const resizeMode = "stretch"; 174 | 175 | if (!gl) { 176 | throw new Error( 177 | "gl returned null, this probably means that some dependencies are not installed. 
See README.", 178 | ); 179 | } 180 | 181 | function convertFrame(buf: Buffer) { 182 | // @see https://github.com/stackgl/gl-texture2d/issues/16 183 | return ndarray(buf, [width, height, channels], [channels, width * channels, 1]); 184 | } 185 | 186 | return ({ fromFrame, toFrame, progress }: RunTransitionOptions) => { 187 | if (!this.source) { 188 | // No transition found, just switch frames half way through the transition. 189 | return this.easingFunction(progress) > 0.5 ? toFrame : fromFrame; 190 | } 191 | 192 | const buffer = createBuffer(gl, [-1, -1, -1, 4, 4, -1], gl.ARRAY_BUFFER, gl.STATIC_DRAW); 193 | let transition; 194 | 195 | try { 196 | transition = createTransition(gl, this.source, { resizeMode }); 197 | 198 | gl.clear(gl.COLOR_BUFFER_BIT); 199 | 200 | // console.time('runTransitionOnFrame internal'); 201 | const fromFrameNdArray = convertFrame(fromFrame); 202 | const textureFrom = createTexture(gl, fromFrameNdArray); 203 | textureFrom.minFilter = gl.LINEAR; 204 | textureFrom.magFilter = gl.LINEAR; 205 | 206 | // console.timeLog('runTransitionOnFrame internal'); 207 | const toFrameNdArray = convertFrame(toFrame); 208 | const textureTo = createTexture(gl, toFrameNdArray); 209 | textureTo.minFilter = gl.LINEAR; 210 | textureTo.magFilter = gl.LINEAR; 211 | 212 | buffer.bind(); 213 | transition.draw( 214 | this.easingFunction(progress), 215 | textureFrom, 216 | textureTo, 217 | gl.drawingBufferWidth, 218 | gl.drawingBufferHeight, 219 | this.params, 220 | ); 221 | 222 | textureFrom.dispose(); 223 | textureTo.dispose(); 224 | 225 | // console.timeLog('runTransitionOnFrame internal'); 226 | 227 | const outArray = Buffer.allocUnsafe(width * height * 4); 228 | gl.readPixels(0, 0, width, height, gl.RGBA, gl.UNSIGNED_BYTE, outArray); 229 | 230 | // console.timeEnd('runTransitionOnFrame internal'); 231 | 232 | return outArray; 233 | 234 | // require('fs').writeFileSync(`${new Date().getTime()}.raw`, outArray); 235 | // Testing: ffmpeg -f rawvideo -vcodec rawvideo -pix_fmt rgba -s 2166x1650 -i 1586619627191.raw -vf format=yuv420p -vcodec libx264 -y out.mp4 236 | } finally { 237 | buffer.dispose(); 238 | if (transition) transition.dispose(); 239 | } 240 | }; 241 | } 242 | } 243 | -------------------------------------------------------------------------------- /src/types/gl-buffer.d.ts: -------------------------------------------------------------------------------- 1 | declare module "gl-buffer" { 2 | export default function createBuffer( 3 | gl: WebGLRenderingContext, 4 | data: number[], 5 | target: number, 6 | usage: number, 7 | ): WebGLBuffer; 8 | } 9 | -------------------------------------------------------------------------------- /src/types/gl-texture2d.d.ts: -------------------------------------------------------------------------------- 1 | declare module "gl-texture2d" { 2 | import ndarray from "ndarray"; 3 | 4 | // There are other overloads for this function, but we only care about this one. 
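// In this repo it receives an ndarray of shape [width, height, channels] built by
// convertFrame() in src/transition.ts.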
5 | declare function createTexture(gl: WebGLRenderingContext, data: ndarray): WebGLTexture; 6 | 7 | export default createTexture; 8 | } 9 | -------------------------------------------------------------------------------- /src/types/gl-transition.d.ts: -------------------------------------------------------------------------------- 1 | declare module "gl-transition" { 2 | type TransitionObjectLike = { 3 | glsl: string; 4 | defaultParams: { [key: string]: mixed }; 5 | paramsTypes: { [key: string]: string }; 6 | }; 7 | 8 | type GLTextureLike = { 9 | bind: (unit: number) => number; 10 | shape: [number, number]; 11 | }; 12 | 13 | type Options = { 14 | resizeMode?: "cover" | "contain" | "stretch"; 15 | }; 16 | 17 | declare function createTransition( 18 | gl: WebGLRenderingContext, 19 | transition: TransitionObjectLike, 20 | options: Options = {}, 21 | ): { 22 | // renders one frame of the transition (up to you to run the animation loop the way you want) 23 | draw: ( 24 | progress: number, 25 | from: GLTextureLike, 26 | to: GLTextureLike, 27 | width: number = gl.drawingBufferWidth, 28 | height: number = gl.drawingBufferHeight, 29 | params: { [key: string]: number | number[] | boolean | GLTextureLike } = {}, 30 | ) => void; 31 | // dispose and destroy all objects created by the function call. 32 | dispose: () => void; 33 | }; 34 | 35 | export = { default: createTransition }; 36 | } 37 | 38 | /* 39 | 40 | ( 41 | gl: WebGLRenderingContext, 42 | transition: TransitionObjectLike, 43 | options: Options = {} 44 | ) => { 45 | // renders one frame of the transition (up to you to run the animation loop the way you want) 46 | draw: ( 47 | progress: number, 48 | from: GLTextureLike, 49 | to: GLTextureLike, 50 | width: number = gl.drawingBufferWidth, 51 | height: number = gl.drawingBufferHeight, 52 | params: { [key: string]: number | boolean | GLTextureLike } = {} 53 | ) => void, 54 | // dispose and destroy all objects created by the function call. 
55 | dispose: () => void, 56 | } 57 | */ 58 | -------------------------------------------------------------------------------- /src/types/gl-transitions.d.ts: -------------------------------------------------------------------------------- 1 | declare module "gl-transitions" { 2 | export type GlTransition = { 3 | name: string; 4 | author: string; 5 | license: string; 6 | glsl: string; 7 | defaultParams: { [key: string]: mixed }; 8 | paramsTypes: { [key: string]: string }; 9 | createdAt: string; 10 | updatedAt: string; 11 | }; 12 | 13 | declare const _default: GlTransition[]; 14 | export default _default; 15 | } 16 | -------------------------------------------------------------------------------- /src/util.ts: -------------------------------------------------------------------------------- 1 | import assert from "assert"; 2 | import type { TOriginX, TOriginY } from "fabric"; 3 | import * as fabric from "fabric/node"; 4 | import fileUrl from "file-url"; 5 | import { pathExists } from "fs-extra"; 6 | import { sortBy } from "lodash-es"; 7 | import type { KenBurns, Keyframe, Position, PositionObject } from "./types.js"; 8 | 9 | export function toArrayInteger(buffer: Buffer) { 10 | if (buffer.length > 0) { 11 | const data = new Uint8ClampedArray(buffer.length); 12 | for (let i = 0; i < buffer.length; i += 1) { 13 | data[i] = buffer[i]; 14 | } 15 | return data; 16 | } 17 | return []; 18 | } 19 | 20 | // x264 requires multiple of 2 21 | export const multipleOf2 = (x: number) => Math.round(x / 2) * 2; 22 | 23 | export function getPositionProps({ 24 | position, 25 | width, 26 | height, 27 | }: { 28 | position?: Position | PositionObject; 29 | width: number; 30 | height: number; 31 | }) { 32 | let originY: TOriginY = "center"; 33 | let originX: TOriginX = "center"; 34 | let top = height / 2; 35 | let left = width / 2; 36 | const margin = 0.05; 37 | 38 | if (typeof position === "string") { 39 | if (position === "top") { 40 | originY = "top"; 41 | top = height * margin; 42 | } else if (position === "bottom") { 43 | originY = "bottom"; 44 | top = height * (1 - margin); 45 | } else if (position === "center") { 46 | originY = "center"; 47 | top = height / 2; 48 | } else if (position === "top-left") { 49 | originX = "left"; 50 | originY = "top"; 51 | left = width * margin; 52 | top = height * margin; 53 | } else if (position === "top-right") { 54 | originX = "right"; 55 | originY = "top"; 56 | left = width * (1 - margin); 57 | top = height * margin; 58 | } else if (position === "center-left") { 59 | originX = "left"; 60 | originY = "center"; 61 | left = width * margin; 62 | top = height / 2; 63 | } else if (position === "center-right") { 64 | originX = "right"; 65 | originY = "center"; 66 | left = width * (1 - margin); 67 | top = height / 2; 68 | } else if (position === "bottom-left") { 69 | originX = "left"; 70 | originY = "bottom"; 71 | left = width * margin; 72 | top = height * (1 - margin); 73 | } else if (position === "bottom-right") { 74 | originX = "right"; 75 | originY = "bottom"; 76 | left = width * (1 - margin); 77 | top = height * (1 - margin); 78 | } 79 | } else { 80 | if (position?.x != null) { 81 | originX = position.originX || "left"; 82 | left = width * position.x; 83 | } 84 | if (position?.y != null) { 85 | originY = position.originY || "top"; 86 | top = height * position.y; 87 | } 88 | } 89 | 90 | return { originX, originY, top, left }; 91 | } 92 | 93 | export function getFrameByKeyFrames(keyframes: Keyframe[], progress: number) { 94 | if (keyframes.length < 2) throw new Error("Keyframes 
must be at least 2"); 95 | const sortedKeyframes = sortBy(keyframes, "t"); 96 | 97 | // TODO check that max is 1 98 | // TODO check that all keyframes have all props 99 | // TODO make smarter so user doesn't need to replicate non-changing props 100 | 101 | const invalidKeyframe = sortedKeyframes.find((k, i) => { 102 | if (i === 0) return false; 103 | return k.t === sortedKeyframes[i - 1].t; 104 | }); 105 | if (invalidKeyframe) throw new Error("Invalid keyframe"); 106 | 107 | let prevKeyframe = [...sortedKeyframes].reverse().find((k) => k.t < progress); 108 | if (!prevKeyframe) prevKeyframe = sortedKeyframes[0]; 109 | 110 | let nextKeyframe = sortedKeyframes.find((k) => k.t >= progress); 111 | if (!nextKeyframe) nextKeyframe = sortedKeyframes[sortedKeyframes.length - 1]; 112 | 113 | if (nextKeyframe.t === prevKeyframe.t) return prevKeyframe.props; 114 | 115 | const interProgress = (progress - prevKeyframe.t) / (nextKeyframe.t - prevKeyframe.t); 116 | return Object.fromEntries( 117 | Object.entries(prevKeyframe.props).map(([propName, prevVal]) => [ 118 | propName, 119 | prevVal + (nextKeyframe.props[propName] - prevVal) * interProgress, 120 | ]), 121 | ); 122 | } 123 | 124 | export const isUrl = (path: string) => /^https?:\/\//.test(path); 125 | 126 | export const assertFileValid = async (path: string, allowRemoteRequests?: boolean) => { 127 | if (isUrl(path)) { 128 | assert(allowRemoteRequests, "Remote requests are not allowed"); 129 | return; 130 | } 131 | assert(await pathExists(path), `File does not exist ${path}`); 132 | }; 133 | 134 | export const loadImage = (pathOrUrl: string) => 135 | fabric.util.loadImage(isUrl(pathOrUrl) ? pathOrUrl : fileUrl(pathOrUrl)); 136 | export const defaultFontFamily = "sans-serif"; 137 | 138 | export function getZoomParams({ 139 | progress, 140 | zoomDirection, 141 | zoomAmount = 0.1, 142 | }: KenBurns & { progress: number }) { 143 | let scaleFactor = 1; 144 | if (zoomDirection === "left" || zoomDirection === "right") return 1.3 + zoomAmount; 145 | if (zoomDirection === "in") scaleFactor = 1 + zoomAmount * progress; 146 | else if (zoomDirection === "out") scaleFactor = 1 + zoomAmount * (1 - progress); 147 | return scaleFactor; 148 | } 149 | 150 | export function getTranslationParams({ 151 | progress, 152 | zoomDirection, 153 | zoomAmount = 0.1, 154 | }: KenBurns & { progress: number }) { 155 | let translation = 0; 156 | const range = zoomAmount * 1000; 157 | 158 | if (zoomDirection === "right") translation = progress * range - range / 2; 159 | else if (zoomDirection === "left") translation = -(progress * range - range / 2); 160 | 161 | return translation; 162 | } 163 | 164 | export function getRekt(width: number, height: number) { 165 | // width and height with room to rotate 166 | return new fabric.Rect({ 167 | originX: "center", 168 | originY: "center", 169 | left: width / 2, 170 | top: height / 2, 171 | width: width * 2, 172 | height: height * 2, 173 | }); 174 | } 175 | -------------------------------------------------------------------------------- /test/configuration.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, test } from "vitest"; 2 | import { Configuration } from "../src/configuration.js"; 3 | 4 | // eslint-disable-next-line @typescript-eslint/no-explicit-any 5 | type BadData = any; 6 | 7 | describe("Configuration", () => { 8 | const input = { 9 | outPath: "test.mp4", 10 | clips: [{ layers: [{ type: "title", text: "Hello World" }] }], 11 | }; 12 | 13 | test("requires 
outPath", () => { 14 | expect(() => new Configuration({ ...input, outPath: undefined } as BadData)).toThrow( 15 | "Please provide an output path", 16 | ); 17 | }); 18 | 19 | test("requires clip with at least one layer", () => { 20 | expect(() => new Configuration({ ...input, clips: undefined } as BadData)).toThrow( 21 | "Please provide at least 1 clip", 22 | ); 23 | expect(() => new Configuration({ ...input, clips: [] })).toThrow( 24 | "Please provide at least 1 clip", 25 | ); 26 | expect(() => new Configuration({ ...input, clips: [{}] } as BadData)).toThrow( 27 | /clip.layers must be an array/, 28 | ); 29 | }); 30 | 31 | test("layers must have a type", () => { 32 | expect( 33 | () => new Configuration({ ...input, clips: [{ layers: { title: "Nope" } }] } as BadData), 34 | ).toThrow('All "layers" must have a type'); 35 | }); 36 | 37 | test("allows single layer for backward compatibility", () => { 38 | const config = new Configuration({ 39 | ...input, 40 | clips: [{ layers: input.clips[0].layers[0] }], 41 | } as BadData); 42 | expect(config.clips[0].layers.length).toBe(1); 43 | }); 44 | 45 | test("customOutputArgs must be an array", () => { 46 | expect(() => new Configuration({ ...input, customOutputArgs: "test" } as BadData)).toThrow( 47 | "customOutputArgs must be an array of arguments", 48 | ); 49 | expect( 50 | new Configuration({ ...input, customOutputArgs: ["test"] } as BadData).customOutputArgs, 51 | ).toEqual(["test"]); 52 | }); 53 | 54 | describe("defaults", () => { 55 | test("merges defaults on layers", () => { 56 | const config = new Configuration({ 57 | ...input, 58 | clips: [ 59 | { layers: [{ type: "title", text: "Clip with duration" }], duration: 3 }, 60 | { 61 | layers: [{ type: "title", text: "Clip with transition" }], 62 | transition: { duration: 1, name: "random" }, 63 | }, 64 | ], 65 | defaults: { 66 | duration: 5, 67 | transition: { 68 | duration: 0.5, 69 | name: "fade", 70 | audioOutCurve: "qsin", 71 | }, 72 | }, 73 | }); 74 | 75 | expect(config.clips[0].duration).toBe(3); 76 | expect(config.clips[0].transition!).toEqual({ 77 | duration: 0.5, 78 | name: "fade", 79 | audioOutCurve: "qsin", 80 | audioInCurve: "tri", 81 | }); 82 | 83 | expect(config.clips[1].transition).toEqual({ 84 | duration: 1, 85 | name: "random", 86 | audioOutCurve: "qsin", 87 | audioInCurve: "tri", 88 | }); 89 | }); 90 | }); 91 | }); 92 | -------------------------------------------------------------------------------- /test/integration.test.ts: -------------------------------------------------------------------------------- 1 | import { execa } from "execa"; 2 | import { expect, test } from "vitest"; 3 | import { readDuration } from "../src/ffmpeg.js"; 4 | 5 | test( 6 | "works", 7 | async () => { 8 | await execa("npx", [ 9 | "tsx", 10 | "src/cli.ts", 11 | "--allow-remote-requests", 12 | "title:'My video'", 13 | "https://raw.githubusercontent.com/mifi/editly-assets/main/overlay.svg", 14 | "title:'THE END'", 15 | "--fast", 16 | "--audio-file-path", 17 | "https://github.com/mifi/editly-assets/raw/main/winxp.mp3", 18 | ]); 19 | 20 | expect(await readDuration("editly-out.mp4")).toBe(11); 21 | }, 22 | 60 * 1000, 23 | ); 24 | -------------------------------------------------------------------------------- /test/transforms/rawVideoToFrames.test.ts: -------------------------------------------------------------------------------- 1 | import { expect, test } from "vitest"; 2 | import { rawVideoToFrames } from "../../src/transforms/rawVideoToFrames.js"; 3 | 4 | test("handles chunk of correct size", () => { 5 | 
const transform = rawVideoToFrames({ width: 2, height: 2, channels: 4 }); 6 | const chunk = new Uint8Array(2 * 2 * 4).fill(128); 7 | 8 | transform.write(chunk); 9 | 10 | const frame: Uint8Array = transform.read(); 11 | expect(frame).toBeInstanceOf(Uint8Array); 12 | expect(frame).toEqual(chunk); 13 | }); 14 | 15 | test("partial frames", () => { 16 | const transform = rawVideoToFrames({ width: 2, height: 2, channels: 4 }); 17 | 18 | // Write half the bytes 19 | transform.write(new Uint8Array(8).fill(128)); 20 | 21 | expect(transform.read()).toBeNull(); 22 | 23 | // Write rest of the frame and two extra bytes 24 | transform.write(new Uint8Array(10).fill(128)); 25 | 26 | const frame: Uint8Array = transform.read(); 27 | expect(frame).toEqual(new Uint8Array(16).fill(128)); 28 | 29 | // Remaining bytes 30 | transform.write(new Uint8Array(14).fill(128)); 31 | expect(transform.read()).toBeInstanceOf(Uint8Array); 32 | }); 33 | -------------------------------------------------------------------------------- /test/transition.test.ts: -------------------------------------------------------------------------------- 1 | import { describe, expect, test } from "vitest"; 2 | import { Transition } from "../src/transition.js"; 3 | 4 | describe("constructor", () => { 5 | test("null", () => { 6 | const transition = new Transition(null); 7 | expect(transition.duration).toBe(0); 8 | }); 9 | 10 | test("random transition", () => { 11 | const transition = new Transition({ name: "random", duration: 1 }); 12 | expect(transition.name).toBeDefined(); 13 | expect(transition.name).not.toBe("random"); 14 | }); 15 | 16 | test("directional-left", () => { 17 | const transition = new Transition({ name: "directional-left" }); 18 | expect(transition.name).toBe("directional"); 19 | expect(transition.params).toEqual({ direction: [1, 0] }); 20 | }); 21 | 22 | test("raises error with unknown transition", () => { 23 | expect(() => new Transition({ name: "unknown", duration: 1 })).toThrow( 24 | "Transition not found: unknown", 25 | ); 26 | }); 27 | }); 28 | 29 | describe("easingFunction", () => { 30 | test("linear", () => { 31 | const transition = new Transition({ name: "fade", easing: "linear" }); 32 | expect(transition.easingFunction(0.5)).toBe(0.5); 33 | }); 34 | 35 | test("easeOutExpo", () => { 36 | const transition = new Transition({ name: "fade", easing: "easeOutExpo" }); 37 | expect(transition.easingFunction(0.2)).toBe(0.75); 38 | }); 39 | 40 | test("easeInOutCubic", () => { 41 | const transition = new Transition({ name: "fade", easing: "easeInOutCubic" }); 42 | expect(transition.easingFunction(0.2)).toBeCloseTo(0.032, 3); 43 | expect(transition.easingFunction(0.5)).toBe(0.5); 44 | expect(transition.easingFunction(0.8)).toBeCloseTo(0.968, 3); 45 | }); 46 | }); 47 | -------------------------------------------------------------------------------- /tsconfig.json: -------------------------------------------------------------------------------- 1 | { 2 | "extends": "@tsconfig/node-lts/tsconfig.json", 3 | "compilerOptions": { 4 | "noEmit": true, 5 | 6 | /* Linting */ 7 | "noUnusedLocals": true, 8 | "noUnusedParameters": true, 9 | "noFallthroughCasesInSwitch": true, 10 | "noUncheckedSideEffectImports": true 11 | } 12 | } 13 | --------------------------------------------------------------------------------
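
The keyframe interpolation in src/util.ts (getFrameByKeyFrames) linearly interpolates every numeric prop between the two keyframes surrounding the given progress, which is how the slide-in-text source derives its opacity and textSlide values. Below is a minimal sketch of that behaviour, using the same keyframes as src/sources/slide-in-text.ts; it assumes it is run from the repository root with a TypeScript runner such as npx tsx, and the console.log calls are illustrative only, not part of the project.

import { getFrameByKeyFrames } from "./src/util.js";

// Keyframes as used by the slide-in-text source: slide/fade in, hold, then fade out.
const keyframes = [
  { t: 0.1, props: { opacity: 1, textSlide: 0 } },
  { t: 0.3, props: { opacity: 1, textSlide: 1 } },
  { t: 0.8, props: { opacity: 1, textSlide: 1 } },
  { t: 0.9, props: { opacity: 0, textSlide: 1 } },
];

// progress 0.2 lies halfway between t=0.1 and t=0.3, so each prop is interpolated with
// interProgress = (0.2 - 0.1) / (0.3 - 0.1) = 0.5:
console.log(getFrameByKeyFrames(keyframes, 0.2)); // { opacity: 1, textSlide: 0.5 }

// Past the last keyframe, prev and next keyframes coincide and its props are returned as-is:
console.log(getFrameByKeyFrames(keyframes, 0.95)); // { opacity: 0, textSlide: 1 }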