├── .gitignore ├── README.md ├── index.js ├── lib ├── handler.js ├── input.js ├── output.js ├── peaks.js └── upload.js ├── package-lock.json ├── package.json ├── serverless.yml └── test ├── events ├── badFormat.json ├── basic.json ├── noAuth.json ├── noCallback.json ├── peaks.json └── rails.json ├── files ├── cantina.mp3 ├── cantina.raw ├── cantina.wav ├── cantina.webm ├── cantinaPeaks100.json └── mini.wav ├── handler.test.js ├── helpers.js ├── input.test.js ├── output.test.js ├── peaks.test.js └── upload.test.js /.gitignore: -------------------------------------------------------------------------------- 1 | *.zip 2 | node_modules/ 3 | .serverless/ 4 | .serverless-offline/ 5 | archive/ 6 | .npmignore 7 | .prettierrc 8 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # serverless-ffmpeg 2 | 3 | Audio and video asset transcoding with a serverless function that wraps FFmpeg. Use as a reference project or clone to deploy quickly to AWS Lambda or another compatible serverless platform. 4 | 5 | # Usage 6 | 7 | The function simply transcodes media based on parameters in the invoking event. In response to the following request, the function will download a file, transcode it to two different audio formats, upload both of those results to separate URLs, and make a callback to my server: 8 | ```json 9 | { 10 | "input": { 11 | "download": { 12 | "url": "https://www.example.com/download", 13 | "headers": { 14 | "Authorization": "bearer download" 15 | } 16 | } 17 | }, 18 | "outputs": [ 19 | { 20 | "format": "webm", 21 | "extension": "weba", 22 | "audio": { 23 | "bitrate": 96 24 | }, 25 | "video": false, 26 | "options": ["-dash 1"], 27 | "upload": { 28 | "url": "https://www.example.com/upload/1", 29 | "headers": { 30 | "Authorization": "bearer upload" 31 | } 32 | } 33 | }, 34 | { 35 | "format": "mp3", 36 | "audio": { 37 | "bitrate": 128 38 | }, 39 | "upload": { 40 | "type": "basic", 41 | "url": "https://www.example.com/upload/2", 42 | "headers": { 43 | "Authorization": "bearer upload" 44 | } 45 | } 46 | } 47 | ], 48 | "callback": { 49 | "url": "https://www.example.com/info", 50 | "headers": { 51 | "Authorization": "bearer callback" 52 | }, 53 | "method": "POST" 54 | } 55 | } 56 | ``` 57 | 58 | Because the function uses [fluent-ffmpeg](https://github.com/fluent-ffmpeg/node-fluent-ffmpeg) as an API, it supports the majority of that library's named options. You can specify an array of command line options for the input or for any output, however, so you can use most of ffmpeg's settings if you are familiar with the CLI. Multiple inputs and streaming are not supported at the moment. Here are the full output options: 59 | ```json 60 | { 61 | "format": "webm", 62 | "extension": "weba", 63 | "duration": 60, 64 | "seek": 30, 65 | "metadata": { 66 | "platform": "soundcloud" 67 | }, 68 | "options": ["-dash 1", "..."], 69 | "audio": { 70 | "bitrate": 96, 71 | "codec": "libopus", 72 | "channels": 2, 73 | "frequency": 44100, 74 | "quality": 0.9, 75 | "filters": ["volume=0.5", "..."] 76 | }, 77 | "video": { 78 | "fps": 60, 79 | "codec": "libvpx", 80 | "bitrate": 1000, 81 | "constantBitrate": true, 82 | "filters": ["fade=in:0:30", "..."], 83 | "frames": 240, 84 | "size": "640x480", 85 | "aspect": "4:3", 86 | "autopad": true, 87 | "keepDAR": true 88 | } 89 | } 90 | ``` 91 | 92 | Keep in mind that some parameters conflict with one another (the above would certainly fail). 
The function passes the above to fluent-ffmpeg, which in turn passes it on to ffmpeg itself. If ffmpeg can't process the command, everything fails. In addition to transcoding, there are two plugins I added - one for generating waveform peaks data and another for performing direct uploads to rails. You can use them as follows: 93 | 94 | ```json 95 | { 96 | "peaks": { 97 | "count": 600, 98 | "quality": 0.8 99 | }, 100 | "outputs": [ 101 | { 102 | "upload": { 103 | "type": "rails" 104 | } 105 | } 106 | ] 107 | } 108 | ``` 109 | If the conversion and uploads are successful, the callback will include ffprobe-generated metadata about the input file, peaks if peaks were requested, and blob IDs if the uploads were of type "rails". If the function fails at any point - usually due to an ill-formed ffmpeg command or a 400-level HTTP response - it will try to make a callback with a relevant status code and status message. 110 | 111 | # Core Dependencies 112 | ### Asset Transcoding 113 | - [FFmpeg](https://ffmpeg.org/) 114 | - [John Van Sickle's FFmpeg Static Builds](https://johnvansickle.com/ffmpeg/) 115 | - [fluent-ffmpeg](https://github.com/fluent-ffmpeg/node-fluent-ffmpeg) 116 | 117 | ### Adapted Plugins 118 | - [ffmpeg-peaks](https://github.com/t4nz/ffmpeg-peaks) by [Gaetano Fiorello](https://github.com/t4nz) - for generating waveform data 119 | - [rails's browser client](https://github.com/rails/rails/tree/master/activestorage/app/javascript/activestorage) - direct uploads with rails 120 | 121 | 122 | ### Testing 123 | - [jest](https://github.com/facebook/jest) as the test framework 124 | - [nock](https://github.com/nock/nock) for mocking network calls in unit tests 125 | - [serverless-offline](https://github.com/dherault/serverless-offline) for local integration testing 126 | 127 | # Setup 128 | There are a couple of steps to take before you can run the function locally or deploy it to a cloud provider. 129 | 130 | Most importantly, you have to get static builds of the **ffmpeg** and **ffprobe** binaries and make them available to your function. John Van Sickle maintains [static builds](https://johnvansickle.com/ffmpeg/) of FFmpeg - you can download a zip that contains the two binaries (and support his great work by becoming a patron of [his Patreon](https://www.patreon.com/johnvansickle)). 131 | 132 | The binaries are too big for a deployment package on most cloud providers, but at least on AWS you can make them available as a layer. After creating the layer, add it to your function in the AWS console or in your Serverless Framework, SAM, or CloudFormation template. 133 | 134 | For local integration testing I use [serverless-offline](https://github.com/dherault/serverless-offline). You don't need to be using Serverless Framework or even have an account to use the offline plugin, but you do need your cloud provider credentials set up locally so serverless-offline can download your layers. Using layers with serverless-offline also requires Docker running on your machine. 135 | 136 | # Notes on Performance 137 | 138 | ### Memory Size 139 | On AWS Lambda, I found that the optimal memory size for transcoding _audio_ files was somewhere between 1536MB and 2048MB, and for _video_ somewhere between 6144MB and 8192MB. Gains past those points are marginal and cost more - whereas up to those memory sizes, scaling up is cost effective because it reduces billed duration nearly proportionally. 
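To put the cost tradeoff in concrete terms: Lambda bills GB-seconds (allocated memory times billed duration), so as long as extra memory buys a near-proportional speedup, the bill stays roughly flat while the job finishes sooner. A minimal sketch - the per-GB-second rate used here is an assumption, so check the current pricing for your region:

```js
// Illustrative only: assumes a price of $0.0000166667 per GB-second.
const PRICE_PER_GB_SECOND = 0.0000166667;

// Billed cost for one invocation: memory (GB) x billed duration (s) x rate.
const billedCost = (memoryMB, durationSeconds) =>
  (memoryMB / 1024) * durationSeconds * PRICE_PER_GB_SECOND;

console.log(billedCost(1024, 60).toFixed(5)); // ~$0.00100 - 60s transcode at 1024MB
console.log(billedCost(2048, 30).toFixed(5)); // ~$0.00100 - double the memory, half the wait, same cost
console.log(billedCost(4096, 28).toFixed(5)); // ~$0.00187 - past the sweet spot, you pay more for little gain
```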
140 | 141 | Unless you're transcoding very large files, the problem is not actually memory availability - FFmpeg is quite memory efficient and the function doesn't use much memory in comparison to the actual files. Increasing memory size is effective, however, because Lambda CPU power is allocated proportionally. 142 | 143 | ### Concurrency 144 | Running multiple transcodings in parallel (as opposed to one transcoding with many outputs) was found to not make much of a difference in billed duration. Separating conversions across different functions is significantly faster, although the cumulative duration and cost will be higher than bundling them together. 145 | 146 | ### Duration Rule of Thumb 147 | 148 | How long does serverless media conversion take? With so many factors, it's very difficult to predict. As a very loose rule of thumb, to convert one minute of one compressed format to another takes about 4 seconds for audio-only and 30 seconds for video with audio. Multiple outputs, higher sample rates or frame rates, and higher bit depths or sizes will slow that down. 149 | -------------------------------------------------------------------------------- /index.js: -------------------------------------------------------------------------------- 1 | const handler = require("./lib/handler"); 2 | 3 | exports.handler = handler; 4 | -------------------------------------------------------------------------------- /lib/handler.js: -------------------------------------------------------------------------------- 1 | const ffmpeg = require("fluent-ffmpeg"); 2 | const fetch = require("node-fetch"); 3 | 4 | const { download, addInputOptions, probe } = require("./input"); 5 | const { prepForPeaks, getPeaks } = require("./peaks"); 6 | const { addOutputs } = require("./output.js"); 7 | const { uploadAll } = require("./upload"); 8 | 9 | if (process.env.BINARY_DIR) { 10 | ffmpeg.setFfmpegPath(process.env.BINARY_DIR + "ffmpeg"); 11 | ffmpeg.setFfprobePath(process.env.BINARY_DIR + "ffprobe"); 12 | } 13 | 14 | function log(message) { 15 | if (process.env.FFMPEG_LOG == 1) console.log(message); 16 | } 17 | 18 | function transcoding(command) { 19 | return new Promise((resolve, reject) => { 20 | command 21 | .on("start", (commandLine) => log(commandLine)) 22 | .on("codecData", (data) => log(data)) 23 | .on("progress", (progress) => log(`${Math.floor(progress.percent * 10) / 10}% complete`)) 24 | .on("error", (err) => reject(err)) 25 | .on("end", () => resolve()) 26 | .run(); 27 | }); 28 | } 29 | 30 | function callback(event) { 31 | if (!event.callback || !event.callback.url) 32 | throw new Error("no callback specified"); 33 | 34 | const { url, headers, method } = event.callback; 35 | return fetch(url, { 36 | method: method || "POST", 37 | headers: { 38 | "Content-Type": "application/json", 39 | ...(headers || {}), 40 | }, 41 | body: JSON.stringify(event), 42 | }).then((res) => log(`callback status ${res.status}: ${res.statusText}`)); 43 | } 44 | 45 | async function handler(event) { 46 | try { 47 | // queue up input 48 | log("begin download"); 49 | const localInput = await download(event.input); 50 | log("download complete"); 51 | const command = ffmpeg(localInput); 52 | addInputOptions(command, event.input); 53 | 54 | // if peaks are requested, add additional output for analysis 55 | if (event.peaks) prepForPeaks(event); 56 | 57 | // add outputs, run command 58 | addOutputs(command, event.outputs); 59 | await transcoding(command); 60 | log("transcoding complete - begin uploads and analysis"); 61 | 62 | // set up 
all our actions, probing, uploading and finding peaks 63 | const actions = []; 64 | actions.push(probe(localInput, event.input)); 65 | actions.push(...uploadAll(event.outputs)); 66 | actions.push(event.peaks ? getPeaks(event) : Promise.resolve()); 67 | 68 | // once everthing is processed make callback 69 | // TODO: use allSettled and manage errors 70 | await Promise.all(actions); 71 | log("uploads and analysis complete"); 72 | event.status = 200; 73 | return callback(event); 74 | } catch (e) { 75 | event.statusText = e.message; 76 | event.status = e.status || 422; 77 | return callback(event); 78 | } 79 | } 80 | 81 | module.exports = handler; 82 | -------------------------------------------------------------------------------- /lib/input.js: -------------------------------------------------------------------------------- 1 | const fetch = require("node-fetch"); 2 | const tmp = require("tmp"); 3 | const fs = require("fs"); 4 | const ffmpeg = require("fluent-ffmpeg"); 5 | 6 | // returns a promise for a readable stream download 7 | function download(input) { 8 | if (!input || !input.download || !input.download.url) { 9 | throw new Error("no downloadable input specified"); 10 | } 11 | 12 | const localInput = tmp.tmpNameSync(); 13 | return fetch(input.download.url, { 14 | method: "GET", 15 | headers: input.download.headers || {}, 16 | }) 17 | .then((res) => { 18 | if (res.ok) return res.body; 19 | 20 | const error = new Error(`resource unavailable: ${res.statusText}`); 21 | error.status = res.status; 22 | throw error; 23 | }) 24 | .then( 25 | (body) => 26 | new Promise((resolve, reject) => { 27 | body.pipe(fs.createWriteStream(localInput)); 28 | body.on("error", (err) => reject(err)); 29 | body.on("end", () => { 30 | input.local = localInput; 31 | resolve(localInput); 32 | }); 33 | }) 34 | ); 35 | } 36 | 37 | function addInputOptions(command, input) { 38 | if (input.format) command.inputFormat(input.format); 39 | if (input.seek) command.seekInput(input.seek); 40 | if (input.fps) command.inputFPS(input.fps); 41 | if (input.native) command.native(); 42 | if (input.options) command.inputOptions(input.options); 43 | } 44 | 45 | function probe(file, input) { 46 | return new Promise((resolve, reject) => { 47 | ffmpeg.ffprobe(file, (err, metadata) => { 48 | if (err) { 49 | reject(err); 50 | } else { 51 | if (input) input.metadata = metadata; 52 | resolve(metadata); 53 | } 54 | }); 55 | }); 56 | } 57 | 58 | module.exports = { download, addInputOptions, probe }; 59 | -------------------------------------------------------------------------------- /lib/output.js: -------------------------------------------------------------------------------- 1 | const tmp = require("tmp"); 2 | 3 | // returns the temporary filenames that will be used to save the outputs 4 | function addOutputs(command, outputs) { 5 | if (!outputs || !outputs.length || outputs.length < 1) 6 | throw new Error("output(s) must be specified"); 7 | return outputs.map((output) => addOutput(command, output)); 8 | } 9 | 10 | // returns the temporary filename that will be used to save the output 11 | function addOutput(command, output) { 12 | if (!output.format) throw new Error("output format must be specified"); 13 | if (output.video === false && output.audio === false) 14 | throw new Error(`no audio or video indicated for ${output.format}`); 15 | 16 | output.local = tmp.tmpNameSync(); 17 | command.output(output.local).format(output.format); 18 | 19 | addGeneralOptions(command, output); 20 | addAudioOptions(command, output.audio); 21 | 
addVideoOptions(command, output.video); 22 | 23 | return output.local; 24 | } 25 | 26 | function addGeneralOptions(command, output) { 27 | if (output.duration) command.duration(output.duration); 28 | if (output.seek) command.seek(output.seek); 29 | if (output.options) command.outputOptions(output.options); 30 | if (output.metadata) { 31 | for (const [key, value] of Object.entries(output.metadata)) { 32 | command.outputOptions(`-metadata ${key}="${value}"`); 33 | } 34 | } 35 | } 36 | 37 | function addAudioOptions(command, options) { 38 | if (options === false) { 39 | command.noAudio(); 40 | } else if (options) { 41 | if (options.codec) command.audioCodec(options.codec); 42 | if (options.bitrate) command.audioBitrate(options.bitrate); 43 | if (options.channels) command.audioChannels(options.channels); 44 | if (options.frequency) command.audioFrequency(options.frequency); 45 | if (options.quality) command.audioQuality(options.quality); 46 | if (options.filters) command.audioFilters(options.filters); 47 | } 48 | } 49 | 50 | function addVideoOptions(command, options) { 51 | if (options === false) { 52 | command.noVideo(); 53 | } else if (options) { 54 | if (options.codec) command.videoCodec(options.codec); 55 | if (options.bitrate) command.videoBitrate(options.bitrate, options.constantBitrate); 56 | if (options.filters) command.videoFilters(options.filters); 57 | if (options.fps) command.fps(options.fps); 58 | if (options.frames) command.frames(options.frames); 59 | if (options.size) command.size(options.size); 60 | if (options.aspect) command.aspect(options.aspect); 61 | if (options.autopad) { 62 | typeof options.autopad === "string" 63 | ? command.autopad(options.autopad) 64 | : command.autopad(); 65 | } 66 | if (options.keepDAR) command.keepDAR(); 67 | } 68 | } 69 | 70 | module.exports = { addOutputs, addOutput }; 71 | -------------------------------------------------------------------------------- /lib/peaks.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | 3 | /** 4 | * Analyzes subranges of raw audio file to find and store peaks 5 | * @param {length} How many subranges to break the waveform into. 6 | * @param {totalSamples} How many samples there are in the whole audio. 7 | * For an AudioBuffer use AudioBuffer.length. 8 | * @param {splitChannels} Whether to return peak data split or merged 9 | * @param {sampleStep} Step between samples compared - 1 analyzes every sample 10 | */ 11 | class PeakStore { 12 | constructor(splitChannels, length, step, totalSamples) { 13 | this.length = length; 14 | this.totalSamples = totalSamples; 15 | this.splitChannels = splitChannels; 16 | this.sampleStep = step; 17 | this.mergedPeaks = []; 18 | } 19 | 20 | /** 21 | * Processes batches of samples and adds new peaks to the store 22 | * @param {buffers} buffers[i] is an array of floats containing the samples of channel i. 
23 | */ 24 | update(buffers) { 25 | const sampleSize = this.totalSamples / this.length; 26 | const channels = buffers.length; 27 | 28 | if (this.lastMax === undefined) { 29 | this.lastMax = Array(channels).fill(0); 30 | this.lastMin = Array(channels).fill(0); 31 | this.indexI = Array(channels).fill(0); 32 | this.indexJ = Array(channels).fill(0); 33 | this.indexJJOverflow = Array(channels).fill(0); 34 | this.splitPeaks = Array(channels) 35 | .fill(null) 36 | .map((i) => []); 37 | } 38 | 39 | for (let c = 0; c < channels; c++) { 40 | let peaks = this.splitPeaks[c]; 41 | let chan = buffers[c]; 42 | 43 | let i; 44 | for (i = this.indexI[c]; i < this.length; i++) { 45 | let start = Math.max(~~(i * sampleSize), this.indexJ[c]); 46 | let end = ~~((i + 1) * sampleSize); 47 | let min = this.lastMin[c]; 48 | let max = this.lastMax[c]; 49 | 50 | let broken = false; 51 | let jj; 52 | for (let j = start; j < end; j += this.sampleStep) { 53 | jj = j - this.indexJ[c] + this.indexJJOverflow[c]; 54 | 55 | if (jj > chan.length - 1) { 56 | this.indexI[c] = i; 57 | this.indexJJOverflow[c] = jj - (chan.length - 1) - 1; 58 | this.indexJ[c] = j; 59 | this.lastMax[c] = max; 60 | this.lastMin[c] = min; 61 | broken = true; 62 | break; 63 | } 64 | 65 | let value = chan[jj]; 66 | 67 | if (value > max) { 68 | max = value; 69 | } 70 | 71 | if (value < min) { 72 | min = value; 73 | } 74 | } 75 | 76 | if (broken) break; 77 | else { 78 | this.lastMax[c] = 0; 79 | this.lastMin[c] = 0; 80 | } 81 | 82 | peaks[2 * i] = min; 83 | peaks[2 * i + 1] = max; 84 | 85 | if (c == 0 || min < this.mergedPeaks[2 * i]) { 86 | this.mergedPeaks[2 * i] = min; 87 | } 88 | 89 | if (c == 0 || max > this.mergedPeaks[2 * i + 1]) { 90 | this.mergedPeaks[2 * i + 1] = max; 91 | } 92 | } 93 | 94 | this.indexI[c] = i; // We finished for channel c. For the next call start from i = this.length so we do nothing. 95 | } 96 | } 97 | 98 | // returns split (Array of Arrays) or merged (Array) peaks 99 | get() { 100 | return this.splitChannels ? this.splitPeaks : this.mergedPeaks; 101 | } 102 | } 103 | 104 | class PeakFinder { 105 | constructor(length, numChannels = 1, precision = 1) { 106 | this.oddByte = null; 107 | this.sc = 0; 108 | 109 | this.numChannels = numChannels; 110 | this.length = length; 111 | this.precision = precision; 112 | } 113 | 114 | /** 115 | * Extracts peaks from an audio file. 116 | * Writes a JSON file if an output path was specified. 117 | * @param {String} rawFilePath - Source raw audio file path. 118 | * @param {String|Function} outputFilePath - Optional output json file path. 
119 | */ 120 | async getPeaks(rawFilePath, outputFilePath = null) { 121 | return this.extractPeaks(rawFilePath) 122 | .catch((err) => { 123 | throw new Error(`extracting peaks: ${err.message}`); 124 | }) 125 | .then((peaks) => { 126 | if (outputFilePath) { 127 | fs.writeFileSync(outputFilePath, JSON.stringify(peaks)); 128 | } 129 | return peaks; 130 | }) 131 | .catch((err) => { 132 | throw new Error(`writing to file: ${err.message}`); 133 | }); 134 | } 135 | 136 | async extractPeaks(rawFilePath) { 137 | return new Promise((resolve, reject) => { 138 | const stats = fs.statSync(rawFilePath); 139 | const totalSamples = ~~(stats.size / 2 / this.numChannels); 140 | this.peakStore = new PeakStore( 141 | this.numChannels >= 2, 142 | this.length, 143 | this.precision, 144 | totalSamples 145 | ); 146 | 147 | const readable = fs.createReadStream(rawFilePath); 148 | readable.on("data", this.onChunkRead.bind(this)); 149 | readable.on("error", (err) => reject(err)); 150 | readable.on("end", () => resolve(this.peakStore.get())); 151 | }); 152 | } 153 | 154 | onChunkRead(chunk) { 155 | let i = 0; 156 | let value; 157 | let samples = Array(this.numChannels) 158 | .fill(null) 159 | .map((i) => []); 160 | 161 | if (this.oddByte !== null) { 162 | value = (chunk.readInt8(i++, true) << 8) | this.oddByte; 163 | samples[this.sc].push(value); 164 | this.sc = (this.sc + 1) % this.numChannels; 165 | } 166 | 167 | for (; i + 1 < chunk.length; i += 2) { 168 | value = chunk.readInt16LE(i, true); 169 | samples[this.sc].push(value); 170 | this.sc = (this.sc + 1) % this.numChannels; 171 | } 172 | this.oddByte = i < chunk.length ? chunk.readUInt8(i, true) : null; 173 | this.peakStore.update(samples); 174 | } 175 | } 176 | 177 | function peaksIntermediary(quality = 0.5) { 178 | quality = Math.min(Math.max(quality, 0.1), 1); 179 | return { 180 | format: "s16le", 181 | options: `-ar ${Math.round(44100 * quality ** 2)}`, 182 | audio: { 183 | options: "-ac 1", 184 | }, 185 | video: false 186 | }; 187 | } 188 | 189 | function prepForPeaks(event) { 190 | const intermediary = peaksIntermediary(event.peaks.quality); 191 | event.outputs.push(intermediary); 192 | event.peaks.intermediary = intermediary; 193 | return event; 194 | } 195 | 196 | function getPeaks(event) { 197 | if (!event.peaks.intermediary) 198 | throw new Error("server did not prepare file for finding peaks"); 199 | 200 | const pcm = event.peaks.intermediary.local; 201 | const finder = new PeakFinder(event.peaks.count || 600); 202 | return new Promise((resolve, reject) => { 203 | finder 204 | .getPeaks(pcm) 205 | .then((peaks) => { 206 | event.peaks = peaks; 207 | resolve(peaks); 208 | }) 209 | .catch((err) => reject(err)); 210 | }); 211 | } 212 | 213 | module.exports = { prepForPeaks, getPeaks }; 214 | -------------------------------------------------------------------------------- /lib/upload.js: -------------------------------------------------------------------------------- 1 | const fetch = require("node-fetch"); 2 | const fs = require("fs"); 3 | const mime = require("mime-types"); 4 | const path = require("path"); 5 | const md5file = require("md5-file"); 6 | 7 | function uploadAll(outputs) { 8 | const uploads = []; 9 | outputs.forEach((output) => { 10 | if (output.upload) uploads.push(upload(output)); 11 | }); 12 | return uploads; 13 | } 14 | 15 | function upload(output) { 16 | output.upload.byteSize = fs.statSync(output.local).size; 17 | output.upload.contentType = contentType(output); 18 | 19 | if (output.upload.type === "rails") { 20 | return 
railsUpload(output).then(({ signedId, upload }) => { 21 | output.id = signedId; 22 | return upload; 23 | }); 24 | } else { 25 | return basicUpload(output); 26 | } 27 | } 28 | 29 | function basicUpload({ local, upload }) { 30 | return fetch(upload.url, { 31 | method: "PUT", 32 | body: fs.createReadStream(local), 33 | headers: { 34 | "Content-Length": upload.byteSize, 35 | "Content-Type": upload.contentType, 36 | ...upload.headers, 37 | }, 38 | }).then((res) => { 39 | if (res.ok) return res; 40 | 41 | const error = new Error( 42 | `failed to upload ${upload.contentType}: ${res.statusText}` 43 | ); 44 | error.status = res.status; 45 | throw error; 46 | }); 47 | } 48 | 49 | function contentType(output) { 50 | return ( 51 | mime.lookup(output.extension) || 52 | mime.lookup(output.format) || 53 | "application/octet-stream" 54 | ); 55 | } 56 | 57 | const checksumBase64 = (filename) => 58 | Buffer.from(md5file.sync(filename), "hex").toString("base64"); 59 | 60 | async function railsUpload(output) { 61 | const blob = await railsBlob(output); 62 | output.upload = { 63 | ...output.upload, 64 | url: blob.direct_upload.url, 65 | headers: blob.direct_upload.headers, 66 | }; 67 | 68 | return { signedId: blob.signed_id, upload: basicUpload(output) }; 69 | } 70 | 71 | function railsBlob({ local, upload }) { 72 | return fetch(upload.url, { 73 | method: "POST", 74 | headers: { 75 | Accept: "application/json", 76 | "Content-Type": "application/json", 77 | ...upload.headers, 78 | }, 79 | body: JSON.stringify({ 80 | blob: { 81 | checksum: checksumBase64(local), 82 | byte_size: upload.byteSize, 83 | content_type: upload.contentType, 84 | filename: upload.name || path.basename(local), 85 | }, 86 | }), 87 | }).then((response) => { 88 | if (response.ok) return response.json(); 89 | 90 | const error = new Error( 91 | `failed to post blob to rails: ${response.statusText}` 92 | ); 93 | error.status = response.status; 94 | throw error; 95 | }); 96 | } 97 | 98 | module.exports = { uploadAll, upload, basicUpload, railsUpload, railsBlob }; 99 | -------------------------------------------------------------------------------- /package.json: -------------------------------------------------------------------------------- 1 | { 2 | "name": "ffmpeg-microservice", 3 | "version": "0.1.2", 4 | "description": "microservice for transcoding audio using ffmpeg and fluent-ffmpeg-node", 5 | "repository": "https://github.com/adghayes/ffmpeg-lambda", 6 | "main": "index.js", 7 | "scripts": { 8 | "pack": "npm-pack-zip", 9 | "test": "jest", 10 | "update-lambda": "npm run pack && aws lambda update-function-code --function-name $npm_package_name --zip-file fileb://$npm_package_name.zip" 11 | }, 12 | "keywords": [], 13 | "author": "Andrew Hayes", 14 | "license": "ISC", 15 | "dependencies": { 16 | "fluent-ffmpeg": "^2.1.2", 17 | "fs": "0.0.1-security", 18 | "md5-file": "^5.0.0", 19 | "mime-types": "^2.1.27", 20 | "node-fetch": "^2.6.1", 21 | "tmp": "^0.2.1" 22 | }, 23 | "devDependencies": { 24 | "app-root-path": "^3.0.0", 25 | "aws-sdk": "^2.804.0", 26 | "hasbin": "^1.2.3", 27 | "jest": "^26.6.3", 28 | "lodash.clonedeep": "^4.5.0", 29 | "nock": "^13.0.5", 30 | "npm-pack-zip": "^1.2.7", 31 | "serverless": "^2.16.1", 32 | "serverless-offline": "^6.8.0" 33 | }, 34 | "bundledDependencies": [ 35 | "fluent-ffmpeg", 36 | "node-fetch", 37 | "fs", 38 | "tmp", 39 | "mime-types", 40 | "md5-file" 41 | ] 42 | } 43 | -------------------------------------------------------------------------------- /serverless.yml: 
-------------------------------------------------------------------------------- 1 | org: adghayes 2 | app: juke 3 | service: ffmpeg 4 | 5 | frameworkVersion: "2" 6 | 7 | provider: 8 | name: aws 9 | runtime: nodejs12.x 10 | profile: default 11 | region: us-east-1 12 | 13 | package: 14 | exclude: 15 | - .serverless-offline/** 16 | - test/** 17 | - archive/** 18 | 19 | functions: 20 | transcode: 21 | runtime: nodejs12.x 22 | handler: index.handler 23 | layers: 24 | - arn:aws:lambda:us-east-1:086163355854:layer:ffmpeg:1 25 | environment: 26 | BINARY_DIR: /opt/ffmpeg-git-20201128-amd64-static/ 27 | FFMPEG_LOG: 1 28 | memorySize: 2048 29 | timeout: 300 30 | 31 | plugins: 32 | - serverless-offline 33 | 34 | custom: 35 | serverless-offline: 36 | useDocker: true 37 | -------------------------------------------------------------------------------- /test/events/badFormat.json: -------------------------------------------------------------------------------- 1 | { 2 | "input": { 3 | "download": { 4 | "url": "https://www.example.com/download/badFormat", 5 | "headers": { 6 | "Authorization": "bearer download" 7 | } 8 | } 9 | }, 10 | "outputs": [ 11 | { 12 | "format": "copacetic", 13 | "audio": { 14 | "bitrate": 96 15 | }, 16 | "metadata": { 17 | "platform": "platform" 18 | }, 19 | "options": ["-dash 1"], 20 | "upload": { 21 | "type": "basic", 22 | "url": "https://www.example.com/upload/badFormat-0", 23 | "headers": { 24 | "Authorization": "bearer upload" 25 | } 26 | } 27 | }, 28 | { 29 | "format": "mp3", 30 | "audio": { 31 | "bitrate": 64 32 | }, 33 | "metadata": { 34 | "platform": "platform" 35 | }, 36 | "upload": { 37 | "type": "basic", 38 | "url": "https://www.example.com/upload/badFormat-1", 39 | "headers": { 40 | "Authorization": "bearer upload" 41 | } 42 | } 43 | } 44 | ], 45 | "callback": { 46 | "url": "https://www.example.com/info/badFormat", 47 | "headers": { 48 | "Authorization": "bearer callback" 49 | }, 50 | "method": "POST" 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /test/events/basic.json: -------------------------------------------------------------------------------- 1 | { 2 | "input": { 3 | "download": { 4 | "url": "https://www.example.com/download/basic", 5 | "headers": { 6 | "Authorization": "bearer download" 7 | } 8 | } 9 | }, 10 | "outputs": [ 11 | { 12 | "format": "webm", 13 | "audio": { 14 | "bitrate": 96 15 | }, 16 | "metadata": { 17 | "platform": "platform" 18 | }, 19 | "options": ["-dash 1"], 20 | "upload": { 21 | "type": "basic", 22 | "url": "https://www.example.com/upload/basic-0", 23 | "headers": { 24 | "Authorization": "bearer upload" 25 | } 26 | } 27 | }, 28 | { 29 | "format": "mp3", 30 | "audio": { 31 | "bitrate": 64 32 | }, 33 | "metadata": { 34 | "platform": "platform" 35 | }, 36 | "upload": { 37 | "type": "basic", 38 | "url": "https://www.example.com/upload/basic-1", 39 | "headers": { 40 | "Authorization": "bearer upload" 41 | } 42 | } 43 | } 44 | ], 45 | "callback": { 46 | "url": "https://www.example.com/info/basic", 47 | "headers": { 48 | "Authorization": "bearer callback" 49 | }, 50 | "method": "POST" 51 | } 52 | } 53 | -------------------------------------------------------------------------------- /test/events/noAuth.json: -------------------------------------------------------------------------------- 1 | { 2 | "input": { 3 | "download": { 4 | "url": "https://www.example.com/download/noAuth" 5 | } 6 | }, 7 | "outputs": [ 8 | { 9 | "format": "webm", 10 | "audio": { 11 | "bitrate": 96 12 | }, 13 | "metadata": 
{ 14 | "platform": "platform" 15 | }, 16 | "options": ["-dash 1"], 17 | "upload": { 18 | "type": "basic", 19 | "url": "https://www.example.com/upload/noAuth-0", 20 | "headers": { 21 | "Authorization": "bearer upload" 22 | } 23 | } 24 | }, 25 | { 26 | "format": "mp3", 27 | "audio": { 28 | "bitrate": 64 29 | }, 30 | "metadata": { 31 | "platform": "platform" 32 | }, 33 | "upload": { 34 | "type": "basic", 35 | "url": "https://www.example.com/upload/noAuth-1", 36 | "headers": { 37 | "Authorization": "bearer upload" 38 | } 39 | } 40 | } 41 | ], 42 | "callback": { 43 | "url": "https://www.example.com/info/noAuth", 44 | "headers": { 45 | "Authorization": "bearer callback" 46 | }, 47 | "method": "POST" 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /test/events/noCallback.json: -------------------------------------------------------------------------------- 1 | { 2 | "input": {}, 3 | "outputs": [], 4 | "callback": { 5 | "headers": { 6 | "Authorization": "bearer callback" 7 | }, 8 | "method": "POST" 9 | } 10 | } 11 | -------------------------------------------------------------------------------- /test/events/peaks.json: -------------------------------------------------------------------------------- 1 | { 2 | "peaks": { 3 | "count": 600, 4 | "quality": 0.8 5 | }, 6 | "input": { 7 | "download": { 8 | "url": "https://www.example.com/download/peaks", 9 | "headers": { 10 | "Authorization": "bearer download" 11 | } 12 | } 13 | }, 14 | "outputs": [ 15 | { 16 | "format": "webm", 17 | "audio": { 18 | "bitrate": 96 19 | }, 20 | "metadata": { 21 | "platform": "platform" 22 | }, 23 | "options": ["-dash 1"], 24 | "upload": { 25 | "type": "basic", 26 | "url": "https://www.example.com/upload/peaks", 27 | "headers": { 28 | "Authorization": "bearer upload" 29 | } 30 | } 31 | } 32 | ], 33 | "callback": { 34 | "url": "https://www.example.com/info/peaks", 35 | "headers": { 36 | "Authorization": "bearer callback" 37 | }, 38 | "method": "POST" 39 | } 40 | } 41 | -------------------------------------------------------------------------------- /test/events/rails.json: -------------------------------------------------------------------------------- 1 | { 2 | "input": { 3 | "download": { 4 | "url": "https://www.example.com/download/rails", 5 | "headers": { 6 | "Authorization": "bearer download" 7 | } 8 | } 9 | }, 10 | "outputs": [ 11 | { 12 | "format": "webm", 13 | "audio": { 14 | "bitrate": 96 15 | }, 16 | "metadata": { 17 | "platform": "platform" 18 | }, 19 | "options": ["-dash 1"], 20 | "upload": { 21 | "type": "rails", 22 | "url": "https://www.example.com/rails/rails-0", 23 | "headers": { 24 | "Authorization": "bearer rails" 25 | }, 26 | "name": "webm_transcoding" 27 | } 28 | }, 29 | { 30 | "format": "mp3", 31 | "audio": { 32 | "bitrate": 64 33 | }, 34 | "metadata": { 35 | "platform": "platform" 36 | }, 37 | "upload": { 38 | "type": "rails", 39 | "url": "https://www.example.com/rails/rails-1", 40 | "headers": { 41 | "Authorization": "bearer rails" 42 | }, 43 | "name": "mp3_transcoding" 44 | } 45 | } 46 | ], 47 | "callback": { 48 | "url": "https://www.example.com/info/rails", 49 | "headers": { 50 | "Authorization": "bearer callback" 51 | }, 52 | "method": "POST" 53 | } 54 | } 55 | -------------------------------------------------------------------------------- /test/files/cantina.mp3: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/adghayes/serverless-ffmpeg/7fecb4a77bd73d12957af1dd3dc5a6ed2a2bfc70/test/files/cantina.mp3 -------------------------------------------------------------------------------- /test/files/cantina.raw: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adghayes/serverless-ffmpeg/7fecb4a77bd73d12957af1dd3dc5a6ed2a2bfc70/test/files/cantina.raw -------------------------------------------------------------------------------- /test/files/cantina.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adghayes/serverless-ffmpeg/7fecb4a77bd73d12957af1dd3dc5a6ed2a2bfc70/test/files/cantina.wav -------------------------------------------------------------------------------- /test/files/cantina.webm: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adghayes/serverless-ffmpeg/7fecb4a77bd73d12957af1dd3dc5a6ed2a2bfc70/test/files/cantina.webm -------------------------------------------------------------------------------- /test/files/cantinaPeaks100.json: -------------------------------------------------------------------------------- 1 | { 2 | "version": 2, 3 | "channels": 1, 4 | "sample_rate": 22050, 5 | "samples_per_pixel": 220, 6 | "bits": 16, 7 | "length": 301, 8 | "data": [ 9 | -27, 10 | 7, 11 | -19, 12 | 11, 13 | -18, 14 | 14, 15 | -20, 16 | 37, 17 | -11, 18 | 38, 19 | -13, 20 | 18, 21 | -21, 22 | 27, 23 | 11, 24 | 28, 25 | -15, 26 | 46, 27 | -23, 28 | -5, 29 | -45, 30 | -7, 31 | -42, 32 | 1, 33 | -14, 34 | -1, 35 | -10, 36 | 20, 37 | -12, 38 | 36, 39 | 33, 40 | 61, 41 | 1, 42 | 46, 43 | 0, 44 | 41, 45 | -9, 46 | 30, 47 | -8, 48 | 19, 49 | -5, 50 | 18, 51 | -14, 52 | 5, 53 | -13, 54 | 0, 55 | -13, 56 | 2, 57 | -12, 58 | 4, 59 | -9, 60 | 3, 61 | -9, 62 | 4, 63 | -3484, 64 | 2755, 65 | -3443, 66 | 3088, 67 | -1839, 68 | 2085, 69 | -2252, 70 | 1841, 71 | -1889, 72 | 1493, 73 | -1351, 74 | 1105, 75 | -1218, 76 | 1845, 77 | -2231, 78 | 1860, 79 | -1270, 80 | 1419, 81 | -3020, 82 | 3094, 83 | -2541, 84 | 2399, 85 | -1993, 86 | 2306, 87 | -2098, 88 | 1820, 89 | -1253, 90 | 1224, 91 | -1242, 92 | 1883, 93 | -1538, 94 | 1559, 95 | -1136, 96 | 1795, 97 | -1241, 98 | 1045, 99 | -906, 100 | 770, 101 | -820, 102 | 774, 103 | -844, 104 | 830, 105 | -669, 106 | 980, 107 | -834, 108 | 807, 109 | -779, 110 | 659, 111 | -705, 112 | 767, 113 | -671, 114 | 519, 115 | -402, 116 | 579, 117 | -446, 118 | 579, 119 | -566, 120 | 575, 121 | -497, 122 | 473, 123 | -433, 124 | 499, 125 | -743, 126 | 617, 127 | -2275, 128 | 2417, 129 | -3898, 130 | 3643, 131 | -4192, 132 | 5628, 133 | -7937, 134 | 6667, 135 | -8587, 136 | 6908, 137 | -4410, 138 | 9002, 139 | -6959, 140 | 8291, 141 | -8461, 142 | 5655, 143 | -5280, 144 | 5082, 145 | -5177, 146 | 4561, 147 | -5365, 148 | 3531, 149 | -4286, 150 | 3942, 151 | -2646, 152 | 3334, 153 | -4190, 154 | 4118, 155 | -4485, 156 | 2629, 157 | -4180, 158 | 3533, 159 | -3813, 160 | 3200, 161 | -3138, 162 | 3371, 163 | -3188, 164 | 2432, 165 | -2483, 166 | 3019, 167 | -2430, 168 | 2095, 169 | -1752, 170 | 2108, 171 | -1824, 172 | 1995, 173 | -1710, 174 | 2525, 175 | -3198, 176 | 2836, 177 | -5797, 178 | 6470, 179 | -6157, 180 | 5626, 181 | -5040, 182 | 4622, 183 | -5300, 184 | 3906, 185 | -4745, 186 | 5352, 187 | -4117, 188 | 6144, 189 | -3035, 190 | 4584, 191 | -4287, 192 | 3424, 193 | -3832, 194 | 2877, 195 | -3222, 196 | 2261, 197 | -3640, 198 | 2879, 199 | -4142, 200 | 
2540, 201 | -3913, 202 | 2684, 203 | -2482, 204 | 2111, 205 | -2098, 206 | 1539, 207 | -1444, 208 | 1261, 209 | -1792, 210 | 1100, 211 | -1649, 212 | 1297, 213 | -1639, 214 | 1561, 215 | -1094, 216 | 1122, 217 | -1012, 218 | 1047, 219 | -877, 220 | 964, 221 | -1217, 222 | 1128, 223 | -1579, 224 | 3023, 225 | -3068, 226 | 4948, 227 | -5221, 228 | 5234, 229 | -6026, 230 | 3622, 231 | -5078, 232 | 4649, 233 | -5053, 234 | 4986, 235 | -3976, 236 | 3497, 237 | -3495, 238 | 3905, 239 | -2788, 240 | 3047, 241 | -3011, 242 | 3377, 243 | -2577, 244 | 2385, 245 | -2290, 246 | 2103, 247 | -2783, 248 | 2055, 249 | -2982, 250 | 2243, 251 | -3419, 252 | 2360, 253 | -2599, 254 | 1715, 255 | -2305, 256 | 1963, 257 | -1986, 258 | 1486, 259 | -1468, 260 | 2182, 261 | -1125, 262 | 1800, 263 | -1210, 264 | 1553, 265 | -1451, 266 | 1993, 267 | -2485, 268 | 2449, 269 | -2733, 270 | 3350, 271 | -7942, 272 | 6480, 273 | -8176, 274 | 7224, 275 | -6469, 276 | 6359, 277 | -5281, 278 | 6130, 279 | -4373, 280 | 4915, 281 | -4276, 282 | 3514, 283 | -3410, 284 | 3324, 285 | -2284, 286 | 2522, 287 | -3219, 288 | 2072, 289 | -3950, 290 | 2001, 291 | -3496, 292 | 1465, 293 | -2456, 294 | 1320, 295 | -2195, 296 | 1214, 297 | -1434, 298 | 942, 299 | -1620, 300 | 1010, 301 | -2053, 302 | 1439, 303 | -1887, 304 | 1566, 305 | -1504, 306 | 1487, 307 | -1019, 308 | 1134, 309 | -843, 310 | 793, 311 | -756, 312 | 761, 313 | -861, 314 | 628, 315 | -2644, 316 | 2324, 317 | -3933, 318 | 2925, 319 | -5326, 320 | 4409, 321 | -5536, 322 | 3927, 323 | -4041, 324 | 4802, 325 | -6055, 326 | 3683, 327 | -3450, 328 | 6198, 329 | -4397, 330 | 3866, 331 | -3422, 332 | 3166, 333 | -3138, 334 | 2971, 335 | -2322, 336 | 2125, 337 | -2417, 338 | 1883, 339 | -3862, 340 | 2414, 341 | -4299, 342 | 2838, 343 | -5348, 344 | 5080, 345 | -5074, 346 | 6357, 347 | -4760, 348 | 5314, 349 | -3821, 350 | 3314, 351 | -3642, 352 | 3919, 353 | -3830, 354 | 3798, 355 | -2741, 356 | 3514, 357 | -2671, 358 | 3351, 359 | -3339, 360 | 3054, 361 | -3777, 362 | 4248, 363 | -4005, 364 | 3415, 365 | -2355, 366 | 3160, 367 | -1657, 368 | 2430, 369 | -2139, 370 | 2084, 371 | -2231, 372 | 1817, 373 | -1537, 374 | 2031, 375 | -1152, 376 | 1538, 377 | -1027, 378 | 1434, 379 | -821, 380 | 1212, 381 | -936, 382 | 929, 383 | -741, 384 | 1045, 385 | -950, 386 | 735, 387 | -3748, 388 | 6472, 389 | -4809, 390 | 4440, 391 | -5848, 392 | 4234, 393 | -4986, 394 | 4530, 395 | -6150, 396 | 5516, 397 | -4712, 398 | 5260, 399 | -4741, 400 | 3764, 401 | -2745, 402 | 3678, 403 | -2274, 404 | 2626, 405 | -1846, 406 | 2339, 407 | -1859, 408 | 2568, 409 | -4079, 410 | 3169, 411 | -3513, 412 | 3798, 413 | -5922, 414 | 5211, 415 | -5816, 416 | 3967, 417 | -5964, 418 | 4607, 419 | -3112, 420 | 2271, 421 | -4732, 422 | 2260, 423 | -2763, 424 | 2483, 425 | -2690, 426 | 2422, 427 | -2328, 428 | 2387, 429 | -2772, 430 | 1973, 431 | -3196, 432 | 2275, 433 | -3432, 434 | 2867, 435 | -3780, 436 | 3290, 437 | -3898, 438 | 3414, 439 | -2677, 440 | 2817, 441 | -2616, 442 | 2016, 443 | -1732, 444 | 2047, 445 | -1521, 446 | 2390, 447 | -1798, 448 | 2394, 449 | -1960, 450 | 2129, 451 | -1752, 452 | 1390, 453 | -1656, 454 | 2251, 455 | -2923, 456 | 2193, 457 | -2121, 458 | 2925, 459 | -3054, 460 | 2700, 461 | -2219, 462 | 2297, 463 | -1930, 464 | 2621, 465 | -2317, 466 | 2498, 467 | -1197, 468 | 1578, 469 | -1006, 470 | 1350, 471 | -793, 472 | 821, 473 | -614, 474 | 716, 475 | -690, 476 | 545, 477 | -552, 478 | 655, 479 | -572, 480 | 597, 481 | -394, 482 | 397, 483 | -330, 484 | 415, 485 | -389, 486 | 452, 487 
| -302, 488 | 412, 489 | -385, 490 | 341, 491 | -368, 492 | 325, 493 | -261, 494 | 449, 495 | -340, 496 | 430, 497 | -273, 498 | 301, 499 | -1649, 500 | 2194, 501 | -3944, 502 | 2569, 503 | -5092, 504 | 4298, 505 | -3833, 506 | 5243, 507 | -5198, 508 | 3829, 509 | -4628, 510 | 6330, 511 | -6275, 512 | 7335, 513 | -5370, 514 | 2975, 515 | -3062, 516 | 6882, 517 | -3589, 518 | 4010, 519 | -4729, 520 | 4203, 521 | -4059, 522 | 2330, 523 | -4264, 524 | 4243, 525 | -3659, 526 | 4904, 527 | -4134, 528 | 2711, 529 | -3479, 530 | 2495, 531 | -2259, 532 | 3787, 533 | -2068, 534 | 3646, 535 | -2376, 536 | 2274, 537 | -2450, 538 | 2555, 539 | -2363, 540 | 2860, 541 | -2467, 542 | 2305, 543 | -2076, 544 | 1372, 545 | -1803, 546 | 2060, 547 | -4524, 548 | 7389, 549 | -4927, 550 | 5333, 551 | -6041, 552 | 5718, 553 | -4365, 554 | 5808, 555 | -4780, 556 | 5169, 557 | -3888, 558 | 3926, 559 | -4700, 560 | 3896, 561 | -4122, 562 | 4029, 563 | -3937, 564 | 3475, 565 | -5157, 566 | 4128, 567 | -4225, 568 | 2839, 569 | -3909, 570 | 3393, 571 | -2883, 572 | 2444, 573 | -2185, 574 | 1943, 575 | -2473, 576 | 2370, 577 | -3066, 578 | 2351, 579 | -3586, 580 | 2359, 581 | -2455, 582 | 3085, 583 | -2350, 584 | 2450, 585 | -2238, 586 | 4080, 587 | -2139, 588 | 2594, 589 | -1512, 590 | 2128, 591 | -2526, 592 | 2333, 593 | -2932, 594 | 2600, 595 | -3748, 596 | 3380, 597 | -4275, 598 | 2684, 599 | -2858, 600 | 2914, 601 | -1273, 602 | 2769, 603 | -3049, 604 | 2635, 605 | -2591, 606 | 2668, 607 | -1965, 608 | 2149, 609 | -1051, 610 | 1700 611 | ] 612 | } 613 | -------------------------------------------------------------------------------- /test/files/mini.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/adghayes/serverless-ffmpeg/7fecb4a77bd73d12957af1dd3dc5a6ed2a2bfc70/test/files/mini.wav -------------------------------------------------------------------------------- /test/handler.test.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | 3 | const handler = require("../lib/handler"); 4 | const { 5 | downloadNock, 6 | railsNock, 7 | uploadNock, 8 | successNock, 9 | failureNock, 10 | } = require("./helpers.js"); 11 | 12 | const projectRoot = require("app-root-path"); 13 | const inputFile = projectRoot + "/test/files/cantina.wav"; 14 | const outputFiles = ["webm", "mp3"].map( 15 | (ext) => projectRoot + "/test/files/cantina." 
+ ext 16 | ); 17 | 18 | test("basic event", async () => { 19 | const event = JSON.parse( 20 | fs.readFileSync(projectRoot + "/test/events/basic.json") 21 | ); 22 | const downloadScope = downloadNock(inputFile, "basic"); 23 | const uploadScopes = outputFiles.map((outputFile, idx) => 24 | uploadNock(outputFile, `basic-${idx}`) 25 | ); 26 | const successScope = successNock("basic"); 27 | 28 | await handler(event); 29 | 30 | expect(downloadScope.isDone()).toBe(true); 31 | uploadScopes.forEach((scope) => expect(scope.isDone()).toBe(true)); 32 | expect(successScope.isDone()).toBe(true); 33 | }); 34 | 35 | test("rails event", async () => { 36 | const event = JSON.parse( 37 | fs.readFileSync(projectRoot + "/test/events/rails.json") 38 | ); 39 | const downloadScope = downloadNock(inputFile, "rails"); 40 | const railsScopes = outputFiles.map((outputFile, idx) => 41 | railsNock(outputFile, `rails-${idx}`) 42 | ); 43 | const uploadScopes = outputFiles.map((outputFile, idx) => 44 | uploadNock(outputFile, `rails-${idx}`) 45 | ); 46 | const successScope = successNock("rails", null, true); 47 | 48 | await handler(event); 49 | 50 | expect(downloadScope.isDone()).toBe(true); 51 | railsScopes.forEach((scope) => expect(scope.isDone()).toBe(true)); 52 | uploadScopes.forEach((scope) => expect(scope.isDone()).toBe(true)); 53 | expect(successScope.isDone()).toBe(true); 54 | }); 55 | 56 | test("peaks event", async () => { 57 | const event = JSON.parse( 58 | fs.readFileSync(projectRoot + "/test/events/peaks.json") 59 | ); 60 | const downloadScope = downloadNock(inputFile, "peaks"); 61 | const uploadScope = uploadNock(outputFiles[0], "peaks"); 62 | const successScope = successNock("peaks", event.peaks.count, false); 63 | 64 | await handler(event); 65 | 66 | expect(downloadScope.isDone()).toBe(true); 67 | expect(uploadScope.isDone()).toBe(true); 68 | expect(successScope.isDone()).toBe(true); 69 | }); 70 | 71 | test("failure callback on ffmpeg failure", async () => { 72 | const event = JSON.parse( 73 | fs.readFileSync(projectRoot + "/test/events/badFormat.json") 74 | ); 75 | downloadNock(inputFile, "badFormat"); 76 | const failureScope = failureNock("badFormat"); 77 | 78 | await handler(event); 79 | expect(failureScope.isDone()).toBe(true); 80 | }); 81 | 82 | test("failure callback on download errors", async () => { 83 | const event = JSON.parse( 84 | fs.readFileSync(projectRoot + "/test/events/noAuth.json") 85 | ); 86 | downloadNock(inputFile, "noAuth"); 87 | const failureScope = failureNock("noAuth"); 88 | 89 | await handler(event); 90 | expect(failureScope.isDone()).toBe(true); 91 | }); 92 | 93 | test("throws error if no callback", async () => { 94 | const event = JSON.parse( 95 | fs.readFileSync(projectRoot + "/test/events/noCallback.json") 96 | ); 97 | 98 | expect.assertions(1); 99 | 100 | handler(event).catch((err) => expect(err).toBeTruthy()); 101 | }); 102 | -------------------------------------------------------------------------------- /test/helpers.js: -------------------------------------------------------------------------------- 1 | const nock = require("nock"); 2 | const mime = require("mime-types"); 3 | const fs = require("fs"); 4 | const tmp = require("tmp"); 5 | const md5file = require("md5-file"); 6 | 7 | const sizeCache = {}; 8 | function size(filename) { 9 | if (!sizeCache[filename]) { 10 | sizeCache[filename] = fs.statSync(filename).size; 11 | } 12 | 13 | return sizeCache[filename]; 14 | } 15 | 16 | const checksumCache = {}; 17 | function checksum(filename) { 18 | if (!checksumCache[filename]) { 
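// Rails direct uploads expect the MD5 checksum base64-encoded (not hex), mirroring checksumBase64 in lib/upload.js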
19 | checksumCache[filename] = Buffer.from( 20 | md5file.sync(filename), 21 | "hex" 22 | ).toString("base64"); 23 | } 24 | 25 | return checksumCache[filename]; 26 | } 27 | 28 | function type(filename) { 29 | return mime.lookup(filename); 30 | } 31 | 32 | function auth(type) { 33 | return { 34 | Authorization: `bearer ${type}`, 35 | }; 36 | } 37 | 38 | function downloadNock(filename, id) { 39 | return nock("https://www.example.com", { 40 | reqheaders: auth("download"), 41 | }) 42 | .get(`/download/${id}`) 43 | .replyWithFile(200, filename, { 44 | "Content-Type": type(filename), 45 | "Content-Length": size(filename), 46 | }); 47 | } 48 | 49 | const railsResponse = (id) => ({ 50 | signed_id: id, 51 | direct_upload: { 52 | url: `https://www.example.com/upload/${id}`, 53 | headers: auth("upload"), 54 | }, 55 | }); 56 | 57 | const railsNock = (filename, id) => { 58 | return nock("https://www.example.com", { 59 | reqheaders: { 60 | Accept: "application/json", 61 | "Content-Type": "application/json", 62 | ...auth("rails"), 63 | }, 64 | }) 65 | .post(`/rails/${id}`, { 66 | blob: { 67 | filename: /.*/, 68 | content_type: type(filename), 69 | byte_size: /\d+/, 70 | checksum: /[a-zA-Z0-9+=]+/, 71 | }, 72 | }) 73 | .reply(200, () => railsResponse(id)); 74 | }; 75 | 76 | function uploadNock(filename, id) { 77 | return nock("https://www.example.com", { 78 | reqheaders: { 79 | "Content-Type": type(filename), 80 | "Content-Length": /\d+/, 81 | ...auth("upload"), 82 | }, 83 | }) 84 | .put(`/upload/${id}`, /.+/) 85 | .reply(200); 86 | } 87 | 88 | const successNock = (id, peaks = null, rails = null) => 89 | nock("https://www.example.com", { 90 | reqheaders: auth("callback"), 91 | }) 92 | .post(`/info/${id}`, (body) => { 93 | const peaksGood = 94 | !peaks || 95 | (body.peaks && body.peaks.length && body.peaks.length === peaks * 2); 96 | const idsGood = 97 | !rails || body.outputs.every((output) => !output.upload || output.id); 98 | return body.status && body.input.metadata && peaksGood && idsGood; 99 | }) 100 | .reply(200); 101 | 102 | const failureNock = (id) => 103 | nock("https://www.example.com", { 104 | reqheaders: auth("callback"), 105 | }) 106 | .post(`/info/${id}`, (body) => { 107 | return body.status > 299 && body.statusText; 108 | }) 109 | .reply(200); 110 | 111 | module.exports = { 112 | size, 113 | type, 114 | checksum, 115 | auth, 116 | downloadNock, 117 | railsNock, 118 | uploadNock, 119 | successNock, 120 | failureNock, 121 | }; 122 | -------------------------------------------------------------------------------- /test/input.test.js: -------------------------------------------------------------------------------- 1 | const nock = require("nock"); 2 | const fs = require("fs"); 3 | const ffmpeg = require("fluent-ffmpeg"); 4 | const tmp = require("tmp"); 5 | 6 | const { download, probe, addInputOptions } = require("../lib/input.js"); 7 | 8 | const projectRoot = require("app-root-path"); 9 | const testFile = projectRoot + "/test/files/cantina.wav"; 10 | 11 | const input = { 12 | download: { 13 | url: "https://www.example.com/download", 14 | headers: { 15 | Authorization: "bearer token", 16 | }, 17 | }, 18 | seek: 1, 19 | fps: 60, 20 | }; 21 | 22 | test("download throws error if no url specified", () => { 23 | expect(() => { 24 | download({ download: {} }); 25 | }).toThrow("specified"); 26 | }); 27 | 28 | test("download throws error if download refused", async () => { 29 | nock("https://www.example.com").get("/download").reply(404); 30 | 31 | await 
expect(download(input)).rejects.toThrow("unavailable"); 32 | }); 33 | 34 | test("download adds custom headers", async () => { 35 | const scope = nock("https://www.example.com", { 36 | reqheaders: input.download.headers, 37 | }) 38 | .get("/download") 39 | .replyWithFile(200, testFile); 40 | 41 | await download(input); 42 | expect(scope.isDone()).toBe(true); 43 | }); 44 | 45 | test("download creates file", async () => { 46 | nock("https://www.example.com").get("/download").replyWithFile(200, testFile); 47 | 48 | const createdFile = await download(input); 49 | expect(fs.existsSync(createdFile)).toBe(true); 50 | }); 51 | 52 | test("probe returns metadata object", async () => { 53 | const metadata = await probe(testFile); 54 | expect(metadata).toHaveProperty("streams"); 55 | expect(metadata).toHaveProperty("format"); 56 | }); 57 | 58 | test("probe attaches metadata to input", async () => { 59 | await probe(testFile, input); 60 | expect(input).toHaveProperty("metadata"); 61 | }); 62 | 63 | test("addInputOptions adds options to command", (done) => { 64 | const tempOutputFile = tmp.tmpNameSync(); 65 | const command = ffmpeg(testFile); 66 | command.addOutput(tempOutputFile).format("mp3"); 67 | addInputOptions(command, input); 68 | 69 | command.on("start", (commandLine) => { 70 | ["-ss 1", "-r 60"].forEach((flag) => { 71 | expect(commandLine).toMatch(flag); 72 | }); 73 | done(); 74 | }); 75 | command.run(); 76 | }); 77 | -------------------------------------------------------------------------------- /test/output.test.js: -------------------------------------------------------------------------------- 1 | const ffmpeg = require("fluent-ffmpeg"); 2 | 3 | const { addOutputs, addOutput } = require("../lib/output"); 4 | const projectRoot = require("app-root-path"); 5 | const testFile = projectRoot + "/test/files/cantina.wav"; 6 | 7 | const outputs = [ 8 | { 9 | format: "webm", 10 | audio: { 11 | bitrate: 96, 12 | }, 13 | video: false, 14 | metadata: { 15 | platform: "platform", 16 | }, 17 | options: ["-dash 1"], 18 | upload: { 19 | url: "https://www.example.com/upload/1", 20 | }, 21 | }, 22 | { 23 | format: "mp3", 24 | audio: { 25 | bitrate: 64, 26 | }, 27 | video: false, 28 | upload: { 29 | url: "https://www.example.com/upload/3", 30 | }, 31 | }, 32 | ]; 33 | 34 | const expectedFlags = [ 35 | ["-f webm", "-b:a 96k", "-vn", "-dash 1", '-metadata platform="platform"'], 36 | ["-f mp3", "-b:a 64k", "-vn"], 37 | ]; 38 | 39 | test("addOutput throws if no format specified", () => { 40 | expect(() => { 41 | addOutput(ffmpeg(testFile), {}); 42 | }).toThrow(); 43 | }); 44 | 45 | test("addOutput adds basic audio-only output", (done) => { 46 | const command = ffmpeg(testFile); 47 | const outputFile = addOutput(command, outputs[0]); 48 | 49 | command.on("start", (commandLine) => { 50 | expect(commandLine).toMatch(outputFile); 51 | expectedFlags[0].forEach((flag) => { 52 | expect(commandLine).toMatch(flag); 53 | }); 54 | done(); 55 | }); 56 | command.run(); 57 | }); 58 | 59 | test("addOutputs throws if zero outputs are provided", () => { 60 | expect(() => { 61 | addOutputs(ffmpeg(testFile), []); 62 | }).toThrow(); 63 | 64 | expect(() => { 65 | addOutputs(ffmpeg(testFile)); 66 | }).toThrow(); 67 | }); 68 | 69 | test("addOutputs adds multiple outputs in order", (done) => { 70 | const command = ffmpeg(testFile); 71 | const outputFiles = addOutputs(command, outputs); 72 | 73 | command.on("start", (commandLine) => { 74 | outputFiles.forEach((outputFile, idx) => { 75 | expect(commandLine).toMatch(outputFile); 76 | 
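// ffmpeg applies output options to the output file that follows them, so each flag should sit
// right before its own output path - [^/]* keeps another (slash-containing) temp path from matching in between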
expectedFlags[idx].forEach((flag) => { 77 | expect(commandLine).toMatch(new RegExp(flag + "[^/]*" + outputFile)); 78 | }); 79 | }); 80 | done(); 81 | }); 82 | command.run(); 83 | }); 84 | -------------------------------------------------------------------------------- /test/peaks.test.js: -------------------------------------------------------------------------------- 1 | const fs = require("fs"); 2 | 3 | const { prepForPeaks, getPeaks } = require("../lib/peaks.js"); 4 | 5 | const projectRoot = require("app-root-path"); 6 | const testEvent = () => 7 | JSON.parse(fs.readFileSync(projectRoot + "/test/events/peaks.json")); 8 | 9 | test("prepForPeaks adds new output", () => { 10 | const event = testEvent(); 11 | const expected = { 12 | format: "s16le", 13 | options: expect.stringMatching(/-ar\s\d+/), 14 | audio: expect.anything(), 15 | video: false 16 | }; 17 | 18 | prepForPeaks(event); 19 | expect(event.peaks.intermediary).toMatchObject(expected); 20 | expect(event.outputs[event.outputs.length - 1]).toMatchObject(expected); 21 | }); 22 | 23 | /** 24 | * audiowaveform (aw) is a C++ program that generates peaks 25 | * and provides a way to roughly compare our generated peaks 26 | */ 27 | const testFile = projectRoot + "/test/files/cantina.raw"; 28 | const awOutput = JSON.parse( 29 | fs.readFileSync(projectRoot + "/test/files/cantinaPeaks100.json") 30 | ); 31 | const peaksCount = awOutput.length; 32 | const awPeaks = awOutput.data; 33 | 34 | // generate same number of peaks as audiowaveform 35 | const preppedEvent = testEvent(); 36 | preppedEvent.peaks = { 37 | count: peaksCount, 38 | intermediary: { 39 | local: testFile, 40 | }, 41 | }; 42 | 43 | const myPeaksPromise = getPeaks(preppedEvent); 44 | 45 | test("generated the correct number of peaks", async () => { 46 | const myPeaks = await myPeaksPromise; 47 | 48 | expect(myPeaks.length).toBe(peaksCount * 2); 49 | }); 50 | 51 | test("global max resembles audiowaveform's (exact value, adjacent index)", async () => { 52 | const myPeaks = await myPeaksPromise; 53 | 54 | const [awMax, awIndex] = findMax(awPeaks); 55 | const [myMax, myIndex] = findMax(myPeaks); 56 | 57 | expect(myMax).toBe(awMax); 58 | expect(myIndex).toBeLessThanOrEqual(awIndex + 1); 59 | expect(myIndex).toBeGreaterThanOrEqual(awIndex - 1); 60 | }); 61 | 62 | test("global min resembles audiowaveform's (exact value, adjacent index)", async () => { 63 | const myPeaks = await myPeaksPromise; 64 | 65 | const [awMin, awIndex] = findMin(awPeaks); 66 | const [myMin, myIndex] = findMin(myPeaks); 67 | 68 | expect(myMin).toBe(awMin); 69 | expect(myIndex).toBeLessThanOrEqual(awIndex + 1); 70 | expect(myIndex).toBeGreaterThanOrEqual(awIndex - 1); 71 | }); 72 | 73 | test("global average resembles audiowaveform's (25% margin)", async () => { 74 | margin = 5 / 4; 75 | const myPeaks = await myPeaksPromise; 76 | 77 | myAverage = absAverage(myPeaks); 78 | awAverage = absAverage(awPeaks); 79 | expect(myAverage).toBeLessThan(awAverage * margin); 80 | expect(myAverage).toBeGreaterThan(awAverage / margin); 81 | }); 82 | 83 | test("local averages (tenth of second) resemble audiowaveform's (33% margin)", async () => { 84 | const margin = 4 / 3; 85 | const step = 10; 86 | const myPeaks = await myPeaksPromise; 87 | for (let i = 0; i < length; i += step) { 88 | const mySlice = myPeaks.slice(i, i + step); 89 | const awSlice = awPeaks.slice(i, i + step); 90 | myAverage = absAverage(mySlice); 91 | awAverage = absAverage(awSlice); 92 | expect(myAverage).toBeLessThan(awAverage * margin); 93 | 
expect(myAverage).toBeGreaterThan(awAverage / margin); 94 | } 95 | }); 96 | 97 | function findMin(data) { 98 | const min = Math.min(...data); 99 | const index = data.indexOf(min); 100 | return [min, index]; 101 | } 102 | 103 | function findMax(data) { 104 | const max = Math.max(...data); 105 | const index = data.indexOf(max); 106 | return [max, index]; 107 | } 108 | 109 | function absAverage(numbers) { 110 | const sum = numbers.reduce((sum, number) => (sum += Math.abs(number))); 111 | return sum / numbers.length; 112 | } 113 | -------------------------------------------------------------------------------- /test/upload.test.js: -------------------------------------------------------------------------------- 1 | const path = require("path"); 2 | const fs = require("fs"); 3 | const nock = require("nock"); 4 | const cloneDeep = require("lodash.clonedeep"); 5 | 6 | const { 7 | uploadAll, 8 | upload, 9 | basicUpload, 10 | railsUpload, 11 | railsBlob, 12 | } = require("../lib/upload.js"); 13 | 14 | const { size, type, checksum, auth } = require("./helpers.js"); 15 | 16 | const projectRoot = require("app-root-path"); 17 | const testFile = projectRoot + "/test/files/mini.wav"; 18 | 19 | const outputBase = { 20 | local: testFile, 21 | format: path.extname(testFile), 22 | upload: { 23 | byteSize: size(testFile), 24 | contentType: type(testFile), 25 | }, 26 | }; 27 | 28 | test("basicUpload PUTS exact file to URL", async () => { 29 | const scope = nock("https://www.example.com") 30 | .put(`/upload/exact`, fs.readFileSync(testFile)) 31 | .reply(200); 32 | 33 | const output = cloneDeep(outputBase); 34 | output.upload.url = "https://www.example.com/upload/exact"; 35 | 36 | await basicUpload(output); 37 | expect(scope.isDone()).toBe(true); 38 | }); 39 | 40 | test("basicUpload PUTS to specified URL with required headers", async () => { 41 | const scope = nock("https://www.example.com", { 42 | reqheaders: { 43 | "Content-Type": type(testFile), 44 | "Content-Length": size(testFile), 45 | ...auth("upload"), 46 | }, 47 | }) 48 | .put(`/upload/headers`) 49 | .reply(200); 50 | 51 | const output = cloneDeep(outputBase); 52 | output.upload.url = "https://www.example.com/upload/headers"; 53 | output.upload.headers = auth("upload"); 54 | 55 | await basicUpload(output); 56 | expect(scope.isDone()).toBe(true); 57 | }); 58 | 59 | const railsOutput = cloneDeep(outputBase); 60 | railsOutput.upload.name = "anything"; 61 | railsOutput.upload.headers = auth("rails"); 62 | railsOutput.upload.type = "rails"; 63 | 64 | test("railsBlob POSTS blob attributes", async () => { 65 | const scope = nock("https://www.example.com") 66 | .post(`/rails/attributes`, { 67 | blob: { 68 | filename: /.*/, 69 | content_type: type(testFile), 70 | byte_size: size(testFile), 71 | checksum: checksum(testFile), 72 | }, 73 | }) 74 | .reply(200, {}); 75 | 76 | const output = cloneDeep(railsOutput); 77 | output.upload.url = "https://www.example.com/rails/attributes"; 78 | 79 | await railsBlob(output); 80 | expect(scope.isDone()).toBe(true); 81 | }); 82 | 83 | test("railsBlob POSTS necessary headers", async () => { 84 | const scope = nock("https://www.example.com", { 85 | reqheaders: { 86 | Accept: "application/json", 87 | "Content-Type": "application/json", 88 | ...auth("rails"), 89 | }, 90 | }) 91 | .post(`/rails/headers`) 92 | .reply(200, {}); 93 | 94 | const output = cloneDeep(railsOutput); 95 | output.upload.url = "https://www.example.com/rails/headers"; 96 | 97 | await railsBlob(output); 98 | expect(scope.isDone()).toBe(true); 99 | }); 100 | 101 | 
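// rails uploads are two-step: POST blob metadata to the rails endpoint, then PUT the file to the returned direct_upload URL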
test("railsUpload", async () => { 102 | const railsScope = nock("https://www.example.com") 103 | .post(`/rails/upload`) 104 | .reply(200, { 105 | signed_id: 1, 106 | direct_upload: { 107 | url: `https://www.example.com/upload/railsDirect`, 108 | headers: auth("upload"), 109 | }, 110 | }); 111 | 112 | const uploadScope = nock("https://www.example.com", { 113 | reqheaders: auth("upload"), 114 | }) 115 | .put(`/upload/railsDirect`) 116 | .reply(200); 117 | 118 | const output = cloneDeep(railsOutput); 119 | output.upload.url = "https://www.example.com/rails/upload"; 120 | 121 | const { signedId, upload } = await railsUpload(output); 122 | 123 | expect(signedId).toBeTruthy(); 124 | expect(railsScope.isDone()).toBe(true); 125 | 126 | await upload; 127 | expect(uploadScope.isDone()).toBe(true); 128 | }); 129 | 130 | test("upload inserts content-length and content-type", async () => { 131 | const scope = nock("https://www.example.com", { 132 | reqheaders: { 133 | "Content-Type": type(testFile), 134 | "Content-Length": size(testFile), 135 | }, 136 | }) 137 | .put(`/upload/type`) 138 | .reply(200); 139 | 140 | const output = cloneDeep(outputBase); 141 | output.upload.url = "https://www.example.com/upload/type"; 142 | 143 | await upload(output); 144 | expect(scope.isDone()).toBe(true); 145 | }); 146 | 147 | test("uploadAll handles multiple uploads of varying types", async () => { 148 | const railsScope = nock("https://www.example.com") 149 | .post(`/rails/multiple`) 150 | .reply(200, { 151 | direct_upload: { url: `https://www.example.com/upload/multiple` }, 152 | }); 153 | 154 | const uploadScope = nock("https://www.example.com") 155 | .put(`/upload/multiple`) 156 | .twice() 157 | .reply(200); 158 | 159 | const basicOutput = cloneDeep(outputBase); 160 | basicOutput.upload.url = "https://www.example.com/upload/multiple"; 161 | 162 | const railsOutputClone = cloneDeep(railsOutput); 163 | railsOutputClone.upload.url = `https://www.example.com/rails/multiple`; 164 | 165 | const outputs = [railsOutputClone, basicOutput]; 166 | await Promise.all(uploadAll(outputs)); 167 | 168 | expect(railsScope.isDone()).toBe(true); 169 | expect(uploadScope.isDone()).toBe(true); 170 | }); 171 | --------------------------------------------------------------------------------